1 ;;- Machine description for ARM for GNU compiler
2 ;; Copyright 1991, 1993, 1994, 1995, 1996, 1996, 1997, 1998, 1999, 2000,
3 ;; 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012
4 ;; Free Software Foundation, Inc.
5 ;; Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
6 ;; and Martin Simmons (@harleqn.co.uk).
7 ;; More major hacks by Richard Earnshaw (rearnsha@arm.com).
9 ;; This file is part of GCC.
11 ;; GCC is free software; you can redistribute it and/or modify it
12 ;; under the terms of the GNU General Public License as published
13 ;; by the Free Software Foundation; either version 3, or (at your
14 ;; option) any later version.
16 ;; GCC is distributed in the hope that it will be useful, but WITHOUT
17 ;; ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
18 ;; or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
19 ;; License for more details.
21 ;; You should have received a copy of the GNU General Public License
22 ;; along with GCC; see the file COPYING3. If not see
23 ;; <http://www.gnu.org/licenses/>.
25 ;;- See file "rtl.def" for documentation on define_insn, match_*, et. al.
28 ;;---------------------------------------------------------------------------
33 [(R0_REGNUM 0) ; First CORE register
34 (R1_REGNUM 1) ; Second CORE register
35 (IP_REGNUM 12) ; Scratch register
36 (SP_REGNUM 13) ; Stack pointer
37 (LR_REGNUM 14) ; Return address register
38 (PC_REGNUM 15) ; Program counter
39 (CC_REGNUM 24) ; Condition code pseudo register
40 (LAST_ARM_REGNUM 15) ;
41 (FPA_F0_REGNUM 16) ; FIRST_FPA_REGNUM
42 (FPA_F7_REGNUM 23) ; LAST_FPA_REGNUM
45 ;; 3rd operand to select_dominance_cc_mode
52 ;; conditional compare combination
63 ;; Note: sin and cos are no-longer used.
64 ;; Unspec enumerators for Neon are defined in neon.md.
67 [(UNSPEC_SIN 0) ; `sin' operation (MODE_FLOAT):
68 ; operand 0 is the result,
69 ; operand 1 the parameter.
70 (UNPSEC_COS 1) ; `cos' operation (MODE_FLOAT):
71 ; operand 0 is the result,
72 ; operand 1 the parameter.
73 (UNSPEC_PUSH_MULT 2) ; `push multiple' operation:
74 ; operand 0 is the first register,
75 ; subsequent registers are in parallel (use ...)
77 (UNSPEC_PIC_SYM 3) ; A symbol that has been treated properly for pic
78 ; usage, that is, we will add the pic_register
79 ; value to it before trying to dereference it.
80 (UNSPEC_PIC_BASE 4) ; Add PC and all but the last operand together,
81 ; The last operand is the number of a PIC_LABEL
82 ; that points at the containing instruction.
83 (UNSPEC_PRLG_STK 5) ; A special barrier that prevents frame accesses
84 ; being scheduled before the stack adjustment insn.
85 (UNSPEC_PROLOGUE_USE 6) ; As USE insns are not meaningful after reload,
86 ; this unspec is used to prevent the deletion of
87 ; instructions setting registers for EH handling
88 ; and stack frame generation. Operand 0 is the
90 (UNSPEC_CHECK_ARCH 7); Set CCs to indicate 26-bit or 32-bit mode.
91 (UNSPEC_WSHUFH 8) ; Used by the intrinsic form of the iWMMXt WSHUFH instruction.
92 (UNSPEC_WACC 9) ; Used by the intrinsic form of the iWMMXt WACC instruction.
93 (UNSPEC_TMOVMSK 10) ; Used by the intrinsic form of the iWMMXt TMOVMSK instruction.
94 (UNSPEC_WSAD 11) ; Used by the intrinsic form of the iWMMXt WSAD instruction.
95 (UNSPEC_WSADZ 12) ; Used by the intrinsic form of the iWMMXt WSADZ instruction.
96 (UNSPEC_WMACS 13) ; Used by the intrinsic form of the iWMMXt WMACS instruction.
97 (UNSPEC_WMACU 14) ; Used by the intrinsic form of the iWMMXt WMACU instruction.
98 (UNSPEC_WMACSZ 15) ; Used by the intrinsic form of the iWMMXt WMACSZ instruction.
99 (UNSPEC_WMACUZ 16) ; Used by the intrinsic form of the iWMMXt WMACUZ instruction.
100 (UNSPEC_CLRDI 17) ; Used by the intrinsic form of the iWMMXt CLRDI instruction.
101 (UNSPEC_WMADDS 18) ; Used by the intrinsic form of the iWMMXt WMADDS instruction.
102 (UNSPEC_WMADDU 19) ; Used by the intrinsic form of the iWMMXt WMADDU instruction.
103 (UNSPEC_TLS 20) ; A symbol that has been treated properly for TLS usage.
104 (UNSPEC_PIC_LABEL 21) ; A label used for PIC access that does not appear in the
105 ; instruction stream.
106 (UNSPEC_STACK_ALIGN 22) ; Doubleword aligned stack pointer. Used to
107 ; generate correct unwind information.
108 (UNSPEC_PIC_OFFSET 23) ; A symbolic 12-bit OFFSET that has been treated
109 ; correctly for PIC usage.
110 (UNSPEC_GOTSYM_OFF 24) ; The offset of the start of the the GOT from a
111 ; a given symbolic address.
112 (UNSPEC_THUMB1_CASESI 25) ; A Thumb1 compressed dispatch-table call.
113 (UNSPEC_RBIT 26) ; rbit operation.
114 (UNSPEC_SYMBOL_OFFSET 27) ; The offset of the start of the symbol from
115 ; another symbolic address.
116 (UNSPEC_MEMORY_BARRIER 28) ; Represent a memory barrier.
117 (UNSPEC_PIC_UNIFIED 29) ; Create a common pic addressing form.
118 (UNSPEC_UNALIGNED_LOAD 30)
119 (UNSPEC_UNALIGNED_STORE 31)
123 ;; UNSPEC_VOLATILE Usage:
125 (define_c_enum "unspecv" [
126 VUNSPEC_BLOCKAGE ; `blockage' insn to prevent scheduling across an
128 VUNSPEC_EPILOGUE ; `epilogue' insn, used to represent any part of the
129 ; instruction epilogue sequence that isn't expanded
130 ; into normal RTL. Used for both normal and sibcall
132 VUNSPEC_THUMB1_INTERWORK ; `prologue_thumb1_interwork' insn, used to swap
133 ; modes from arm to thumb.
134 VUNSPEC_ALIGN ; `align' insn. Used at the head of a minipool table
135 ; for inlined constants.
136 VUNSPEC_POOL_END ; `end-of-table'. Used to mark the end of a minipool
138 VUNSPEC_POOL_1 ; `pool-entry(1)'. An entry in the constant pool for
140 VUNSPEC_POOL_2 ; `pool-entry(2)'. An entry in the constant pool for
142 VUNSPEC_POOL_4 ; `pool-entry(4)'. An entry in the constant pool for
144 VUNSPEC_POOL_8 ; `pool-entry(8)'. An entry in the constant pool for
146 VUNSPEC_POOL_16 ; `pool-entry(16)'. An entry in the constant pool for
148 VUNSPEC_TMRC ; Used by the iWMMXt TMRC instruction.
149 VUNSPEC_TMCR ; Used by the iWMMXt TMCR instruction.
150 VUNSPEC_ALIGN8 ; 8-byte alignment version of VUNSPEC_ALIGN
151 VUNSPEC_WCMP_EQ ; Used by the iWMMXt WCMPEQ instructions
152 VUNSPEC_WCMP_GTU ; Used by the iWMMXt WCMPGTU instructions
153 VUNSPEC_WCMP_GT ; Used by the iwMMXT WCMPGT instructions
154 VUNSPEC_EH_RETURN ; Use to override the return address for exception
156 VUNSPEC_ATOMIC_CAS ; Represent an atomic compare swap.
157 VUNSPEC_ATOMIC_XCHG ; Represent an atomic exchange.
158 VUNSPEC_ATOMIC_OP ; Represent an atomic operation.
159 VUNSPEC_LL ; Represent a load-register-exclusive.
160 VUNSPEC_SC ; Represent a store-register-exclusive.
163 ;;---------------------------------------------------------------------------
166 ;; Processor type. This is created automatically from arm-cores.def.
167 (include "arm-tune.md")
169 ; IS_THUMB is set to 'yes' when we are generating Thumb code, and 'no' when
170 ; generating ARM code. This is used to control the length of some insn
171 ; patterns that share the same RTL in both ARM and Thumb code.
172 (define_attr "is_thumb" "no,yes" (const (symbol_ref "thumb_code")))
174 ; IS_ARCH6 is set to 'yes' when we are generating code form ARMv6.
175 (define_attr "is_arch6" "no,yes" (const (symbol_ref "arm_arch6")))
177 ; IS_THUMB1 is set to 'yes' iff we are generating Thumb-1 code.
178 (define_attr "is_thumb1" "no,yes" (const (symbol_ref "thumb1_code")))
180 ;; Operand number of an input operand that is shifted. Zero if the
181 ;; given instruction does not shift one of its input operands.
182 (define_attr "shift" "" (const_int 0))
184 ; Floating Point Unit. If we only have floating point emulation, then there
185 ; is no point in scheduling the floating point insns. (Well, for best
186 ; performance we should try and group them together).
187 (define_attr "fpu" "none,fpa,fpe2,fpe3,maverick,vfp"
188 (const (symbol_ref "arm_fpu_attr")))
190 ; LENGTH of an instruction (in bytes)
191 (define_attr "length" ""
194 ; The architecture which supports the instruction (or alternative).
195 ; This can be "a" for ARM, "t" for either of the Thumbs, "32" for
196 ; TARGET_32BIT, "t1" or "t2" to specify a specific Thumb mode. "v6"
197 ; for ARM or Thumb-2 with arm_arch6, and nov6 for ARM without
198 ; arm_arch6. This attribute is used to compute attribute "enabled",
199 ; use type "any" to enable an alternative in all cases.
200 (define_attr "arch" "any,a,t,32,t1,t2,v6,nov6,onlya8,nota8"
201 (const_string "any"))
203 (define_attr "arch_enabled" "no,yes"
204 (cond [(eq_attr "arch" "any")
207 (and (eq_attr "arch" "a")
208 (match_test "TARGET_ARM"))
211 (and (eq_attr "arch" "t")
212 (match_test "TARGET_THUMB"))
215 (and (eq_attr "arch" "t1")
216 (match_test "TARGET_THUMB1"))
219 (and (eq_attr "arch" "t2")
220 (match_test "TARGET_THUMB2"))
223 (and (eq_attr "arch" "32")
224 (match_test "TARGET_32BIT"))
227 (and (eq_attr "arch" "v6")
228 (match_test "TARGET_32BIT && arm_arch6"))
231 (and (eq_attr "arch" "nov6")
232 (match_test "TARGET_32BIT && !arm_arch6"))
235 (and (eq_attr "arch" "onlya8")
236 (eq_attr "tune" "cortexa8"))
239 (and (eq_attr "arch" "nota8")
240 (not (eq_attr "tune" "cortexa8")))
241 (const_string "yes")]
242 (const_string "no")))
244 ; Allows an insn to disable certain alternatives for reasons other than
246 (define_attr "insn_enabled" "no,yes"
247 (const_string "yes"))
249 ; Enable all alternatives that are both arch_enabled and insn_enabled.
250 (define_attr "enabled" "no,yes"
251 (if_then_else (eq_attr "insn_enabled" "yes")
252 (if_then_else (eq_attr "arch_enabled" "yes")
255 (const_string "no")))
257 ; POOL_RANGE is how far away from a constant pool entry that this insn
258 ; can be placed. If the distance is zero, then this insn will never
259 ; reference the pool.
260 ; NEG_POOL_RANGE is nonzero for insns that can reference a constant pool entry
261 ; before its address. It is set to <max_range> - (8 + <data_size>).
262 (define_attr "arm_pool_range" "" (const_int 0))
263 (define_attr "thumb2_pool_range" "" (const_int 0))
264 (define_attr "arm_neg_pool_range" "" (const_int 0))
265 (define_attr "thumb2_neg_pool_range" "" (const_int 0))
267 (define_attr "pool_range" ""
268 (cond [(eq_attr "is_thumb" "yes") (attr "thumb2_pool_range")]
269 (attr "arm_pool_range")))
270 (define_attr "neg_pool_range" ""
271 (cond [(eq_attr "is_thumb" "yes") (attr "thumb2_neg_pool_range")]
272 (attr "arm_neg_pool_range")))
274 ; An assembler sequence may clobber the condition codes without us knowing.
275 ; If such an insn references the pool, then we have no way of knowing how,
276 ; so use the most conservative value for pool_range.
277 (define_asm_attributes
278 [(set_attr "conds" "clob")
279 (set_attr "length" "4")
280 (set_attr "pool_range" "250")])
282 ;; The instruction used to implement a particular pattern. This
283 ;; information is used by pipeline descriptions to provide accurate
284 ;; scheduling information.
287 "mov,mvn,smulxy,smlaxy,smlalxy,smulwy,smlawx,mul,muls,mla,mlas,umull,umulls,umlal,umlals,smull,smulls,smlal,smlals,smlawy,smuad,smuadx,smlad,smladx,smusd,smusdx,smlsd,smlsdx,smmul,smmulr,smmla,umaal,smlald,smlsld,clz,mrs,msr,xtab,sdiv,udiv,other"
288 (const_string "other"))
290 ; TYPE attribute is used to detect floating point instructions which, if
291 ; running on a co-processor can run in parallel with other, basic instructions
292 ; If write-buffer scheduling is enabled then it can also be used in the
293 ; scheduling of writes.
295 ; Classification of each insn
296 ; Note: vfp.md has different meanings for some of these, and some further
297 ; types as well. See that file for details.
298 ; alu any alu instruction that doesn't hit memory or fp
299 ; regs or have a shifted source operand
300 ; alu_shift any data instruction that doesn't hit memory or fp
301 ; regs, but has a source operand shifted by a constant
302 ; alu_shift_reg any data instruction that doesn't hit memory or fp
303 ; regs, but has a source operand shifted by a register value
304 ; mult a multiply instruction
305 ; block blockage insn, this blocks all functional units
306 ; float a floating point arithmetic operation (subject to expansion)
307 ; fdivd DFmode floating point division
308 ; fdivs SFmode floating point division
309 ; fmul Floating point multiply
310 ; ffmul Fast floating point multiply
311 ; farith Floating point arithmetic (4 cycle)
312 ; ffarith Fast floating point arithmetic (2 cycle)
313 ; float_em a floating point arithmetic operation that is normally emulated
314 ; even on a machine with an fpa.
315 ; f_fpa_load a floating point load from memory. Only for the FPA.
316 ; f_fpa_store a floating point store to memory. Only for the FPA.
317 ; f_load[sd] A single/double load from memory. Used for VFP unit.
318 ; f_store[sd] A single/double store to memory. Used for VFP unit.
319 ; f_flag a transfer of co-processor flags to the CPSR
320 ; f_mem_r a transfer of a floating point register to a real reg via mem
321 ; r_mem_f the reverse of f_mem_r
322 ; f_2_r fast transfer float to arm (no memory needed)
323 ; r_2_f fast transfer arm to float
324 ; f_cvt convert floating<->integral
326 ; call a subroutine call
327 ; load_byte load byte(s) from memory to arm registers
328 ; load1 load 1 word from memory to arm registers
329 ; load2 load 2 words from memory to arm registers
330 ; load3 load 3 words from memory to arm registers
331 ; load4 load 4 words from memory to arm registers
332 ; store store 1 word to memory from arm registers
333 ; store2 store 2 words
334 ; store3 store 3 words
335 ; store4 store 4 (or more) words
336 ; Additions for Cirrus Maverick co-processor:
337 ; mav_farith Floating point arithmetic (4 cycle)
338 ; mav_dmult Double multiplies (7 cycle)
342 "alu,alu_shift,alu_shift_reg,mult,block,float,fdivx,fdivd,fdivs,fmul,fmuls,fmuld,fmacs,fmacd,ffmul,farith,ffarith,f_flag,float_em,f_fpa_load,f_fpa_store,f_loads,f_loadd,f_stores,f_stored,f_mem_r,r_mem_f,f_2_r,r_2_f,f_cvt,branch,call,load_byte,load1,load2,load3,load4,store1,store2,store3,store4,mav_farith,mav_dmult,fconsts,fconstd,fadds,faddd,ffariths,ffarithd,fcmps,fcmpd,fcpys"
344 (eq_attr "insn" "smulxy,smlaxy,smlalxy,smulwy,smlawx,mul,muls,mla,mlas,umull,umulls,umlal,umlals,smull,smulls,smlal,smlals")
345 (const_string "mult")
346 (const_string "alu")))
348 ; Is this an (integer side) multiply with a 64-bit result?
349 (define_attr "mul64" "no,yes"
351 (eq_attr "insn" "smlalxy,umull,umulls,umlal,umlals,smull,smulls,smlal,smlals")
353 (const_string "no")))
355 ; Load scheduling, set from the arm_ld_sched variable
356 ; initialized by arm_option_override()
357 (define_attr "ldsched" "no,yes" (const (symbol_ref "arm_ld_sched")))
359 ;; Classification of NEON instructions for scheduling purposes.
360 ;; Do not set this attribute and the "type" attribute together in
361 ;; any one instruction pattern.
362 (define_attr "neon_type"
373 neon_mul_ddd_8_16_qdd_16_8_long_32_16_long,\
374 neon_mul_qqq_8_16_32_ddd_32,\
375 neon_mul_qdd_64_32_long_qqd_16_ddd_32_scalar_64_32_long_scalar,\
376 neon_mla_ddd_8_16_qdd_16_8_long_32_16_long,\
378 neon_mla_ddd_32_qqd_16_ddd_32_scalar_qdd_64_32_long_scalar_qdd_64_32_long,\
379 neon_mla_qqq_32_qqd_32_scalar,\
380 neon_mul_ddd_16_scalar_32_16_long_scalar,\
381 neon_mul_qqd_32_scalar,\
382 neon_mla_ddd_16_scalar_qdd_32_16_long_scalar,\
387 neon_vqshl_vrshl_vqrshl_qqq,\
389 neon_fp_vadd_ddd_vabs_dd,\
390 neon_fp_vadd_qqq_vabs_qq,\
396 neon_fp_vmla_ddd_scalar,\
397 neon_fp_vmla_qqq_scalar,\
398 neon_fp_vrecps_vrsqrts_ddd,\
399 neon_fp_vrecps_vrsqrts_qqq,\
407 neon_vld2_2_regs_vld1_vld2_all_lanes,\
410 neon_vst1_1_2_regs_vst2_2_regs,\
412 neon_vst2_4_regs_vst3_vst4,\
414 neon_vld1_vld2_lane,\
415 neon_vld3_vld4_lane,\
416 neon_vst1_vst2_lane,\
417 neon_vst3_vst4_lane,\
418 neon_vld3_vld4_all_lanes,\
426 (const_string "none"))
428 ; condition codes: this one is used by final_prescan_insn to speed up
429 ; conditionalizing instructions. It saves having to scan the rtl to see if
430 ; it uses or alters the condition codes.
432 ; USE means that the condition codes are used by the insn in the process of
433 ; outputting code, this means (at present) that we can't use the insn in
436 ; SET means that the purpose of the insn is to set the condition codes in a
437 ; well defined manner.
439 ; CLOB means that the condition codes are altered in an undefined manner, if
440 ; they are altered at all
442 ; UNCONDITIONAL means the instruction can not be conditionally executed and
443 ; that the instruction does not use or alter the condition codes.
445 ; NOCOND means that the instruction does not use or alter the condition
446 ; codes but can be converted into a conditionally exectuted instruction.
448 (define_attr "conds" "use,set,clob,unconditional,nocond"
450 (ior (eq_attr "is_thumb1" "yes")
451 (eq_attr "type" "call"))
452 (const_string "clob")
453 (if_then_else (eq_attr "neon_type" "none")
454 (const_string "nocond")
455 (const_string "unconditional"))))
457 ; Predicable means that the insn can be conditionally executed based on
458 ; an automatically added predicate (additional patterns are generated by
459 ; gen...). We default to 'no' because no Thumb patterns match this rule
460 ; and not all ARM patterns do.
461 (define_attr "predicable" "no,yes" (const_string "no"))
463 ; Only model the write buffer for ARM6 and ARM7. Earlier processors don't
464 ; have one. Later ones, such as StrongARM, have write-back caches, so don't
465 ; suffer blockages enough to warrant modelling this (and it can adversely
466 ; affect the schedule).
467 (define_attr "model_wbuf" "no,yes" (const (symbol_ref "arm_tune_wbuf")))
469 ; WRITE_CONFLICT implies that a read following an unrelated write is likely
470 ; to stall the processor. Used with model_wbuf above.
471 (define_attr "write_conflict" "no,yes"
472 (if_then_else (eq_attr "type"
473 "block,float_em,f_fpa_load,f_fpa_store,f_mem_r,r_mem_f,call,load1")
475 (const_string "no")))
477 ; Classify the insns into those that take one cycle and those that take more
478 ; than one on the main cpu execution unit.
479 (define_attr "core_cycles" "single,multi"
480 (if_then_else (eq_attr "type"
481 "alu,alu_shift,float,fdivx,fdivd,fdivs,fmul,ffmul,farith,ffarith")
482 (const_string "single")
483 (const_string "multi")))
485 ;; FAR_JUMP is "yes" if a BL instruction is used to generate a branch to a
486 ;; distant label. Only applicable to Thumb code.
487 (define_attr "far_jump" "yes,no" (const_string "no"))
490 ;; The number of machine instructions this pattern expands to.
491 ;; Used for Thumb-2 conditional execution.
492 (define_attr "ce_count" "" (const_int 1))
494 ;;---------------------------------------------------------------------------
497 (include "iterators.md")
499 ;;---------------------------------------------------------------------------
502 (include "predicates.md")
503 (include "constraints.md")
505 ;;---------------------------------------------------------------------------
506 ;; Pipeline descriptions
508 (define_attr "tune_cortexr4" "yes,no"
510 (eq_attr "tune" "cortexr4,cortexr4f,cortexr5")
512 (const_string "no"))))
514 ;; True if the generic scheduling description should be used.
516 (define_attr "generic_sched" "yes,no"
518 (ior (eq_attr "tune" "fa526,fa626,fa606te,fa626te,fmp626,fa726te,arm926ejs,arm1020e,arm1026ejs,arm1136js,arm1136jfs,cortexa5,cortexa8,cortexa9,cortexa15,cortexm4")
519 (eq_attr "tune_cortexr4" "yes"))
521 (const_string "yes"))))
523 (define_attr "generic_vfp" "yes,no"
525 (and (eq_attr "fpu" "vfp")
526 (eq_attr "tune" "!arm1020e,arm1022e,cortexa5,cortexa8,cortexa9,cortexm4")
527 (eq_attr "tune_cortexr4" "no"))
529 (const_string "no"))))
531 (include "arm-generic.md")
532 (include "arm926ejs.md")
533 (include "arm1020e.md")
534 (include "arm1026ejs.md")
535 (include "arm1136jfs.md")
537 (include "fa606te.md")
538 (include "fa626te.md")
539 (include "fmp626.md")
540 (include "fa726te.md")
541 (include "cortex-a5.md")
542 (include "cortex-a8.md")
543 (include "cortex-a9.md")
544 (include "cortex-a15.md")
545 (include "cortex-r4.md")
546 (include "cortex-r4f.md")
547 (include "cortex-m4.md")
548 (include "cortex-m4-fpu.md")
552 ;;---------------------------------------------------------------------------
557 ;; Note: For DImode insns, there is normally no reason why operands should
558 ;; not be in the same register, what we don't want is for something being
559 ;; written to partially overlap something that is an input.
560 ;; Cirrus 64bit additions should not be split because we have a native
561 ;; 64bit addition instructions.
563 (define_expand "adddi3"
565 [(set (match_operand:DI 0 "s_register_operand" "")
566 (plus:DI (match_operand:DI 1 "s_register_operand" "")
567 (match_operand:DI 2 "s_register_operand" "")))
568 (clobber (reg:CC CC_REGNUM))])]
571 if (TARGET_HARD_FLOAT && TARGET_MAVERICK)
573 if (!cirrus_fp_register (operands[0], DImode))
574 operands[0] = force_reg (DImode, operands[0]);
575 if (!cirrus_fp_register (operands[1], DImode))
576 operands[1] = force_reg (DImode, operands[1]);
577 emit_insn (gen_cirrus_adddi3 (operands[0], operands[1], operands[2]));
583 if (GET_CODE (operands[1]) != REG)
584 operands[1] = force_reg (DImode, operands[1]);
585 if (GET_CODE (operands[2]) != REG)
586 operands[2] = force_reg (DImode, operands[2]);
591 (define_insn "*thumb1_adddi3"
592 [(set (match_operand:DI 0 "register_operand" "=l")
593 (plus:DI (match_operand:DI 1 "register_operand" "%0")
594 (match_operand:DI 2 "register_operand" "l")))
595 (clobber (reg:CC CC_REGNUM))
598 "add\\t%Q0, %Q0, %Q2\;adc\\t%R0, %R0, %R2"
599 [(set_attr "length" "4")]
602 (define_insn_and_split "*arm_adddi3"
603 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
604 (plus:DI (match_operand:DI 1 "s_register_operand" "%0, 0")
605 (match_operand:DI 2 "s_register_operand" "r, 0")))
606 (clobber (reg:CC CC_REGNUM))]
607 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK) && !TARGET_NEON"
609 "TARGET_32BIT && reload_completed
610 && ! (TARGET_NEON && IS_VFP_REGNUM (REGNO (operands[0])))"
611 [(parallel [(set (reg:CC_C CC_REGNUM)
612 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
614 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
615 (set (match_dup 3) (plus:SI (plus:SI (match_dup 4) (match_dup 5))
616 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
619 operands[3] = gen_highpart (SImode, operands[0]);
620 operands[0] = gen_lowpart (SImode, operands[0]);
621 operands[4] = gen_highpart (SImode, operands[1]);
622 operands[1] = gen_lowpart (SImode, operands[1]);
623 operands[5] = gen_highpart (SImode, operands[2]);
624 operands[2] = gen_lowpart (SImode, operands[2]);
626 [(set_attr "conds" "clob")
627 (set_attr "length" "8")]
630 (define_insn_and_split "*adddi_sesidi_di"
631 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
632 (plus:DI (sign_extend:DI
633 (match_operand:SI 2 "s_register_operand" "r,r"))
634 (match_operand:DI 1 "s_register_operand" "0,r")))
635 (clobber (reg:CC CC_REGNUM))]
636 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)"
638 "TARGET_32BIT && reload_completed"
639 [(parallel [(set (reg:CC_C CC_REGNUM)
640 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
642 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
643 (set (match_dup 3) (plus:SI (plus:SI (ashiftrt:SI (match_dup 2)
646 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
649 operands[3] = gen_highpart (SImode, operands[0]);
650 operands[0] = gen_lowpart (SImode, operands[0]);
651 operands[4] = gen_highpart (SImode, operands[1]);
652 operands[1] = gen_lowpart (SImode, operands[1]);
653 operands[2] = gen_lowpart (SImode, operands[2]);
655 [(set_attr "conds" "clob")
656 (set_attr "length" "8")]
659 (define_insn_and_split "*adddi_zesidi_di"
660 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
661 (plus:DI (zero_extend:DI
662 (match_operand:SI 2 "s_register_operand" "r,r"))
663 (match_operand:DI 1 "s_register_operand" "0,r")))
664 (clobber (reg:CC CC_REGNUM))]
665 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)"
667 "TARGET_32BIT && reload_completed"
668 [(parallel [(set (reg:CC_C CC_REGNUM)
669 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
671 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
672 (set (match_dup 3) (plus:SI (plus:SI (match_dup 4) (const_int 0))
673 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
676 operands[3] = gen_highpart (SImode, operands[0]);
677 operands[0] = gen_lowpart (SImode, operands[0]);
678 operands[4] = gen_highpart (SImode, operands[1]);
679 operands[1] = gen_lowpart (SImode, operands[1]);
680 operands[2] = gen_lowpart (SImode, operands[2]);
682 [(set_attr "conds" "clob")
683 (set_attr "length" "8")]
686 (define_expand "addsi3"
687 [(set (match_operand:SI 0 "s_register_operand" "")
688 (plus:SI (match_operand:SI 1 "s_register_operand" "")
689 (match_operand:SI 2 "reg_or_int_operand" "")))]
692 if (TARGET_32BIT && GET_CODE (operands[2]) == CONST_INT)
694 arm_split_constant (PLUS, SImode, NULL_RTX,
695 INTVAL (operands[2]), operands[0], operands[1],
696 optimize && can_create_pseudo_p ());
702 ; If there is a scratch available, this will be faster than synthesizing the
705 [(match_scratch:SI 3 "r")
706 (set (match_operand:SI 0 "arm_general_register_operand" "")
707 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
708 (match_operand:SI 2 "const_int_operand" "")))]
710 !(const_ok_for_arm (INTVAL (operands[2]))
711 || const_ok_for_arm (-INTVAL (operands[2])))
712 && const_ok_for_arm (~INTVAL (operands[2]))"
713 [(set (match_dup 3) (match_dup 2))
714 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))]
718 ;; The r/r/k alternative is required when reloading the address
719 ;; (plus (reg rN) (reg sp)) into (reg rN). In this case reload will
720 ;; put the duplicated register first, and not try the commutative version.
721 (define_insn_and_split "*arm_addsi3"
722 [(set (match_operand:SI 0 "s_register_operand" "=r, k,r,r, k, r, k,r, k, r")
723 (plus:SI (match_operand:SI 1 "s_register_operand" "%rk,k,r,rk,k, rk,k,rk,k, rk")
724 (match_operand:SI 2 "reg_or_int_operand" "rI,rI,k,Pj,Pj,L, L,PJ,PJ,?n")))]
734 subw%?\\t%0, %1, #%n2
735 subw%?\\t%0, %1, #%n2
738 && GET_CODE (operands[2]) == CONST_INT
739 && !const_ok_for_op (INTVAL (operands[2]), PLUS)
740 && (reload_completed || !arm_eliminable_register (operands[1]))"
741 [(clobber (const_int 0))]
743 arm_split_constant (PLUS, SImode, curr_insn,
744 INTVAL (operands[2]), operands[0],
748 [(set_attr "length" "4,4,4,4,4,4,4,4,4,16")
749 (set_attr "predicable" "yes")
750 (set_attr "arch" "*,*,*,t2,t2,*,*,t2,t2,*")]
753 (define_insn_and_split "*thumb1_addsi3"
754 [(set (match_operand:SI 0 "register_operand" "=l,l,l,*rk,*hk,l,k,l,l,l")
755 (plus:SI (match_operand:SI 1 "register_operand" "%0,0,l,*0,*0,k,k,0,l,k")
756 (match_operand:SI 2 "nonmemory_operand" "I,J,lL,*hk,*rk,M,O,Pa,Pb,Pc")))]
759 static const char * const asms[] =
761 \"add\\t%0, %0, %2\",
762 \"sub\\t%0, %0, #%n2\",
763 \"add\\t%0, %1, %2\",
764 \"add\\t%0, %0, %2\",
765 \"add\\t%0, %0, %2\",
766 \"add\\t%0, %1, %2\",
767 \"add\\t%0, %1, %2\",
772 if ((which_alternative == 2 || which_alternative == 6)
773 && GET_CODE (operands[2]) == CONST_INT
774 && INTVAL (operands[2]) < 0)
775 return \"sub\\t%0, %1, #%n2\";
776 return asms[which_alternative];
778 "&& reload_completed && CONST_INT_P (operands[2])
779 && ((operands[1] != stack_pointer_rtx
780 && (INTVAL (operands[2]) > 255 || INTVAL (operands[2]) < -255))
781 || (operands[1] == stack_pointer_rtx
782 && INTVAL (operands[2]) > 1020))"
783 [(set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))
784 (set (match_dup 0) (plus:SI (match_dup 0) (match_dup 3)))]
786 HOST_WIDE_INT offset = INTVAL (operands[2]);
787 if (operands[1] == stack_pointer_rtx)
793 else if (offset < -255)
796 operands[3] = GEN_INT (offset);
797 operands[2] = GEN_INT (INTVAL (operands[2]) - offset);
799 [(set_attr "length" "2,2,2,2,2,2,2,4,4,4")]
802 ;; Reloading and elimination of the frame pointer can
803 ;; sometimes cause this optimization to be missed.
805 [(set (match_operand:SI 0 "arm_general_register_operand" "")
806 (match_operand:SI 1 "const_int_operand" ""))
808 (plus:SI (match_dup 0) (reg:SI SP_REGNUM)))]
810 && (unsigned HOST_WIDE_INT) (INTVAL (operands[1])) < 1024
811 && (INTVAL (operands[1]) & 3) == 0"
812 [(set (match_dup 0) (plus:SI (reg:SI SP_REGNUM) (match_dup 1)))]
816 (define_insn "addsi3_compare0"
817 [(set (reg:CC_NOOV CC_REGNUM)
819 (plus:SI (match_operand:SI 1 "s_register_operand" "r, r")
820 (match_operand:SI 2 "arm_add_operand" "rI,L"))
822 (set (match_operand:SI 0 "s_register_operand" "=r,r")
823 (plus:SI (match_dup 1) (match_dup 2)))]
827 sub%.\\t%0, %1, #%n2"
828 [(set_attr "conds" "set")]
831 (define_insn "*addsi3_compare0_scratch"
832 [(set (reg:CC_NOOV CC_REGNUM)
834 (plus:SI (match_operand:SI 0 "s_register_operand" "r, r")
835 (match_operand:SI 1 "arm_add_operand" "rI,L"))
841 [(set_attr "conds" "set")
842 (set_attr "predicable" "yes")]
845 (define_insn "*compare_negsi_si"
846 [(set (reg:CC_Z CC_REGNUM)
848 (neg:SI (match_operand:SI 0 "s_register_operand" "r"))
849 (match_operand:SI 1 "s_register_operand" "r")))]
852 [(set_attr "conds" "set")
853 (set_attr "predicable" "yes")]
856 ;; This is the canonicalization of addsi3_compare0_for_combiner when the
857 ;; addend is a constant.
858 (define_insn "*cmpsi2_addneg"
859 [(set (reg:CC CC_REGNUM)
861 (match_operand:SI 1 "s_register_operand" "r,r")
862 (match_operand:SI 2 "arm_addimm_operand" "L,I")))
863 (set (match_operand:SI 0 "s_register_operand" "=r,r")
864 (plus:SI (match_dup 1)
865 (match_operand:SI 3 "arm_addimm_operand" "I,L")))]
866 "TARGET_32BIT && INTVAL (operands[2]) == -INTVAL (operands[3])"
869 sub%.\\t%0, %1, #%n3"
870 [(set_attr "conds" "set")]
873 ;; Convert the sequence
875 ;; cmn rd, #1 (equivalent to cmp rd, #-1)
879 ;; bcs dest ((unsigned)rn >= 1)
880 ;; similarly for the beq variant using bcc.
881 ;; This is a common looping idiom (while (n--))
883 [(set (match_operand:SI 0 "arm_general_register_operand" "")
884 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
886 (set (match_operand 2 "cc_register" "")
887 (compare (match_dup 0) (const_int -1)))
889 (if_then_else (match_operator 3 "equality_operator"
890 [(match_dup 2) (const_int 0)])
891 (match_operand 4 "" "")
892 (match_operand 5 "" "")))]
893 "TARGET_32BIT && peep2_reg_dead_p (3, operands[2])"
897 (match_dup 1) (const_int 1)))
898 (set (match_dup 0) (plus:SI (match_dup 1) (const_int -1)))])
900 (if_then_else (match_op_dup 3 [(match_dup 2) (const_int 0)])
903 "operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
904 operands[3] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
907 operands[2], const0_rtx);"
910 ;; The next four insns work because they compare the result with one of
911 ;; the operands, and we know that the use of the condition code is
912 ;; either GEU or LTU, so we can use the carry flag from the addition
913 ;; instead of doing the compare a second time.
914 (define_insn "*addsi3_compare_op1"
915 [(set (reg:CC_C CC_REGNUM)
917 (plus:SI (match_operand:SI 1 "s_register_operand" "r,r")
918 (match_operand:SI 2 "arm_add_operand" "rI,L"))
920 (set (match_operand:SI 0 "s_register_operand" "=r,r")
921 (plus:SI (match_dup 1) (match_dup 2)))]
925 sub%.\\t%0, %1, #%n2"
926 [(set_attr "conds" "set")]
929 (define_insn "*addsi3_compare_op2"
930 [(set (reg:CC_C CC_REGNUM)
932 (plus:SI (match_operand:SI 1 "s_register_operand" "r,r")
933 (match_operand:SI 2 "arm_add_operand" "rI,L"))
935 (set (match_operand:SI 0 "s_register_operand" "=r,r")
936 (plus:SI (match_dup 1) (match_dup 2)))]
940 sub%.\\t%0, %1, #%n2"
941 [(set_attr "conds" "set")]
944 (define_insn "*compare_addsi2_op0"
945 [(set (reg:CC_C CC_REGNUM)
947 (plus:SI (match_operand:SI 0 "s_register_operand" "r,r")
948 (match_operand:SI 1 "arm_add_operand" "rI,L"))
954 [(set_attr "conds" "set")
955 (set_attr "predicable" "yes")]
958 (define_insn "*compare_addsi2_op1"
959 [(set (reg:CC_C CC_REGNUM)
961 (plus:SI (match_operand:SI 0 "s_register_operand" "r,r")
962 (match_operand:SI 1 "arm_add_operand" "rI,L"))
968 [(set_attr "conds" "set")
969 (set_attr "predicable" "yes")]
972 (define_insn "*addsi3_carryin_<optab>"
973 [(set (match_operand:SI 0 "s_register_operand" "=r")
974 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%r")
975 (match_operand:SI 2 "arm_rhs_operand" "rI"))
976 (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0))))]
979 [(set_attr "conds" "use")]
982 (define_insn "*addsi3_carryin_alt2_<optab>"
983 [(set (match_operand:SI 0 "s_register_operand" "=r")
984 (plus:SI (plus:SI (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0))
985 (match_operand:SI 1 "s_register_operand" "%r"))
986 (match_operand:SI 2 "arm_rhs_operand" "rI")))]
989 [(set_attr "conds" "use")]
992 (define_insn "*addsi3_carryin_shift_<optab>"
993 [(set (match_operand:SI 0 "s_register_operand" "=r")
995 (match_operator:SI 2 "shift_operator"
996 [(match_operand:SI 3 "s_register_operand" "r")
997 (match_operand:SI 4 "reg_or_int_operand" "rM")])
998 (match_operand:SI 1 "s_register_operand" "r"))
999 (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0))))]
1001 "adc%?\\t%0, %1, %3%S2"
1002 [(set_attr "conds" "use")
1003 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
1004 (const_string "alu_shift")
1005 (const_string "alu_shift_reg")))]
1008 (define_insn "*addsi3_carryin_clobercc_<optab>"
1009 [(set (match_operand:SI 0 "s_register_operand" "=r")
1010 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%r")
1011 (match_operand:SI 2 "arm_rhs_operand" "rI"))
1012 (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0))))
1013 (clobber (reg:CC CC_REGNUM))]
1015 "adc%.\\t%0, %1, %2"
1016 [(set_attr "conds" "set")]
1019 (define_expand "incscc"
1020 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1021 (plus:SI (match_operator:SI 2 "arm_comparison_operator"
1022 [(match_operand:CC 3 "cc_register" "") (const_int 0)])
1023 (match_operand:SI 1 "s_register_operand" "0,?r")))]
1028 (define_insn "*arm_incscc"
1029 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1030 (plus:SI (match_operator:SI 2 "arm_comparison_operator"
1031 [(match_operand:CC 3 "cc_register" "") (const_int 0)])
1032 (match_operand:SI 1 "s_register_operand" "0,?r")))]
1036 mov%D2\\t%0, %1\;add%d2\\t%0, %1, #1"
1037 [(set_attr "conds" "use")
1038 (set_attr "length" "4,8")]
1041 ; transform ((x << y) - 1) to ~(~(x-1) << y) Where X is a constant.
1043 [(set (match_operand:SI 0 "s_register_operand" "")
1044 (plus:SI (ashift:SI (match_operand:SI 1 "const_int_operand" "")
1045 (match_operand:SI 2 "s_register_operand" ""))
1047 (clobber (match_operand:SI 3 "s_register_operand" ""))]
1049 [(set (match_dup 3) (match_dup 1))
1050 (set (match_dup 0) (not:SI (ashift:SI (match_dup 3) (match_dup 2))))]
1052 operands[1] = GEN_INT (~(INTVAL (operands[1]) - 1));
1055 (define_expand "addsf3"
1056 [(set (match_operand:SF 0 "s_register_operand" "")
1057 (plus:SF (match_operand:SF 1 "s_register_operand" "")
1058 (match_operand:SF 2 "arm_float_add_operand" "")))]
1059 "TARGET_32BIT && TARGET_HARD_FLOAT"
1062 && !cirrus_fp_register (operands[2], SFmode))
1063 operands[2] = force_reg (SFmode, operands[2]);
1066 (define_expand "adddf3"
1067 [(set (match_operand:DF 0 "s_register_operand" "")
1068 (plus:DF (match_operand:DF 1 "s_register_operand" "")
1069 (match_operand:DF 2 "arm_float_add_operand" "")))]
1070 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1073 && !cirrus_fp_register (operands[2], DFmode))
1074 operands[2] = force_reg (DFmode, operands[2]);
1077 (define_expand "subdi3"
1079 [(set (match_operand:DI 0 "s_register_operand" "")
1080 (minus:DI (match_operand:DI 1 "s_register_operand" "")
1081 (match_operand:DI 2 "s_register_operand" "")))
1082 (clobber (reg:CC CC_REGNUM))])]
1085 if (TARGET_HARD_FLOAT && TARGET_MAVERICK
1087 && cirrus_fp_register (operands[0], DImode)
1088 && cirrus_fp_register (operands[1], DImode))
1090 emit_insn (gen_cirrus_subdi3 (operands[0], operands[1], operands[2]));
1096 if (GET_CODE (operands[1]) != REG)
1097 operands[1] = force_reg (DImode, operands[1]);
1098 if (GET_CODE (operands[2]) != REG)
1099 operands[2] = force_reg (DImode, operands[2]);
1104 (define_insn "*arm_subdi3"
1105 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r,&r")
1106 (minus:DI (match_operand:DI 1 "s_register_operand" "0,r,0")
1107 (match_operand:DI 2 "s_register_operand" "r,0,0")))
1108 (clobber (reg:CC CC_REGNUM))]
1109 "TARGET_32BIT && !TARGET_NEON"
1110 "subs\\t%Q0, %Q1, %Q2\;sbc\\t%R0, %R1, %R2"
1111 [(set_attr "conds" "clob")
1112 (set_attr "length" "8")]
1115 (define_insn "*thumb_subdi3"
1116 [(set (match_operand:DI 0 "register_operand" "=l")
1117 (minus:DI (match_operand:DI 1 "register_operand" "0")
1118 (match_operand:DI 2 "register_operand" "l")))
1119 (clobber (reg:CC CC_REGNUM))]
1121 "sub\\t%Q0, %Q0, %Q2\;sbc\\t%R0, %R0, %R2"
1122 [(set_attr "length" "4")]
1125 (define_insn "*subdi_di_zesidi"
1126 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1127 (minus:DI (match_operand:DI 1 "s_register_operand" "0,r")
1129 (match_operand:SI 2 "s_register_operand" "r,r"))))
1130 (clobber (reg:CC CC_REGNUM))]
1132 "subs\\t%Q0, %Q1, %2\;sbc\\t%R0, %R1, #0"
1133 [(set_attr "conds" "clob")
1134 (set_attr "length" "8")]
1137 (define_insn "*subdi_di_sesidi"
1138 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1139 (minus:DI (match_operand:DI 1 "s_register_operand" "0,r")
1141 (match_operand:SI 2 "s_register_operand" "r,r"))))
1142 (clobber (reg:CC CC_REGNUM))]
1144 "subs\\t%Q0, %Q1, %2\;sbc\\t%R0, %R1, %2, asr #31"
1145 [(set_attr "conds" "clob")
1146 (set_attr "length" "8")]
1149 (define_insn "*subdi_zesidi_di"
1150 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1151 (minus:DI (zero_extend:DI
1152 (match_operand:SI 2 "s_register_operand" "r,r"))
1153 (match_operand:DI 1 "s_register_operand" "0,r")))
1154 (clobber (reg:CC CC_REGNUM))]
1156 "rsbs\\t%Q0, %Q1, %2\;rsc\\t%R0, %R1, #0"
1157 [(set_attr "conds" "clob")
1158 (set_attr "length" "8")]
1161 (define_insn "*subdi_sesidi_di"
1162 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1163 (minus:DI (sign_extend:DI
1164 (match_operand:SI 2 "s_register_operand" "r,r"))
1165 (match_operand:DI 1 "s_register_operand" "0,r")))
1166 (clobber (reg:CC CC_REGNUM))]
1168 "rsbs\\t%Q0, %Q1, %2\;rsc\\t%R0, %R1, %2, asr #31"
1169 [(set_attr "conds" "clob")
1170 (set_attr "length" "8")]
1173 (define_insn "*subdi_zesidi_zesidi"
1174 [(set (match_operand:DI 0 "s_register_operand" "=r")
1175 (minus:DI (zero_extend:DI
1176 (match_operand:SI 1 "s_register_operand" "r"))
1178 (match_operand:SI 2 "s_register_operand" "r"))))
1179 (clobber (reg:CC CC_REGNUM))]
1181 "subs\\t%Q0, %1, %2\;sbc\\t%R0, %1, %1"
1182 [(set_attr "conds" "clob")
1183 (set_attr "length" "8")]
1186 (define_expand "subsi3"
1187 [(set (match_operand:SI 0 "s_register_operand" "")
1188 (minus:SI (match_operand:SI 1 "reg_or_int_operand" "")
1189 (match_operand:SI 2 "s_register_operand" "")))]
1192 if (GET_CODE (operands[1]) == CONST_INT)
1196 arm_split_constant (MINUS, SImode, NULL_RTX,
1197 INTVAL (operands[1]), operands[0],
1198 operands[2], optimize && can_create_pseudo_p ());
1201 else /* TARGET_THUMB1 */
1202 operands[1] = force_reg (SImode, operands[1]);
1207 (define_insn "thumb1_subsi3_insn"
1208 [(set (match_operand:SI 0 "register_operand" "=l")
1209 (minus:SI (match_operand:SI 1 "register_operand" "l")
1210 (match_operand:SI 2 "reg_or_int_operand" "lPd")))]
1213 [(set_attr "length" "2")
1214 (set_attr "conds" "set")])
1216 ; ??? Check Thumb-2 split length
1217 (define_insn_and_split "*arm_subsi3_insn"
1218 [(set (match_operand:SI 0 "s_register_operand" "=r,r,rk,r")
1219 (minus:SI (match_operand:SI 1 "reg_or_int_operand" "rI,r,k,?n")
1220 (match_operand:SI 2 "reg_or_int_operand" "r,rI,r, r")))]
1227 "&& (GET_CODE (operands[1]) == CONST_INT
1228 && !const_ok_for_arm (INTVAL (operands[1])))"
1229 [(clobber (const_int 0))]
1231 arm_split_constant (MINUS, SImode, curr_insn,
1232 INTVAL (operands[1]), operands[0], operands[2], 0);
1235 [(set_attr "length" "4,4,4,16")
1236 (set_attr "predicable" "yes")]
1240 [(match_scratch:SI 3 "r")
1241 (set (match_operand:SI 0 "arm_general_register_operand" "")
1242 (minus:SI (match_operand:SI 1 "const_int_operand" "")
1243 (match_operand:SI 2 "arm_general_register_operand" "")))]
1245 && !const_ok_for_arm (INTVAL (operands[1]))
1246 && const_ok_for_arm (~INTVAL (operands[1]))"
1247 [(set (match_dup 3) (match_dup 1))
1248 (set (match_dup 0) (minus:SI (match_dup 3) (match_dup 2)))]
1252 (define_insn "*subsi3_compare0"
1253 [(set (reg:CC_NOOV CC_REGNUM)
1255 (minus:SI (match_operand:SI 1 "arm_rhs_operand" "r,I")
1256 (match_operand:SI 2 "arm_rhs_operand" "rI,r"))
1258 (set (match_operand:SI 0 "s_register_operand" "=r,r")
1259 (minus:SI (match_dup 1) (match_dup 2)))]
1264 [(set_attr "conds" "set")]
1267 (define_insn "*subsi3_compare"
1268 [(set (reg:CC CC_REGNUM)
1269 (compare:CC (match_operand:SI 1 "arm_rhs_operand" "r,I")
1270 (match_operand:SI 2 "arm_rhs_operand" "rI,r")))
1271 (set (match_operand:SI 0 "s_register_operand" "=r,r")
1272 (minus:SI (match_dup 1) (match_dup 2)))]
1277 [(set_attr "conds" "set")]
1280 (define_expand "decscc"
1281 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1282 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
1283 (match_operator:SI 2 "arm_comparison_operator"
1284 [(match_operand 3 "cc_register" "") (const_int 0)])))]
1289 (define_insn "*arm_decscc"
1290 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1291 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
1292 (match_operator:SI 2 "arm_comparison_operator"
1293 [(match_operand 3 "cc_register" "") (const_int 0)])))]
1297 mov%D2\\t%0, %1\;sub%d2\\t%0, %1, #1"
1298 [(set_attr "conds" "use")
1299 (set_attr "length" "*,8")]
1302 (define_expand "subsf3"
1303 [(set (match_operand:SF 0 "s_register_operand" "")
1304 (minus:SF (match_operand:SF 1 "arm_float_rhs_operand" "")
1305 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1306 "TARGET_32BIT && TARGET_HARD_FLOAT"
1308 if (TARGET_MAVERICK)
1310 if (!cirrus_fp_register (operands[1], SFmode))
1311 operands[1] = force_reg (SFmode, operands[1]);
1312 if (!cirrus_fp_register (operands[2], SFmode))
1313 operands[2] = force_reg (SFmode, operands[2]);
1317 (define_expand "subdf3"
1318 [(set (match_operand:DF 0 "s_register_operand" "")
1319 (minus:DF (match_operand:DF 1 "arm_float_rhs_operand" "")
1320 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1321 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1323 if (TARGET_MAVERICK)
1325 if (!cirrus_fp_register (operands[1], DFmode))
1326 operands[1] = force_reg (DFmode, operands[1]);
1327 if (!cirrus_fp_register (operands[2], DFmode))
1328 operands[2] = force_reg (DFmode, operands[2]);
1333 ;; Multiplication insns
1335 (define_expand "mulsi3"
1336 [(set (match_operand:SI 0 "s_register_operand" "")
1337 (mult:SI (match_operand:SI 2 "s_register_operand" "")
1338 (match_operand:SI 1 "s_register_operand" "")))]
1343 ;; Use `&' and then `0' to prevent the operands 0 and 1 being the same
1344 (define_insn "*arm_mulsi3"
1345 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1346 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r")
1347 (match_operand:SI 1 "s_register_operand" "%0,r")))]
1348 "TARGET_32BIT && !arm_arch6"
1349 "mul%?\\t%0, %2, %1"
1350 [(set_attr "insn" "mul")
1351 (set_attr "predicable" "yes")]
1354 (define_insn "*arm_mulsi3_v6"
1355 [(set (match_operand:SI 0 "s_register_operand" "=r")
1356 (mult:SI (match_operand:SI 1 "s_register_operand" "r")
1357 (match_operand:SI 2 "s_register_operand" "r")))]
1358 "TARGET_32BIT && arm_arch6"
1359 "mul%?\\t%0, %1, %2"
1360 [(set_attr "insn" "mul")
1361 (set_attr "predicable" "yes")]
1364 ; Unfortunately with the Thumb the '&'/'0' trick can fails when operands
1365 ; 1 and 2; are the same, because reload will make operand 0 match
1366 ; operand 1 without realizing that this conflicts with operand 2. We fix
1367 ; this by adding another alternative to match this case, and then `reload'
1368 ; it ourselves. This alternative must come first.
1369 (define_insn "*thumb_mulsi3"
1370 [(set (match_operand:SI 0 "register_operand" "=&l,&l,&l")
1371 (mult:SI (match_operand:SI 1 "register_operand" "%l,*h,0")
1372 (match_operand:SI 2 "register_operand" "l,l,l")))]
1373 "TARGET_THUMB1 && !arm_arch6"
1375 if (which_alternative < 2)
1376 return \"mov\\t%0, %1\;mul\\t%0, %2\";
1378 return \"mul\\t%0, %2\";
1380 [(set_attr "length" "4,4,2")
1381 (set_attr "insn" "mul")]
1384 (define_insn "*thumb_mulsi3_v6"
1385 [(set (match_operand:SI 0 "register_operand" "=l,l,l")
1386 (mult:SI (match_operand:SI 1 "register_operand" "0,l,0")
1387 (match_operand:SI 2 "register_operand" "l,0,0")))]
1388 "TARGET_THUMB1 && arm_arch6"
1393 [(set_attr "length" "2")
1394 (set_attr "insn" "mul")]
1397 (define_insn "*mulsi3_compare0"
1398 [(set (reg:CC_NOOV CC_REGNUM)
1399 (compare:CC_NOOV (mult:SI
1400 (match_operand:SI 2 "s_register_operand" "r,r")
1401 (match_operand:SI 1 "s_register_operand" "%0,r"))
1403 (set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1404 (mult:SI (match_dup 2) (match_dup 1)))]
1405 "TARGET_ARM && !arm_arch6"
1406 "mul%.\\t%0, %2, %1"
1407 [(set_attr "conds" "set")
1408 (set_attr "insn" "muls")]
1411 (define_insn "*mulsi3_compare0_v6"
1412 [(set (reg:CC_NOOV CC_REGNUM)
1413 (compare:CC_NOOV (mult:SI
1414 (match_operand:SI 2 "s_register_operand" "r")
1415 (match_operand:SI 1 "s_register_operand" "r"))
1417 (set (match_operand:SI 0 "s_register_operand" "=r")
1418 (mult:SI (match_dup 2) (match_dup 1)))]
1419 "TARGET_ARM && arm_arch6 && optimize_size"
1420 "mul%.\\t%0, %2, %1"
1421 [(set_attr "conds" "set")
1422 (set_attr "insn" "muls")]
1425 (define_insn "*mulsi_compare0_scratch"
1426 [(set (reg:CC_NOOV CC_REGNUM)
1427 (compare:CC_NOOV (mult:SI
1428 (match_operand:SI 2 "s_register_operand" "r,r")
1429 (match_operand:SI 1 "s_register_operand" "%0,r"))
1431 (clobber (match_scratch:SI 0 "=&r,&r"))]
1432 "TARGET_ARM && !arm_arch6"
1433 "mul%.\\t%0, %2, %1"
1434 [(set_attr "conds" "set")
1435 (set_attr "insn" "muls")]
1438 (define_insn "*mulsi_compare0_scratch_v6"
1439 [(set (reg:CC_NOOV CC_REGNUM)
1440 (compare:CC_NOOV (mult:SI
1441 (match_operand:SI 2 "s_register_operand" "r")
1442 (match_operand:SI 1 "s_register_operand" "r"))
1444 (clobber (match_scratch:SI 0 "=r"))]
1445 "TARGET_ARM && arm_arch6 && optimize_size"
1446 "mul%.\\t%0, %2, %1"
1447 [(set_attr "conds" "set")
1448 (set_attr "insn" "muls")]
1451 ;; Unnamed templates to match MLA instruction.
1453 (define_insn "*mulsi3addsi"
1454 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
1456 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1457 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
1458 (match_operand:SI 3 "s_register_operand" "r,r,0,0")))]
1459 "TARGET_32BIT && !arm_arch6"
1460 "mla%?\\t%0, %2, %1, %3"
1461 [(set_attr "insn" "mla")
1462 (set_attr "predicable" "yes")]
1465 (define_insn "*mulsi3addsi_v6"
1466 [(set (match_operand:SI 0 "s_register_operand" "=r")
1468 (mult:SI (match_operand:SI 2 "s_register_operand" "r")
1469 (match_operand:SI 1 "s_register_operand" "r"))
1470 (match_operand:SI 3 "s_register_operand" "r")))]
1471 "TARGET_32BIT && arm_arch6"
1472 "mla%?\\t%0, %2, %1, %3"
1473 [(set_attr "insn" "mla")
1474 (set_attr "predicable" "yes")]
1477 (define_insn "*mulsi3addsi_compare0"
1478 [(set (reg:CC_NOOV CC_REGNUM)
1481 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1482 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
1483 (match_operand:SI 3 "s_register_operand" "r,r,0,0"))
1485 (set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
1486 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
1488 "TARGET_ARM && arm_arch6"
1489 "mla%.\\t%0, %2, %1, %3"
1490 [(set_attr "conds" "set")
1491 (set_attr "insn" "mlas")]
1494 (define_insn "*mulsi3addsi_compare0_v6"
1495 [(set (reg:CC_NOOV CC_REGNUM)
1498 (match_operand:SI 2 "s_register_operand" "r")
1499 (match_operand:SI 1 "s_register_operand" "r"))
1500 (match_operand:SI 3 "s_register_operand" "r"))
1502 (set (match_operand:SI 0 "s_register_operand" "=r")
1503 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
1505 "TARGET_ARM && arm_arch6 && optimize_size"
1506 "mla%.\\t%0, %2, %1, %3"
1507 [(set_attr "conds" "set")
1508 (set_attr "insn" "mlas")]
1511 (define_insn "*mulsi3addsi_compare0_scratch"
1512 [(set (reg:CC_NOOV CC_REGNUM)
1515 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1516 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
1517 (match_operand:SI 3 "s_register_operand" "?r,r,0,0"))
1519 (clobber (match_scratch:SI 0 "=&r,&r,&r,&r"))]
1520 "TARGET_ARM && !arm_arch6"
1521 "mla%.\\t%0, %2, %1, %3"
1522 [(set_attr "conds" "set")
1523 (set_attr "insn" "mlas")]
1526 (define_insn "*mulsi3addsi_compare0_scratch_v6"
1527 [(set (reg:CC_NOOV CC_REGNUM)
1530 (match_operand:SI 2 "s_register_operand" "r")
1531 (match_operand:SI 1 "s_register_operand" "r"))
1532 (match_operand:SI 3 "s_register_operand" "r"))
1534 (clobber (match_scratch:SI 0 "=r"))]
1535 "TARGET_ARM && arm_arch6 && optimize_size"
1536 "mla%.\\t%0, %2, %1, %3"
1537 [(set_attr "conds" "set")
1538 (set_attr "insn" "mlas")]
1541 (define_insn "*mulsi3subsi"
1542 [(set (match_operand:SI 0 "s_register_operand" "=r")
1544 (match_operand:SI 3 "s_register_operand" "r")
1545 (mult:SI (match_operand:SI 2 "s_register_operand" "r")
1546 (match_operand:SI 1 "s_register_operand" "r"))))]
1547 "TARGET_32BIT && arm_arch_thumb2"
1548 "mls%?\\t%0, %2, %1, %3"
1549 [(set_attr "insn" "mla")
1550 (set_attr "predicable" "yes")]
1553 (define_expand "maddsidi4"
1554 [(set (match_operand:DI 0 "s_register_operand" "")
1557 (sign_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1558 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1559 (match_operand:DI 3 "s_register_operand" "")))]
1560 "TARGET_32BIT && arm_arch3m"
1563 (define_insn "*mulsidi3adddi"
1564 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1567 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "%r"))
1568 (sign_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1569 (match_operand:DI 1 "s_register_operand" "0")))]
1570 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1571 "smlal%?\\t%Q0, %R0, %3, %2"
1572 [(set_attr "insn" "smlal")
1573 (set_attr "predicable" "yes")]
1576 (define_insn "*mulsidi3adddi_v6"
1577 [(set (match_operand:DI 0 "s_register_operand" "=r")
1580 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))
1581 (sign_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1582 (match_operand:DI 1 "s_register_operand" "0")))]
1583 "TARGET_32BIT && arm_arch6"
1584 "smlal%?\\t%Q0, %R0, %3, %2"
1585 [(set_attr "insn" "smlal")
1586 (set_attr "predicable" "yes")]
1589 ;; 32x32->64 widening multiply.
1590 ;; As with mulsi3, the only difference between the v3-5 and v6+
1591 ;; versions of these patterns is the requirement that the output not
1592 ;; overlap the inputs, but that still means we have to have a named
1593 ;; expander and two different starred insns.
1595 (define_expand "mulsidi3"
1596 [(set (match_operand:DI 0 "s_register_operand" "")
1598 (sign_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1599 (sign_extend:DI (match_operand:SI 2 "s_register_operand" ""))))]
1600 "TARGET_32BIT && arm_arch3m"
1604 (define_insn "*mulsidi3_nov6"
1605 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1607 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "%r"))
1608 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1609 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1610 "smull%?\\t%Q0, %R0, %1, %2"
1611 [(set_attr "insn" "smull")
1612 (set_attr "predicable" "yes")]
1615 (define_insn "*mulsidi3_v6"
1616 [(set (match_operand:DI 0 "s_register_operand" "=r")
1618 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1619 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1620 "TARGET_32BIT && arm_arch6"
1621 "smull%?\\t%Q0, %R0, %1, %2"
1622 [(set_attr "insn" "smull")
1623 (set_attr "predicable" "yes")]
1626 (define_expand "umulsidi3"
1627 [(set (match_operand:DI 0 "s_register_operand" "")
1629 (zero_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1630 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))))]
1631 "TARGET_32BIT && arm_arch3m"
1635 (define_insn "*umulsidi3_nov6"
1636 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1638 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "%r"))
1639 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1640 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1641 "umull%?\\t%Q0, %R0, %1, %2"
1642 [(set_attr "insn" "umull")
1643 (set_attr "predicable" "yes")]
1646 (define_insn "*umulsidi3_v6"
1647 [(set (match_operand:DI 0 "s_register_operand" "=r")
1649 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1650 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1651 "TARGET_32BIT && arm_arch6"
1652 "umull%?\\t%Q0, %R0, %1, %2"
1653 [(set_attr "insn" "umull")
1654 (set_attr "predicable" "yes")]
1657 (define_expand "umaddsidi4"
1658 [(set (match_operand:DI 0 "s_register_operand" "")
1661 (zero_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1662 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1663 (match_operand:DI 3 "s_register_operand" "")))]
1664 "TARGET_32BIT && arm_arch3m"
1667 (define_insn "*umulsidi3adddi"
1668 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1671 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "%r"))
1672 (zero_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1673 (match_operand:DI 1 "s_register_operand" "0")))]
1674 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1675 "umlal%?\\t%Q0, %R0, %3, %2"
1676 [(set_attr "insn" "umlal")
1677 (set_attr "predicable" "yes")]
1680 (define_insn "*umulsidi3adddi_v6"
1681 [(set (match_operand:DI 0 "s_register_operand" "=r")
1684 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))
1685 (zero_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1686 (match_operand:DI 1 "s_register_operand" "0")))]
1687 "TARGET_32BIT && arm_arch6"
1688 "umlal%?\\t%Q0, %R0, %3, %2"
1689 [(set_attr "insn" "umlal")
1690 (set_attr "predicable" "yes")]
1693 (define_expand "smulsi3_highpart"
1695 [(set (match_operand:SI 0 "s_register_operand" "")
1699 (sign_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1700 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1702 (clobber (match_scratch:SI 3 ""))])]
1703 "TARGET_32BIT && arm_arch3m"
1707 (define_insn "*smulsi3_highpart_nov6"
1708 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1712 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "%0,r"))
1713 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r,r")))
1715 (clobber (match_scratch:SI 3 "=&r,&r"))]
1716 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1717 "smull%?\\t%3, %0, %2, %1"
1718 [(set_attr "insn" "smull")
1719 (set_attr "predicable" "yes")]
1722 (define_insn "*smulsi3_highpart_v6"
1723 [(set (match_operand:SI 0 "s_register_operand" "=r")
1727 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1728 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r")))
1730 (clobber (match_scratch:SI 3 "=r"))]
1731 "TARGET_32BIT && arm_arch6"
1732 "smull%?\\t%3, %0, %2, %1"
1733 [(set_attr "insn" "smull")
1734 (set_attr "predicable" "yes")]
1737 (define_expand "umulsi3_highpart"
1739 [(set (match_operand:SI 0 "s_register_operand" "")
1743 (zero_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1744 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1746 (clobber (match_scratch:SI 3 ""))])]
1747 "TARGET_32BIT && arm_arch3m"
1751 (define_insn "*umulsi3_highpart_nov6"
1752 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1756 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "%0,r"))
1757 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r,r")))
1759 (clobber (match_scratch:SI 3 "=&r,&r"))]
1760 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1761 "umull%?\\t%3, %0, %2, %1"
1762 [(set_attr "insn" "umull")
1763 (set_attr "predicable" "yes")]
1766 (define_insn "*umulsi3_highpart_v6"
1767 [(set (match_operand:SI 0 "s_register_operand" "=r")
1771 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1772 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r")))
1774 (clobber (match_scratch:SI 3 "=r"))]
1775 "TARGET_32BIT && arm_arch6"
1776 "umull%?\\t%3, %0, %2, %1"
1777 [(set_attr "insn" "umull")
1778 (set_attr "predicable" "yes")]
1781 (define_insn "mulhisi3"
1782 [(set (match_operand:SI 0 "s_register_operand" "=r")
1783 (mult:SI (sign_extend:SI
1784 (match_operand:HI 1 "s_register_operand" "%r"))
1786 (match_operand:HI 2 "s_register_operand" "r"))))]
1787 "TARGET_DSP_MULTIPLY"
1788 "smulbb%?\\t%0, %1, %2"
1789 [(set_attr "insn" "smulxy")
1790 (set_attr "predicable" "yes")]
1793 (define_insn "*mulhisi3tb"
1794 [(set (match_operand:SI 0 "s_register_operand" "=r")
1795 (mult:SI (ashiftrt:SI
1796 (match_operand:SI 1 "s_register_operand" "r")
1799 (match_operand:HI 2 "s_register_operand" "r"))))]
1800 "TARGET_DSP_MULTIPLY"
1801 "smultb%?\\t%0, %1, %2"
1802 [(set_attr "insn" "smulxy")
1803 (set_attr "predicable" "yes")]
1806 (define_insn "*mulhisi3bt"
1807 [(set (match_operand:SI 0 "s_register_operand" "=r")
1808 (mult:SI (sign_extend:SI
1809 (match_operand:HI 1 "s_register_operand" "r"))
1811 (match_operand:SI 2 "s_register_operand" "r")
1813 "TARGET_DSP_MULTIPLY"
1814 "smulbt%?\\t%0, %1, %2"
1815 [(set_attr "insn" "smulxy")
1816 (set_attr "predicable" "yes")]
1819 (define_insn "*mulhisi3tt"
1820 [(set (match_operand:SI 0 "s_register_operand" "=r")
1821 (mult:SI (ashiftrt:SI
1822 (match_operand:SI 1 "s_register_operand" "r")
1825 (match_operand:SI 2 "s_register_operand" "r")
1827 "TARGET_DSP_MULTIPLY"
1828 "smultt%?\\t%0, %1, %2"
1829 [(set_attr "insn" "smulxy")
1830 (set_attr "predicable" "yes")]
1833 (define_insn "maddhisi4"
1834 [(set (match_operand:SI 0 "s_register_operand" "=r")
1835 (plus:SI (mult:SI (sign_extend:SI
1836 (match_operand:HI 1 "s_register_operand" "r"))
1838 (match_operand:HI 2 "s_register_operand" "r")))
1839 (match_operand:SI 3 "s_register_operand" "r")))]
1840 "TARGET_DSP_MULTIPLY"
1841 "smlabb%?\\t%0, %1, %2, %3"
1842 [(set_attr "insn" "smlaxy")
1843 (set_attr "predicable" "yes")]
1846 ;; Note: there is no maddhisi4ibt because this one is canonical form
1847 (define_insn "*maddhisi4tb"
1848 [(set (match_operand:SI 0 "s_register_operand" "=r")
1849 (plus:SI (mult:SI (ashiftrt:SI
1850 (match_operand:SI 1 "s_register_operand" "r")
1853 (match_operand:HI 2 "s_register_operand" "r")))
1854 (match_operand:SI 3 "s_register_operand" "r")))]
1855 "TARGET_DSP_MULTIPLY"
1856 "smlatb%?\\t%0, %1, %2, %3"
1857 [(set_attr "insn" "smlaxy")
1858 (set_attr "predicable" "yes")]
1861 (define_insn "*maddhisi4tt"
1862 [(set (match_operand:SI 0 "s_register_operand" "=r")
1863 (plus:SI (mult:SI (ashiftrt:SI
1864 (match_operand:SI 1 "s_register_operand" "r")
1867 (match_operand:SI 2 "s_register_operand" "r")
1869 (match_operand:SI 3 "s_register_operand" "r")))]
1870 "TARGET_DSP_MULTIPLY"
1871 "smlatt%?\\t%0, %1, %2, %3"
1872 [(set_attr "insn" "smlaxy")
1873 (set_attr "predicable" "yes")]
1876 (define_insn "maddhidi4"
1877 [(set (match_operand:DI 0 "s_register_operand" "=r")
1879 (mult:DI (sign_extend:DI
1880 (match_operand:HI 1 "s_register_operand" "r"))
1882 (match_operand:HI 2 "s_register_operand" "r")))
1883 (match_operand:DI 3 "s_register_operand" "0")))]
1884 "TARGET_DSP_MULTIPLY"
1885 "smlalbb%?\\t%Q0, %R0, %1, %2"
1886 [(set_attr "insn" "smlalxy")
1887 (set_attr "predicable" "yes")])
1889 ;; Note: there is no maddhidi4ibt because this one is canonical form
1890 (define_insn "*maddhidi4tb"
1891 [(set (match_operand:DI 0 "s_register_operand" "=r")
1893 (mult:DI (sign_extend:DI
1895 (match_operand:SI 1 "s_register_operand" "r")
1898 (match_operand:HI 2 "s_register_operand" "r")))
1899 (match_operand:DI 3 "s_register_operand" "0")))]
1900 "TARGET_DSP_MULTIPLY"
1901 "smlaltb%?\\t%Q0, %R0, %1, %2"
1902 [(set_attr "insn" "smlalxy")
1903 (set_attr "predicable" "yes")])
1905 (define_insn "*maddhidi4tt"
1906 [(set (match_operand:DI 0 "s_register_operand" "=r")
1908 (mult:DI (sign_extend:DI
1910 (match_operand:SI 1 "s_register_operand" "r")
1914 (match_operand:SI 2 "s_register_operand" "r")
1916 (match_operand:DI 3 "s_register_operand" "0")))]
1917 "TARGET_DSP_MULTIPLY"
1918 "smlaltt%?\\t%Q0, %R0, %1, %2"
1919 [(set_attr "insn" "smlalxy")
1920 (set_attr "predicable" "yes")])
1922 (define_expand "mulsf3"
1923 [(set (match_operand:SF 0 "s_register_operand" "")
1924 (mult:SF (match_operand:SF 1 "s_register_operand" "")
1925 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1926 "TARGET_32BIT && TARGET_HARD_FLOAT"
1929 && !cirrus_fp_register (operands[2], SFmode))
1930 operands[2] = force_reg (SFmode, operands[2]);
1933 (define_expand "muldf3"
1934 [(set (match_operand:DF 0 "s_register_operand" "")
1935 (mult:DF (match_operand:DF 1 "s_register_operand" "")
1936 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1937 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1940 && !cirrus_fp_register (operands[2], DFmode))
1941 operands[2] = force_reg (DFmode, operands[2]);
1946 (define_expand "divsf3"
1947 [(set (match_operand:SF 0 "s_register_operand" "")
1948 (div:SF (match_operand:SF 1 "arm_float_rhs_operand" "")
1949 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1950 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
1953 (define_expand "divdf3"
1954 [(set (match_operand:DF 0 "s_register_operand" "")
1955 (div:DF (match_operand:DF 1 "arm_float_rhs_operand" "")
1956 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1957 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP_DOUBLE)"
1962 (define_expand "modsf3"
1963 [(set (match_operand:SF 0 "s_register_operand" "")
1964 (mod:SF (match_operand:SF 1 "s_register_operand" "")
1965 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1966 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
1969 (define_expand "moddf3"
1970 [(set (match_operand:DF 0 "s_register_operand" "")
1971 (mod:DF (match_operand:DF 1 "s_register_operand" "")
1972 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1973 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
1976 ;; Boolean and,ior,xor insns
1978 ;; Split up double word logical operations
1980 ;; Split up simple DImode logical operations. Simply perform the logical
1981 ;; operation on the upper and lower halves of the registers.
1983 [(set (match_operand:DI 0 "s_register_operand" "")
1984 (match_operator:DI 6 "logical_binary_operator"
1985 [(match_operand:DI 1 "s_register_operand" "")
1986 (match_operand:DI 2 "s_register_operand" "")]))]
1987 "TARGET_32BIT && reload_completed
1988 && ! (TARGET_NEON && IS_VFP_REGNUM (REGNO (operands[0])))
1989 && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
1990 [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
1991 (set (match_dup 3) (match_op_dup:SI 6 [(match_dup 4) (match_dup 5)]))]
1994 operands[3] = gen_highpart (SImode, operands[0]);
1995 operands[0] = gen_lowpart (SImode, operands[0]);
1996 operands[4] = gen_highpart (SImode, operands[1]);
1997 operands[1] = gen_lowpart (SImode, operands[1]);
1998 operands[5] = gen_highpart (SImode, operands[2]);
1999 operands[2] = gen_lowpart (SImode, operands[2]);
2004 [(set (match_operand:DI 0 "s_register_operand" "")
2005 (match_operator:DI 6 "logical_binary_operator"
2006 [(sign_extend:DI (match_operand:SI 2 "s_register_operand" ""))
2007 (match_operand:DI 1 "s_register_operand" "")]))]
2008 "TARGET_32BIT && reload_completed"
2009 [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
2010 (set (match_dup 3) (match_op_dup:SI 6
2011 [(ashiftrt:SI (match_dup 2) (const_int 31))
2015 operands[3] = gen_highpart (SImode, operands[0]);
2016 operands[0] = gen_lowpart (SImode, operands[0]);
2017 operands[4] = gen_highpart (SImode, operands[1]);
2018 operands[1] = gen_lowpart (SImode, operands[1]);
2019 operands[5] = gen_highpart (SImode, operands[2]);
2020 operands[2] = gen_lowpart (SImode, operands[2]);
2024 ;; The zero extend of operand 2 means we can just copy the high part of
2025 ;; operand1 into operand0.
2027 [(set (match_operand:DI 0 "s_register_operand" "")
2029 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))
2030 (match_operand:DI 1 "s_register_operand" "")))]
2031 "TARGET_32BIT && operands[0] != operands[1] && reload_completed"
2032 [(set (match_dup 0) (ior:SI (match_dup 1) (match_dup 2)))
2033 (set (match_dup 3) (match_dup 4))]
2036 operands[4] = gen_highpart (SImode, operands[1]);
2037 operands[3] = gen_highpart (SImode, operands[0]);
2038 operands[0] = gen_lowpart (SImode, operands[0]);
2039 operands[1] = gen_lowpart (SImode, operands[1]);
2043 ;; The zero extend of operand 2 means we can just copy the high part of
2044 ;; operand1 into operand0.
2046 [(set (match_operand:DI 0 "s_register_operand" "")
2048 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))
2049 (match_operand:DI 1 "s_register_operand" "")))]
2050 "TARGET_32BIT && operands[0] != operands[1] && reload_completed"
2051 [(set (match_dup 0) (xor:SI (match_dup 1) (match_dup 2)))
2052 (set (match_dup 3) (match_dup 4))]
2055 operands[4] = gen_highpart (SImode, operands[1]);
2056 operands[3] = gen_highpart (SImode, operands[0]);
2057 operands[0] = gen_lowpart (SImode, operands[0]);
2058 operands[1] = gen_lowpart (SImode, operands[1]);
2062 (define_expand "anddi3"
2063 [(set (match_operand:DI 0 "s_register_operand" "")
2064 (and:DI (match_operand:DI 1 "s_register_operand" "")
2065 (match_operand:DI 2 "neon_inv_logic_op2" "")))]
2070 (define_insn "*anddi3_insn"
2071 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2072 (and:DI (match_operand:DI 1 "s_register_operand" "%0,r")
2073 (match_operand:DI 2 "s_register_operand" "r,r")))]
2074 "TARGET_32BIT && !TARGET_IWMMXT && !TARGET_NEON"
2076 [(set_attr "length" "8")]
2079 (define_insn_and_split "*anddi_zesidi_di"
2080 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2081 (and:DI (zero_extend:DI
2082 (match_operand:SI 2 "s_register_operand" "r,r"))
2083 (match_operand:DI 1 "s_register_operand" "0,r")))]
2086 "TARGET_32BIT && reload_completed"
2087 ; The zero extend of operand 2 clears the high word of the output
2089 [(set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))
2090 (set (match_dup 3) (const_int 0))]
2093 operands[3] = gen_highpart (SImode, operands[0]);
2094 operands[0] = gen_lowpart (SImode, operands[0]);
2095 operands[1] = gen_lowpart (SImode, operands[1]);
2097 [(set_attr "length" "8")]
2100 (define_insn "*anddi_sesdi_di"
2101 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2102 (and:DI (sign_extend:DI
2103 (match_operand:SI 2 "s_register_operand" "r,r"))
2104 (match_operand:DI 1 "s_register_operand" "0,r")))]
2107 [(set_attr "length" "8")]
2110 (define_expand "andsi3"
2111 [(set (match_operand:SI 0 "s_register_operand" "")
2112 (and:SI (match_operand:SI 1 "s_register_operand" "")
2113 (match_operand:SI 2 "reg_or_int_operand" "")))]
2118 if (GET_CODE (operands[2]) == CONST_INT)
2120 if (INTVAL (operands[2]) == 255 && arm_arch6)
2122 operands[1] = convert_to_mode (QImode, operands[1], 1);
2123 emit_insn (gen_thumb2_zero_extendqisi2_v6 (operands[0],
2127 arm_split_constant (AND, SImode, NULL_RTX,
2128 INTVAL (operands[2]), operands[0],
2130 optimize && can_create_pseudo_p ());
2135 else /* TARGET_THUMB1 */
2137 if (GET_CODE (operands[2]) != CONST_INT)
2139 rtx tmp = force_reg (SImode, operands[2]);
2140 if (rtx_equal_p (operands[0], operands[1]))
2144 operands[2] = operands[1];
2152 if (((unsigned HOST_WIDE_INT) ~INTVAL (operands[2])) < 256)
2154 operands[2] = force_reg (SImode,
2155 GEN_INT (~INTVAL (operands[2])));
2157 emit_insn (gen_thumb1_bicsi3 (operands[0], operands[2], operands[1]));
2162 for (i = 9; i <= 31; i++)
2164 if ((((HOST_WIDE_INT) 1) << i) - 1 == INTVAL (operands[2]))
2166 emit_insn (gen_extzv (operands[0], operands[1], GEN_INT (i),
2170 else if ((((HOST_WIDE_INT) 1) << i) - 1
2171 == ~INTVAL (operands[2]))
2173 rtx shift = GEN_INT (i);
2174 rtx reg = gen_reg_rtx (SImode);
2176 emit_insn (gen_lshrsi3 (reg, operands[1], shift));
2177 emit_insn (gen_ashlsi3 (operands[0], reg, shift));
2183 operands[2] = force_reg (SImode, operands[2]);
2189 ; ??? Check split length for Thumb-2
2190 (define_insn_and_split "*arm_andsi3_insn"
2191 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
2192 (and:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
2193 (match_operand:SI 2 "reg_or_int_operand" "rI,K,?n")))]
2197 bic%?\\t%0, %1, #%B2
2200 && GET_CODE (operands[2]) == CONST_INT
2201 && !(const_ok_for_arm (INTVAL (operands[2]))
2202 || const_ok_for_arm (~INTVAL (operands[2])))"
2203 [(clobber (const_int 0))]
2205 arm_split_constant (AND, SImode, curr_insn,
2206 INTVAL (operands[2]), operands[0], operands[1], 0);
2209 [(set_attr "length" "4,4,16")
2210 (set_attr "predicable" "yes")]
2213 (define_insn "*thumb1_andsi3_insn"
2214 [(set (match_operand:SI 0 "register_operand" "=l")
2215 (and:SI (match_operand:SI 1 "register_operand" "%0")
2216 (match_operand:SI 2 "register_operand" "l")))]
2219 [(set_attr "length" "2")
2220 (set_attr "conds" "set")])
2222 (define_insn "*andsi3_compare0"
2223 [(set (reg:CC_NOOV CC_REGNUM)
2225 (and:SI (match_operand:SI 1 "s_register_operand" "r,r")
2226 (match_operand:SI 2 "arm_not_operand" "rI,K"))
2228 (set (match_operand:SI 0 "s_register_operand" "=r,r")
2229 (and:SI (match_dup 1) (match_dup 2)))]
2233 bic%.\\t%0, %1, #%B2"
2234 [(set_attr "conds" "set")]
2237 (define_insn "*andsi3_compare0_scratch"
2238 [(set (reg:CC_NOOV CC_REGNUM)
2240 (and:SI (match_operand:SI 0 "s_register_operand" "r,r")
2241 (match_operand:SI 1 "arm_not_operand" "rI,K"))
2243 (clobber (match_scratch:SI 2 "=X,r"))]
2247 bic%.\\t%2, %0, #%B1"
2248 [(set_attr "conds" "set")]
2251 (define_insn "*zeroextractsi_compare0_scratch"
2252 [(set (reg:CC_NOOV CC_REGNUM)
2253 (compare:CC_NOOV (zero_extract:SI
2254 (match_operand:SI 0 "s_register_operand" "r")
2255 (match_operand 1 "const_int_operand" "n")
2256 (match_operand 2 "const_int_operand" "n"))
2259 && (INTVAL (operands[2]) >= 0 && INTVAL (operands[2]) < 32
2260 && INTVAL (operands[1]) > 0
2261 && INTVAL (operands[1]) + (INTVAL (operands[2]) & 1) <= 8
2262 && INTVAL (operands[1]) + INTVAL (operands[2]) <= 32)"
2264 operands[1] = GEN_INT (((1 << INTVAL (operands[1])) - 1)
2265 << INTVAL (operands[2]));
2266 output_asm_insn (\"tst%?\\t%0, %1\", operands);
2269 [(set_attr "conds" "set")
2270 (set_attr "predicable" "yes")]
2273 (define_insn_and_split "*ne_zeroextractsi"
2274 [(set (match_operand:SI 0 "s_register_operand" "=r")
2275 (ne:SI (zero_extract:SI
2276 (match_operand:SI 1 "s_register_operand" "r")
2277 (match_operand:SI 2 "const_int_operand" "n")
2278 (match_operand:SI 3 "const_int_operand" "n"))
2280 (clobber (reg:CC CC_REGNUM))]
2282 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2283 && INTVAL (operands[2]) > 0
2284 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2285 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
2288 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2289 && INTVAL (operands[2]) > 0
2290 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2291 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
2292 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2293 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
2295 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
2297 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2298 (match_dup 0) (const_int 1)))]
2300 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
2301 << INTVAL (operands[3]));
2303 [(set_attr "conds" "clob")
2304 (set (attr "length")
2305 (if_then_else (eq_attr "is_thumb" "yes")
2310 (define_insn_and_split "*ne_zeroextractsi_shifted"
2311 [(set (match_operand:SI 0 "s_register_operand" "=r")
2312 (ne:SI (zero_extract:SI
2313 (match_operand:SI 1 "s_register_operand" "r")
2314 (match_operand:SI 2 "const_int_operand" "n")
2317 (clobber (reg:CC CC_REGNUM))]
2321 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2322 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2324 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2326 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2327 (match_dup 0) (const_int 1)))]
2329 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2331 [(set_attr "conds" "clob")
2332 (set_attr "length" "8")]
2335 (define_insn_and_split "*ite_ne_zeroextractsi"
2336 [(set (match_operand:SI 0 "s_register_operand" "=r")
2337 (if_then_else:SI (ne (zero_extract:SI
2338 (match_operand:SI 1 "s_register_operand" "r")
2339 (match_operand:SI 2 "const_int_operand" "n")
2340 (match_operand:SI 3 "const_int_operand" "n"))
2342 (match_operand:SI 4 "arm_not_operand" "rIK")
2344 (clobber (reg:CC CC_REGNUM))]
2346 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2347 && INTVAL (operands[2]) > 0
2348 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2349 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2350 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2353 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2354 && INTVAL (operands[2]) > 0
2355 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2356 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2357 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2358 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2359 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
2361 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
2363 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2364 (match_dup 0) (match_dup 4)))]
2366 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
2367 << INTVAL (operands[3]));
2369 [(set_attr "conds" "clob")
2370 (set_attr "length" "8")]
2373 (define_insn_and_split "*ite_ne_zeroextractsi_shifted"
2374 [(set (match_operand:SI 0 "s_register_operand" "=r")
2375 (if_then_else:SI (ne (zero_extract:SI
2376 (match_operand:SI 1 "s_register_operand" "r")
2377 (match_operand:SI 2 "const_int_operand" "n")
2380 (match_operand:SI 3 "arm_not_operand" "rIK")
2382 (clobber (reg:CC CC_REGNUM))]
2383 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2385 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2386 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2387 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2389 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2391 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2392 (match_dup 0) (match_dup 3)))]
2394 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2396 [(set_attr "conds" "clob")
2397 (set_attr "length" "8")]
2401 [(set (match_operand:SI 0 "s_register_operand" "")
2402 (zero_extract:SI (match_operand:SI 1 "s_register_operand" "")
2403 (match_operand:SI 2 "const_int_operand" "")
2404 (match_operand:SI 3 "const_int_operand" "")))
2405 (clobber (match_operand:SI 4 "s_register_operand" ""))]
2407 [(set (match_dup 4) (ashift:SI (match_dup 1) (match_dup 2)))
2408 (set (match_dup 0) (lshiftrt:SI (match_dup 4) (match_dup 3)))]
2410 HOST_WIDE_INT temp = INTVAL (operands[2]);
2412 operands[2] = GEN_INT (32 - temp - INTVAL (operands[3]));
2413 operands[3] = GEN_INT (32 - temp);
2417 ;; ??? Use Thumb-2 has bitfield insert/extract instructions.
2419 [(set (match_operand:SI 0 "s_register_operand" "")
2420 (match_operator:SI 1 "shiftable_operator"
2421 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2422 (match_operand:SI 3 "const_int_operand" "")
2423 (match_operand:SI 4 "const_int_operand" ""))
2424 (match_operand:SI 5 "s_register_operand" "")]))
2425 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2427 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2430 [(lshiftrt:SI (match_dup 6) (match_dup 4))
2433 HOST_WIDE_INT temp = INTVAL (operands[3]);
2435 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2436 operands[4] = GEN_INT (32 - temp);
2441 [(set (match_operand:SI 0 "s_register_operand" "")
2442 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "")
2443 (match_operand:SI 2 "const_int_operand" "")
2444 (match_operand:SI 3 "const_int_operand" "")))]
2446 [(set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))
2447 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (match_dup 3)))]
2449 HOST_WIDE_INT temp = INTVAL (operands[2]);
2451 operands[2] = GEN_INT (32 - temp - INTVAL (operands[3]));
2452 operands[3] = GEN_INT (32 - temp);
2457 [(set (match_operand:SI 0 "s_register_operand" "")
2458 (match_operator:SI 1 "shiftable_operator"
2459 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2460 (match_operand:SI 3 "const_int_operand" "")
2461 (match_operand:SI 4 "const_int_operand" ""))
2462 (match_operand:SI 5 "s_register_operand" "")]))
2463 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2465 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2468 [(ashiftrt:SI (match_dup 6) (match_dup 4))
2471 HOST_WIDE_INT temp = INTVAL (operands[3]);
2473 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2474 operands[4] = GEN_INT (32 - temp);
2478 ;;; ??? This pattern is bogus. If operand3 has bits outside the range
2479 ;;; represented by the bitfield, then this will produce incorrect results.
2480 ;;; Somewhere, the value needs to be truncated. On targets like the m68k,
2481 ;;; which have a real bit-field insert instruction, the truncation happens
2482 ;;; in the bit-field insert instruction itself. Since arm does not have a
2483 ;;; bit-field insert instruction, we would have to emit code here to truncate
2484 ;;; the value before we insert. This loses some of the advantage of having
2485 ;;; this insv pattern, so this pattern needs to be reevalutated.
2487 (define_expand "insv"
2488 [(set (zero_extract (match_operand 0 "nonimmediate_operand" "")
2489 (match_operand 1 "general_operand" "")
2490 (match_operand 2 "general_operand" ""))
2491 (match_operand 3 "reg_or_int_operand" ""))]
2492 "TARGET_ARM || arm_arch_thumb2"
2495 int start_bit = INTVAL (operands[2]);
2496 int width = INTVAL (operands[1]);
2497 HOST_WIDE_INT mask = (((HOST_WIDE_INT)1) << width) - 1;
2498 rtx target, subtarget;
2500 if (arm_arch_thumb2)
2502 if (unaligned_access && MEM_P (operands[0])
2503 && s_register_operand (operands[3], GET_MODE (operands[3]))
2504 && (width == 16 || width == 32) && (start_bit % BITS_PER_UNIT) == 0)
2508 if (BYTES_BIG_ENDIAN)
2509 start_bit = GET_MODE_BITSIZE (GET_MODE (operands[3])) - width
2514 base_addr = adjust_address (operands[0], SImode,
2515 start_bit / BITS_PER_UNIT);
2516 emit_insn (gen_unaligned_storesi (base_addr, operands[3]));
2520 rtx tmp = gen_reg_rtx (HImode);
2522 base_addr = adjust_address (operands[0], HImode,
2523 start_bit / BITS_PER_UNIT);
2524 emit_move_insn (tmp, gen_lowpart (HImode, operands[3]));
2525 emit_insn (gen_unaligned_storehi (base_addr, tmp));
2529 else if (s_register_operand (operands[0], GET_MODE (operands[0])))
2531 bool use_bfi = TRUE;
2533 if (GET_CODE (operands[3]) == CONST_INT)
2535 HOST_WIDE_INT val = INTVAL (operands[3]) & mask;
2539 emit_insn (gen_insv_zero (operands[0], operands[1],
2544 /* See if the set can be done with a single orr instruction. */
2545 if (val == mask && const_ok_for_arm (val << start_bit))
2551 if (GET_CODE (operands[3]) != REG)
2552 operands[3] = force_reg (SImode, operands[3]);
2554 emit_insn (gen_insv_t2 (operands[0], operands[1], operands[2],
2563 if (!s_register_operand (operands[0], GET_MODE (operands[0])))
2566 target = copy_rtx (operands[0]);
2567 /* Avoid using a subreg as a subtarget, and avoid writing a paradoxical
2568 subreg as the final target. */
2569 if (GET_CODE (target) == SUBREG)
2571 subtarget = gen_reg_rtx (SImode);
2572 if (GET_MODE_SIZE (GET_MODE (SUBREG_REG (target)))
2573 < GET_MODE_SIZE (SImode))
2574 target = SUBREG_REG (target);
2579 if (GET_CODE (operands[3]) == CONST_INT)
2581 /* Since we are inserting a known constant, we may be able to
2582 reduce the number of bits that we have to clear so that
2583 the mask becomes simple. */
2584 /* ??? This code does not check to see if the new mask is actually
2585 simpler. It may not be. */
2586 rtx op1 = gen_reg_rtx (SImode);
2587 /* ??? Truncate operand3 to fit in the bitfield. See comment before
2588 start of this pattern. */
2589 HOST_WIDE_INT op3_value = mask & INTVAL (operands[3]);
2590 HOST_WIDE_INT mask2 = ((mask & ~op3_value) << start_bit);
2592 emit_insn (gen_andsi3 (op1, operands[0],
2593 gen_int_mode (~mask2, SImode)));
2594 emit_insn (gen_iorsi3 (subtarget, op1,
2595 gen_int_mode (op3_value << start_bit, SImode)));
2597 else if (start_bit == 0
2598 && !(const_ok_for_arm (mask)
2599 || const_ok_for_arm (~mask)))
2601 /* A Trick, since we are setting the bottom bits in the word,
2602 we can shift operand[3] up, operand[0] down, OR them together
2603 and rotate the result back again. This takes 3 insns, and
2604 the third might be mergeable into another op. */
2605 /* The shift up copes with the possibility that operand[3] is
2606 wider than the bitfield. */
2607 rtx op0 = gen_reg_rtx (SImode);
2608 rtx op1 = gen_reg_rtx (SImode);
2610 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
2611 emit_insn (gen_lshrsi3 (op1, operands[0], operands[1]));
2612 emit_insn (gen_iorsi3 (op1, op1, op0));
2613 emit_insn (gen_rotlsi3 (subtarget, op1, operands[1]));
2615 else if ((width + start_bit == 32)
2616 && !(const_ok_for_arm (mask)
2617 || const_ok_for_arm (~mask)))
2619 /* Similar trick, but slightly less efficient. */
2621 rtx op0 = gen_reg_rtx (SImode);
2622 rtx op1 = gen_reg_rtx (SImode);
2624 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
2625 emit_insn (gen_ashlsi3 (op1, operands[0], operands[1]));
2626 emit_insn (gen_lshrsi3 (op1, op1, operands[1]));
2627 emit_insn (gen_iorsi3 (subtarget, op1, op0));
2631 rtx op0 = gen_int_mode (mask, SImode);
2632 rtx op1 = gen_reg_rtx (SImode);
2633 rtx op2 = gen_reg_rtx (SImode);
2635 if (!(const_ok_for_arm (mask) || const_ok_for_arm (~mask)))
2637 rtx tmp = gen_reg_rtx (SImode);
2639 emit_insn (gen_movsi (tmp, op0));
2643 /* Mask out any bits in operand[3] that are not needed. */
2644 emit_insn (gen_andsi3 (op1, operands[3], op0));
2646 if (GET_CODE (op0) == CONST_INT
2647 && (const_ok_for_arm (mask << start_bit)
2648 || const_ok_for_arm (~(mask << start_bit))))
2650 op0 = gen_int_mode (~(mask << start_bit), SImode);
2651 emit_insn (gen_andsi3 (op2, operands[0], op0));
2655 if (GET_CODE (op0) == CONST_INT)
2657 rtx tmp = gen_reg_rtx (SImode);
2659 emit_insn (gen_movsi (tmp, op0));
2664 emit_insn (gen_ashlsi3 (op0, op0, operands[2]));
2666 emit_insn (gen_andsi_notsi_si (op2, operands[0], op0));
2670 emit_insn (gen_ashlsi3 (op1, op1, operands[2]));
2672 emit_insn (gen_iorsi3 (subtarget, op1, op2));
2675 if (subtarget != target)
2677 /* If TARGET is still a SUBREG, then it must be wider than a word,
2678 so we must be careful only to set the subword we were asked to. */
2679 if (GET_CODE (target) == SUBREG)
2680 emit_move_insn (target, subtarget);
2682 emit_move_insn (target, gen_lowpart (GET_MODE (target), subtarget));
2689 (define_insn "insv_zero"
2690 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
2691 (match_operand:SI 1 "const_int_operand" "M")
2692 (match_operand:SI 2 "const_int_operand" "M"))
2696 [(set_attr "length" "4")
2697 (set_attr "predicable" "yes")]
2700 (define_insn "insv_t2"
2701 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
2702 (match_operand:SI 1 "const_int_operand" "M")
2703 (match_operand:SI 2 "const_int_operand" "M"))
2704 (match_operand:SI 3 "s_register_operand" "r"))]
2706 "bfi%?\t%0, %3, %2, %1"
2707 [(set_attr "length" "4")
2708 (set_attr "predicable" "yes")]
2711 ; constants for op 2 will never be given to these patterns.
2712 (define_insn_and_split "*anddi_notdi_di"
2713 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2714 (and:DI (not:DI (match_operand:DI 1 "s_register_operand" "0,r"))
2715 (match_operand:DI 2 "s_register_operand" "r,0")))]
2718 "TARGET_32BIT && reload_completed
2719 && ! (TARGET_NEON && IS_VFP_REGNUM (REGNO (operands[0])))
2720 && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
2721 [(set (match_dup 0) (and:SI (not:SI (match_dup 1)) (match_dup 2)))
2722 (set (match_dup 3) (and:SI (not:SI (match_dup 4)) (match_dup 5)))]
2725 operands[3] = gen_highpart (SImode, operands[0]);
2726 operands[0] = gen_lowpart (SImode, operands[0]);
2727 operands[4] = gen_highpart (SImode, operands[1]);
2728 operands[1] = gen_lowpart (SImode, operands[1]);
2729 operands[5] = gen_highpart (SImode, operands[2]);
2730 operands[2] = gen_lowpart (SImode, operands[2]);
2732 [(set_attr "length" "8")
2733 (set_attr "predicable" "yes")]
2736 (define_insn_and_split "*anddi_notzesidi_di"
2737 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2738 (and:DI (not:DI (zero_extend:DI
2739 (match_operand:SI 2 "s_register_operand" "r,r")))
2740 (match_operand:DI 1 "s_register_operand" "0,?r")))]
2743 bic%?\\t%Q0, %Q1, %2
2745 ; (not (zero_extend ...)) allows us to just copy the high word from
2746 ; operand1 to operand0.
2749 && operands[0] != operands[1]"
2750 [(set (match_dup 0) (and:SI (not:SI (match_dup 2)) (match_dup 1)))
2751 (set (match_dup 3) (match_dup 4))]
2754 operands[3] = gen_highpart (SImode, operands[0]);
2755 operands[0] = gen_lowpart (SImode, operands[0]);
2756 operands[4] = gen_highpart (SImode, operands[1]);
2757 operands[1] = gen_lowpart (SImode, operands[1]);
2759 [(set_attr "length" "4,8")
2760 (set_attr "predicable" "yes")]
2763 (define_insn_and_split "*anddi_notsesidi_di"
2764 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2765 (and:DI (not:DI (sign_extend:DI
2766 (match_operand:SI 2 "s_register_operand" "r,r")))
2767 (match_operand:DI 1 "s_register_operand" "0,r")))]
2770 "TARGET_32BIT && reload_completed"
2771 [(set (match_dup 0) (and:SI (not:SI (match_dup 2)) (match_dup 1)))
2772 (set (match_dup 3) (and:SI (not:SI
2773 (ashiftrt:SI (match_dup 2) (const_int 31)))
2777 operands[3] = gen_highpart (SImode, operands[0]);
2778 operands[0] = gen_lowpart (SImode, operands[0]);
2779 operands[4] = gen_highpart (SImode, operands[1]);
2780 operands[1] = gen_lowpart (SImode, operands[1]);
2782 [(set_attr "length" "8")
2783 (set_attr "predicable" "yes")]
2786 (define_insn "andsi_notsi_si"
2787 [(set (match_operand:SI 0 "s_register_operand" "=r")
2788 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2789 (match_operand:SI 1 "s_register_operand" "r")))]
2791 "bic%?\\t%0, %1, %2"
2792 [(set_attr "predicable" "yes")]
2795 (define_insn "thumb1_bicsi3"
2796 [(set (match_operand:SI 0 "register_operand" "=l")
2797 (and:SI (not:SI (match_operand:SI 1 "register_operand" "l"))
2798 (match_operand:SI 2 "register_operand" "0")))]
2801 [(set_attr "length" "2")
2802 (set_attr "conds" "set")])
2804 (define_insn "andsi_not_shiftsi_si"
2805 [(set (match_operand:SI 0 "s_register_operand" "=r")
2806 (and:SI (not:SI (match_operator:SI 4 "shift_operator"
2807 [(match_operand:SI 2 "s_register_operand" "r")
2808 (match_operand:SI 3 "arm_rhs_operand" "rM")]))
2809 (match_operand:SI 1 "s_register_operand" "r")))]
2811 "bic%?\\t%0, %1, %2%S4"
2812 [(set_attr "predicable" "yes")
2813 (set_attr "shift" "2")
2814 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
2815 (const_string "alu_shift")
2816 (const_string "alu_shift_reg")))]
2819 (define_insn "*andsi_notsi_si_compare0"
2820 [(set (reg:CC_NOOV CC_REGNUM)
2822 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2823 (match_operand:SI 1 "s_register_operand" "r"))
2825 (set (match_operand:SI 0 "s_register_operand" "=r")
2826 (and:SI (not:SI (match_dup 2)) (match_dup 1)))]
2828 "bic%.\\t%0, %1, %2"
2829 [(set_attr "conds" "set")]
2832 (define_insn "*andsi_notsi_si_compare0_scratch"
2833 [(set (reg:CC_NOOV CC_REGNUM)
2835 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2836 (match_operand:SI 1 "s_register_operand" "r"))
2838 (clobber (match_scratch:SI 0 "=r"))]
2840 "bic%.\\t%0, %1, %2"
2841 [(set_attr "conds" "set")]
2844 (define_expand "iordi3"
2845 [(set (match_operand:DI 0 "s_register_operand" "")
2846 (ior:DI (match_operand:DI 1 "s_register_operand" "")
2847 (match_operand:DI 2 "neon_logic_op2" "")))]
2852 (define_insn "*iordi3_insn"
2853 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2854 (ior:DI (match_operand:DI 1 "s_register_operand" "%0,r")
2855 (match_operand:DI 2 "s_register_operand" "r,r")))]
2856 "TARGET_32BIT && !TARGET_IWMMXT && !TARGET_NEON"
2858 [(set_attr "length" "8")
2859 (set_attr "predicable" "yes")]
2862 (define_insn "*iordi_zesidi_di"
2863 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2864 (ior:DI (zero_extend:DI
2865 (match_operand:SI 2 "s_register_operand" "r,r"))
2866 (match_operand:DI 1 "s_register_operand" "0,?r")))]
2869 orr%?\\t%Q0, %Q1, %2
2871 [(set_attr "length" "4,8")
2872 (set_attr "predicable" "yes")]
2875 (define_insn "*iordi_sesidi_di"
2876 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2877 (ior:DI (sign_extend:DI
2878 (match_operand:SI 2 "s_register_operand" "r,r"))
2879 (match_operand:DI 1 "s_register_operand" "0,r")))]
2882 [(set_attr "length" "8")
2883 (set_attr "predicable" "yes")]
2886 (define_expand "iorsi3"
2887 [(set (match_operand:SI 0 "s_register_operand" "")
2888 (ior:SI (match_operand:SI 1 "s_register_operand" "")
2889 (match_operand:SI 2 "reg_or_int_operand" "")))]
2892 if (GET_CODE (operands[2]) == CONST_INT)
2896 arm_split_constant (IOR, SImode, NULL_RTX,
2897 INTVAL (operands[2]), operands[0], operands[1],
2898 optimize && can_create_pseudo_p ());
2901 else /* TARGET_THUMB1 */
2903 rtx tmp = force_reg (SImode, operands[2]);
2904 if (rtx_equal_p (operands[0], operands[1]))
2908 operands[2] = operands[1];
2916 (define_insn_and_split "*iorsi3_insn"
2917 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
2918 (ior:SI (match_operand:SI 1 "s_register_operand" "%r,r,r")
2919 (match_operand:SI 2 "reg_or_int_operand" "rI,K,?n")))]
2923 orn%?\\t%0, %1, #%B2
2926 && GET_CODE (operands[2]) == CONST_INT
2927 && !(const_ok_for_arm (INTVAL (operands[2]))
2928 || (TARGET_THUMB2 && const_ok_for_arm (~INTVAL (operands[2]))))"
2929 [(clobber (const_int 0))]
2931 arm_split_constant (IOR, SImode, curr_insn,
2932 INTVAL (operands[2]), operands[0], operands[1], 0);
2935 [(set_attr "length" "4,4,16")
2936 (set_attr "arch" "32,t2,32")
2937 (set_attr "predicable" "yes")])
2939 (define_insn "*thumb1_iorsi3_insn"
2940 [(set (match_operand:SI 0 "register_operand" "=l")
2941 (ior:SI (match_operand:SI 1 "register_operand" "%0")
2942 (match_operand:SI 2 "register_operand" "l")))]
2945 [(set_attr "length" "2")
2946 (set_attr "conds" "set")])
2949 [(match_scratch:SI 3 "r")
2950 (set (match_operand:SI 0 "arm_general_register_operand" "")
2951 (ior:SI (match_operand:SI 1 "arm_general_register_operand" "")
2952 (match_operand:SI 2 "const_int_operand" "")))]
2954 && !const_ok_for_arm (INTVAL (operands[2]))
2955 && const_ok_for_arm (~INTVAL (operands[2]))"
2956 [(set (match_dup 3) (match_dup 2))
2957 (set (match_dup 0) (ior:SI (match_dup 1) (match_dup 3)))]
2961 (define_insn "*iorsi3_compare0"
2962 [(set (reg:CC_NOOV CC_REGNUM)
2963 (compare:CC_NOOV (ior:SI (match_operand:SI 1 "s_register_operand" "%r")
2964 (match_operand:SI 2 "arm_rhs_operand" "rI"))
2966 (set (match_operand:SI 0 "s_register_operand" "=r")
2967 (ior:SI (match_dup 1) (match_dup 2)))]
2969 "orr%.\\t%0, %1, %2"
2970 [(set_attr "conds" "set")]
2973 (define_insn "*iorsi3_compare0_scratch"
2974 [(set (reg:CC_NOOV CC_REGNUM)
2975 (compare:CC_NOOV (ior:SI (match_operand:SI 1 "s_register_operand" "%r")
2976 (match_operand:SI 2 "arm_rhs_operand" "rI"))
2978 (clobber (match_scratch:SI 0 "=r"))]
2980 "orr%.\\t%0, %1, %2"
2981 [(set_attr "conds" "set")]
2984 (define_expand "xordi3"
2985 [(set (match_operand:DI 0 "s_register_operand" "")
2986 (xor:DI (match_operand:DI 1 "s_register_operand" "")
2987 (match_operand:DI 2 "s_register_operand" "")))]
2992 (define_insn "*xordi3_insn"
2993 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2994 (xor:DI (match_operand:DI 1 "s_register_operand" "%0,r")
2995 (match_operand:DI 2 "s_register_operand" "r,r")))]
2996 "TARGET_32BIT && !TARGET_IWMMXT && !TARGET_NEON"
2998 [(set_attr "length" "8")
2999 (set_attr "predicable" "yes")]
3002 (define_insn "*xordi_zesidi_di"
3003 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
3004 (xor:DI (zero_extend:DI
3005 (match_operand:SI 2 "s_register_operand" "r,r"))
3006 (match_operand:DI 1 "s_register_operand" "0,?r")))]
3009 eor%?\\t%Q0, %Q1, %2
3011 [(set_attr "length" "4,8")
3012 (set_attr "predicable" "yes")]
3015 (define_insn "*xordi_sesidi_di"
3016 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
3017 (xor:DI (sign_extend:DI
3018 (match_operand:SI 2 "s_register_operand" "r,r"))
3019 (match_operand:DI 1 "s_register_operand" "0,r")))]
3022 [(set_attr "length" "8")
3023 (set_attr "predicable" "yes")]
3026 (define_expand "xorsi3"
3027 [(set (match_operand:SI 0 "s_register_operand" "")
3028 (xor:SI (match_operand:SI 1 "s_register_operand" "")
3029 (match_operand:SI 2 "reg_or_int_operand" "")))]
3031 "if (GET_CODE (operands[2]) == CONST_INT)
3035 arm_split_constant (XOR, SImode, NULL_RTX,
3036 INTVAL (operands[2]), operands[0], operands[1],
3037 optimize && can_create_pseudo_p ());
3040 else /* TARGET_THUMB1 */
3042 rtx tmp = force_reg (SImode, operands[2]);
3043 if (rtx_equal_p (operands[0], operands[1]))
3047 operands[2] = operands[1];
3054 (define_insn_and_split "*arm_xorsi3"
3055 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3056 (xor:SI (match_operand:SI 1 "s_register_operand" "%r,r")
3057 (match_operand:SI 2 "reg_or_int_operand" "rI,?n")))]
3063 && GET_CODE (operands[2]) == CONST_INT
3064 && !const_ok_for_arm (INTVAL (operands[2]))"
3065 [(clobber (const_int 0))]
3067 arm_split_constant (XOR, SImode, curr_insn,
3068 INTVAL (operands[2]), operands[0], operands[1], 0);
3071 [(set_attr "length" "4,16")
3072 (set_attr "predicable" "yes")]
3075 (define_insn "*thumb1_xorsi3_insn"
3076 [(set (match_operand:SI 0 "register_operand" "=l")
3077 (xor:SI (match_operand:SI 1 "register_operand" "%0")
3078 (match_operand:SI 2 "register_operand" "l")))]
3081 [(set_attr "length" "2")
3082 (set_attr "conds" "set")])
3084 (define_insn "*xorsi3_compare0"
3085 [(set (reg:CC_NOOV CC_REGNUM)
3086 (compare:CC_NOOV (xor:SI (match_operand:SI 1 "s_register_operand" "r")
3087 (match_operand:SI 2 "arm_rhs_operand" "rI"))
3089 (set (match_operand:SI 0 "s_register_operand" "=r")
3090 (xor:SI (match_dup 1) (match_dup 2)))]
3092 "eor%.\\t%0, %1, %2"
3093 [(set_attr "conds" "set")]
3096 (define_insn "*xorsi3_compare0_scratch"
3097 [(set (reg:CC_NOOV CC_REGNUM)
3098 (compare:CC_NOOV (xor:SI (match_operand:SI 0 "s_register_operand" "r")
3099 (match_operand:SI 1 "arm_rhs_operand" "rI"))
3103 [(set_attr "conds" "set")]
3106 ; By splitting (IOR (AND (NOT A) (NOT B)) C) as D = AND (IOR A B) (NOT C),
3107 ; (NOT D) we can sometimes merge the final NOT into one of the following
3111 [(set (match_operand:SI 0 "s_register_operand" "")
3112 (ior:SI (and:SI (not:SI (match_operand:SI 1 "s_register_operand" ""))
3113 (not:SI (match_operand:SI 2 "arm_rhs_operand" "")))
3114 (match_operand:SI 3 "arm_rhs_operand" "")))
3115 (clobber (match_operand:SI 4 "s_register_operand" ""))]
3117 [(set (match_dup 4) (and:SI (ior:SI (match_dup 1) (match_dup 2))
3118 (not:SI (match_dup 3))))
3119 (set (match_dup 0) (not:SI (match_dup 4)))]
3123 (define_insn "*andsi_iorsi3_notsi"
3124 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
3125 (and:SI (ior:SI (match_operand:SI 1 "s_register_operand" "%0,r,r")
3126 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI"))
3127 (not:SI (match_operand:SI 3 "arm_rhs_operand" "rI,rI,rI"))))]
3129 "orr%?\\t%0, %1, %2\;bic%?\\t%0, %0, %3"
3130 [(set_attr "length" "8")
3131 (set_attr "ce_count" "2")
3132 (set_attr "predicable" "yes")]
3135 ; ??? Are these four splitters still beneficial when the Thumb-2 bitfield
3136 ; insns are available?
3138 [(set (match_operand:SI 0 "s_register_operand" "")
3139 (match_operator:SI 1 "logical_binary_operator"
3140 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
3141 (match_operand:SI 3 "const_int_operand" "")
3142 (match_operand:SI 4 "const_int_operand" ""))
3143 (match_operator:SI 9 "logical_binary_operator"
3144 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3145 (match_operand:SI 6 "const_int_operand" ""))
3146 (match_operand:SI 7 "s_register_operand" "")])]))
3147 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3149 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3150 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3153 [(ashift:SI (match_dup 2) (match_dup 4))
3157 [(lshiftrt:SI (match_dup 8) (match_dup 6))
3160 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3164 [(set (match_operand:SI 0 "s_register_operand" "")
3165 (match_operator:SI 1 "logical_binary_operator"
3166 [(match_operator:SI 9 "logical_binary_operator"
3167 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3168 (match_operand:SI 6 "const_int_operand" ""))
3169 (match_operand:SI 7 "s_register_operand" "")])
3170 (zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
3171 (match_operand:SI 3 "const_int_operand" "")
3172 (match_operand:SI 4 "const_int_operand" ""))]))
3173 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3175 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3176 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3179 [(ashift:SI (match_dup 2) (match_dup 4))
3183 [(lshiftrt:SI (match_dup 8) (match_dup 6))
3186 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3190 [(set (match_operand:SI 0 "s_register_operand" "")
3191 (match_operator:SI 1 "logical_binary_operator"
3192 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
3193 (match_operand:SI 3 "const_int_operand" "")
3194 (match_operand:SI 4 "const_int_operand" ""))
3195 (match_operator:SI 9 "logical_binary_operator"
3196 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3197 (match_operand:SI 6 "const_int_operand" ""))
3198 (match_operand:SI 7 "s_register_operand" "")])]))
3199 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3201 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3202 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3205 [(ashift:SI (match_dup 2) (match_dup 4))
3209 [(ashiftrt:SI (match_dup 8) (match_dup 6))
3212 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3216 [(set (match_operand:SI 0 "s_register_operand" "")
3217 (match_operator:SI 1 "logical_binary_operator"
3218 [(match_operator:SI 9 "logical_binary_operator"
3219 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3220 (match_operand:SI 6 "const_int_operand" ""))
3221 (match_operand:SI 7 "s_register_operand" "")])
3222 (sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
3223 (match_operand:SI 3 "const_int_operand" "")
3224 (match_operand:SI 4 "const_int_operand" ""))]))
3225 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3227 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3228 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3231 [(ashift:SI (match_dup 2) (match_dup 4))
3235 [(ashiftrt:SI (match_dup 8) (match_dup 6))
3238 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3242 ;; Minimum and maximum insns
3244 (define_expand "smaxsi3"
3246 (set (match_operand:SI 0 "s_register_operand" "")
3247 (smax:SI (match_operand:SI 1 "s_register_operand" "")
3248 (match_operand:SI 2 "arm_rhs_operand" "")))
3249 (clobber (reg:CC CC_REGNUM))])]
3252 if (operands[2] == const0_rtx || operands[2] == constm1_rtx)
3254 /* No need for a clobber of the condition code register here. */
3255 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
3256 gen_rtx_SMAX (SImode, operands[1],
3262 (define_insn "*smax_0"
3263 [(set (match_operand:SI 0 "s_register_operand" "=r")
3264 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
3267 "bic%?\\t%0, %1, %1, asr #31"
3268 [(set_attr "predicable" "yes")]
3271 (define_insn "*smax_m1"
3272 [(set (match_operand:SI 0 "s_register_operand" "=r")
3273 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
3276 "orr%?\\t%0, %1, %1, asr #31"
3277 [(set_attr "predicable" "yes")]
3280 (define_insn "*arm_smax_insn"
3281 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3282 (smax:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
3283 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
3284 (clobber (reg:CC CC_REGNUM))]
3287 cmp\\t%1, %2\;movlt\\t%0, %2
3288 cmp\\t%1, %2\;movge\\t%0, %1\;movlt\\t%0, %2"
3289 [(set_attr "conds" "clob")
3290 (set_attr "length" "8,12")]
3293 (define_expand "sminsi3"
3295 (set (match_operand:SI 0 "s_register_operand" "")
3296 (smin:SI (match_operand:SI 1 "s_register_operand" "")
3297 (match_operand:SI 2 "arm_rhs_operand" "")))
3298 (clobber (reg:CC CC_REGNUM))])]
3301 if (operands[2] == const0_rtx)
3303 /* No need for a clobber of the condition code register here. */
3304 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
3305 gen_rtx_SMIN (SImode, operands[1],
3311 (define_insn "*smin_0"
3312 [(set (match_operand:SI 0 "s_register_operand" "=r")
3313 (smin:SI (match_operand:SI 1 "s_register_operand" "r")
3316 "and%?\\t%0, %1, %1, asr #31"
3317 [(set_attr "predicable" "yes")]
3320 (define_insn "*arm_smin_insn"
3321 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3322 (smin:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
3323 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
3324 (clobber (reg:CC CC_REGNUM))]
3327 cmp\\t%1, %2\;movge\\t%0, %2
3328 cmp\\t%1, %2\;movlt\\t%0, %1\;movge\\t%0, %2"
3329 [(set_attr "conds" "clob")
3330 (set_attr "length" "8,12")]
3333 (define_expand "umaxsi3"
3335 (set (match_operand:SI 0 "s_register_operand" "")
3336 (umax:SI (match_operand:SI 1 "s_register_operand" "")
3337 (match_operand:SI 2 "arm_rhs_operand" "")))
3338 (clobber (reg:CC CC_REGNUM))])]
3343 (define_insn "*arm_umaxsi3"
3344 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3345 (umax:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
3346 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
3347 (clobber (reg:CC CC_REGNUM))]
3350 cmp\\t%1, %2\;movcc\\t%0, %2
3351 cmp\\t%1, %2\;movcs\\t%0, %1
3352 cmp\\t%1, %2\;movcs\\t%0, %1\;movcc\\t%0, %2"
3353 [(set_attr "conds" "clob")
3354 (set_attr "length" "8,8,12")]
3357 (define_expand "uminsi3"
3359 (set (match_operand:SI 0 "s_register_operand" "")
3360 (umin:SI (match_operand:SI 1 "s_register_operand" "")
3361 (match_operand:SI 2 "arm_rhs_operand" "")))
3362 (clobber (reg:CC CC_REGNUM))])]
3367 (define_insn "*arm_uminsi3"
3368 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3369 (umin:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
3370 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
3371 (clobber (reg:CC CC_REGNUM))]
3374 cmp\\t%1, %2\;movcs\\t%0, %2
3375 cmp\\t%1, %2\;movcc\\t%0, %1
3376 cmp\\t%1, %2\;movcc\\t%0, %1\;movcs\\t%0, %2"
3377 [(set_attr "conds" "clob")
3378 (set_attr "length" "8,8,12")]
3381 (define_insn "*store_minmaxsi"
3382 [(set (match_operand:SI 0 "memory_operand" "=m")
3383 (match_operator:SI 3 "minmax_operator"
3384 [(match_operand:SI 1 "s_register_operand" "r")
3385 (match_operand:SI 2 "s_register_operand" "r")]))
3386 (clobber (reg:CC CC_REGNUM))]
3389 operands[3] = gen_rtx_fmt_ee (minmax_code (operands[3]), SImode,
3390 operands[1], operands[2]);
3391 output_asm_insn (\"cmp\\t%1, %2\", operands);
3393 output_asm_insn (\"ite\t%d3\", operands);
3394 output_asm_insn (\"str%d3\\t%1, %0\", operands);
3395 output_asm_insn (\"str%D3\\t%2, %0\", operands);
3398 [(set_attr "conds" "clob")
3399 (set (attr "length")
3400 (if_then_else (eq_attr "is_thumb" "yes")
3403 (set_attr "type" "store1")]
3406 ; Reject the frame pointer in operand[1], since reloading this after
3407 ; it has been eliminated can cause carnage.
3408 (define_insn "*minmax_arithsi"
3409 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3410 (match_operator:SI 4 "shiftable_operator"
3411 [(match_operator:SI 5 "minmax_operator"
3412 [(match_operand:SI 2 "s_register_operand" "r,r")
3413 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
3414 (match_operand:SI 1 "s_register_operand" "0,?r")]))
3415 (clobber (reg:CC CC_REGNUM))]
3416 "TARGET_32BIT && !arm_eliminable_register (operands[1])"
3419 enum rtx_code code = GET_CODE (operands[4]);
3422 if (which_alternative != 0 || operands[3] != const0_rtx
3423 || (code != PLUS && code != IOR && code != XOR))
3428 operands[5] = gen_rtx_fmt_ee (minmax_code (operands[5]), SImode,
3429 operands[2], operands[3]);
3430 output_asm_insn (\"cmp\\t%2, %3\", operands);
3434 output_asm_insn (\"ite\\t%d5\", operands);
3436 output_asm_insn (\"it\\t%d5\", operands);
3438 output_asm_insn (\"%i4%d5\\t%0, %1, %2\", operands);
3440 output_asm_insn (\"%i4%D5\\t%0, %1, %3\", operands);
3443 [(set_attr "conds" "clob")
3444 (set (attr "length")
3445 (if_then_else (eq_attr "is_thumb" "yes")
3451 ;; Shift and rotation insns
3453 (define_expand "ashldi3"
3454 [(set (match_operand:DI 0 "s_register_operand" "")
3455 (ashift:DI (match_operand:DI 1 "s_register_operand" "")
3456 (match_operand:SI 2 "reg_or_int_operand" "")))]
3459 if (GET_CODE (operands[2]) == CONST_INT)
3461 if ((HOST_WIDE_INT) INTVAL (operands[2]) == 1)
3463 emit_insn (gen_arm_ashldi3_1bit (operands[0], operands[1]));
3466 /* Ideally we shouldn't fail here if we could know that operands[1]
3467 ends up already living in an iwmmxt register. Otherwise it's
3468 cheaper to have the alternate code being generated than moving
3469 values to iwmmxt regs and back. */
3472 else if (!TARGET_REALLY_IWMMXT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK))
3477 (define_insn "arm_ashldi3_1bit"
3478 [(set (match_operand:DI 0 "s_register_operand" "=r,&r")
3479 (ashift:DI (match_operand:DI 1 "s_register_operand" "0,r")
3481 (clobber (reg:CC CC_REGNUM))]
3483 "movs\\t%Q0, %Q1, asl #1\;adc\\t%R0, %R1, %R1"
3484 [(set_attr "conds" "clob")
3485 (set_attr "length" "8")]
3488 (define_expand "ashlsi3"
3489 [(set (match_operand:SI 0 "s_register_operand" "")
3490 (ashift:SI (match_operand:SI 1 "s_register_operand" "")
3491 (match_operand:SI 2 "arm_rhs_operand" "")))]
3494 if (GET_CODE (operands[2]) == CONST_INT
3495 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3497 emit_insn (gen_movsi (operands[0], const0_rtx));
3503 (define_insn "*thumb1_ashlsi3"
3504 [(set (match_operand:SI 0 "register_operand" "=l,l")
3505 (ashift:SI (match_operand:SI 1 "register_operand" "l,0")
3506 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
3509 [(set_attr "length" "2")
3510 (set_attr "conds" "set")])
3512 (define_expand "ashrdi3"
3513 [(set (match_operand:DI 0 "s_register_operand" "")
3514 (ashiftrt:DI (match_operand:DI 1 "s_register_operand" "")
3515 (match_operand:SI 2 "reg_or_int_operand" "")))]
3518 if (GET_CODE (operands[2]) == CONST_INT)
3520 if ((HOST_WIDE_INT) INTVAL (operands[2]) == 1)
3522 emit_insn (gen_arm_ashrdi3_1bit (operands[0], operands[1]));
3525 /* Ideally we shouldn't fail here if we could know that operands[1]
3526 ends up already living in an iwmmxt register. Otherwise it's
3527 cheaper to have the alternate code being generated than moving
3528 values to iwmmxt regs and back. */
3531 else if (!TARGET_REALLY_IWMMXT)
3536 (define_insn "arm_ashrdi3_1bit"
3537 [(set (match_operand:DI 0 "s_register_operand" "=r,&r")
3538 (ashiftrt:DI (match_operand:DI 1 "s_register_operand" "0,r")
3540 (clobber (reg:CC CC_REGNUM))]
3542 "movs\\t%R0, %R1, asr #1\;mov\\t%Q0, %Q1, rrx"
3543 [(set_attr "conds" "clob")
3544 (set_attr "insn" "mov")
3545 (set_attr "length" "8")]
3548 (define_expand "ashrsi3"
3549 [(set (match_operand:SI 0 "s_register_operand" "")
3550 (ashiftrt:SI (match_operand:SI 1 "s_register_operand" "")
3551 (match_operand:SI 2 "arm_rhs_operand" "")))]
3554 if (GET_CODE (operands[2]) == CONST_INT
3555 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3556 operands[2] = GEN_INT (31);
3560 (define_insn "*thumb1_ashrsi3"
3561 [(set (match_operand:SI 0 "register_operand" "=l,l")
3562 (ashiftrt:SI (match_operand:SI 1 "register_operand" "l,0")
3563 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
3566 [(set_attr "length" "2")
3567 (set_attr "conds" "set")])
3569 (define_expand "lshrdi3"
3570 [(set (match_operand:DI 0 "s_register_operand" "")
3571 (lshiftrt:DI (match_operand:DI 1 "s_register_operand" "")
3572 (match_operand:SI 2 "reg_or_int_operand" "")))]
3575 if (GET_CODE (operands[2]) == CONST_INT)
3577 if ((HOST_WIDE_INT) INTVAL (operands[2]) == 1)
3579 emit_insn (gen_arm_lshrdi3_1bit (operands[0], operands[1]));
3582 /* Ideally we shouldn't fail here if we could know that operands[1]
3583 ends up already living in an iwmmxt register. Otherwise it's
3584 cheaper to have the alternate code being generated than moving
3585 values to iwmmxt regs and back. */
3588 else if (!TARGET_REALLY_IWMMXT)
3593 (define_insn "arm_lshrdi3_1bit"
3594 [(set (match_operand:DI 0 "s_register_operand" "=r,&r")
3595 (lshiftrt:DI (match_operand:DI 1 "s_register_operand" "0,r")
3597 (clobber (reg:CC CC_REGNUM))]
3599 "movs\\t%R0, %R1, lsr #1\;mov\\t%Q0, %Q1, rrx"
3600 [(set_attr "conds" "clob")
3601 (set_attr "insn" "mov")
3602 (set_attr "length" "8")]
3605 (define_expand "lshrsi3"
3606 [(set (match_operand:SI 0 "s_register_operand" "")
3607 (lshiftrt:SI (match_operand:SI 1 "s_register_operand" "")
3608 (match_operand:SI 2 "arm_rhs_operand" "")))]
3611 if (GET_CODE (operands[2]) == CONST_INT
3612 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3614 emit_insn (gen_movsi (operands[0], const0_rtx));
3620 (define_insn "*thumb1_lshrsi3"
3621 [(set (match_operand:SI 0 "register_operand" "=l,l")
3622 (lshiftrt:SI (match_operand:SI 1 "register_operand" "l,0")
3623 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
3626 [(set_attr "length" "2")
3627 (set_attr "conds" "set")])
3629 (define_expand "rotlsi3"
3630 [(set (match_operand:SI 0 "s_register_operand" "")
3631 (rotatert:SI (match_operand:SI 1 "s_register_operand" "")
3632 (match_operand:SI 2 "reg_or_int_operand" "")))]
3635 if (GET_CODE (operands[2]) == CONST_INT)
3636 operands[2] = GEN_INT ((32 - INTVAL (operands[2])) % 32);
3639 rtx reg = gen_reg_rtx (SImode);
3640 emit_insn (gen_subsi3 (reg, GEN_INT (32), operands[2]));
3646 (define_expand "rotrsi3"
3647 [(set (match_operand:SI 0 "s_register_operand" "")
3648 (rotatert:SI (match_operand:SI 1 "s_register_operand" "")
3649 (match_operand:SI 2 "arm_rhs_operand" "")))]
3654 if (GET_CODE (operands[2]) == CONST_INT
3655 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3656 operands[2] = GEN_INT (INTVAL (operands[2]) % 32);
3658 else /* TARGET_THUMB1 */
3660 if (GET_CODE (operands [2]) == CONST_INT)
3661 operands [2] = force_reg (SImode, operands[2]);
3666 (define_insn "*thumb1_rotrsi3"
3667 [(set (match_operand:SI 0 "register_operand" "=l")
3668 (rotatert:SI (match_operand:SI 1 "register_operand" "0")
3669 (match_operand:SI 2 "register_operand" "l")))]
3672 [(set_attr "length" "2")]
3675 (define_insn "*arm_shiftsi3"
3676 [(set (match_operand:SI 0 "s_register_operand" "=r")
3677 (match_operator:SI 3 "shift_operator"
3678 [(match_operand:SI 1 "s_register_operand" "r")
3679 (match_operand:SI 2 "reg_or_int_operand" "rM")]))]
3681 "* return arm_output_shift(operands, 0);"
3682 [(set_attr "predicable" "yes")
3683 (set_attr "shift" "1")
3684 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3685 (const_string "alu_shift")
3686 (const_string "alu_shift_reg")))]
3689 (define_insn "*shiftsi3_compare0"
3690 [(set (reg:CC_NOOV CC_REGNUM)
3691 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
3692 [(match_operand:SI 1 "s_register_operand" "r")
3693 (match_operand:SI 2 "arm_rhs_operand" "rM")])
3695 (set (match_operand:SI 0 "s_register_operand" "=r")
3696 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))]
3698 "* return arm_output_shift(operands, 1);"
3699 [(set_attr "conds" "set")
3700 (set_attr "shift" "1")
3701 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3702 (const_string "alu_shift")
3703 (const_string "alu_shift_reg")))]
3706 (define_insn "*shiftsi3_compare0_scratch"
3707 [(set (reg:CC_NOOV CC_REGNUM)
3708 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
3709 [(match_operand:SI 1 "s_register_operand" "r")
3710 (match_operand:SI 2 "arm_rhs_operand" "rM")])
3712 (clobber (match_scratch:SI 0 "=r"))]
3714 "* return arm_output_shift(operands, 1);"
3715 [(set_attr "conds" "set")
3716 (set_attr "shift" "1")]
3719 (define_insn "*not_shiftsi"
3720 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3721 (not:SI (match_operator:SI 3 "shift_operator"
3722 [(match_operand:SI 1 "s_register_operand" "r,r")
3723 (match_operand:SI 2 "shift_amount_operand" "M,rM")])))]
3726 [(set_attr "predicable" "yes")
3727 (set_attr "shift" "1")
3728 (set_attr "insn" "mvn")
3729 (set_attr "arch" "32,a")
3730 (set_attr "type" "alu_shift,alu_shift_reg")])
3732 (define_insn "*not_shiftsi_compare0"
3733 [(set (reg:CC_NOOV CC_REGNUM)
3735 (not:SI (match_operator:SI 3 "shift_operator"
3736 [(match_operand:SI 1 "s_register_operand" "r,r")
3737 (match_operand:SI 2 "shift_amount_operand" "M,rM")]))
3739 (set (match_operand:SI 0 "s_register_operand" "=r,r")
3740 (not:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])))]
3743 [(set_attr "conds" "set")
3744 (set_attr "shift" "1")
3745 (set_attr "insn" "mvn")
3746 (set_attr "arch" "32,a")
3747 (set_attr "type" "alu_shift,alu_shift_reg")])
3749 (define_insn "*not_shiftsi_compare0_scratch"
3750 [(set (reg:CC_NOOV CC_REGNUM)
3752 (not:SI (match_operator:SI 3 "shift_operator"
3753 [(match_operand:SI 1 "s_register_operand" "r,r")
3754 (match_operand:SI 2 "shift_amount_operand" "M,rM")]))
3756 (clobber (match_scratch:SI 0 "=r,r"))]
3759 [(set_attr "conds" "set")
3760 (set_attr "shift" "1")
3761 (set_attr "insn" "mvn")
3762 (set_attr "arch" "32,a")
3763 (set_attr "type" "alu_shift,alu_shift_reg")])
3765 ;; We don't really have extzv, but defining this using shifts helps
3766 ;; to reduce register pressure later on.
3768 (define_expand "extzv"
3769 [(set (match_operand 0 "s_register_operand" "")
3770 (zero_extract (match_operand 1 "nonimmediate_operand" "")
3771 (match_operand 2 "const_int_operand" "")
3772 (match_operand 3 "const_int_operand" "")))]
3773 "TARGET_THUMB1 || arm_arch_thumb2"
3776 HOST_WIDE_INT lshift = 32 - INTVAL (operands[2]) - INTVAL (operands[3]);
3777 HOST_WIDE_INT rshift = 32 - INTVAL (operands[2]);
3779 if (arm_arch_thumb2)
3781 HOST_WIDE_INT width = INTVAL (operands[2]);
3782 HOST_WIDE_INT bitpos = INTVAL (operands[3]);
3784 if (unaligned_access && MEM_P (operands[1])
3785 && (width == 16 || width == 32) && (bitpos % BITS_PER_UNIT) == 0)
3789 if (BYTES_BIG_ENDIAN)
3790 bitpos = GET_MODE_BITSIZE (GET_MODE (operands[0])) - width
3795 base_addr = adjust_address (operands[1], SImode,
3796 bitpos / BITS_PER_UNIT);
3797 emit_insn (gen_unaligned_loadsi (operands[0], base_addr));
3801 rtx dest = operands[0];
3802 rtx tmp = gen_reg_rtx (SImode);
3804 /* We may get a paradoxical subreg here. Strip it off. */
3805 if (GET_CODE (dest) == SUBREG
3806 && GET_MODE (dest) == SImode
3807 && GET_MODE (SUBREG_REG (dest)) == HImode)
3808 dest = SUBREG_REG (dest);
3810 if (GET_MODE_BITSIZE (GET_MODE (dest)) != width)
3813 base_addr = adjust_address (operands[1], HImode,
3814 bitpos / BITS_PER_UNIT);
3815 emit_insn (gen_unaligned_loadhiu (tmp, base_addr));
3816 emit_move_insn (gen_lowpart (SImode, dest), tmp);
3820 else if (s_register_operand (operands[1], GET_MODE (operands[1])))
3822 emit_insn (gen_extzv_t2 (operands[0], operands[1], operands[2],
3830 if (!s_register_operand (operands[1], GET_MODE (operands[1])))
3833 operands[3] = GEN_INT (rshift);
3837 emit_insn (gen_lshrsi3 (operands[0], operands[1], operands[3]));
3841 emit_insn (gen_extzv_t1 (operands[0], operands[1], GEN_INT (lshift),
3842 operands[3], gen_reg_rtx (SImode)));
3847 ;; Helper for extzv, for the Thumb-1 register-shifts case.
3849 (define_expand "extzv_t1"
3850 [(set (match_operand:SI 4 "s_register_operand" "")
3851 (ashift:SI (match_operand:SI 1 "nonimmediate_operand" "")
3852 (match_operand:SI 2 "const_int_operand" "")))
3853 (set (match_operand:SI 0 "s_register_operand" "")
3854 (lshiftrt:SI (match_dup 4)
3855 (match_operand:SI 3 "const_int_operand" "")))]
3859 (define_expand "extv"
3860 [(set (match_operand 0 "s_register_operand" "")
3861 (sign_extract (match_operand 1 "nonimmediate_operand" "")
3862 (match_operand 2 "const_int_operand" "")
3863 (match_operand 3 "const_int_operand" "")))]
3866 HOST_WIDE_INT width = INTVAL (operands[2]);
3867 HOST_WIDE_INT bitpos = INTVAL (operands[3]);
3869 if (unaligned_access && MEM_P (operands[1]) && (width == 16 || width == 32)
3870 && (bitpos % BITS_PER_UNIT) == 0)
3874 if (BYTES_BIG_ENDIAN)
3875 bitpos = GET_MODE_BITSIZE (GET_MODE (operands[0])) - width - bitpos;
3879 base_addr = adjust_address (operands[1], SImode,
3880 bitpos / BITS_PER_UNIT);
3881 emit_insn (gen_unaligned_loadsi (operands[0], base_addr));
3885 rtx dest = operands[0];
3886 rtx tmp = gen_reg_rtx (SImode);
3888 /* We may get a paradoxical subreg here. Strip it off. */
3889 if (GET_CODE (dest) == SUBREG
3890 && GET_MODE (dest) == SImode
3891 && GET_MODE (SUBREG_REG (dest)) == HImode)
3892 dest = SUBREG_REG (dest);
3894 if (GET_MODE_BITSIZE (GET_MODE (dest)) != width)
3897 base_addr = adjust_address (operands[1], HImode,
3898 bitpos / BITS_PER_UNIT);
3899 emit_insn (gen_unaligned_loadhis (tmp, base_addr));
3900 emit_move_insn (gen_lowpart (SImode, dest), tmp);
3905 else if (!s_register_operand (operands[1], GET_MODE (operands[1])))
3907 else if (GET_MODE (operands[0]) == SImode
3908 && GET_MODE (operands[1]) == SImode)
3910 emit_insn (gen_extv_regsi (operands[0], operands[1], operands[2],
3918 ; Helper to expand register forms of extv with the proper modes.
3920 (define_expand "extv_regsi"
3921 [(set (match_operand:SI 0 "s_register_operand" "")
3922 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "")
3923 (match_operand 2 "const_int_operand" "")
3924 (match_operand 3 "const_int_operand" "")))]
3929 ; ARMv6+ unaligned load/store instructions (used for packed structure accesses).
3931 (define_insn "unaligned_loadsi"
3932 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
3933 (unspec:SI [(match_operand:SI 1 "memory_operand" "Uw,m")]
3934 UNSPEC_UNALIGNED_LOAD))]
3935 "unaligned_access && TARGET_32BIT"
3936 "ldr%?\t%0, %1\t@ unaligned"
3937 [(set_attr "arch" "t2,any")
3938 (set_attr "length" "2,4")
3939 (set_attr "predicable" "yes")
3940 (set_attr "type" "load1")])
3942 (define_insn "unaligned_loadhis"
3943 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
3945 (unspec:HI [(match_operand:HI 1 "memory_operand" "Uw,m")]
3946 UNSPEC_UNALIGNED_LOAD)))]
3947 "unaligned_access && TARGET_32BIT"
3948 "ldr%(sh%)\t%0, %1\t@ unaligned"
3949 [(set_attr "arch" "t2,any")
3950 (set_attr "length" "2,4")
3951 (set_attr "predicable" "yes")
3952 (set_attr "type" "load_byte")])
3954 (define_insn "unaligned_loadhiu"
3955 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
3957 (unspec:HI [(match_operand:HI 1 "memory_operand" "Uw,m")]
3958 UNSPEC_UNALIGNED_LOAD)))]
3959 "unaligned_access && TARGET_32BIT"
3960 "ldr%(h%)\t%0, %1\t@ unaligned"
3961 [(set_attr "arch" "t2,any")
3962 (set_attr "length" "2,4")
3963 (set_attr "predicable" "yes")
3964 (set_attr "type" "load_byte")])
3966 (define_insn "unaligned_storesi"
3967 [(set (match_operand:SI 0 "memory_operand" "=Uw,m")
3968 (unspec:SI [(match_operand:SI 1 "s_register_operand" "l,r")]
3969 UNSPEC_UNALIGNED_STORE))]
3970 "unaligned_access && TARGET_32BIT"
3971 "str%?\t%1, %0\t@ unaligned"
3972 [(set_attr "arch" "t2,any")
3973 (set_attr "length" "2,4")
3974 (set_attr "predicable" "yes")
3975 (set_attr "type" "store1")])
3977 (define_insn "unaligned_storehi"
3978 [(set (match_operand:HI 0 "memory_operand" "=Uw,m")
3979 (unspec:HI [(match_operand:HI 1 "s_register_operand" "l,r")]
3980 UNSPEC_UNALIGNED_STORE))]
3981 "unaligned_access && TARGET_32BIT"
3982 "str%(h%)\t%1, %0\t@ unaligned"
3983 [(set_attr "arch" "t2,any")
3984 (set_attr "length" "2,4")
3985 (set_attr "predicable" "yes")
3986 (set_attr "type" "store1")])
3988 (define_insn "*extv_reg"
3989 [(set (match_operand:SI 0 "s_register_operand" "=r")
3990 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
3991 (match_operand:SI 2 "const_int_operand" "M")
3992 (match_operand:SI 3 "const_int_operand" "M")))]
3994 "sbfx%?\t%0, %1, %3, %2"
3995 [(set_attr "length" "4")
3996 (set_attr "predicable" "yes")]
3999 (define_insn "extzv_t2"
4000 [(set (match_operand:SI 0 "s_register_operand" "=r")
4001 (zero_extract:SI (match_operand:SI 1 "s_register_operand" "r")
4002 (match_operand:SI 2 "const_int_operand" "M")
4003 (match_operand:SI 3 "const_int_operand" "M")))]
4005 "ubfx%?\t%0, %1, %3, %2"
4006 [(set_attr "length" "4")
4007 (set_attr "predicable" "yes")]
4011 ;; Division instructions
4012 (define_insn "divsi3"
4013 [(set (match_operand:SI 0 "s_register_operand" "=r")
4014 (div:SI (match_operand:SI 1 "s_register_operand" "r")
4015 (match_operand:SI 2 "s_register_operand" "r")))]
4017 "sdiv%?\t%0, %1, %2"
4018 [(set_attr "predicable" "yes")
4019 (set_attr "insn" "sdiv")]
4022 (define_insn "udivsi3"
4023 [(set (match_operand:SI 0 "s_register_operand" "=r")
4024 (udiv:SI (match_operand:SI 1 "s_register_operand" "r")
4025 (match_operand:SI 2 "s_register_operand" "r")))]
4027 "udiv%?\t%0, %1, %2"
4028 [(set_attr "predicable" "yes")
4029 (set_attr "insn" "udiv")]
4033 ;; Unary arithmetic insns
4035 (define_expand "negdi2"
4037 [(set (match_operand:DI 0 "s_register_operand" "")
4038 (neg:DI (match_operand:DI 1 "s_register_operand" "")))
4039 (clobber (reg:CC CC_REGNUM))])]
4044 ;; The constraints here are to prevent a *partial* overlap (where %Q0 == %R1).
4045 ;; The first alternative allows the common case of a *full* overlap.
4046 (define_insn "*arm_negdi2"
4047 [(set (match_operand:DI 0 "s_register_operand" "=r,&r")
4048 (neg:DI (match_operand:DI 1 "s_register_operand" "0,r")))
4049 (clobber (reg:CC CC_REGNUM))]
4051 "rsbs\\t%Q0, %Q1, #0\;rsc\\t%R0, %R1, #0"
4052 [(set_attr "conds" "clob")
4053 (set_attr "length" "8")]
4056 (define_insn "*thumb1_negdi2"
4057 [(set (match_operand:DI 0 "register_operand" "=&l")
4058 (neg:DI (match_operand:DI 1 "register_operand" "l")))
4059 (clobber (reg:CC CC_REGNUM))]
4061 "mov\\t%R0, #0\;neg\\t%Q0, %Q1\;sbc\\t%R0, %R1"
4062 [(set_attr "length" "6")]
4065 (define_expand "negsi2"
4066 [(set (match_operand:SI 0 "s_register_operand" "")
4067 (neg:SI (match_operand:SI 1 "s_register_operand" "")))]
4072 (define_insn "*arm_negsi2"
4073 [(set (match_operand:SI 0 "s_register_operand" "=r")
4074 (neg:SI (match_operand:SI 1 "s_register_operand" "r")))]
4076 "rsb%?\\t%0, %1, #0"
4077 [(set_attr "predicable" "yes")]
4080 (define_insn "*thumb1_negsi2"
4081 [(set (match_operand:SI 0 "register_operand" "=l")
4082 (neg:SI (match_operand:SI 1 "register_operand" "l")))]
4085 [(set_attr "length" "2")]
4088 (define_expand "negsf2"
4089 [(set (match_operand:SF 0 "s_register_operand" "")
4090 (neg:SF (match_operand:SF 1 "s_register_operand" "")))]
4091 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
4095 (define_expand "negdf2"
4096 [(set (match_operand:DF 0 "s_register_operand" "")
4097 (neg:DF (match_operand:DF 1 "s_register_operand" "")))]
4098 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP_DOUBLE)"
4101 ;; abssi2 doesn't really clobber the condition codes if a different register
4102 ;; is being set. To keep things simple, assume during rtl manipulations that
4103 ;; it does, but tell the final scan operator the truth. Similarly for
4106 (define_expand "abssi2"
4108 [(set (match_operand:SI 0 "s_register_operand" "")
4109 (abs:SI (match_operand:SI 1 "s_register_operand" "")))
4110 (clobber (match_dup 2))])]
4114 operands[2] = gen_rtx_SCRATCH (SImode);
4116 operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
4119 (define_insn "*arm_abssi2"
4120 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
4121 (abs:SI (match_operand:SI 1 "s_register_operand" "0,r")))
4122 (clobber (reg:CC CC_REGNUM))]
4125 cmp\\t%0, #0\;rsblt\\t%0, %0, #0
4126 eor%?\\t%0, %1, %1, asr #31\;sub%?\\t%0, %0, %1, asr #31"
4127 [(set_attr "conds" "clob,*")
4128 (set_attr "shift" "1")
4129 ;; predicable can't be set based on the variant, so left as no
4130 (set_attr "length" "8")]
4133 (define_insn_and_split "*thumb1_abssi2"
4134 [(set (match_operand:SI 0 "s_register_operand" "=l")
4135 (abs:SI (match_operand:SI 1 "s_register_operand" "l")))
4136 (clobber (match_scratch:SI 2 "=&l"))]
4139 "TARGET_THUMB1 && reload_completed"
4140 [(set (match_dup 2) (ashiftrt:SI (match_dup 1) (const_int 31)))
4141 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))
4142 (set (match_dup 0) (xor:SI (match_dup 0) (match_dup 2)))]
4144 [(set_attr "length" "6")]
4147 (define_insn "*arm_neg_abssi2"
4148 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
4149 (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "0,r"))))
4150 (clobber (reg:CC CC_REGNUM))]
4153 cmp\\t%0, #0\;rsbgt\\t%0, %0, #0
4154 eor%?\\t%0, %1, %1, asr #31\;rsb%?\\t%0, %0, %1, asr #31"
4155 [(set_attr "conds" "clob,*")
4156 (set_attr "shift" "1")
4157 ;; predicable can't be set based on the variant, so left as no
4158 (set_attr "length" "8")]
4161 (define_insn_and_split "*thumb1_neg_abssi2"
4162 [(set (match_operand:SI 0 "s_register_operand" "=l")
4163 (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "l"))))
4164 (clobber (match_scratch:SI 2 "=&l"))]
4167 "TARGET_THUMB1 && reload_completed"
4168 [(set (match_dup 2) (ashiftrt:SI (match_dup 1) (const_int 31)))
4169 (set (match_dup 0) (minus:SI (match_dup 2) (match_dup 1)))
4170 (set (match_dup 0) (xor:SI (match_dup 0) (match_dup 2)))]
4172 [(set_attr "length" "6")]
4175 (define_expand "abssf2"
4176 [(set (match_operand:SF 0 "s_register_operand" "")
4177 (abs:SF (match_operand:SF 1 "s_register_operand" "")))]
4178 "TARGET_32BIT && TARGET_HARD_FLOAT"
4181 (define_expand "absdf2"
4182 [(set (match_operand:DF 0 "s_register_operand" "")
4183 (abs:DF (match_operand:DF 1 "s_register_operand" "")))]
4184 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4187 (define_expand "sqrtsf2"
4188 [(set (match_operand:SF 0 "s_register_operand" "")
4189 (sqrt:SF (match_operand:SF 1 "s_register_operand" "")))]
4190 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
4193 (define_expand "sqrtdf2"
4194 [(set (match_operand:DF 0 "s_register_operand" "")
4195 (sqrt:DF (match_operand:DF 1 "s_register_operand" "")))]
4196 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP_DOUBLE)"
4199 (define_insn_and_split "one_cmpldi2"
4200 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
4201 (not:DI (match_operand:DI 1 "s_register_operand" "0,r")))]
4204 "TARGET_32BIT && reload_completed"
4205 [(set (match_dup 0) (not:SI (match_dup 1)))
4206 (set (match_dup 2) (not:SI (match_dup 3)))]
4209 operands[2] = gen_highpart (SImode, operands[0]);
4210 operands[0] = gen_lowpart (SImode, operands[0]);
4211 operands[3] = gen_highpart (SImode, operands[1]);
4212 operands[1] = gen_lowpart (SImode, operands[1]);
4214 [(set_attr "length" "8")
4215 (set_attr "predicable" "yes")]
4218 (define_expand "one_cmplsi2"
4219 [(set (match_operand:SI 0 "s_register_operand" "")
4220 (not:SI (match_operand:SI 1 "s_register_operand" "")))]
4225 (define_insn "*arm_one_cmplsi2"
4226 [(set (match_operand:SI 0 "s_register_operand" "=r")
4227 (not:SI (match_operand:SI 1 "s_register_operand" "r")))]
4230 [(set_attr "predicable" "yes")
4231 (set_attr "insn" "mvn")]
4234 (define_insn "*thumb1_one_cmplsi2"
4235 [(set (match_operand:SI 0 "register_operand" "=l")
4236 (not:SI (match_operand:SI 1 "register_operand" "l")))]
4239 [(set_attr "length" "2")
4240 (set_attr "insn" "mvn")]
4243 (define_insn "*notsi_compare0"
4244 [(set (reg:CC_NOOV CC_REGNUM)
4245 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
4247 (set (match_operand:SI 0 "s_register_operand" "=r")
4248 (not:SI (match_dup 1)))]
4251 [(set_attr "conds" "set")
4252 (set_attr "insn" "mvn")]
4255 (define_insn "*notsi_compare0_scratch"
4256 [(set (reg:CC_NOOV CC_REGNUM)
4257 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
4259 (clobber (match_scratch:SI 0 "=r"))]
4262 [(set_attr "conds" "set")
4263 (set_attr "insn" "mvn")]
4266 ;; Fixed <--> Floating conversion insns
4268 (define_expand "floatsihf2"
4269 [(set (match_operand:HF 0 "general_operand" "")
4270 (float:HF (match_operand:SI 1 "general_operand" "")))]
4274 rtx op1 = gen_reg_rtx (SFmode);
4275 expand_float (op1, operands[1], 0);
4276 op1 = convert_to_mode (HFmode, op1, 0);
4277 emit_move_insn (operands[0], op1);
4282 (define_expand "floatdihf2"
4283 [(set (match_operand:HF 0 "general_operand" "")
4284 (float:HF (match_operand:DI 1 "general_operand" "")))]
4288 rtx op1 = gen_reg_rtx (SFmode);
4289 expand_float (op1, operands[1], 0);
4290 op1 = convert_to_mode (HFmode, op1, 0);
4291 emit_move_insn (operands[0], op1);
4296 (define_expand "floatsisf2"
4297 [(set (match_operand:SF 0 "s_register_operand" "")
4298 (float:SF (match_operand:SI 1 "s_register_operand" "")))]
4299 "TARGET_32BIT && TARGET_HARD_FLOAT"
4301 if (TARGET_MAVERICK)
4303 emit_insn (gen_cirrus_floatsisf2 (operands[0], operands[1]));
4308 (define_expand "floatsidf2"
4309 [(set (match_operand:DF 0 "s_register_operand" "")
4310 (float:DF (match_operand:SI 1 "s_register_operand" "")))]
4311 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4313 if (TARGET_MAVERICK)
4315 emit_insn (gen_cirrus_floatsidf2 (operands[0], operands[1]));
4320 (define_expand "fix_trunchfsi2"
4321 [(set (match_operand:SI 0 "general_operand" "")
4322 (fix:SI (fix:HF (match_operand:HF 1 "general_operand" ""))))]
4326 rtx op1 = convert_to_mode (SFmode, operands[1], 0);
4327 expand_fix (operands[0], op1, 0);
4332 (define_expand "fix_trunchfdi2"
4333 [(set (match_operand:DI 0 "general_operand" "")
4334 (fix:DI (fix:HF (match_operand:HF 1 "general_operand" ""))))]
4338 rtx op1 = convert_to_mode (SFmode, operands[1], 0);
4339 expand_fix (operands[0], op1, 0);
4344 (define_expand "fix_truncsfsi2"
4345 [(set (match_operand:SI 0 "s_register_operand" "")
4346 (fix:SI (fix:SF (match_operand:SF 1 "s_register_operand" ""))))]
4347 "TARGET_32BIT && TARGET_HARD_FLOAT"
4349 if (TARGET_MAVERICK)
4351 if (!cirrus_fp_register (operands[0], SImode))
4352 operands[0] = force_reg (SImode, operands[0]);
4353 if (!cirrus_fp_register (operands[1], SFmode))
4354 operands[1] = force_reg (SFmode, operands[0]);
4355 emit_insn (gen_cirrus_truncsfsi2 (operands[0], operands[1]));
4360 (define_expand "fix_truncdfsi2"
4361 [(set (match_operand:SI 0 "s_register_operand" "")
4362 (fix:SI (fix:DF (match_operand:DF 1 "s_register_operand" ""))))]
4363 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4365 if (TARGET_MAVERICK)
4367 if (!cirrus_fp_register (operands[1], DFmode))
4368 operands[1] = force_reg (DFmode, operands[0]);
4369 emit_insn (gen_cirrus_truncdfsi2 (operands[0], operands[1]));
4376 (define_expand "truncdfsf2"
4377 [(set (match_operand:SF 0 "s_register_operand" "")
4379 (match_operand:DF 1 "s_register_operand" "")))]
4380 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4384 /* DFmode -> HFmode conversions have to go through SFmode. */
4385 (define_expand "truncdfhf2"
4386 [(set (match_operand:HF 0 "general_operand" "")
4388 (match_operand:DF 1 "general_operand" "")))]
4393 op1 = convert_to_mode (SFmode, operands[1], 0);
4394 op1 = convert_to_mode (HFmode, op1, 0);
4395 emit_move_insn (operands[0], op1);
4400 ;; Zero and sign extension instructions.
4402 (define_insn "zero_extend<mode>di2"
4403 [(set (match_operand:DI 0 "s_register_operand" "=r")
4404 (zero_extend:DI (match_operand:QHSI 1 "<qhs_zextenddi_op>"
4405 "<qhs_zextenddi_cstr>")))]
4406 "TARGET_32BIT <qhs_zextenddi_cond>"
4408 [(set_attr "length" "8")
4409 (set_attr "ce_count" "2")
4410 (set_attr "predicable" "yes")]
4413 (define_insn "extend<mode>di2"
4414 [(set (match_operand:DI 0 "s_register_operand" "=r")
4415 (sign_extend:DI (match_operand:QHSI 1 "<qhs_extenddi_op>"
4416 "<qhs_extenddi_cstr>")))]
4417 "TARGET_32BIT <qhs_sextenddi_cond>"
4419 [(set_attr "length" "8")
4420 (set_attr "ce_count" "2")
4421 (set_attr "shift" "1")
4422 (set_attr "predicable" "yes")]
4425 ;; Splits for all extensions to DImode
4427 [(set (match_operand:DI 0 "s_register_operand" "")
4428 (zero_extend:DI (match_operand 1 "nonimmediate_operand" "")))]
4430 [(set (match_dup 0) (match_dup 1))]
4432 rtx lo_part = gen_lowpart (SImode, operands[0]);
4433 enum machine_mode src_mode = GET_MODE (operands[1]);
4435 if (REG_P (operands[0])
4436 && !reg_overlap_mentioned_p (operands[0], operands[1]))
4437 emit_clobber (operands[0]);
4438 if (!REG_P (lo_part) || src_mode != SImode
4439 || !rtx_equal_p (lo_part, operands[1]))
4441 if (src_mode == SImode)
4442 emit_move_insn (lo_part, operands[1]);
4444 emit_insn (gen_rtx_SET (VOIDmode, lo_part,
4445 gen_rtx_ZERO_EXTEND (SImode, operands[1])));
4446 operands[1] = lo_part;
4448 operands[0] = gen_highpart (SImode, operands[0]);
4449 operands[1] = const0_rtx;
4453 [(set (match_operand:DI 0 "s_register_operand" "")
4454 (sign_extend:DI (match_operand 1 "nonimmediate_operand" "")))]
4456 [(set (match_dup 0) (ashiftrt:SI (match_dup 1) (const_int 31)))]
4458 rtx lo_part = gen_lowpart (SImode, operands[0]);
4459 enum machine_mode src_mode = GET_MODE (operands[1]);
4461 if (REG_P (operands[0])
4462 && !reg_overlap_mentioned_p (operands[0], operands[1]))
4463 emit_clobber (operands[0]);
4465 if (!REG_P (lo_part) || src_mode != SImode
4466 || !rtx_equal_p (lo_part, operands[1]))
4468 if (src_mode == SImode)
4469 emit_move_insn (lo_part, operands[1]);
4471 emit_insn (gen_rtx_SET (VOIDmode, lo_part,
4472 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
4473 operands[1] = lo_part;
4475 operands[0] = gen_highpart (SImode, operands[0]);
4478 (define_expand "zero_extendhisi2"
4479 [(set (match_operand:SI 0 "s_register_operand" "")
4480 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "")))]
4483 if (TARGET_ARM && !arm_arch4 && MEM_P (operands[1]))
4485 emit_insn (gen_movhi_bytes (operands[0], operands[1]));
4488 if (!arm_arch6 && !MEM_P (operands[1]))
4490 rtx t = gen_lowpart (SImode, operands[1]);
4491 rtx tmp = gen_reg_rtx (SImode);
4492 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (16)));
4493 emit_insn (gen_lshrsi3 (operands[0], tmp, GEN_INT (16)));
4499 [(set (match_operand:SI 0 "s_register_operand" "")
4500 (zero_extend:SI (match_operand:HI 1 "s_register_operand" "")))]
4501 "!TARGET_THUMB2 && !arm_arch6"
4502 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
4503 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 16)))]
4505 operands[2] = gen_lowpart (SImode, operands[1]);
4508 (define_insn "*thumb1_zero_extendhisi2"
4509 [(set (match_operand:SI 0 "register_operand" "=l,l")
4510 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "l,m")))]
4515 if (which_alternative == 0 && arm_arch6)
4516 return "uxth\t%0, %1";
4517 if (which_alternative == 0)
4520 mem = XEXP (operands[1], 0);
4522 if (GET_CODE (mem) == CONST)
4523 mem = XEXP (mem, 0);
4525 if (GET_CODE (mem) == PLUS)
4527 rtx a = XEXP (mem, 0);
4529 /* This can happen due to bugs in reload. */
4530 if (GET_CODE (a) == REG && REGNO (a) == SP_REGNUM)
4533 ops[0] = operands[0];
4536 output_asm_insn ("mov\t%0, %1", ops);
4538 XEXP (mem, 0) = operands[0];
4542 return "ldrh\t%0, %1";
4544 [(set_attr_alternative "length"
4545 [(if_then_else (eq_attr "is_arch6" "yes")
4546 (const_int 2) (const_int 4))
4548 (set_attr "type" "alu_shift,load_byte")]
4551 (define_insn "*arm_zero_extendhisi2"
4552 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4553 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
4554 "TARGET_ARM && arm_arch4 && !arm_arch6"
4558 [(set_attr "type" "alu_shift,load_byte")
4559 (set_attr "predicable" "yes")]
4562 (define_insn "*arm_zero_extendhisi2_v6"
4563 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4564 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
4565 "TARGET_ARM && arm_arch6"
4569 [(set_attr "type" "alu_shift,load_byte")
4570 (set_attr "predicable" "yes")]
4573 (define_insn "*arm_zero_extendhisi2addsi"
4574 [(set (match_operand:SI 0 "s_register_operand" "=r")
4575 (plus:SI (zero_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
4576 (match_operand:SI 2 "s_register_operand" "r")))]
4578 "uxtah%?\\t%0, %2, %1"
4579 [(set_attr "type" "alu_shift")
4580 (set_attr "predicable" "yes")]
4583 (define_expand "zero_extendqisi2"
4584 [(set (match_operand:SI 0 "s_register_operand" "")
4585 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "")))]
4588 if (TARGET_ARM && !arm_arch6 && GET_CODE (operands[1]) != MEM)
4590 emit_insn (gen_andsi3 (operands[0],
4591 gen_lowpart (SImode, operands[1]),
4595 if (!arm_arch6 && !MEM_P (operands[1]))
4597 rtx t = gen_lowpart (SImode, operands[1]);
4598 rtx tmp = gen_reg_rtx (SImode);
4599 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (24)));
4600 emit_insn (gen_lshrsi3 (operands[0], tmp, GEN_INT (24)));
4606 [(set (match_operand:SI 0 "s_register_operand" "")
4607 (zero_extend:SI (match_operand:QI 1 "s_register_operand" "")))]
4609 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 24)))
4610 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 24)))]
4612 operands[2] = simplify_gen_subreg (SImode, operands[1], QImode, 0);
4615 emit_insn (gen_andsi3 (operands[0], operands[2], GEN_INT (255)));
4620 (define_insn "*thumb1_zero_extendqisi2"
4621 [(set (match_operand:SI 0 "register_operand" "=l,l")
4622 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,m")))]
4623 "TARGET_THUMB1 && !arm_arch6"
4627 [(set_attr "length" "4,2")
4628 (set_attr "type" "alu_shift,load_byte")
4629 (set_attr "pool_range" "*,32")]
4632 (define_insn "*thumb1_zero_extendqisi2_v6"
4633 [(set (match_operand:SI 0 "register_operand" "=l,l")
4634 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,m")))]
4635 "TARGET_THUMB1 && arm_arch6"
4639 [(set_attr "length" "2")
4640 (set_attr "type" "alu_shift,load_byte")]
4643 (define_insn "*arm_zero_extendqisi2"
4644 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4645 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
4646 "TARGET_ARM && !arm_arch6"
4649 ldr%(b%)\\t%0, %1\\t%@ zero_extendqisi2"
4650 [(set_attr "length" "8,4")
4651 (set_attr "type" "alu_shift,load_byte")
4652 (set_attr "predicable" "yes")]
4655 (define_insn "*arm_zero_extendqisi2_v6"
4656 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4657 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
4658 "TARGET_ARM && arm_arch6"
4661 ldr%(b%)\\t%0, %1\\t%@ zero_extendqisi2"
4662 [(set_attr "type" "alu_shift,load_byte")
4663 (set_attr "predicable" "yes")]
4666 (define_insn "*arm_zero_extendqisi2addsi"
4667 [(set (match_operand:SI 0 "s_register_operand" "=r")
4668 (plus:SI (zero_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
4669 (match_operand:SI 2 "s_register_operand" "r")))]
4671 "uxtab%?\\t%0, %2, %1"
4672 [(set_attr "predicable" "yes")
4673 (set_attr "insn" "xtab")
4674 (set_attr "type" "alu_shift")]
4678 [(set (match_operand:SI 0 "s_register_operand" "")
4679 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 0)))
4680 (clobber (match_operand:SI 2 "s_register_operand" ""))]
4681 "TARGET_32BIT && (GET_CODE (operands[1]) != MEM) && ! BYTES_BIG_ENDIAN"
4682 [(set (match_dup 2) (match_dup 1))
4683 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
4688 [(set (match_operand:SI 0 "s_register_operand" "")
4689 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 3)))
4690 (clobber (match_operand:SI 2 "s_register_operand" ""))]
4691 "TARGET_32BIT && (GET_CODE (operands[1]) != MEM) && BYTES_BIG_ENDIAN"
4692 [(set (match_dup 2) (match_dup 1))
4693 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
4699 [(set (match_operand:SI 0 "s_register_operand" "")
4700 (ior_xor:SI (and:SI (ashift:SI
4701 (match_operand:SI 1 "s_register_operand" "")
4702 (match_operand:SI 2 "const_int_operand" ""))
4703 (match_operand:SI 3 "const_int_operand" ""))
4705 (match_operator 5 "subreg_lowpart_operator"
4706 [(match_operand:SI 4 "s_register_operand" "")]))))]
4708 && ((unsigned HOST_WIDE_INT) INTVAL (operands[3])
4709 == (GET_MODE_MASK (GET_MODE (operands[5]))
4710 & (GET_MODE_MASK (GET_MODE (operands[5]))
4711 << (INTVAL (operands[2])))))"
4712 [(set (match_dup 0) (ior_xor:SI (ashift:SI (match_dup 1) (match_dup 2))
4714 (set (match_dup 0) (zero_extend:SI (match_dup 5)))]
4715 "operands[5] = gen_lowpart (GET_MODE (operands[5]), operands[0]);"
4718 (define_insn "*compareqi_eq0"
4719 [(set (reg:CC_Z CC_REGNUM)
4720 (compare:CC_Z (match_operand:QI 0 "s_register_operand" "r")
4724 [(set_attr "conds" "set")
4725 (set_attr "predicable" "yes")]
4728 (define_expand "extendhisi2"
4729 [(set (match_operand:SI 0 "s_register_operand" "")
4730 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "")))]
4735 emit_insn (gen_thumb1_extendhisi2 (operands[0], operands[1]));
4738 if (MEM_P (operands[1]) && TARGET_ARM && !arm_arch4)
4740 emit_insn (gen_extendhisi2_mem (operands[0], operands[1]));
4744 if (!arm_arch6 && !MEM_P (operands[1]))
4746 rtx t = gen_lowpart (SImode, operands[1]);
4747 rtx tmp = gen_reg_rtx (SImode);
4748 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (16)));
4749 emit_insn (gen_ashrsi3 (operands[0], tmp, GEN_INT (16)));
4756 [(set (match_operand:SI 0 "register_operand" "")
4757 (sign_extend:SI (match_operand:HI 1 "register_operand" "")))
4758 (clobber (match_scratch:SI 2 ""))])]
4760 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
4761 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 16)))]
4763 operands[2] = simplify_gen_subreg (SImode, operands[1], HImode, 0);
4766 ;; We used to have an early-clobber on the scratch register here.
4767 ;; However, there's a bug somewhere in reload which means that this
4768 ;; can be partially ignored during spill allocation if the memory
4769 ;; address also needs reloading; this causes us to die later on when
4770 ;; we try to verify the operands. Fortunately, we don't really need
4771 ;; the early-clobber: we can always use operand 0 if operand 2
4772 ;; overlaps the address.
4773 (define_insn "thumb1_extendhisi2"
4774 [(set (match_operand:SI 0 "register_operand" "=l,l")
4775 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "l,m")))
4776 (clobber (match_scratch:SI 2 "=X,l"))]
4783 if (which_alternative == 0 && !arm_arch6)
4785 if (which_alternative == 0)
4786 return \"sxth\\t%0, %1\";
4788 mem = XEXP (operands[1], 0);
4790 /* This code used to try to use 'V', and fix the address only if it was
4791 offsettable, but this fails for e.g. REG+48 because 48 is outside the
4792 range of QImode offsets, and offsettable_address_p does a QImode
4795 if (GET_CODE (mem) == CONST)
4796 mem = XEXP (mem, 0);
4798 if (GET_CODE (mem) == LABEL_REF)
4799 return \"ldr\\t%0, %1\";
4801 if (GET_CODE (mem) == PLUS)
4803 rtx a = XEXP (mem, 0);
4804 rtx b = XEXP (mem, 1);
4806 if (GET_CODE (a) == LABEL_REF
4807 && GET_CODE (b) == CONST_INT)
4808 return \"ldr\\t%0, %1\";
4810 if (GET_CODE (b) == REG)
4811 return \"ldrsh\\t%0, %1\";
4819 ops[2] = const0_rtx;
4822 gcc_assert (GET_CODE (ops[1]) == REG);
4824 ops[0] = operands[0];
4825 if (reg_mentioned_p (operands[2], ops[1]))
4828 ops[3] = operands[2];
4829 output_asm_insn (\"mov\\t%3, %2\;ldrsh\\t%0, [%1, %3]\", ops);
4832 [(set_attr_alternative "length"
4833 [(if_then_else (eq_attr "is_arch6" "yes")
4834 (const_int 2) (const_int 4))
4836 (set_attr "type" "alu_shift,load_byte")
4837 (set_attr "pool_range" "*,1020")]
4840 ;; This pattern will only be used when ldsh is not available
4841 (define_expand "extendhisi2_mem"
4842 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
4844 (zero_extend:SI (match_dup 7)))
4845 (set (match_dup 6) (ashift:SI (match_dup 4) (const_int 24)))
4846 (set (match_operand:SI 0 "" "")
4847 (ior:SI (ashiftrt:SI (match_dup 6) (const_int 16)) (match_dup 5)))]
4852 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
4854 mem1 = change_address (operands[1], QImode, addr);
4855 mem2 = change_address (operands[1], QImode, plus_constant (addr, 1));
4856 operands[0] = gen_lowpart (SImode, operands[0]);
4858 operands[2] = gen_reg_rtx (SImode);
4859 operands[3] = gen_reg_rtx (SImode);
4860 operands[6] = gen_reg_rtx (SImode);
4863 if (BYTES_BIG_ENDIAN)
4865 operands[4] = operands[2];
4866 operands[5] = operands[3];
4870 operands[4] = operands[3];
4871 operands[5] = operands[2];
4877 [(set (match_operand:SI 0 "register_operand" "")
4878 (sign_extend:SI (match_operand:HI 1 "register_operand" "")))]
4880 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
4881 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 16)))]
4883 operands[2] = simplify_gen_subreg (SImode, operands[1], HImode, 0);
4886 (define_insn "*arm_extendhisi2"
4887 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4888 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
4889 "TARGET_ARM && arm_arch4 && !arm_arch6"
4893 [(set_attr "length" "8,4")
4894 (set_attr "type" "alu_shift,load_byte")
4895 (set_attr "predicable" "yes")
4896 (set_attr "pool_range" "*,256")
4897 (set_attr "neg_pool_range" "*,244")]
4900 ;; ??? Check Thumb-2 pool range
4901 (define_insn "*arm_extendhisi2_v6"
4902 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4903 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
4904 "TARGET_32BIT && arm_arch6"
4908 [(set_attr "type" "alu_shift,load_byte")
4909 (set_attr "predicable" "yes")
4910 (set_attr "pool_range" "*,256")
4911 (set_attr "neg_pool_range" "*,244")]
4914 (define_insn "*arm_extendhisi2addsi"
4915 [(set (match_operand:SI 0 "s_register_operand" "=r")
4916 (plus:SI (sign_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
4917 (match_operand:SI 2 "s_register_operand" "r")))]
4919 "sxtah%?\\t%0, %2, %1"
4922 (define_expand "extendqihi2"
4924 (ashift:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "")
4926 (set (match_operand:HI 0 "s_register_operand" "")
4927 (ashiftrt:SI (match_dup 2)
4932 if (arm_arch4 && GET_CODE (operands[1]) == MEM)
4934 emit_insn (gen_rtx_SET (VOIDmode,
4936 gen_rtx_SIGN_EXTEND (HImode, operands[1])));
4939 if (!s_register_operand (operands[1], QImode))
4940 operands[1] = copy_to_mode_reg (QImode, operands[1]);
4941 operands[0] = gen_lowpart (SImode, operands[0]);
4942 operands[1] = gen_lowpart (SImode, operands[1]);
4943 operands[2] = gen_reg_rtx (SImode);
4947 (define_insn "*arm_extendqihi_insn"
4948 [(set (match_operand:HI 0 "s_register_operand" "=r")
4949 (sign_extend:HI (match_operand:QI 1 "arm_extendqisi_mem_op" "Uq")))]
4950 "TARGET_ARM && arm_arch4"
4951 "ldr%(sb%)\\t%0, %1"
4952 [(set_attr "type" "load_byte")
4953 (set_attr "predicable" "yes")
4954 (set_attr "pool_range" "256")
4955 (set_attr "neg_pool_range" "244")]
4958 (define_expand "extendqisi2"
4959 [(set (match_operand:SI 0 "s_register_operand" "")
4960 (sign_extend:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "")))]
4963 if (!arm_arch4 && MEM_P (operands[1]))
4964 operands[1] = copy_to_mode_reg (QImode, operands[1]);
4966 if (!arm_arch6 && !MEM_P (operands[1]))
4968 rtx t = gen_lowpart (SImode, operands[1]);
4969 rtx tmp = gen_reg_rtx (SImode);
4970 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (24)));
4971 emit_insn (gen_ashrsi3 (operands[0], tmp, GEN_INT (24)));
4977 [(set (match_operand:SI 0 "register_operand" "")
4978 (sign_extend:SI (match_operand:QI 1 "register_operand" "")))]
4980 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 24)))
4981 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 24)))]
4983 operands[2] = simplify_gen_subreg (SImode, operands[1], QImode, 0);
4986 (define_insn "*arm_extendqisi"
4987 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4988 (sign_extend:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
4989 "TARGET_ARM && arm_arch4 && !arm_arch6"
4993 [(set_attr "length" "8,4")
4994 (set_attr "type" "alu_shift,load_byte")
4995 (set_attr "predicable" "yes")
4996 (set_attr "pool_range" "*,256")
4997 (set_attr "neg_pool_range" "*,244")]
5000 (define_insn "*arm_extendqisi_v6"
5001 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5003 (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
5004 "TARGET_ARM && arm_arch6"
5008 [(set_attr "type" "alu_shift,load_byte")
5009 (set_attr "predicable" "yes")
5010 (set_attr "pool_range" "*,256")
5011 (set_attr "neg_pool_range" "*,244")]
5014 (define_insn "*arm_extendqisi2addsi"
5015 [(set (match_operand:SI 0 "s_register_operand" "=r")
5016 (plus:SI (sign_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
5017 (match_operand:SI 2 "s_register_operand" "r")))]
5019 "sxtab%?\\t%0, %2, %1"
5020 [(set_attr "type" "alu_shift")
5021 (set_attr "insn" "xtab")
5022 (set_attr "predicable" "yes")]
5026 [(set (match_operand:SI 0 "register_operand" "")
5027 (sign_extend:SI (match_operand:QI 1 "memory_operand" "")))]
5028 "TARGET_THUMB1 && reload_completed"
5029 [(set (match_dup 0) (match_dup 2))
5030 (set (match_dup 0) (sign_extend:SI (match_dup 3)))]
5032 rtx addr = XEXP (operands[1], 0);
5034 if (GET_CODE (addr) == CONST)
5035 addr = XEXP (addr, 0);
5037 if (GET_CODE (addr) == PLUS
5038 && REG_P (XEXP (addr, 0)) && REG_P (XEXP (addr, 1)))
5039 /* No split necessary. */
5042 if (GET_CODE (addr) == PLUS
5043 && !REG_P (XEXP (addr, 0)) && !REG_P (XEXP (addr, 1)))
5046 if (reg_overlap_mentioned_p (operands[0], addr))
5048 rtx t = gen_lowpart (QImode, operands[0]);
5049 emit_move_insn (t, operands[1]);
5050 emit_insn (gen_thumb1_extendqisi2 (operands[0], t));
5056 addr = gen_rtx_PLUS (Pmode, addr, operands[0]);
5057 operands[2] = const0_rtx;
5059 else if (GET_CODE (addr) != PLUS)
5061 else if (REG_P (XEXP (addr, 0)))
5063 operands[2] = XEXP (addr, 1);
5064 addr = gen_rtx_PLUS (Pmode, XEXP (addr, 0), operands[0]);
5068 operands[2] = XEXP (addr, 0);
5069 addr = gen_rtx_PLUS (Pmode, XEXP (addr, 1), operands[0]);
5072 operands[3] = change_address (operands[1], QImode, addr);
5076 [(set (match_operand:SI 0 "register_operand" "")
5077 (plus:SI (match_dup 0) (match_operand 1 "const_int_operand")))
5078 (set (match_operand:SI 2 "register_operand" "") (const_int 0))
5079 (set (match_operand:SI 3 "register_operand" "")
5080 (sign_extend:SI (match_operand:QI 4 "memory_operand" "")))]
5082 && GET_CODE (XEXP (operands[4], 0)) == PLUS
5083 && rtx_equal_p (operands[0], XEXP (XEXP (operands[4], 0), 0))
5084 && rtx_equal_p (operands[2], XEXP (XEXP (operands[4], 0), 1))
5085 && (peep2_reg_dead_p (3, operands[0])
5086 || rtx_equal_p (operands[0], operands[3]))
5087 && (peep2_reg_dead_p (3, operands[2])
5088 || rtx_equal_p (operands[2], operands[3]))"
5089 [(set (match_dup 2) (match_dup 1))
5090 (set (match_dup 3) (sign_extend:SI (match_dup 4)))]
5092 rtx addr = gen_rtx_PLUS (Pmode, operands[0], operands[2]);
5093 operands[4] = change_address (operands[4], QImode, addr);
5096 (define_insn "thumb1_extendqisi2"
5097 [(set (match_operand:SI 0 "register_operand" "=l,l,l")
5098 (sign_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,V,m")))]
5103 if (which_alternative == 0 && arm_arch6)
5104 return "sxtb\\t%0, %1";
5105 if (which_alternative == 0)
5108 addr = XEXP (operands[1], 0);
5109 if (GET_CODE (addr) == PLUS
5110 && REG_P (XEXP (addr, 0)) && REG_P (XEXP (addr, 1)))
5111 return "ldrsb\\t%0, %1";
5115 [(set_attr_alternative "length"
5116 [(if_then_else (eq_attr "is_arch6" "yes")
5117 (const_int 2) (const_int 4))
5119 (if_then_else (eq_attr "is_arch6" "yes")
5120 (const_int 4) (const_int 6))])
5121 (set_attr "type" "alu_shift,load_byte,load_byte")]
5124 (define_expand "extendsfdf2"
5125 [(set (match_operand:DF 0 "s_register_operand" "")
5126 (float_extend:DF (match_operand:SF 1 "s_register_operand" "")))]
5127 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
5131 /* HFmode -> DFmode conversions have to go through SFmode. */
5132 (define_expand "extendhfdf2"
5133 [(set (match_operand:DF 0 "general_operand" "")
5134 (float_extend:DF (match_operand:HF 1 "general_operand" "")))]
5139 op1 = convert_to_mode (SFmode, operands[1], 0);
5140 op1 = convert_to_mode (DFmode, op1, 0);
5141 emit_insn (gen_movdf (operands[0], op1));
5146 ;; Move insns (including loads and stores)
5148 ;; XXX Just some ideas about movti.
5149 ;; I don't think these are a good idea on the arm, there just aren't enough
5151 ;;(define_expand "loadti"
5152 ;; [(set (match_operand:TI 0 "s_register_operand" "")
5153 ;; (mem:TI (match_operand:SI 1 "address_operand" "")))]
5156 ;;(define_expand "storeti"
5157 ;; [(set (mem:TI (match_operand:TI 0 "address_operand" ""))
5158 ;; (match_operand:TI 1 "s_register_operand" ""))]
5161 ;;(define_expand "movti"
5162 ;; [(set (match_operand:TI 0 "general_operand" "")
5163 ;; (match_operand:TI 1 "general_operand" ""))]
5169 ;; if (GET_CODE (operands[0]) == MEM && GET_CODE (operands[1]) == MEM)
5170 ;; operands[1] = copy_to_reg (operands[1]);
5171 ;; if (GET_CODE (operands[0]) == MEM)
5172 ;; insn = gen_storeti (XEXP (operands[0], 0), operands[1]);
5173 ;; else if (GET_CODE (operands[1]) == MEM)
5174 ;; insn = gen_loadti (operands[0], XEXP (operands[1], 0));
5178 ;; emit_insn (insn);
5182 ;; Recognize garbage generated above.
5185 ;; [(set (match_operand:TI 0 "general_operand" "=r,r,r,<,>,m")
5186 ;; (match_operand:TI 1 "general_operand" "<,>,m,r,r,r"))]
5190 ;; register mem = (which_alternative < 3);
5191 ;; register const char *template;
5193 ;; operands[mem] = XEXP (operands[mem], 0);
5194 ;; switch (which_alternative)
5196 ;; case 0: template = \"ldmdb\\t%1!, %M0\"; break;
5197 ;; case 1: template = \"ldmia\\t%1!, %M0\"; break;
5198 ;; case 2: template = \"ldmia\\t%1, %M0\"; break;
5199 ;; case 3: template = \"stmdb\\t%0!, %M1\"; break;
5200 ;; case 4: template = \"stmia\\t%0!, %M1\"; break;
5201 ;; case 5: template = \"stmia\\t%0, %M1\"; break;
5203 ;; output_asm_insn (template, operands);
5207 (define_expand "movdi"
5208 [(set (match_operand:DI 0 "general_operand" "")
5209 (match_operand:DI 1 "general_operand" ""))]
5212 if (can_create_pseudo_p ())
5214 if (GET_CODE (operands[0]) != REG)
5215 operands[1] = force_reg (DImode, operands[1]);
5220 (define_insn "*arm_movdi"
5221 [(set (match_operand:DI 0 "nonimmediate_di_operand" "=r, r, r, r, m")
5222 (match_operand:DI 1 "di_operand" "rDa,Db,Dc,mi,r"))]
5224 && !(TARGET_HARD_FLOAT && (TARGET_MAVERICK || TARGET_VFP))
5226 && ( register_operand (operands[0], DImode)
5227 || register_operand (operands[1], DImode))"
5229 switch (which_alternative)
5236 return output_move_double (operands, true, NULL);
5239 [(set_attr "length" "8,12,16,8,8")
5240 (set_attr "type" "*,*,*,load2,store2")
5241 (set_attr "arm_pool_range" "*,*,*,1020,*")
5242 (set_attr "arm_neg_pool_range" "*,*,*,1004,*")
5243 (set_attr "thumb2_pool_range" "*,*,*,4096,*")
5244 (set_attr "thumb2_neg_pool_range" "*,*,*,0,*")]
5248 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
5249 (match_operand:ANY64 1 "const_double_operand" ""))]
5252 && (arm_const_double_inline_cost (operands[1])
5253 <= ((optimize_size || arm_ld_sched) ? 3 : 4))"
5256 arm_split_constant (SET, SImode, curr_insn,
5257 INTVAL (gen_lowpart (SImode, operands[1])),
5258 gen_lowpart (SImode, operands[0]), NULL_RTX, 0);
5259 arm_split_constant (SET, SImode, curr_insn,
5260 INTVAL (gen_highpart_mode (SImode,
5261 GET_MODE (operands[0]),
5263 gen_highpart (SImode, operands[0]), NULL_RTX, 0);
5268 ; If optimizing for size, or if we have load delay slots, then
5269 ; we want to split the constant into two separate operations.
5270 ; In both cases this may split a trivial part into a single data op
5271 ; leaving a single complex constant to load. We can also get longer
5272 ; offsets in a LDR which means we get better chances of sharing the pool
5273 ; entries. Finally, we can normally do a better job of scheduling
5274 ; LDR instructions than we can with LDM.
5275 ; This pattern will only match if the one above did not.
5277 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
5278 (match_operand:ANY64 1 "const_double_operand" ""))]
5279 "TARGET_ARM && reload_completed
5280 && arm_const_double_by_parts (operands[1])"
5281 [(set (match_dup 0) (match_dup 1))
5282 (set (match_dup 2) (match_dup 3))]
5284 operands[2] = gen_highpart (SImode, operands[0]);
5285 operands[3] = gen_highpart_mode (SImode, GET_MODE (operands[0]),
5287 operands[0] = gen_lowpart (SImode, operands[0]);
5288 operands[1] = gen_lowpart (SImode, operands[1]);
5293 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
5294 (match_operand:ANY64 1 "arm_general_register_operand" ""))]
5295 "TARGET_EITHER && reload_completed"
5296 [(set (match_dup 0) (match_dup 1))
5297 (set (match_dup 2) (match_dup 3))]
5299 operands[2] = gen_highpart (SImode, operands[0]);
5300 operands[3] = gen_highpart (SImode, operands[1]);
5301 operands[0] = gen_lowpart (SImode, operands[0]);
5302 operands[1] = gen_lowpart (SImode, operands[1]);
5304 /* Handle a partial overlap. */
5305 if (rtx_equal_p (operands[0], operands[3]))
5307 rtx tmp0 = operands[0];
5308 rtx tmp1 = operands[1];
5310 operands[0] = operands[2];
5311 operands[1] = operands[3];
5318 ;; We can't actually do base+index doubleword loads if the index and
5319 ;; destination overlap. Split here so that we at least have chance to
5322 [(set (match_operand:DI 0 "s_register_operand" "")
5323 (mem:DI (plus:SI (match_operand:SI 1 "s_register_operand" "")
5324 (match_operand:SI 2 "s_register_operand" ""))))]
5326 && reg_overlap_mentioned_p (operands[0], operands[1])
5327 && reg_overlap_mentioned_p (operands[0], operands[2])"
5329 (plus:SI (match_dup 1)
5332 (mem:DI (match_dup 4)))]
5334 operands[4] = gen_rtx_REG (SImode, REGNO(operands[0]));
5338 ;;; ??? This should have alternatives for constants.
5339 ;;; ??? This was originally identical to the movdf_insn pattern.
5340 ;;; ??? The 'i' constraint looks funny, but it should always be replaced by
5341 ;;; thumb_reorg with a memory reference.
5342 (define_insn "*thumb1_movdi_insn"
5343 [(set (match_operand:DI 0 "nonimmediate_operand" "=l,l,l,l,>,l, m,*r")
5344 (match_operand:DI 1 "general_operand" "l, I,J,>,l,mi,l,*r"))]
5346 && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)
5347 && ( register_operand (operands[0], DImode)
5348 || register_operand (operands[1], DImode))"
5351 switch (which_alternative)
5355 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
5356 return \"add\\t%0, %1, #0\;add\\t%H0, %H1, #0\";
5357 return \"add\\t%H0, %H1, #0\;add\\t%0, %1, #0\";
5359 return \"mov\\t%Q0, %1\;mov\\t%R0, #0\";
5361 operands[1] = GEN_INT (- INTVAL (operands[1]));
5362 return \"mov\\t%Q0, %1\;neg\\t%Q0, %Q0\;asr\\t%R0, %Q0, #31\";
5364 return \"ldmia\\t%1, {%0, %H0}\";
5366 return \"stmia\\t%0, {%1, %H1}\";
5368 return thumb_load_double_from_address (operands);
5370 operands[2] = gen_rtx_MEM (SImode,
5371 plus_constant (XEXP (operands[0], 0), 4));
5372 output_asm_insn (\"str\\t%1, %0\;str\\t%H1, %2\", operands);
5375 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
5376 return \"mov\\t%0, %1\;mov\\t%H0, %H1\";
5377 return \"mov\\t%H0, %H1\;mov\\t%0, %1\";
5380 [(set_attr "length" "4,4,6,2,2,6,4,4")
5381 (set_attr "type" "*,*,*,load2,store2,load2,store2,*")
5382 (set_attr "insn" "*,mov,*,*,*,*,*,mov")
5383 (set_attr "pool_range" "*,*,*,*,*,1020,*,*")]
5386 (define_expand "movsi"
5387 [(set (match_operand:SI 0 "general_operand" "")
5388 (match_operand:SI 1 "general_operand" ""))]
5392 rtx base, offset, tmp;
5396 /* Everything except mem = const or mem = mem can be done easily. */
5397 if (GET_CODE (operands[0]) == MEM)
5398 operands[1] = force_reg (SImode, operands[1]);
5399 if (arm_general_register_operand (operands[0], SImode)
5400 && GET_CODE (operands[1]) == CONST_INT
5401 && !(const_ok_for_arm (INTVAL (operands[1]))
5402 || const_ok_for_arm (~INTVAL (operands[1]))))
5404 arm_split_constant (SET, SImode, NULL_RTX,
5405 INTVAL (operands[1]), operands[0], NULL_RTX,
5406 optimize && can_create_pseudo_p ());
5410 if (TARGET_USE_MOVT && !target_word_relocations
5411 && GET_CODE (operands[1]) == SYMBOL_REF
5412 && !flag_pic && !arm_tls_referenced_p (operands[1]))
5414 arm_emit_movpair (operands[0], operands[1]);
5418 else /* TARGET_THUMB1... */
5420 if (can_create_pseudo_p ())
5422 if (GET_CODE (operands[0]) != REG)
5423 operands[1] = force_reg (SImode, operands[1]);
5427 if (ARM_OFFSETS_MUST_BE_WITHIN_SECTIONS_P)
5429 split_const (operands[1], &base, &offset);
5430 if (GET_CODE (base) == SYMBOL_REF
5431 && !offset_within_block_p (base, INTVAL (offset)))
5433 tmp = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
5434 emit_move_insn (tmp, base);
5435 emit_insn (gen_addsi3 (operands[0], tmp, offset));
5440 /* Recognize the case where operand[1] is a reference to thread-local
5441 data and load its address to a register. */
5442 if (arm_tls_referenced_p (operands[1]))
5444 rtx tmp = operands[1];
5447 if (GET_CODE (tmp) == CONST && GET_CODE (XEXP (tmp, 0)) == PLUS)
5449 addend = XEXP (XEXP (tmp, 0), 1);
5450 tmp = XEXP (XEXP (tmp, 0), 0);
5453 gcc_assert (GET_CODE (tmp) == SYMBOL_REF);
5454 gcc_assert (SYMBOL_REF_TLS_MODEL (tmp) != 0);
5456 tmp = legitimize_tls_address (tmp,
5457 !can_create_pseudo_p () ? operands[0] : 0);
5460 tmp = gen_rtx_PLUS (SImode, tmp, addend);
5461 tmp = force_operand (tmp, operands[0]);
5466 && (CONSTANT_P (operands[1])
5467 || symbol_mentioned_p (operands[1])
5468 || label_mentioned_p (operands[1])))
5469 operands[1] = legitimize_pic_address (operands[1], SImode,
5470 (!can_create_pseudo_p ()
5477 ;; The ARM LO_SUM and HIGH are backwards - HIGH sets the low bits, and
5478 ;; LO_SUM adds in the high bits. Fortunately these are opaque operations
5479 ;; so this does not matter.
5480 (define_insn "*arm_movt"
5481 [(set (match_operand:SI 0 "nonimmediate_operand" "=r")
5482 (lo_sum:SI (match_operand:SI 1 "nonimmediate_operand" "0")
5483 (match_operand:SI 2 "general_operand" "i")))]
5485 "movt%?\t%0, #:upper16:%c2"
5486 [(set_attr "predicable" "yes")
5487 (set_attr "length" "4")]
5490 (define_insn "*arm_movsi_insn"
5491 [(set (match_operand:SI 0 "nonimmediate_operand" "=rk,r,r,r,rk,m")
5492 (match_operand:SI 1 "general_operand" "rk, I,K,j,mi,rk"))]
5493 "TARGET_ARM && ! TARGET_IWMMXT
5494 && !(TARGET_HARD_FLOAT && TARGET_VFP)
5495 && ( register_operand (operands[0], SImode)
5496 || register_operand (operands[1], SImode))"
5504 [(set_attr "type" "*,*,*,*,load1,store1")
5505 (set_attr "insn" "mov,mov,mvn,mov,*,*")
5506 (set_attr "predicable" "yes")
5507 (set_attr "pool_range" "*,*,*,*,4096,*")
5508 (set_attr "neg_pool_range" "*,*,*,*,4084,*")]
5512 [(set (match_operand:SI 0 "arm_general_register_operand" "")
5513 (match_operand:SI 1 "const_int_operand" ""))]
5515 && (!(const_ok_for_arm (INTVAL (operands[1]))
5516 || const_ok_for_arm (~INTVAL (operands[1]))))"
5517 [(clobber (const_int 0))]
5519 arm_split_constant (SET, SImode, NULL_RTX,
5520 INTVAL (operands[1]), operands[0], NULL_RTX, 0);
5525 (define_insn "*thumb1_movsi_insn"
5526 [(set (match_operand:SI 0 "nonimmediate_operand" "=l,l,l,l,l,>,l, m,*l*h*k")
5527 (match_operand:SI 1 "general_operand" "l, I,J,K,>,l,mi,l,*l*h*k"))]
5529 && ( register_operand (operands[0], SImode)
5530 || register_operand (operands[1], SImode))"
5541 [(set_attr "length" "2,2,4,4,2,2,2,2,2")
5542 (set_attr "type" "*,*,*,*,load1,store1,load1,store1,*")
5543 (set_attr "pool_range" "*,*,*,*,*,*,1020,*,*")
5544 (set_attr "conds" "set,clob,*,*,nocond,nocond,nocond,nocond,nocond")])
5547 [(set (match_operand:SI 0 "register_operand" "")
5548 (match_operand:SI 1 "const_int_operand" ""))]
5549 "TARGET_THUMB1 && satisfies_constraint_J (operands[1])"
5550 [(set (match_dup 2) (match_dup 1))
5551 (set (match_dup 0) (neg:SI (match_dup 2)))]
5554 operands[1] = GEN_INT (- INTVAL (operands[1]));
5555 operands[2] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
5560 [(set (match_operand:SI 0 "register_operand" "")
5561 (match_operand:SI 1 "const_int_operand" ""))]
5562 "TARGET_THUMB1 && satisfies_constraint_K (operands[1])"
5563 [(set (match_dup 2) (match_dup 1))
5564 (set (match_dup 0) (ashift:SI (match_dup 2) (match_dup 3)))]
5567 unsigned HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffffffffu;
5568 unsigned HOST_WIDE_INT mask = 0xff;
5571 for (i = 0; i < 25; i++)
5572 if ((val & (mask << i)) == val)
5575 /* Don't split if the shift is zero. */
5579 operands[1] = GEN_INT (val >> i);
5580 operands[2] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
5581 operands[3] = GEN_INT (i);
5585 ;; When generating pic, we need to load the symbol offset into a register.
5586 ;; So that the optimizer does not confuse this with a normal symbol load
5587 ;; we use an unspec. The offset will be loaded from a constant pool entry,
5588 ;; since that is the only type of relocation we can use.
5590 ;; Wrap calculation of the whole PIC address in a single pattern for the
5591 ;; benefit of optimizers, particularly, PRE and HOIST. Calculation of
5592 ;; a PIC address involves two loads from memory, so we want to CSE it
5593 ;; as often as possible.
5594 ;; This pattern will be split into one of the pic_load_addr_* patterns
5595 ;; and a move after GCSE optimizations.
5597 ;; Note: Update arm.c: legitimize_pic_address() when changing this pattern.
5598 (define_expand "calculate_pic_address"
5599 [(set (match_operand:SI 0 "register_operand" "")
5600 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "")
5601 (unspec:SI [(match_operand:SI 2 "" "")]
5606 ;; Split calculate_pic_address into pic_load_addr_* and a move.
5608 [(set (match_operand:SI 0 "register_operand" "")
5609 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "")
5610 (unspec:SI [(match_operand:SI 2 "" "")]
5613 [(set (match_dup 3) (unspec:SI [(match_dup 2)] UNSPEC_PIC_SYM))
5614 (set (match_dup 0) (mem:SI (plus:SI (match_dup 1) (match_dup 3))))]
5615 "operands[3] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];"
5618 ;; operand1 is the memory address to go into
5619 ;; pic_load_addr_32bit.
5620 ;; operand2 is the PIC label to be emitted
5621 ;; from pic_add_dot_plus_eight.
5622 ;; We do this to allow hoisting of the entire insn.
5623 (define_insn_and_split "pic_load_addr_unified"
5624 [(set (match_operand:SI 0 "s_register_operand" "=r,r,l")
5625 (unspec:SI [(match_operand:SI 1 "" "mX,mX,mX")
5626 (match_operand:SI 2 "" "")]
5627 UNSPEC_PIC_UNIFIED))]
5630 "&& reload_completed"
5631 [(set (match_dup 0) (unspec:SI [(match_dup 1)] UNSPEC_PIC_SYM))
5632 (set (match_dup 0) (unspec:SI [(match_dup 0) (match_dup 3)
5633 (match_dup 2)] UNSPEC_PIC_BASE))]
5634 "operands[3] = TARGET_THUMB ? GEN_INT (4) : GEN_INT (8);"
5635 [(set_attr "type" "load1,load1,load1")
5636 (set_attr "pool_range" "4096,4096,1024")
5637 (set_attr "neg_pool_range" "4084,0,0")
5638 (set_attr "arch" "a,t2,t1")
5639 (set_attr "length" "8,6,4")]
5642 ;; The rather odd constraints on the following are to force reload to leave
5643 ;; the insn alone, and to force the minipool generation pass to then move
5644 ;; the GOT symbol to memory.
5646 (define_insn "pic_load_addr_32bit"
5647 [(set (match_operand:SI 0 "s_register_operand" "=r")
5648 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
5649 "TARGET_32BIT && flag_pic"
5651 [(set_attr "type" "load1")
5652 (set_attr "pool_range" "4096")
5653 (set (attr "neg_pool_range")
5654 (if_then_else (eq_attr "is_thumb" "no")
5659 (define_insn "pic_load_addr_thumb1"
5660 [(set (match_operand:SI 0 "s_register_operand" "=l")
5661 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
5662 "TARGET_THUMB1 && flag_pic"
5664 [(set_attr "type" "load1")
5665 (set (attr "pool_range") (const_int 1024))]
5668 (define_insn "pic_add_dot_plus_four"
5669 [(set (match_operand:SI 0 "register_operand" "=r")
5670 (unspec:SI [(match_operand:SI 1 "register_operand" "0")
5672 (match_operand 2 "" "")]
5676 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5677 INTVAL (operands[2]));
5678 return \"add\\t%0, %|pc\";
5680 [(set_attr "length" "2")]
5683 (define_insn "pic_add_dot_plus_eight"
5684 [(set (match_operand:SI 0 "register_operand" "=r")
5685 (unspec:SI [(match_operand:SI 1 "register_operand" "r")
5687 (match_operand 2 "" "")]
5691 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5692 INTVAL (operands[2]));
5693 return \"add%?\\t%0, %|pc, %1\";
5695 [(set_attr "predicable" "yes")]
5698 (define_insn "tls_load_dot_plus_eight"
5699 [(set (match_operand:SI 0 "register_operand" "=r")
5700 (mem:SI (unspec:SI [(match_operand:SI 1 "register_operand" "r")
5702 (match_operand 2 "" "")]
5706 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5707 INTVAL (operands[2]));
5708 return \"ldr%?\\t%0, [%|pc, %1]\t\t@ tls_load_dot_plus_eight\";
5710 [(set_attr "predicable" "yes")]
5713 ;; PIC references to local variables can generate pic_add_dot_plus_eight
5714 ;; followed by a load. These sequences can be crunched down to
5715 ;; tls_load_dot_plus_eight by a peephole.
5718 [(set (match_operand:SI 0 "register_operand" "")
5719 (unspec:SI [(match_operand:SI 3 "register_operand" "")
5721 (match_operand 1 "" "")]
5723 (set (match_operand:SI 2 "arm_general_register_operand" "")
5724 (mem:SI (match_dup 0)))]
5725 "TARGET_ARM && peep2_reg_dead_p (2, operands[0])"
5727 (mem:SI (unspec:SI [(match_dup 3)
5734 (define_insn "pic_offset_arm"
5735 [(set (match_operand:SI 0 "register_operand" "=r")
5736 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "r")
5737 (unspec:SI [(match_operand:SI 2 "" "X")]
5738 UNSPEC_PIC_OFFSET))))]
5739 "TARGET_VXWORKS_RTP && TARGET_ARM && flag_pic"
5740 "ldr%?\\t%0, [%1,%2]"
5741 [(set_attr "type" "load1")]
5744 (define_expand "builtin_setjmp_receiver"
5745 [(label_ref (match_operand 0 "" ""))]
5749 /* r3 is clobbered by set/longjmp, so we can use it as a scratch
5751 if (arm_pic_register != INVALID_REGNUM)
5752 arm_load_pic_register (1UL << 3);
5756 ;; If copying one reg to another we can set the condition codes according to
5757 ;; its value. Such a move is common after a return from subroutine and the
5758 ;; result is being tested against zero.
5760 (define_insn "*movsi_compare0"
5761 [(set (reg:CC CC_REGNUM)
5762 (compare:CC (match_operand:SI 1 "s_register_operand" "0,r")
5764 (set (match_operand:SI 0 "s_register_operand" "=r,r")
5770 [(set_attr "conds" "set")]
5773 ;; Subroutine to store a half word from a register into memory.
5774 ;; Operand 0 is the source register (HImode)
5775 ;; Operand 1 is the destination address in a register (SImode)
5777 ;; In both this routine and the next, we must be careful not to spill
5778 ;; a memory address of reg+large_const into a separate PLUS insn, since this
5779 ;; can generate unrecognizable rtl.
5781 (define_expand "storehi"
5782 [;; store the low byte
5783 (set (match_operand 1 "" "") (match_dup 3))
5784 ;; extract the high byte
5786 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
5787 ;; store the high byte
5788 (set (match_dup 4) (match_dup 5))]
5792 rtx op1 = operands[1];
5793 rtx addr = XEXP (op1, 0);
5794 enum rtx_code code = GET_CODE (addr);
5796 if ((code == PLUS && GET_CODE (XEXP (addr, 1)) != CONST_INT)
5798 op1 = replace_equiv_address (operands[1], force_reg (SImode, addr));
5800 operands[4] = adjust_address (op1, QImode, 1);
5801 operands[1] = adjust_address (operands[1], QImode, 0);
5802 operands[3] = gen_lowpart (QImode, operands[0]);
5803 operands[0] = gen_lowpart (SImode, operands[0]);
5804 operands[2] = gen_reg_rtx (SImode);
5805 operands[5] = gen_lowpart (QImode, operands[2]);
5809 (define_expand "storehi_bigend"
5810 [(set (match_dup 4) (match_dup 3))
5812 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
5813 (set (match_operand 1 "" "") (match_dup 5))]
5817 rtx op1 = operands[1];
5818 rtx addr = XEXP (op1, 0);
5819 enum rtx_code code = GET_CODE (addr);
5821 if ((code == PLUS && GET_CODE (XEXP (addr, 1)) != CONST_INT)
5823 op1 = replace_equiv_address (op1, force_reg (SImode, addr));
5825 operands[4] = adjust_address (op1, QImode, 1);
5826 operands[1] = adjust_address (operands[1], QImode, 0);
5827 operands[3] = gen_lowpart (QImode, operands[0]);
5828 operands[0] = gen_lowpart (SImode, operands[0]);
5829 operands[2] = gen_reg_rtx (SImode);
5830 operands[5] = gen_lowpart (QImode, operands[2]);
5834 ;; Subroutine to store a half word integer constant into memory.
5835 (define_expand "storeinthi"
5836 [(set (match_operand 0 "" "")
5837 (match_operand 1 "" ""))
5838 (set (match_dup 3) (match_dup 2))]
5842 HOST_WIDE_INT value = INTVAL (operands[1]);
5843 rtx addr = XEXP (operands[0], 0);
5844 rtx op0 = operands[0];
5845 enum rtx_code code = GET_CODE (addr);
5847 if ((code == PLUS && GET_CODE (XEXP (addr, 1)) != CONST_INT)
5849 op0 = replace_equiv_address (op0, force_reg (SImode, addr));
5851 operands[1] = gen_reg_rtx (SImode);
5852 if (BYTES_BIG_ENDIAN)
5854 emit_insn (gen_movsi (operands[1], GEN_INT ((value >> 8) & 255)));
5855 if ((value & 255) == ((value >> 8) & 255))
5856 operands[2] = operands[1];
5859 operands[2] = gen_reg_rtx (SImode);
5860 emit_insn (gen_movsi (operands[2], GEN_INT (value & 255)));
5865 emit_insn (gen_movsi (operands[1], GEN_INT (value & 255)));
5866 if ((value & 255) == ((value >> 8) & 255))
5867 operands[2] = operands[1];
5870 operands[2] = gen_reg_rtx (SImode);
5871 emit_insn (gen_movsi (operands[2], GEN_INT ((value >> 8) & 255)));
5875 operands[3] = adjust_address (op0, QImode, 1);
5876 operands[0] = adjust_address (operands[0], QImode, 0);
5877 operands[2] = gen_lowpart (QImode, operands[2]);
5878 operands[1] = gen_lowpart (QImode, operands[1]);
5882 (define_expand "storehi_single_op"
5883 [(set (match_operand:HI 0 "memory_operand" "")
5884 (match_operand:HI 1 "general_operand" ""))]
5885 "TARGET_32BIT && arm_arch4"
5887 if (!s_register_operand (operands[1], HImode))
5888 operands[1] = copy_to_mode_reg (HImode, operands[1]);
5892 (define_expand "movhi"
5893 [(set (match_operand:HI 0 "general_operand" "")
5894 (match_operand:HI 1 "general_operand" ""))]
5899 if (can_create_pseudo_p ())
5901 if (GET_CODE (operands[0]) == MEM)
5905 emit_insn (gen_storehi_single_op (operands[0], operands[1]));
5908 if (GET_CODE (operands[1]) == CONST_INT)
5909 emit_insn (gen_storeinthi (operands[0], operands[1]));
5912 if (GET_CODE (operands[1]) == MEM)
5913 operands[1] = force_reg (HImode, operands[1]);
5914 if (BYTES_BIG_ENDIAN)
5915 emit_insn (gen_storehi_bigend (operands[1], operands[0]));
5917 emit_insn (gen_storehi (operands[1], operands[0]));
5921 /* Sign extend a constant, and keep it in an SImode reg. */
5922 else if (GET_CODE (operands[1]) == CONST_INT)
5924 rtx reg = gen_reg_rtx (SImode);
5925 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
5927 /* If the constant is already valid, leave it alone. */
5928 if (!const_ok_for_arm (val))
5930 /* If setting all the top bits will make the constant
5931 loadable in a single instruction, then set them.
5932 Otherwise, sign extend the number. */
5934 if (const_ok_for_arm (~(val | ~0xffff)))
5936 else if (val & 0x8000)
5940 emit_insn (gen_movsi (reg, GEN_INT (val)));
5941 operands[1] = gen_lowpart (HImode, reg);
5943 else if (arm_arch4 && optimize && can_create_pseudo_p ()
5944 && GET_CODE (operands[1]) == MEM)
5946 rtx reg = gen_reg_rtx (SImode);
5948 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
5949 operands[1] = gen_lowpart (HImode, reg);
5951 else if (!arm_arch4)
5953 if (GET_CODE (operands[1]) == MEM)
5956 rtx offset = const0_rtx;
5957 rtx reg = gen_reg_rtx (SImode);
5959 if ((GET_CODE (base = XEXP (operands[1], 0)) == REG
5960 || (GET_CODE (base) == PLUS
5961 && (GET_CODE (offset = XEXP (base, 1))
5963 && ((INTVAL(offset) & 1) != 1)
5964 && GET_CODE (base = XEXP (base, 0)) == REG))
5965 && REGNO_POINTER_ALIGN (REGNO (base)) >= 32)
5969 new_rtx = widen_memory_access (operands[1], SImode,
5970 ((INTVAL (offset) & ~3)
5971 - INTVAL (offset)));
5972 emit_insn (gen_movsi (reg, new_rtx));
5973 if (((INTVAL (offset) & 2) != 0)
5974 ^ (BYTES_BIG_ENDIAN ? 1 : 0))
5976 rtx reg2 = gen_reg_rtx (SImode);
5978 emit_insn (gen_lshrsi3 (reg2, reg, GEN_INT (16)));
5983 emit_insn (gen_movhi_bytes (reg, operands[1]));
5985 operands[1] = gen_lowpart (HImode, reg);
5989 /* Handle loading a large integer during reload. */
5990 else if (GET_CODE (operands[1]) == CONST_INT
5991 && !const_ok_for_arm (INTVAL (operands[1]))
5992 && !const_ok_for_arm (~INTVAL (operands[1])))
5994 /* Writing a constant to memory needs a scratch, which should
5995 be handled with SECONDARY_RELOADs. */
5996 gcc_assert (GET_CODE (operands[0]) == REG);
5998 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5999 emit_insn (gen_movsi (operands[0], operands[1]));
6003 else if (TARGET_THUMB2)
6005 /* Thumb-2 can do everything except mem=mem and mem=const easily. */
6006 if (can_create_pseudo_p ())
6008 if (GET_CODE (operands[0]) != REG)
6009 operands[1] = force_reg (HImode, operands[1]);
6010 /* Zero extend a constant, and keep it in an SImode reg. */
6011 else if (GET_CODE (operands[1]) == CONST_INT)
6013 rtx reg = gen_reg_rtx (SImode);
6014 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
6016 emit_insn (gen_movsi (reg, GEN_INT (val)));
6017 operands[1] = gen_lowpart (HImode, reg);
6021 else /* TARGET_THUMB1 */
6023 if (can_create_pseudo_p ())
6025 if (GET_CODE (operands[1]) == CONST_INT)
6027 rtx reg = gen_reg_rtx (SImode);
6029 emit_insn (gen_movsi (reg, operands[1]));
6030 operands[1] = gen_lowpart (HImode, reg);
6033 /* ??? We shouldn't really get invalid addresses here, but this can
6034 happen if we are passed a SP (never OK for HImode/QImode) or
6035 virtual register (also rejected as illegitimate for HImode/QImode)
6036 relative address. */
6037 /* ??? This should perhaps be fixed elsewhere, for instance, in
6038 fixup_stack_1, by checking for other kinds of invalid addresses,
6039 e.g. a bare reference to a virtual register. This may confuse the
6040 alpha though, which must handle this case differently. */
6041 if (GET_CODE (operands[0]) == MEM
6042 && !memory_address_p (GET_MODE (operands[0]),
6043 XEXP (operands[0], 0)))
6045 = replace_equiv_address (operands[0],
6046 copy_to_reg (XEXP (operands[0], 0)));
6048 if (GET_CODE (operands[1]) == MEM
6049 && !memory_address_p (GET_MODE (operands[1]),
6050 XEXP (operands[1], 0)))
6052 = replace_equiv_address (operands[1],
6053 copy_to_reg (XEXP (operands[1], 0)));
6055 if (GET_CODE (operands[1]) == MEM && optimize > 0)
6057 rtx reg = gen_reg_rtx (SImode);
6059 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
6060 operands[1] = gen_lowpart (HImode, reg);
6063 if (GET_CODE (operands[0]) == MEM)
6064 operands[1] = force_reg (HImode, operands[1]);
6066 else if (GET_CODE (operands[1]) == CONST_INT
6067 && !satisfies_constraint_I (operands[1]))
6069 /* Handle loading a large integer during reload. */
6071 /* Writing a constant to memory needs a scratch, which should
6072 be handled with SECONDARY_RELOADs. */
6073 gcc_assert (GET_CODE (operands[0]) == REG);
6075 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
6076 emit_insn (gen_movsi (operands[0], operands[1]));
6083 (define_insn "*thumb1_movhi_insn"
6084 [(set (match_operand:HI 0 "nonimmediate_operand" "=l,l,m,*r,*h,l")
6085 (match_operand:HI 1 "general_operand" "l,m,l,*h,*r,I"))]
6087 && ( register_operand (operands[0], HImode)
6088 || register_operand (operands[1], HImode))"
6090 switch (which_alternative)
6092 case 0: return \"add %0, %1, #0\";
6093 case 2: return \"strh %1, %0\";
6094 case 3: return \"mov %0, %1\";
6095 case 4: return \"mov %0, %1\";
6096 case 5: return \"mov %0, %1\";
6097 default: gcc_unreachable ();
6099 /* The stack pointer can end up being taken as an index register.
6100 Catch this case here and deal with it. */
6101 if (GET_CODE (XEXP (operands[1], 0)) == PLUS
6102 && GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == REG
6103 && REGNO (XEXP (XEXP (operands[1], 0), 0)) == SP_REGNUM)
6106 ops[0] = operands[0];
6107 ops[1] = XEXP (XEXP (operands[1], 0), 0);
6109 output_asm_insn (\"mov %0, %1\", ops);
6111 XEXP (XEXP (operands[1], 0), 0) = operands[0];
6114 return \"ldrh %0, %1\";
6116 [(set_attr "length" "2,4,2,2,2,2")
6117 (set_attr "type" "*,load1,store1,*,*,*")
6118 (set_attr "conds" "clob,nocond,nocond,nocond,nocond,clob")])
6121 (define_expand "movhi_bytes"
6122 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
6124 (zero_extend:SI (match_dup 6)))
6125 (set (match_operand:SI 0 "" "")
6126 (ior:SI (ashift:SI (match_dup 4) (const_int 8)) (match_dup 5)))]
6131 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
6133 mem1 = change_address (operands[1], QImode, addr);
6134 mem2 = change_address (operands[1], QImode, plus_constant (addr, 1));
6135 operands[0] = gen_lowpart (SImode, operands[0]);
6137 operands[2] = gen_reg_rtx (SImode);
6138 operands[3] = gen_reg_rtx (SImode);
6141 if (BYTES_BIG_ENDIAN)
6143 operands[4] = operands[2];
6144 operands[5] = operands[3];
6148 operands[4] = operands[3];
6149 operands[5] = operands[2];
6154 (define_expand "movhi_bigend"
6156 (rotate:SI (subreg:SI (match_operand:HI 1 "memory_operand" "") 0)
6159 (ashiftrt:SI (match_dup 2) (const_int 16)))
6160 (set (match_operand:HI 0 "s_register_operand" "")
6164 operands[2] = gen_reg_rtx (SImode);
6165 operands[3] = gen_reg_rtx (SImode);
6166 operands[4] = gen_lowpart (HImode, operands[3]);
6170 ;; Pattern to recognize insn generated default case above
6171 (define_insn "*movhi_insn_arch4"
6172 [(set (match_operand:HI 0 "nonimmediate_operand" "=r,r,m,r")
6173 (match_operand:HI 1 "general_operand" "rI,K,r,mi"))]
6176 && (register_operand (operands[0], HImode)
6177 || register_operand (operands[1], HImode))"
6179 mov%?\\t%0, %1\\t%@ movhi
6180 mvn%?\\t%0, #%B1\\t%@ movhi
6181 str%(h%)\\t%1, %0\\t%@ movhi
6182 ldr%(h%)\\t%0, %1\\t%@ movhi"
6183 [(set_attr "type" "*,*,store1,load1")
6184 (set_attr "predicable" "yes")
6185 (set_attr "insn" "mov,mvn,*,*")
6186 (set_attr "pool_range" "*,*,*,256")
6187 (set_attr "neg_pool_range" "*,*,*,244")]
6190 (define_insn "*movhi_bytes"
6191 [(set (match_operand:HI 0 "s_register_operand" "=r,r")
6192 (match_operand:HI 1 "arm_rhs_operand" "rI,K"))]
6195 mov%?\\t%0, %1\\t%@ movhi
6196 mvn%?\\t%0, #%B1\\t%@ movhi"
6197 [(set_attr "predicable" "yes")
6198 (set_attr "insn" "mov,mvn")]
6201 (define_expand "thumb_movhi_clobber"
6202 [(set (match_operand:HI 0 "memory_operand" "")
6203 (match_operand:HI 1 "register_operand" ""))
6204 (clobber (match_operand:DI 2 "register_operand" ""))]
6207 if (strict_memory_address_p (HImode, XEXP (operands[0], 0))
6208 && REGNO (operands[1]) <= LAST_LO_REGNUM)
6210 emit_insn (gen_movhi (operands[0], operands[1]));
6213 /* XXX Fixme, need to handle other cases here as well. */
6218 ;; We use a DImode scratch because we may occasionally need an additional
6219 ;; temporary if the address isn't offsettable -- push_reload doesn't seem
6220 ;; to take any notice of the "o" constraints on reload_memory_operand operand.
6221 (define_expand "reload_outhi"
6222 [(parallel [(match_operand:HI 0 "arm_reload_memory_operand" "=o")
6223 (match_operand:HI 1 "s_register_operand" "r")
6224 (match_operand:DI 2 "s_register_operand" "=&l")])]
6227 arm_reload_out_hi (operands);
6229 thumb_reload_out_hi (operands);
6234 (define_expand "reload_inhi"
6235 [(parallel [(match_operand:HI 0 "s_register_operand" "=r")
6236 (match_operand:HI 1 "arm_reload_memory_operand" "o")
6237 (match_operand:DI 2 "s_register_operand" "=&r")])]
6241 arm_reload_in_hi (operands);
6243 thumb_reload_out_hi (operands);
6247 (define_expand "movqi"
6248 [(set (match_operand:QI 0 "general_operand" "")
6249 (match_operand:QI 1 "general_operand" ""))]
6252 /* Everything except mem = const or mem = mem can be done easily */
6254 if (can_create_pseudo_p ())
6256 if (GET_CODE (operands[1]) == CONST_INT)
6258 rtx reg = gen_reg_rtx (SImode);
6260 /* For thumb we want an unsigned immediate, then we are more likely
6261 to be able to use a movs insn. */
6263 operands[1] = GEN_INT (INTVAL (operands[1]) & 255);
6265 emit_insn (gen_movsi (reg, operands[1]));
6266 operands[1] = gen_lowpart (QImode, reg);
6271 /* ??? We shouldn't really get invalid addresses here, but this can
6272 happen if we are passed a SP (never OK for HImode/QImode) or
6273 virtual register (also rejected as illegitimate for HImode/QImode)
6274 relative address. */
6275 /* ??? This should perhaps be fixed elsewhere, for instance, in
6276 fixup_stack_1, by checking for other kinds of invalid addresses,
6277 e.g. a bare reference to a virtual register. This may confuse the
6278 alpha though, which must handle this case differently. */
6279 if (GET_CODE (operands[0]) == MEM
6280 && !memory_address_p (GET_MODE (operands[0]),
6281 XEXP (operands[0], 0)))
6283 = replace_equiv_address (operands[0],
6284 copy_to_reg (XEXP (operands[0], 0)));
6285 if (GET_CODE (operands[1]) == MEM
6286 && !memory_address_p (GET_MODE (operands[1]),
6287 XEXP (operands[1], 0)))
6289 = replace_equiv_address (operands[1],
6290 copy_to_reg (XEXP (operands[1], 0)));
6293 if (GET_CODE (operands[1]) == MEM && optimize > 0)
6295 rtx reg = gen_reg_rtx (SImode);
6297 emit_insn (gen_zero_extendqisi2 (reg, operands[1]));
6298 operands[1] = gen_lowpart (QImode, reg);
6301 if (GET_CODE (operands[0]) == MEM)
6302 operands[1] = force_reg (QImode, operands[1]);
6304 else if (TARGET_THUMB
6305 && GET_CODE (operands[1]) == CONST_INT
6306 && !satisfies_constraint_I (operands[1]))
6308 /* Handle loading a large integer during reload. */
6310 /* Writing a constant to memory needs a scratch, which should
6311 be handled with SECONDARY_RELOADs. */
6312 gcc_assert (GET_CODE (operands[0]) == REG);
6314 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
6315 emit_insn (gen_movsi (operands[0], operands[1]));
6322 (define_insn "*arm_movqi_insn"
6323 [(set (match_operand:QI 0 "nonimmediate_operand" "=r,r,l,Uu,r,m")
6324 (match_operand:QI 1 "general_operand" "rI,K,Uu,l,m,r"))]
6326 && ( register_operand (operands[0], QImode)
6327 || register_operand (operands[1], QImode))"
6335 [(set_attr "type" "*,*,load1,store1,load1,store1")
6336 (set_attr "insn" "mov,mvn,*,*,*,*")
6337 (set_attr "predicable" "yes")
6338 (set_attr "arch" "any,any,t2,t2,any,any")
6339 (set_attr "length" "4,4,2,2,4,4")]
6342 (define_insn "*thumb1_movqi_insn"
6343 [(set (match_operand:QI 0 "nonimmediate_operand" "=l,l,m,*r,*h,l")
6344 (match_operand:QI 1 "general_operand" "l, m,l,*h,*r,I"))]
6346 && ( register_operand (operands[0], QImode)
6347 || register_operand (operands[1], QImode))"
6355 [(set_attr "length" "2")
6356 (set_attr "type" "*,load1,store1,*,*,*")
6357 (set_attr "insn" "*,*,*,mov,mov,mov")
6358 (set_attr "pool_range" "*,32,*,*,*,*")
6359 (set_attr "conds" "clob,nocond,nocond,nocond,nocond,clob")])
6362 (define_expand "movhf"
6363 [(set (match_operand:HF 0 "general_operand" "")
6364 (match_operand:HF 1 "general_operand" ""))]
6369 if (GET_CODE (operands[0]) == MEM)
6370 operands[1] = force_reg (HFmode, operands[1]);
6372 else /* TARGET_THUMB1 */
6374 if (can_create_pseudo_p ())
6376 if (GET_CODE (operands[0]) != REG)
6377 operands[1] = force_reg (HFmode, operands[1]);
6383 (define_insn "*arm32_movhf"
6384 [(set (match_operand:HF 0 "nonimmediate_operand" "=r,m,r,r")
6385 (match_operand:HF 1 "general_operand" " m,r,r,F"))]
6386 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_FP16)
6387 && ( s_register_operand (operands[0], HFmode)
6388 || s_register_operand (operands[1], HFmode))"
6390 switch (which_alternative)
6392 case 0: /* ARM register from memory */
6393 return \"ldr%(h%)\\t%0, %1\\t%@ __fp16\";
6394 case 1: /* memory from ARM register */
6395 return \"str%(h%)\\t%1, %0\\t%@ __fp16\";
6396 case 2: /* ARM register from ARM register */
6397 return \"mov%?\\t%0, %1\\t%@ __fp16\";
6398 case 3: /* ARM register from constant */
6404 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[1]);
6405 bits = real_to_target (NULL, &r, HFmode);
6406 ops[0] = operands[0];
6407 ops[1] = GEN_INT (bits);
6408 ops[2] = GEN_INT (bits & 0xff00);
6409 ops[3] = GEN_INT (bits & 0x00ff);
6411 if (arm_arch_thumb2)
6412 output_asm_insn (\"movw%?\\t%0, %1\", ops);
6414 output_asm_insn (\"mov%?\\t%0, %2\;orr%?\\t%0, %0, %3\", ops);
6421 [(set_attr "conds" "unconditional")
6422 (set_attr "type" "load1,store1,*,*")
6423 (set_attr "insn" "*,*,mov,mov")
6424 (set_attr "length" "4,4,4,8")
6425 (set_attr "predicable" "yes")]
6428 (define_insn "*thumb1_movhf"
6429 [(set (match_operand:HF 0 "nonimmediate_operand" "=l,l,m,*r,*h")
6430 (match_operand:HF 1 "general_operand" "l,mF,l,*h,*r"))]
6432 && ( s_register_operand (operands[0], HFmode)
6433 || s_register_operand (operands[1], HFmode))"
6435 switch (which_alternative)
6440 gcc_assert (GET_CODE(operands[1]) == MEM);
6441 addr = XEXP (operands[1], 0);
6442 if (GET_CODE (addr) == LABEL_REF
6443 || (GET_CODE (addr) == CONST
6444 && GET_CODE (XEXP (addr, 0)) == PLUS
6445 && GET_CODE (XEXP (XEXP (addr, 0), 0)) == LABEL_REF
6446 && GET_CODE (XEXP (XEXP (addr, 0), 1)) == CONST_INT))
6448 /* Constant pool entry. */
6449 return \"ldr\\t%0, %1\";
6451 return \"ldrh\\t%0, %1\";
6453 case 2: return \"strh\\t%1, %0\";
6454 default: return \"mov\\t%0, %1\";
6457 [(set_attr "length" "2")
6458 (set_attr "type" "*,load1,store1,*,*")
6459 (set_attr "insn" "mov,*,*,mov,mov")
6460 (set_attr "pool_range" "*,1020,*,*,*")
6461 (set_attr "conds" "clob,nocond,nocond,nocond,nocond")])
6463 (define_expand "movsf"
6464 [(set (match_operand:SF 0 "general_operand" "")
6465 (match_operand:SF 1 "general_operand" ""))]
6470 if (GET_CODE (operands[0]) == MEM)
6471 operands[1] = force_reg (SFmode, operands[1]);
6473 else /* TARGET_THUMB1 */
6475 if (can_create_pseudo_p ())
6477 if (GET_CODE (operands[0]) != REG)
6478 operands[1] = force_reg (SFmode, operands[1]);
6484 ;; Transform a floating-point move of a constant into a core register into
6485 ;; an SImode operation.
6487 [(set (match_operand:SF 0 "arm_general_register_operand" "")
6488 (match_operand:SF 1 "immediate_operand" ""))]
6491 && GET_CODE (operands[1]) == CONST_DOUBLE"
6492 [(set (match_dup 2) (match_dup 3))]
6494 operands[2] = gen_lowpart (SImode, operands[0]);
6495 operands[3] = gen_lowpart (SImode, operands[1]);
6496 if (operands[2] == 0 || operands[3] == 0)
6501 (define_insn "*arm_movsf_soft_insn"
6502 [(set (match_operand:SF 0 "nonimmediate_operand" "=r,r,m")
6503 (match_operand:SF 1 "general_operand" "r,mE,r"))]
6505 && TARGET_SOFT_FLOAT
6506 && (GET_CODE (operands[0]) != MEM
6507 || register_operand (operands[1], SFmode))"
6510 ldr%?\\t%0, %1\\t%@ float
6511 str%?\\t%1, %0\\t%@ float"
6512 [(set_attr "predicable" "yes")
6513 (set_attr "type" "*,load1,store1")
6514 (set_attr "insn" "mov,*,*")
6515 (set_attr "pool_range" "*,4096,*")
6516 (set_attr "arm_neg_pool_range" "*,4084,*")
6517 (set_attr "thumb2_neg_pool_range" "*,0,*")]
6520 ;;; ??? This should have alternatives for constants.
6521 (define_insn "*thumb1_movsf_insn"
6522 [(set (match_operand:SF 0 "nonimmediate_operand" "=l,l,>,l, m,*r,*h")
6523 (match_operand:SF 1 "general_operand" "l, >,l,mF,l,*h,*r"))]
6525 && ( register_operand (operands[0], SFmode)
6526 || register_operand (operands[1], SFmode))"
6535 [(set_attr "length" "2")
6536 (set_attr "type" "*,load1,store1,load1,store1,*,*")
6537 (set_attr "pool_range" "*,*,*,1020,*,*,*")
6538 (set_attr "insn" "*,*,*,*,*,mov,mov")
6539 (set_attr "conds" "clob,nocond,nocond,nocond,nocond,nocond,nocond")]
6542 (define_expand "movdf"
6543 [(set (match_operand:DF 0 "general_operand" "")
6544 (match_operand:DF 1 "general_operand" ""))]
6549 if (GET_CODE (operands[0]) == MEM)
6550 operands[1] = force_reg (DFmode, operands[1]);
6552 else /* TARGET_THUMB */
6554 if (can_create_pseudo_p ())
6556 if (GET_CODE (operands[0]) != REG)
6557 operands[1] = force_reg (DFmode, operands[1]);
6563 ;; Reloading a df mode value stored in integer regs to memory can require a
6565 (define_expand "reload_outdf"
6566 [(match_operand:DF 0 "arm_reload_memory_operand" "=o")
6567 (match_operand:DF 1 "s_register_operand" "r")
6568 (match_operand:SI 2 "s_register_operand" "=&r")]
6572 enum rtx_code code = GET_CODE (XEXP (operands[0], 0));
6575 operands[2] = XEXP (operands[0], 0);
6576 else if (code == POST_INC || code == PRE_DEC)
6578 operands[0] = gen_rtx_SUBREG (DImode, operands[0], 0);
6579 operands[1] = gen_rtx_SUBREG (DImode, operands[1], 0);
6580 emit_insn (gen_movdi (operands[0], operands[1]));
6583 else if (code == PRE_INC)
6585 rtx reg = XEXP (XEXP (operands[0], 0), 0);
6587 emit_insn (gen_addsi3 (reg, reg, GEN_INT (8)));
6590 else if (code == POST_DEC)
6591 operands[2] = XEXP (XEXP (operands[0], 0), 0);
6593 emit_insn (gen_addsi3 (operands[2], XEXP (XEXP (operands[0], 0), 0),
6594 XEXP (XEXP (operands[0], 0), 1)));
6596 emit_insn (gen_rtx_SET (VOIDmode,
6597 replace_equiv_address (operands[0], operands[2]),
6600 if (code == POST_DEC)
6601 emit_insn (gen_addsi3 (operands[2], operands[2], GEN_INT (-8)));
6607 (define_insn "*movdf_soft_insn"
6608 [(set (match_operand:DF 0 "nonimmediate_soft_df_operand" "=r,r,r,r,m")
6609 (match_operand:DF 1 "soft_df_operand" "rDa,Db,Dc,mF,r"))]
6610 "TARGET_32BIT && TARGET_SOFT_FLOAT
6611 && ( register_operand (operands[0], DFmode)
6612 || register_operand (operands[1], DFmode))"
6614 switch (which_alternative)
6621 return output_move_double (operands, true, NULL);
6624 [(set_attr "length" "8,12,16,8,8")
6625 (set_attr "type" "*,*,*,load2,store2")
6626 (set_attr "pool_range" "*,*,*,1020,*")
6627 (set_attr "arm_neg_pool_range" "*,*,*,1004,*")
6628 (set_attr "thumb2_neg_pool_range" "*,*,*,0,*")]
6631 ;;; ??? This should have alternatives for constants.
6632 ;;; ??? This was originally identical to the movdi_insn pattern.
6633 ;;; ??? The 'F' constraint looks funny, but it should always be replaced by
6634 ;;; thumb_reorg with a memory reference.
6635 (define_insn "*thumb_movdf_insn"
6636 [(set (match_operand:DF 0 "nonimmediate_operand" "=l,l,>,l, m,*r")
6637 (match_operand:DF 1 "general_operand" "l, >,l,mF,l,*r"))]
6639 && ( register_operand (operands[0], DFmode)
6640 || register_operand (operands[1], DFmode))"
6642 switch (which_alternative)
6646 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
6647 return \"add\\t%0, %1, #0\;add\\t%H0, %H1, #0\";
6648 return \"add\\t%H0, %H1, #0\;add\\t%0, %1, #0\";
6650 return \"ldmia\\t%1, {%0, %H0}\";
6652 return \"stmia\\t%0, {%1, %H1}\";
6654 return thumb_load_double_from_address (operands);
6656 operands[2] = gen_rtx_MEM (SImode,
6657 plus_constant (XEXP (operands[0], 0), 4));
6658 output_asm_insn (\"str\\t%1, %0\;str\\t%H1, %2\", operands);
6661 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
6662 return \"mov\\t%0, %1\;mov\\t%H0, %H1\";
6663 return \"mov\\t%H0, %H1\;mov\\t%0, %1\";
6666 [(set_attr "length" "4,2,2,6,4,4")
6667 (set_attr "type" "*,load2,store2,load2,store2,*")
6668 (set_attr "insn" "*,*,*,*,*,mov")
6669 (set_attr "pool_range" "*,*,*,1020,*,*")]
6672 (define_expand "movxf"
6673 [(set (match_operand:XF 0 "general_operand" "")
6674 (match_operand:XF 1 "general_operand" ""))]
6675 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
6677 if (GET_CODE (operands[0]) == MEM)
6678 operands[1] = force_reg (XFmode, operands[1]);
6684 ;; load- and store-multiple insns
6685 ;; The arm can load/store any set of registers, provided that they are in
6686 ;; ascending order, but these expanders assume a contiguous set.
6688 (define_expand "load_multiple"
6689 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
6690 (match_operand:SI 1 "" ""))
6691 (use (match_operand:SI 2 "" ""))])]
6694 HOST_WIDE_INT offset = 0;
6696 /* Support only fixed point registers. */
6697 if (GET_CODE (operands[2]) != CONST_INT
6698 || INTVAL (operands[2]) > 14
6699 || INTVAL (operands[2]) < 2
6700 || GET_CODE (operands[1]) != MEM
6701 || GET_CODE (operands[0]) != REG
6702 || REGNO (operands[0]) > (LAST_ARM_REGNUM - 1)
6703 || REGNO (operands[0]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
6707 = arm_gen_load_multiple (arm_regs_in_sequence + REGNO (operands[0]),
6708 INTVAL (operands[2]),
6709 force_reg (SImode, XEXP (operands[1], 0)),
6710 FALSE, operands[1], &offset);
6713 (define_expand "store_multiple"
6714 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
6715 (match_operand:SI 1 "" ""))
6716 (use (match_operand:SI 2 "" ""))])]
6719 HOST_WIDE_INT offset = 0;
6721 /* Support only fixed point registers. */
6722 if (GET_CODE (operands[2]) != CONST_INT
6723 || INTVAL (operands[2]) > 14
6724 || INTVAL (operands[2]) < 2
6725 || GET_CODE (operands[1]) != REG
6726 || GET_CODE (operands[0]) != MEM
6727 || REGNO (operands[1]) > (LAST_ARM_REGNUM - 1)
6728 || REGNO (operands[1]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
6732 = arm_gen_store_multiple (arm_regs_in_sequence + REGNO (operands[1]),
6733 INTVAL (operands[2]),
6734 force_reg (SImode, XEXP (operands[0], 0)),
6735 FALSE, operands[0], &offset);
6739 ;; Move a block of memory if it is word aligned and MORE than 2 words long.
6740 ;; We could let this apply for blocks of less than this, but it clobbers so
6741 ;; many registers that there is then probably a better way.
6743 (define_expand "movmemqi"
6744 [(match_operand:BLK 0 "general_operand" "")
6745 (match_operand:BLK 1 "general_operand" "")
6746 (match_operand:SI 2 "const_int_operand" "")
6747 (match_operand:SI 3 "const_int_operand" "")]
6752 if (arm_gen_movmemqi (operands))
6756 else /* TARGET_THUMB1 */
6758 if ( INTVAL (operands[3]) != 4
6759 || INTVAL (operands[2]) > 48)
6762 thumb_expand_movmemqi (operands);
6768 ;; Thumb block-move insns
6770 (define_insn "movmem12b"
6771 [(set (mem:SI (match_operand:SI 2 "register_operand" "0"))
6772 (mem:SI (match_operand:SI 3 "register_operand" "1")))
6773 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6774 (mem:SI (plus:SI (match_dup 3) (const_int 4))))
6775 (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
6776 (mem:SI (plus:SI (match_dup 3) (const_int 8))))
6777 (set (match_operand:SI 0 "register_operand" "=l")
6778 (plus:SI (match_dup 2) (const_int 12)))
6779 (set (match_operand:SI 1 "register_operand" "=l")
6780 (plus:SI (match_dup 3) (const_int 12)))
6781 (clobber (match_scratch:SI 4 "=&l"))
6782 (clobber (match_scratch:SI 5 "=&l"))
6783 (clobber (match_scratch:SI 6 "=&l"))]
6785 "* return thumb_output_move_mem_multiple (3, operands);"
6786 [(set_attr "length" "4")
6787 ; This isn't entirely accurate... It loads as well, but in terms of
6788 ; scheduling the following insn it is better to consider it as a store
6789 (set_attr "type" "store3")]
6792 (define_insn "movmem8b"
6793 [(set (mem:SI (match_operand:SI 2 "register_operand" "0"))
6794 (mem:SI (match_operand:SI 3 "register_operand" "1")))
6795 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6796 (mem:SI (plus:SI (match_dup 3) (const_int 4))))
6797 (set (match_operand:SI 0 "register_operand" "=l")
6798 (plus:SI (match_dup 2) (const_int 8)))
6799 (set (match_operand:SI 1 "register_operand" "=l")
6800 (plus:SI (match_dup 3) (const_int 8)))
6801 (clobber (match_scratch:SI 4 "=&l"))
6802 (clobber (match_scratch:SI 5 "=&l"))]
6804 "* return thumb_output_move_mem_multiple (2, operands);"
6805 [(set_attr "length" "4")
6806 ; This isn't entirely accurate... It loads as well, but in terms of
6807 ; scheduling the following insn it is better to consider it as a store
6808 (set_attr "type" "store2")]
6813 ;; Compare & branch insns
6814 ;; The range calculations are based as follows:
6815 ;; For forward branches, the address calculation returns the address of
6816 ;; the next instruction. This is 2 beyond the branch instruction.
6817 ;; For backward branches, the address calculation returns the address of
6818 ;; the first instruction in this pattern (cmp). This is 2 before the branch
6819 ;; instruction for the shortest sequence, and 4 before the branch instruction
6820 ;; if we have to jump around an unconditional branch.
6821 ;; To the basic branch range the PC offset must be added (this is +4).
6822 ;; So for forward branches we have
6823 ;; (pos_range - pos_base_offs + pc_offs) = (pos_range - 2 + 4).
6824 ;; And for backward branches we have
6825 ;; (neg_range - neg_base_offs + pc_offs) = (neg_range - (-2 or -4) + 4).
6827 ;; For a 'b' pos_range = 2046, neg_range = -2048 giving (-2040->2048).
6828 ;; For a 'b<cond>' pos_range = 254, neg_range = -256 giving (-250 ->256).
6830 (define_expand "cbranchsi4"
6831 [(set (pc) (if_then_else
6832 (match_operator 0 "expandable_comparison_operator"
6833 [(match_operand:SI 1 "s_register_operand" "")
6834 (match_operand:SI 2 "nonmemory_operand" "")])
6835 (label_ref (match_operand 3 "" ""))
6837 "TARGET_THUMB1 || TARGET_32BIT"
6841 if (!arm_add_operand (operands[2], SImode))
6842 operands[2] = force_reg (SImode, operands[2]);
6843 emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6847 if (thumb1_cmpneg_operand (operands[2], SImode))
6849 emit_jump_insn (gen_cbranchsi4_scratch (NULL, operands[1], operands[2],
6850 operands[3], operands[0]));
6853 if (!thumb1_cmp_operand (operands[2], SImode))
6854 operands[2] = force_reg (SImode, operands[2]);
6857 ;; A pattern to recognize a special situation and optimize for it.
6858 ;; On the thumb, zero-extension from memory is preferrable to sign-extension
6859 ;; due to the available addressing modes. Hence, convert a signed comparison
6860 ;; with zero into an unsigned comparison with 127 if possible.
6861 (define_expand "cbranchqi4"
6862 [(set (pc) (if_then_else
6863 (match_operator 0 "lt_ge_comparison_operator"
6864 [(match_operand:QI 1 "memory_operand" "")
6865 (match_operand:QI 2 "const0_operand" "")])
6866 (label_ref (match_operand 3 "" ""))
6871 xops[1] = gen_reg_rtx (SImode);
6872 emit_insn (gen_zero_extendqisi2 (xops[1], operands[1]));
6873 xops[2] = GEN_INT (127);
6874 xops[0] = gen_rtx_fmt_ee (GET_CODE (operands[0]) == GE ? LEU : GTU,
6875 VOIDmode, xops[1], xops[2]);
6876 xops[3] = operands[3];
6877 emit_insn (gen_cbranchsi4 (xops[0], xops[1], xops[2], xops[3]));
6881 (define_expand "cbranchsf4"
6882 [(set (pc) (if_then_else
6883 (match_operator 0 "expandable_comparison_operator"
6884 [(match_operand:SF 1 "s_register_operand" "")
6885 (match_operand:SF 2 "arm_float_compare_operand" "")])
6886 (label_ref (match_operand 3 "" ""))
6888 "TARGET_32BIT && TARGET_HARD_FLOAT"
6889 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6890 operands[3])); DONE;"
6893 (define_expand "cbranchdf4"
6894 [(set (pc) (if_then_else
6895 (match_operator 0 "expandable_comparison_operator"
6896 [(match_operand:DF 1 "s_register_operand" "")
6897 (match_operand:DF 2 "arm_float_compare_operand" "")])
6898 (label_ref (match_operand 3 "" ""))
6900 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
6901 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6902 operands[3])); DONE;"
6905 (define_expand "cbranchdi4"
6906 [(set (pc) (if_then_else
6907 (match_operator 0 "expandable_comparison_operator"
6908 [(match_operand:DI 1 "cmpdi_operand" "")
6909 (match_operand:DI 2 "cmpdi_operand" "")])
6910 (label_ref (match_operand 3 "" ""))
6914 rtx swap = NULL_RTX;
6915 enum rtx_code code = GET_CODE (operands[0]);
6917 /* We should not have two constants. */
6918 gcc_assert (GET_MODE (operands[1]) == DImode
6919 || GET_MODE (operands[2]) == DImode);
6921 /* Flip unimplemented DImode comparisons to a form that
6922 arm_gen_compare_reg can handle. */
6926 swap = gen_rtx_LT (VOIDmode, operands[2], operands[1]); break;
6928 swap = gen_rtx_GE (VOIDmode, operands[2], operands[1]); break;
6930 swap = gen_rtx_LTU (VOIDmode, operands[2], operands[1]); break;
6932 swap = gen_rtx_GEU (VOIDmode, operands[2], operands[1]); break;
6937 emit_jump_insn (gen_cbranch_cc (swap, operands[2], operands[1],
6940 emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6946 (define_insn "cbranchsi4_insn"
6947 [(set (pc) (if_then_else
6948 (match_operator 0 "arm_comparison_operator"
6949 [(match_operand:SI 1 "s_register_operand" "l,l*h")
6950 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r")])
6951 (label_ref (match_operand 3 "" ""))
6955 rtx t = cfun->machine->thumb1_cc_insn;
6958 if (!rtx_equal_p (cfun->machine->thumb1_cc_op0, operands[1])
6959 || !rtx_equal_p (cfun->machine->thumb1_cc_op1, operands[2]))
6961 if (cfun->machine->thumb1_cc_mode == CC_NOOVmode)
6963 if (!noov_comparison_operator (operands[0], VOIDmode))
6966 else if (cfun->machine->thumb1_cc_mode != CCmode)
6971 output_asm_insn ("cmp\t%1, %2", operands);
6972 cfun->machine->thumb1_cc_insn = insn;
6973 cfun->machine->thumb1_cc_op0 = operands[1];
6974 cfun->machine->thumb1_cc_op1 = operands[2];
6975 cfun->machine->thumb1_cc_mode = CCmode;
6978 /* Ensure we emit the right type of condition code on the jump. */
6979 XEXP (operands[0], 0) = gen_rtx_REG (cfun->machine->thumb1_cc_mode,
6982 switch (get_attr_length (insn))
6984 case 4: return \"b%d0\\t%l3\";
6985 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6986 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6989 [(set (attr "far_jump")
6991 (eq_attr "length" "8")
6992 (const_string "yes")
6993 (const_string "no")))
6994 (set (attr "length")
6996 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6997 (le (minus (match_dup 3) (pc)) (const_int 256)))
7000 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
7001 (le (minus (match_dup 3) (pc)) (const_int 2048)))
7006 (define_insn "cbranchsi4_scratch"
7007 [(set (pc) (if_then_else
7008 (match_operator 4 "arm_comparison_operator"
7009 [(match_operand:SI 1 "s_register_operand" "l,0")
7010 (match_operand:SI 2 "thumb1_cmpneg_operand" "L,J")])
7011 (label_ref (match_operand 3 "" ""))
7013 (clobber (match_scratch:SI 0 "=l,l"))]
7016 output_asm_insn (\"add\\t%0, %1, #%n2\", operands);
7018 switch (get_attr_length (insn))
7020 case 4: return \"b%d4\\t%l3\";
7021 case 6: return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
7022 default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
7025 [(set (attr "far_jump")
7027 (eq_attr "length" "8")
7028 (const_string "yes")
7029 (const_string "no")))
7030 (set (attr "length")
7032 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
7033 (le (minus (match_dup 3) (pc)) (const_int 256)))
7036 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
7037 (le (minus (match_dup 3) (pc)) (const_int 2048)))
7042 ;; Two peepholes to generate subtract of 0 instead of a move if the
7043 ;; condition codes will be useful.
7045 [(set (match_operand:SI 0 "low_register_operand" "")
7046 (match_operand:SI 1 "low_register_operand" ""))
7048 (if_then_else (match_operator 2 "arm_comparison_operator"
7049 [(match_dup 1) (const_int 0)])
7050 (label_ref (match_operand 3 "" ""))
7053 [(set (match_dup 0) (minus:SI (match_dup 1) (const_int 0)))
7055 (if_then_else (match_op_dup 2 [(match_dup 0) (const_int 0)])
7056 (label_ref (match_dup 3))
7060 ;; Sigh! This variant shouldn't be needed, but combine often fails to
7061 ;; merge cases like this because the op1 is a hard register in
7062 ;; arm_class_likely_spilled_p.
7064 [(set (match_operand:SI 0 "low_register_operand" "")
7065 (match_operand:SI 1 "low_register_operand" ""))
7067 (if_then_else (match_operator 2 "arm_comparison_operator"
7068 [(match_dup 0) (const_int 0)])
7069 (label_ref (match_operand 3 "" ""))
7072 [(set (match_dup 0) (minus:SI (match_dup 1) (const_int 0)))
7074 (if_then_else (match_op_dup 2 [(match_dup 0) (const_int 0)])
7075 (label_ref (match_dup 3))
7079 (define_insn "*negated_cbranchsi4"
7082 (match_operator 0 "equality_operator"
7083 [(match_operand:SI 1 "s_register_operand" "l")
7084 (neg:SI (match_operand:SI 2 "s_register_operand" "l"))])
7085 (label_ref (match_operand 3 "" ""))
7089 output_asm_insn (\"cmn\\t%1, %2\", operands);
7090 switch (get_attr_length (insn))
7092 case 4: return \"b%d0\\t%l3\";
7093 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
7094 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
7097 [(set (attr "far_jump")
7099 (eq_attr "length" "8")
7100 (const_string "yes")
7101 (const_string "no")))
7102 (set (attr "length")
7104 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
7105 (le (minus (match_dup 3) (pc)) (const_int 256)))
7108 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
7109 (le (minus (match_dup 3) (pc)) (const_int 2048)))
7114 (define_insn "*tbit_cbranch"
7117 (match_operator 0 "equality_operator"
7118 [(zero_extract:SI (match_operand:SI 1 "s_register_operand" "l")
7120 (match_operand:SI 2 "const_int_operand" "i"))
7122 (label_ref (match_operand 3 "" ""))
7124 (clobber (match_scratch:SI 4 "=l"))]
7129 op[0] = operands[4];
7130 op[1] = operands[1];
7131 op[2] = GEN_INT (32 - 1 - INTVAL (operands[2]));
7133 output_asm_insn (\"lsl\\t%0, %1, %2\", op);
7134 switch (get_attr_length (insn))
7136 case 4: return \"b%d0\\t%l3\";
7137 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
7138 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
7141 [(set (attr "far_jump")
7143 (eq_attr "length" "8")
7144 (const_string "yes")
7145 (const_string "no")))
7146 (set (attr "length")
7148 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
7149 (le (minus (match_dup 3) (pc)) (const_int 256)))
7152 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
7153 (le (minus (match_dup 3) (pc)) (const_int 2048)))
7158 (define_insn "*tlobits_cbranch"
7161 (match_operator 0 "equality_operator"
7162 [(zero_extract:SI (match_operand:SI 1 "s_register_operand" "l")
7163 (match_operand:SI 2 "const_int_operand" "i")
7166 (label_ref (match_operand 3 "" ""))
7168 (clobber (match_scratch:SI 4 "=l"))]
7173 op[0] = operands[4];
7174 op[1] = operands[1];
7175 op[2] = GEN_INT (32 - INTVAL (operands[2]));
7177 output_asm_insn (\"lsl\\t%0, %1, %2\", op);
7178 switch (get_attr_length (insn))
7180 case 4: return \"b%d0\\t%l3\";
7181 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
7182 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
7185 [(set (attr "far_jump")
7187 (eq_attr "length" "8")
7188 (const_string "yes")
7189 (const_string "no")))
7190 (set (attr "length")
7192 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
7193 (le (minus (match_dup 3) (pc)) (const_int 256)))
7196 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
7197 (le (minus (match_dup 3) (pc)) (const_int 2048)))
7202 (define_insn "*tstsi3_cbranch"
7205 (match_operator 3 "equality_operator"
7206 [(and:SI (match_operand:SI 0 "s_register_operand" "%l")
7207 (match_operand:SI 1 "s_register_operand" "l"))
7209 (label_ref (match_operand 2 "" ""))
7214 output_asm_insn (\"tst\\t%0, %1\", operands);
7215 switch (get_attr_length (insn))
7217 case 4: return \"b%d3\\t%l2\";
7218 case 6: return \"b%D3\\t.LCB%=\;b\\t%l2\\t%@long jump\\n.LCB%=:\";
7219 default: return \"b%D3\\t.LCB%=\;bl\\t%l2\\t%@far jump\\n.LCB%=:\";
7222 [(set (attr "far_jump")
7224 (eq_attr "length" "8")
7225 (const_string "yes")
7226 (const_string "no")))
7227 (set (attr "length")
7229 (and (ge (minus (match_dup 2) (pc)) (const_int -250))
7230 (le (minus (match_dup 2) (pc)) (const_int 256)))
7233 (and (ge (minus (match_dup 2) (pc)) (const_int -2040))
7234 (le (minus (match_dup 2) (pc)) (const_int 2048)))
7239 (define_insn "*cbranchne_decr1"
7241 (if_then_else (match_operator 3 "equality_operator"
7242 [(match_operand:SI 2 "s_register_operand" "l,l,1,l")
7244 (label_ref (match_operand 4 "" ""))
7246 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
7247 (plus:SI (match_dup 2) (const_int -1)))
7248 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
7253 cond[0] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
7255 VOIDmode, operands[2], const1_rtx);
7256 cond[1] = operands[4];
7258 if (which_alternative == 0)
7259 output_asm_insn (\"sub\\t%0, %2, #1\", operands);
7260 else if (which_alternative == 1)
7262 /* We must provide an alternative for a hi reg because reload
7263 cannot handle output reloads on a jump instruction, but we
7264 can't subtract into that. Fortunately a mov from lo to hi
7265 does not clobber the condition codes. */
7266 output_asm_insn (\"sub\\t%1, %2, #1\", operands);
7267 output_asm_insn (\"mov\\t%0, %1\", operands);
7271 /* Similarly, but the target is memory. */
7272 output_asm_insn (\"sub\\t%1, %2, #1\", operands);
7273 output_asm_insn (\"str\\t%1, %0\", operands);
7276 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
7279 output_asm_insn (\"b%d0\\t%l1\", cond);
7282 output_asm_insn (\"b%D0\\t.LCB%=\", cond);
7283 return \"b\\t%l4\\t%@long jump\\n.LCB%=:\";
7285 output_asm_insn (\"b%D0\\t.LCB%=\", cond);
7286 return \"bl\\t%l4\\t%@far jump\\n.LCB%=:\";
7290 [(set (attr "far_jump")
7292 (ior (and (eq (symbol_ref ("which_alternative"))
7294 (eq_attr "length" "8"))
7295 (eq_attr "length" "10"))
7296 (const_string "yes")
7297 (const_string "no")))
7298 (set_attr_alternative "length"
7302 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
7303 (le (minus (match_dup 4) (pc)) (const_int 256)))
7306 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
7307 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7312 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7313 (le (minus (match_dup 4) (pc)) (const_int 256)))
7316 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7317 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7322 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7323 (le (minus (match_dup 4) (pc)) (const_int 256)))
7326 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7327 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7332 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7333 (le (minus (match_dup 4) (pc)) (const_int 256)))
7336 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7337 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7342 (define_insn "*addsi3_cbranch"
7345 (match_operator 4 "arm_comparison_operator"
7347 (match_operand:SI 2 "s_register_operand" "%0,l,*l,1,1,1")
7348 (match_operand:SI 3 "reg_or_int_operand" "IJ,lL,*l,lIJ,lIJ,lIJ"))
7350 (label_ref (match_operand 5 "" ""))
7353 (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,l,*!h,*?h,*?m,*?m")
7354 (plus:SI (match_dup 2) (match_dup 3)))
7355 (clobber (match_scratch:SI 1 "=X,X,l,l,&l,&l"))]
7357 && (GET_CODE (operands[4]) == EQ
7358 || GET_CODE (operands[4]) == NE
7359 || GET_CODE (operands[4]) == GE
7360 || GET_CODE (operands[4]) == LT)"
7365 cond[0] = (which_alternative < 2) ? operands[0] : operands[1];
7366 cond[1] = operands[2];
7367 cond[2] = operands[3];
7369 if (GET_CODE (cond[2]) == CONST_INT && INTVAL (cond[2]) < 0)
7370 output_asm_insn (\"sub\\t%0, %1, #%n2\", cond);
7372 output_asm_insn (\"add\\t%0, %1, %2\", cond);
7374 if (which_alternative >= 2
7375 && which_alternative < 4)
7376 output_asm_insn (\"mov\\t%0, %1\", operands);
7377 else if (which_alternative >= 4)
7378 output_asm_insn (\"str\\t%1, %0\", operands);
7380 switch (get_attr_length (insn) - ((which_alternative >= 2) ? 2 : 0))
7383 return \"b%d4\\t%l5\";
7385 return \"b%D4\\t.LCB%=\;b\\t%l5\\t%@long jump\\n.LCB%=:\";
7387 return \"b%D4\\t.LCB%=\;bl\\t%l5\\t%@far jump\\n.LCB%=:\";
7391 [(set (attr "far_jump")
7393 (ior (and (lt (symbol_ref ("which_alternative"))
7395 (eq_attr "length" "8"))
7396 (eq_attr "length" "10"))
7397 (const_string "yes")
7398 (const_string "no")))
7399 (set (attr "length")
7401 (lt (symbol_ref ("which_alternative"))
7404 (and (ge (minus (match_dup 5) (pc)) (const_int -250))
7405 (le (minus (match_dup 5) (pc)) (const_int 256)))
7408 (and (ge (minus (match_dup 5) (pc)) (const_int -2040))
7409 (le (minus (match_dup 5) (pc)) (const_int 2048)))
7413 (and (ge (minus (match_dup 5) (pc)) (const_int -248))
7414 (le (minus (match_dup 5) (pc)) (const_int 256)))
7417 (and (ge (minus (match_dup 5) (pc)) (const_int -2038))
7418 (le (minus (match_dup 5) (pc)) (const_int 2048)))
7423 (define_insn "*addsi3_cbranch_scratch"
7426 (match_operator 3 "arm_comparison_operator"
7428 (match_operand:SI 1 "s_register_operand" "%l,l,l,0")
7429 (match_operand:SI 2 "reg_or_int_operand" "J,l,L,IJ"))
7431 (label_ref (match_operand 4 "" ""))
7433 (clobber (match_scratch:SI 0 "=X,X,l,l"))]
7435 && (GET_CODE (operands[3]) == EQ
7436 || GET_CODE (operands[3]) == NE
7437 || GET_CODE (operands[3]) == GE
7438 || GET_CODE (operands[3]) == LT)"
7441 switch (which_alternative)
7444 output_asm_insn (\"cmp\t%1, #%n2\", operands);
7447 output_asm_insn (\"cmn\t%1, %2\", operands);
7450 if (INTVAL (operands[2]) < 0)
7451 output_asm_insn (\"sub\t%0, %1, %2\", operands);
7453 output_asm_insn (\"add\t%0, %1, %2\", operands);
7456 if (INTVAL (operands[2]) < 0)
7457 output_asm_insn (\"sub\t%0, %0, %2\", operands);
7459 output_asm_insn (\"add\t%0, %0, %2\", operands);
7463 switch (get_attr_length (insn))
7466 return \"b%d3\\t%l4\";
7468 return \"b%D3\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
7470 return \"b%D3\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
7474 [(set (attr "far_jump")
7476 (eq_attr "length" "8")
7477 (const_string "yes")
7478 (const_string "no")))
7479 (set (attr "length")
7481 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
7482 (le (minus (match_dup 4) (pc)) (const_int 256)))
7485 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
7486 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7492 ;; Comparison and test insns
7494 (define_insn "*arm_cmpsi_insn"
7495 [(set (reg:CC CC_REGNUM)
7496 (compare:CC (match_operand:SI 0 "s_register_operand" "l,r,r,r")
7497 (match_operand:SI 1 "arm_add_operand" "Py,r,rI,L")))]
7504 [(set_attr "conds" "set")
7505 (set_attr "arch" "t2,t2,any,any")
7506 (set_attr "length" "2,2,4,4")
7507 (set_attr "predicable" "yes")]
7510 (define_insn "*cmpsi_shiftsi"
7511 [(set (reg:CC CC_REGNUM)
7512 (compare:CC (match_operand:SI 0 "s_register_operand" "r,r")
7513 (match_operator:SI 3 "shift_operator"
7514 [(match_operand:SI 1 "s_register_operand" "r,r")
7515 (match_operand:SI 2 "shift_amount_operand" "M,rM")])))]
7518 [(set_attr "conds" "set")
7519 (set_attr "shift" "1")
7520 (set_attr "arch" "32,a")
7521 (set_attr "type" "alu_shift,alu_shift_reg")])
7523 (define_insn "*cmpsi_shiftsi_swp"
7524 [(set (reg:CC_SWP CC_REGNUM)
7525 (compare:CC_SWP (match_operator:SI 3 "shift_operator"
7526 [(match_operand:SI 1 "s_register_operand" "r,r")
7527 (match_operand:SI 2 "shift_amount_operand" "M,rM")])
7528 (match_operand:SI 0 "s_register_operand" "r,r")))]
7531 [(set_attr "conds" "set")
7532 (set_attr "shift" "1")
7533 (set_attr "arch" "32,a")
7534 (set_attr "type" "alu_shift,alu_shift_reg")])
7536 (define_insn "*arm_cmpsi_negshiftsi_si"
7537 [(set (reg:CC_Z CC_REGNUM)
7539 (neg:SI (match_operator:SI 1 "shift_operator"
7540 [(match_operand:SI 2 "s_register_operand" "r")
7541 (match_operand:SI 3 "reg_or_int_operand" "rM")]))
7542 (match_operand:SI 0 "s_register_operand" "r")))]
7545 [(set_attr "conds" "set")
7546 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
7547 (const_string "alu_shift")
7548 (const_string "alu_shift_reg")))
7549 (set_attr "predicable" "yes")]
7552 ;; DImode comparisons. The generic code generates branches that
7553 ;; if-conversion can not reduce to a conditional compare, so we do
7556 (define_insn "*arm_cmpdi_insn"
7557 [(set (reg:CC_NCV CC_REGNUM)
7558 (compare:CC_NCV (match_operand:DI 0 "s_register_operand" "r")
7559 (match_operand:DI 1 "arm_di_operand" "rDi")))
7560 (clobber (match_scratch:SI 2 "=r"))]
7561 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)"
7562 "cmp\\t%Q0, %Q1\;sbcs\\t%2, %R0, %R1"
7563 [(set_attr "conds" "set")
7564 (set_attr "length" "8")]
7567 (define_insn "*arm_cmpdi_unsigned"
7568 [(set (reg:CC_CZ CC_REGNUM)
7569 (compare:CC_CZ (match_operand:DI 0 "s_register_operand" "r")
7570 (match_operand:DI 1 "arm_di_operand" "rDi")))]
7572 "cmp\\t%R0, %R1\;it eq\;cmpeq\\t%Q0, %Q1"
7573 [(set_attr "conds" "set")
7574 (set_attr "length" "8")]
7577 (define_insn "*arm_cmpdi_zero"
7578 [(set (reg:CC_Z CC_REGNUM)
7579 (compare:CC_Z (match_operand:DI 0 "s_register_operand" "r")
7581 (clobber (match_scratch:SI 1 "=r"))]
7583 "orr%.\\t%1, %Q0, %R0"
7584 [(set_attr "conds" "set")]
7587 (define_insn "*thumb_cmpdi_zero"
7588 [(set (reg:CC_Z CC_REGNUM)
7589 (compare:CC_Z (match_operand:DI 0 "s_register_operand" "l")
7591 (clobber (match_scratch:SI 1 "=l"))]
7593 "orr\\t%1, %Q0, %R0"
7594 [(set_attr "conds" "set")
7595 (set_attr "length" "2")]
7598 ;; Cirrus SF compare instruction
7599 (define_insn "*cirrus_cmpsf"
7600 [(set (reg:CCFP CC_REGNUM)
7601 (compare:CCFP (match_operand:SF 0 "cirrus_fp_register" "v")
7602 (match_operand:SF 1 "cirrus_fp_register" "v")))]
7603 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
7604 "cfcmps%?\\tr15, %V0, %V1"
7605 [(set_attr "type" "mav_farith")
7606 (set_attr "cirrus" "compare")]
7609 ;; Cirrus DF compare instruction
7610 (define_insn "*cirrus_cmpdf"
7611 [(set (reg:CCFP CC_REGNUM)
7612 (compare:CCFP (match_operand:DF 0 "cirrus_fp_register" "v")
7613 (match_operand:DF 1 "cirrus_fp_register" "v")))]
7614 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
7615 "cfcmpd%?\\tr15, %V0, %V1"
7616 [(set_attr "type" "mav_farith")
7617 (set_attr "cirrus" "compare")]
7620 (define_insn "*cirrus_cmpdi"
7621 [(set (reg:CC CC_REGNUM)
7622 (compare:CC (match_operand:DI 0 "cirrus_fp_register" "v")
7623 (match_operand:DI 1 "cirrus_fp_register" "v")))]
7624 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
7625 "cfcmp64%?\\tr15, %V0, %V1"
7626 [(set_attr "type" "mav_farith")
7627 (set_attr "cirrus" "compare")]
7630 ; This insn allows redundant compares to be removed by cse, nothing should
7631 ; ever appear in the output file since (set (reg x) (reg x)) is a no-op that
7632 ; is deleted later on. The match_dup will match the mode here, so that
7633 ; mode changes of the condition codes aren't lost by this even though we don't
7634 ; specify what they are.
7636 (define_insn "*deleted_compare"
7637 [(set (match_operand 0 "cc_register" "") (match_dup 0))]
7639 "\\t%@ deleted compare"
7640 [(set_attr "conds" "set")
7641 (set_attr "length" "0")]
7645 ;; Conditional branch insns
7647 (define_expand "cbranch_cc"
7649 (if_then_else (match_operator 0 "" [(match_operand 1 "" "")
7650 (match_operand 2 "" "")])
7651 (label_ref (match_operand 3 "" ""))
7654 "operands[1] = arm_gen_compare_reg (GET_CODE (operands[0]),
7655 operands[1], operands[2], NULL_RTX);
7656 operands[2] = const0_rtx;"
7660 ;; Patterns to match conditional branch insns.
7663 (define_insn "*arm_cond_branch"
7665 (if_then_else (match_operator 1 "arm_comparison_operator"
7666 [(match_operand 2 "cc_register" "") (const_int 0)])
7667 (label_ref (match_operand 0 "" ""))
7671 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7673 arm_ccfsm_state += 2;
7676 return \"b%d1\\t%l0\";
7678 [(set_attr "conds" "use")
7679 (set_attr "type" "branch")
7680 (set (attr "length")
7682 (and (match_test "TARGET_THUMB2")
7683 (and (ge (minus (match_dup 0) (pc)) (const_int -250))
7684 (le (minus (match_dup 0) (pc)) (const_int 256))))
7689 (define_insn "*arm_cond_branch_reversed"
7691 (if_then_else (match_operator 1 "arm_comparison_operator"
7692 [(match_operand 2 "cc_register" "") (const_int 0)])
7694 (label_ref (match_operand 0 "" ""))))]
7697 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7699 arm_ccfsm_state += 2;
7702 return \"b%D1\\t%l0\";
7704 [(set_attr "conds" "use")
7705 (set_attr "type" "branch")
7706 (set (attr "length")
7708 (and (match_test "TARGET_THUMB2")
7709 (and (ge (minus (match_dup 0) (pc)) (const_int -250))
7710 (le (minus (match_dup 0) (pc)) (const_int 256))))
7719 (define_expand "cstore_cc"
7720 [(set (match_operand:SI 0 "s_register_operand" "")
7721 (match_operator:SI 1 "" [(match_operand 2 "" "")
7722 (match_operand 3 "" "")]))]
7724 "operands[2] = arm_gen_compare_reg (GET_CODE (operands[1]),
7725 operands[2], operands[3], NULL_RTX);
7726 operands[3] = const0_rtx;"
7729 (define_insn "*mov_scc"
7730 [(set (match_operand:SI 0 "s_register_operand" "=r")
7731 (match_operator:SI 1 "arm_comparison_operator"
7732 [(match_operand 2 "cc_register" "") (const_int 0)]))]
7734 "mov%D1\\t%0, #0\;mov%d1\\t%0, #1"
7735 [(set_attr "conds" "use")
7736 (set_attr "insn" "mov")
7737 (set_attr "length" "8")]
7740 (define_insn "*mov_negscc"
7741 [(set (match_operand:SI 0 "s_register_operand" "=r")
7742 (neg:SI (match_operator:SI 1 "arm_comparison_operator"
7743 [(match_operand 2 "cc_register" "") (const_int 0)])))]
7745 "mov%D1\\t%0, #0\;mvn%d1\\t%0, #0"
7746 [(set_attr "conds" "use")
7747 (set_attr "insn" "mov")
7748 (set_attr "length" "8")]
7751 (define_insn "*mov_notscc"
7752 [(set (match_operand:SI 0 "s_register_operand" "=r")
7753 (not:SI (match_operator:SI 1 "arm_comparison_operator"
7754 [(match_operand 2 "cc_register" "") (const_int 0)])))]
7756 "mvn%D1\\t%0, #0\;mvn%d1\\t%0, #1"
7757 [(set_attr "conds" "use")
7758 (set_attr "insn" "mov")
7759 (set_attr "length" "8")]
7762 (define_expand "cstoresi4"
7763 [(set (match_operand:SI 0 "s_register_operand" "")
7764 (match_operator:SI 1 "expandable_comparison_operator"
7765 [(match_operand:SI 2 "s_register_operand" "")
7766 (match_operand:SI 3 "reg_or_int_operand" "")]))]
7767 "TARGET_32BIT || TARGET_THUMB1"
7769 rtx op3, scratch, scratch2;
7773 if (!arm_add_operand (operands[3], SImode))
7774 operands[3] = force_reg (SImode, operands[3]);
7775 emit_insn (gen_cstore_cc (operands[0], operands[1],
7776 operands[2], operands[3]));
7780 if (operands[3] == const0_rtx)
7782 switch (GET_CODE (operands[1]))
7785 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], operands[2]));
7789 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], operands[2]));
7793 scratch = expand_binop (SImode, add_optab, operands[2], constm1_rtx,
7794 NULL_RTX, 0, OPTAB_WIDEN);
7795 scratch = expand_binop (SImode, ior_optab, operands[2], scratch,
7796 NULL_RTX, 0, OPTAB_WIDEN);
7797 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
7798 operands[0], 1, OPTAB_WIDEN);
7802 scratch = expand_unop (SImode, one_cmpl_optab, operands[2],
7804 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
7805 NULL_RTX, 1, OPTAB_WIDEN);
7809 scratch = expand_binop (SImode, ashr_optab, operands[2],
7810 GEN_INT (31), NULL_RTX, 0, OPTAB_WIDEN);
7811 scratch = expand_binop (SImode, sub_optab, scratch, operands[2],
7812 NULL_RTX, 0, OPTAB_WIDEN);
7813 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31), operands[0],
7817 /* LT is handled by generic code. No need for unsigned with 0. */
7824 switch (GET_CODE (operands[1]))
7827 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
7828 NULL_RTX, 0, OPTAB_WIDEN);
7829 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], scratch));
7833 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
7834 NULL_RTX, 0, OPTAB_WIDEN);
7835 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], scratch));
7839 op3 = force_reg (SImode, operands[3]);
7841 scratch = expand_binop (SImode, lshr_optab, operands[2], GEN_INT (31),
7842 NULL_RTX, 1, OPTAB_WIDEN);
7843 scratch2 = expand_binop (SImode, ashr_optab, op3, GEN_INT (31),
7844 NULL_RTX, 0, OPTAB_WIDEN);
7845 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
7851 if (!thumb1_cmp_operand (op3, SImode))
7852 op3 = force_reg (SImode, op3);
7853 scratch = expand_binop (SImode, ashr_optab, operands[2], GEN_INT (31),
7854 NULL_RTX, 0, OPTAB_WIDEN);
7855 scratch2 = expand_binop (SImode, lshr_optab, op3, GEN_INT (31),
7856 NULL_RTX, 1, OPTAB_WIDEN);
7857 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
7862 op3 = force_reg (SImode, operands[3]);
7863 scratch = force_reg (SImode, const0_rtx);
7864 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
7870 if (!thumb1_cmp_operand (op3, SImode))
7871 op3 = force_reg (SImode, op3);
7872 scratch = force_reg (SImode, const0_rtx);
7873 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
7879 if (!thumb1_cmp_operand (op3, SImode))
7880 op3 = force_reg (SImode, op3);
7881 scratch = gen_reg_rtx (SImode);
7882 emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], operands[2], op3));
7886 op3 = force_reg (SImode, operands[3]);
7887 scratch = gen_reg_rtx (SImode);
7888 emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], op3, operands[2]));
7891 /* No good sequences for GT, LT. */
7898 (define_expand "cstoresf4"
7899 [(set (match_operand:SI 0 "s_register_operand" "")
7900 (match_operator:SI 1 "expandable_comparison_operator"
7901 [(match_operand:SF 2 "s_register_operand" "")
7902 (match_operand:SF 3 "arm_float_compare_operand" "")]))]
7903 "TARGET_32BIT && TARGET_HARD_FLOAT"
7904 "emit_insn (gen_cstore_cc (operands[0], operands[1],
7905 operands[2], operands[3])); DONE;"
7908 (define_expand "cstoredf4"
7909 [(set (match_operand:SI 0 "s_register_operand" "")
7910 (match_operator:SI 1 "expandable_comparison_operator"
7911 [(match_operand:DF 2 "s_register_operand" "")
7912 (match_operand:DF 3 "arm_float_compare_operand" "")]))]
7913 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
7914 "emit_insn (gen_cstore_cc (operands[0], operands[1],
7915 operands[2], operands[3])); DONE;"
7918 (define_expand "cstoredi4"
7919 [(set (match_operand:SI 0 "s_register_operand" "")
7920 (match_operator:SI 1 "expandable_comparison_operator"
7921 [(match_operand:DI 2 "cmpdi_operand" "")
7922 (match_operand:DI 3 "cmpdi_operand" "")]))]
7925 rtx swap = NULL_RTX;
7926 enum rtx_code code = GET_CODE (operands[1]);
7928 /* We should not have two constants. */
7929 gcc_assert (GET_MODE (operands[2]) == DImode
7930 || GET_MODE (operands[3]) == DImode);
7932 /* Flip unimplemented DImode comparisons to a form that
7933 arm_gen_compare_reg can handle. */
7937 swap = gen_rtx_LT (VOIDmode, operands[3], operands[2]); break;
7939 swap = gen_rtx_GE (VOIDmode, operands[3], operands[2]); break;
7941 swap = gen_rtx_LTU (VOIDmode, operands[3], operands[2]); break;
7943 swap = gen_rtx_GEU (VOIDmode, operands[3], operands[2]); break;
7948 emit_insn (gen_cstore_cc (operands[0], swap, operands[3],
7951 emit_insn (gen_cstore_cc (operands[0], operands[1], operands[2],
7957 (define_expand "cstoresi_eq0_thumb1"
7959 [(set (match_operand:SI 0 "s_register_operand" "")
7960 (eq:SI (match_operand:SI 1 "s_register_operand" "")
7962 (clobber (match_dup:SI 2))])]
7964 "operands[2] = gen_reg_rtx (SImode);"
7967 (define_expand "cstoresi_ne0_thumb1"
7969 [(set (match_operand:SI 0 "s_register_operand" "")
7970 (ne:SI (match_operand:SI 1 "s_register_operand" "")
7972 (clobber (match_dup:SI 2))])]
7974 "operands[2] = gen_reg_rtx (SImode);"
7977 (define_insn "*cstoresi_eq0_thumb1_insn"
7978 [(set (match_operand:SI 0 "s_register_operand" "=&l,l")
7979 (eq:SI (match_operand:SI 1 "s_register_operand" "l,0")
7981 (clobber (match_operand:SI 2 "s_register_operand" "=X,l"))]
7984 neg\\t%0, %1\;adc\\t%0, %0, %1
7985 neg\\t%2, %1\;adc\\t%0, %1, %2"
7986 [(set_attr "length" "4")]
7989 (define_insn "*cstoresi_ne0_thumb1_insn"
7990 [(set (match_operand:SI 0 "s_register_operand" "=l")
7991 (ne:SI (match_operand:SI 1 "s_register_operand" "0")
7993 (clobber (match_operand:SI 2 "s_register_operand" "=l"))]
7995 "sub\\t%2, %1, #1\;sbc\\t%0, %1, %2"
7996 [(set_attr "length" "4")]
7999 ;; Used as part of the expansion of thumb ltu and gtu sequences
8000 (define_insn "cstoresi_nltu_thumb1"
8001 [(set (match_operand:SI 0 "s_register_operand" "=l,l")
8002 (neg:SI (ltu:SI (match_operand:SI 1 "s_register_operand" "l,*h")
8003 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r"))))]
8005 "cmp\\t%1, %2\;sbc\\t%0, %0, %0"
8006 [(set_attr "length" "4")]
8009 (define_insn_and_split "cstoresi_ltu_thumb1"
8010 [(set (match_operand:SI 0 "s_register_operand" "=l,l")
8011 (ltu:SI (match_operand:SI 1 "s_register_operand" "l,*h")
8012 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r")))]
8017 (neg:SI (ltu:SI (match_dup 1) (match_dup 2))))
8018 (set (match_dup 0) (neg:SI (match_dup 3)))]
8019 "operands[3] = gen_reg_rtx (SImode);"
8020 [(set_attr "length" "4")]
8023 ;; Used as part of the expansion of thumb les sequence.
8024 (define_insn "thumb1_addsi3_addgeu"
8025 [(set (match_operand:SI 0 "s_register_operand" "=l")
8026 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%0")
8027 (match_operand:SI 2 "s_register_operand" "l"))
8028 (geu:SI (match_operand:SI 3 "s_register_operand" "l")
8029 (match_operand:SI 4 "thumb1_cmp_operand" "lI"))))]
8031 "cmp\\t%3, %4\;adc\\t%0, %1, %2"
8032 [(set_attr "length" "4")]
8036 ;; Conditional move insns
8038 (define_expand "movsicc"
8039 [(set (match_operand:SI 0 "s_register_operand" "")
8040 (if_then_else:SI (match_operand 1 "expandable_comparison_operator" "")
8041 (match_operand:SI 2 "arm_not_operand" "")
8042 (match_operand:SI 3 "arm_not_operand" "")))]
8046 enum rtx_code code = GET_CODE (operands[1]);
8049 if (code == UNEQ || code == LTGT)
8052 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
8053 XEXP (operands[1], 1), NULL_RTX);
8054 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
8058 (define_expand "movsfcc"
8059 [(set (match_operand:SF 0 "s_register_operand" "")
8060 (if_then_else:SF (match_operand 1 "expandable_comparison_operator" "")
8061 (match_operand:SF 2 "s_register_operand" "")
8062 (match_operand:SF 3 "nonmemory_operand" "")))]
8063 "TARGET_32BIT && TARGET_HARD_FLOAT"
8066 enum rtx_code code = GET_CODE (operands[1]);
8069 if (code == UNEQ || code == LTGT)
8072 /* When compiling for SOFT_FLOAT, ensure both arms are in registers.
8073 Otherwise, ensure it is a valid FP add operand */
8074 if ((!(TARGET_HARD_FLOAT && TARGET_FPA))
8075 || (!arm_float_add_operand (operands[3], SFmode)))
8076 operands[3] = force_reg (SFmode, operands[3]);
8078 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
8079 XEXP (operands[1], 1), NULL_RTX);
8080 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
8084 (define_expand "movdfcc"
8085 [(set (match_operand:DF 0 "s_register_operand" "")
8086 (if_then_else:DF (match_operand 1 "expandable_comparison_operator" "")
8087 (match_operand:DF 2 "s_register_operand" "")
8088 (match_operand:DF 3 "arm_float_add_operand" "")))]
8089 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP_DOUBLE)"
8092 enum rtx_code code = GET_CODE (operands[1]);
8095 if (code == UNEQ || code == LTGT)
8098 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
8099 XEXP (operands[1], 1), NULL_RTX);
8100 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
8104 (define_insn "*movsicc_insn"
8105 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r,r,r,r,r")
8107 (match_operator 3 "arm_comparison_operator"
8108 [(match_operand 4 "cc_register" "") (const_int 0)])
8109 (match_operand:SI 1 "arm_not_operand" "0,0,rI,K,rI,rI,K,K")
8110 (match_operand:SI 2 "arm_not_operand" "rI,K,0,0,rI,K,rI,K")))]
8117 mov%d3\\t%0, %1\;mov%D3\\t%0, %2
8118 mov%d3\\t%0, %1\;mvn%D3\\t%0, #%B2
8119 mvn%d3\\t%0, #%B1\;mov%D3\\t%0, %2
8120 mvn%d3\\t%0, #%B1\;mvn%D3\\t%0, #%B2"
8121 [(set_attr "length" "4,4,4,4,8,8,8,8")
8122 (set_attr "conds" "use")
8123 (set_attr "insn" "mov,mvn,mov,mvn,mov,mov,mvn,mvn")]
8126 (define_insn "*movsfcc_soft_insn"
8127 [(set (match_operand:SF 0 "s_register_operand" "=r,r")
8128 (if_then_else:SF (match_operator 3 "arm_comparison_operator"
8129 [(match_operand 4 "cc_register" "") (const_int 0)])
8130 (match_operand:SF 1 "s_register_operand" "0,r")
8131 (match_operand:SF 2 "s_register_operand" "r,0")))]
8132 "TARGET_ARM && TARGET_SOFT_FLOAT"
8136 [(set_attr "conds" "use")
8137 (set_attr "insn" "mov")]
8141 ;; Jump and linkage insns
8143 (define_expand "jump"
8145 (label_ref (match_operand 0 "" "")))]
8150 (define_insn "*arm_jump"
8152 (label_ref (match_operand 0 "" "")))]
8156 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
8158 arm_ccfsm_state += 2;
8161 return \"b%?\\t%l0\";
8164 [(set_attr "predicable" "yes")
8165 (set (attr "length")
8167 (and (match_test "TARGET_THUMB2")
8168 (and (ge (minus (match_dup 0) (pc)) (const_int -2044))
8169 (le (minus (match_dup 0) (pc)) (const_int 2048))))
8174 (define_insn "*thumb_jump"
8176 (label_ref (match_operand 0 "" "")))]
8179 if (get_attr_length (insn) == 2)
8181 return \"bl\\t%l0\\t%@ far jump\";
8183 [(set (attr "far_jump")
8185 (eq_attr "length" "4")
8186 (const_string "yes")
8187 (const_string "no")))
8188 (set (attr "length")
8190 (and (ge (minus (match_dup 0) (pc)) (const_int -2044))
8191 (le (minus (match_dup 0) (pc)) (const_int 2048)))
8196 (define_expand "call"
8197 [(parallel [(call (match_operand 0 "memory_operand" "")
8198 (match_operand 1 "general_operand" ""))
8199 (use (match_operand 2 "" ""))
8200 (clobber (reg:SI LR_REGNUM))])]
8206 /* In an untyped call, we can get NULL for operand 2. */
8207 if (operands[2] == NULL_RTX)
8208 operands[2] = const0_rtx;
8210 /* Decide if we should generate indirect calls by loading the
8211 32-bit address of the callee into a register before performing the
8213 callee = XEXP (operands[0], 0);
8214 if (GET_CODE (callee) == SYMBOL_REF
8215 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
8217 XEXP (operands[0], 0) = force_reg (Pmode, callee);
8219 pat = gen_call_internal (operands[0], operands[1], operands[2]);
8220 arm_emit_call_insn (pat, XEXP (operands[0], 0));
8225 (define_expand "call_internal"
8226 [(parallel [(call (match_operand 0 "memory_operand" "")
8227 (match_operand 1 "general_operand" ""))
8228 (use (match_operand 2 "" ""))
8229 (clobber (reg:SI LR_REGNUM))])])
8231 (define_insn "*call_reg_armv5"
8232 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
8233 (match_operand 1 "" ""))
8234 (use (match_operand 2 "" ""))
8235 (clobber (reg:SI LR_REGNUM))]
8236 "TARGET_ARM && arm_arch5"
8238 [(set_attr "type" "call")]
8241 (define_insn "*call_reg_arm"
8242 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
8243 (match_operand 1 "" ""))
8244 (use (match_operand 2 "" ""))
8245 (clobber (reg:SI LR_REGNUM))]
8246 "TARGET_ARM && !arm_arch5"
8248 return output_call (operands);
8250 ;; length is worst case, normally it is only two
8251 [(set_attr "length" "12")
8252 (set_attr "type" "call")]
8256 ;; Note: not used for armv5+ because the sequence used (ldr pc, ...) is not
8257 ;; considered a function call by the branch predictor of some cores (PR40887).
8258 ;; Falls back to blx rN (*call_reg_armv5).
8260 (define_insn "*call_mem"
8261 [(call (mem:SI (match_operand:SI 0 "call_memory_operand" "m"))
8262 (match_operand 1 "" ""))
8263 (use (match_operand 2 "" ""))
8264 (clobber (reg:SI LR_REGNUM))]
8265 "TARGET_ARM && !arm_arch5"
8267 return output_call_mem (operands);
8269 [(set_attr "length" "12")
8270 (set_attr "type" "call")]
8273 (define_insn "*call_reg_thumb1_v5"
8274 [(call (mem:SI (match_operand:SI 0 "register_operand" "l*r"))
8275 (match_operand 1 "" ""))
8276 (use (match_operand 2 "" ""))
8277 (clobber (reg:SI LR_REGNUM))]
8278 "TARGET_THUMB1 && arm_arch5"
8280 [(set_attr "length" "2")
8281 (set_attr "type" "call")]
8284 (define_insn "*call_reg_thumb1"
8285 [(call (mem:SI (match_operand:SI 0 "register_operand" "l*r"))
8286 (match_operand 1 "" ""))
8287 (use (match_operand 2 "" ""))
8288 (clobber (reg:SI LR_REGNUM))]
8289 "TARGET_THUMB1 && !arm_arch5"
8292 if (!TARGET_CALLER_INTERWORKING)
8293 return thumb_call_via_reg (operands[0]);
8294 else if (operands[1] == const0_rtx)
8295 return \"bl\\t%__interwork_call_via_%0\";
8296 else if (frame_pointer_needed)
8297 return \"bl\\t%__interwork_r7_call_via_%0\";
8299 return \"bl\\t%__interwork_r11_call_via_%0\";
8301 [(set_attr "type" "call")]
8304 (define_expand "call_value"
8305 [(parallel [(set (match_operand 0 "" "")
8306 (call (match_operand 1 "memory_operand" "")
8307 (match_operand 2 "general_operand" "")))
8308 (use (match_operand 3 "" ""))
8309 (clobber (reg:SI LR_REGNUM))])]
8315 /* In an untyped call, we can get NULL for operand 2. */
8316 if (operands[3] == 0)
8317 operands[3] = const0_rtx;
8319 /* Decide if we should generate indirect calls by loading the
8320 32-bit address of the callee into a register before performing the
8322 callee = XEXP (operands[1], 0);
8323 if (GET_CODE (callee) == SYMBOL_REF
8324 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
8326 XEXP (operands[1], 0) = force_reg (Pmode, callee);
8328 pat = gen_call_value_internal (operands[0], operands[1],
8329 operands[2], operands[3]);
8330 arm_emit_call_insn (pat, XEXP (operands[1], 0));
8335 (define_expand "call_value_internal"
8336 [(parallel [(set (match_operand 0 "" "")
8337 (call (match_operand 1 "memory_operand" "")
8338 (match_operand 2 "general_operand" "")))
8339 (use (match_operand 3 "" ""))
8340 (clobber (reg:SI LR_REGNUM))])])
8342 (define_insn "*call_value_reg_armv5"
8343 [(set (match_operand 0 "" "")
8344 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
8345 (match_operand 2 "" "")))
8346 (use (match_operand 3 "" ""))
8347 (clobber (reg:SI LR_REGNUM))]
8348 "TARGET_ARM && arm_arch5"
8350 [(set_attr "type" "call")]
8353 (define_insn "*call_value_reg_arm"
8354 [(set (match_operand 0 "" "")
8355 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
8356 (match_operand 2 "" "")))
8357 (use (match_operand 3 "" ""))
8358 (clobber (reg:SI LR_REGNUM))]
8359 "TARGET_ARM && !arm_arch5"
8361 return output_call (&operands[1]);
8363 [(set_attr "length" "12")
8364 (set_attr "type" "call")]
8367 ;; Note: see *call_mem
8369 (define_insn "*call_value_mem"
8370 [(set (match_operand 0 "" "")
8371 (call (mem:SI (match_operand:SI 1 "call_memory_operand" "m"))
8372 (match_operand 2 "" "")))
8373 (use (match_operand 3 "" ""))
8374 (clobber (reg:SI LR_REGNUM))]
8375 "TARGET_ARM && !arm_arch5 && (!CONSTANT_ADDRESS_P (XEXP (operands[1], 0)))"
8377 return output_call_mem (&operands[1]);
8379 [(set_attr "length" "12")
8380 (set_attr "type" "call")]
8383 (define_insn "*call_value_reg_thumb1_v5"
8384 [(set (match_operand 0 "" "")
8385 (call (mem:SI (match_operand:SI 1 "register_operand" "l*r"))
8386 (match_operand 2 "" "")))
8387 (use (match_operand 3 "" ""))
8388 (clobber (reg:SI LR_REGNUM))]
8389 "TARGET_THUMB1 && arm_arch5"
8391 [(set_attr "length" "2")
8392 (set_attr "type" "call")]
8395 (define_insn "*call_value_reg_thumb1"
8396 [(set (match_operand 0 "" "")
8397 (call (mem:SI (match_operand:SI 1 "register_operand" "l*r"))
8398 (match_operand 2 "" "")))
8399 (use (match_operand 3 "" ""))
8400 (clobber (reg:SI LR_REGNUM))]
8401 "TARGET_THUMB1 && !arm_arch5"
8404 if (!TARGET_CALLER_INTERWORKING)
8405 return thumb_call_via_reg (operands[1]);
8406 else if (operands[2] == const0_rtx)
8407 return \"bl\\t%__interwork_call_via_%1\";
8408 else if (frame_pointer_needed)
8409 return \"bl\\t%__interwork_r7_call_via_%1\";
8411 return \"bl\\t%__interwork_r11_call_via_%1\";
8413 [(set_attr "type" "call")]
8416 ;; Allow calls to SYMBOL_REFs specially as they are not valid general addresses
8417 ;; The 'a' causes the operand to be treated as an address, i.e. no '#' output.
8419 (define_insn "*call_symbol"
8420 [(call (mem:SI (match_operand:SI 0 "" ""))
8421 (match_operand 1 "" ""))
8422 (use (match_operand 2 "" ""))
8423 (clobber (reg:SI LR_REGNUM))]
8425 && (GET_CODE (operands[0]) == SYMBOL_REF)
8426 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
8429 return NEED_PLT_RELOC ? \"bl%?\\t%a0(PLT)\" : \"bl%?\\t%a0\";
8431 [(set_attr "type" "call")]
8434 (define_insn "*call_value_symbol"
8435 [(set (match_operand 0 "" "")
8436 (call (mem:SI (match_operand:SI 1 "" ""))
8437 (match_operand:SI 2 "" "")))
8438 (use (match_operand 3 "" ""))
8439 (clobber (reg:SI LR_REGNUM))]
8441 && (GET_CODE (operands[1]) == SYMBOL_REF)
8442 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
8445 return NEED_PLT_RELOC ? \"bl%?\\t%a1(PLT)\" : \"bl%?\\t%a1\";
8447 [(set_attr "type" "call")]
8450 (define_insn "*call_insn"
8451 [(call (mem:SI (match_operand:SI 0 "" ""))
8452 (match_operand:SI 1 "" ""))
8453 (use (match_operand 2 "" ""))
8454 (clobber (reg:SI LR_REGNUM))]
8456 && GET_CODE (operands[0]) == SYMBOL_REF
8457 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
8459 [(set_attr "length" "4")
8460 (set_attr "type" "call")]
8463 (define_insn "*call_value_insn"
8464 [(set (match_operand 0 "" "")
8465 (call (mem:SI (match_operand 1 "" ""))
8466 (match_operand 2 "" "")))
8467 (use (match_operand 3 "" ""))
8468 (clobber (reg:SI LR_REGNUM))]
8470 && GET_CODE (operands[1]) == SYMBOL_REF
8471 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
8473 [(set_attr "length" "4")
8474 (set_attr "type" "call")]
8477 ;; We may also be able to do sibcalls for Thumb, but it's much harder...
8478 (define_expand "sibcall"
8479 [(parallel [(call (match_operand 0 "memory_operand" "")
8480 (match_operand 1 "general_operand" ""))
8482 (use (match_operand 2 "" ""))])]
8486 if (operands[2] == NULL_RTX)
8487 operands[2] = const0_rtx;
8491 (define_expand "sibcall_value"
8492 [(parallel [(set (match_operand 0 "" "")
8493 (call (match_operand 1 "memory_operand" "")
8494 (match_operand 2 "general_operand" "")))
8496 (use (match_operand 3 "" ""))])]
8500 if (operands[3] == NULL_RTX)
8501 operands[3] = const0_rtx;
8505 (define_insn "*sibcall_insn"
8506 [(call (mem:SI (match_operand:SI 0 "" "X"))
8507 (match_operand 1 "" ""))
8509 (use (match_operand 2 "" ""))]
8510 "TARGET_32BIT && GET_CODE (operands[0]) == SYMBOL_REF"
8512 return NEED_PLT_RELOC ? \"b%?\\t%a0(PLT)\" : \"b%?\\t%a0\";
8514 [(set_attr "type" "call")]
8517 (define_insn "*sibcall_value_insn"
8518 [(set (match_operand 0 "" "")
8519 (call (mem:SI (match_operand:SI 1 "" "X"))
8520 (match_operand 2 "" "")))
8522 (use (match_operand 3 "" ""))]
8523 "TARGET_32BIT && GET_CODE (operands[1]) == SYMBOL_REF"
8525 return NEED_PLT_RELOC ? \"b%?\\t%a1(PLT)\" : \"b%?\\t%a1\";
8527 [(set_attr "type" "call")]
8530 (define_expand "return"
8532 "TARGET_32BIT && USE_RETURN_INSN (FALSE)"
8535 ;; Often the return insn will be the same as loading from memory, so set attr
8536 (define_insn "*arm_return"
8538 "TARGET_ARM && USE_RETURN_INSN (FALSE)"
8541 if (arm_ccfsm_state == 2)
8543 arm_ccfsm_state += 2;
8546 return output_return_instruction (const_true_rtx, TRUE, FALSE);
8548 [(set_attr "type" "load1")
8549 (set_attr "length" "12")
8550 (set_attr "predicable" "yes")]
8553 (define_insn "*cond_return"
8555 (if_then_else (match_operator 0 "arm_comparison_operator"
8556 [(match_operand 1 "cc_register" "") (const_int 0)])
8559 "TARGET_ARM && USE_RETURN_INSN (TRUE)"
8562 if (arm_ccfsm_state == 2)
8564 arm_ccfsm_state += 2;
8567 return output_return_instruction (operands[0], TRUE, FALSE);
8569 [(set_attr "conds" "use")
8570 (set_attr "length" "12")
8571 (set_attr "type" "load1")]
8574 (define_insn "*cond_return_inverted"
8576 (if_then_else (match_operator 0 "arm_comparison_operator"
8577 [(match_operand 1 "cc_register" "") (const_int 0)])
8580 "TARGET_ARM && USE_RETURN_INSN (TRUE)"
8583 if (arm_ccfsm_state == 2)
8585 arm_ccfsm_state += 2;
8588 return output_return_instruction (operands[0], TRUE, TRUE);
8590 [(set_attr "conds" "use")
8591 (set_attr "length" "12")
8592 (set_attr "type" "load1")]
8595 ;; Generate a sequence of instructions to determine if the processor is
8596 ;; in 26-bit or 32-bit mode, and return the appropriate return address
8599 (define_expand "return_addr_mask"
8601 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
8603 (set (match_operand:SI 0 "s_register_operand" "")
8604 (if_then_else:SI (eq (match_dup 1) (const_int 0))
8606 (const_int 67108860)))] ; 0x03fffffc
8609 operands[1] = gen_rtx_REG (CC_NOOVmode, CC_REGNUM);
8612 (define_insn "*check_arch2"
8613 [(set (match_operand:CC_NOOV 0 "cc_register" "")
8614 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
8617 "teq\\t%|r0, %|r0\;teq\\t%|pc, %|pc"
8618 [(set_attr "length" "8")
8619 (set_attr "conds" "set")]
8622 ;; Call subroutine returning any type.
8624 (define_expand "untyped_call"
8625 [(parallel [(call (match_operand 0 "" "")
8627 (match_operand 1 "" "")
8628 (match_operand 2 "" "")])]
8633 rtx par = gen_rtx_PARALLEL (VOIDmode,
8634 rtvec_alloc (XVECLEN (operands[2], 0)));
8635 rtx addr = gen_reg_rtx (Pmode);
8639 emit_move_insn (addr, XEXP (operands[1], 0));
8640 mem = change_address (operands[1], BLKmode, addr);
8642 for (i = 0; i < XVECLEN (operands[2], 0); i++)
8644 rtx src = SET_SRC (XVECEXP (operands[2], 0, i));
8646 /* Default code only uses r0 as a return value, but we could
8647 be using anything up to 4 registers. */
8648 if (REGNO (src) == R0_REGNUM)
8649 src = gen_rtx_REG (TImode, R0_REGNUM);
8651 XVECEXP (par, 0, i) = gen_rtx_EXPR_LIST (VOIDmode, src,
8653 size += GET_MODE_SIZE (GET_MODE (src));
8656 emit_call_insn (GEN_CALL_VALUE (par, operands[0], const0_rtx, NULL,
8661 for (i = 0; i < XVECLEN (par, 0); i++)
8663 HOST_WIDE_INT offset = 0;
8664 rtx reg = XEXP (XVECEXP (par, 0, i), 0);
8667 emit_move_insn (addr, plus_constant (addr, size));
8669 mem = change_address (mem, GET_MODE (reg), NULL);
8670 if (REGNO (reg) == R0_REGNUM)
8672 /* On thumb we have to use a write-back instruction. */
8673 emit_insn (arm_gen_store_multiple (arm_regs_in_sequence, 4, addr,
8674 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
8675 size = TARGET_ARM ? 16 : 0;
8679 emit_move_insn (mem, reg);
8680 size = GET_MODE_SIZE (GET_MODE (reg));
8684 /* The optimizer does not know that the call sets the function value
8685 registers we stored in the result block. We avoid problems by
8686 claiming that all hard registers are used and clobbered at this
8688 emit_insn (gen_blockage ());
8694 (define_expand "untyped_return"
8695 [(match_operand:BLK 0 "memory_operand" "")
8696 (match_operand 1 "" "")]
8701 rtx addr = gen_reg_rtx (Pmode);
8705 emit_move_insn (addr, XEXP (operands[0], 0));
8706 mem = change_address (operands[0], BLKmode, addr);
8708 for (i = 0; i < XVECLEN (operands[1], 0); i++)
8710 HOST_WIDE_INT offset = 0;
8711 rtx reg = SET_DEST (XVECEXP (operands[1], 0, i));
8714 emit_move_insn (addr, plus_constant (addr, size));
8716 mem = change_address (mem, GET_MODE (reg), NULL);
8717 if (REGNO (reg) == R0_REGNUM)
8719 /* On thumb we have to use a write-back instruction. */
8720 emit_insn (arm_gen_load_multiple (arm_regs_in_sequence, 4, addr,
8721 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
8722 size = TARGET_ARM ? 16 : 0;
8726 emit_move_insn (reg, mem);
8727 size = GET_MODE_SIZE (GET_MODE (reg));
8731 /* Emit USE insns before the return. */
8732 for (i = 0; i < XVECLEN (operands[1], 0); i++)
8733 emit_use (SET_DEST (XVECEXP (operands[1], 0, i)));
8735 /* Construct the return. */
8736 expand_naked_return ();
8742 ;; UNSPEC_VOLATILE is considered to use and clobber all hard registers and
8743 ;; all of memory. This blocks insns from being moved across this point.
8745 (define_insn "blockage"
8746 [(unspec_volatile [(const_int 0)] VUNSPEC_BLOCKAGE)]
8749 [(set_attr "length" "0")
8750 (set_attr "type" "block")]
8753 (define_expand "casesi"
8754 [(match_operand:SI 0 "s_register_operand" "") ; index to jump on
8755 (match_operand:SI 1 "const_int_operand" "") ; lower bound
8756 (match_operand:SI 2 "const_int_operand" "") ; total range
8757 (match_operand:SI 3 "" "") ; table label
8758 (match_operand:SI 4 "" "")] ; Out of range label
8759 "TARGET_32BIT || optimize_size || flag_pic"
8762 enum insn_code code;
8763 if (operands[1] != const0_rtx)
8765 rtx reg = gen_reg_rtx (SImode);
8767 emit_insn (gen_addsi3 (reg, operands[0],
8768 gen_int_mode (-INTVAL (operands[1]),
8774 code = CODE_FOR_arm_casesi_internal;
8775 else if (TARGET_THUMB1)
8776 code = CODE_FOR_thumb1_casesi_internal_pic;
8778 code = CODE_FOR_thumb2_casesi_internal_pic;
8780 code = CODE_FOR_thumb2_casesi_internal;
8782 if (!insn_data[(int) code].operand[1].predicate(operands[2], SImode))
8783 operands[2] = force_reg (SImode, operands[2]);
8785 emit_jump_insn (GEN_FCN ((int) code) (operands[0], operands[2],
8786 operands[3], operands[4]));
8791 ;; The USE in this pattern is needed to tell flow analysis that this is
8792 ;; a CASESI insn. It has no other purpose.
8793 (define_insn "arm_casesi_internal"
8794 [(parallel [(set (pc)
8796 (leu (match_operand:SI 0 "s_register_operand" "r")
8797 (match_operand:SI 1 "arm_rhs_operand" "rI"))
8798 (mem:SI (plus:SI (mult:SI (match_dup 0) (const_int 4))
8799 (label_ref (match_operand 2 "" ""))))
8800 (label_ref (match_operand 3 "" ""))))
8801 (clobber (reg:CC CC_REGNUM))
8802 (use (label_ref (match_dup 2)))])]
8806 return \"cmp\\t%0, %1\;addls\\t%|pc, %|pc, %0, asl #2\;b\\t%l3\";
8807 return \"cmp\\t%0, %1\;ldrls\\t%|pc, [%|pc, %0, asl #2]\;b\\t%l3\";
8809 [(set_attr "conds" "clob")
8810 (set_attr "length" "12")]
8813 (define_expand "thumb1_casesi_internal_pic"
8814 [(match_operand:SI 0 "s_register_operand" "")
8815 (match_operand:SI 1 "thumb1_cmp_operand" "")
8816 (match_operand 2 "" "")
8817 (match_operand 3 "" "")]
8821 rtx test = gen_rtx_GTU (VOIDmode, operands[0], operands[1]);
8822 emit_jump_insn (gen_cbranchsi4 (test, operands[0], operands[1],
8824 reg0 = gen_rtx_REG (SImode, 0);
8825 emit_move_insn (reg0, operands[0]);
8826 emit_jump_insn (gen_thumb1_casesi_dispatch (operands[2]/*, operands[3]*/));
8831 (define_insn "thumb1_casesi_dispatch"
8832 [(parallel [(set (pc) (unspec [(reg:SI 0)
8833 (label_ref (match_operand 0 "" ""))
8834 ;; (label_ref (match_operand 1 "" ""))
8836 UNSPEC_THUMB1_CASESI))
8837 (clobber (reg:SI IP_REGNUM))
8838 (clobber (reg:SI LR_REGNUM))])]
8840 "* return thumb1_output_casesi(operands);"
8841 [(set_attr "length" "4")]
8844 (define_expand "indirect_jump"
8846 (match_operand:SI 0 "s_register_operand" ""))]
8849 /* Thumb-2 doesn't have mov pc, reg. Explicitly set the low bit of the
8850 address and use bx. */
8854 tmp = gen_reg_rtx (SImode);
8855 emit_insn (gen_iorsi3 (tmp, operands[0], GEN_INT(1)));
8861 ;; NB Never uses BX.
8862 (define_insn "*arm_indirect_jump"
8864 (match_operand:SI 0 "s_register_operand" "r"))]
8866 "mov%?\\t%|pc, %0\\t%@ indirect register jump"
8867 [(set_attr "predicable" "yes")]
8870 (define_insn "*load_indirect_jump"
8872 (match_operand:SI 0 "memory_operand" "m"))]
8874 "ldr%?\\t%|pc, %0\\t%@ indirect memory jump"
8875 [(set_attr "type" "load1")
8876 (set_attr "pool_range" "4096")
8877 (set_attr "neg_pool_range" "4084")
8878 (set_attr "predicable" "yes")]
8881 ;; NB Never uses BX.
8882 (define_insn "*thumb1_indirect_jump"
8884 (match_operand:SI 0 "register_operand" "l*r"))]
8887 [(set_attr "conds" "clob")
8888 (set_attr "length" "2")]
8898 if (TARGET_UNIFIED_ASM)
8901 return \"mov%?\\t%|r0, %|r0\\t%@ nop\";
8902 return \"mov\\tr8, r8\";
8904 [(set (attr "length")
8905 (if_then_else (eq_attr "is_thumb" "yes")
8911 ;; Patterns to allow combination of arithmetic, cond code and shifts
8913 (define_insn "*arith_shiftsi"
8914 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
8915 (match_operator:SI 1 "shiftable_operator"
8916 [(match_operator:SI 3 "shift_operator"
8917 [(match_operand:SI 4 "s_register_operand" "r,r,r,r")
8918 (match_operand:SI 5 "shift_amount_operand" "M,M,M,r")])
8919 (match_operand:SI 2 "s_register_operand" "rk,rk,r,rk")]))]
8921 "%i1%?\\t%0, %2, %4%S3"
8922 [(set_attr "predicable" "yes")
8923 (set_attr "shift" "4")
8924 (set_attr "arch" "a,t2,t2,a")
8925 ;; Thumb2 doesn't allow the stack pointer to be used for
8926 ;; operand1 for all operations other than add and sub. In this case
8927 ;; the minus operation is a candidate for an rsub and hence needs
8929 ;; We have to make sure to disable the fourth alternative if
8930 ;; the shift_operator is MULT, since otherwise the insn will
8931 ;; also match a multiply_accumulate pattern and validate_change
8932 ;; will allow a replacement of the constant with a register
8933 ;; despite the checks done in shift_operator.
8934 (set_attr_alternative "insn_enabled"
8935 [(const_string "yes")
8937 (match_operand:SI 1 "add_operator" "")
8938 (const_string "yes") (const_string "no"))
8939 (const_string "yes")
8941 (match_operand:SI 3 "mult_operator" "")
8942 (const_string "no") (const_string "yes"))])
8943 (set_attr "type" "alu_shift,alu_shift,alu_shift,alu_shift_reg")])
8946 [(set (match_operand:SI 0 "s_register_operand" "")
8947 (match_operator:SI 1 "shiftable_operator"
8948 [(match_operator:SI 2 "shiftable_operator"
8949 [(match_operator:SI 3 "shift_operator"
8950 [(match_operand:SI 4 "s_register_operand" "")
8951 (match_operand:SI 5 "reg_or_int_operand" "")])
8952 (match_operand:SI 6 "s_register_operand" "")])
8953 (match_operand:SI 7 "arm_rhs_operand" "")]))
8954 (clobber (match_operand:SI 8 "s_register_operand" ""))]
8957 (match_op_dup 2 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
8960 (match_op_dup 1 [(match_dup 8) (match_dup 7)]))]
8963 (define_insn "*arith_shiftsi_compare0"
8964 [(set (reg:CC_NOOV CC_REGNUM)
8966 (match_operator:SI 1 "shiftable_operator"
8967 [(match_operator:SI 3 "shift_operator"
8968 [(match_operand:SI 4 "s_register_operand" "r,r")
8969 (match_operand:SI 5 "shift_amount_operand" "M,r")])
8970 (match_operand:SI 2 "s_register_operand" "r,r")])
8972 (set (match_operand:SI 0 "s_register_operand" "=r,r")
8973 (match_op_dup 1 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
8976 "%i1%.\\t%0, %2, %4%S3"
8977 [(set_attr "conds" "set")
8978 (set_attr "shift" "4")
8979 (set_attr "arch" "32,a")
8980 (set_attr "type" "alu_shift,alu_shift_reg")])
8982 (define_insn "*arith_shiftsi_compare0_scratch"
8983 [(set (reg:CC_NOOV CC_REGNUM)
8985 (match_operator:SI 1 "shiftable_operator"
8986 [(match_operator:SI 3 "shift_operator"
8987 [(match_operand:SI 4 "s_register_operand" "r,r")
8988 (match_operand:SI 5 "shift_amount_operand" "M,r")])
8989 (match_operand:SI 2 "s_register_operand" "r,r")])
8991 (clobber (match_scratch:SI 0 "=r,r"))]
8993 "%i1%.\\t%0, %2, %4%S3"
8994 [(set_attr "conds" "set")
8995 (set_attr "shift" "4")
8996 (set_attr "arch" "32,a")
8997 (set_attr "type" "alu_shift,alu_shift_reg")])
8999 (define_insn "*sub_shiftsi"
9000 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9001 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
9002 (match_operator:SI 2 "shift_operator"
9003 [(match_operand:SI 3 "s_register_operand" "r,r")
9004 (match_operand:SI 4 "shift_amount_operand" "M,r")])))]
9006 "sub%?\\t%0, %1, %3%S2"
9007 [(set_attr "predicable" "yes")
9008 (set_attr "shift" "3")
9009 (set_attr "arch" "32,a")
9010 (set_attr "type" "alu_shift,alu_shift_reg")])
9012 (define_insn "*sub_shiftsi_compare0"
9013 [(set (reg:CC_NOOV CC_REGNUM)
9015 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
9016 (match_operator:SI 2 "shift_operator"
9017 [(match_operand:SI 3 "s_register_operand" "r,r")
9018 (match_operand:SI 4 "shift_amount_operand" "M,rM")]))
9020 (set (match_operand:SI 0 "s_register_operand" "=r,r")
9021 (minus:SI (match_dup 1)
9022 (match_op_dup 2 [(match_dup 3) (match_dup 4)])))]
9024 "sub%.\\t%0, %1, %3%S2"
9025 [(set_attr "conds" "set")
9026 (set_attr "shift" "3")
9027 (set_attr "arch" "32,a")
9028 (set_attr "type" "alu_shift,alu_shift_reg")])
9030 (define_insn "*sub_shiftsi_compare0_scratch"
9031 [(set (reg:CC_NOOV CC_REGNUM)
9033 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
9034 (match_operator:SI 2 "shift_operator"
9035 [(match_operand:SI 3 "s_register_operand" "r,r")
9036 (match_operand:SI 4 "shift_amount_operand" "M,rM")]))
9038 (clobber (match_scratch:SI 0 "=r,r"))]
9040 "sub%.\\t%0, %1, %3%S2"
9041 [(set_attr "conds" "set")
9042 (set_attr "shift" "3")
9043 (set_attr "arch" "32,a")
9044 (set_attr "type" "alu_shift,alu_shift_reg")])
9047 (define_insn "*and_scc"
9048 [(set (match_operand:SI 0 "s_register_operand" "=r")
9049 (and:SI (match_operator:SI 1 "arm_comparison_operator"
9050 [(match_operand 3 "cc_register" "") (const_int 0)])
9051 (match_operand:SI 2 "s_register_operand" "r")))]
9053 "mov%D1\\t%0, #0\;and%d1\\t%0, %2, #1"
9054 [(set_attr "conds" "use")
9055 (set_attr "insn" "mov")
9056 (set_attr "length" "8")]
9059 (define_insn "*ior_scc"
9060 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9061 (ior:SI (match_operator:SI 2 "arm_comparison_operator"
9062 [(match_operand 3 "cc_register" "") (const_int 0)])
9063 (match_operand:SI 1 "s_register_operand" "0,?r")))]
9067 mov%D2\\t%0, %1\;orr%d2\\t%0, %1, #1"
9068 [(set_attr "conds" "use")
9069 (set_attr "length" "4,8")]
9072 ; A series of splitters for the compare_scc pattern below. Note that
9073 ; order is important.
9075 [(set (match_operand:SI 0 "s_register_operand" "")
9076 (lt:SI (match_operand:SI 1 "s_register_operand" "")
9078 (clobber (reg:CC CC_REGNUM))]
9079 "TARGET_32BIT && reload_completed"
9080 [(set (match_dup 0) (lshiftrt:SI (match_dup 1) (const_int 31)))])
9083 [(set (match_operand:SI 0 "s_register_operand" "")
9084 (ge:SI (match_operand:SI 1 "s_register_operand" "")
9086 (clobber (reg:CC CC_REGNUM))]
9087 "TARGET_32BIT && reload_completed"
9088 [(set (match_dup 0) (not:SI (match_dup 1)))
9089 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 31)))])
9092 [(set (match_operand:SI 0 "s_register_operand" "")
9093 (eq:SI (match_operand:SI 1 "s_register_operand" "")
9095 (clobber (reg:CC CC_REGNUM))]
9096 "TARGET_32BIT && reload_completed"
9098 [(set (reg:CC CC_REGNUM)
9099 (compare:CC (const_int 1) (match_dup 1)))
9101 (minus:SI (const_int 1) (match_dup 1)))])
9102 (cond_exec (ltu:CC (reg:CC CC_REGNUM) (const_int 0))
9103 (set (match_dup 0) (const_int 0)))])
9106 [(set (match_operand:SI 0 "s_register_operand" "")
9107 (ne:SI (match_operand:SI 1 "s_register_operand" "")
9108 (match_operand:SI 2 "const_int_operand" "")))
9109 (clobber (reg:CC CC_REGNUM))]
9110 "TARGET_32BIT && reload_completed"
9112 [(set (reg:CC CC_REGNUM)
9113 (compare:CC (match_dup 1) (match_dup 2)))
9114 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))])
9115 (cond_exec (ne:CC (reg:CC CC_REGNUM) (const_int 0))
9116 (set (match_dup 0) (const_int 1)))]
9118 operands[3] = GEN_INT (-INTVAL (operands[2]));
9122 [(set (match_operand:SI 0 "s_register_operand" "")
9123 (ne:SI (match_operand:SI 1 "s_register_operand" "")
9124 (match_operand:SI 2 "arm_add_operand" "")))
9125 (clobber (reg:CC CC_REGNUM))]
9126 "TARGET_32BIT && reload_completed"
9128 [(set (reg:CC_NOOV CC_REGNUM)
9129 (compare:CC_NOOV (minus:SI (match_dup 1) (match_dup 2))
9131 (set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))])
9132 (cond_exec (ne:CC_NOOV (reg:CC_NOOV CC_REGNUM) (const_int 0))
9133 (set (match_dup 0) (const_int 1)))])
9135 (define_insn_and_split "*compare_scc"
9136 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9137 (match_operator:SI 1 "arm_comparison_operator"
9138 [(match_operand:SI 2 "s_register_operand" "r,r")
9139 (match_operand:SI 3 "arm_add_operand" "rI,L")]))
9140 (clobber (reg:CC CC_REGNUM))]
9143 "&& reload_completed"
9144 [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 3)))
9145 (cond_exec (match_dup 4) (set (match_dup 0) (const_int 0)))
9146 (cond_exec (match_dup 5) (set (match_dup 0) (const_int 1)))]
9149 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
9150 operands[2], operands[3]);
9151 enum rtx_code rc = GET_CODE (operands[1]);
9153 tmp1 = gen_rtx_REG (mode, CC_REGNUM);
9155 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, tmp1, const0_rtx);
9156 if (mode == CCFPmode || mode == CCFPEmode)
9157 rc = reverse_condition_maybe_unordered (rc);
9159 rc = reverse_condition (rc);
9160 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, tmp1, const0_rtx);
9163 ;; Attempt to improve the sequence generated by the compare_scc splitters
9164 ;; not to use conditional execution.
9166 [(set (reg:CC CC_REGNUM)
9167 (compare:CC (match_operand:SI 1 "register_operand" "")
9168 (match_operand:SI 2 "arm_rhs_operand" "")))
9169 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
9170 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
9171 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
9172 (set (match_dup 0) (const_int 1)))
9173 (match_scratch:SI 3 "r")]
9176 [(set (reg:CC CC_REGNUM)
9177 (compare:CC (match_dup 1) (match_dup 2)))
9178 (set (match_dup 3) (minus:SI (match_dup 1) (match_dup 2)))])
9180 [(set (reg:CC CC_REGNUM)
9181 (compare:CC (const_int 0) (match_dup 3)))
9182 (set (match_dup 0) (minus:SI (const_int 0) (match_dup 3)))])
9185 (plus:SI (plus:SI (match_dup 0) (match_dup 3))
9186 (geu:SI (reg:CC CC_REGNUM) (const_int 0))))
9187 (clobber (reg:CC CC_REGNUM))])])
9189 (define_insn "*cond_move"
9190 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9191 (if_then_else:SI (match_operator 3 "equality_operator"
9192 [(match_operator 4 "arm_comparison_operator"
9193 [(match_operand 5 "cc_register" "") (const_int 0)])
9195 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
9196 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))]
9199 if (GET_CODE (operands[3]) == NE)
9201 if (which_alternative != 1)
9202 output_asm_insn (\"mov%D4\\t%0, %2\", operands);
9203 if (which_alternative != 0)
9204 output_asm_insn (\"mov%d4\\t%0, %1\", operands);
9207 if (which_alternative != 0)
9208 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9209 if (which_alternative != 1)
9210 output_asm_insn (\"mov%d4\\t%0, %2\", operands);
9213 [(set_attr "conds" "use")
9214 (set_attr "insn" "mov")
9215 (set_attr "length" "4,4,8")]
9218 (define_insn "*cond_arith"
9219 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9220 (match_operator:SI 5 "shiftable_operator"
9221 [(match_operator:SI 4 "arm_comparison_operator"
9222 [(match_operand:SI 2 "s_register_operand" "r,r")
9223 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
9224 (match_operand:SI 1 "s_register_operand" "0,?r")]))
9225 (clobber (reg:CC CC_REGNUM))]
9228 if (GET_CODE (operands[4]) == LT && operands[3] == const0_rtx)
9229 return \"%i5\\t%0, %1, %2, lsr #31\";
9231 output_asm_insn (\"cmp\\t%2, %3\", operands);
9232 if (GET_CODE (operands[5]) == AND)
9233 output_asm_insn (\"mov%D4\\t%0, #0\", operands);
9234 else if (GET_CODE (operands[5]) == MINUS)
9235 output_asm_insn (\"rsb%D4\\t%0, %1, #0\", operands);
9236 else if (which_alternative != 0)
9237 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9238 return \"%i5%d4\\t%0, %1, #1\";
9240 [(set_attr "conds" "clob")
9241 (set_attr "length" "12")]
9244 (define_insn "*cond_sub"
9245 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9246 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
9247 (match_operator:SI 4 "arm_comparison_operator"
9248 [(match_operand:SI 2 "s_register_operand" "r,r")
9249 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
9250 (clobber (reg:CC CC_REGNUM))]
9253 output_asm_insn (\"cmp\\t%2, %3\", operands);
9254 if (which_alternative != 0)
9255 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9256 return \"sub%d4\\t%0, %1, #1\";
9258 [(set_attr "conds" "clob")
9259 (set_attr "length" "8,12")]
9262 (define_insn "*cmp_ite0"
9263 [(set (match_operand 6 "dominant_cc_register" "")
9266 (match_operator 4 "arm_comparison_operator"
9267 [(match_operand:SI 0 "s_register_operand"
9268 "l,l,l,r,r,r,r,r,r")
9269 (match_operand:SI 1 "arm_add_operand"
9270 "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
9271 (match_operator:SI 5 "arm_comparison_operator"
9272 [(match_operand:SI 2 "s_register_operand"
9273 "l,r,r,l,l,r,r,r,r")
9274 (match_operand:SI 3 "arm_add_operand"
9275 "lPy,rI,L,lPy,lPy,rI,rI,L,L")])
9281 static const char * const cmp1[NUM_OF_COND_CMP][2] =
9283 {\"cmp%d5\\t%0, %1\",
9284 \"cmp%d4\\t%2, %3\"},
9285 {\"cmn%d5\\t%0, #%n1\",
9286 \"cmp%d4\\t%2, %3\"},
9287 {\"cmp%d5\\t%0, %1\",
9288 \"cmn%d4\\t%2, #%n3\"},
9289 {\"cmn%d5\\t%0, #%n1\",
9290 \"cmn%d4\\t%2, #%n3\"}
9292 static const char * const cmp2[NUM_OF_COND_CMP][2] =
9297 \"cmn\\t%0, #%n1\"},
9298 {\"cmn\\t%2, #%n3\",
9300 {\"cmn\\t%2, #%n3\",
9303 static const char * const ite[2] =
9308 static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
9309 CMP_CMP, CMN_CMP, CMP_CMP,
9310 CMN_CMP, CMP_CMN, CMN_CMN};
9312 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9314 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
9315 if (TARGET_THUMB2) {
9316 output_asm_insn (ite[swap], operands);
9318 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
9321 [(set_attr "conds" "set")
9322 (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
9323 (set_attr_alternative "length"
9329 (if_then_else (eq_attr "is_thumb" "no")
9332 (if_then_else (eq_attr "is_thumb" "no")
9335 (if_then_else (eq_attr "is_thumb" "no")
9338 (if_then_else (eq_attr "is_thumb" "no")
9343 (define_insn "*cmp_ite1"
9344 [(set (match_operand 6 "dominant_cc_register" "")
9347 (match_operator 4 "arm_comparison_operator"
9348 [(match_operand:SI 0 "s_register_operand"
9349 "l,l,l,r,r,r,r,r,r")
9350 (match_operand:SI 1 "arm_add_operand"
9351 "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
9352 (match_operator:SI 5 "arm_comparison_operator"
9353 [(match_operand:SI 2 "s_register_operand"
9354 "l,r,r,l,l,r,r,r,r")
9355 (match_operand:SI 3 "arm_add_operand"
9356 "lPy,rI,L,lPy,lPy,rI,rI,L,L")])
9362 static const char * const cmp1[NUM_OF_COND_CMP][2] =
9366 {\"cmn\\t%0, #%n1\",
9369 \"cmn\\t%2, #%n3\"},
9370 {\"cmn\\t%0, #%n1\",
9373 static const char * const cmp2[NUM_OF_COND_CMP][2] =
9375 {\"cmp%d4\\t%2, %3\",
9376 \"cmp%D5\\t%0, %1\"},
9377 {\"cmp%d4\\t%2, %3\",
9378 \"cmn%D5\\t%0, #%n1\"},
9379 {\"cmn%d4\\t%2, #%n3\",
9380 \"cmp%D5\\t%0, %1\"},
9381 {\"cmn%d4\\t%2, #%n3\",
9382 \"cmn%D5\\t%0, #%n1\"}
9384 static const char * const ite[2] =
9389 static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
9390 CMP_CMP, CMN_CMP, CMP_CMP,
9391 CMN_CMP, CMP_CMN, CMN_CMN};
9393 comparison_dominates_p (GET_CODE (operands[5]),
9394 reverse_condition (GET_CODE (operands[4])));
9396 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
9397 if (TARGET_THUMB2) {
9398 output_asm_insn (ite[swap], operands);
9400 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
9403 [(set_attr "conds" "set")
9404 (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
9405 (set_attr_alternative "length"
9411 (if_then_else (eq_attr "is_thumb" "no")
9414 (if_then_else (eq_attr "is_thumb" "no")
9417 (if_then_else (eq_attr "is_thumb" "no")
9420 (if_then_else (eq_attr "is_thumb" "no")
9425 (define_insn "*cmp_and"
9426 [(set (match_operand 6 "dominant_cc_register" "")
9429 (match_operator 4 "arm_comparison_operator"
9430 [(match_operand:SI 0 "s_register_operand"
9431 "l,l,l,r,r,r,r,r,r")
9432 (match_operand:SI 1 "arm_add_operand"
9433 "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
9434 (match_operator:SI 5 "arm_comparison_operator"
9435 [(match_operand:SI 2 "s_register_operand"
9436 "l,r,r,l,l,r,r,r,r")
9437 (match_operand:SI 3 "arm_add_operand"
9438 "lPy,rI,L,lPy,lPy,rI,rI,L,L")]))
9443 static const char *const cmp1[NUM_OF_COND_CMP][2] =
9445 {\"cmp%d5\\t%0, %1\",
9446 \"cmp%d4\\t%2, %3\"},
9447 {\"cmn%d5\\t%0, #%n1\",
9448 \"cmp%d4\\t%2, %3\"},
9449 {\"cmp%d5\\t%0, %1\",
9450 \"cmn%d4\\t%2, #%n3\"},
9451 {\"cmn%d5\\t%0, #%n1\",
9452 \"cmn%d4\\t%2, #%n3\"}
9454 static const char *const cmp2[NUM_OF_COND_CMP][2] =
9459 \"cmn\\t%0, #%n1\"},
9460 {\"cmn\\t%2, #%n3\",
9462 {\"cmn\\t%2, #%n3\",
9465 static const char *const ite[2] =
9470 static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
9471 CMP_CMP, CMN_CMP, CMP_CMP,
9472 CMN_CMP, CMP_CMN, CMN_CMN};
9474 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9476 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
9477 if (TARGET_THUMB2) {
9478 output_asm_insn (ite[swap], operands);
9480 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
9483 [(set_attr "conds" "set")
9484 (set_attr "predicable" "no")
9485 (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
9486 (set_attr_alternative "length"
9492 (if_then_else (eq_attr "is_thumb" "no")
9495 (if_then_else (eq_attr "is_thumb" "no")
9498 (if_then_else (eq_attr "is_thumb" "no")
9501 (if_then_else (eq_attr "is_thumb" "no")
9506 (define_insn "*cmp_ior"
9507 [(set (match_operand 6 "dominant_cc_register" "")
9510 (match_operator 4 "arm_comparison_operator"
9511 [(match_operand:SI 0 "s_register_operand"
9512 "l,l,l,r,r,r,r,r,r")
9513 (match_operand:SI 1 "arm_add_operand"
9514 "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
9515 (match_operator:SI 5 "arm_comparison_operator"
9516 [(match_operand:SI 2 "s_register_operand"
9517 "l,r,r,l,l,r,r,r,r")
9518 (match_operand:SI 3 "arm_add_operand"
9519 "lPy,rI,L,lPy,lPy,rI,rI,L,L")]))
9524 static const char *const cmp1[NUM_OF_COND_CMP][2] =
9528 {\"cmn\\t%0, #%n1\",
9531 \"cmn\\t%2, #%n3\"},
9532 {\"cmn\\t%0, #%n1\",
9535 static const char *const cmp2[NUM_OF_COND_CMP][2] =
9537 {\"cmp%D4\\t%2, %3\",
9538 \"cmp%D5\\t%0, %1\"},
9539 {\"cmp%D4\\t%2, %3\",
9540 \"cmn%D5\\t%0, #%n1\"},
9541 {\"cmn%D4\\t%2, #%n3\",
9542 \"cmp%D5\\t%0, %1\"},
9543 {\"cmn%D4\\t%2, #%n3\",
9544 \"cmn%D5\\t%0, #%n1\"}
9546 static const char *const ite[2] =
9551 static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
9552 CMP_CMP, CMN_CMP, CMP_CMP,
9553 CMN_CMP, CMP_CMN, CMN_CMN};
9555 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9557 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
9558 if (TARGET_THUMB2) {
9559 output_asm_insn (ite[swap], operands);
9561 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
9565 [(set_attr "conds" "set")
9566 (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
9567 (set_attr_alternative "length"
9573 (if_then_else (eq_attr "is_thumb" "no")
9576 (if_then_else (eq_attr "is_thumb" "no")
9579 (if_then_else (eq_attr "is_thumb" "no")
9582 (if_then_else (eq_attr "is_thumb" "no")
9587 (define_insn_and_split "*ior_scc_scc"
9588 [(set (match_operand:SI 0 "s_register_operand" "=r")
9589 (ior:SI (match_operator:SI 3 "arm_comparison_operator"
9590 [(match_operand:SI 1 "s_register_operand" "r")
9591 (match_operand:SI 2 "arm_add_operand" "rIL")])
9592 (match_operator:SI 6 "arm_comparison_operator"
9593 [(match_operand:SI 4 "s_register_operand" "r")
9594 (match_operand:SI 5 "arm_add_operand" "rIL")])))
9595 (clobber (reg:CC CC_REGNUM))]
9597 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_OR_Y)
9600 "TARGET_32BIT && reload_completed"
9604 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9605 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9607 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
9609 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
9612 [(set_attr "conds" "clob")
9613 (set_attr "length" "16")])
9615 ; If the above pattern is followed by a CMP insn, then the compare is
9616 ; redundant, since we can rework the conditional instruction that follows.
9617 (define_insn_and_split "*ior_scc_scc_cmp"
9618 [(set (match_operand 0 "dominant_cc_register" "")
9619 (compare (ior:SI (match_operator:SI 3 "arm_comparison_operator"
9620 [(match_operand:SI 1 "s_register_operand" "r")
9621 (match_operand:SI 2 "arm_add_operand" "rIL")])
9622 (match_operator:SI 6 "arm_comparison_operator"
9623 [(match_operand:SI 4 "s_register_operand" "r")
9624 (match_operand:SI 5 "arm_add_operand" "rIL")]))
9626 (set (match_operand:SI 7 "s_register_operand" "=r")
9627 (ior:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9628 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
9631 "TARGET_32BIT && reload_completed"
9635 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9636 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9638 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
9640 [(set_attr "conds" "set")
9641 (set_attr "length" "16")])
9643 (define_insn_and_split "*and_scc_scc"
9644 [(set (match_operand:SI 0 "s_register_operand" "=r")
9645 (and:SI (match_operator:SI 3 "arm_comparison_operator"
9646 [(match_operand:SI 1 "s_register_operand" "r")
9647 (match_operand:SI 2 "arm_add_operand" "rIL")])
9648 (match_operator:SI 6 "arm_comparison_operator"
9649 [(match_operand:SI 4 "s_register_operand" "r")
9650 (match_operand:SI 5 "arm_add_operand" "rIL")])))
9651 (clobber (reg:CC CC_REGNUM))]
9653 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9656 "TARGET_32BIT && reload_completed
9657 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9662 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9663 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9665 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
9667 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
9670 [(set_attr "conds" "clob")
9671 (set_attr "length" "16")])
9673 ; If the above pattern is followed by a CMP insn, then the compare is
9674 ; redundant, since we can rework the conditional instruction that follows.
9675 (define_insn_and_split "*and_scc_scc_cmp"
9676 [(set (match_operand 0 "dominant_cc_register" "")
9677 (compare (and:SI (match_operator:SI 3 "arm_comparison_operator"
9678 [(match_operand:SI 1 "s_register_operand" "r")
9679 (match_operand:SI 2 "arm_add_operand" "rIL")])
9680 (match_operator:SI 6 "arm_comparison_operator"
9681 [(match_operand:SI 4 "s_register_operand" "r")
9682 (match_operand:SI 5 "arm_add_operand" "rIL")]))
9684 (set (match_operand:SI 7 "s_register_operand" "=r")
9685 (and:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9686 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
9689 "TARGET_32BIT && reload_completed"
9693 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9694 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9696 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
9698 [(set_attr "conds" "set")
9699 (set_attr "length" "16")])
9701 ;; If there is no dominance in the comparison, then we can still save an
9702 ;; instruction in the AND case, since we can know that the second compare
9703 ;; need only zero the value if false (if true, then the value is already
9705 (define_insn_and_split "*and_scc_scc_nodom"
9706 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
9707 (and:SI (match_operator:SI 3 "arm_comparison_operator"
9708 [(match_operand:SI 1 "s_register_operand" "r,r,0")
9709 (match_operand:SI 2 "arm_add_operand" "rIL,0,rIL")])
9710 (match_operator:SI 6 "arm_comparison_operator"
9711 [(match_operand:SI 4 "s_register_operand" "r,r,r")
9712 (match_operand:SI 5 "arm_add_operand" "rIL,rIL,rIL")])))
9713 (clobber (reg:CC CC_REGNUM))]
9715 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9718 "TARGET_32BIT && reload_completed"
9719 [(parallel [(set (match_dup 0)
9720 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))
9721 (clobber (reg:CC CC_REGNUM))])
9722 (set (match_dup 7) (match_op_dup 8 [(match_dup 4) (match_dup 5)]))
9724 (if_then_else:SI (match_op_dup 6 [(match_dup 7) (const_int 0)])
9727 "operands[7] = gen_rtx_REG (SELECT_CC_MODE (GET_CODE (operands[6]),
9728 operands[4], operands[5]),
9730 operands[8] = gen_rtx_COMPARE (GET_MODE (operands[7]), operands[4],
9732 [(set_attr "conds" "clob")
9733 (set_attr "length" "20")])
9736 [(set (reg:CC_NOOV CC_REGNUM)
9737 (compare:CC_NOOV (ior:SI
9738 (and:SI (match_operand:SI 0 "s_register_operand" "")
9740 (match_operator:SI 1 "arm_comparison_operator"
9741 [(match_operand:SI 2 "s_register_operand" "")
9742 (match_operand:SI 3 "arm_add_operand" "")]))
9744 (clobber (match_operand:SI 4 "s_register_operand" ""))]
9747 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
9749 (set (reg:CC_NOOV CC_REGNUM)
9750 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
9755 [(set (reg:CC_NOOV CC_REGNUM)
9756 (compare:CC_NOOV (ior:SI
9757 (match_operator:SI 1 "arm_comparison_operator"
9758 [(match_operand:SI 2 "s_register_operand" "")
9759 (match_operand:SI 3 "arm_add_operand" "")])
9760 (and:SI (match_operand:SI 0 "s_register_operand" "")
9763 (clobber (match_operand:SI 4 "s_register_operand" ""))]
9766 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
9768 (set (reg:CC_NOOV CC_REGNUM)
9769 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
9772 ;; ??? The conditional patterns above need checking for Thumb-2 usefulness
9774 (define_insn "*negscc"
9775 [(set (match_operand:SI 0 "s_register_operand" "=r")
9776 (neg:SI (match_operator 3 "arm_comparison_operator"
9777 [(match_operand:SI 1 "s_register_operand" "r")
9778 (match_operand:SI 2 "arm_rhs_operand" "rI")])))
9779 (clobber (reg:CC CC_REGNUM))]
9782 if (GET_CODE (operands[3]) == LT && operands[2] == const0_rtx)
9783 return \"mov\\t%0, %1, asr #31\";
9785 if (GET_CODE (operands[3]) == NE)
9786 return \"subs\\t%0, %1, %2\;mvnne\\t%0, #0\";
9788 output_asm_insn (\"cmp\\t%1, %2\", operands);
9789 output_asm_insn (\"mov%D3\\t%0, #0\", operands);
9790 return \"mvn%d3\\t%0, #0\";
9792 [(set_attr "conds" "clob")
9793 (set_attr "length" "12")]
9796 (define_insn "movcond"
9797 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9799 (match_operator 5 "arm_comparison_operator"
9800 [(match_operand:SI 3 "s_register_operand" "r,r,r")
9801 (match_operand:SI 4 "arm_add_operand" "rIL,rIL,rIL")])
9802 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
9803 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
9804 (clobber (reg:CC CC_REGNUM))]
9807 if (GET_CODE (operands[5]) == LT
9808 && (operands[4] == const0_rtx))
9810 if (which_alternative != 1 && GET_CODE (operands[1]) == REG)
9812 if (operands[2] == const0_rtx)
9813 return \"and\\t%0, %1, %3, asr #31\";
9814 return \"ands\\t%0, %1, %3, asr #32\;movcc\\t%0, %2\";
9816 else if (which_alternative != 0 && GET_CODE (operands[2]) == REG)
9818 if (operands[1] == const0_rtx)
9819 return \"bic\\t%0, %2, %3, asr #31\";
9820 return \"bics\\t%0, %2, %3, asr #32\;movcs\\t%0, %1\";
9822 /* The only case that falls through to here is when both ops 1 & 2
9826 if (GET_CODE (operands[5]) == GE
9827 && (operands[4] == const0_rtx))
9829 if (which_alternative != 1 && GET_CODE (operands[1]) == REG)
9831 if (operands[2] == const0_rtx)
9832 return \"bic\\t%0, %1, %3, asr #31\";
9833 return \"bics\\t%0, %1, %3, asr #32\;movcs\\t%0, %2\";
9835 else if (which_alternative != 0 && GET_CODE (operands[2]) == REG)
9837 if (operands[1] == const0_rtx)
9838 return \"and\\t%0, %2, %3, asr #31\";
9839 return \"ands\\t%0, %2, %3, asr #32\;movcc\\t%0, %1\";
9841 /* The only case that falls through to here is when both ops 1 & 2
9844 if (GET_CODE (operands[4]) == CONST_INT
9845 && !const_ok_for_arm (INTVAL (operands[4])))
9846 output_asm_insn (\"cmn\\t%3, #%n4\", operands);
9848 output_asm_insn (\"cmp\\t%3, %4\", operands);
9849 if (which_alternative != 0)
9850 output_asm_insn (\"mov%d5\\t%0, %1\", operands);
9851 if (which_alternative != 1)
9852 output_asm_insn (\"mov%D5\\t%0, %2\", operands);
9855 [(set_attr "conds" "clob")
9856 (set_attr "length" "8,8,12")]
9859 ;; ??? The patterns below need checking for Thumb-2 usefulness.
9861 (define_insn "*ifcompare_plus_move"
9862 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9863 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9864 [(match_operand:SI 4 "s_register_operand" "r,r")
9865 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9867 (match_operand:SI 2 "s_register_operand" "r,r")
9868 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))
9869 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
9870 (clobber (reg:CC CC_REGNUM))]
9873 [(set_attr "conds" "clob")
9874 (set_attr "length" "8,12")]
9877 (define_insn "*if_plus_move"
9878 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
9880 (match_operator 4 "arm_comparison_operator"
9881 [(match_operand 5 "cc_register" "") (const_int 0)])
9883 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
9884 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))
9885 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")))]
9889 sub%d4\\t%0, %2, #%n3
9890 add%d4\\t%0, %2, %3\;mov%D4\\t%0, %1
9891 sub%d4\\t%0, %2, #%n3\;mov%D4\\t%0, %1"
9892 [(set_attr "conds" "use")
9893 (set_attr "length" "4,4,8,8")
9894 (set_attr "type" "*,*,*,*")]
9897 (define_insn "*ifcompare_move_plus"
9898 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9899 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9900 [(match_operand:SI 4 "s_register_operand" "r,r")
9901 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9902 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9904 (match_operand:SI 2 "s_register_operand" "r,r")
9905 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))))
9906 (clobber (reg:CC CC_REGNUM))]
9909 [(set_attr "conds" "clob")
9910 (set_attr "length" "8,12")]
9913 (define_insn "*if_move_plus"
9914 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
9916 (match_operator 4 "arm_comparison_operator"
9917 [(match_operand 5 "cc_register" "") (const_int 0)])
9918 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")
9920 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
9921 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))))]
9925 sub%D4\\t%0, %2, #%n3
9926 add%D4\\t%0, %2, %3\;mov%d4\\t%0, %1
9927 sub%D4\\t%0, %2, #%n3\;mov%d4\\t%0, %1"
9928 [(set_attr "conds" "use")
9929 (set_attr "length" "4,4,8,8")
9930 (set_attr "type" "*,*,*,*")]
9933 (define_insn "*ifcompare_arith_arith"
9934 [(set (match_operand:SI 0 "s_register_operand" "=r")
9935 (if_then_else:SI (match_operator 9 "arm_comparison_operator"
9936 [(match_operand:SI 5 "s_register_operand" "r")
9937 (match_operand:SI 6 "arm_add_operand" "rIL")])
9938 (match_operator:SI 8 "shiftable_operator"
9939 [(match_operand:SI 1 "s_register_operand" "r")
9940 (match_operand:SI 2 "arm_rhs_operand" "rI")])
9941 (match_operator:SI 7 "shiftable_operator"
9942 [(match_operand:SI 3 "s_register_operand" "r")
9943 (match_operand:SI 4 "arm_rhs_operand" "rI")])))
9944 (clobber (reg:CC CC_REGNUM))]
9947 [(set_attr "conds" "clob")
9948 (set_attr "length" "12")]
9951 (define_insn "*if_arith_arith"
9952 [(set (match_operand:SI 0 "s_register_operand" "=r")
9953 (if_then_else:SI (match_operator 5 "arm_comparison_operator"
9954 [(match_operand 8 "cc_register" "") (const_int 0)])
9955 (match_operator:SI 6 "shiftable_operator"
9956 [(match_operand:SI 1 "s_register_operand" "r")
9957 (match_operand:SI 2 "arm_rhs_operand" "rI")])
9958 (match_operator:SI 7 "shiftable_operator"
9959 [(match_operand:SI 3 "s_register_operand" "r")
9960 (match_operand:SI 4 "arm_rhs_operand" "rI")])))]
9962 "%I6%d5\\t%0, %1, %2\;%I7%D5\\t%0, %3, %4"
9963 [(set_attr "conds" "use")
9964 (set_attr "length" "8")]
9967 (define_insn "*ifcompare_arith_move"
9968 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9969 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9970 [(match_operand:SI 2 "s_register_operand" "r,r")
9971 (match_operand:SI 3 "arm_add_operand" "rIL,rIL")])
9972 (match_operator:SI 7 "shiftable_operator"
9973 [(match_operand:SI 4 "s_register_operand" "r,r")
9974 (match_operand:SI 5 "arm_rhs_operand" "rI,rI")])
9975 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
9976 (clobber (reg:CC CC_REGNUM))]
9979 /* If we have an operation where (op x 0) is the identity operation and
9980 the conditional operator is LT or GE and we are comparing against zero and
9981 everything is in registers then we can do this in two instructions. */
9982 if (operands[3] == const0_rtx
9983 && GET_CODE (operands[7]) != AND
9984 && GET_CODE (operands[5]) == REG
9985 && GET_CODE (operands[1]) == REG
9986 && REGNO (operands[1]) == REGNO (operands[4])
9987 && REGNO (operands[4]) != REGNO (operands[0]))
9989 if (GET_CODE (operands[6]) == LT)
9990 return \"and\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
9991 else if (GET_CODE (operands[6]) == GE)
9992 return \"bic\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
9994 if (GET_CODE (operands[3]) == CONST_INT
9995 && !const_ok_for_arm (INTVAL (operands[3])))
9996 output_asm_insn (\"cmn\\t%2, #%n3\", operands);
9998 output_asm_insn (\"cmp\\t%2, %3\", operands);
9999 output_asm_insn (\"%I7%d6\\t%0, %4, %5\", operands);
10000 if (which_alternative != 0)
10001 return \"mov%D6\\t%0, %1\";
10004 [(set_attr "conds" "clob")
10005 (set_attr "length" "8,12")]
10008 (define_insn "*if_arith_move"
10009 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10010 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
10011 [(match_operand 6 "cc_register" "") (const_int 0)])
10012 (match_operator:SI 5 "shiftable_operator"
10013 [(match_operand:SI 2 "s_register_operand" "r,r")
10014 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
10015 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))]
10018 %I5%d4\\t%0, %2, %3
10019 %I5%d4\\t%0, %2, %3\;mov%D4\\t%0, %1"
10020 [(set_attr "conds" "use")
10021 (set_attr "length" "4,8")
10022 (set_attr "type" "*,*")]
10025 (define_insn "*ifcompare_move_arith"
10026 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10027 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
10028 [(match_operand:SI 4 "s_register_operand" "r,r")
10029 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
10030 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10031 (match_operator:SI 7 "shiftable_operator"
10032 [(match_operand:SI 2 "s_register_operand" "r,r")
10033 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
10034 (clobber (reg:CC CC_REGNUM))]
10037 /* If we have an operation where (op x 0) is the identity operation and
10038 the conditional operator is LT or GE and we are comparing against zero and
10039 everything is in registers then we can do this in two instructions */
10040 if (operands[5] == const0_rtx
10041 && GET_CODE (operands[7]) != AND
10042 && GET_CODE (operands[3]) == REG
10043 && GET_CODE (operands[1]) == REG
10044 && REGNO (operands[1]) == REGNO (operands[2])
10045 && REGNO (operands[2]) != REGNO (operands[0]))
10047 if (GET_CODE (operands[6]) == GE)
10048 return \"and\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
10049 else if (GET_CODE (operands[6]) == LT)
10050 return \"bic\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
10053 if (GET_CODE (operands[5]) == CONST_INT
10054 && !const_ok_for_arm (INTVAL (operands[5])))
10055 output_asm_insn (\"cmn\\t%4, #%n5\", operands);
10057 output_asm_insn (\"cmp\\t%4, %5\", operands);
10059 if (which_alternative != 0)
10060 output_asm_insn (\"mov%d6\\t%0, %1\", operands);
10061 return \"%I7%D6\\t%0, %2, %3\";
10063 [(set_attr "conds" "clob")
10064 (set_attr "length" "8,12")]
10067 (define_insn "*if_move_arith"
10068 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10070 (match_operator 4 "arm_comparison_operator"
10071 [(match_operand 6 "cc_register" "") (const_int 0)])
10072 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10073 (match_operator:SI 5 "shiftable_operator"
10074 [(match_operand:SI 2 "s_register_operand" "r,r")
10075 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))]
10078 %I5%D4\\t%0, %2, %3
10079 %I5%D4\\t%0, %2, %3\;mov%d4\\t%0, %1"
10080 [(set_attr "conds" "use")
10081 (set_attr "length" "4,8")
10082 (set_attr "type" "*,*")]
10085 (define_insn "*ifcompare_move_not"
10086 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10088 (match_operator 5 "arm_comparison_operator"
10089 [(match_operand:SI 3 "s_register_operand" "r,r")
10090 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10091 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
10093 (match_operand:SI 2 "s_register_operand" "r,r"))))
10094 (clobber (reg:CC CC_REGNUM))]
10097 [(set_attr "conds" "clob")
10098 (set_attr "length" "8,12")]
10101 (define_insn "*if_move_not"
10102 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10104 (match_operator 4 "arm_comparison_operator"
10105 [(match_operand 3 "cc_register" "") (const_int 0)])
10106 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
10107 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
10111 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2
10112 mvn%d4\\t%0, #%B1\;mvn%D4\\t%0, %2"
10113 [(set_attr "conds" "use")
10114 (set_attr "insn" "mvn")
10115 (set_attr "length" "4,8,8")]
10118 (define_insn "*ifcompare_not_move"
10119 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10121 (match_operator 5 "arm_comparison_operator"
10122 [(match_operand:SI 3 "s_register_operand" "r,r")
10123 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10125 (match_operand:SI 2 "s_register_operand" "r,r"))
10126 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
10127 (clobber (reg:CC CC_REGNUM))]
10130 [(set_attr "conds" "clob")
10131 (set_attr "length" "8,12")]
10134 (define_insn "*if_not_move"
10135 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10137 (match_operator 4 "arm_comparison_operator"
10138 [(match_operand 3 "cc_register" "") (const_int 0)])
10139 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
10140 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
10144 mov%D4\\t%0, %1\;mvn%d4\\t%0, %2
10145 mvn%D4\\t%0, #%B1\;mvn%d4\\t%0, %2"
10146 [(set_attr "conds" "use")
10147 (set_attr "insn" "mvn")
10148 (set_attr "length" "4,8,8")]
10151 (define_insn "*ifcompare_shift_move"
10152 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10154 (match_operator 6 "arm_comparison_operator"
10155 [(match_operand:SI 4 "s_register_operand" "r,r")
10156 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
10157 (match_operator:SI 7 "shift_operator"
10158 [(match_operand:SI 2 "s_register_operand" "r,r")
10159 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])
10160 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
10161 (clobber (reg:CC CC_REGNUM))]
10164 [(set_attr "conds" "clob")
10165 (set_attr "length" "8,12")]
10168 (define_insn "*if_shift_move"
10169 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10171 (match_operator 5 "arm_comparison_operator"
10172 [(match_operand 6 "cc_register" "") (const_int 0)])
10173 (match_operator:SI 4 "shift_operator"
10174 [(match_operand:SI 2 "s_register_operand" "r,r,r")
10175 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])
10176 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
10180 mov%D5\\t%0, %1\;mov%d5\\t%0, %2%S4
10181 mvn%D5\\t%0, #%B1\;mov%d5\\t%0, %2%S4"
10182 [(set_attr "conds" "use")
10183 (set_attr "shift" "2")
10184 (set_attr "length" "4,8,8")
10185 (set_attr "insn" "mov")
10186 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
10187 (const_string "alu_shift")
10188 (const_string "alu_shift_reg")))]
10191 (define_insn "*ifcompare_move_shift"
10192 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10194 (match_operator 6 "arm_comparison_operator"
10195 [(match_operand:SI 4 "s_register_operand" "r,r")
10196 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
10197 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
10198 (match_operator:SI 7 "shift_operator"
10199 [(match_operand:SI 2 "s_register_operand" "r,r")
10200 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])))
10201 (clobber (reg:CC CC_REGNUM))]
10204 [(set_attr "conds" "clob")
10205 (set_attr "length" "8,12")]
10208 (define_insn "*if_move_shift"
10209 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10211 (match_operator 5 "arm_comparison_operator"
10212 [(match_operand 6 "cc_register" "") (const_int 0)])
10213 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
10214 (match_operator:SI 4 "shift_operator"
10215 [(match_operand:SI 2 "s_register_operand" "r,r,r")
10216 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])))]
10220 mov%d5\\t%0, %1\;mov%D5\\t%0, %2%S4
10221 mvn%d5\\t%0, #%B1\;mov%D5\\t%0, %2%S4"
10222 [(set_attr "conds" "use")
10223 (set_attr "shift" "2")
10224 (set_attr "length" "4,8,8")
10225 (set_attr "insn" "mov")
10226 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
10227 (const_string "alu_shift")
10228 (const_string "alu_shift_reg")))]
10231 (define_insn "*ifcompare_shift_shift"
10232 [(set (match_operand:SI 0 "s_register_operand" "=r")
10234 (match_operator 7 "arm_comparison_operator"
10235 [(match_operand:SI 5 "s_register_operand" "r")
10236 (match_operand:SI 6 "arm_add_operand" "rIL")])
10237 (match_operator:SI 8 "shift_operator"
10238 [(match_operand:SI 1 "s_register_operand" "r")
10239 (match_operand:SI 2 "arm_rhs_operand" "rM")])
10240 (match_operator:SI 9 "shift_operator"
10241 [(match_operand:SI 3 "s_register_operand" "r")
10242 (match_operand:SI 4 "arm_rhs_operand" "rM")])))
10243 (clobber (reg:CC CC_REGNUM))]
10246 [(set_attr "conds" "clob")
10247 (set_attr "length" "12")]
10250 (define_insn "*if_shift_shift"
10251 [(set (match_operand:SI 0 "s_register_operand" "=r")
10253 (match_operator 5 "arm_comparison_operator"
10254 [(match_operand 8 "cc_register" "") (const_int 0)])
10255 (match_operator:SI 6 "shift_operator"
10256 [(match_operand:SI 1 "s_register_operand" "r")
10257 (match_operand:SI 2 "arm_rhs_operand" "rM")])
10258 (match_operator:SI 7 "shift_operator"
10259 [(match_operand:SI 3 "s_register_operand" "r")
10260 (match_operand:SI 4 "arm_rhs_operand" "rM")])))]
10262 "mov%d5\\t%0, %1%S6\;mov%D5\\t%0, %3%S7"
10263 [(set_attr "conds" "use")
10264 (set_attr "shift" "1")
10265 (set_attr "length" "8")
10266 (set_attr "insn" "mov")
10267 (set (attr "type") (if_then_else
10268 (and (match_operand 2 "const_int_operand" "")
10269 (match_operand 4 "const_int_operand" ""))
10270 (const_string "alu_shift")
10271 (const_string "alu_shift_reg")))]
10274 (define_insn "*ifcompare_not_arith"
10275 [(set (match_operand:SI 0 "s_register_operand" "=r")
10277 (match_operator 6 "arm_comparison_operator"
10278 [(match_operand:SI 4 "s_register_operand" "r")
10279 (match_operand:SI 5 "arm_add_operand" "rIL")])
10280 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
10281 (match_operator:SI 7 "shiftable_operator"
10282 [(match_operand:SI 2 "s_register_operand" "r")
10283 (match_operand:SI 3 "arm_rhs_operand" "rI")])))
10284 (clobber (reg:CC CC_REGNUM))]
10287 [(set_attr "conds" "clob")
10288 (set_attr "length" "12")]
10291 (define_insn "*if_not_arith"
10292 [(set (match_operand:SI 0 "s_register_operand" "=r")
10294 (match_operator 5 "arm_comparison_operator"
10295 [(match_operand 4 "cc_register" "") (const_int 0)])
10296 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
10297 (match_operator:SI 6 "shiftable_operator"
10298 [(match_operand:SI 2 "s_register_operand" "r")
10299 (match_operand:SI 3 "arm_rhs_operand" "rI")])))]
10301 "mvn%d5\\t%0, %1\;%I6%D5\\t%0, %2, %3"
10302 [(set_attr "conds" "use")
10303 (set_attr "insn" "mvn")
10304 (set_attr "length" "8")]
10307 (define_insn "*ifcompare_arith_not"
10308 [(set (match_operand:SI 0 "s_register_operand" "=r")
10310 (match_operator 6 "arm_comparison_operator"
10311 [(match_operand:SI 4 "s_register_operand" "r")
10312 (match_operand:SI 5 "arm_add_operand" "rIL")])
10313 (match_operator:SI 7 "shiftable_operator"
10314 [(match_operand:SI 2 "s_register_operand" "r")
10315 (match_operand:SI 3 "arm_rhs_operand" "rI")])
10316 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))
10317 (clobber (reg:CC CC_REGNUM))]
10320 [(set_attr "conds" "clob")
10321 (set_attr "length" "12")]
10324 (define_insn "*if_arith_not"
10325 [(set (match_operand:SI 0 "s_register_operand" "=r")
10327 (match_operator 5 "arm_comparison_operator"
10328 [(match_operand 4 "cc_register" "") (const_int 0)])
10329 (match_operator:SI 6 "shiftable_operator"
10330 [(match_operand:SI 2 "s_register_operand" "r")
10331 (match_operand:SI 3 "arm_rhs_operand" "rI")])
10332 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))]
10334 "mvn%D5\\t%0, %1\;%I6%d5\\t%0, %2, %3"
10335 [(set_attr "conds" "use")
10336 (set_attr "insn" "mvn")
10337 (set_attr "length" "8")]
10340 (define_insn "*ifcompare_neg_move"
10341 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10343 (match_operator 5 "arm_comparison_operator"
10344 [(match_operand:SI 3 "s_register_operand" "r,r")
10345 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10346 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))
10347 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
10348 (clobber (reg:CC CC_REGNUM))]
10351 [(set_attr "conds" "clob")
10352 (set_attr "length" "8,12")]
10355 (define_insn "*if_neg_move"
10356 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10358 (match_operator 4 "arm_comparison_operator"
10359 [(match_operand 3 "cc_register" "") (const_int 0)])
10360 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
10361 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
10364 rsb%d4\\t%0, %2, #0
10365 mov%D4\\t%0, %1\;rsb%d4\\t%0, %2, #0
10366 mvn%D4\\t%0, #%B1\;rsb%d4\\t%0, %2, #0"
10367 [(set_attr "conds" "use")
10368 (set_attr "length" "4,8,8")]
10371 (define_insn "*ifcompare_move_neg"
10372 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10374 (match_operator 5 "arm_comparison_operator"
10375 [(match_operand:SI 3 "s_register_operand" "r,r")
10376 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10377 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
10378 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))))
10379 (clobber (reg:CC CC_REGNUM))]
10382 [(set_attr "conds" "clob")
10383 (set_attr "length" "8,12")]
10386 (define_insn "*if_move_neg"
10387 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10389 (match_operator 4 "arm_comparison_operator"
10390 [(match_operand 3 "cc_register" "") (const_int 0)])
10391 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
10392 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
10395 rsb%D4\\t%0, %2, #0
10396 mov%d4\\t%0, %1\;rsb%D4\\t%0, %2, #0
10397 mvn%d4\\t%0, #%B1\;rsb%D4\\t%0, %2, #0"
10398 [(set_attr "conds" "use")
10399 (set_attr "length" "4,8,8")]
10402 (define_insn "*arith_adjacentmem"
10403 [(set (match_operand:SI 0 "s_register_operand" "=r")
10404 (match_operator:SI 1 "shiftable_operator"
10405 [(match_operand:SI 2 "memory_operand" "m")
10406 (match_operand:SI 3 "memory_operand" "m")]))
10407 (clobber (match_scratch:SI 4 "=r"))]
10408 "TARGET_ARM && adjacent_mem_locations (operands[2], operands[3])"
10414 HOST_WIDE_INT val1 = 0, val2 = 0;
10416 if (REGNO (operands[0]) > REGNO (operands[4]))
10418 ldm[1] = operands[4];
10419 ldm[2] = operands[0];
10423 ldm[1] = operands[0];
10424 ldm[2] = operands[4];
10427 base_reg = XEXP (operands[2], 0);
10429 if (!REG_P (base_reg))
10431 val1 = INTVAL (XEXP (base_reg, 1));
10432 base_reg = XEXP (base_reg, 0);
10435 if (!REG_P (XEXP (operands[3], 0)))
10436 val2 = INTVAL (XEXP (XEXP (operands[3], 0), 1));
10438 arith[0] = operands[0];
10439 arith[3] = operands[1];
10453 if (val1 !=0 && val2 != 0)
10457 if (val1 == 4 || val2 == 4)
10458 /* Other val must be 8, since we know they are adjacent and neither
10460 output_asm_insn (\"ldm%(ib%)\\t%0, {%1, %2}\", ldm);
10461 else if (const_ok_for_arm (val1) || const_ok_for_arm (-val1))
10463 ldm[0] = ops[0] = operands[4];
10465 ops[2] = GEN_INT (val1);
10466 output_add_immediate (ops);
10468 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
10470 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
10474 /* Offset is out of range for a single add, so use two ldr. */
10477 ops[2] = GEN_INT (val1);
10478 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
10480 ops[2] = GEN_INT (val2);
10481 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
10484 else if (val1 != 0)
10487 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
10489 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
10494 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
10496 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
10498 output_asm_insn (\"%I3%?\\t%0, %1, %2\", arith);
10501 [(set_attr "length" "12")
10502 (set_attr "predicable" "yes")
10503 (set_attr "type" "load1")]
10506 ; This pattern is never tried by combine, so do it as a peephole
10509 [(set (match_operand:SI 0 "arm_general_register_operand" "")
10510 (match_operand:SI 1 "arm_general_register_operand" ""))
10511 (set (reg:CC CC_REGNUM)
10512 (compare:CC (match_dup 1) (const_int 0)))]
10514 [(parallel [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 1) (const_int 0)))
10515 (set (match_dup 0) (match_dup 1))])]
10520 [(set (match_operand:SI 0 "s_register_operand" "")
10521 (and:SI (ge:SI (match_operand:SI 1 "s_register_operand" "")
10523 (neg:SI (match_operator:SI 2 "arm_comparison_operator"
10524 [(match_operand:SI 3 "s_register_operand" "")
10525 (match_operand:SI 4 "arm_rhs_operand" "")]))))
10526 (clobber (match_operand:SI 5 "s_register_operand" ""))]
10528 [(set (match_dup 5) (not:SI (ashiftrt:SI (match_dup 1) (const_int 31))))
10529 (set (match_dup 0) (and:SI (match_op_dup 2 [(match_dup 3) (match_dup 4)])
10534 ;; This split can be used because CC_Z mode implies that the following
10535 ;; branch will be an equality, or an unsigned inequality, so the sign
10536 ;; extension is not needed.
10539 [(set (reg:CC_Z CC_REGNUM)
10541 (ashift:SI (subreg:SI (match_operand:QI 0 "memory_operand" "") 0)
10543 (match_operand 1 "const_int_operand" "")))
10544 (clobber (match_scratch:SI 2 ""))]
10546 && (((unsigned HOST_WIDE_INT) INTVAL (operands[1]))
10547 == (((unsigned HOST_WIDE_INT) INTVAL (operands[1])) >> 24) << 24)"
10548 [(set (match_dup 2) (zero_extend:SI (match_dup 0)))
10549 (set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 1)))]
10551 operands[1] = GEN_INT (((unsigned long) INTVAL (operands[1])) >> 24);
10554 ;; ??? Check the patterns above for Thumb-2 usefulness
10556 (define_expand "prologue"
10557 [(clobber (const_int 0))]
10560 arm_expand_prologue ();
10562 thumb1_expand_prologue ();
10567 (define_expand "epilogue"
10568 [(clobber (const_int 0))]
10571 if (crtl->calls_eh_return)
10572 emit_insn (gen_prologue_use (gen_rtx_REG (Pmode, 2)));
10574 thumb1_expand_epilogue ();
10575 else if (USE_RETURN_INSN (FALSE))
10577 emit_jump_insn (gen_return ());
10580 emit_jump_insn (gen_rtx_UNSPEC_VOLATILE (VOIDmode,
10581 gen_rtvec (1, ret_rtx), VUNSPEC_EPILOGUE));
10586 (define_insn "prologue_thumb1_interwork"
10587 [(unspec_volatile [(const_int 0)] VUNSPEC_THUMB1_INTERWORK)]
10589 "* return thumb1_output_interwork ();"
10590 [(set_attr "length" "8")]
10593 ;; Note - although unspec_volatile's USE all hard registers,
10594 ;; USEs are ignored after relaod has completed. Thus we need
10595 ;; to add an unspec of the link register to ensure that flow
10596 ;; does not think that it is unused by the sibcall branch that
10597 ;; will replace the standard function epilogue.
10598 (define_insn "sibcall_epilogue"
10599 [(parallel [(unspec:SI [(reg:SI LR_REGNUM)] UNSPEC_PROLOGUE_USE)
10600 (unspec_volatile [(return)] VUNSPEC_EPILOGUE)])]
10603 if (use_return_insn (FALSE, next_nonnote_insn (insn)))
10604 return output_return_instruction (const_true_rtx, FALSE, FALSE);
10605 return arm_output_epilogue (next_nonnote_insn (insn));
10607 ;; Length is absolute worst case
10608 [(set_attr "length" "44")
10609 (set_attr "type" "block")
10610 ;; We don't clobber the conditions, but the potential length of this
10611 ;; operation is sufficient to make conditionalizing the sequence
10612 ;; unlikely to be profitable.
10613 (set_attr "conds" "clob")]
10616 (define_insn "*epilogue_insns"
10617 [(unspec_volatile [(return)] VUNSPEC_EPILOGUE)]
10621 return arm_output_epilogue (NULL);
10622 else /* TARGET_THUMB1 */
10623 return thumb_unexpanded_epilogue ();
10625 ; Length is absolute worst case
10626 [(set_attr "length" "44")
10627 (set_attr "type" "block")
10628 ;; We don't clobber the conditions, but the potential length of this
10629 ;; operation is sufficient to make conditionalizing the sequence
10630 ;; unlikely to be profitable.
10631 (set_attr "conds" "clob")]
10634 (define_expand "eh_epilogue"
10635 [(use (match_operand:SI 0 "register_operand" ""))
10636 (use (match_operand:SI 1 "register_operand" ""))
10637 (use (match_operand:SI 2 "register_operand" ""))]
10641 cfun->machine->eh_epilogue_sp_ofs = operands[1];
10642 if (GET_CODE (operands[2]) != REG || REGNO (operands[2]) != 2)
10644 rtx ra = gen_rtx_REG (Pmode, 2);
10646 emit_move_insn (ra, operands[2]);
10649 /* This is a hack -- we may have crystalized the function type too
10651 cfun->machine->func_type = 0;
10655 ;; This split is only used during output to reduce the number of patterns
10656 ;; that need assembler instructions adding to them. We allowed the setting
10657 ;; of the conditions to be implicit during rtl generation so that
10658 ;; the conditional compare patterns would work. However this conflicts to
10659 ;; some extent with the conditional data operations, so we have to split them
10662 ;; ??? Need to audit these splitters for Thumb-2. Why isn't normal
10663 ;; conditional execution sufficient?
10666 [(set (match_operand:SI 0 "s_register_operand" "")
10667 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10668 [(match_operand 2 "" "") (match_operand 3 "" "")])
10670 (match_operand 4 "" "")))
10671 (clobber (reg:CC CC_REGNUM))]
10672 "TARGET_ARM && reload_completed"
10673 [(set (match_dup 5) (match_dup 6))
10674 (cond_exec (match_dup 7)
10675 (set (match_dup 0) (match_dup 4)))]
10678 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10679 operands[2], operands[3]);
10680 enum rtx_code rc = GET_CODE (operands[1]);
10682 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
10683 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10684 if (mode == CCFPmode || mode == CCFPEmode)
10685 rc = reverse_condition_maybe_unordered (rc);
10687 rc = reverse_condition (rc);
10689 operands[7] = gen_rtx_fmt_ee (rc, VOIDmode, operands[5], const0_rtx);
10694 [(set (match_operand:SI 0 "s_register_operand" "")
10695 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10696 [(match_operand 2 "" "") (match_operand 3 "" "")])
10697 (match_operand 4 "" "")
10699 (clobber (reg:CC CC_REGNUM))]
10700 "TARGET_ARM && reload_completed"
10701 [(set (match_dup 5) (match_dup 6))
10702 (cond_exec (match_op_dup 1 [(match_dup 5) (const_int 0)])
10703 (set (match_dup 0) (match_dup 4)))]
10706 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10707 operands[2], operands[3]);
10709 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
10710 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10715 [(set (match_operand:SI 0 "s_register_operand" "")
10716 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10717 [(match_operand 2 "" "") (match_operand 3 "" "")])
10718 (match_operand 4 "" "")
10719 (match_operand 5 "" "")))
10720 (clobber (reg:CC CC_REGNUM))]
10721 "TARGET_ARM && reload_completed"
10722 [(set (match_dup 6) (match_dup 7))
10723 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
10724 (set (match_dup 0) (match_dup 4)))
10725 (cond_exec (match_dup 8)
10726 (set (match_dup 0) (match_dup 5)))]
10729 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10730 operands[2], operands[3]);
10731 enum rtx_code rc = GET_CODE (operands[1]);
10733 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10734 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10735 if (mode == CCFPmode || mode == CCFPEmode)
10736 rc = reverse_condition_maybe_unordered (rc);
10738 rc = reverse_condition (rc);
10740 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
10745 [(set (match_operand:SI 0 "s_register_operand" "")
10746 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10747 [(match_operand:SI 2 "s_register_operand" "")
10748 (match_operand:SI 3 "arm_add_operand" "")])
10749 (match_operand:SI 4 "arm_rhs_operand" "")
10751 (match_operand:SI 5 "s_register_operand" ""))))
10752 (clobber (reg:CC CC_REGNUM))]
10753 "TARGET_ARM && reload_completed"
10754 [(set (match_dup 6) (match_dup 7))
10755 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
10756 (set (match_dup 0) (match_dup 4)))
10757 (cond_exec (match_dup 8)
10758 (set (match_dup 0) (not:SI (match_dup 5))))]
10761 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10762 operands[2], operands[3]);
10763 enum rtx_code rc = GET_CODE (operands[1]);
10765 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10766 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10767 if (mode == CCFPmode || mode == CCFPEmode)
10768 rc = reverse_condition_maybe_unordered (rc);
10770 rc = reverse_condition (rc);
10772 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
10776 (define_insn "*cond_move_not"
10777 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10778 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
10779 [(match_operand 3 "cc_register" "") (const_int 0)])
10780 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10782 (match_operand:SI 2 "s_register_operand" "r,r"))))]
10786 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2"
10787 [(set_attr "conds" "use")
10788 (set_attr "insn" "mvn")
10789 (set_attr "length" "4,8")]
10792 ;; The next two patterns occur when an AND operation is followed by a
10793 ;; scc insn sequence
10795 (define_insn "*sign_extract_onebit"
10796 [(set (match_operand:SI 0 "s_register_operand" "=r")
10797 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
10799 (match_operand:SI 2 "const_int_operand" "n")))
10800 (clobber (reg:CC CC_REGNUM))]
10803 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
10804 output_asm_insn (\"ands\\t%0, %1, %2\", operands);
10805 return \"mvnne\\t%0, #0\";
10807 [(set_attr "conds" "clob")
10808 (set_attr "length" "8")]
10811 (define_insn "*not_signextract_onebit"
10812 [(set (match_operand:SI 0 "s_register_operand" "=r")
10814 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
10816 (match_operand:SI 2 "const_int_operand" "n"))))
10817 (clobber (reg:CC CC_REGNUM))]
10820 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
10821 output_asm_insn (\"tst\\t%1, %2\", operands);
10822 output_asm_insn (\"mvneq\\t%0, #0\", operands);
10823 return \"movne\\t%0, #0\";
10825 [(set_attr "conds" "clob")
10826 (set_attr "length" "12")]
10828 ;; ??? The above patterns need auditing for Thumb-2
10830 ;; Push multiple registers to the stack. Registers are in parallel (use ...)
10831 ;; expressions. For simplicity, the first register is also in the unspec
10833 ;; To avoid the usage of GNU extension, the length attribute is computed
10834 ;; in a C function arm_attr_length_push_multi.
10835 (define_insn "*push_multi"
10836 [(match_parallel 2 "multi_register_push"
10837 [(set (match_operand:BLK 0 "push_mult_memory_operand" "")
10838 (unspec:BLK [(match_operand:SI 1 "s_register_operand" "")]
10839 UNSPEC_PUSH_MULT))])]
10843 int num_saves = XVECLEN (operands[2], 0);
10845 /* For the StrongARM at least it is faster to
10846 use STR to store only a single register.
10847 In Thumb mode always use push, and the assembler will pick
10848 something appropriate. */
10849 if (num_saves == 1 && TARGET_ARM)
10850 output_asm_insn (\"str%?\\t%1, [%m0, #-4]!\", operands);
10857 strcpy (pattern, \"stm%(fd%)\\t%m0!, {%1\");
10858 else if (TARGET_THUMB2)
10859 strcpy (pattern, \"push%?\\t{%1\");
10861 strcpy (pattern, \"push\\t{%1\");
10863 for (i = 1; i < num_saves; i++)
10865 strcat (pattern, \", %|\");
10867 reg_names[REGNO (XEXP (XVECEXP (operands[2], 0, i), 0))]);
10870 strcat (pattern, \"}\");
10871 output_asm_insn (pattern, operands);
10876 [(set_attr "type" "store4")
10877 (set (attr "length")
10878 (symbol_ref "arm_attr_length_push_multi (operands[2], operands[1])"))]
10881 (define_insn "stack_tie"
10882 [(set (mem:BLK (scratch))
10883 (unspec:BLK [(match_operand:SI 0 "s_register_operand" "rk")
10884 (match_operand:SI 1 "s_register_operand" "rk")]
10888 [(set_attr "length" "0")]
10891 ;; Similarly for the floating point registers
10892 (define_insn "*push_fp_multi"
10893 [(match_parallel 2 "multi_register_push"
10894 [(set (match_operand:BLK 0 "memory_operand" "=m")
10895 (unspec:BLK [(match_operand:XF 1 "f_register_operand" "")]
10896 UNSPEC_PUSH_MULT))])]
10897 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
10902 sprintf (pattern, \"sfm%%(fd%%)\\t%%1, %d, [%%m0]!\", XVECLEN (operands[2], 0));
10903 output_asm_insn (pattern, operands);
10906 [(set_attr "type" "f_fpa_store")]
10909 ;; Special patterns for dealing with the constant pool
10911 (define_insn "align_4"
10912 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN)]
10915 assemble_align (32);
10920 (define_insn "align_8"
10921 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN8)]
10924 assemble_align (64);
10929 (define_insn "consttable_end"
10930 [(unspec_volatile [(const_int 0)] VUNSPEC_POOL_END)]
10933 making_const_table = FALSE;
10938 (define_insn "consttable_1"
10939 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_1)]
10942 making_const_table = TRUE;
10943 assemble_integer (operands[0], 1, BITS_PER_WORD, 1);
10944 assemble_zeros (3);
10947 [(set_attr "length" "4")]
10950 (define_insn "consttable_2"
10951 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_2)]
10954 making_const_table = TRUE;
10955 gcc_assert (GET_MODE_CLASS (GET_MODE (operands[0])) != MODE_FLOAT);
10956 assemble_integer (operands[0], 2, BITS_PER_WORD, 1);
10957 assemble_zeros (2);
10960 [(set_attr "length" "4")]
10963 (define_insn "consttable_4"
10964 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_4)]
10968 rtx x = operands[0];
10969 making_const_table = TRUE;
10970 switch (GET_MODE_CLASS (GET_MODE (x)))
10973 if (GET_MODE (x) == HFmode)
10974 arm_emit_fp16_const (x);
10978 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
10979 assemble_real (r, GET_MODE (x), BITS_PER_WORD);
10983 /* XXX: Sometimes gcc does something really dumb and ends up with
10984 a HIGH in a constant pool entry, usually because it's trying to
10985 load into a VFP register. We know this will always be used in
10986 combination with a LO_SUM which ignores the high bits, so just
10987 strip off the HIGH. */
10988 if (GET_CODE (x) == HIGH)
10990 assemble_integer (x, 4, BITS_PER_WORD, 1);
10991 mark_symbol_refs_as_used (x);
10996 [(set_attr "length" "4")]
10999 (define_insn "consttable_8"
11000 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_8)]
11004 making_const_table = TRUE;
11005 switch (GET_MODE_CLASS (GET_MODE (operands[0])))
11010 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[0]);
11011 assemble_real (r, GET_MODE (operands[0]), BITS_PER_WORD);
11015 assemble_integer (operands[0], 8, BITS_PER_WORD, 1);
11020 [(set_attr "length" "8")]
11023 (define_insn "consttable_16"
11024 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_16)]
11028 making_const_table = TRUE;
11029 switch (GET_MODE_CLASS (GET_MODE (operands[0])))
11034 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[0]);
11035 assemble_real (r, GET_MODE (operands[0]), BITS_PER_WORD);
11039 assemble_integer (operands[0], 16, BITS_PER_WORD, 1);
11044 [(set_attr "length" "16")]
11047 ;; Miscellaneous Thumb patterns
11049 (define_expand "tablejump"
11050 [(parallel [(set (pc) (match_operand:SI 0 "register_operand" ""))
11051 (use (label_ref (match_operand 1 "" "")))])]
11056 /* Hopefully, CSE will eliminate this copy. */
11057 rtx reg1 = copy_addr_to_reg (gen_rtx_LABEL_REF (Pmode, operands[1]));
11058 rtx reg2 = gen_reg_rtx (SImode);
11060 emit_insn (gen_addsi3 (reg2, operands[0], reg1));
11061 operands[0] = reg2;
11066 ;; NB never uses BX.
11067 (define_insn "*thumb1_tablejump"
11068 [(set (pc) (match_operand:SI 0 "register_operand" "l*r"))
11069 (use (label_ref (match_operand 1 "" "")))]
11072 [(set_attr "length" "2")]
11075 ;; V5 Instructions,
11077 (define_insn "clzsi2"
11078 [(set (match_operand:SI 0 "s_register_operand" "=r")
11079 (clz:SI (match_operand:SI 1 "s_register_operand" "r")))]
11080 "TARGET_32BIT && arm_arch5"
11082 [(set_attr "predicable" "yes")
11083 (set_attr "insn" "clz")])
11085 (define_insn "rbitsi2"
11086 [(set (match_operand:SI 0 "s_register_operand" "=r")
11087 (unspec:SI [(match_operand:SI 1 "s_register_operand" "r")] UNSPEC_RBIT))]
11088 "TARGET_32BIT && arm_arch_thumb2"
11090 [(set_attr "predicable" "yes")
11091 (set_attr "insn" "clz")])
11093 (define_expand "ctzsi2"
11094 [(set (match_operand:SI 0 "s_register_operand" "")
11095 (ctz:SI (match_operand:SI 1 "s_register_operand" "")))]
11096 "TARGET_32BIT && arm_arch_thumb2"
11099 rtx tmp = gen_reg_rtx (SImode);
11100 emit_insn (gen_rbitsi2 (tmp, operands[1]));
11101 emit_insn (gen_clzsi2 (operands[0], tmp));
11107 ;; V5E instructions.
11109 (define_insn "prefetch"
11110 [(prefetch (match_operand:SI 0 "address_operand" "p")
11111 (match_operand:SI 1 "" "")
11112 (match_operand:SI 2 "" ""))]
11113 "TARGET_32BIT && arm_arch5e"
11116 ;; General predication pattern
11119 [(match_operator 0 "arm_comparison_operator"
11120 [(match_operand 1 "cc_register" "")
11126 (define_insn "prologue_use"
11127 [(unspec:SI [(match_operand:SI 0 "register_operand" "")] UNSPEC_PROLOGUE_USE)]
11129 "%@ %0 needed for prologue"
11130 [(set_attr "length" "0")]
11134 ;; Patterns for exception handling
11136 (define_expand "eh_return"
11137 [(use (match_operand 0 "general_operand" ""))]
11142 emit_insn (gen_arm_eh_return (operands[0]));
11144 emit_insn (gen_thumb_eh_return (operands[0]));
11149 ;; We can't expand this before we know where the link register is stored.
11150 (define_insn_and_split "arm_eh_return"
11151 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "r")]
11153 (clobber (match_scratch:SI 1 "=&r"))]
11156 "&& reload_completed"
11160 arm_set_return_address (operands[0], operands[1]);
11165 (define_insn_and_split "thumb_eh_return"
11166 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "l")]
11168 (clobber (match_scratch:SI 1 "=&l"))]
11171 "&& reload_completed"
11175 thumb_set_return_address (operands[0], operands[1]);
11183 (define_insn "load_tp_hard"
11184 [(set (match_operand:SI 0 "register_operand" "=r")
11185 (unspec:SI [(const_int 0)] UNSPEC_TLS))]
11187 "mrc%?\\tp15, 0, %0, c13, c0, 3\\t@ load_tp_hard"
11188 [(set_attr "predicable" "yes")]
11191 ;; Doesn't clobber R1-R3. Must use r0 for the first operand.
11192 (define_insn "load_tp_soft"
11193 [(set (reg:SI 0) (unspec:SI [(const_int 0)] UNSPEC_TLS))
11194 (clobber (reg:SI LR_REGNUM))
11195 (clobber (reg:SI IP_REGNUM))
11196 (clobber (reg:CC CC_REGNUM))]
11198 "bl\\t__aeabi_read_tp\\t@ load_tp_soft"
11199 [(set_attr "conds" "clob")]
11202 ;; tls descriptor call
11203 (define_insn "tlscall"
11204 [(set (reg:SI R0_REGNUM)
11205 (unspec:SI [(reg:SI R0_REGNUM)
11206 (match_operand:SI 0 "" "X")
11207 (match_operand 1 "" "")] UNSPEC_TLS))
11208 (clobber (reg:SI R1_REGNUM))
11209 (clobber (reg:SI LR_REGNUM))
11210 (clobber (reg:SI CC_REGNUM))]
11213 targetm.asm_out.internal_label (asm_out_file, "LPIC",
11214 INTVAL (operands[1]));
11215 return "bl\\t%c0(tlscall)";
11217 [(set_attr "conds" "clob")
11218 (set_attr "length" "4")]
11223 ;; We only care about the lower 16 bits of the constant
11224 ;; being inserted into the upper 16 bits of the register.
11225 (define_insn "*arm_movtas_ze"
11226 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
11229 (match_operand:SI 1 "const_int_operand" ""))]
11232 [(set_attr "predicable" "yes")
11233 (set_attr "length" "4")]
11236 (define_insn "*arm_rev"
11237 [(set (match_operand:SI 0 "s_register_operand" "=r")
11238 (bswap:SI (match_operand:SI 1 "s_register_operand" "r")))]
11239 "TARGET_32BIT && arm_arch6"
11241 [(set_attr "predicable" "yes")
11242 (set_attr "length" "4")]
11245 (define_insn "*thumb1_rev"
11246 [(set (match_operand:SI 0 "s_register_operand" "=l")
11247 (bswap:SI (match_operand:SI 1 "s_register_operand" "l")))]
11248 "TARGET_THUMB1 && arm_arch6"
11250 [(set_attr "length" "2")]
11253 (define_expand "arm_legacy_rev"
11254 [(set (match_operand:SI 2 "s_register_operand" "")
11255 (xor:SI (rotatert:SI (match_operand:SI 1 "s_register_operand" "")
11259 (lshiftrt:SI (match_dup 2)
11261 (set (match_operand:SI 3 "s_register_operand" "")
11262 (rotatert:SI (match_dup 1)
11265 (and:SI (match_dup 2)
11266 (const_int -65281)))
11267 (set (match_operand:SI 0 "s_register_operand" "")
11268 (xor:SI (match_dup 3)
11274 ;; Reuse temporaries to keep register pressure down.
11275 (define_expand "thumb_legacy_rev"
11276 [(set (match_operand:SI 2 "s_register_operand" "")
11277 (ashift:SI (match_operand:SI 1 "s_register_operand" "")
11279 (set (match_operand:SI 3 "s_register_operand" "")
11280 (lshiftrt:SI (match_dup 1)
11283 (ior:SI (match_dup 3)
11285 (set (match_operand:SI 4 "s_register_operand" "")
11287 (set (match_operand:SI 5 "s_register_operand" "")
11288 (rotatert:SI (match_dup 1)
11291 (ashift:SI (match_dup 5)
11294 (lshiftrt:SI (match_dup 5)
11297 (ior:SI (match_dup 5)
11300 (rotatert:SI (match_dup 5)
11302 (set (match_operand:SI 0 "s_register_operand" "")
11303 (ior:SI (match_dup 5)
11309 (define_expand "bswapsi2"
11310 [(set (match_operand:SI 0 "s_register_operand" "=r")
11311 (bswap:SI (match_operand:SI 1 "s_register_operand" "r")))]
11312 "TARGET_EITHER && (arm_arch6 || !optimize_size)"
11316 rtx op2 = gen_reg_rtx (SImode);
11317 rtx op3 = gen_reg_rtx (SImode);
11321 rtx op4 = gen_reg_rtx (SImode);
11322 rtx op5 = gen_reg_rtx (SImode);
11324 emit_insn (gen_thumb_legacy_rev (operands[0], operands[1],
11325 op2, op3, op4, op5));
11329 emit_insn (gen_arm_legacy_rev (operands[0], operands[1],
11338 ;; Load the load/store multiple patterns
11339 (include "ldmstm.md")
11340 ;; Load the FPA co-processor patterns
11342 ;; Load the Maverick co-processor patterns
11343 (include "cirrus.md")
11344 ;; Vector bits common to IWMMXT and Neon
11345 (include "vec-common.md")
11346 ;; Load the Intel Wireless Multimedia Extension patterns
11347 (include "iwmmxt.md")
11348 ;; Load the VFP co-processor patterns
11350 ;; Thumb-2 patterns
11351 (include "thumb2.md")
11353 (include "neon.md")
11354 ;; Synchronization Primitives
11355 (include "sync.md")
11356 ;; Fixed-point patterns
11357 (include "arm-fixed.md")