1 ;;- Machine description for ARM for GNU compiler
2 ;; Copyright 1991, 1993, 1994, 1995, 1996, 1996, 1997, 1998, 1999, 2000,
3 ;; 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
4 ;; Free Software Foundation, Inc.
5 ;; Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
6 ;; and Martin Simmons (@harleqn.co.uk).
7 ;; More major hacks by Richard Earnshaw (rearnsha@arm.com).
9 ;; This file is part of GCC.
11 ;; GCC is free software; you can redistribute it and/or modify it
12 ;; under the terms of the GNU General Public License as published
13 ;; by the Free Software Foundation; either version 3, or (at your
14 ;; option) any later version.
16 ;; GCC is distributed in the hope that it will be useful, but WITHOUT
17 ;; ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
18 ;; or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
19 ;; License for more details.
21 ;; You should have received a copy of the GNU General Public License
22 ;; along with GCC; see the file COPYING3. If not see
23 ;; <http://www.gnu.org/licenses/>.
25 ;;- See file "rtl.def" for documentation on define_insn, match_*, et. al.
28 ;;---------------------------------------------------------------------------
33 [(R0_REGNUM 0) ; First CORE register
34 (IP_REGNUM 12) ; Scratch register
35 (SP_REGNUM 13) ; Stack pointer
36 (LR_REGNUM 14) ; Return address register
37 (PC_REGNUM 15) ; Program counter
38 (CC_REGNUM 24) ; Condition code pseudo register
39 (LAST_ARM_REGNUM 15) ;
40 (FPA_F0_REGNUM 16) ; FIRST_FPA_REGNUM
41 (FPA_F7_REGNUM 23) ; LAST_FPA_REGNUM
44 ;; 3rd operand to select_dominance_cc_mode
53 ;; Note: sin and cos are no-longer used.
54 ;; Unspec constants for Neon are defined in neon.md.
57 [(UNSPEC_SIN 0) ; `sin' operation (MODE_FLOAT):
58 ; operand 0 is the result,
59 ; operand 1 the parameter.
60 (UNPSEC_COS 1) ; `cos' operation (MODE_FLOAT):
61 ; operand 0 is the result,
62 ; operand 1 the parameter.
63 (UNSPEC_PUSH_MULT 2) ; `push multiple' operation:
64 ; operand 0 is the first register,
65 ; subsequent registers are in parallel (use ...)
67 (UNSPEC_PIC_SYM 3) ; A symbol that has been treated properly for pic
68 ; usage, that is, we will add the pic_register
69 ; value to it before trying to dereference it.
70 (UNSPEC_PIC_BASE 4) ; Add PC and all but the last operand together,
71 ; The last operand is the number of a PIC_LABEL
72 ; that points at the containing instruction.
73 (UNSPEC_PRLG_STK 5) ; A special barrier that prevents frame accesses
74 ; being scheduled before the stack adjustment insn.
75 (UNSPEC_PROLOGUE_USE 6) ; As USE insns are not meaningful after reload,
76 ; this unspec is used to prevent the deletion of
77 ; instructions setting registers for EH handling
78 ; and stack frame generation. Operand 0 is the
80 (UNSPEC_CHECK_ARCH 7); Set CCs to indicate 26-bit or 32-bit mode.
81 (UNSPEC_WSHUFH 8) ; Used by the intrinsic form of the iWMMXt WSHUFH instruction.
82 (UNSPEC_WACC 9) ; Used by the intrinsic form of the iWMMXt WACC instruction.
83 (UNSPEC_TMOVMSK 10) ; Used by the intrinsic form of the iWMMXt TMOVMSK instruction.
84 (UNSPEC_WSAD 11) ; Used by the intrinsic form of the iWMMXt WSAD instruction.
85 (UNSPEC_WSADZ 12) ; Used by the intrinsic form of the iWMMXt WSADZ instruction.
86 (UNSPEC_WMACS 13) ; Used by the intrinsic form of the iWMMXt WMACS instruction.
87 (UNSPEC_WMACU 14) ; Used by the intrinsic form of the iWMMXt WMACU instruction.
88 (UNSPEC_WMACSZ 15) ; Used by the intrinsic form of the iWMMXt WMACSZ instruction.
89 (UNSPEC_WMACUZ 16) ; Used by the intrinsic form of the iWMMXt WMACUZ instruction.
90 (UNSPEC_CLRDI 17) ; Used by the intrinsic form of the iWMMXt CLRDI instruction.
91 (UNSPEC_WMADDS 18) ; Used by the intrinsic form of the iWMMXt WMADDS instruction.
92 (UNSPEC_WMADDU 19) ; Used by the intrinsic form of the iWMMXt WMADDU instruction.
93 (UNSPEC_TLS 20) ; A symbol that has been treated properly for TLS usage.
94 (UNSPEC_PIC_LABEL 21) ; A label used for PIC access that does not appear in the
96 (UNSPEC_STACK_ALIGN 22) ; Doubleword aligned stack pointer. Used to
97 ; generate correct unwind information.
98 (UNSPEC_PIC_OFFSET 23) ; A symbolic 12-bit OFFSET that has been treated
99 ; correctly for PIC usage.
100 (UNSPEC_GOTSYM_OFF 24) ; The offset of the start of the the GOT from a
101 ; a given symbolic address.
102 (UNSPEC_THUMB1_CASESI 25) ; A Thumb1 compressed dispatch-table call.
103 (UNSPEC_RBIT 26) ; rbit operation.
104 (UNSPEC_SYMBOL_OFFSET 27) ; The offset of the start of the symbol from
105 ; another symbolic address.
109 ;; UNSPEC_VOLATILE Usage:
112 [(VUNSPEC_BLOCKAGE 0) ; `blockage' insn to prevent scheduling across an
114 (VUNSPEC_EPILOGUE 1) ; `epilogue' insn, used to represent any part of the
115 ; instruction epilogue sequence that isn't expanded
116 ; into normal RTL. Used for both normal and sibcall
118 (VUNSPEC_ALIGN 2) ; `align' insn. Used at the head of a minipool table
119 ; for inlined constants.
120 (VUNSPEC_POOL_END 3) ; `end-of-table'. Used to mark the end of a minipool
122 (VUNSPEC_POOL_1 4) ; `pool-entry(1)'. An entry in the constant pool for
124 (VUNSPEC_POOL_2 5) ; `pool-entry(2)'. An entry in the constant pool for
126 (VUNSPEC_POOL_4 6) ; `pool-entry(4)'. An entry in the constant pool for
128 (VUNSPEC_POOL_8 7) ; `pool-entry(8)'. An entry in the constant pool for
130 (VUNSPEC_POOL_16 8) ; `pool-entry(16)'. An entry in the constant pool for
132 (VUNSPEC_TMRC 9) ; Used by the iWMMXt TMRC instruction.
133 (VUNSPEC_TMCR 10) ; Used by the iWMMXt TMCR instruction.
134 (VUNSPEC_ALIGN8 11) ; 8-byte alignment version of VUNSPEC_ALIGN
135 (VUNSPEC_WCMP_EQ 12) ; Used by the iWMMXt WCMPEQ instructions
136 (VUNSPEC_WCMP_GTU 13) ; Used by the iWMMXt WCMPGTU instructions
137 (VUNSPEC_WCMP_GT 14) ; Used by the iwMMXT WCMPGT instructions
138 (VUNSPEC_EH_RETURN 20); Use to override the return address for exception
143 ;;---------------------------------------------------------------------------
146 ; IS_THUMB is set to 'yes' when we are generating Thumb code, and 'no' when
147 ; generating ARM code. This is used to control the length of some insn
148 ; patterns that share the same RTL in both ARM and Thumb code.
149 (define_attr "is_thumb" "no,yes" (const (symbol_ref "thumb_code")))
151 ;; Operand number of an input operand that is shifted. Zero if the
152 ;; given instruction does not shift one of its input operands.
153 (define_attr "shift" "" (const_int 0))
155 ; Floating Point Unit. If we only have floating point emulation, then there
156 ; is no point in scheduling the floating point insns. (Well, for best
157 ; performance we should try and group them together).
158 (define_attr "fpu" "none,fpa,fpe2,fpe3,maverick,vfp"
159 (const (symbol_ref "arm_fpu_attr")))
161 ; LENGTH of an instruction (in bytes)
162 (define_attr "length" "" (const_int 4))
164 ; POOL_RANGE is how far away from a constant pool entry that this insn
165 ; can be placed. If the distance is zero, then this insn will never
166 ; reference the pool.
167 ; NEG_POOL_RANGE is nonzero for insns that can reference a constant pool entry
168 ; before its address.
169 (define_attr "pool_range" "" (const_int 0))
170 (define_attr "neg_pool_range" "" (const_int 0))
172 ; An assembler sequence may clobber the condition codes without us knowing.
173 ; If such an insn references the pool, then we have no way of knowing how,
174 ; so use the most conservative value for pool_range.
175 (define_asm_attributes
176 [(set_attr "conds" "clob")
177 (set_attr "length" "4")
178 (set_attr "pool_range" "250")])
180 ;; The instruction used to implement a particular pattern. This
181 ;; information is used by pipeline descriptions to provide accurate
182 ;; scheduling information.
185 "mov,mvn,smulxy,smlaxy,smlalxy,smulwy,smlawx,mul,muls,mla,mlas,umull,umulls,umlal,umlals,smull,smulls,smlal,smlals,smlawy,smuad,smuadx,smlad,smladx,smusd,smusdx,smlsd,smlsdx,smmul,smmulr,smmla,umaal,smlald,smlsld,clz,mrs,msr,xtab,sdiv,udiv,other"
186 (const_string "other"))
188 ; TYPE attribute is used to detect floating point instructions which, if
189 ; running on a co-processor can run in parallel with other, basic instructions
190 ; If write-buffer scheduling is enabled then it can also be used in the
191 ; scheduling of writes.
193 ; Classification of each insn
194 ; Note: vfp.md has different meanings for some of these, and some further
195 ; types as well. See that file for details.
196 ; alu any alu instruction that doesn't hit memory or fp
197 ; regs or have a shifted source operand
198 ; alu_shift any data instruction that doesn't hit memory or fp
199 ; regs, but has a source operand shifted by a constant
200 ; alu_shift_reg any data instruction that doesn't hit memory or fp
201 ; regs, but has a source operand shifted by a register value
202 ; mult a multiply instruction
203 ; block blockage insn, this blocks all functional units
204 ; float a floating point arithmetic operation (subject to expansion)
205 ; fdivd DFmode floating point division
206 ; fdivs SFmode floating point division
207 ; fmul Floating point multiply
208 ; ffmul Fast floating point multiply
209 ; farith Floating point arithmetic (4 cycle)
210 ; ffarith Fast floating point arithmetic (2 cycle)
211 ; float_em a floating point arithmetic operation that is normally emulated
212 ; even on a machine with an fpa.
213 ; f_load a floating point load from memory
214 ; f_store a floating point store to memory
215 ; f_load[sd] single/double load from memory
216 ; f_store[sd] single/double store to memory
217 ; f_flag a transfer of co-processor flags to the CPSR
218 ; f_mem_r a transfer of a floating point register to a real reg via mem
219 ; r_mem_f the reverse of f_mem_r
220 ; f_2_r fast transfer float to arm (no memory needed)
221 ; r_2_f fast transfer arm to float
222 ; f_cvt convert floating<->integral
224 ; call a subroutine call
225 ; load_byte load byte(s) from memory to arm registers
226 ; load1 load 1 word from memory to arm registers
227 ; load2 load 2 words from memory to arm registers
228 ; load3 load 3 words from memory to arm registers
229 ; load4 load 4 words from memory to arm registers
230 ; store store 1 word to memory from arm registers
231 ; store2 store 2 words
232 ; store3 store 3 words
233 ; store4 store 4 (or more) words
234 ; Additions for Cirrus Maverick co-processor:
235 ; mav_farith Floating point arithmetic (4 cycle)
236 ; mav_dmult Double multiplies (7 cycle)
240 "alu,alu_shift,alu_shift_reg,mult,block,float,fdivx,fdivd,fdivs,fmul,fmuls,fmuld,fmacs,fmacd,ffmul,farith,ffarith,f_flag,float_em,f_load,f_store,f_loads,f_loadd,f_stores,f_stored,f_mem_r,r_mem_f,f_2_r,r_2_f,f_cvt,branch,call,load_byte,load1,load2,load3,load4,store1,store2,store3,store4,mav_farith,mav_dmult,fconsts,fconstd,fadds,faddd,ffariths,ffarithd,fcmps,fcmpd,fcpys"
242 (eq_attr "insn" "smulxy,smlaxy,smlalxy,smulwy,smlawx,mul,muls,mla,mlas,umull,umulls,umlal,umlals,smull,smulls,smlal,smlals")
243 (const_string "mult")
244 (const_string "alu")))
246 ; Load scheduling, set from the arm_ld_sched variable
247 ; initialized by arm_override_options()
248 (define_attr "ldsched" "no,yes" (const (symbol_ref "arm_ld_sched")))
250 ;; Classification of NEON instructions for scheduling purposes.
251 ;; Do not set this attribute and the "type" attribute together in
252 ;; any one instruction pattern.
253 (define_attr "neon_type"
264 neon_mul_ddd_8_16_qdd_16_8_long_32_16_long,\
265 neon_mul_qqq_8_16_32_ddd_32,\
266 neon_mul_qdd_64_32_long_qqd_16_ddd_32_scalar_64_32_long_scalar,\
267 neon_mla_ddd_8_16_qdd_16_8_long_32_16_long,\
269 neon_mla_ddd_32_qqd_16_ddd_32_scalar_qdd_64_32_long_scalar_qdd_64_32_long,\
270 neon_mla_qqq_32_qqd_32_scalar,\
271 neon_mul_ddd_16_scalar_32_16_long_scalar,\
272 neon_mul_qqd_32_scalar,\
273 neon_mla_ddd_16_scalar_qdd_32_16_long_scalar,\
278 neon_vqshl_vrshl_vqrshl_qqq,\
280 neon_fp_vadd_ddd_vabs_dd,\
281 neon_fp_vadd_qqq_vabs_qq,\
287 neon_fp_vmla_ddd_scalar,\
288 neon_fp_vmla_qqq_scalar,\
289 neon_fp_vrecps_vrsqrts_ddd,\
290 neon_fp_vrecps_vrsqrts_qqq,\
298 neon_vld2_2_regs_vld1_vld2_all_lanes,\
301 neon_vst1_1_2_regs_vst2_2_regs,\
303 neon_vst2_4_regs_vst3_vst4,\
305 neon_vld1_vld2_lane,\
306 neon_vld3_vld4_lane,\
307 neon_vst1_vst2_lane,\
308 neon_vst3_vst4_lane,\
309 neon_vld3_vld4_all_lanes,\
317 (const_string "none"))
319 ; condition codes: this one is used by final_prescan_insn to speed up
320 ; conditionalizing instructions. It saves having to scan the rtl to see if
321 ; it uses or alters the condition codes.
323 ; USE means that the condition codes are used by the insn in the process of
324 ; outputting code, this means (at present) that we can't use the insn in
327 ; SET means that the purpose of the insn is to set the condition codes in a
328 ; well defined manner.
330 ; CLOB means that the condition codes are altered in an undefined manner, if
331 ; they are altered at all
333 ; UNCONDITIONAL means the instions can not be conditionally executed.
335 ; NOCOND means that the condition codes are neither altered nor affect the
336 ; output of this insn
338 (define_attr "conds" "use,set,clob,unconditional,nocond"
339 (if_then_else (eq_attr "type" "call")
340 (const_string "clob")
341 (if_then_else (eq_attr "neon_type" "none")
342 (const_string "nocond")
343 (const_string "unconditional"))))
345 ; Predicable means that the insn can be conditionally executed based on
346 ; an automatically added predicate (additional patterns are generated by
347 ; gen...). We default to 'no' because no Thumb patterns match this rule
348 ; and not all ARM patterns do.
349 (define_attr "predicable" "no,yes" (const_string "no"))
351 ; Only model the write buffer for ARM6 and ARM7. Earlier processors don't
352 ; have one. Later ones, such as StrongARM, have write-back caches, so don't
353 ; suffer blockages enough to warrant modelling this (and it can adversely
354 ; affect the schedule).
355 (define_attr "model_wbuf" "no,yes" (const (symbol_ref "arm_tune_wbuf")))
357 ; WRITE_CONFLICT implies that a read following an unrelated write is likely
358 ; to stall the processor. Used with model_wbuf above.
359 (define_attr "write_conflict" "no,yes"
360 (if_then_else (eq_attr "type"
361 "block,float_em,f_load,f_store,f_mem_r,r_mem_f,call,load1")
363 (const_string "no")))
365 ; Classify the insns into those that take one cycle and those that take more
366 ; than one on the main cpu execution unit.
367 (define_attr "core_cycles" "single,multi"
368 (if_then_else (eq_attr "type"
369 "alu,alu_shift,float,fdivx,fdivd,fdivs,fmul,ffmul,farith,ffarith")
370 (const_string "single")
371 (const_string "multi")))
373 ;; FAR_JUMP is "yes" if a BL instruction is used to generate a branch to a
374 ;; distant label. Only applicable to Thumb code.
375 (define_attr "far_jump" "yes,no" (const_string "no"))
378 ;; The number of machine instructions this pattern expands to.
379 ;; Used for Thumb-2 conditional execution.
380 (define_attr "ce_count" "" (const_int 1))
382 ;;---------------------------------------------------------------------------
385 ; A list of modes that are exactly 64 bits in size. We use this to expand
386 ; some splits that are the same for all modes when operating on ARM
388 (define_mode_iterator ANY64 [DI DF V8QI V4HI V2SI V2SF])
390 ;; The integer modes up to word size
391 (define_mode_iterator QHSI [QI HI SI])
393 ;;---------------------------------------------------------------------------
396 (include "predicates.md")
397 (include "constraints.md")
399 ;;---------------------------------------------------------------------------
400 ;; Pipeline descriptions
402 ;; Processor type. This is created automatically from arm-cores.def.
403 (include "arm-tune.md")
405 (define_attr "tune_cortexr4" "yes,no"
407 (eq_attr "tune" "cortexr4,cortexr4f")
409 (const_string "no"))))
411 ;; True if the generic scheduling description should be used.
413 (define_attr "generic_sched" "yes,no"
415 (ior (eq_attr "tune" "arm926ejs,arm1020e,arm1026ejs,arm1136js,arm1136jfs,cortexa8,cortexa9")
416 (eq_attr "tune_cortexr4" "yes"))
418 (const_string "yes"))))
420 (define_attr "generic_vfp" "yes,no"
422 (and (eq_attr "fpu" "vfp")
423 (eq_attr "tune" "!arm1020e,arm1022e,cortexa8,cortexa9")
424 (eq_attr "tune_cortexr4" "no"))
426 (const_string "no"))))
428 (include "arm-generic.md")
429 (include "arm926ejs.md")
430 (include "arm1020e.md")
431 (include "arm1026ejs.md")
432 (include "arm1136jfs.md")
433 (include "cortex-a8.md")
434 (include "cortex-a9.md")
435 (include "cortex-r4.md")
436 (include "cortex-r4f.md")
440 ;;---------------------------------------------------------------------------
445 ;; Note: For DImode insns, there is normally no reason why operands should
446 ;; not be in the same register, what we don't want is for something being
447 ;; written to partially overlap something that is an input.
448 ;; Cirrus 64bit additions should not be split because we have a native
449 ;; 64bit addition instructions.
451 (define_expand "adddi3"
453 [(set (match_operand:DI 0 "s_register_operand" "")
454 (plus:DI (match_operand:DI 1 "s_register_operand" "")
455 (match_operand:DI 2 "s_register_operand" "")))
456 (clobber (reg:CC CC_REGNUM))])]
459 if (TARGET_HARD_FLOAT && TARGET_MAVERICK)
461 if (!cirrus_fp_register (operands[0], DImode))
462 operands[0] = force_reg (DImode, operands[0]);
463 if (!cirrus_fp_register (operands[1], DImode))
464 operands[1] = force_reg (DImode, operands[1]);
465 emit_insn (gen_cirrus_adddi3 (operands[0], operands[1], operands[2]));
471 if (GET_CODE (operands[1]) != REG)
472 operands[1] = force_reg (DImode, operands[1]);
473 if (GET_CODE (operands[2]) != REG)
474 operands[2] = force_reg (DImode, operands[2]);
479 (define_insn "*thumb1_adddi3"
480 [(set (match_operand:DI 0 "register_operand" "=l")
481 (plus:DI (match_operand:DI 1 "register_operand" "%0")
482 (match_operand:DI 2 "register_operand" "l")))
483 (clobber (reg:CC CC_REGNUM))
486 "add\\t%Q0, %Q0, %Q2\;adc\\t%R0, %R0, %R2"
487 [(set_attr "length" "4")]
490 (define_insn_and_split "*arm_adddi3"
491 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
492 (plus:DI (match_operand:DI 1 "s_register_operand" "%0, 0")
493 (match_operand:DI 2 "s_register_operand" "r, 0")))
494 (clobber (reg:CC CC_REGNUM))]
495 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)"
497 "TARGET_32BIT && reload_completed"
498 [(parallel [(set (reg:CC_C CC_REGNUM)
499 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
501 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
502 (set (match_dup 3) (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
503 (plus:SI (match_dup 4) (match_dup 5))))]
506 operands[3] = gen_highpart (SImode, operands[0]);
507 operands[0] = gen_lowpart (SImode, operands[0]);
508 operands[4] = gen_highpart (SImode, operands[1]);
509 operands[1] = gen_lowpart (SImode, operands[1]);
510 operands[5] = gen_highpart (SImode, operands[2]);
511 operands[2] = gen_lowpart (SImode, operands[2]);
513 [(set_attr "conds" "clob")
514 (set_attr "length" "8")]
517 (define_insn_and_split "*adddi_sesidi_di"
518 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
519 (plus:DI (sign_extend:DI
520 (match_operand:SI 2 "s_register_operand" "r,r"))
521 (match_operand:DI 1 "s_register_operand" "0,r")))
522 (clobber (reg:CC CC_REGNUM))]
523 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)"
525 "TARGET_32BIT && reload_completed"
526 [(parallel [(set (reg:CC_C CC_REGNUM)
527 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
529 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
530 (set (match_dup 3) (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
531 (plus:SI (ashiftrt:SI (match_dup 2)
536 operands[3] = gen_highpart (SImode, operands[0]);
537 operands[0] = gen_lowpart (SImode, operands[0]);
538 operands[4] = gen_highpart (SImode, operands[1]);
539 operands[1] = gen_lowpart (SImode, operands[1]);
540 operands[2] = gen_lowpart (SImode, operands[2]);
542 [(set_attr "conds" "clob")
543 (set_attr "length" "8")]
546 (define_insn_and_split "*adddi_zesidi_di"
547 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
548 (plus:DI (zero_extend:DI
549 (match_operand:SI 2 "s_register_operand" "r,r"))
550 (match_operand:DI 1 "s_register_operand" "0,r")))
551 (clobber (reg:CC CC_REGNUM))]
552 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)"
554 "TARGET_32BIT && reload_completed"
555 [(parallel [(set (reg:CC_C CC_REGNUM)
556 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
558 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
559 (set (match_dup 3) (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
560 (plus:SI (match_dup 4) (const_int 0))))]
563 operands[3] = gen_highpart (SImode, operands[0]);
564 operands[0] = gen_lowpart (SImode, operands[0]);
565 operands[4] = gen_highpart (SImode, operands[1]);
566 operands[1] = gen_lowpart (SImode, operands[1]);
567 operands[2] = gen_lowpart (SImode, operands[2]);
569 [(set_attr "conds" "clob")
570 (set_attr "length" "8")]
573 (define_expand "addsi3"
574 [(set (match_operand:SI 0 "s_register_operand" "")
575 (plus:SI (match_operand:SI 1 "s_register_operand" "")
576 (match_operand:SI 2 "reg_or_int_operand" "")))]
579 if (TARGET_32BIT && GET_CODE (operands[2]) == CONST_INT)
581 arm_split_constant (PLUS, SImode, NULL_RTX,
582 INTVAL (operands[2]), operands[0], operands[1],
583 optimize && can_create_pseudo_p ());
589 ; If there is a scratch available, this will be faster than synthesizing the
592 [(match_scratch:SI 3 "r")
593 (set (match_operand:SI 0 "arm_general_register_operand" "")
594 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
595 (match_operand:SI 2 "const_int_operand" "")))]
597 !(const_ok_for_arm (INTVAL (operands[2]))
598 || const_ok_for_arm (-INTVAL (operands[2])))
599 && const_ok_for_arm (~INTVAL (operands[2]))"
600 [(set (match_dup 3) (match_dup 2))
601 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))]
605 ;; The r/r/k alternative is required when reloading the address
606 ;; (plus (reg rN) (reg sp)) into (reg rN). In this case reload will
607 ;; put the duplicated register first, and not try the commutative version.
608 (define_insn_and_split "*arm_addsi3"
609 [(set (match_operand:SI 0 "s_register_operand" "=r, !k, r,r, !k,r")
610 (plus:SI (match_operand:SI 1 "s_register_operand" "%rk,!k, r,rk,!k,rk")
611 (match_operand:SI 2 "reg_or_int_operand" "rI, rI,!k,L, L,?n")))]
621 && GET_CODE (operands[2]) == CONST_INT
622 && !(const_ok_for_arm (INTVAL (operands[2]))
623 || const_ok_for_arm (-INTVAL (operands[2])))
624 && (reload_completed || !arm_eliminable_register (operands[1]))"
625 [(clobber (const_int 0))]
627 arm_split_constant (PLUS, SImode, curr_insn,
628 INTVAL (operands[2]), operands[0],
632 [(set_attr "length" "4,4,4,4,4,16")
633 (set_attr "predicable" "yes")]
636 ;; Register group 'k' is a single register group containing only the stack
637 ;; register. Trying to reload it will always fail catastrophically,
638 ;; so never allow those alternatives to match if reloading is needed.
640 (define_insn_and_split "*thumb1_addsi3"
641 [(set (match_operand:SI 0 "register_operand" "=l,l,l,*rk,*hk,l,!k,l,l")
642 (plus:SI (match_operand:SI 1 "register_operand" "%0,0,l,*0,*0,!k,!k,0,l")
643 (match_operand:SI 2 "nonmemory_operand" "I,J,lL,*hk,*rk,!M,!O,Pa,Pb")))]
646 static const char * const asms[] =
648 \"add\\t%0, %0, %2\",
649 \"sub\\t%0, %0, #%n2\",
650 \"add\\t%0, %1, %2\",
651 \"add\\t%0, %0, %2\",
652 \"add\\t%0, %0, %2\",
653 \"add\\t%0, %1, %2\",
654 \"add\\t%0, %1, %2\",
658 if ((which_alternative == 2 || which_alternative == 6)
659 && GET_CODE (operands[2]) == CONST_INT
660 && INTVAL (operands[2]) < 0)
661 return \"sub\\t%0, %1, #%n2\";
662 return asms[which_alternative];
664 "&& reload_completed && CONST_INT_P (operands[2])
665 && operands[1] != stack_pointer_rtx
666 && (INTVAL (operands[2]) > 255 || INTVAL (operands[2]) < -255)"
667 [(set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))
668 (set (match_dup 0) (plus:SI (match_dup 0) (match_dup 3)))]
670 HOST_WIDE_INT offset = INTVAL (operands[2]);
673 else if (offset < -255)
676 operands[3] = GEN_INT (offset);
677 operands[2] = GEN_INT (INTVAL (operands[2]) - offset);
679 [(set_attr "length" "2,2,2,2,2,2,2,4,4")]
682 ;; Reloading and elimination of the frame pointer can
683 ;; sometimes cause this optimization to be missed.
685 [(set (match_operand:SI 0 "arm_general_register_operand" "")
686 (match_operand:SI 1 "const_int_operand" ""))
688 (plus:SI (match_dup 0) (reg:SI SP_REGNUM)))]
690 && (unsigned HOST_WIDE_INT) (INTVAL (operands[1])) < 1024
691 && (INTVAL (operands[1]) & 3) == 0"
692 [(set (match_dup 0) (plus:SI (reg:SI SP_REGNUM) (match_dup 1)))]
696 (define_insn "*addsi3_compare0"
697 [(set (reg:CC_NOOV CC_REGNUM)
699 (plus:SI (match_operand:SI 1 "s_register_operand" "r, r")
700 (match_operand:SI 2 "arm_add_operand" "rI,L"))
702 (set (match_operand:SI 0 "s_register_operand" "=r,r")
703 (plus:SI (match_dup 1) (match_dup 2)))]
707 sub%.\\t%0, %1, #%n2"
708 [(set_attr "conds" "set")]
711 (define_insn "*addsi3_compare0_scratch"
712 [(set (reg:CC_NOOV CC_REGNUM)
714 (plus:SI (match_operand:SI 0 "s_register_operand" "r, r")
715 (match_operand:SI 1 "arm_add_operand" "rI,L"))
721 [(set_attr "conds" "set")]
724 (define_insn "*compare_negsi_si"
725 [(set (reg:CC_Z CC_REGNUM)
727 (neg:SI (match_operand:SI 0 "s_register_operand" "r"))
728 (match_operand:SI 1 "s_register_operand" "r")))]
731 [(set_attr "conds" "set")]
734 ;; This is the canonicalization of addsi3_compare0_for_combiner when the
735 ;; addend is a constant.
736 (define_insn "*cmpsi2_addneg"
737 [(set (reg:CC CC_REGNUM)
739 (match_operand:SI 1 "s_register_operand" "r,r")
740 (match_operand:SI 2 "arm_addimm_operand" "L,I")))
741 (set (match_operand:SI 0 "s_register_operand" "=r,r")
742 (plus:SI (match_dup 1)
743 (match_operand:SI 3 "arm_addimm_operand" "I,L")))]
744 "TARGET_32BIT && INTVAL (operands[2]) == -INTVAL (operands[3])"
747 sub%.\\t%0, %1, #%n3"
748 [(set_attr "conds" "set")]
751 ;; Convert the sequence
753 ;; cmn rd, #1 (equivalent to cmp rd, #-1)
757 ;; bcs dest ((unsigned)rn >= 1)
758 ;; similarly for the beq variant using bcc.
759 ;; This is a common looping idiom (while (n--))
761 [(set (match_operand:SI 0 "arm_general_register_operand" "")
762 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
764 (set (match_operand 2 "cc_register" "")
765 (compare (match_dup 0) (const_int -1)))
767 (if_then_else (match_operator 3 "equality_operator"
768 [(match_dup 2) (const_int 0)])
769 (match_operand 4 "" "")
770 (match_operand 5 "" "")))]
771 "TARGET_32BIT && peep2_reg_dead_p (3, operands[2])"
775 (match_dup 1) (const_int 1)))
776 (set (match_dup 0) (plus:SI (match_dup 1) (const_int -1)))])
778 (if_then_else (match_op_dup 3 [(match_dup 2) (const_int 0)])
781 "operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
782 operands[3] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
785 operands[2], const0_rtx);"
788 ;; The next four insns work because they compare the result with one of
789 ;; the operands, and we know that the use of the condition code is
790 ;; either GEU or LTU, so we can use the carry flag from the addition
791 ;; instead of doing the compare a second time.
792 (define_insn "*addsi3_compare_op1"
793 [(set (reg:CC_C CC_REGNUM)
795 (plus:SI (match_operand:SI 1 "s_register_operand" "r,r")
796 (match_operand:SI 2 "arm_add_operand" "rI,L"))
798 (set (match_operand:SI 0 "s_register_operand" "=r,r")
799 (plus:SI (match_dup 1) (match_dup 2)))]
803 sub%.\\t%0, %1, #%n2"
804 [(set_attr "conds" "set")]
807 (define_insn "*addsi3_compare_op2"
808 [(set (reg:CC_C CC_REGNUM)
810 (plus:SI (match_operand:SI 1 "s_register_operand" "r,r")
811 (match_operand:SI 2 "arm_add_operand" "rI,L"))
813 (set (match_operand:SI 0 "s_register_operand" "=r,r")
814 (plus:SI (match_dup 1) (match_dup 2)))]
818 sub%.\\t%0, %1, #%n2"
819 [(set_attr "conds" "set")]
822 (define_insn "*compare_addsi2_op0"
823 [(set (reg:CC_C CC_REGNUM)
825 (plus:SI (match_operand:SI 0 "s_register_operand" "r,r")
826 (match_operand:SI 1 "arm_add_operand" "rI,L"))
832 [(set_attr "conds" "set")]
835 (define_insn "*compare_addsi2_op1"
836 [(set (reg:CC_C CC_REGNUM)
838 (plus:SI (match_operand:SI 0 "s_register_operand" "r,r")
839 (match_operand:SI 1 "arm_add_operand" "rI,L"))
845 [(set_attr "conds" "set")]
848 (define_insn "*addsi3_carryin"
849 [(set (match_operand:SI 0 "s_register_operand" "=r")
850 (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
851 (plus:SI (match_operand:SI 1 "s_register_operand" "r")
852 (match_operand:SI 2 "arm_rhs_operand" "rI"))))]
855 [(set_attr "conds" "use")]
858 (define_insn "*addsi3_carryin_shift"
859 [(set (match_operand:SI 0 "s_register_operand" "=r")
860 (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
862 (match_operator:SI 2 "shift_operator"
863 [(match_operand:SI 3 "s_register_operand" "r")
864 (match_operand:SI 4 "reg_or_int_operand" "rM")])
865 (match_operand:SI 1 "s_register_operand" "r"))))]
867 "adc%?\\t%0, %1, %3%S2"
868 [(set_attr "conds" "use")
869 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
870 (const_string "alu_shift")
871 (const_string "alu_shift_reg")))]
874 (define_insn "*addsi3_carryin_alt1"
875 [(set (match_operand:SI 0 "s_register_operand" "=r")
876 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "r")
877 (match_operand:SI 2 "arm_rhs_operand" "rI"))
878 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
881 [(set_attr "conds" "use")]
884 (define_insn "*addsi3_carryin_alt2"
885 [(set (match_operand:SI 0 "s_register_operand" "=r")
886 (plus:SI (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
887 (match_operand:SI 1 "s_register_operand" "r"))
888 (match_operand:SI 2 "arm_rhs_operand" "rI")))]
891 [(set_attr "conds" "use")]
894 (define_insn "*addsi3_carryin_alt3"
895 [(set (match_operand:SI 0 "s_register_operand" "=r")
896 (plus:SI (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
897 (match_operand:SI 2 "arm_rhs_operand" "rI"))
898 (match_operand:SI 1 "s_register_operand" "r")))]
901 [(set_attr "conds" "use")]
904 (define_expand "incscc"
905 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
906 (plus:SI (match_operator:SI 2 "arm_comparison_operator"
907 [(match_operand:CC 3 "cc_register" "") (const_int 0)])
908 (match_operand:SI 1 "s_register_operand" "0,?r")))]
913 (define_insn "*arm_incscc"
914 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
915 (plus:SI (match_operator:SI 2 "arm_comparison_operator"
916 [(match_operand:CC 3 "cc_register" "") (const_int 0)])
917 (match_operand:SI 1 "s_register_operand" "0,?r")))]
921 mov%D2\\t%0, %1\;add%d2\\t%0, %1, #1"
922 [(set_attr "conds" "use")
923 (set_attr "length" "4,8")]
926 ; transform ((x << y) - 1) to ~(~(x-1) << y) Where X is a constant.
928 [(set (match_operand:SI 0 "s_register_operand" "")
929 (plus:SI (ashift:SI (match_operand:SI 1 "const_int_operand" "")
930 (match_operand:SI 2 "s_register_operand" ""))
932 (clobber (match_operand:SI 3 "s_register_operand" ""))]
934 [(set (match_dup 3) (match_dup 1))
935 (set (match_dup 0) (not:SI (ashift:SI (match_dup 3) (match_dup 2))))]
937 operands[1] = GEN_INT (~(INTVAL (operands[1]) - 1));
940 (define_expand "addsf3"
941 [(set (match_operand:SF 0 "s_register_operand" "")
942 (plus:SF (match_operand:SF 1 "s_register_operand" "")
943 (match_operand:SF 2 "arm_float_add_operand" "")))]
944 "TARGET_32BIT && TARGET_HARD_FLOAT"
947 && !cirrus_fp_register (operands[2], SFmode))
948 operands[2] = force_reg (SFmode, operands[2]);
951 (define_expand "adddf3"
952 [(set (match_operand:DF 0 "s_register_operand" "")
953 (plus:DF (match_operand:DF 1 "s_register_operand" "")
954 (match_operand:DF 2 "arm_float_add_operand" "")))]
955 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
958 && !cirrus_fp_register (operands[2], DFmode))
959 operands[2] = force_reg (DFmode, operands[2]);
962 (define_expand "subdi3"
964 [(set (match_operand:DI 0 "s_register_operand" "")
965 (minus:DI (match_operand:DI 1 "s_register_operand" "")
966 (match_operand:DI 2 "s_register_operand" "")))
967 (clobber (reg:CC CC_REGNUM))])]
970 if (TARGET_HARD_FLOAT && TARGET_MAVERICK
972 && cirrus_fp_register (operands[0], DImode)
973 && cirrus_fp_register (operands[1], DImode))
975 emit_insn (gen_cirrus_subdi3 (operands[0], operands[1], operands[2]));
981 if (GET_CODE (operands[1]) != REG)
982 operands[1] = force_reg (DImode, operands[1]);
983 if (GET_CODE (operands[2]) != REG)
984 operands[2] = force_reg (DImode, operands[2]);
989 (define_insn "*arm_subdi3"
990 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r,&r")
991 (minus:DI (match_operand:DI 1 "s_register_operand" "0,r,0")
992 (match_operand:DI 2 "s_register_operand" "r,0,0")))
993 (clobber (reg:CC CC_REGNUM))]
995 "subs\\t%Q0, %Q1, %Q2\;sbc\\t%R0, %R1, %R2"
996 [(set_attr "conds" "clob")
997 (set_attr "length" "8")]
1000 (define_insn "*thumb_subdi3"
1001 [(set (match_operand:DI 0 "register_operand" "=l")
1002 (minus:DI (match_operand:DI 1 "register_operand" "0")
1003 (match_operand:DI 2 "register_operand" "l")))
1004 (clobber (reg:CC CC_REGNUM))]
1006 "sub\\t%Q0, %Q0, %Q2\;sbc\\t%R0, %R0, %R2"
1007 [(set_attr "length" "4")]
1010 (define_insn "*subdi_di_zesidi"
1011 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1012 (minus:DI (match_operand:DI 1 "s_register_operand" "0,r")
1014 (match_operand:SI 2 "s_register_operand" "r,r"))))
1015 (clobber (reg:CC CC_REGNUM))]
1017 "subs\\t%Q0, %Q1, %2\;sbc\\t%R0, %R1, #0"
1018 [(set_attr "conds" "clob")
1019 (set_attr "length" "8")]
1022 (define_insn "*subdi_di_sesidi"
1023 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1024 (minus:DI (match_operand:DI 1 "s_register_operand" "0,r")
1026 (match_operand:SI 2 "s_register_operand" "r,r"))))
1027 (clobber (reg:CC CC_REGNUM))]
1029 "subs\\t%Q0, %Q1, %2\;sbc\\t%R0, %R1, %2, asr #31"
1030 [(set_attr "conds" "clob")
1031 (set_attr "length" "8")]
1034 (define_insn "*subdi_zesidi_di"
1035 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1036 (minus:DI (zero_extend:DI
1037 (match_operand:SI 2 "s_register_operand" "r,r"))
1038 (match_operand:DI 1 "s_register_operand" "0,r")))
1039 (clobber (reg:CC CC_REGNUM))]
1041 "rsbs\\t%Q0, %Q1, %2\;rsc\\t%R0, %R1, #0"
1042 [(set_attr "conds" "clob")
1043 (set_attr "length" "8")]
1046 (define_insn "*subdi_sesidi_di"
1047 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1048 (minus:DI (sign_extend:DI
1049 (match_operand:SI 2 "s_register_operand" "r,r"))
1050 (match_operand:DI 1 "s_register_operand" "0,r")))
1051 (clobber (reg:CC CC_REGNUM))]
1053 "rsbs\\t%Q0, %Q1, %2\;rsc\\t%R0, %R1, %2, asr #31"
1054 [(set_attr "conds" "clob")
1055 (set_attr "length" "8")]
1058 (define_insn "*subdi_zesidi_zesidi"
1059 [(set (match_operand:DI 0 "s_register_operand" "=r")
1060 (minus:DI (zero_extend:DI
1061 (match_operand:SI 1 "s_register_operand" "r"))
1063 (match_operand:SI 2 "s_register_operand" "r"))))
1064 (clobber (reg:CC CC_REGNUM))]
1066 "subs\\t%Q0, %1, %2\;sbc\\t%R0, %1, %1"
1067 [(set_attr "conds" "clob")
1068 (set_attr "length" "8")]
1071 (define_expand "subsi3"
1072 [(set (match_operand:SI 0 "s_register_operand" "")
1073 (minus:SI (match_operand:SI 1 "reg_or_int_operand" "")
1074 (match_operand:SI 2 "s_register_operand" "")))]
1077 if (GET_CODE (operands[1]) == CONST_INT)
1081 arm_split_constant (MINUS, SImode, NULL_RTX,
1082 INTVAL (operands[1]), operands[0],
1083 operands[2], optimize && can_create_pseudo_p ());
1086 else /* TARGET_THUMB1 */
1087 operands[1] = force_reg (SImode, operands[1]);
1092 (define_insn "*thumb1_subsi3_insn"
1093 [(set (match_operand:SI 0 "register_operand" "=l")
1094 (minus:SI (match_operand:SI 1 "register_operand" "l")
1095 (match_operand:SI 2 "register_operand" "l")))]
1098 [(set_attr "length" "2")]
1101 ; ??? Check Thumb-2 split length
1102 (define_insn_and_split "*arm_subsi3_insn"
1103 [(set (match_operand:SI 0 "s_register_operand" "=r,rk,r")
1104 (minus:SI (match_operand:SI 1 "reg_or_int_operand" "rI,!k,?n")
1105 (match_operand:SI 2 "s_register_operand" "r, r, r")))]
1112 && GET_CODE (operands[1]) == CONST_INT
1113 && !const_ok_for_arm (INTVAL (operands[1]))"
1114 [(clobber (const_int 0))]
1116 arm_split_constant (MINUS, SImode, curr_insn,
1117 INTVAL (operands[1]), operands[0], operands[2], 0);
1120 [(set_attr "length" "4,4,16")
1121 (set_attr "predicable" "yes")]
1125 [(match_scratch:SI 3 "r")
1126 (set (match_operand:SI 0 "arm_general_register_operand" "")
1127 (minus:SI (match_operand:SI 1 "const_int_operand" "")
1128 (match_operand:SI 2 "arm_general_register_operand" "")))]
1130 && !const_ok_for_arm (INTVAL (operands[1]))
1131 && const_ok_for_arm (~INTVAL (operands[1]))"
1132 [(set (match_dup 3) (match_dup 1))
1133 (set (match_dup 0) (minus:SI (match_dup 3) (match_dup 2)))]
1137 (define_insn "*subsi3_compare0"
1138 [(set (reg:CC_NOOV CC_REGNUM)
1140 (minus:SI (match_operand:SI 1 "arm_rhs_operand" "r,I")
1141 (match_operand:SI 2 "arm_rhs_operand" "rI,r"))
1143 (set (match_operand:SI 0 "s_register_operand" "=r,r")
1144 (minus:SI (match_dup 1) (match_dup 2)))]
1149 [(set_attr "conds" "set")]
1152 (define_expand "decscc"
1153 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1154 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
1155 (match_operator:SI 2 "arm_comparison_operator"
1156 [(match_operand 3 "cc_register" "") (const_int 0)])))]
1161 (define_insn "*arm_decscc"
1162 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1163 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
1164 (match_operator:SI 2 "arm_comparison_operator"
1165 [(match_operand 3 "cc_register" "") (const_int 0)])))]
1169 mov%D2\\t%0, %1\;sub%d2\\t%0, %1, #1"
1170 [(set_attr "conds" "use")
1171 (set_attr "length" "*,8")]
1174 (define_expand "subsf3"
1175 [(set (match_operand:SF 0 "s_register_operand" "")
1176 (minus:SF (match_operand:SF 1 "arm_float_rhs_operand" "")
1177 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1178 "TARGET_32BIT && TARGET_HARD_FLOAT"
1180 if (TARGET_MAVERICK)
1182 if (!cirrus_fp_register (operands[1], SFmode))
1183 operands[1] = force_reg (SFmode, operands[1]);
1184 if (!cirrus_fp_register (operands[2], SFmode))
1185 operands[2] = force_reg (SFmode, operands[2]);
1189 (define_expand "subdf3"
1190 [(set (match_operand:DF 0 "s_register_operand" "")
1191 (minus:DF (match_operand:DF 1 "arm_float_rhs_operand" "")
1192 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1193 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1195 if (TARGET_MAVERICK)
1197 if (!cirrus_fp_register (operands[1], DFmode))
1198 operands[1] = force_reg (DFmode, operands[1]);
1199 if (!cirrus_fp_register (operands[2], DFmode))
1200 operands[2] = force_reg (DFmode, operands[2]);
1205 ;; Multiplication insns
1207 (define_expand "mulsi3"
1208 [(set (match_operand:SI 0 "s_register_operand" "")
1209 (mult:SI (match_operand:SI 2 "s_register_operand" "")
1210 (match_operand:SI 1 "s_register_operand" "")))]
1215 ;; Use `&' and then `0' to prevent the operands 0 and 1 being the same
1216 (define_insn "*arm_mulsi3"
1217 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1218 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r")
1219 (match_operand:SI 1 "s_register_operand" "%0,r")))]
1220 "TARGET_32BIT && !arm_arch6"
1221 "mul%?\\t%0, %2, %1"
1222 [(set_attr "insn" "mul")
1223 (set_attr "predicable" "yes")]
1226 (define_insn "*arm_mulsi3_v6"
1227 [(set (match_operand:SI 0 "s_register_operand" "=r")
1228 (mult:SI (match_operand:SI 1 "s_register_operand" "r")
1229 (match_operand:SI 2 "s_register_operand" "r")))]
1230 "TARGET_32BIT && arm_arch6"
1231 "mul%?\\t%0, %1, %2"
1232 [(set_attr "insn" "mul")
1233 (set_attr "predicable" "yes")]
1236 ; Unfortunately with the Thumb the '&'/'0' trick can fails when operands
1237 ; 1 and 2; are the same, because reload will make operand 0 match
1238 ; operand 1 without realizing that this conflicts with operand 2. We fix
1239 ; this by adding another alternative to match this case, and then `reload'
1240 ; it ourselves. This alternative must come first.
1241 (define_insn "*thumb_mulsi3"
1242 [(set (match_operand:SI 0 "register_operand" "=&l,&l,&l")
1243 (mult:SI (match_operand:SI 1 "register_operand" "%l,*h,0")
1244 (match_operand:SI 2 "register_operand" "l,l,l")))]
1245 "TARGET_THUMB1 && !arm_arch6"
1247 if (which_alternative < 2)
1248 return \"mov\\t%0, %1\;mul\\t%0, %2\";
1250 return \"mul\\t%0, %2\";
1252 [(set_attr "length" "4,4,2")
1253 (set_attr "insn" "mul")]
1256 (define_insn "*thumb_mulsi3_v6"
1257 [(set (match_operand:SI 0 "register_operand" "=l,l,l")
1258 (mult:SI (match_operand:SI 1 "register_operand" "0,l,0")
1259 (match_operand:SI 2 "register_operand" "l,0,0")))]
1260 "TARGET_THUMB1 && arm_arch6"
1265 [(set_attr "length" "2")
1266 (set_attr "insn" "mul")]
1269 (define_insn "*mulsi3_compare0"
1270 [(set (reg:CC_NOOV CC_REGNUM)
1271 (compare:CC_NOOV (mult:SI
1272 (match_operand:SI 2 "s_register_operand" "r,r")
1273 (match_operand:SI 1 "s_register_operand" "%0,r"))
1275 (set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1276 (mult:SI (match_dup 2) (match_dup 1)))]
1277 "TARGET_ARM && !arm_arch6"
1278 "mul%.\\t%0, %2, %1"
1279 [(set_attr "conds" "set")
1280 (set_attr "insn" "muls")]
1283 (define_insn "*mulsi3_compare0_v6"
1284 [(set (reg:CC_NOOV CC_REGNUM)
1285 (compare:CC_NOOV (mult:SI
1286 (match_operand:SI 2 "s_register_operand" "r")
1287 (match_operand:SI 1 "s_register_operand" "r"))
1289 (set (match_operand:SI 0 "s_register_operand" "=r")
1290 (mult:SI (match_dup 2) (match_dup 1)))]
1291 "TARGET_ARM && arm_arch6 && optimize_size"
1292 "mul%.\\t%0, %2, %1"
1293 [(set_attr "conds" "set")
1294 (set_attr "insn" "muls")]
1297 (define_insn "*mulsi_compare0_scratch"
1298 [(set (reg:CC_NOOV CC_REGNUM)
1299 (compare:CC_NOOV (mult:SI
1300 (match_operand:SI 2 "s_register_operand" "r,r")
1301 (match_operand:SI 1 "s_register_operand" "%0,r"))
1303 (clobber (match_scratch:SI 0 "=&r,&r"))]
1304 "TARGET_ARM && !arm_arch6"
1305 "mul%.\\t%0, %2, %1"
1306 [(set_attr "conds" "set")
1307 (set_attr "insn" "muls")]
1310 (define_insn "*mulsi_compare0_scratch_v6"
1311 [(set (reg:CC_NOOV CC_REGNUM)
1312 (compare:CC_NOOV (mult:SI
1313 (match_operand:SI 2 "s_register_operand" "r")
1314 (match_operand:SI 1 "s_register_operand" "r"))
1316 (clobber (match_scratch:SI 0 "=r"))]
1317 "TARGET_ARM && arm_arch6 && optimize_size"
1318 "mul%.\\t%0, %2, %1"
1319 [(set_attr "conds" "set")
1320 (set_attr "insn" "muls")]
1323 ;; Unnamed templates to match MLA instruction.
1325 (define_insn "*mulsi3addsi"
1326 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
1328 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1329 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
1330 (match_operand:SI 3 "s_register_operand" "r,r,0,0")))]
1331 "TARGET_32BIT && !arm_arch6"
1332 "mla%?\\t%0, %2, %1, %3"
1333 [(set_attr "insn" "mla")
1334 (set_attr "predicable" "yes")]
1337 (define_insn "*mulsi3addsi_v6"
1338 [(set (match_operand:SI 0 "s_register_operand" "=r")
1340 (mult:SI (match_operand:SI 2 "s_register_operand" "r")
1341 (match_operand:SI 1 "s_register_operand" "r"))
1342 (match_operand:SI 3 "s_register_operand" "r")))]
1343 "TARGET_32BIT && arm_arch6"
1344 "mla%?\\t%0, %2, %1, %3"
1345 [(set_attr "insn" "mla")
1346 (set_attr "predicable" "yes")]
1349 (define_insn "*mulsi3addsi_compare0"
1350 [(set (reg:CC_NOOV CC_REGNUM)
1353 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1354 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
1355 (match_operand:SI 3 "s_register_operand" "r,r,0,0"))
1357 (set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
1358 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
1360 "TARGET_ARM && arm_arch6"
1361 "mla%.\\t%0, %2, %1, %3"
1362 [(set_attr "conds" "set")
1363 (set_attr "insn" "mlas")]
1366 (define_insn "*mulsi3addsi_compare0_v6"
1367 [(set (reg:CC_NOOV CC_REGNUM)
1370 (match_operand:SI 2 "s_register_operand" "r")
1371 (match_operand:SI 1 "s_register_operand" "r"))
1372 (match_operand:SI 3 "s_register_operand" "r"))
1374 (set (match_operand:SI 0 "s_register_operand" "=r")
1375 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
1377 "TARGET_ARM && arm_arch6 && optimize_size"
1378 "mla%.\\t%0, %2, %1, %3"
1379 [(set_attr "conds" "set")
1380 (set_attr "insn" "mlas")]
1383 (define_insn "*mulsi3addsi_compare0_scratch"
1384 [(set (reg:CC_NOOV CC_REGNUM)
1387 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1388 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
1389 (match_operand:SI 3 "s_register_operand" "?r,r,0,0"))
1391 (clobber (match_scratch:SI 0 "=&r,&r,&r,&r"))]
1392 "TARGET_ARM && !arm_arch6"
1393 "mla%.\\t%0, %2, %1, %3"
1394 [(set_attr "conds" "set")
1395 (set_attr "insn" "mlas")]
1398 (define_insn "*mulsi3addsi_compare0_scratch_v6"
1399 [(set (reg:CC_NOOV CC_REGNUM)
1402 (match_operand:SI 2 "s_register_operand" "r")
1403 (match_operand:SI 1 "s_register_operand" "r"))
1404 (match_operand:SI 3 "s_register_operand" "r"))
1406 (clobber (match_scratch:SI 0 "=r"))]
1407 "TARGET_ARM && arm_arch6 && optimize_size"
1408 "mla%.\\t%0, %2, %1, %3"
1409 [(set_attr "conds" "set")
1410 (set_attr "insn" "mlas")]
1413 (define_insn "*mulsi3subsi"
1414 [(set (match_operand:SI 0 "s_register_operand" "=r")
1416 (match_operand:SI 3 "s_register_operand" "r")
1417 (mult:SI (match_operand:SI 2 "s_register_operand" "r")
1418 (match_operand:SI 1 "s_register_operand" "r"))))]
1419 "TARGET_32BIT && arm_arch_thumb2"
1420 "mls%?\\t%0, %2, %1, %3"
1421 [(set_attr "insn" "mla")
1422 (set_attr "predicable" "yes")]
1425 (define_expand "maddsidi4"
1426 [(set (match_operand:DI 0 "s_register_operand" "")
1429 (sign_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1430 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1431 (match_operand:DI 3 "s_register_operand" "")))]
1432 "TARGET_32BIT && arm_arch3m"
1435 (define_insn "*mulsidi3adddi"
1436 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1439 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "%r"))
1440 (sign_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1441 (match_operand:DI 1 "s_register_operand" "0")))]
1442 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1443 "smlal%?\\t%Q0, %R0, %3, %2"
1444 [(set_attr "insn" "smlal")
1445 (set_attr "predicable" "yes")]
1448 (define_insn "*mulsidi3adddi_v6"
1449 [(set (match_operand:DI 0 "s_register_operand" "=r")
1452 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))
1453 (sign_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1454 (match_operand:DI 1 "s_register_operand" "0")))]
1455 "TARGET_32BIT && arm_arch6"
1456 "smlal%?\\t%Q0, %R0, %3, %2"
1457 [(set_attr "insn" "smlal")
1458 (set_attr "predicable" "yes")]
1461 ;; 32x32->64 widening multiply.
1462 ;; As with mulsi3, the only difference between the v3-5 and v6+
1463 ;; versions of these patterns is the requirement that the output not
1464 ;; overlap the inputs, but that still means we have to have a named
1465 ;; expander and two different starred insns.
1467 (define_expand "mulsidi3"
1468 [(set (match_operand:DI 0 "s_register_operand" "")
1470 (sign_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1471 (sign_extend:DI (match_operand:SI 2 "s_register_operand" ""))))]
1472 "TARGET_32BIT && arm_arch3m"
1476 (define_insn "*mulsidi3_nov6"
1477 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1479 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "%r"))
1480 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1481 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1482 "smull%?\\t%Q0, %R0, %1, %2"
1483 [(set_attr "insn" "smull")
1484 (set_attr "predicable" "yes")]
1487 (define_insn "*mulsidi3_v6"
1488 [(set (match_operand:DI 0 "s_register_operand" "=r")
1490 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1491 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1492 "TARGET_32BIT && arm_arch6"
1493 "smull%?\\t%Q0, %R0, %1, %2"
1494 [(set_attr "insn" "smull")
1495 (set_attr "predicable" "yes")]
1498 (define_expand "umulsidi3"
1499 [(set (match_operand:DI 0 "s_register_operand" "")
1501 (zero_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1502 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))))]
1503 "TARGET_32BIT && arm_arch3m"
1507 (define_insn "*umulsidi3_nov6"
1508 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1510 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "%r"))
1511 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1512 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1513 "umull%?\\t%Q0, %R0, %1, %2"
1514 [(set_attr "insn" "umull")
1515 (set_attr "predicable" "yes")]
1518 (define_insn "*umulsidi3_v6"
1519 [(set (match_operand:DI 0 "s_register_operand" "=r")
1521 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1522 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1523 "TARGET_32BIT && arm_arch6"
1524 "umull%?\\t%Q0, %R0, %1, %2"
1525 [(set_attr "insn" "umull")
1526 (set_attr "predicable" "yes")]
1529 (define_expand "umaddsidi4"
1530 [(set (match_operand:DI 0 "s_register_operand" "")
1533 (zero_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1534 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1535 (match_operand:DI 3 "s_register_operand" "")))]
1536 "TARGET_32BIT && arm_arch3m"
1539 (define_insn "*umulsidi3adddi"
1540 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1543 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "%r"))
1544 (zero_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1545 (match_operand:DI 1 "s_register_operand" "0")))]
1546 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1547 "umlal%?\\t%Q0, %R0, %3, %2"
1548 [(set_attr "insn" "umlal")
1549 (set_attr "predicable" "yes")]
1552 (define_insn "*umulsidi3adddi_v6"
1553 [(set (match_operand:DI 0 "s_register_operand" "=r")
1556 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))
1557 (zero_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1558 (match_operand:DI 1 "s_register_operand" "0")))]
1559 "TARGET_32BIT && arm_arch6"
1560 "umlal%?\\t%Q0, %R0, %3, %2"
1561 [(set_attr "insn" "umlal")
1562 (set_attr "predicable" "yes")]
1565 (define_expand "smulsi3_highpart"
1567 [(set (match_operand:SI 0 "s_register_operand" "")
1571 (sign_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1572 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1574 (clobber (match_scratch:SI 3 ""))])]
1575 "TARGET_32BIT && arm_arch3m"
1579 (define_insn "*smulsi3_highpart_nov6"
1580 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1584 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "%0,r"))
1585 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r,r")))
1587 (clobber (match_scratch:SI 3 "=&r,&r"))]
1588 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1589 "smull%?\\t%3, %0, %2, %1"
1590 [(set_attr "insn" "smull")
1591 (set_attr "predicable" "yes")]
1594 (define_insn "*smulsi3_highpart_v6"
1595 [(set (match_operand:SI 0 "s_register_operand" "=r")
1599 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1600 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r")))
1602 (clobber (match_scratch:SI 3 "=r"))]
1603 "TARGET_32BIT && arm_arch6"
1604 "smull%?\\t%3, %0, %2, %1"
1605 [(set_attr "insn" "smull")
1606 (set_attr "predicable" "yes")]
1609 (define_expand "umulsi3_highpart"
1611 [(set (match_operand:SI 0 "s_register_operand" "")
1615 (zero_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1616 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1618 (clobber (match_scratch:SI 3 ""))])]
1619 "TARGET_32BIT && arm_arch3m"
1623 (define_insn "*umulsi3_highpart_nov6"
1624 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1628 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "%0,r"))
1629 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r,r")))
1631 (clobber (match_scratch:SI 3 "=&r,&r"))]
1632 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1633 "umull%?\\t%3, %0, %2, %1"
1634 [(set_attr "insn" "umull")
1635 (set_attr "predicable" "yes")]
1638 (define_insn "*umulsi3_highpart_v6"
1639 [(set (match_operand:SI 0 "s_register_operand" "=r")
1643 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1644 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r")))
1646 (clobber (match_scratch:SI 3 "=r"))]
1647 "TARGET_32BIT && arm_arch6"
1648 "umull%?\\t%3, %0, %2, %1"
1649 [(set_attr "insn" "umull")
1650 (set_attr "predicable" "yes")]
1653 (define_insn "mulhisi3"
1654 [(set (match_operand:SI 0 "s_register_operand" "=r")
1655 (mult:SI (sign_extend:SI
1656 (match_operand:HI 1 "s_register_operand" "%r"))
1658 (match_operand:HI 2 "s_register_operand" "r"))))]
1659 "TARGET_DSP_MULTIPLY"
1660 "smulbb%?\\t%0, %1, %2"
1661 [(set_attr "insn" "smulxy")
1662 (set_attr "predicable" "yes")]
1665 (define_insn "*mulhisi3tb"
1666 [(set (match_operand:SI 0 "s_register_operand" "=r")
1667 (mult:SI (ashiftrt:SI
1668 (match_operand:SI 1 "s_register_operand" "r")
1671 (match_operand:HI 2 "s_register_operand" "r"))))]
1672 "TARGET_DSP_MULTIPLY"
1673 "smultb%?\\t%0, %1, %2"
1674 [(set_attr "insn" "smulxy")
1675 (set_attr "predicable" "yes")]
1678 (define_insn "*mulhisi3bt"
1679 [(set (match_operand:SI 0 "s_register_operand" "=r")
1680 (mult:SI (sign_extend:SI
1681 (match_operand:HI 1 "s_register_operand" "r"))
1683 (match_operand:SI 2 "s_register_operand" "r")
1685 "TARGET_DSP_MULTIPLY"
1686 "smulbt%?\\t%0, %1, %2"
1687 [(set_attr "insn" "smulxy")
1688 (set_attr "predicable" "yes")]
1691 (define_insn "*mulhisi3tt"
1692 [(set (match_operand:SI 0 "s_register_operand" "=r")
1693 (mult:SI (ashiftrt:SI
1694 (match_operand:SI 1 "s_register_operand" "r")
1697 (match_operand:SI 2 "s_register_operand" "r")
1699 "TARGET_DSP_MULTIPLY"
1700 "smultt%?\\t%0, %1, %2"
1701 [(set_attr "insn" "smulxy")
1702 (set_attr "predicable" "yes")]
1705 (define_insn "maddhisi4"
1706 [(set (match_operand:SI 0 "s_register_operand" "=r")
1707 (plus:SI (match_operand:SI 3 "s_register_operand" "r")
1708 (mult:SI (sign_extend:SI
1709 (match_operand:HI 1 "s_register_operand" "%r"))
1711 (match_operand:HI 2 "s_register_operand" "r")))))]
1712 "TARGET_DSP_MULTIPLY"
1713 "smlabb%?\\t%0, %1, %2, %3"
1714 [(set_attr "insn" "smlaxy")
1715 (set_attr "predicable" "yes")]
1718 (define_insn "*maddhidi4"
1719 [(set (match_operand:DI 0 "s_register_operand" "=r")
1721 (match_operand:DI 3 "s_register_operand" "0")
1722 (mult:DI (sign_extend:DI
1723 (match_operand:HI 1 "s_register_operand" "%r"))
1725 (match_operand:HI 2 "s_register_operand" "r")))))]
1726 "TARGET_DSP_MULTIPLY"
1727 "smlalbb%?\\t%Q0, %R0, %1, %2"
1728 [(set_attr "insn" "smlalxy")
1729 (set_attr "predicable" "yes")])
1731 (define_expand "mulsf3"
1732 [(set (match_operand:SF 0 "s_register_operand" "")
1733 (mult:SF (match_operand:SF 1 "s_register_operand" "")
1734 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1735 "TARGET_32BIT && TARGET_HARD_FLOAT"
1738 && !cirrus_fp_register (operands[2], SFmode))
1739 operands[2] = force_reg (SFmode, operands[2]);
1742 (define_expand "muldf3"
1743 [(set (match_operand:DF 0 "s_register_operand" "")
1744 (mult:DF (match_operand:DF 1 "s_register_operand" "")
1745 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1746 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1749 && !cirrus_fp_register (operands[2], DFmode))
1750 operands[2] = force_reg (DFmode, operands[2]);
1755 (define_expand "divsf3"
1756 [(set (match_operand:SF 0 "s_register_operand" "")
1757 (div:SF (match_operand:SF 1 "arm_float_rhs_operand" "")
1758 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1759 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
1762 (define_expand "divdf3"
1763 [(set (match_operand:DF 0 "s_register_operand" "")
1764 (div:DF (match_operand:DF 1 "arm_float_rhs_operand" "")
1765 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1766 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP_DOUBLE)"
1771 (define_expand "modsf3"
1772 [(set (match_operand:SF 0 "s_register_operand" "")
1773 (mod:SF (match_operand:SF 1 "s_register_operand" "")
1774 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1775 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
1778 (define_expand "moddf3"
1779 [(set (match_operand:DF 0 "s_register_operand" "")
1780 (mod:DF (match_operand:DF 1 "s_register_operand" "")
1781 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1782 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
1785 ;; Boolean and,ior,xor insns
1787 ;; Split up double word logical operations
1789 ;; Split up simple DImode logical operations. Simply perform the logical
1790 ;; operation on the upper and lower halves of the registers.
1792 [(set (match_operand:DI 0 "s_register_operand" "")
1793 (match_operator:DI 6 "logical_binary_operator"
1794 [(match_operand:DI 1 "s_register_operand" "")
1795 (match_operand:DI 2 "s_register_operand" "")]))]
1796 "TARGET_32BIT && reload_completed
1797 && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
1798 [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
1799 (set (match_dup 3) (match_op_dup:SI 6 [(match_dup 4) (match_dup 5)]))]
1802 operands[3] = gen_highpart (SImode, operands[0]);
1803 operands[0] = gen_lowpart (SImode, operands[0]);
1804 operands[4] = gen_highpart (SImode, operands[1]);
1805 operands[1] = gen_lowpart (SImode, operands[1]);
1806 operands[5] = gen_highpart (SImode, operands[2]);
1807 operands[2] = gen_lowpart (SImode, operands[2]);
1812 [(set (match_operand:DI 0 "s_register_operand" "")
1813 (match_operator:DI 6 "logical_binary_operator"
1814 [(sign_extend:DI (match_operand:SI 2 "s_register_operand" ""))
1815 (match_operand:DI 1 "s_register_operand" "")]))]
1816 "TARGET_32BIT && reload_completed"
1817 [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
1818 (set (match_dup 3) (match_op_dup:SI 6
1819 [(ashiftrt:SI (match_dup 2) (const_int 31))
1823 operands[3] = gen_highpart (SImode, operands[0]);
1824 operands[0] = gen_lowpart (SImode, operands[0]);
1825 operands[4] = gen_highpart (SImode, operands[1]);
1826 operands[1] = gen_lowpart (SImode, operands[1]);
1827 operands[5] = gen_highpart (SImode, operands[2]);
1828 operands[2] = gen_lowpart (SImode, operands[2]);
1832 ;; The zero extend of operand 2 means we can just copy the high part of
1833 ;; operand1 into operand0.
1835 [(set (match_operand:DI 0 "s_register_operand" "")
1837 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))
1838 (match_operand:DI 1 "s_register_operand" "")))]
1839 "TARGET_32BIT && operands[0] != operands[1] && reload_completed"
1840 [(set (match_dup 0) (ior:SI (match_dup 1) (match_dup 2)))
1841 (set (match_dup 3) (match_dup 4))]
1844 operands[4] = gen_highpart (SImode, operands[1]);
1845 operands[3] = gen_highpart (SImode, operands[0]);
1846 operands[0] = gen_lowpart (SImode, operands[0]);
1847 operands[1] = gen_lowpart (SImode, operands[1]);
1851 ;; The zero extend of operand 2 means we can just copy the high part of
1852 ;; operand1 into operand0.
1854 [(set (match_operand:DI 0 "s_register_operand" "")
1856 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))
1857 (match_operand:DI 1 "s_register_operand" "")))]
1858 "TARGET_32BIT && operands[0] != operands[1] && reload_completed"
1859 [(set (match_dup 0) (xor:SI (match_dup 1) (match_dup 2)))
1860 (set (match_dup 3) (match_dup 4))]
1863 operands[4] = gen_highpart (SImode, operands[1]);
1864 operands[3] = gen_highpart (SImode, operands[0]);
1865 operands[0] = gen_lowpart (SImode, operands[0]);
1866 operands[1] = gen_lowpart (SImode, operands[1]);
1870 (define_insn "anddi3"
1871 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1872 (and:DI (match_operand:DI 1 "s_register_operand" "%0,r")
1873 (match_operand:DI 2 "s_register_operand" "r,r")))]
1874 "TARGET_32BIT && ! TARGET_IWMMXT"
1876 [(set_attr "length" "8")]
1879 (define_insn_and_split "*anddi_zesidi_di"
1880 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1881 (and:DI (zero_extend:DI
1882 (match_operand:SI 2 "s_register_operand" "r,r"))
1883 (match_operand:DI 1 "s_register_operand" "0,r")))]
1886 "TARGET_32BIT && reload_completed"
1887 ; The zero extend of operand 2 clears the high word of the output
1889 [(set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))
1890 (set (match_dup 3) (const_int 0))]
1893 operands[3] = gen_highpart (SImode, operands[0]);
1894 operands[0] = gen_lowpart (SImode, operands[0]);
1895 operands[1] = gen_lowpart (SImode, operands[1]);
1897 [(set_attr "length" "8")]
1900 (define_insn "*anddi_sesdi_di"
1901 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1902 (and:DI (sign_extend:DI
1903 (match_operand:SI 2 "s_register_operand" "r,r"))
1904 (match_operand:DI 1 "s_register_operand" "0,r")))]
1907 [(set_attr "length" "8")]
1910 (define_expand "andsi3"
1911 [(set (match_operand:SI 0 "s_register_operand" "")
1912 (and:SI (match_operand:SI 1 "s_register_operand" "")
1913 (match_operand:SI 2 "reg_or_int_operand" "")))]
1918 if (GET_CODE (operands[2]) == CONST_INT)
1920 arm_split_constant (AND, SImode, NULL_RTX,
1921 INTVAL (operands[2]), operands[0],
1922 operands[1], optimize && can_create_pseudo_p ());
1927 else /* TARGET_THUMB1 */
1929 if (GET_CODE (operands[2]) != CONST_INT)
1931 rtx tmp = force_reg (SImode, operands[2]);
1932 if (rtx_equal_p (operands[0], operands[1]))
1936 operands[2] = operands[1];
1944 if (((unsigned HOST_WIDE_INT) ~INTVAL (operands[2])) < 256)
1946 operands[2] = force_reg (SImode,
1947 GEN_INT (~INTVAL (operands[2])));
1949 emit_insn (gen_bicsi3 (operands[0], operands[2], operands[1]));
1954 for (i = 9; i <= 31; i++)
1956 if ((((HOST_WIDE_INT) 1) << i) - 1 == INTVAL (operands[2]))
1958 emit_insn (gen_extzv (operands[0], operands[1], GEN_INT (i),
1962 else if ((((HOST_WIDE_INT) 1) << i) - 1
1963 == ~INTVAL (operands[2]))
1965 rtx shift = GEN_INT (i);
1966 rtx reg = gen_reg_rtx (SImode);
1968 emit_insn (gen_lshrsi3 (reg, operands[1], shift));
1969 emit_insn (gen_ashlsi3 (operands[0], reg, shift));
1975 operands[2] = force_reg (SImode, operands[2]);
1981 ; ??? Check split length for Thumb-2
1982 (define_insn_and_split "*arm_andsi3_insn"
1983 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1984 (and:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
1985 (match_operand:SI 2 "reg_or_int_operand" "rI,K,?n")))]
1989 bic%?\\t%0, %1, #%B2
1992 && GET_CODE (operands[2]) == CONST_INT
1993 && !(const_ok_for_arm (INTVAL (operands[2]))
1994 || const_ok_for_arm (~INTVAL (operands[2])))"
1995 [(clobber (const_int 0))]
1997 arm_split_constant (AND, SImode, curr_insn,
1998 INTVAL (operands[2]), operands[0], operands[1], 0);
2001 [(set_attr "length" "4,4,16")
2002 (set_attr "predicable" "yes")]
2005 (define_insn "*thumb1_andsi3_insn"
2006 [(set (match_operand:SI 0 "register_operand" "=l")
2007 (and:SI (match_operand:SI 1 "register_operand" "%0")
2008 (match_operand:SI 2 "register_operand" "l")))]
2011 [(set_attr "length" "2")]
2014 (define_insn "*andsi3_compare0"
2015 [(set (reg:CC_NOOV CC_REGNUM)
2017 (and:SI (match_operand:SI 1 "s_register_operand" "r,r")
2018 (match_operand:SI 2 "arm_not_operand" "rI,K"))
2020 (set (match_operand:SI 0 "s_register_operand" "=r,r")
2021 (and:SI (match_dup 1) (match_dup 2)))]
2025 bic%.\\t%0, %1, #%B2"
2026 [(set_attr "conds" "set")]
2029 (define_insn "*andsi3_compare0_scratch"
2030 [(set (reg:CC_NOOV CC_REGNUM)
2032 (and:SI (match_operand:SI 0 "s_register_operand" "r,r")
2033 (match_operand:SI 1 "arm_not_operand" "rI,K"))
2035 (clobber (match_scratch:SI 2 "=X,r"))]
2039 bic%.\\t%2, %0, #%B1"
2040 [(set_attr "conds" "set")]
2043 (define_insn "*zeroextractsi_compare0_scratch"
2044 [(set (reg:CC_NOOV CC_REGNUM)
2045 (compare:CC_NOOV (zero_extract:SI
2046 (match_operand:SI 0 "s_register_operand" "r")
2047 (match_operand 1 "const_int_operand" "n")
2048 (match_operand 2 "const_int_operand" "n"))
2051 && (INTVAL (operands[2]) >= 0 && INTVAL (operands[2]) < 32
2052 && INTVAL (operands[1]) > 0
2053 && INTVAL (operands[1]) + (INTVAL (operands[2]) & 1) <= 8
2054 && INTVAL (operands[1]) + INTVAL (operands[2]) <= 32)"
2056 operands[1] = GEN_INT (((1 << INTVAL (operands[1])) - 1)
2057 << INTVAL (operands[2]));
2058 output_asm_insn (\"tst%?\\t%0, %1\", operands);
2061 [(set_attr "conds" "set")]
2064 (define_insn_and_split "*ne_zeroextractsi"
2065 [(set (match_operand:SI 0 "s_register_operand" "=r")
2066 (ne:SI (zero_extract:SI
2067 (match_operand:SI 1 "s_register_operand" "r")
2068 (match_operand:SI 2 "const_int_operand" "n")
2069 (match_operand:SI 3 "const_int_operand" "n"))
2071 (clobber (reg:CC CC_REGNUM))]
2073 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2074 && INTVAL (operands[2]) > 0
2075 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2076 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
2079 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2080 && INTVAL (operands[2]) > 0
2081 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2082 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
2083 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2084 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
2086 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
2088 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2089 (match_dup 0) (const_int 1)))]
2091 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
2092 << INTVAL (operands[3]));
2094 [(set_attr "conds" "clob")
2095 (set (attr "length")
2096 (if_then_else (eq_attr "is_thumb" "yes")
2101 (define_insn_and_split "*ne_zeroextractsi_shifted"
2102 [(set (match_operand:SI 0 "s_register_operand" "=r")
2103 (ne:SI (zero_extract:SI
2104 (match_operand:SI 1 "s_register_operand" "r")
2105 (match_operand:SI 2 "const_int_operand" "n")
2108 (clobber (reg:CC CC_REGNUM))]
2112 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2113 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2115 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2117 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2118 (match_dup 0) (const_int 1)))]
2120 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2122 [(set_attr "conds" "clob")
2123 (set_attr "length" "8")]
2126 (define_insn_and_split "*ite_ne_zeroextractsi"
2127 [(set (match_operand:SI 0 "s_register_operand" "=r")
2128 (if_then_else:SI (ne (zero_extract:SI
2129 (match_operand:SI 1 "s_register_operand" "r")
2130 (match_operand:SI 2 "const_int_operand" "n")
2131 (match_operand:SI 3 "const_int_operand" "n"))
2133 (match_operand:SI 4 "arm_not_operand" "rIK")
2135 (clobber (reg:CC CC_REGNUM))]
2137 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2138 && INTVAL (operands[2]) > 0
2139 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2140 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2141 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2144 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2145 && INTVAL (operands[2]) > 0
2146 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2147 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2148 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2149 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2150 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
2152 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
2154 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2155 (match_dup 0) (match_dup 4)))]
2157 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
2158 << INTVAL (operands[3]));
2160 [(set_attr "conds" "clob")
2161 (set_attr "length" "8")]
2164 (define_insn_and_split "*ite_ne_zeroextractsi_shifted"
2165 [(set (match_operand:SI 0 "s_register_operand" "=r")
2166 (if_then_else:SI (ne (zero_extract:SI
2167 (match_operand:SI 1 "s_register_operand" "r")
2168 (match_operand:SI 2 "const_int_operand" "n")
2171 (match_operand:SI 3 "arm_not_operand" "rIK")
2173 (clobber (reg:CC CC_REGNUM))]
2174 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2176 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2177 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2178 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2180 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2182 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2183 (match_dup 0) (match_dup 3)))]
2185 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2187 [(set_attr "conds" "clob")
2188 (set_attr "length" "8")]
2192 [(set (match_operand:SI 0 "s_register_operand" "")
2193 (zero_extract:SI (match_operand:SI 1 "s_register_operand" "")
2194 (match_operand:SI 2 "const_int_operand" "")
2195 (match_operand:SI 3 "const_int_operand" "")))
2196 (clobber (match_operand:SI 4 "s_register_operand" ""))]
2198 [(set (match_dup 4) (ashift:SI (match_dup 1) (match_dup 2)))
2199 (set (match_dup 0) (lshiftrt:SI (match_dup 4) (match_dup 3)))]
2201 HOST_WIDE_INT temp = INTVAL (operands[2]);
2203 operands[2] = GEN_INT (32 - temp - INTVAL (operands[3]));
2204 operands[3] = GEN_INT (32 - temp);
2208 ;; ??? Use Thumb-2 has bitfield insert/extract instructions.
2210 [(set (match_operand:SI 0 "s_register_operand" "")
2211 (match_operator:SI 1 "shiftable_operator"
2212 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2213 (match_operand:SI 3 "const_int_operand" "")
2214 (match_operand:SI 4 "const_int_operand" ""))
2215 (match_operand:SI 5 "s_register_operand" "")]))
2216 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2218 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2221 [(lshiftrt:SI (match_dup 6) (match_dup 4))
2224 HOST_WIDE_INT temp = INTVAL (operands[3]);
2226 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2227 operands[4] = GEN_INT (32 - temp);
2232 [(set (match_operand:SI 0 "s_register_operand" "")
2233 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "")
2234 (match_operand:SI 2 "const_int_operand" "")
2235 (match_operand:SI 3 "const_int_operand" "")))]
2237 [(set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))
2238 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (match_dup 3)))]
2240 HOST_WIDE_INT temp = INTVAL (operands[2]);
2242 operands[2] = GEN_INT (32 - temp - INTVAL (operands[3]));
2243 operands[3] = GEN_INT (32 - temp);
2248 [(set (match_operand:SI 0 "s_register_operand" "")
2249 (match_operator:SI 1 "shiftable_operator"
2250 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2251 (match_operand:SI 3 "const_int_operand" "")
2252 (match_operand:SI 4 "const_int_operand" ""))
2253 (match_operand:SI 5 "s_register_operand" "")]))
2254 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2256 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2259 [(ashiftrt:SI (match_dup 6) (match_dup 4))
2262 HOST_WIDE_INT temp = INTVAL (operands[3]);
2264 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2265 operands[4] = GEN_INT (32 - temp);
2269 ;;; ??? This pattern is bogus. If operand3 has bits outside the range
2270 ;;; represented by the bitfield, then this will produce incorrect results.
2271 ;;; Somewhere, the value needs to be truncated. On targets like the m68k,
2272 ;;; which have a real bit-field insert instruction, the truncation happens
2273 ;;; in the bit-field insert instruction itself. Since arm does not have a
2274 ;;; bit-field insert instruction, we would have to emit code here to truncate
2275 ;;; the value before we insert. This loses some of the advantage of having
2276 ;;; this insv pattern, so this pattern needs to be reevalutated.
2278 (define_expand "insv"
2279 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "")
2280 (match_operand:SI 1 "general_operand" "")
2281 (match_operand:SI 2 "general_operand" ""))
2282 (match_operand:SI 3 "reg_or_int_operand" ""))]
2283 "TARGET_ARM || arm_arch_thumb2"
2286 int start_bit = INTVAL (operands[2]);
2287 int width = INTVAL (operands[1]);
2288 HOST_WIDE_INT mask = (((HOST_WIDE_INT)1) << width) - 1;
2289 rtx target, subtarget;
2291 if (arm_arch_thumb2)
2293 bool use_bfi = TRUE;
2295 if (GET_CODE (operands[3]) == CONST_INT)
2297 HOST_WIDE_INT val = INTVAL (operands[3]) & mask;
2301 emit_insn (gen_insv_zero (operands[0], operands[1],
2306 /* See if the set can be done with a single orr instruction. */
2307 if (val == mask && const_ok_for_arm (val << start_bit))
2313 if (GET_CODE (operands[3]) != REG)
2314 operands[3] = force_reg (SImode, operands[3]);
2316 emit_insn (gen_insv_t2 (operands[0], operands[1], operands[2],
2322 target = copy_rtx (operands[0]);
2323 /* Avoid using a subreg as a subtarget, and avoid writing a paradoxical
2324 subreg as the final target. */
2325 if (GET_CODE (target) == SUBREG)
2327 subtarget = gen_reg_rtx (SImode);
2328 if (GET_MODE_SIZE (GET_MODE (SUBREG_REG (target)))
2329 < GET_MODE_SIZE (SImode))
2330 target = SUBREG_REG (target);
2335 if (GET_CODE (operands[3]) == CONST_INT)
2337 /* Since we are inserting a known constant, we may be able to
2338 reduce the number of bits that we have to clear so that
2339 the mask becomes simple. */
2340 /* ??? This code does not check to see if the new mask is actually
2341 simpler. It may not be. */
2342 rtx op1 = gen_reg_rtx (SImode);
2343 /* ??? Truncate operand3 to fit in the bitfield. See comment before
2344 start of this pattern. */
2345 HOST_WIDE_INT op3_value = mask & INTVAL (operands[3]);
2346 HOST_WIDE_INT mask2 = ((mask & ~op3_value) << start_bit);
2348 emit_insn (gen_andsi3 (op1, operands[0],
2349 gen_int_mode (~mask2, SImode)));
2350 emit_insn (gen_iorsi3 (subtarget, op1,
2351 gen_int_mode (op3_value << start_bit, SImode)));
2353 else if (start_bit == 0
2354 && !(const_ok_for_arm (mask)
2355 || const_ok_for_arm (~mask)))
2357 /* A Trick, since we are setting the bottom bits in the word,
2358 we can shift operand[3] up, operand[0] down, OR them together
2359 and rotate the result back again. This takes 3 insns, and
2360 the third might be mergeable into another op. */
2361 /* The shift up copes with the possibility that operand[3] is
2362 wider than the bitfield. */
2363 rtx op0 = gen_reg_rtx (SImode);
2364 rtx op1 = gen_reg_rtx (SImode);
2366 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
2367 emit_insn (gen_lshrsi3 (op1, operands[0], operands[1]));
2368 emit_insn (gen_iorsi3 (op1, op1, op0));
2369 emit_insn (gen_rotlsi3 (subtarget, op1, operands[1]));
2371 else if ((width + start_bit == 32)
2372 && !(const_ok_for_arm (mask)
2373 || const_ok_for_arm (~mask)))
2375 /* Similar trick, but slightly less efficient. */
2377 rtx op0 = gen_reg_rtx (SImode);
2378 rtx op1 = gen_reg_rtx (SImode);
2380 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
2381 emit_insn (gen_ashlsi3 (op1, operands[0], operands[1]));
2382 emit_insn (gen_lshrsi3 (op1, op1, operands[1]));
2383 emit_insn (gen_iorsi3 (subtarget, op1, op0));
2387 rtx op0 = gen_int_mode (mask, SImode);
2388 rtx op1 = gen_reg_rtx (SImode);
2389 rtx op2 = gen_reg_rtx (SImode);
2391 if (!(const_ok_for_arm (mask) || const_ok_for_arm (~mask)))
2393 rtx tmp = gen_reg_rtx (SImode);
2395 emit_insn (gen_movsi (tmp, op0));
2399 /* Mask out any bits in operand[3] that are not needed. */
2400 emit_insn (gen_andsi3 (op1, operands[3], op0));
2402 if (GET_CODE (op0) == CONST_INT
2403 && (const_ok_for_arm (mask << start_bit)
2404 || const_ok_for_arm (~(mask << start_bit))))
2406 op0 = gen_int_mode (~(mask << start_bit), SImode);
2407 emit_insn (gen_andsi3 (op2, operands[0], op0));
2411 if (GET_CODE (op0) == CONST_INT)
2413 rtx tmp = gen_reg_rtx (SImode);
2415 emit_insn (gen_movsi (tmp, op0));
2420 emit_insn (gen_ashlsi3 (op0, op0, operands[2]));
2422 emit_insn (gen_andsi_notsi_si (op2, operands[0], op0));
2426 emit_insn (gen_ashlsi3 (op1, op1, operands[2]));
2428 emit_insn (gen_iorsi3 (subtarget, op1, op2));
2431 if (subtarget != target)
2433 /* If TARGET is still a SUBREG, then it must be wider than a word,
2434 so we must be careful only to set the subword we were asked to. */
2435 if (GET_CODE (target) == SUBREG)
2436 emit_move_insn (target, subtarget);
2438 emit_move_insn (target, gen_lowpart (GET_MODE (target), subtarget));
2445 (define_insn "insv_zero"
2446 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
2447 (match_operand:SI 1 "const_int_operand" "M")
2448 (match_operand:SI 2 "const_int_operand" "M"))
2452 [(set_attr "length" "4")
2453 (set_attr "predicable" "yes")]
2456 (define_insn "insv_t2"
2457 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
2458 (match_operand:SI 1 "const_int_operand" "M")
2459 (match_operand:SI 2 "const_int_operand" "M"))
2460 (match_operand:SI 3 "s_register_operand" "r"))]
2462 "bfi%?\t%0, %3, %2, %1"
2463 [(set_attr "length" "4")
2464 (set_attr "predicable" "yes")]
2467 ; constants for op 2 will never be given to these patterns.
2468 (define_insn_and_split "*anddi_notdi_di"
2469 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2470 (and:DI (not:DI (match_operand:DI 1 "s_register_operand" "0,r"))
2471 (match_operand:DI 2 "s_register_operand" "r,0")))]
2474 "TARGET_32BIT && reload_completed && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
2475 [(set (match_dup 0) (and:SI (not:SI (match_dup 1)) (match_dup 2)))
2476 (set (match_dup 3) (and:SI (not:SI (match_dup 4)) (match_dup 5)))]
2479 operands[3] = gen_highpart (SImode, operands[0]);
2480 operands[0] = gen_lowpart (SImode, operands[0]);
2481 operands[4] = gen_highpart (SImode, operands[1]);
2482 operands[1] = gen_lowpart (SImode, operands[1]);
2483 operands[5] = gen_highpart (SImode, operands[2]);
2484 operands[2] = gen_lowpart (SImode, operands[2]);
2486 [(set_attr "length" "8")
2487 (set_attr "predicable" "yes")]
2490 (define_insn_and_split "*anddi_notzesidi_di"
2491 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2492 (and:DI (not:DI (zero_extend:DI
2493 (match_operand:SI 2 "s_register_operand" "r,r")))
2494 (match_operand:DI 1 "s_register_operand" "0,?r")))]
2497 bic%?\\t%Q0, %Q1, %2
2499 ; (not (zero_extend ...)) allows us to just copy the high word from
2500 ; operand1 to operand0.
2503 && operands[0] != operands[1]"
2504 [(set (match_dup 0) (and:SI (not:SI (match_dup 2)) (match_dup 1)))
2505 (set (match_dup 3) (match_dup 4))]
2508 operands[3] = gen_highpart (SImode, operands[0]);
2509 operands[0] = gen_lowpart (SImode, operands[0]);
2510 operands[4] = gen_highpart (SImode, operands[1]);
2511 operands[1] = gen_lowpart (SImode, operands[1]);
2513 [(set_attr "length" "4,8")
2514 (set_attr "predicable" "yes")]
2517 (define_insn_and_split "*anddi_notsesidi_di"
2518 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2519 (and:DI (not:DI (sign_extend:DI
2520 (match_operand:SI 2 "s_register_operand" "r,r")))
2521 (match_operand:DI 1 "s_register_operand" "0,r")))]
2524 "TARGET_32BIT && reload_completed"
2525 [(set (match_dup 0) (and:SI (not:SI (match_dup 2)) (match_dup 1)))
2526 (set (match_dup 3) (and:SI (not:SI
2527 (ashiftrt:SI (match_dup 2) (const_int 31)))
2531 operands[3] = gen_highpart (SImode, operands[0]);
2532 operands[0] = gen_lowpart (SImode, operands[0]);
2533 operands[4] = gen_highpart (SImode, operands[1]);
2534 operands[1] = gen_lowpart (SImode, operands[1]);
2536 [(set_attr "length" "8")
2537 (set_attr "predicable" "yes")]
2540 (define_insn "andsi_notsi_si"
2541 [(set (match_operand:SI 0 "s_register_operand" "=r")
2542 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2543 (match_operand:SI 1 "s_register_operand" "r")))]
2545 "bic%?\\t%0, %1, %2"
2546 [(set_attr "predicable" "yes")]
2549 (define_insn "bicsi3"
2550 [(set (match_operand:SI 0 "register_operand" "=l")
2551 (and:SI (not:SI (match_operand:SI 1 "register_operand" "l"))
2552 (match_operand:SI 2 "register_operand" "0")))]
2555 [(set_attr "length" "2")]
2558 (define_insn "andsi_not_shiftsi_si"
2559 [(set (match_operand:SI 0 "s_register_operand" "=r")
2560 (and:SI (not:SI (match_operator:SI 4 "shift_operator"
2561 [(match_operand:SI 2 "s_register_operand" "r")
2562 (match_operand:SI 3 "arm_rhs_operand" "rM")]))
2563 (match_operand:SI 1 "s_register_operand" "r")))]
2565 "bic%?\\t%0, %1, %2%S4"
2566 [(set_attr "predicable" "yes")
2567 (set_attr "shift" "2")
2568 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
2569 (const_string "alu_shift")
2570 (const_string "alu_shift_reg")))]
2573 (define_insn "*andsi_notsi_si_compare0"
2574 [(set (reg:CC_NOOV CC_REGNUM)
2576 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2577 (match_operand:SI 1 "s_register_operand" "r"))
2579 (set (match_operand:SI 0 "s_register_operand" "=r")
2580 (and:SI (not:SI (match_dup 2)) (match_dup 1)))]
2582 "bic%.\\t%0, %1, %2"
2583 [(set_attr "conds" "set")]
2586 (define_insn "*andsi_notsi_si_compare0_scratch"
2587 [(set (reg:CC_NOOV CC_REGNUM)
2589 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2590 (match_operand:SI 1 "s_register_operand" "r"))
2592 (clobber (match_scratch:SI 0 "=r"))]
2594 "bic%.\\t%0, %1, %2"
2595 [(set_attr "conds" "set")]
2598 (define_insn "iordi3"
2599 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2600 (ior:DI (match_operand:DI 1 "s_register_operand" "%0,r")
2601 (match_operand:DI 2 "s_register_operand" "r,r")))]
2602 "TARGET_32BIT && ! TARGET_IWMMXT"
2604 [(set_attr "length" "8")
2605 (set_attr "predicable" "yes")]
2608 (define_insn "*iordi_zesidi_di"
2609 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2610 (ior:DI (zero_extend:DI
2611 (match_operand:SI 2 "s_register_operand" "r,r"))
2612 (match_operand:DI 1 "s_register_operand" "0,?r")))]
2615 orr%?\\t%Q0, %Q1, %2
2617 [(set_attr "length" "4,8")
2618 (set_attr "predicable" "yes")]
2621 (define_insn "*iordi_sesidi_di"
2622 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2623 (ior:DI (sign_extend:DI
2624 (match_operand:SI 2 "s_register_operand" "r,r"))
2625 (match_operand:DI 1 "s_register_operand" "0,r")))]
2628 [(set_attr "length" "8")
2629 (set_attr "predicable" "yes")]
2632 (define_expand "iorsi3"
2633 [(set (match_operand:SI 0 "s_register_operand" "")
2634 (ior:SI (match_operand:SI 1 "s_register_operand" "")
2635 (match_operand:SI 2 "reg_or_int_operand" "")))]
2638 if (GET_CODE (operands[2]) == CONST_INT)
2642 arm_split_constant (IOR, SImode, NULL_RTX,
2643 INTVAL (operands[2]), operands[0], operands[1],
2644 optimize && can_create_pseudo_p ());
2647 else /* TARGET_THUMB1 */
2649 rtx tmp = force_reg (SImode, operands[2]);
2650 if (rtx_equal_p (operands[0], operands[1]))
2654 operands[2] = operands[1];
2662 (define_insn_and_split "*arm_iorsi3"
2663 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
2664 (ior:SI (match_operand:SI 1 "s_register_operand" "r,r")
2665 (match_operand:SI 2 "reg_or_int_operand" "rI,?n")))]
2671 && GET_CODE (operands[2]) == CONST_INT
2672 && !const_ok_for_arm (INTVAL (operands[2]))"
2673 [(clobber (const_int 0))]
2675 arm_split_constant (IOR, SImode, curr_insn,
2676 INTVAL (operands[2]), operands[0], operands[1], 0);
2679 [(set_attr "length" "4,16")
2680 (set_attr "predicable" "yes")]
2683 (define_insn "*thumb1_iorsi3"
2684 [(set (match_operand:SI 0 "register_operand" "=l")
2685 (ior:SI (match_operand:SI 1 "register_operand" "%0")
2686 (match_operand:SI 2 "register_operand" "l")))]
2689 [(set_attr "length" "2")]
2693 [(match_scratch:SI 3 "r")
2694 (set (match_operand:SI 0 "arm_general_register_operand" "")
2695 (ior:SI (match_operand:SI 1 "arm_general_register_operand" "")
2696 (match_operand:SI 2 "const_int_operand" "")))]
2698 && !const_ok_for_arm (INTVAL (operands[2]))
2699 && const_ok_for_arm (~INTVAL (operands[2]))"
2700 [(set (match_dup 3) (match_dup 2))
2701 (set (match_dup 0) (ior:SI (match_dup 1) (match_dup 3)))]
2705 (define_insn "*iorsi3_compare0"
2706 [(set (reg:CC_NOOV CC_REGNUM)
2707 (compare:CC_NOOV (ior:SI (match_operand:SI 1 "s_register_operand" "%r")
2708 (match_operand:SI 2 "arm_rhs_operand" "rI"))
2710 (set (match_operand:SI 0 "s_register_operand" "=r")
2711 (ior:SI (match_dup 1) (match_dup 2)))]
2713 "orr%.\\t%0, %1, %2"
2714 [(set_attr "conds" "set")]
2717 (define_insn "*iorsi3_compare0_scratch"
2718 [(set (reg:CC_NOOV CC_REGNUM)
2719 (compare:CC_NOOV (ior:SI (match_operand:SI 1 "s_register_operand" "%r")
2720 (match_operand:SI 2 "arm_rhs_operand" "rI"))
2722 (clobber (match_scratch:SI 0 "=r"))]
2724 "orr%.\\t%0, %1, %2"
2725 [(set_attr "conds" "set")]
2728 (define_insn "xordi3"
2729 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2730 (xor:DI (match_operand:DI 1 "s_register_operand" "%0,r")
2731 (match_operand:DI 2 "s_register_operand" "r,r")))]
2732 "TARGET_32BIT && !TARGET_IWMMXT"
2734 [(set_attr "length" "8")
2735 (set_attr "predicable" "yes")]
2738 (define_insn "*xordi_zesidi_di"
2739 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2740 (xor:DI (zero_extend:DI
2741 (match_operand:SI 2 "s_register_operand" "r,r"))
2742 (match_operand:DI 1 "s_register_operand" "0,?r")))]
2745 eor%?\\t%Q0, %Q1, %2
2747 [(set_attr "length" "4,8")
2748 (set_attr "predicable" "yes")]
2751 (define_insn "*xordi_sesidi_di"
2752 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2753 (xor:DI (sign_extend:DI
2754 (match_operand:SI 2 "s_register_operand" "r,r"))
2755 (match_operand:DI 1 "s_register_operand" "0,r")))]
2758 [(set_attr "length" "8")
2759 (set_attr "predicable" "yes")]
2762 (define_expand "xorsi3"
2763 [(set (match_operand:SI 0 "s_register_operand" "")
2764 (xor:SI (match_operand:SI 1 "s_register_operand" "")
2765 (match_operand:SI 2 "reg_or_int_operand" "")))]
2767 "if (GET_CODE (operands[2]) == CONST_INT)
2771 arm_split_constant (XOR, SImode, NULL_RTX,
2772 INTVAL (operands[2]), operands[0], operands[1],
2773 optimize && can_create_pseudo_p ());
2776 else /* TARGET_THUMB1 */
2778 rtx tmp = force_reg (SImode, operands[2]);
2779 if (rtx_equal_p (operands[0], operands[1]))
2783 operands[2] = operands[1];
2790 (define_insn "*arm_xorsi3"
2791 [(set (match_operand:SI 0 "s_register_operand" "=r")
2792 (xor:SI (match_operand:SI 1 "s_register_operand" "r")
2793 (match_operand:SI 2 "arm_rhs_operand" "rI")))]
2795 "eor%?\\t%0, %1, %2"
2796 [(set_attr "predicable" "yes")]
2799 (define_insn "*thumb1_xorsi3"
2800 [(set (match_operand:SI 0 "register_operand" "=l")
2801 (xor:SI (match_operand:SI 1 "register_operand" "%0")
2802 (match_operand:SI 2 "register_operand" "l")))]
2805 [(set_attr "length" "2")]
2808 (define_insn "*xorsi3_compare0"
2809 [(set (reg:CC_NOOV CC_REGNUM)
2810 (compare:CC_NOOV (xor:SI (match_operand:SI 1 "s_register_operand" "r")
2811 (match_operand:SI 2 "arm_rhs_operand" "rI"))
2813 (set (match_operand:SI 0 "s_register_operand" "=r")
2814 (xor:SI (match_dup 1) (match_dup 2)))]
2816 "eor%.\\t%0, %1, %2"
2817 [(set_attr "conds" "set")]
2820 (define_insn "*xorsi3_compare0_scratch"
2821 [(set (reg:CC_NOOV CC_REGNUM)
2822 (compare:CC_NOOV (xor:SI (match_operand:SI 0 "s_register_operand" "r")
2823 (match_operand:SI 1 "arm_rhs_operand" "rI"))
2827 [(set_attr "conds" "set")]
2830 ; By splitting (IOR (AND (NOT A) (NOT B)) C) as D = AND (IOR A B) (NOT C),
2831 ; (NOT D) we can sometimes merge the final NOT into one of the following
2835 [(set (match_operand:SI 0 "s_register_operand" "")
2836 (ior:SI (and:SI (not:SI (match_operand:SI 1 "s_register_operand" ""))
2837 (not:SI (match_operand:SI 2 "arm_rhs_operand" "")))
2838 (match_operand:SI 3 "arm_rhs_operand" "")))
2839 (clobber (match_operand:SI 4 "s_register_operand" ""))]
2841 [(set (match_dup 4) (and:SI (ior:SI (match_dup 1) (match_dup 2))
2842 (not:SI (match_dup 3))))
2843 (set (match_dup 0) (not:SI (match_dup 4)))]
2847 (define_insn "*andsi_iorsi3_notsi"
2848 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
2849 (and:SI (ior:SI (match_operand:SI 1 "s_register_operand" "%0,r,r")
2850 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI"))
2851 (not:SI (match_operand:SI 3 "arm_rhs_operand" "rI,rI,rI"))))]
2853 "orr%?\\t%0, %1, %2\;bic%?\\t%0, %0, %3"
2854 [(set_attr "length" "8")
2855 (set_attr "ce_count" "2")
2856 (set_attr "predicable" "yes")]
2859 ; ??? Are these four splitters still beneficial when the Thumb-2 bitfield
2860 ; insns are available?
2862 [(set (match_operand:SI 0 "s_register_operand" "")
2863 (match_operator:SI 1 "logical_binary_operator"
2864 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2865 (match_operand:SI 3 "const_int_operand" "")
2866 (match_operand:SI 4 "const_int_operand" ""))
2867 (match_operator:SI 9 "logical_binary_operator"
2868 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2869 (match_operand:SI 6 "const_int_operand" ""))
2870 (match_operand:SI 7 "s_register_operand" "")])]))
2871 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2873 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2874 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2877 [(ashift:SI (match_dup 2) (match_dup 4))
2881 [(lshiftrt:SI (match_dup 8) (match_dup 6))
2884 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2888 [(set (match_operand:SI 0 "s_register_operand" "")
2889 (match_operator:SI 1 "logical_binary_operator"
2890 [(match_operator:SI 9 "logical_binary_operator"
2891 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2892 (match_operand:SI 6 "const_int_operand" ""))
2893 (match_operand:SI 7 "s_register_operand" "")])
2894 (zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2895 (match_operand:SI 3 "const_int_operand" "")
2896 (match_operand:SI 4 "const_int_operand" ""))]))
2897 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2899 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2900 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2903 [(ashift:SI (match_dup 2) (match_dup 4))
2907 [(lshiftrt:SI (match_dup 8) (match_dup 6))
2910 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2914 [(set (match_operand:SI 0 "s_register_operand" "")
2915 (match_operator:SI 1 "logical_binary_operator"
2916 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2917 (match_operand:SI 3 "const_int_operand" "")
2918 (match_operand:SI 4 "const_int_operand" ""))
2919 (match_operator:SI 9 "logical_binary_operator"
2920 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2921 (match_operand:SI 6 "const_int_operand" ""))
2922 (match_operand:SI 7 "s_register_operand" "")])]))
2923 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2925 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2926 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2929 [(ashift:SI (match_dup 2) (match_dup 4))
2933 [(ashiftrt:SI (match_dup 8) (match_dup 6))
2936 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2940 [(set (match_operand:SI 0 "s_register_operand" "")
2941 (match_operator:SI 1 "logical_binary_operator"
2942 [(match_operator:SI 9 "logical_binary_operator"
2943 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2944 (match_operand:SI 6 "const_int_operand" ""))
2945 (match_operand:SI 7 "s_register_operand" "")])
2946 (sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2947 (match_operand:SI 3 "const_int_operand" "")
2948 (match_operand:SI 4 "const_int_operand" ""))]))
2949 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2951 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2952 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2955 [(ashift:SI (match_dup 2) (match_dup 4))
2959 [(ashiftrt:SI (match_dup 8) (match_dup 6))
2962 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2966 ;; Minimum and maximum insns
2968 (define_expand "smaxsi3"
2970 (set (match_operand:SI 0 "s_register_operand" "")
2971 (smax:SI (match_operand:SI 1 "s_register_operand" "")
2972 (match_operand:SI 2 "arm_rhs_operand" "")))
2973 (clobber (reg:CC CC_REGNUM))])]
2976 if (operands[2] == const0_rtx || operands[2] == constm1_rtx)
2978 /* No need for a clobber of the condition code register here. */
2979 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
2980 gen_rtx_SMAX (SImode, operands[1],
2986 (define_insn "*smax_0"
2987 [(set (match_operand:SI 0 "s_register_operand" "=r")
2988 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
2991 "bic%?\\t%0, %1, %1, asr #31"
2992 [(set_attr "predicable" "yes")]
2995 (define_insn "*smax_m1"
2996 [(set (match_operand:SI 0 "s_register_operand" "=r")
2997 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
3000 "orr%?\\t%0, %1, %1, asr #31"
3001 [(set_attr "predicable" "yes")]
3004 (define_insn "*arm_smax_insn"
3005 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3006 (smax:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
3007 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
3008 (clobber (reg:CC CC_REGNUM))]
3011 cmp\\t%1, %2\;movlt\\t%0, %2
3012 cmp\\t%1, %2\;movge\\t%0, %1\;movlt\\t%0, %2"
3013 [(set_attr "conds" "clob")
3014 (set_attr "length" "8,12")]
3017 (define_expand "sminsi3"
3019 (set (match_operand:SI 0 "s_register_operand" "")
3020 (smin:SI (match_operand:SI 1 "s_register_operand" "")
3021 (match_operand:SI 2 "arm_rhs_operand" "")))
3022 (clobber (reg:CC CC_REGNUM))])]
3025 if (operands[2] == const0_rtx)
3027 /* No need for a clobber of the condition code register here. */
3028 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
3029 gen_rtx_SMIN (SImode, operands[1],
3035 (define_insn "*smin_0"
3036 [(set (match_operand:SI 0 "s_register_operand" "=r")
3037 (smin:SI (match_operand:SI 1 "s_register_operand" "r")
3040 "and%?\\t%0, %1, %1, asr #31"
3041 [(set_attr "predicable" "yes")]
3044 (define_insn "*arm_smin_insn"
3045 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3046 (smin:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
3047 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
3048 (clobber (reg:CC CC_REGNUM))]
3051 cmp\\t%1, %2\;movge\\t%0, %2
3052 cmp\\t%1, %2\;movlt\\t%0, %1\;movge\\t%0, %2"
3053 [(set_attr "conds" "clob")
3054 (set_attr "length" "8,12")]
3057 (define_expand "umaxsi3"
3059 (set (match_operand:SI 0 "s_register_operand" "")
3060 (umax:SI (match_operand:SI 1 "s_register_operand" "")
3061 (match_operand:SI 2 "arm_rhs_operand" "")))
3062 (clobber (reg:CC CC_REGNUM))])]
3067 (define_insn "*arm_umaxsi3"
3068 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3069 (umax:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
3070 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
3071 (clobber (reg:CC CC_REGNUM))]
3074 cmp\\t%1, %2\;movcc\\t%0, %2
3075 cmp\\t%1, %2\;movcs\\t%0, %1
3076 cmp\\t%1, %2\;movcs\\t%0, %1\;movcc\\t%0, %2"
3077 [(set_attr "conds" "clob")
3078 (set_attr "length" "8,8,12")]
3081 (define_expand "uminsi3"
3083 (set (match_operand:SI 0 "s_register_operand" "")
3084 (umin:SI (match_operand:SI 1 "s_register_operand" "")
3085 (match_operand:SI 2 "arm_rhs_operand" "")))
3086 (clobber (reg:CC CC_REGNUM))])]
3091 (define_insn "*arm_uminsi3"
3092 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3093 (umin:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
3094 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
3095 (clobber (reg:CC CC_REGNUM))]
3098 cmp\\t%1, %2\;movcs\\t%0, %2
3099 cmp\\t%1, %2\;movcc\\t%0, %1
3100 cmp\\t%1, %2\;movcc\\t%0, %1\;movcs\\t%0, %2"
3101 [(set_attr "conds" "clob")
3102 (set_attr "length" "8,8,12")]
3105 (define_insn "*store_minmaxsi"
3106 [(set (match_operand:SI 0 "memory_operand" "=m")
3107 (match_operator:SI 3 "minmax_operator"
3108 [(match_operand:SI 1 "s_register_operand" "r")
3109 (match_operand:SI 2 "s_register_operand" "r")]))
3110 (clobber (reg:CC CC_REGNUM))]
3113 operands[3] = gen_rtx_fmt_ee (minmax_code (operands[3]), SImode,
3114 operands[1], operands[2]);
3115 output_asm_insn (\"cmp\\t%1, %2\", operands);
3117 output_asm_insn (\"ite\t%d3\", operands);
3118 output_asm_insn (\"str%d3\\t%1, %0\", operands);
3119 output_asm_insn (\"str%D3\\t%2, %0\", operands);
3122 [(set_attr "conds" "clob")
3123 (set (attr "length")
3124 (if_then_else (eq_attr "is_thumb" "yes")
3127 (set_attr "type" "store1")]
3130 ; Reject the frame pointer in operand[1], since reloading this after
3131 ; it has been eliminated can cause carnage.
3132 (define_insn "*minmax_arithsi"
3133 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3134 (match_operator:SI 4 "shiftable_operator"
3135 [(match_operator:SI 5 "minmax_operator"
3136 [(match_operand:SI 2 "s_register_operand" "r,r")
3137 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
3138 (match_operand:SI 1 "s_register_operand" "0,?r")]))
3139 (clobber (reg:CC CC_REGNUM))]
3140 "TARGET_32BIT && !arm_eliminable_register (operands[1])"
3143 enum rtx_code code = GET_CODE (operands[4]);
3146 if (which_alternative != 0 || operands[3] != const0_rtx
3147 || (code != PLUS && code != MINUS && code != IOR && code != XOR))
3152 operands[5] = gen_rtx_fmt_ee (minmax_code (operands[5]), SImode,
3153 operands[2], operands[3]);
3154 output_asm_insn (\"cmp\\t%2, %3\", operands);
3158 output_asm_insn (\"ite\\t%d5\", operands);
3160 output_asm_insn (\"it\\t%d5\", operands);
3162 output_asm_insn (\"%i4%d5\\t%0, %1, %2\", operands);
3164 output_asm_insn (\"%i4%D5\\t%0, %1, %3\", operands);
3167 [(set_attr "conds" "clob")
3168 (set (attr "length")
3169 (if_then_else (eq_attr "is_thumb" "yes")
3175 ;; Shift and rotation insns
3177 (define_expand "ashldi3"
3178 [(set (match_operand:DI 0 "s_register_operand" "")
3179 (ashift:DI (match_operand:DI 1 "s_register_operand" "")
3180 (match_operand:SI 2 "reg_or_int_operand" "")))]
3183 if (GET_CODE (operands[2]) == CONST_INT)
3185 if ((HOST_WIDE_INT) INTVAL (operands[2]) == 1)
3187 emit_insn (gen_arm_ashldi3_1bit (operands[0], operands[1]));
3190 /* Ideally we shouldn't fail here if we could know that operands[1]
3191 ends up already living in an iwmmxt register. Otherwise it's
3192 cheaper to have the alternate code being generated than moving
3193 values to iwmmxt regs and back. */
3196 else if (!TARGET_REALLY_IWMMXT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK))
3201 (define_insn "arm_ashldi3_1bit"
3202 [(set (match_operand:DI 0 "s_register_operand" "=&r,r")
3203 (ashift:DI (match_operand:DI 1 "s_register_operand" "0,r")
3205 (clobber (reg:CC CC_REGNUM))]
3207 "movs\\t%Q0, %Q1, asl #1\;adc\\t%R0, %R1, %R1"
3208 [(set_attr "conds" "clob")
3209 (set_attr "length" "8")]
3212 (define_expand "ashlsi3"
3213 [(set (match_operand:SI 0 "s_register_operand" "")
3214 (ashift:SI (match_operand:SI 1 "s_register_operand" "")
3215 (match_operand:SI 2 "arm_rhs_operand" "")))]
3218 if (GET_CODE (operands[2]) == CONST_INT
3219 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3221 emit_insn (gen_movsi (operands[0], const0_rtx));
3227 (define_insn "*thumb1_ashlsi3"
3228 [(set (match_operand:SI 0 "register_operand" "=l,l")
3229 (ashift:SI (match_operand:SI 1 "register_operand" "l,0")
3230 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
3233 [(set_attr "length" "2")]
3236 (define_expand "ashrdi3"
3237 [(set (match_operand:DI 0 "s_register_operand" "")
3238 (ashiftrt:DI (match_operand:DI 1 "s_register_operand" "")
3239 (match_operand:SI 2 "reg_or_int_operand" "")))]
3242 if (GET_CODE (operands[2]) == CONST_INT)
3244 if ((HOST_WIDE_INT) INTVAL (operands[2]) == 1)
3246 emit_insn (gen_arm_ashrdi3_1bit (operands[0], operands[1]));
3249 /* Ideally we shouldn't fail here if we could know that operands[1]
3250 ends up already living in an iwmmxt register. Otherwise it's
3251 cheaper to have the alternate code being generated than moving
3252 values to iwmmxt regs and back. */
3255 else if (!TARGET_REALLY_IWMMXT)
3260 (define_insn "arm_ashrdi3_1bit"
3261 [(set (match_operand:DI 0 "s_register_operand" "=&r,r")
3262 (ashiftrt:DI (match_operand:DI 1 "s_register_operand" "0,r")
3264 (clobber (reg:CC CC_REGNUM))]
3266 "movs\\t%R0, %R1, asr #1\;mov\\t%Q0, %Q1, rrx"
3267 [(set_attr "conds" "clob")
3268 (set_attr "length" "8")]
3271 (define_expand "ashrsi3"
3272 [(set (match_operand:SI 0 "s_register_operand" "")
3273 (ashiftrt:SI (match_operand:SI 1 "s_register_operand" "")
3274 (match_operand:SI 2 "arm_rhs_operand" "")))]
3277 if (GET_CODE (operands[2]) == CONST_INT
3278 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3279 operands[2] = GEN_INT (31);
3283 (define_insn "*thumb1_ashrsi3"
3284 [(set (match_operand:SI 0 "register_operand" "=l,l")
3285 (ashiftrt:SI (match_operand:SI 1 "register_operand" "l,0")
3286 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
3289 [(set_attr "length" "2")]
3292 (define_expand "lshrdi3"
3293 [(set (match_operand:DI 0 "s_register_operand" "")
3294 (lshiftrt:DI (match_operand:DI 1 "s_register_operand" "")
3295 (match_operand:SI 2 "reg_or_int_operand" "")))]
3298 if (GET_CODE (operands[2]) == CONST_INT)
3300 if ((HOST_WIDE_INT) INTVAL (operands[2]) == 1)
3302 emit_insn (gen_arm_lshrdi3_1bit (operands[0], operands[1]));
3305 /* Ideally we shouldn't fail here if we could know that operands[1]
3306 ends up already living in an iwmmxt register. Otherwise it's
3307 cheaper to have the alternate code being generated than moving
3308 values to iwmmxt regs and back. */
3311 else if (!TARGET_REALLY_IWMMXT)
3316 (define_insn "arm_lshrdi3_1bit"
3317 [(set (match_operand:DI 0 "s_register_operand" "=&r,r")
3318 (lshiftrt:DI (match_operand:DI 1 "s_register_operand" "0,r")
3320 (clobber (reg:CC CC_REGNUM))]
3322 "movs\\t%R0, %R1, lsr #1\;mov\\t%Q0, %Q1, rrx"
3323 [(set_attr "conds" "clob")
3324 (set_attr "length" "8")]
3327 (define_expand "lshrsi3"
3328 [(set (match_operand:SI 0 "s_register_operand" "")
3329 (lshiftrt:SI (match_operand:SI 1 "s_register_operand" "")
3330 (match_operand:SI 2 "arm_rhs_operand" "")))]
3333 if (GET_CODE (operands[2]) == CONST_INT
3334 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3336 emit_insn (gen_movsi (operands[0], const0_rtx));
3342 (define_insn "*thumb1_lshrsi3"
3343 [(set (match_operand:SI 0 "register_operand" "=l,l")
3344 (lshiftrt:SI (match_operand:SI 1 "register_operand" "l,0")
3345 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
3348 [(set_attr "length" "2")]
3351 (define_expand "rotlsi3"
3352 [(set (match_operand:SI 0 "s_register_operand" "")
3353 (rotatert:SI (match_operand:SI 1 "s_register_operand" "")
3354 (match_operand:SI 2 "reg_or_int_operand" "")))]
3357 if (GET_CODE (operands[2]) == CONST_INT)
3358 operands[2] = GEN_INT ((32 - INTVAL (operands[2])) % 32);
3361 rtx reg = gen_reg_rtx (SImode);
3362 emit_insn (gen_subsi3 (reg, GEN_INT (32), operands[2]));
3368 (define_expand "rotrsi3"
3369 [(set (match_operand:SI 0 "s_register_operand" "")
3370 (rotatert:SI (match_operand:SI 1 "s_register_operand" "")
3371 (match_operand:SI 2 "arm_rhs_operand" "")))]
3376 if (GET_CODE (operands[2]) == CONST_INT
3377 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3378 operands[2] = GEN_INT (INTVAL (operands[2]) % 32);
3380 else /* TARGET_THUMB1 */
3382 if (GET_CODE (operands [2]) == CONST_INT)
3383 operands [2] = force_reg (SImode, operands[2]);
3388 (define_insn "*thumb1_rotrsi3"
3389 [(set (match_operand:SI 0 "register_operand" "=l")
3390 (rotatert:SI (match_operand:SI 1 "register_operand" "0")
3391 (match_operand:SI 2 "register_operand" "l")))]
3394 [(set_attr "length" "2")]
3397 (define_insn "*arm_shiftsi3"
3398 [(set (match_operand:SI 0 "s_register_operand" "=r")
3399 (match_operator:SI 3 "shift_operator"
3400 [(match_operand:SI 1 "s_register_operand" "r")
3401 (match_operand:SI 2 "reg_or_int_operand" "rM")]))]
3403 "* return arm_output_shift(operands, 0);"
3404 [(set_attr "predicable" "yes")
3405 (set_attr "shift" "1")
3406 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3407 (const_string "alu_shift")
3408 (const_string "alu_shift_reg")))]
3411 (define_insn "*shiftsi3_compare0"
3412 [(set (reg:CC_NOOV CC_REGNUM)
3413 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
3414 [(match_operand:SI 1 "s_register_operand" "r")
3415 (match_operand:SI 2 "arm_rhs_operand" "rM")])
3417 (set (match_operand:SI 0 "s_register_operand" "=r")
3418 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))]
3420 "* return arm_output_shift(operands, 1);"
3421 [(set_attr "conds" "set")
3422 (set_attr "shift" "1")
3423 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3424 (const_string "alu_shift")
3425 (const_string "alu_shift_reg")))]
3428 (define_insn "*shiftsi3_compare0_scratch"
3429 [(set (reg:CC_NOOV CC_REGNUM)
3430 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
3431 [(match_operand:SI 1 "s_register_operand" "r")
3432 (match_operand:SI 2 "arm_rhs_operand" "rM")])
3434 (clobber (match_scratch:SI 0 "=r"))]
3436 "* return arm_output_shift(operands, 1);"
3437 [(set_attr "conds" "set")
3438 (set_attr "shift" "1")]
3441 (define_insn "*arm_notsi_shiftsi"
3442 [(set (match_operand:SI 0 "s_register_operand" "=r")
3443 (not:SI (match_operator:SI 3 "shift_operator"
3444 [(match_operand:SI 1 "s_register_operand" "r")
3445 (match_operand:SI 2 "arm_rhs_operand" "rM")])))]
3448 [(set_attr "predicable" "yes")
3449 (set_attr "shift" "1")
3450 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3451 (const_string "alu_shift")
3452 (const_string "alu_shift_reg")))]
3455 (define_insn "*arm_notsi_shiftsi_compare0"
3456 [(set (reg:CC_NOOV CC_REGNUM)
3457 (compare:CC_NOOV (not:SI (match_operator:SI 3 "shift_operator"
3458 [(match_operand:SI 1 "s_register_operand" "r")
3459 (match_operand:SI 2 "arm_rhs_operand" "rM")]))
3461 (set (match_operand:SI 0 "s_register_operand" "=r")
3462 (not:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])))]
3465 [(set_attr "conds" "set")
3466 (set_attr "shift" "1")
3467 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3468 (const_string "alu_shift")
3469 (const_string "alu_shift_reg")))]
3472 (define_insn "*arm_not_shiftsi_compare0_scratch"
3473 [(set (reg:CC_NOOV CC_REGNUM)
3474 (compare:CC_NOOV (not:SI (match_operator:SI 3 "shift_operator"
3475 [(match_operand:SI 1 "s_register_operand" "r")
3476 (match_operand:SI 2 "arm_rhs_operand" "rM")]))
3478 (clobber (match_scratch:SI 0 "=r"))]
3481 [(set_attr "conds" "set")
3482 (set_attr "shift" "1")
3483 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3484 (const_string "alu_shift")
3485 (const_string "alu_shift_reg")))]
3488 ;; We don't really have extzv, but defining this using shifts helps
3489 ;; to reduce register pressure later on.
3491 (define_expand "extzv"
3493 (ashift:SI (match_operand:SI 1 "register_operand" "")
3494 (match_operand:SI 2 "const_int_operand" "")))
3495 (set (match_operand:SI 0 "register_operand" "")
3496 (lshiftrt:SI (match_dup 4)
3497 (match_operand:SI 3 "const_int_operand" "")))]
3498 "TARGET_THUMB1 || arm_arch_thumb2"
3501 HOST_WIDE_INT lshift = 32 - INTVAL (operands[2]) - INTVAL (operands[3]);
3502 HOST_WIDE_INT rshift = 32 - INTVAL (operands[2]);
3504 if (arm_arch_thumb2)
3506 emit_insn (gen_extzv_t2 (operands[0], operands[1], operands[2],
3511 operands[3] = GEN_INT (rshift);
3515 emit_insn (gen_lshrsi3 (operands[0], operands[1], operands[3]));
3519 operands[2] = GEN_INT (lshift);
3520 operands[4] = gen_reg_rtx (SImode);
3525 [(set (match_operand:SI 0 "s_register_operand" "=r")
3526 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
3527 (match_operand:SI 2 "const_int_operand" "M")
3528 (match_operand:SI 3 "const_int_operand" "M")))]
3530 "sbfx%?\t%0, %1, %3, %2"
3531 [(set_attr "length" "4")
3532 (set_attr "predicable" "yes")]
3535 (define_insn "extzv_t2"
3536 [(set (match_operand:SI 0 "s_register_operand" "=r")
3537 (zero_extract:SI (match_operand:SI 1 "s_register_operand" "r")
3538 (match_operand:SI 2 "const_int_operand" "M")
3539 (match_operand:SI 3 "const_int_operand" "M")))]
3541 "ubfx%?\t%0, %1, %3, %2"
3542 [(set_attr "length" "4")
3543 (set_attr "predicable" "yes")]
3547 ;; Unary arithmetic insns
3549 (define_expand "negdi2"
3551 [(set (match_operand:DI 0 "s_register_operand" "")
3552 (neg:DI (match_operand:DI 1 "s_register_operand" "")))
3553 (clobber (reg:CC CC_REGNUM))])]
3558 ;; The constraints here are to prevent a *partial* overlap (where %Q0 == %R1).
3559 ;; The first alternative allows the common case of a *full* overlap.
3560 (define_insn "*arm_negdi2"
3561 [(set (match_operand:DI 0 "s_register_operand" "=&r,r")
3562 (neg:DI (match_operand:DI 1 "s_register_operand" "0,r")))
3563 (clobber (reg:CC CC_REGNUM))]
3565 "rsbs\\t%Q0, %Q1, #0\;rsc\\t%R0, %R1, #0"
3566 [(set_attr "conds" "clob")
3567 (set_attr "length" "8")]
3570 (define_insn "*thumb1_negdi2"
3571 [(set (match_operand:DI 0 "register_operand" "=&l")
3572 (neg:DI (match_operand:DI 1 "register_operand" "l")))
3573 (clobber (reg:CC CC_REGNUM))]
3575 "mov\\t%R0, #0\;neg\\t%Q0, %Q1\;sbc\\t%R0, %R1"
3576 [(set_attr "length" "6")]
3579 (define_expand "negsi2"
3580 [(set (match_operand:SI 0 "s_register_operand" "")
3581 (neg:SI (match_operand:SI 1 "s_register_operand" "")))]
3586 (define_insn "*arm_negsi2"
3587 [(set (match_operand:SI 0 "s_register_operand" "=r")
3588 (neg:SI (match_operand:SI 1 "s_register_operand" "r")))]
3590 "rsb%?\\t%0, %1, #0"
3591 [(set_attr "predicable" "yes")]
3594 (define_insn "*thumb1_negsi2"
3595 [(set (match_operand:SI 0 "register_operand" "=l")
3596 (neg:SI (match_operand:SI 1 "register_operand" "l")))]
3599 [(set_attr "length" "2")]
3602 (define_expand "negsf2"
3603 [(set (match_operand:SF 0 "s_register_operand" "")
3604 (neg:SF (match_operand:SF 1 "s_register_operand" "")))]
3605 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
3609 (define_expand "negdf2"
3610 [(set (match_operand:DF 0 "s_register_operand" "")
3611 (neg:DF (match_operand:DF 1 "s_register_operand" "")))]
3612 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP_DOUBLE)"
3615 ;; abssi2 doesn't really clobber the condition codes if a different register
3616 ;; is being set. To keep things simple, assume during rtl manipulations that
3617 ;; it does, but tell the final scan operator the truth. Similarly for
3620 (define_expand "abssi2"
3622 [(set (match_operand:SI 0 "s_register_operand" "")
3623 (abs:SI (match_operand:SI 1 "s_register_operand" "")))
3624 (clobber (match_dup 2))])]
3628 operands[2] = gen_rtx_SCRATCH (SImode);
3630 operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
3633 (define_insn "*arm_abssi2"
3634 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
3635 (abs:SI (match_operand:SI 1 "s_register_operand" "0,r")))
3636 (clobber (reg:CC CC_REGNUM))]
3639 cmp\\t%0, #0\;rsblt\\t%0, %0, #0
3640 eor%?\\t%0, %1, %1, asr #31\;sub%?\\t%0, %0, %1, asr #31"
3641 [(set_attr "conds" "clob,*")
3642 (set_attr "shift" "1")
3643 ;; predicable can't be set based on the variant, so left as no
3644 (set_attr "length" "8")]
3647 (define_insn_and_split "*thumb1_abssi2"
3648 [(set (match_operand:SI 0 "s_register_operand" "=l")
3649 (abs:SI (match_operand:SI 1 "s_register_operand" "l")))
3650 (clobber (match_scratch:SI 2 "=&l"))]
3653 "TARGET_THUMB1 && reload_completed"
3654 [(set (match_dup 2) (ashiftrt:SI (match_dup 1) (const_int 31)))
3655 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))
3656 (set (match_dup 0) (xor:SI (match_dup 0) (match_dup 2)))]
3658 [(set_attr "length" "6")]
3661 (define_insn "*arm_neg_abssi2"
3662 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
3663 (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "0,r"))))
3664 (clobber (reg:CC CC_REGNUM))]
3667 cmp\\t%0, #0\;rsbgt\\t%0, %0, #0
3668 eor%?\\t%0, %1, %1, asr #31\;rsb%?\\t%0, %0, %1, asr #31"
3669 [(set_attr "conds" "clob,*")
3670 (set_attr "shift" "1")
3671 ;; predicable can't be set based on the variant, so left as no
3672 (set_attr "length" "8")]
3675 (define_insn_and_split "*thumb1_neg_abssi2"
3676 [(set (match_operand:SI 0 "s_register_operand" "=l")
3677 (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "l"))))
3678 (clobber (match_scratch:SI 2 "=&l"))]
3681 "TARGET_THUMB1 && reload_completed"
3682 [(set (match_dup 2) (ashiftrt:SI (match_dup 1) (const_int 31)))
3683 (set (match_dup 0) (minus:SI (match_dup 2) (match_dup 1)))
3684 (set (match_dup 0) (xor:SI (match_dup 0) (match_dup 2)))]
3686 [(set_attr "length" "6")]
3689 (define_expand "abssf2"
3690 [(set (match_operand:SF 0 "s_register_operand" "")
3691 (abs:SF (match_operand:SF 1 "s_register_operand" "")))]
3692 "TARGET_32BIT && TARGET_HARD_FLOAT"
3695 (define_expand "absdf2"
3696 [(set (match_operand:DF 0 "s_register_operand" "")
3697 (abs:DF (match_operand:DF 1 "s_register_operand" "")))]
3698 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
3701 (define_expand "sqrtsf2"
3702 [(set (match_operand:SF 0 "s_register_operand" "")
3703 (sqrt:SF (match_operand:SF 1 "s_register_operand" "")))]
3704 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
3707 (define_expand "sqrtdf2"
3708 [(set (match_operand:DF 0 "s_register_operand" "")
3709 (sqrt:DF (match_operand:DF 1 "s_register_operand" "")))]
3710 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP_DOUBLE)"
3713 (define_insn_and_split "one_cmpldi2"
3714 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
3715 (not:DI (match_operand:DI 1 "s_register_operand" "0,r")))]
3718 "TARGET_32BIT && reload_completed"
3719 [(set (match_dup 0) (not:SI (match_dup 1)))
3720 (set (match_dup 2) (not:SI (match_dup 3)))]
3723 operands[2] = gen_highpart (SImode, operands[0]);
3724 operands[0] = gen_lowpart (SImode, operands[0]);
3725 operands[3] = gen_highpart (SImode, operands[1]);
3726 operands[1] = gen_lowpart (SImode, operands[1]);
3728 [(set_attr "length" "8")
3729 (set_attr "predicable" "yes")]
3732 (define_expand "one_cmplsi2"
3733 [(set (match_operand:SI 0 "s_register_operand" "")
3734 (not:SI (match_operand:SI 1 "s_register_operand" "")))]
3739 (define_insn "*arm_one_cmplsi2"
3740 [(set (match_operand:SI 0 "s_register_operand" "=r")
3741 (not:SI (match_operand:SI 1 "s_register_operand" "r")))]
3744 [(set_attr "predicable" "yes")]
3747 (define_insn "*thumb1_one_cmplsi2"
3748 [(set (match_operand:SI 0 "register_operand" "=l")
3749 (not:SI (match_operand:SI 1 "register_operand" "l")))]
3752 [(set_attr "length" "2")]
3755 (define_insn "*notsi_compare0"
3756 [(set (reg:CC_NOOV CC_REGNUM)
3757 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
3759 (set (match_operand:SI 0 "s_register_operand" "=r")
3760 (not:SI (match_dup 1)))]
3763 [(set_attr "conds" "set")]
3766 (define_insn "*notsi_compare0_scratch"
3767 [(set (reg:CC_NOOV CC_REGNUM)
3768 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
3770 (clobber (match_scratch:SI 0 "=r"))]
3773 [(set_attr "conds" "set")]
3776 ;; Fixed <--> Floating conversion insns
3778 (define_expand "floatsihf2"
3779 [(set (match_operand:HF 0 "general_operand" "")
3780 (float:HF (match_operand:SI 1 "general_operand" "")))]
3784 rtx op1 = gen_reg_rtx (SFmode);
3785 expand_float (op1, operands[1], 0);
3786 op1 = convert_to_mode (HFmode, op1, 0);
3787 emit_move_insn (operands[0], op1);
3792 (define_expand "floatdihf2"
3793 [(set (match_operand:HF 0 "general_operand" "")
3794 (float:HF (match_operand:DI 1 "general_operand" "")))]
3798 rtx op1 = gen_reg_rtx (SFmode);
3799 expand_float (op1, operands[1], 0);
3800 op1 = convert_to_mode (HFmode, op1, 0);
3801 emit_move_insn (operands[0], op1);
3806 (define_expand "floatsisf2"
3807 [(set (match_operand:SF 0 "s_register_operand" "")
3808 (float:SF (match_operand:SI 1 "s_register_operand" "")))]
3809 "TARGET_32BIT && TARGET_HARD_FLOAT"
3811 if (TARGET_MAVERICK)
3813 emit_insn (gen_cirrus_floatsisf2 (operands[0], operands[1]));
3818 (define_expand "floatsidf2"
3819 [(set (match_operand:DF 0 "s_register_operand" "")
3820 (float:DF (match_operand:SI 1 "s_register_operand" "")))]
3821 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
3823 if (TARGET_MAVERICK)
3825 emit_insn (gen_cirrus_floatsidf2 (operands[0], operands[1]));
3830 (define_expand "fix_trunchfsi2"
3831 [(set (match_operand:SI 0 "general_operand" "")
3832 (fix:SI (fix:HF (match_operand:HF 1 "general_operand" ""))))]
3836 rtx op1 = convert_to_mode (SFmode, operands[1], 0);
3837 expand_fix (operands[0], op1, 0);
3842 (define_expand "fix_trunchfdi2"
3843 [(set (match_operand:DI 0 "general_operand" "")
3844 (fix:DI (fix:HF (match_operand:HF 1 "general_operand" ""))))]
3848 rtx op1 = convert_to_mode (SFmode, operands[1], 0);
3849 expand_fix (operands[0], op1, 0);
3854 (define_expand "fix_truncsfsi2"
3855 [(set (match_operand:SI 0 "s_register_operand" "")
3856 (fix:SI (fix:SF (match_operand:SF 1 "s_register_operand" ""))))]
3857 "TARGET_32BIT && TARGET_HARD_FLOAT"
3859 if (TARGET_MAVERICK)
3861 if (!cirrus_fp_register (operands[0], SImode))
3862 operands[0] = force_reg (SImode, operands[0]);
3863 if (!cirrus_fp_register (operands[1], SFmode))
3864 operands[1] = force_reg (SFmode, operands[0]);
3865 emit_insn (gen_cirrus_truncsfsi2 (operands[0], operands[1]));
3870 (define_expand "fix_truncdfsi2"
3871 [(set (match_operand:SI 0 "s_register_operand" "")
3872 (fix:SI (fix:DF (match_operand:DF 1 "s_register_operand" ""))))]
3873 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
3875 if (TARGET_MAVERICK)
3877 if (!cirrus_fp_register (operands[1], DFmode))
3878 operands[1] = force_reg (DFmode, operands[0]);
3879 emit_insn (gen_cirrus_truncdfsi2 (operands[0], operands[1]));
3886 (define_expand "truncdfsf2"
3887 [(set (match_operand:SF 0 "s_register_operand" "")
3889 (match_operand:DF 1 "s_register_operand" "")))]
3890 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
3894 /* DFmode -> HFmode conversions have to go through SFmode. */
3895 (define_expand "truncdfhf2"
3896 [(set (match_operand:HF 0 "general_operand" "")
3898 (match_operand:DF 1 "general_operand" "")))]
3903 op1 = convert_to_mode (SFmode, operands[1], 0);
3904 op1 = convert_to_mode (HFmode, op1, 0);
3905 emit_move_insn (operands[0], op1);
3910 ;; Zero and sign extension instructions.
3912 (define_expand "zero_extendsidi2"
3913 [(set (match_operand:DI 0 "s_register_operand" "")
3914 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "")))]
3919 (define_insn "*arm_zero_extendsidi2"
3920 [(set (match_operand:DI 0 "s_register_operand" "=r")
3921 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r")))]
3924 if (REGNO (operands[1])
3925 != REGNO (operands[0]) + (WORDS_BIG_ENDIAN ? 1 : 0))
3926 output_asm_insn (\"mov%?\\t%Q0, %1\", operands);
3927 return \"mov%?\\t%R0, #0\";
3929 [(set_attr "length" "8")
3930 (set_attr "predicable" "yes")]
3933 (define_expand "zero_extendqidi2"
3934 [(set (match_operand:DI 0 "s_register_operand" "")
3935 (zero_extend:DI (match_operand:QI 1 "nonimmediate_operand" "")))]
3940 (define_insn "*arm_zero_extendqidi2"
3941 [(set (match_operand:DI 0 "s_register_operand" "=r,r")
3942 (zero_extend:DI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
3945 and%?\\t%Q0, %1, #255\;mov%?\\t%R0, #0
3946 ldr%(b%)\\t%Q0, %1\;mov%?\\t%R0, #0"
3947 [(set_attr "length" "8")
3948 (set_attr "predicable" "yes")
3949 (set_attr "type" "*,load_byte")
3950 (set_attr "pool_range" "*,4092")
3951 (set_attr "neg_pool_range" "*,4084")]
3954 (define_expand "extendsidi2"
3955 [(set (match_operand:DI 0 "s_register_operand" "")
3956 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "")))]
3961 (define_insn "*arm_extendsidi2"
3962 [(set (match_operand:DI 0 "s_register_operand" "=r")
3963 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r")))]
3966 if (REGNO (operands[1])
3967 != REGNO (operands[0]) + (WORDS_BIG_ENDIAN ? 1 : 0))
3968 output_asm_insn (\"mov%?\\t%Q0, %1\", operands);
3969 return \"mov%?\\t%R0, %Q0, asr #31\";
3971 [(set_attr "length" "8")
3972 (set_attr "shift" "1")
3973 (set_attr "predicable" "yes")]
3976 (define_expand "zero_extendhisi2"
3978 (ashift:SI (match_operand:HI 1 "nonimmediate_operand" "")
3980 (set (match_operand:SI 0 "s_register_operand" "")
3981 (lshiftrt:SI (match_dup 2) (const_int 16)))]
3985 if ((TARGET_THUMB1 || arm_arch4) && GET_CODE (operands[1]) == MEM)
3987 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
3988 gen_rtx_ZERO_EXTEND (SImode, operands[1])));
3992 if (TARGET_ARM && GET_CODE (operands[1]) == MEM)
3994 emit_insn (gen_movhi_bytes (operands[0], operands[1]));
3998 if (!s_register_operand (operands[1], HImode))
3999 operands[1] = copy_to_mode_reg (HImode, operands[1]);
4003 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
4004 gen_rtx_ZERO_EXTEND (SImode, operands[1])));
4008 operands[1] = gen_lowpart (SImode, operands[1]);
4009 operands[2] = gen_reg_rtx (SImode);
4013 (define_insn "*thumb1_zero_extendhisi2"
4014 [(set (match_operand:SI 0 "register_operand" "=l")
4015 (zero_extend:SI (match_operand:HI 1 "memory_operand" "m")))]
4016 "TARGET_THUMB1 && !arm_arch6"
4018 rtx mem = XEXP (operands[1], 0);
4020 if (GET_CODE (mem) == CONST)
4021 mem = XEXP (mem, 0);
4023 if (GET_CODE (mem) == LABEL_REF)
4024 return \"ldr\\t%0, %1\";
4026 if (GET_CODE (mem) == PLUS)
4028 rtx a = XEXP (mem, 0);
4029 rtx b = XEXP (mem, 1);
4031 /* This can happen due to bugs in reload. */
4032 if (GET_CODE (a) == REG && REGNO (a) == SP_REGNUM)
4035 ops[0] = operands[0];
4038 output_asm_insn (\"mov %0, %1\", ops);
4040 XEXP (mem, 0) = operands[0];
4043 else if ( GET_CODE (a) == LABEL_REF
4044 && GET_CODE (b) == CONST_INT)
4045 return \"ldr\\t%0, %1\";
4048 return \"ldrh\\t%0, %1\";
4050 [(set_attr "length" "4")
4051 (set_attr "type" "load_byte")
4052 (set_attr "pool_range" "60")]
4055 (define_insn "*thumb1_zero_extendhisi2_v6"
4056 [(set (match_operand:SI 0 "register_operand" "=l,l")
4057 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "l,m")))]
4058 "TARGET_THUMB1 && arm_arch6"
4062 if (which_alternative == 0)
4063 return \"uxth\\t%0, %1\";
4065 mem = XEXP (operands[1], 0);
4067 if (GET_CODE (mem) == CONST)
4068 mem = XEXP (mem, 0);
4070 if (GET_CODE (mem) == LABEL_REF)
4071 return \"ldr\\t%0, %1\";
4073 if (GET_CODE (mem) == PLUS)
4075 rtx a = XEXP (mem, 0);
4076 rtx b = XEXP (mem, 1);
4078 /* This can happen due to bugs in reload. */
4079 if (GET_CODE (a) == REG && REGNO (a) == SP_REGNUM)
4082 ops[0] = operands[0];
4085 output_asm_insn (\"mov %0, %1\", ops);
4087 XEXP (mem, 0) = operands[0];
4090 else if ( GET_CODE (a) == LABEL_REF
4091 && GET_CODE (b) == CONST_INT)
4092 return \"ldr\\t%0, %1\";
4095 return \"ldrh\\t%0, %1\";
4097 [(set_attr "length" "2,4")
4098 (set_attr "type" "alu_shift,load_byte")
4099 (set_attr "pool_range" "*,60")]
4102 (define_insn "*arm_zero_extendhisi2"
4103 [(set (match_operand:SI 0 "s_register_operand" "=r")
4104 (zero_extend:SI (match_operand:HI 1 "memory_operand" "m")))]
4105 "TARGET_ARM && arm_arch4 && !arm_arch6"
4107 [(set_attr "type" "load_byte")
4108 (set_attr "predicable" "yes")
4109 (set_attr "pool_range" "256")
4110 (set_attr "neg_pool_range" "244")]
4113 (define_insn "*arm_zero_extendhisi2_v6"
4114 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4115 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
4116 "TARGET_ARM && arm_arch6"
4120 [(set_attr "type" "alu_shift,load_byte")
4121 (set_attr "predicable" "yes")
4122 (set_attr "pool_range" "*,256")
4123 (set_attr "neg_pool_range" "*,244")]
4126 (define_insn "*arm_zero_extendhisi2addsi"
4127 [(set (match_operand:SI 0 "s_register_operand" "=r")
4128 (plus:SI (zero_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
4129 (match_operand:SI 2 "s_register_operand" "r")))]
4131 "uxtah%?\\t%0, %2, %1"
4132 [(set_attr "type" "alu_shift")
4133 (set_attr "predicable" "yes")]
4136 (define_expand "zero_extendqisi2"
4137 [(set (match_operand:SI 0 "s_register_operand" "")
4138 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "")))]
4141 if (!arm_arch6 && GET_CODE (operands[1]) != MEM)
4145 emit_insn (gen_andsi3 (operands[0],
4146 gen_lowpart (SImode, operands[1]),
4149 else /* TARGET_THUMB */
4151 rtx temp = gen_reg_rtx (SImode);
4154 operands[1] = copy_to_mode_reg (QImode, operands[1]);
4155 operands[1] = gen_lowpart (SImode, operands[1]);
4158 ops[1] = operands[1];
4159 ops[2] = GEN_INT (24);
4161 emit_insn (gen_rtx_SET (VOIDmode, ops[0],
4162 gen_rtx_ASHIFT (SImode, ops[1], ops[2])));
4164 ops[0] = operands[0];
4166 ops[2] = GEN_INT (24);
4168 emit_insn (gen_rtx_SET (VOIDmode, ops[0],
4169 gen_rtx_LSHIFTRT (SImode, ops[1], ops[2])));
4176 (define_insn "*thumb1_zero_extendqisi2"
4177 [(set (match_operand:SI 0 "register_operand" "=l")
4178 (zero_extend:SI (match_operand:QI 1 "memory_operand" "m")))]
4179 "TARGET_THUMB1 && !arm_arch6"
4181 [(set_attr "length" "2")
4182 (set_attr "type" "load_byte")
4183 (set_attr "pool_range" "32")]
4186 (define_insn "*thumb1_zero_extendqisi2_v6"
4187 [(set (match_operand:SI 0 "register_operand" "=l,l")
4188 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,m")))]
4189 "TARGET_THUMB1 && arm_arch6"
4193 [(set_attr "length" "2,2")
4194 (set_attr "type" "alu_shift,load_byte")
4195 (set_attr "pool_range" "*,32")]
4198 (define_insn "*arm_zero_extendqisi2"
4199 [(set (match_operand:SI 0 "s_register_operand" "=r")
4200 (zero_extend:SI (match_operand:QI 1 "memory_operand" "m")))]
4201 "TARGET_ARM && !arm_arch6"
4202 "ldr%(b%)\\t%0, %1\\t%@ zero_extendqisi2"
4203 [(set_attr "type" "load_byte")
4204 (set_attr "predicable" "yes")
4205 (set_attr "pool_range" "4096")
4206 (set_attr "neg_pool_range" "4084")]
4209 (define_insn "*arm_zero_extendqisi2_v6"
4210 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4211 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
4212 "TARGET_ARM && arm_arch6"
4215 ldr%(b%)\\t%0, %1\\t%@ zero_extendqisi2"
4216 [(set_attr "type" "alu_shift,load_byte")
4217 (set_attr "predicable" "yes")
4218 (set_attr "pool_range" "*,4096")
4219 (set_attr "neg_pool_range" "*,4084")]
4222 (define_insn "*arm_zero_extendqisi2addsi"
4223 [(set (match_operand:SI 0 "s_register_operand" "=r")
4224 (plus:SI (zero_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
4225 (match_operand:SI 2 "s_register_operand" "r")))]
4227 "uxtab%?\\t%0, %2, %1"
4228 [(set_attr "predicable" "yes")
4229 (set_attr "insn" "xtab")
4230 (set_attr "type" "alu_shift")]
4234 [(set (match_operand:SI 0 "s_register_operand" "")
4235 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 0)))
4236 (clobber (match_operand:SI 2 "s_register_operand" ""))]
4237 "TARGET_32BIT && (GET_CODE (operands[1]) != MEM) && ! BYTES_BIG_ENDIAN"
4238 [(set (match_dup 2) (match_dup 1))
4239 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
4244 [(set (match_operand:SI 0 "s_register_operand" "")
4245 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 3)))
4246 (clobber (match_operand:SI 2 "s_register_operand" ""))]
4247 "TARGET_32BIT && (GET_CODE (operands[1]) != MEM) && BYTES_BIG_ENDIAN"
4248 [(set (match_dup 2) (match_dup 1))
4249 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
4253 (define_code_iterator ior_xor [ior xor])
4256 [(set (match_operand:SI 0 "s_register_operand" "")
4257 (ior_xor:SI (and:SI (ashift:SI
4258 (match_operand:SI 1 "s_register_operand" "")
4259 (match_operand:SI 2 "const_int_operand" ""))
4260 (match_operand:SI 3 "const_int_operand" ""))
4262 (match_operator 5 "subreg_lowpart_operator"
4263 [(match_operand:SI 4 "s_register_operand" "")]))))]
4265 && ((unsigned HOST_WIDE_INT) INTVAL (operands[3])
4266 == (GET_MODE_MASK (GET_MODE (operands[5]))
4267 & (GET_MODE_MASK (GET_MODE (operands[5]))
4268 << (INTVAL (operands[2])))))"
4269 [(set (match_dup 0) (ior_xor:SI (ashift:SI (match_dup 1) (match_dup 2))
4271 (set (match_dup 0) (zero_extend:SI (match_dup 5)))]
4272 "operands[5] = gen_lowpart (GET_MODE (operands[5]), operands[0]);"
4275 (define_insn "*compareqi_eq0"
4276 [(set (reg:CC_Z CC_REGNUM)
4277 (compare:CC_Z (match_operand:QI 0 "s_register_operand" "r")
4281 [(set_attr "conds" "set")]
4284 (define_expand "extendhisi2"
4286 (ashift:SI (match_operand:HI 1 "nonimmediate_operand" "")
4288 (set (match_operand:SI 0 "s_register_operand" "")
4289 (ashiftrt:SI (match_dup 2)
4294 if (GET_CODE (operands[1]) == MEM)
4298 emit_insn (gen_thumb1_extendhisi2 (operands[0], operands[1]));
4303 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
4304 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
4309 if (TARGET_ARM && GET_CODE (operands[1]) == MEM)
4311 emit_insn (gen_extendhisi2_mem (operands[0], operands[1]));
4315 if (!s_register_operand (operands[1], HImode))
4316 operands[1] = copy_to_mode_reg (HImode, operands[1]);
4321 emit_insn (gen_thumb1_extendhisi2 (operands[0], operands[1]));
4323 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
4324 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
4329 operands[1] = gen_lowpart (SImode, operands[1]);
4330 operands[2] = gen_reg_rtx (SImode);
4334 (define_insn "thumb1_extendhisi2"
4335 [(set (match_operand:SI 0 "register_operand" "=l")
4336 (sign_extend:SI (match_operand:HI 1 "memory_operand" "m")))
4337 (clobber (match_scratch:SI 2 "=&l"))]
4338 "TARGET_THUMB1 && !arm_arch6"
4342 rtx mem = XEXP (operands[1], 0);
4344 /* This code used to try to use 'V', and fix the address only if it was
4345 offsettable, but this fails for e.g. REG+48 because 48 is outside the
4346 range of QImode offsets, and offsettable_address_p does a QImode
4349 if (GET_CODE (mem) == CONST)
4350 mem = XEXP (mem, 0);
4352 if (GET_CODE (mem) == LABEL_REF)
4353 return \"ldr\\t%0, %1\";
4355 if (GET_CODE (mem) == PLUS)
4357 rtx a = XEXP (mem, 0);
4358 rtx b = XEXP (mem, 1);
4360 if (GET_CODE (a) == LABEL_REF
4361 && GET_CODE (b) == CONST_INT)
4362 return \"ldr\\t%0, %1\";
4364 if (GET_CODE (b) == REG)
4365 return \"ldrsh\\t%0, %1\";
4373 ops[2] = const0_rtx;
4376 gcc_assert (GET_CODE (ops[1]) == REG);
4378 ops[0] = operands[0];
4379 ops[3] = operands[2];
4380 output_asm_insn (\"mov\\t%3, %2\;ldrsh\\t%0, [%1, %3]\", ops);
4383 [(set_attr "length" "4")
4384 (set_attr "type" "load_byte")
4385 (set_attr "pool_range" "1020")]
4388 ;; We used to have an early-clobber on the scratch register here.
4389 ;; However, there's a bug somewhere in reload which means that this
4390 ;; can be partially ignored during spill allocation if the memory
4391 ;; address also needs reloading; this causes us to die later on when
4392 ;; we try to verify the operands. Fortunately, we don't really need
4393 ;; the early-clobber: we can always use operand 0 if operand 2
4394 ;; overlaps the address.
4395 (define_insn "*thumb1_extendhisi2_insn_v6"
4396 [(set (match_operand:SI 0 "register_operand" "=l,l")
4397 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "l,m")))
4398 (clobber (match_scratch:SI 2 "=X,l"))]
4399 "TARGET_THUMB1 && arm_arch6"
4405 if (which_alternative == 0)
4406 return \"sxth\\t%0, %1\";
4408 mem = XEXP (operands[1], 0);
4410 /* This code used to try to use 'V', and fix the address only if it was
4411 offsettable, but this fails for e.g. REG+48 because 48 is outside the
4412 range of QImode offsets, and offsettable_address_p does a QImode
4415 if (GET_CODE (mem) == CONST)
4416 mem = XEXP (mem, 0);
4418 if (GET_CODE (mem) == LABEL_REF)
4419 return \"ldr\\t%0, %1\";
4421 if (GET_CODE (mem) == PLUS)
4423 rtx a = XEXP (mem, 0);
4424 rtx b = XEXP (mem, 1);
4426 if (GET_CODE (a) == LABEL_REF
4427 && GET_CODE (b) == CONST_INT)
4428 return \"ldr\\t%0, %1\";
4430 if (GET_CODE (b) == REG)
4431 return \"ldrsh\\t%0, %1\";
4439 ops[2] = const0_rtx;
4442 gcc_assert (GET_CODE (ops[1]) == REG);
4444 ops[0] = operands[0];
4445 if (reg_mentioned_p (operands[2], ops[1]))
4448 ops[3] = operands[2];
4449 output_asm_insn (\"mov\\t%3, %2\;ldrsh\\t%0, [%1, %3]\", ops);
4452 [(set_attr "length" "2,4")
4453 (set_attr "type" "alu_shift,load_byte")
4454 (set_attr "pool_range" "*,1020")]
4457 ;; This pattern will only be used when ldsh is not available
4458 (define_expand "extendhisi2_mem"
4459 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
4461 (zero_extend:SI (match_dup 7)))
4462 (set (match_dup 6) (ashift:SI (match_dup 4) (const_int 24)))
4463 (set (match_operand:SI 0 "" "")
4464 (ior:SI (ashiftrt:SI (match_dup 6) (const_int 16)) (match_dup 5)))]
4469 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
4471 mem1 = change_address (operands[1], QImode, addr);
4472 mem2 = change_address (operands[1], QImode, plus_constant (addr, 1));
4473 operands[0] = gen_lowpart (SImode, operands[0]);
4475 operands[2] = gen_reg_rtx (SImode);
4476 operands[3] = gen_reg_rtx (SImode);
4477 operands[6] = gen_reg_rtx (SImode);
4480 if (BYTES_BIG_ENDIAN)
4482 operands[4] = operands[2];
4483 operands[5] = operands[3];
4487 operands[4] = operands[3];
4488 operands[5] = operands[2];
4493 (define_insn "*arm_extendhisi2"
4494 [(set (match_operand:SI 0 "s_register_operand" "=r")
4495 (sign_extend:SI (match_operand:HI 1 "memory_operand" "m")))]
4496 "TARGET_ARM && arm_arch4 && !arm_arch6"
4497 "ldr%(sh%)\\t%0, %1"
4498 [(set_attr "type" "load_byte")
4499 (set_attr "predicable" "yes")
4500 (set_attr "pool_range" "256")
4501 (set_attr "neg_pool_range" "244")]
4504 ;; ??? Check Thumb-2 pool range
4505 (define_insn "*arm_extendhisi2_v6"
4506 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4507 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
4508 "TARGET_32BIT && arm_arch6"
4512 [(set_attr "type" "alu_shift,load_byte")
4513 (set_attr "predicable" "yes")
4514 (set_attr "pool_range" "*,256")
4515 (set_attr "neg_pool_range" "*,244")]
4518 (define_insn "*arm_extendhisi2addsi"
4519 [(set (match_operand:SI 0 "s_register_operand" "=r")
4520 (plus:SI (sign_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
4521 (match_operand:SI 2 "s_register_operand" "r")))]
4523 "sxtah%?\\t%0, %2, %1"
4526 (define_expand "extendqihi2"
4528 (ashift:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "")
4530 (set (match_operand:HI 0 "s_register_operand" "")
4531 (ashiftrt:SI (match_dup 2)
4536 if (arm_arch4 && GET_CODE (operands[1]) == MEM)
4538 emit_insn (gen_rtx_SET (VOIDmode,
4540 gen_rtx_SIGN_EXTEND (HImode, operands[1])));
4543 if (!s_register_operand (operands[1], QImode))
4544 operands[1] = copy_to_mode_reg (QImode, operands[1]);
4545 operands[0] = gen_lowpart (SImode, operands[0]);
4546 operands[1] = gen_lowpart (SImode, operands[1]);
4547 operands[2] = gen_reg_rtx (SImode);
4551 (define_insn "*arm_extendqihi_insn"
4552 [(set (match_operand:HI 0 "s_register_operand" "=r")
4553 (sign_extend:HI (match_operand:QI 1 "arm_extendqisi_mem_op" "Uq")))]
4554 "TARGET_ARM && arm_arch4"
4555 "ldr%(sb%)\\t%0, %1"
4556 [(set_attr "type" "load_byte")
4557 (set_attr "predicable" "yes")
4558 (set_attr "pool_range" "256")
4559 (set_attr "neg_pool_range" "244")]
4562 (define_expand "extendqisi2"
4564 (ashift:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "")
4566 (set (match_operand:SI 0 "s_register_operand" "")
4567 (ashiftrt:SI (match_dup 2)
4572 if ((TARGET_THUMB || arm_arch4) && GET_CODE (operands[1]) == MEM)
4574 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
4575 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
4579 if (!s_register_operand (operands[1], QImode))
4580 operands[1] = copy_to_mode_reg (QImode, operands[1]);
4584 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
4585 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
4589 operands[1] = gen_lowpart (SImode, operands[1]);
4590 operands[2] = gen_reg_rtx (SImode);
4594 (define_insn "*arm_extendqisi"
4595 [(set (match_operand:SI 0 "s_register_operand" "=r")
4596 (sign_extend:SI (match_operand:QI 1 "arm_extendqisi_mem_op" "Uq")))]
4597 "TARGET_ARM && arm_arch4 && !arm_arch6"
4598 "ldr%(sb%)\\t%0, %1"
4599 [(set_attr "type" "load_byte")
4600 (set_attr "predicable" "yes")
4601 (set_attr "pool_range" "256")
4602 (set_attr "neg_pool_range" "244")]
4605 (define_insn "*arm_extendqisi_v6"
4606 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4608 (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
4609 "TARGET_ARM && arm_arch6"
4613 [(set_attr "type" "alu_shift,load_byte")
4614 (set_attr "predicable" "yes")
4615 (set_attr "pool_range" "*,256")
4616 (set_attr "neg_pool_range" "*,244")]
4619 (define_insn "*arm_extendqisi2addsi"
4620 [(set (match_operand:SI 0 "s_register_operand" "=r")
4621 (plus:SI (sign_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
4622 (match_operand:SI 2 "s_register_operand" "r")))]
4624 "sxtab%?\\t%0, %2, %1"
4625 [(set_attr "type" "alu_shift")
4626 (set_attr "insn" "xtab")
4627 (set_attr "predicable" "yes")]
4630 (define_insn "*thumb1_extendqisi2"
4631 [(set (match_operand:SI 0 "register_operand" "=l,l")
4632 (sign_extend:SI (match_operand:QI 1 "memory_operand" "V,m")))]
4633 "TARGET_THUMB1 && !arm_arch6"
4637 rtx mem = XEXP (operands[1], 0);
4639 if (GET_CODE (mem) == CONST)
4640 mem = XEXP (mem, 0);
4642 if (GET_CODE (mem) == LABEL_REF)
4643 return \"ldr\\t%0, %1\";
4645 if (GET_CODE (mem) == PLUS
4646 && GET_CODE (XEXP (mem, 0)) == LABEL_REF)
4647 return \"ldr\\t%0, %1\";
4649 if (which_alternative == 0)
4650 return \"ldrsb\\t%0, %1\";
4652 ops[0] = operands[0];
4654 if (GET_CODE (mem) == PLUS)
4656 rtx a = XEXP (mem, 0);
4657 rtx b = XEXP (mem, 1);
4662 if (GET_CODE (a) == REG)
4664 if (GET_CODE (b) == REG)
4665 output_asm_insn (\"ldrsb\\t%0, [%1, %2]\", ops);
4666 else if (REGNO (a) == REGNO (ops[0]))
4668 output_asm_insn (\"ldrb\\t%0, [%1, %2]\", ops);
4669 output_asm_insn (\"lsl\\t%0, %0, #24\", ops);
4670 output_asm_insn (\"asr\\t%0, %0, #24\", ops);
4673 output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
4677 gcc_assert (GET_CODE (b) == REG);
4678 if (REGNO (b) == REGNO (ops[0]))
4680 output_asm_insn (\"ldrb\\t%0, [%2, %1]\", ops);
4681 output_asm_insn (\"lsl\\t%0, %0, #24\", ops);
4682 output_asm_insn (\"asr\\t%0, %0, #24\", ops);
4685 output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
4688 else if (GET_CODE (mem) == REG && REGNO (ops[0]) == REGNO (mem))
4690 output_asm_insn (\"ldrb\\t%0, [%0, #0]\", ops);
4691 output_asm_insn (\"lsl\\t%0, %0, #24\", ops);
4692 output_asm_insn (\"asr\\t%0, %0, #24\", ops);
4697 ops[2] = const0_rtx;
4699 output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
4703 [(set_attr "length" "2,6")
4704 (set_attr "type" "load_byte,load_byte")
4705 (set_attr "pool_range" "32,32")]
4708 (define_insn "*thumb1_extendqisi2_v6"
4709 [(set (match_operand:SI 0 "register_operand" "=l,l,l")
4710 (sign_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,V,m")))]
4711 "TARGET_THUMB1 && arm_arch6"
4717 if (which_alternative == 0)
4718 return \"sxtb\\t%0, %1\";
4720 mem = XEXP (operands[1], 0);
4722 if (GET_CODE (mem) == CONST)
4723 mem = XEXP (mem, 0);
4725 if (GET_CODE (mem) == LABEL_REF)
4726 return \"ldr\\t%0, %1\";
4728 if (GET_CODE (mem) == PLUS
4729 && GET_CODE (XEXP (mem, 0)) == LABEL_REF)
4730 return \"ldr\\t%0, %1\";
4732 if (which_alternative == 0)
4733 return \"ldrsb\\t%0, %1\";
4735 ops[0] = operands[0];
4737 if (GET_CODE (mem) == PLUS)
4739 rtx a = XEXP (mem, 0);
4740 rtx b = XEXP (mem, 1);
4745 if (GET_CODE (a) == REG)
4747 if (GET_CODE (b) == REG)
4748 output_asm_insn (\"ldrsb\\t%0, [%1, %2]\", ops);
4749 else if (REGNO (a) == REGNO (ops[0]))
4751 output_asm_insn (\"ldrb\\t%0, [%1, %2]\", ops);
4752 output_asm_insn (\"sxtb\\t%0, %0\", ops);
4755 output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
4759 gcc_assert (GET_CODE (b) == REG);
4760 if (REGNO (b) == REGNO (ops[0]))
4762 output_asm_insn (\"ldrb\\t%0, [%2, %1]\", ops);
4763 output_asm_insn (\"sxtb\\t%0, %0\", ops);
4766 output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
4769 else if (GET_CODE (mem) == REG && REGNO (ops[0]) == REGNO (mem))
4771 output_asm_insn (\"ldrb\\t%0, [%0, #0]\", ops);
4772 output_asm_insn (\"sxtb\\t%0, %0\", ops);
4777 ops[2] = const0_rtx;
4779 output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
4783 [(set_attr "length" "2,2,4")
4784 (set_attr "type" "alu_shift,load_byte,load_byte")
4785 (set_attr "pool_range" "*,32,32")]
4788 (define_expand "extendsfdf2"
4789 [(set (match_operand:DF 0 "s_register_operand" "")
4790 (float_extend:DF (match_operand:SF 1 "s_register_operand" "")))]
4791 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4795 /* HFmode -> DFmode conversions have to go through SFmode. */
4796 (define_expand "extendhfdf2"
4797 [(set (match_operand:DF 0 "general_operand" "")
4798 (float_extend:DF (match_operand:HF 1 "general_operand" "")))]
4803 op1 = convert_to_mode (SFmode, operands[1], 0);
4804 op1 = convert_to_mode (DFmode, op1, 0);
4805 emit_insn (gen_movdf (operands[0], op1));
4810 ;; Move insns (including loads and stores)
4812 ;; XXX Just some ideas about movti.
4813 ;; I don't think these are a good idea on the arm, there just aren't enough
4815 ;;(define_expand "loadti"
4816 ;; [(set (match_operand:TI 0 "s_register_operand" "")
4817 ;; (mem:TI (match_operand:SI 1 "address_operand" "")))]
4820 ;;(define_expand "storeti"
4821 ;; [(set (mem:TI (match_operand:TI 0 "address_operand" ""))
4822 ;; (match_operand:TI 1 "s_register_operand" ""))]
4825 ;;(define_expand "movti"
4826 ;; [(set (match_operand:TI 0 "general_operand" "")
4827 ;; (match_operand:TI 1 "general_operand" ""))]
4833 ;; if (GET_CODE (operands[0]) == MEM && GET_CODE (operands[1]) == MEM)
4834 ;; operands[1] = copy_to_reg (operands[1]);
4835 ;; if (GET_CODE (operands[0]) == MEM)
4836 ;; insn = gen_storeti (XEXP (operands[0], 0), operands[1]);
4837 ;; else if (GET_CODE (operands[1]) == MEM)
4838 ;; insn = gen_loadti (operands[0], XEXP (operands[1], 0));
4842 ;; emit_insn (insn);
4846 ;; Recognize garbage generated above.
4849 ;; [(set (match_operand:TI 0 "general_operand" "=r,r,r,<,>,m")
4850 ;; (match_operand:TI 1 "general_operand" "<,>,m,r,r,r"))]
4854 ;; register mem = (which_alternative < 3);
4855 ;; register const char *template;
4857 ;; operands[mem] = XEXP (operands[mem], 0);
4858 ;; switch (which_alternative)
4860 ;; case 0: template = \"ldmdb\\t%1!, %M0\"; break;
4861 ;; case 1: template = \"ldmia\\t%1!, %M0\"; break;
4862 ;; case 2: template = \"ldmia\\t%1, %M0\"; break;
4863 ;; case 3: template = \"stmdb\\t%0!, %M1\"; break;
4864 ;; case 4: template = \"stmia\\t%0!, %M1\"; break;
4865 ;; case 5: template = \"stmia\\t%0, %M1\"; break;
4867 ;; output_asm_insn (template, operands);
4871 (define_expand "movdi"
4872 [(set (match_operand:DI 0 "general_operand" "")
4873 (match_operand:DI 1 "general_operand" ""))]
4876 if (can_create_pseudo_p ())
4878 if (GET_CODE (operands[0]) != REG)
4879 operands[1] = force_reg (DImode, operands[1]);
4884 (define_insn "*arm_movdi"
4885 [(set (match_operand:DI 0 "nonimmediate_di_operand" "=r, r, r, r, m")
4886 (match_operand:DI 1 "di_operand" "rDa,Db,Dc,mi,r"))]
4888 && !(TARGET_HARD_FLOAT && (TARGET_MAVERICK || TARGET_VFP))
4890 && ( register_operand (operands[0], DImode)
4891 || register_operand (operands[1], DImode))"
4893 switch (which_alternative)
4900 return output_move_double (operands);
4903 [(set_attr "length" "8,12,16,8,8")
4904 (set_attr "type" "*,*,*,load2,store2")
4905 (set_attr "pool_range" "*,*,*,1020,*")
4906 (set_attr "neg_pool_range" "*,*,*,1008,*")]
4910 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
4911 (match_operand:ANY64 1 "const_double_operand" ""))]
4914 && (arm_const_double_inline_cost (operands[1])
4915 <= ((optimize_size || arm_ld_sched) ? 3 : 4))"
4918 arm_split_constant (SET, SImode, curr_insn,
4919 INTVAL (gen_lowpart (SImode, operands[1])),
4920 gen_lowpart (SImode, operands[0]), NULL_RTX, 0);
4921 arm_split_constant (SET, SImode, curr_insn,
4922 INTVAL (gen_highpart_mode (SImode,
4923 GET_MODE (operands[0]),
4925 gen_highpart (SImode, operands[0]), NULL_RTX, 0);
4930 ; If optimizing for size, or if we have load delay slots, then
4931 ; we want to split the constant into two separate operations.
4932 ; In both cases this may split a trivial part into a single data op
4933 ; leaving a single complex constant to load. We can also get longer
4934 ; offsets in a LDR which means we get better chances of sharing the pool
4935 ; entries. Finally, we can normally do a better job of scheduling
4936 ; LDR instructions than we can with LDM.
4937 ; This pattern will only match if the one above did not.
4939 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
4940 (match_operand:ANY64 1 "const_double_operand" ""))]
4941 "TARGET_ARM && reload_completed
4942 && arm_const_double_by_parts (operands[1])"
4943 [(set (match_dup 0) (match_dup 1))
4944 (set (match_dup 2) (match_dup 3))]
4946 operands[2] = gen_highpart (SImode, operands[0]);
4947 operands[3] = gen_highpart_mode (SImode, GET_MODE (operands[0]),
4949 operands[0] = gen_lowpart (SImode, operands[0]);
4950 operands[1] = gen_lowpart (SImode, operands[1]);
4955 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
4956 (match_operand:ANY64 1 "arm_general_register_operand" ""))]
4957 "TARGET_EITHER && reload_completed"
4958 [(set (match_dup 0) (match_dup 1))
4959 (set (match_dup 2) (match_dup 3))]
4961 operands[2] = gen_highpart (SImode, operands[0]);
4962 operands[3] = gen_highpart (SImode, operands[1]);
4963 operands[0] = gen_lowpart (SImode, operands[0]);
4964 operands[1] = gen_lowpart (SImode, operands[1]);
4966 /* Handle a partial overlap. */
4967 if (rtx_equal_p (operands[0], operands[3]))
4969 rtx tmp0 = operands[0];
4970 rtx tmp1 = operands[1];
4972 operands[0] = operands[2];
4973 operands[1] = operands[3];
4980 ;; We can't actually do base+index doubleword loads if the index and
4981 ;; destination overlap. Split here so that we at least have chance to
4984 [(set (match_operand:DI 0 "s_register_operand" "")
4985 (mem:DI (plus:SI (match_operand:SI 1 "s_register_operand" "")
4986 (match_operand:SI 2 "s_register_operand" ""))))]
4988 && reg_overlap_mentioned_p (operands[0], operands[1])
4989 && reg_overlap_mentioned_p (operands[0], operands[2])"
4991 (plus:SI (match_dup 1)
4994 (mem:DI (match_dup 4)))]
4996 operands[4] = gen_rtx_REG (SImode, REGNO(operands[0]));
5000 ;;; ??? This should have alternatives for constants.
5001 ;;; ??? This was originally identical to the movdf_insn pattern.
5002 ;;; ??? The 'i' constraint looks funny, but it should always be replaced by
5003 ;;; thumb_reorg with a memory reference.
5004 (define_insn "*thumb1_movdi_insn"
5005 [(set (match_operand:DI 0 "nonimmediate_operand" "=l,l,l,l,>,l, m,*r")
5006 (match_operand:DI 1 "general_operand" "l, I,J,>,l,mi,l,*r"))]
5008 && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)
5009 && ( register_operand (operands[0], DImode)
5010 || register_operand (operands[1], DImode))"
5013 switch (which_alternative)
5017 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
5018 return \"add\\t%0, %1, #0\;add\\t%H0, %H1, #0\";
5019 return \"add\\t%H0, %H1, #0\;add\\t%0, %1, #0\";
5021 return \"mov\\t%Q0, %1\;mov\\t%R0, #0\";
5023 operands[1] = GEN_INT (- INTVAL (operands[1]));
5024 return \"mov\\t%Q0, %1\;neg\\t%Q0, %Q0\;asr\\t%R0, %Q0, #31\";
5026 return \"ldmia\\t%1, {%0, %H0}\";
5028 return \"stmia\\t%0, {%1, %H1}\";
5030 return thumb_load_double_from_address (operands);
5032 operands[2] = gen_rtx_MEM (SImode,
5033 plus_constant (XEXP (operands[0], 0), 4));
5034 output_asm_insn (\"str\\t%1, %0\;str\\t%H1, %2\", operands);
5037 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
5038 return \"mov\\t%0, %1\;mov\\t%H0, %H1\";
5039 return \"mov\\t%H0, %H1\;mov\\t%0, %1\";
5042 [(set_attr "length" "4,4,6,2,2,6,4,4")
5043 (set_attr "type" "*,*,*,load2,store2,load2,store2,*")
5044 (set_attr "pool_range" "*,*,*,*,*,1020,*,*")]
5047 (define_expand "movsi"
5048 [(set (match_operand:SI 0 "general_operand" "")
5049 (match_operand:SI 1 "general_operand" ""))]
5053 rtx base, offset, tmp;
5057 /* Everything except mem = const or mem = mem can be done easily. */
5058 if (GET_CODE (operands[0]) == MEM)
5059 operands[1] = force_reg (SImode, operands[1]);
5060 if (arm_general_register_operand (operands[0], SImode)
5061 && GET_CODE (operands[1]) == CONST_INT
5062 && !(const_ok_for_arm (INTVAL (operands[1]))
5063 || const_ok_for_arm (~INTVAL (operands[1]))))
5065 arm_split_constant (SET, SImode, NULL_RTX,
5066 INTVAL (operands[1]), operands[0], NULL_RTX,
5067 optimize && can_create_pseudo_p ());
5071 if (TARGET_USE_MOVT && !target_word_relocations
5072 && GET_CODE (operands[1]) == SYMBOL_REF
5073 && !flag_pic && !arm_tls_referenced_p (operands[1]))
5075 arm_emit_movpair (operands[0], operands[1]);
5079 else /* TARGET_THUMB1... */
5081 if (can_create_pseudo_p ())
5083 if (GET_CODE (operands[0]) != REG)
5084 operands[1] = force_reg (SImode, operands[1]);
5088 if (ARM_OFFSETS_MUST_BE_WITHIN_SECTIONS_P)
5090 split_const (operands[1], &base, &offset);
5091 if (GET_CODE (base) == SYMBOL_REF
5092 && !offset_within_block_p (base, INTVAL (offset)))
5094 tmp = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
5095 emit_move_insn (tmp, base);
5096 emit_insn (gen_addsi3 (operands[0], tmp, offset));
5101 /* Recognize the case where operand[1] is a reference to thread-local
5102 data and load its address to a register. */
5103 if (arm_tls_referenced_p (operands[1]))
5105 rtx tmp = operands[1];
5108 if (GET_CODE (tmp) == CONST && GET_CODE (XEXP (tmp, 0)) == PLUS)
5110 addend = XEXP (XEXP (tmp, 0), 1);
5111 tmp = XEXP (XEXP (tmp, 0), 0);
5114 gcc_assert (GET_CODE (tmp) == SYMBOL_REF);
5115 gcc_assert (SYMBOL_REF_TLS_MODEL (tmp) != 0);
5117 tmp = legitimize_tls_address (tmp,
5118 !can_create_pseudo_p () ? operands[0] : 0);
5121 tmp = gen_rtx_PLUS (SImode, tmp, addend);
5122 tmp = force_operand (tmp, operands[0]);
5127 && (CONSTANT_P (operands[1])
5128 || symbol_mentioned_p (operands[1])
5129 || label_mentioned_p (operands[1])))
5130 operands[1] = legitimize_pic_address (operands[1], SImode,
5131 (!can_create_pseudo_p ()
5138 ;; The ARM LO_SUM and HIGH are backwards - HIGH sets the low bits, and
5139 ;; LO_SUM adds in the high bits. Fortunately these are opaque operations
5140 ;; so this does not matter.
5141 (define_insn "*arm_movt"
5142 [(set (match_operand:SI 0 "nonimmediate_operand" "=r")
5143 (lo_sum:SI (match_operand:SI 1 "nonimmediate_operand" "0")
5144 (match_operand:SI 2 "general_operand" "i")))]
5146 "movt%?\t%0, #:upper16:%c2"
5147 [(set_attr "predicable" "yes")
5148 (set_attr "length" "4")]
5151 (define_insn "*arm_movsi_insn"
5152 [(set (match_operand:SI 0 "nonimmediate_operand" "=rk,r,r,r,rk,m")
5153 (match_operand:SI 1 "general_operand" "rk, I,K,j,mi,rk"))]
5154 "TARGET_ARM && ! TARGET_IWMMXT
5155 && !(TARGET_HARD_FLOAT && TARGET_VFP)
5156 && ( register_operand (operands[0], SImode)
5157 || register_operand (operands[1], SImode))"
5165 [(set_attr "type" "*,*,*,*,load1,store1")
5166 (set_attr "predicable" "yes")
5167 (set_attr "pool_range" "*,*,*,*,4096,*")
5168 (set_attr "neg_pool_range" "*,*,*,*,4084,*")]
5172 [(set (match_operand:SI 0 "arm_general_register_operand" "")
5173 (match_operand:SI 1 "const_int_operand" ""))]
5175 && (!(const_ok_for_arm (INTVAL (operands[1]))
5176 || const_ok_for_arm (~INTVAL (operands[1]))))"
5177 [(clobber (const_int 0))]
5179 arm_split_constant (SET, SImode, NULL_RTX,
5180 INTVAL (operands[1]), operands[0], NULL_RTX, 0);
5185 (define_insn "*thumb1_movsi_insn"
5186 [(set (match_operand:SI 0 "nonimmediate_operand" "=l,l,l,l,l,>,l, m,*lhk")
5187 (match_operand:SI 1 "general_operand" "l, I,J,K,>,l,mi,l,*lhk"))]
5189 && ( register_operand (operands[0], SImode)
5190 || register_operand (operands[1], SImode))"
5201 [(set_attr "length" "2,2,4,4,2,2,2,2,2")
5202 (set_attr "type" "*,*,*,*,load1,store1,load1,store1,*")
5203 (set_attr "pool_range" "*,*,*,*,*,*,1020,*,*")]
5207 [(set (match_operand:SI 0 "register_operand" "")
5208 (match_operand:SI 1 "const_int_operand" ""))]
5209 "TARGET_THUMB1 && satisfies_constraint_J (operands[1])"
5210 [(set (match_dup 0) (match_dup 1))
5211 (set (match_dup 0) (neg:SI (match_dup 0)))]
5212 "operands[1] = GEN_INT (- INTVAL (operands[1]));"
5216 [(set (match_operand:SI 0 "register_operand" "")
5217 (match_operand:SI 1 "const_int_operand" ""))]
5218 "TARGET_THUMB1 && satisfies_constraint_K (operands[1])"
5219 [(set (match_dup 0) (match_dup 1))
5220 (set (match_dup 0) (ashift:SI (match_dup 0) (match_dup 2)))]
5223 unsigned HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffffffffu;
5224 unsigned HOST_WIDE_INT mask = 0xff;
5227 for (i = 0; i < 25; i++)
5228 if ((val & (mask << i)) == val)
5231 /* Shouldn't happen, but we don't want to split if the shift is zero. */
5235 operands[1] = GEN_INT (val >> i);
5236 operands[2] = GEN_INT (i);
5240 ;; When generating pic, we need to load the symbol offset into a register.
5241 ;; So that the optimizer does not confuse this with a normal symbol load
5242 ;; we use an unspec. The offset will be loaded from a constant pool entry,
5243 ;; since that is the only type of relocation we can use.
5245 ;; The rather odd constraints on the following are to force reload to leave
5246 ;; the insn alone, and to force the minipool generation pass to then move
5247 ;; the GOT symbol to memory.
5249 (define_insn "pic_load_addr_32bit"
5250 [(set (match_operand:SI 0 "s_register_operand" "=r")
5251 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
5252 "TARGET_32BIT && flag_pic"
5254 [(set_attr "type" "load1")
5255 (set_attr "pool_range" "4096")
5256 (set (attr "neg_pool_range")
5257 (if_then_else (eq_attr "is_thumb" "no")
5262 (define_insn "pic_load_addr_thumb1"
5263 [(set (match_operand:SI 0 "s_register_operand" "=l")
5264 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
5265 "TARGET_THUMB1 && flag_pic"
5267 [(set_attr "type" "load1")
5268 (set (attr "pool_range") (const_int 1024))]
5271 (define_insn "pic_add_dot_plus_four"
5272 [(set (match_operand:SI 0 "register_operand" "=r")
5273 (unspec:SI [(match_operand:SI 1 "register_operand" "0")
5275 (match_operand 2 "" "")]
5279 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5280 INTVAL (operands[2]));
5281 return \"add\\t%0, %|pc\";
5283 [(set_attr "length" "2")]
5286 (define_insn "pic_add_dot_plus_eight"
5287 [(set (match_operand:SI 0 "register_operand" "=r")
5288 (unspec:SI [(match_operand:SI 1 "register_operand" "r")
5290 (match_operand 2 "" "")]
5294 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5295 INTVAL (operands[2]));
5296 return \"add%?\\t%0, %|pc, %1\";
5298 [(set_attr "predicable" "yes")]
5301 (define_insn "tls_load_dot_plus_eight"
5302 [(set (match_operand:SI 0 "register_operand" "=r")
5303 (mem:SI (unspec:SI [(match_operand:SI 1 "register_operand" "r")
5305 (match_operand 2 "" "")]
5309 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5310 INTVAL (operands[2]));
5311 return \"ldr%?\\t%0, [%|pc, %1]\t\t@ tls_load_dot_plus_eight\";
5313 [(set_attr "predicable" "yes")]
5316 ;; PIC references to local variables can generate pic_add_dot_plus_eight
5317 ;; followed by a load. These sequences can be crunched down to
5318 ;; tls_load_dot_plus_eight by a peephole.
5321 [(set (match_operand:SI 0 "register_operand" "")
5322 (unspec:SI [(match_operand:SI 3 "register_operand" "")
5324 (match_operand 1 "" "")]
5326 (set (match_operand:SI 2 "register_operand" "") (mem:SI (match_dup 0)))]
5327 "TARGET_ARM && peep2_reg_dead_p (2, operands[0])"
5329 (mem:SI (unspec:SI [(match_dup 3)
5336 (define_insn "pic_offset_arm"
5337 [(set (match_operand:SI 0 "register_operand" "=r")
5338 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "r")
5339 (unspec:SI [(match_operand:SI 2 "" "X")]
5340 UNSPEC_PIC_OFFSET))))]
5341 "TARGET_VXWORKS_RTP && TARGET_ARM && flag_pic"
5342 "ldr%?\\t%0, [%1,%2]"
5343 [(set_attr "type" "load1")]
5346 (define_expand "builtin_setjmp_receiver"
5347 [(label_ref (match_operand 0 "" ""))]
5351 /* r3 is clobbered by set/longjmp, so we can use it as a scratch
5353 if (arm_pic_register != INVALID_REGNUM)
5354 arm_load_pic_register (1UL << 3);
5358 ;; If copying one reg to another we can set the condition codes according to
5359 ;; its value. Such a move is common after a return from subroutine and the
5360 ;; result is being tested against zero.
5362 (define_insn "*movsi_compare0"
5363 [(set (reg:CC CC_REGNUM)
5364 (compare:CC (match_operand:SI 1 "s_register_operand" "0,r")
5366 (set (match_operand:SI 0 "s_register_operand" "=r,r")
5372 [(set_attr "conds" "set")]
5375 ;; Subroutine to store a half word from a register into memory.
5376 ;; Operand 0 is the source register (HImode)
5377 ;; Operand 1 is the destination address in a register (SImode)
5379 ;; In both this routine and the next, we must be careful not to spill
5380 ;; a memory address of reg+large_const into a separate PLUS insn, since this
5381 ;; can generate unrecognizable rtl.
5383 (define_expand "storehi"
5384 [;; store the low byte
5385 (set (match_operand 1 "" "") (match_dup 3))
5386 ;; extract the high byte
5388 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
5389 ;; store the high byte
5390 (set (match_dup 4) (match_dup 5))]
5394 rtx op1 = operands[1];
5395 rtx addr = XEXP (op1, 0);
5396 enum rtx_code code = GET_CODE (addr);
5398 if ((code == PLUS && GET_CODE (XEXP (addr, 1)) != CONST_INT)
5400 op1 = replace_equiv_address (operands[1], force_reg (SImode, addr));
5402 operands[4] = adjust_address (op1, QImode, 1);
5403 operands[1] = adjust_address (operands[1], QImode, 0);
5404 operands[3] = gen_lowpart (QImode, operands[0]);
5405 operands[0] = gen_lowpart (SImode, operands[0]);
5406 operands[2] = gen_reg_rtx (SImode);
5407 operands[5] = gen_lowpart (QImode, operands[2]);
5411 (define_expand "storehi_bigend"
5412 [(set (match_dup 4) (match_dup 3))
5414 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
5415 (set (match_operand 1 "" "") (match_dup 5))]
5419 rtx op1 = operands[1];
5420 rtx addr = XEXP (op1, 0);
5421 enum rtx_code code = GET_CODE (addr);
5423 if ((code == PLUS && GET_CODE (XEXP (addr, 1)) != CONST_INT)
5425 op1 = replace_equiv_address (op1, force_reg (SImode, addr));
5427 operands[4] = adjust_address (op1, QImode, 1);
5428 operands[1] = adjust_address (operands[1], QImode, 0);
5429 operands[3] = gen_lowpart (QImode, operands[0]);
5430 operands[0] = gen_lowpart (SImode, operands[0]);
5431 operands[2] = gen_reg_rtx (SImode);
5432 operands[5] = gen_lowpart (QImode, operands[2]);
5436 ;; Subroutine to store a half word integer constant into memory.
5437 (define_expand "storeinthi"
5438 [(set (match_operand 0 "" "")
5439 (match_operand 1 "" ""))
5440 (set (match_dup 3) (match_dup 2))]
5444 HOST_WIDE_INT value = INTVAL (operands[1]);
5445 rtx addr = XEXP (operands[0], 0);
5446 rtx op0 = operands[0];
5447 enum rtx_code code = GET_CODE (addr);
5449 if ((code == PLUS && GET_CODE (XEXP (addr, 1)) != CONST_INT)
5451 op0 = replace_equiv_address (op0, force_reg (SImode, addr));
5453 operands[1] = gen_reg_rtx (SImode);
5454 if (BYTES_BIG_ENDIAN)
5456 emit_insn (gen_movsi (operands[1], GEN_INT ((value >> 8) & 255)));
5457 if ((value & 255) == ((value >> 8) & 255))
5458 operands[2] = operands[1];
5461 operands[2] = gen_reg_rtx (SImode);
5462 emit_insn (gen_movsi (operands[2], GEN_INT (value & 255)));
5467 emit_insn (gen_movsi (operands[1], GEN_INT (value & 255)));
5468 if ((value & 255) == ((value >> 8) & 255))
5469 operands[2] = operands[1];
5472 operands[2] = gen_reg_rtx (SImode);
5473 emit_insn (gen_movsi (operands[2], GEN_INT ((value >> 8) & 255)));
5477 operands[3] = adjust_address (op0, QImode, 1);
5478 operands[0] = adjust_address (operands[0], QImode, 0);
5479 operands[2] = gen_lowpart (QImode, operands[2]);
5480 operands[1] = gen_lowpart (QImode, operands[1]);
5484 (define_expand "storehi_single_op"
5485 [(set (match_operand:HI 0 "memory_operand" "")
5486 (match_operand:HI 1 "general_operand" ""))]
5487 "TARGET_32BIT && arm_arch4"
5489 if (!s_register_operand (operands[1], HImode))
5490 operands[1] = copy_to_mode_reg (HImode, operands[1]);
5494 (define_expand "movhi"
5495 [(set (match_operand:HI 0 "general_operand" "")
5496 (match_operand:HI 1 "general_operand" ""))]
5501 if (can_create_pseudo_p ())
5503 if (GET_CODE (operands[0]) == MEM)
5507 emit_insn (gen_storehi_single_op (operands[0], operands[1]));
5510 if (GET_CODE (operands[1]) == CONST_INT)
5511 emit_insn (gen_storeinthi (operands[0], operands[1]));
5514 if (GET_CODE (operands[1]) == MEM)
5515 operands[1] = force_reg (HImode, operands[1]);
5516 if (BYTES_BIG_ENDIAN)
5517 emit_insn (gen_storehi_bigend (operands[1], operands[0]));
5519 emit_insn (gen_storehi (operands[1], operands[0]));
5523 /* Sign extend a constant, and keep it in an SImode reg. */
5524 else if (GET_CODE (operands[1]) == CONST_INT)
5526 rtx reg = gen_reg_rtx (SImode);
5527 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
5529 /* If the constant is already valid, leave it alone. */
5530 if (!const_ok_for_arm (val))
5532 /* If setting all the top bits will make the constant
5533 loadable in a single instruction, then set them.
5534 Otherwise, sign extend the number. */
5536 if (const_ok_for_arm (~(val | ~0xffff)))
5538 else if (val & 0x8000)
5542 emit_insn (gen_movsi (reg, GEN_INT (val)));
5543 operands[1] = gen_lowpart (HImode, reg);
5545 else if (arm_arch4 && optimize && can_create_pseudo_p ()
5546 && GET_CODE (operands[1]) == MEM)
5548 rtx reg = gen_reg_rtx (SImode);
5550 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
5551 operands[1] = gen_lowpart (HImode, reg);
5553 else if (!arm_arch4)
5555 if (GET_CODE (operands[1]) == MEM)
5558 rtx offset = const0_rtx;
5559 rtx reg = gen_reg_rtx (SImode);
5561 if ((GET_CODE (base = XEXP (operands[1], 0)) == REG
5562 || (GET_CODE (base) == PLUS
5563 && (GET_CODE (offset = XEXP (base, 1))
5565 && ((INTVAL(offset) & 1) != 1)
5566 && GET_CODE (base = XEXP (base, 0)) == REG))
5567 && REGNO_POINTER_ALIGN (REGNO (base)) >= 32)
5571 new_rtx = widen_memory_access (operands[1], SImode,
5572 ((INTVAL (offset) & ~3)
5573 - INTVAL (offset)));
5574 emit_insn (gen_movsi (reg, new_rtx));
5575 if (((INTVAL (offset) & 2) != 0)
5576 ^ (BYTES_BIG_ENDIAN ? 1 : 0))
5578 rtx reg2 = gen_reg_rtx (SImode);
5580 emit_insn (gen_lshrsi3 (reg2, reg, GEN_INT (16)));
5585 emit_insn (gen_movhi_bytes (reg, operands[1]));
5587 operands[1] = gen_lowpart (HImode, reg);
5591 /* Handle loading a large integer during reload. */
5592 else if (GET_CODE (operands[1]) == CONST_INT
5593 && !const_ok_for_arm (INTVAL (operands[1]))
5594 && !const_ok_for_arm (~INTVAL (operands[1])))
5596 /* Writing a constant to memory needs a scratch, which should
5597 be handled with SECONDARY_RELOADs. */
5598 gcc_assert (GET_CODE (operands[0]) == REG);
5600 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5601 emit_insn (gen_movsi (operands[0], operands[1]));
5605 else if (TARGET_THUMB2)
5607 /* Thumb-2 can do everything except mem=mem and mem=const easily. */
5608 if (can_create_pseudo_p ())
5610 if (GET_CODE (operands[0]) != REG)
5611 operands[1] = force_reg (HImode, operands[1]);
5612 /* Zero extend a constant, and keep it in an SImode reg. */
5613 else if (GET_CODE (operands[1]) == CONST_INT)
5615 rtx reg = gen_reg_rtx (SImode);
5616 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
5618 emit_insn (gen_movsi (reg, GEN_INT (val)));
5619 operands[1] = gen_lowpart (HImode, reg);
5623 else /* TARGET_THUMB1 */
5625 if (can_create_pseudo_p ())
5627 if (GET_CODE (operands[1]) == CONST_INT)
5629 rtx reg = gen_reg_rtx (SImode);
5631 emit_insn (gen_movsi (reg, operands[1]));
5632 operands[1] = gen_lowpart (HImode, reg);
5635 /* ??? We shouldn't really get invalid addresses here, but this can
5636 happen if we are passed a SP (never OK for HImode/QImode) or
5637 virtual register (also rejected as illegitimate for HImode/QImode)
5638 relative address. */
5639 /* ??? This should perhaps be fixed elsewhere, for instance, in
5640 fixup_stack_1, by checking for other kinds of invalid addresses,
5641 e.g. a bare reference to a virtual register. This may confuse the
5642 alpha though, which must handle this case differently. */
5643 if (GET_CODE (operands[0]) == MEM
5644 && !memory_address_p (GET_MODE (operands[0]),
5645 XEXP (operands[0], 0)))
5647 = replace_equiv_address (operands[0],
5648 copy_to_reg (XEXP (operands[0], 0)));
5650 if (GET_CODE (operands[1]) == MEM
5651 && !memory_address_p (GET_MODE (operands[1]),
5652 XEXP (operands[1], 0)))
5654 = replace_equiv_address (operands[1],
5655 copy_to_reg (XEXP (operands[1], 0)));
5657 if (GET_CODE (operands[1]) == MEM && optimize > 0)
5659 rtx reg = gen_reg_rtx (SImode);
5661 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
5662 operands[1] = gen_lowpart (HImode, reg);
5665 if (GET_CODE (operands[0]) == MEM)
5666 operands[1] = force_reg (HImode, operands[1]);
5668 else if (GET_CODE (operands[1]) == CONST_INT
5669 && !satisfies_constraint_I (operands[1]))
5671 /* Handle loading a large integer during reload. */
5673 /* Writing a constant to memory needs a scratch, which should
5674 be handled with SECONDARY_RELOADs. */
5675 gcc_assert (GET_CODE (operands[0]) == REG);
5677 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5678 emit_insn (gen_movsi (operands[0], operands[1]));
5685 (define_insn "*thumb1_movhi_insn"
5686 [(set (match_operand:HI 0 "nonimmediate_operand" "=l,l,m,*r,*h,l")
5687 (match_operand:HI 1 "general_operand" "l,m,l,*h,*r,I"))]
5689 && ( register_operand (operands[0], HImode)
5690 || register_operand (operands[1], HImode))"
5692 switch (which_alternative)
5694 case 0: return \"add %0, %1, #0\";
5695 case 2: return \"strh %1, %0\";
5696 case 3: return \"mov %0, %1\";
5697 case 4: return \"mov %0, %1\";
5698 case 5: return \"mov %0, %1\";
5699 default: gcc_unreachable ();
5701 /* The stack pointer can end up being taken as an index register.
5702 Catch this case here and deal with it. */
5703 if (GET_CODE (XEXP (operands[1], 0)) == PLUS
5704 && GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == REG
5705 && REGNO (XEXP (XEXP (operands[1], 0), 0)) == SP_REGNUM)
5708 ops[0] = operands[0];
5709 ops[1] = XEXP (XEXP (operands[1], 0), 0);
5711 output_asm_insn (\"mov %0, %1\", ops);
5713 XEXP (XEXP (operands[1], 0), 0) = operands[0];
5716 return \"ldrh %0, %1\";
5718 [(set_attr "length" "2,4,2,2,2,2")
5719 (set_attr "type" "*,load1,store1,*,*,*")]
5723 (define_expand "movhi_bytes"
5724 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
5726 (zero_extend:SI (match_dup 6)))
5727 (set (match_operand:SI 0 "" "")
5728 (ior:SI (ashift:SI (match_dup 4) (const_int 8)) (match_dup 5)))]
5733 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
5735 mem1 = change_address (operands[1], QImode, addr);
5736 mem2 = change_address (operands[1], QImode, plus_constant (addr, 1));
5737 operands[0] = gen_lowpart (SImode, operands[0]);
5739 operands[2] = gen_reg_rtx (SImode);
5740 operands[3] = gen_reg_rtx (SImode);
5743 if (BYTES_BIG_ENDIAN)
5745 operands[4] = operands[2];
5746 operands[5] = operands[3];
5750 operands[4] = operands[3];
5751 operands[5] = operands[2];
5756 (define_expand "movhi_bigend"
5758 (rotate:SI (subreg:SI (match_operand:HI 1 "memory_operand" "") 0)
5761 (ashiftrt:SI (match_dup 2) (const_int 16)))
5762 (set (match_operand:HI 0 "s_register_operand" "")
5766 operands[2] = gen_reg_rtx (SImode);
5767 operands[3] = gen_reg_rtx (SImode);
5768 operands[4] = gen_lowpart (HImode, operands[3]);
5772 ;; Pattern to recognize insn generated default case above
5773 (define_insn "*movhi_insn_arch4"
5774 [(set (match_operand:HI 0 "nonimmediate_operand" "=r,r,m,r")
5775 (match_operand:HI 1 "general_operand" "rI,K,r,m"))]
5778 && (GET_CODE (operands[1]) != CONST_INT
5779 || const_ok_for_arm (INTVAL (operands[1]))
5780 || const_ok_for_arm (~INTVAL (operands[1])))"
5782 mov%?\\t%0, %1\\t%@ movhi
5783 mvn%?\\t%0, #%B1\\t%@ movhi
5784 str%(h%)\\t%1, %0\\t%@ movhi
5785 ldr%(h%)\\t%0, %1\\t%@ movhi"
5786 [(set_attr "type" "*,*,store1,load1")
5787 (set_attr "predicable" "yes")
5788 (set_attr "pool_range" "*,*,*,256")
5789 (set_attr "neg_pool_range" "*,*,*,244")]
5792 (define_insn "*movhi_bytes"
5793 [(set (match_operand:HI 0 "s_register_operand" "=r,r")
5794 (match_operand:HI 1 "arm_rhs_operand" "rI,K"))]
5797 mov%?\\t%0, %1\\t%@ movhi
5798 mvn%?\\t%0, #%B1\\t%@ movhi"
5799 [(set_attr "predicable" "yes")]
5802 (define_expand "thumb_movhi_clobber"
5803 [(set (match_operand:HI 0 "memory_operand" "")
5804 (match_operand:HI 1 "register_operand" ""))
5805 (clobber (match_operand:DI 2 "register_operand" ""))]
5808 if (strict_memory_address_p (HImode, XEXP (operands[0], 0))
5809 && REGNO (operands[1]) <= LAST_LO_REGNUM)
5811 emit_insn (gen_movhi (operands[0], operands[1]));
5814 /* XXX Fixme, need to handle other cases here as well. */
5819 ;; We use a DImode scratch because we may occasionally need an additional
5820 ;; temporary if the address isn't offsettable -- push_reload doesn't seem
5821 ;; to take any notice of the "o" constraints on reload_memory_operand operand.
5822 (define_expand "reload_outhi"
5823 [(parallel [(match_operand:HI 0 "arm_reload_memory_operand" "=o")
5824 (match_operand:HI 1 "s_register_operand" "r")
5825 (match_operand:DI 2 "s_register_operand" "=&l")])]
5828 arm_reload_out_hi (operands);
5830 thumb_reload_out_hi (operands);
5835 (define_expand "reload_inhi"
5836 [(parallel [(match_operand:HI 0 "s_register_operand" "=r")
5837 (match_operand:HI 1 "arm_reload_memory_operand" "o")
5838 (match_operand:DI 2 "s_register_operand" "=&r")])]
5842 arm_reload_in_hi (operands);
5844 thumb_reload_out_hi (operands);
5848 (define_expand "movqi"
5849 [(set (match_operand:QI 0 "general_operand" "")
5850 (match_operand:QI 1 "general_operand" ""))]
5853 /* Everything except mem = const or mem = mem can be done easily */
5855 if (can_create_pseudo_p ())
5857 if (GET_CODE (operands[1]) == CONST_INT)
5859 rtx reg = gen_reg_rtx (SImode);
5861 /* For thumb we want an unsigned immediate, then we are more likely
5862 to be able to use a movs insn. */
5864 operands[1] = GEN_INT (INTVAL (operands[1]) & 255);
5866 emit_insn (gen_movsi (reg, operands[1]));
5867 operands[1] = gen_lowpart (QImode, reg);
5872 /* ??? We shouldn't really get invalid addresses here, but this can
5873 happen if we are passed a SP (never OK for HImode/QImode) or
5874 virtual register (also rejected as illegitimate for HImode/QImode)
5875 relative address. */
5876 /* ??? This should perhaps be fixed elsewhere, for instance, in
5877 fixup_stack_1, by checking for other kinds of invalid addresses,
5878 e.g. a bare reference to a virtual register. This may confuse the
5879 alpha though, which must handle this case differently. */
5880 if (GET_CODE (operands[0]) == MEM
5881 && !memory_address_p (GET_MODE (operands[0]),
5882 XEXP (operands[0], 0)))
5884 = replace_equiv_address (operands[0],
5885 copy_to_reg (XEXP (operands[0], 0)));
5886 if (GET_CODE (operands[1]) == MEM
5887 && !memory_address_p (GET_MODE (operands[1]),
5888 XEXP (operands[1], 0)))
5890 = replace_equiv_address (operands[1],
5891 copy_to_reg (XEXP (operands[1], 0)));
5894 if (GET_CODE (operands[1]) == MEM && optimize > 0)
5896 rtx reg = gen_reg_rtx (SImode);
5898 emit_insn (gen_zero_extendqisi2 (reg, operands[1]));
5899 operands[1] = gen_lowpart (QImode, reg);
5902 if (GET_CODE (operands[0]) == MEM)
5903 operands[1] = force_reg (QImode, operands[1]);
5905 else if (TARGET_THUMB
5906 && GET_CODE (operands[1]) == CONST_INT
5907 && !satisfies_constraint_I (operands[1]))
5909 /* Handle loading a large integer during reload. */
5911 /* Writing a constant to memory needs a scratch, which should
5912 be handled with SECONDARY_RELOADs. */
5913 gcc_assert (GET_CODE (operands[0]) == REG);
5915 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5916 emit_insn (gen_movsi (operands[0], operands[1]));
5923 (define_insn "*arm_movqi_insn"
5924 [(set (match_operand:QI 0 "nonimmediate_operand" "=r,r,r,m")
5925 (match_operand:QI 1 "general_operand" "rI,K,m,r"))]
5927 && ( register_operand (operands[0], QImode)
5928 || register_operand (operands[1], QImode))"
5934 [(set_attr "type" "*,*,load1,store1")
5935 (set_attr "predicable" "yes")]
5938 (define_insn "*thumb1_movqi_insn"
5939 [(set (match_operand:QI 0 "nonimmediate_operand" "=l,l,m,*r,*h,l")
5940 (match_operand:QI 1 "general_operand" "l, m,l,*h,*r,I"))]
5942 && ( register_operand (operands[0], QImode)
5943 || register_operand (operands[1], QImode))"
5951 [(set_attr "length" "2")
5952 (set_attr "type" "*,load1,store1,*,*,*")
5953 (set_attr "pool_range" "*,32,*,*,*,*")]
5957 (define_expand "movhf"
5958 [(set (match_operand:HF 0 "general_operand" "")
5959 (match_operand:HF 1 "general_operand" ""))]
5964 if (GET_CODE (operands[0]) == MEM)
5965 operands[1] = force_reg (HFmode, operands[1]);
5967 else /* TARGET_THUMB1 */
5969 if (can_create_pseudo_p ())
5971 if (GET_CODE (operands[0]) != REG)
5972 operands[1] = force_reg (HFmode, operands[1]);
5978 (define_insn "*arm32_movhf"
5979 [(set (match_operand:HF 0 "nonimmediate_operand" "=r,m,r,r")
5980 (match_operand:HF 1 "general_operand" " m,r,r,F"))]
5981 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_FP16)
5982 && ( s_register_operand (operands[0], HFmode)
5983 || s_register_operand (operands[1], HFmode))"
5985 switch (which_alternative)
5987 case 0: /* ARM register from memory */
5988 return \"ldr%(h%)\\t%0, %1\\t%@ __fp16\";
5989 case 1: /* memory from ARM register */
5990 return \"str%(h%)\\t%1, %0\\t%@ __fp16\";
5991 case 2: /* ARM register from ARM register */
5992 return \"mov%?\\t%0, %1\\t%@ __fp16\";
5993 case 3: /* ARM register from constant */
5999 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[1]);
6000 bits = real_to_target (NULL, &r, HFmode);
6001 ops[0] = operands[0];
6002 ops[1] = GEN_INT (bits);
6003 ops[2] = GEN_INT (bits & 0xff00);
6004 ops[3] = GEN_INT (bits & 0x00ff);
6006 if (arm_arch_thumb2)
6007 output_asm_insn (\"movw%?\\t%0, %1\", ops);
6009 output_asm_insn (\"mov%?\\t%0, %2\;orr%?\\t%0, %0, %3\", ops);
6016 [(set_attr "conds" "unconditional")
6017 (set_attr "type" "load1,store1,*,*")
6018 (set_attr "length" "4,4,4,8")
6019 (set_attr "predicable" "yes")
6023 (define_insn "*thumb1_movhf"
6024 [(set (match_operand:HF 0 "nonimmediate_operand" "=l,l,m,*r,*h")
6025 (match_operand:HF 1 "general_operand" "l,mF,l,*h,*r"))]
6027 && ( s_register_operand (operands[0], HFmode)
6028 || s_register_operand (operands[1], HFmode))"
6030 switch (which_alternative)
6035 gcc_assert (GET_CODE(operands[1]) == MEM);
6036 addr = XEXP (operands[1], 0);
6037 if (GET_CODE (addr) == LABEL_REF
6038 || (GET_CODE (addr) == CONST
6039 && GET_CODE (XEXP (addr, 0)) == PLUS
6040 && GET_CODE (XEXP (XEXP (addr, 0), 0)) == LABEL_REF
6041 && GET_CODE (XEXP (XEXP (addr, 0), 1)) == CONST_INT))
6043 /* Constant pool entry. */
6044 return \"ldr\\t%0, %1\";
6046 return \"ldrh\\t%0, %1\";
6048 case 2: return \"strh\\t%1, %0\";
6049 default: return \"mov\\t%0, %1\";
6052 [(set_attr "length" "2")
6053 (set_attr "type" "*,load1,store1,*,*")
6054 (set_attr "pool_range" "*,1020,*,*,*")]
6057 (define_expand "movsf"
6058 [(set (match_operand:SF 0 "general_operand" "")
6059 (match_operand:SF 1 "general_operand" ""))]
6064 if (GET_CODE (operands[0]) == MEM)
6065 operands[1] = force_reg (SFmode, operands[1]);
6067 else /* TARGET_THUMB1 */
6069 if (can_create_pseudo_p ())
6071 if (GET_CODE (operands[0]) != REG)
6072 operands[1] = force_reg (SFmode, operands[1]);
6078 ;; Transform a floating-point move of a constant into a core register into
6079 ;; an SImode operation.
6081 [(set (match_operand:SF 0 "arm_general_register_operand" "")
6082 (match_operand:SF 1 "immediate_operand" ""))]
6085 && GET_CODE (operands[1]) == CONST_DOUBLE"
6086 [(set (match_dup 2) (match_dup 3))]
6088 operands[2] = gen_lowpart (SImode, operands[0]);
6089 operands[3] = gen_lowpart (SImode, operands[1]);
6090 if (operands[2] == 0 || operands[3] == 0)
6095 (define_insn "*arm_movsf_soft_insn"
6096 [(set (match_operand:SF 0 "nonimmediate_operand" "=r,r,m")
6097 (match_operand:SF 1 "general_operand" "r,mE,r"))]
6099 && TARGET_SOFT_FLOAT
6100 && (GET_CODE (operands[0]) != MEM
6101 || register_operand (operands[1], SFmode))"
6104 ldr%?\\t%0, %1\\t%@ float
6105 str%?\\t%1, %0\\t%@ float"
6106 [(set_attr "length" "4,4,4")
6107 (set_attr "predicable" "yes")
6108 (set_attr "type" "*,load1,store1")
6109 (set_attr "pool_range" "*,4096,*")
6110 (set_attr "neg_pool_range" "*,4084,*")]
6113 ;;; ??? This should have alternatives for constants.
6114 (define_insn "*thumb1_movsf_insn"
6115 [(set (match_operand:SF 0 "nonimmediate_operand" "=l,l,>,l, m,*r,*h")
6116 (match_operand:SF 1 "general_operand" "l, >,l,mF,l,*h,*r"))]
6118 && ( register_operand (operands[0], SFmode)
6119 || register_operand (operands[1], SFmode))"
6128 [(set_attr "length" "2")
6129 (set_attr "type" "*,load1,store1,load1,store1,*,*")
6130 (set_attr "pool_range" "*,*,*,1020,*,*,*")]
6133 (define_expand "movdf"
6134 [(set (match_operand:DF 0 "general_operand" "")
6135 (match_operand:DF 1 "general_operand" ""))]
6140 if (GET_CODE (operands[0]) == MEM)
6141 operands[1] = force_reg (DFmode, operands[1]);
6143 else /* TARGET_THUMB */
6145 if (can_create_pseudo_p ())
6147 if (GET_CODE (operands[0]) != REG)
6148 operands[1] = force_reg (DFmode, operands[1]);
6154 ;; Reloading a df mode value stored in integer regs to memory can require a
6156 (define_expand "reload_outdf"
6157 [(match_operand:DF 0 "arm_reload_memory_operand" "=o")
6158 (match_operand:DF 1 "s_register_operand" "r")
6159 (match_operand:SI 2 "s_register_operand" "=&r")]
6163 enum rtx_code code = GET_CODE (XEXP (operands[0], 0));
6166 operands[2] = XEXP (operands[0], 0);
6167 else if (code == POST_INC || code == PRE_DEC)
6169 operands[0] = gen_rtx_SUBREG (DImode, operands[0], 0);
6170 operands[1] = gen_rtx_SUBREG (DImode, operands[1], 0);
6171 emit_insn (gen_movdi (operands[0], operands[1]));
6174 else if (code == PRE_INC)
6176 rtx reg = XEXP (XEXP (operands[0], 0), 0);
6178 emit_insn (gen_addsi3 (reg, reg, GEN_INT (8)));
6181 else if (code == POST_DEC)
6182 operands[2] = XEXP (XEXP (operands[0], 0), 0);
6184 emit_insn (gen_addsi3 (operands[2], XEXP (XEXP (operands[0], 0), 0),
6185 XEXP (XEXP (operands[0], 0), 1)));
6187 emit_insn (gen_rtx_SET (VOIDmode,
6188 replace_equiv_address (operands[0], operands[2]),
6191 if (code == POST_DEC)
6192 emit_insn (gen_addsi3 (operands[2], operands[2], GEN_INT (-8)));
6198 (define_insn "*movdf_soft_insn"
6199 [(set (match_operand:DF 0 "nonimmediate_soft_df_operand" "=r,r,r,r,m")
6200 (match_operand:DF 1 "soft_df_operand" "rDa,Db,Dc,mF,r"))]
6201 "TARGET_ARM && TARGET_SOFT_FLOAT
6202 && ( register_operand (operands[0], DFmode)
6203 || register_operand (operands[1], DFmode))"
6205 switch (which_alternative)
6212 return output_move_double (operands);
6215 [(set_attr "length" "8,12,16,8,8")
6216 (set_attr "type" "*,*,*,load2,store2")
6217 (set_attr "pool_range" "1020")
6218 (set_attr "neg_pool_range" "1008")]
6221 ;;; ??? This should have alternatives for constants.
6222 ;;; ??? This was originally identical to the movdi_insn pattern.
6223 ;;; ??? The 'F' constraint looks funny, but it should always be replaced by
6224 ;;; thumb_reorg with a memory reference.
6225 (define_insn "*thumb_movdf_insn"
6226 [(set (match_operand:DF 0 "nonimmediate_operand" "=l,l,>,l, m,*r")
6227 (match_operand:DF 1 "general_operand" "l, >,l,mF,l,*r"))]
6229 && ( register_operand (operands[0], DFmode)
6230 || register_operand (operands[1], DFmode))"
6232 switch (which_alternative)
6236 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
6237 return \"add\\t%0, %1, #0\;add\\t%H0, %H1, #0\";
6238 return \"add\\t%H0, %H1, #0\;add\\t%0, %1, #0\";
6240 return \"ldmia\\t%1, {%0, %H0}\";
6242 return \"stmia\\t%0, {%1, %H1}\";
6244 return thumb_load_double_from_address (operands);
6246 operands[2] = gen_rtx_MEM (SImode,
6247 plus_constant (XEXP (operands[0], 0), 4));
6248 output_asm_insn (\"str\\t%1, %0\;str\\t%H1, %2\", operands);
6251 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
6252 return \"mov\\t%0, %1\;mov\\t%H0, %H1\";
6253 return \"mov\\t%H0, %H1\;mov\\t%0, %1\";
6256 [(set_attr "length" "4,2,2,6,4,4")
6257 (set_attr "type" "*,load2,store2,load2,store2,*")
6258 (set_attr "pool_range" "*,*,*,1020,*,*")]
6261 (define_expand "movxf"
6262 [(set (match_operand:XF 0 "general_operand" "")
6263 (match_operand:XF 1 "general_operand" ""))]
6264 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
6266 if (GET_CODE (operands[0]) == MEM)
6267 operands[1] = force_reg (XFmode, operands[1]);
6273 ;; load- and store-multiple insns
6274 ;; The arm can load/store any set of registers, provided that they are in
6275 ;; ascending order; but that is beyond GCC so stick with what it knows.
6277 (define_expand "load_multiple"
6278 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
6279 (match_operand:SI 1 "" ""))
6280 (use (match_operand:SI 2 "" ""))])]
6283 HOST_WIDE_INT offset = 0;
6285 /* Support only fixed point registers. */
6286 if (GET_CODE (operands[2]) != CONST_INT
6287 || INTVAL (operands[2]) > 14
6288 || INTVAL (operands[2]) < 2
6289 || GET_CODE (operands[1]) != MEM
6290 || GET_CODE (operands[0]) != REG
6291 || REGNO (operands[0]) > (LAST_ARM_REGNUM - 1)
6292 || REGNO (operands[0]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
6296 = arm_gen_load_multiple (REGNO (operands[0]), INTVAL (operands[2]),
6297 force_reg (SImode, XEXP (operands[1], 0)),
6298 TRUE, FALSE, operands[1], &offset);
6301 ;; Load multiple with write-back
6303 (define_insn "*ldmsi_postinc4"
6304 [(match_parallel 0 "load_multiple_operation"
6305 [(set (match_operand:SI 1 "s_register_operand" "=r")
6306 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6308 (set (match_operand:SI 3 "arm_hard_register_operand" "")
6309 (mem:SI (match_dup 2)))
6310 (set (match_operand:SI 4 "arm_hard_register_operand" "")
6311 (mem:SI (plus:SI (match_dup 2) (const_int 4))))
6312 (set (match_operand:SI 5 "arm_hard_register_operand" "")
6313 (mem:SI (plus:SI (match_dup 2) (const_int 8))))
6314 (set (match_operand:SI 6 "arm_hard_register_operand" "")
6315 (mem:SI (plus:SI (match_dup 2) (const_int 12))))])]
6316 "TARGET_32BIT && XVECLEN (operands[0], 0) == 5"
6317 "ldm%(ia%)\\t%1!, {%3, %4, %5, %6}"
6318 [(set_attr "type" "load4")
6319 (set_attr "predicable" "yes")]
6322 (define_insn "*ldmsi_postinc4_thumb1"
6323 [(match_parallel 0 "load_multiple_operation"
6324 [(set (match_operand:SI 1 "s_register_operand" "=l")
6325 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6327 (set (match_operand:SI 3 "arm_hard_register_operand" "")
6328 (mem:SI (match_dup 2)))
6329 (set (match_operand:SI 4 "arm_hard_register_operand" "")
6330 (mem:SI (plus:SI (match_dup 2) (const_int 4))))
6331 (set (match_operand:SI 5 "arm_hard_register_operand" "")
6332 (mem:SI (plus:SI (match_dup 2) (const_int 8))))
6333 (set (match_operand:SI 6 "arm_hard_register_operand" "")
6334 (mem:SI (plus:SI (match_dup 2) (const_int 12))))])]
6335 "TARGET_THUMB1 && XVECLEN (operands[0], 0) == 5"
6336 "ldmia\\t%1!, {%3, %4, %5, %6}"
6337 [(set_attr "type" "load4")]
6340 (define_insn "*ldmsi_postinc3"
6341 [(match_parallel 0 "load_multiple_operation"
6342 [(set (match_operand:SI 1 "s_register_operand" "=r")
6343 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6345 (set (match_operand:SI 3 "arm_hard_register_operand" "")
6346 (mem:SI (match_dup 2)))
6347 (set (match_operand:SI 4 "arm_hard_register_operand" "")
6348 (mem:SI (plus:SI (match_dup 2) (const_int 4))))
6349 (set (match_operand:SI 5 "arm_hard_register_operand" "")
6350 (mem:SI (plus:SI (match_dup 2) (const_int 8))))])]
6351 "TARGET_32BIT && XVECLEN (operands[0], 0) == 4"
6352 "ldm%(ia%)\\t%1!, {%3, %4, %5}"
6353 [(set_attr "type" "load3")
6354 (set_attr "predicable" "yes")]
6357 (define_insn "*ldmsi_postinc2"
6358 [(match_parallel 0 "load_multiple_operation"
6359 [(set (match_operand:SI 1 "s_register_operand" "=r")
6360 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6362 (set (match_operand:SI 3 "arm_hard_register_operand" "")
6363 (mem:SI (match_dup 2)))
6364 (set (match_operand:SI 4 "arm_hard_register_operand" "")
6365 (mem:SI (plus:SI (match_dup 2) (const_int 4))))])]
6366 "TARGET_32BIT && XVECLEN (operands[0], 0) == 3"
6367 "ldm%(ia%)\\t%1!, {%3, %4}"
6368 [(set_attr "type" "load2")
6369 (set_attr "predicable" "yes")]
6372 ;; Ordinary load multiple
6374 (define_insn "*ldmsi4"
6375 [(match_parallel 0 "load_multiple_operation"
6376 [(set (match_operand:SI 2 "arm_hard_register_operand" "")
6377 (mem:SI (match_operand:SI 1 "s_register_operand" "r")))
6378 (set (match_operand:SI 3 "arm_hard_register_operand" "")
6379 (mem:SI (plus:SI (match_dup 1) (const_int 4))))
6380 (set (match_operand:SI 4 "arm_hard_register_operand" "")
6381 (mem:SI (plus:SI (match_dup 1) (const_int 8))))
6382 (set (match_operand:SI 5 "arm_hard_register_operand" "")
6383 (mem:SI (plus:SI (match_dup 1) (const_int 12))))])]
6384 "TARGET_32BIT && XVECLEN (operands[0], 0) == 4"
6385 "ldm%(ia%)\\t%1, {%2, %3, %4, %5}"
6386 [(set_attr "type" "load4")
6387 (set_attr "predicable" "yes")]
6390 (define_insn "*ldmsi3"
6391 [(match_parallel 0 "load_multiple_operation"
6392 [(set (match_operand:SI 2 "arm_hard_register_operand" "")
6393 (mem:SI (match_operand:SI 1 "s_register_operand" "r")))
6394 (set (match_operand:SI 3 "arm_hard_register_operand" "")
6395 (mem:SI (plus:SI (match_dup 1) (const_int 4))))
6396 (set (match_operand:SI 4 "arm_hard_register_operand" "")
6397 (mem:SI (plus:SI (match_dup 1) (const_int 8))))])]
6398 "TARGET_32BIT && XVECLEN (operands[0], 0) == 3"
6399 "ldm%(ia%)\\t%1, {%2, %3, %4}"
6400 [(set_attr "type" "load3")
6401 (set_attr "predicable" "yes")]
6404 (define_insn "*ldmsi2"
6405 [(match_parallel 0 "load_multiple_operation"
6406 [(set (match_operand:SI 2 "arm_hard_register_operand" "")
6407 (mem:SI (match_operand:SI 1 "s_register_operand" "r")))
6408 (set (match_operand:SI 3 "arm_hard_register_operand" "")
6409 (mem:SI (plus:SI (match_dup 1) (const_int 4))))])]
6410 "TARGET_32BIT && XVECLEN (operands[0], 0) == 2"
6411 "ldm%(ia%)\\t%1, {%2, %3}"
6412 [(set_attr "type" "load2")
6413 (set_attr "predicable" "yes")]
6416 (define_expand "store_multiple"
6417 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
6418 (match_operand:SI 1 "" ""))
6419 (use (match_operand:SI 2 "" ""))])]
6422 HOST_WIDE_INT offset = 0;
6424 /* Support only fixed point registers. */
6425 if (GET_CODE (operands[2]) != CONST_INT
6426 || INTVAL (operands[2]) > 14
6427 || INTVAL (operands[2]) < 2
6428 || GET_CODE (operands[1]) != REG
6429 || GET_CODE (operands[0]) != MEM
6430 || REGNO (operands[1]) > (LAST_ARM_REGNUM - 1)
6431 || REGNO (operands[1]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
6435 = arm_gen_store_multiple (REGNO (operands[1]), INTVAL (operands[2]),
6436 force_reg (SImode, XEXP (operands[0], 0)),
6437 TRUE, FALSE, operands[0], &offset);
6440 ;; Store multiple with write-back
6442 (define_insn "*stmsi_postinc4"
6443 [(match_parallel 0 "store_multiple_operation"
6444 [(set (match_operand:SI 1 "s_register_operand" "=r")
6445 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6447 (set (mem:SI (match_dup 2))
6448 (match_operand:SI 3 "arm_hard_register_operand" ""))
6449 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6450 (match_operand:SI 4 "arm_hard_register_operand" ""))
6451 (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
6452 (match_operand:SI 5 "arm_hard_register_operand" ""))
6453 (set (mem:SI (plus:SI (match_dup 2) (const_int 12)))
6454 (match_operand:SI 6 "arm_hard_register_operand" ""))])]
6455 "TARGET_32BIT && XVECLEN (operands[0], 0) == 5"
6456 "stm%(ia%)\\t%1!, {%3, %4, %5, %6}"
6457 [(set_attr "predicable" "yes")
6458 (set_attr "type" "store4")]
6461 (define_insn "*stmsi_postinc4_thumb1"
6462 [(match_parallel 0 "store_multiple_operation"
6463 [(set (match_operand:SI 1 "s_register_operand" "=l")
6464 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6466 (set (mem:SI (match_dup 2))
6467 (match_operand:SI 3 "arm_hard_register_operand" ""))
6468 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6469 (match_operand:SI 4 "arm_hard_register_operand" ""))
6470 (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
6471 (match_operand:SI 5 "arm_hard_register_operand" ""))
6472 (set (mem:SI (plus:SI (match_dup 2) (const_int 12)))
6473 (match_operand:SI 6 "arm_hard_register_operand" ""))])]
6474 "TARGET_THUMB1 && XVECLEN (operands[0], 0) == 5"
6475 "stmia\\t%1!, {%3, %4, %5, %6}"
6476 [(set_attr "type" "store4")]
6479 (define_insn "*stmsi_postinc3"
6480 [(match_parallel 0 "store_multiple_operation"
6481 [(set (match_operand:SI 1 "s_register_operand" "=r")
6482 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6484 (set (mem:SI (match_dup 2))
6485 (match_operand:SI 3 "arm_hard_register_operand" ""))
6486 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6487 (match_operand:SI 4 "arm_hard_register_operand" ""))
6488 (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
6489 (match_operand:SI 5 "arm_hard_register_operand" ""))])]
6490 "TARGET_32BIT && XVECLEN (operands[0], 0) == 4"
6491 "stm%(ia%)\\t%1!, {%3, %4, %5}"
6492 [(set_attr "predicable" "yes")
6493 (set_attr "type" "store3")]
6496 (define_insn "*stmsi_postinc2"
6497 [(match_parallel 0 "store_multiple_operation"
6498 [(set (match_operand:SI 1 "s_register_operand" "=r")
6499 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6501 (set (mem:SI (match_dup 2))
6502 (match_operand:SI 3 "arm_hard_register_operand" ""))
6503 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6504 (match_operand:SI 4 "arm_hard_register_operand" ""))])]
6505 "TARGET_32BIT && XVECLEN (operands[0], 0) == 3"
6506 "stm%(ia%)\\t%1!, {%3, %4}"
6507 [(set_attr "predicable" "yes")
6508 (set_attr "type" "store2")]
6511 ;; Ordinary store multiple
6513 (define_insn "*stmsi4"
6514 [(match_parallel 0 "store_multiple_operation"
6515 [(set (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
6516 (match_operand:SI 2 "arm_hard_register_operand" ""))
6517 (set (mem:SI (plus:SI (match_dup 1) (const_int 4)))
6518 (match_operand:SI 3 "arm_hard_register_operand" ""))
6519 (set (mem:SI (plus:SI (match_dup 1) (const_int 8)))
6520 (match_operand:SI 4 "arm_hard_register_operand" ""))
6521 (set (mem:SI (plus:SI (match_dup 1) (const_int 12)))
6522 (match_operand:SI 5 "arm_hard_register_operand" ""))])]
6523 "TARGET_32BIT && XVECLEN (operands[0], 0) == 4"
6524 "stm%(ia%)\\t%1, {%2, %3, %4, %5}"
6525 [(set_attr "predicable" "yes")
6526 (set_attr "type" "store4")]
6529 (define_insn "*stmsi3"
6530 [(match_parallel 0 "store_multiple_operation"
6531 [(set (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
6532 (match_operand:SI 2 "arm_hard_register_operand" ""))
6533 (set (mem:SI (plus:SI (match_dup 1) (const_int 4)))
6534 (match_operand:SI 3 "arm_hard_register_operand" ""))
6535 (set (mem:SI (plus:SI (match_dup 1) (const_int 8)))
6536 (match_operand:SI 4 "arm_hard_register_operand" ""))])]
6537 "TARGET_32BIT && XVECLEN (operands[0], 0) == 3"
6538 "stm%(ia%)\\t%1, {%2, %3, %4}"
6539 [(set_attr "predicable" "yes")
6540 (set_attr "type" "store3")]
6543 (define_insn "*stmsi2"
6544 [(match_parallel 0 "store_multiple_operation"
6545 [(set (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
6546 (match_operand:SI 2 "arm_hard_register_operand" ""))
6547 (set (mem:SI (plus:SI (match_dup 1) (const_int 4)))
6548 (match_operand:SI 3 "arm_hard_register_operand" ""))])]
6549 "TARGET_32BIT && XVECLEN (operands[0], 0) == 2"
6550 "stm%(ia%)\\t%1, {%2, %3}"
6551 [(set_attr "predicable" "yes")
6552 (set_attr "type" "store2")]
6555 ;; Move a block of memory if it is word aligned and MORE than 2 words long.
6556 ;; We could let this apply for blocks of less than this, but it clobbers so
6557 ;; many registers that there is then probably a better way.
6559 (define_expand "movmemqi"
6560 [(match_operand:BLK 0 "general_operand" "")
6561 (match_operand:BLK 1 "general_operand" "")
6562 (match_operand:SI 2 "const_int_operand" "")
6563 (match_operand:SI 3 "const_int_operand" "")]
6568 if (arm_gen_movmemqi (operands))
6572 else /* TARGET_THUMB1 */
6574 if ( INTVAL (operands[3]) != 4
6575 || INTVAL (operands[2]) > 48)
6578 thumb_expand_movmemqi (operands);
6584 ;; Thumb block-move insns
6586 (define_insn "movmem12b"
6587 [(set (mem:SI (match_operand:SI 2 "register_operand" "0"))
6588 (mem:SI (match_operand:SI 3 "register_operand" "1")))
6589 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6590 (mem:SI (plus:SI (match_dup 3) (const_int 4))))
6591 (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
6592 (mem:SI (plus:SI (match_dup 3) (const_int 8))))
6593 (set (match_operand:SI 0 "register_operand" "=l")
6594 (plus:SI (match_dup 2) (const_int 12)))
6595 (set (match_operand:SI 1 "register_operand" "=l")
6596 (plus:SI (match_dup 3) (const_int 12)))
6597 (clobber (match_scratch:SI 4 "=&l"))
6598 (clobber (match_scratch:SI 5 "=&l"))
6599 (clobber (match_scratch:SI 6 "=&l"))]
6601 "* return thumb_output_move_mem_multiple (3, operands);"
6602 [(set_attr "length" "4")
6603 ; This isn't entirely accurate... It loads as well, but in terms of
6604 ; scheduling the following insn it is better to consider it as a store
6605 (set_attr "type" "store3")]
6608 (define_insn "movmem8b"
6609 [(set (mem:SI (match_operand:SI 2 "register_operand" "0"))
6610 (mem:SI (match_operand:SI 3 "register_operand" "1")))
6611 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6612 (mem:SI (plus:SI (match_dup 3) (const_int 4))))
6613 (set (match_operand:SI 0 "register_operand" "=l")
6614 (plus:SI (match_dup 2) (const_int 8)))
6615 (set (match_operand:SI 1 "register_operand" "=l")
6616 (plus:SI (match_dup 3) (const_int 8)))
6617 (clobber (match_scratch:SI 4 "=&l"))
6618 (clobber (match_scratch:SI 5 "=&l"))]
6620 "* return thumb_output_move_mem_multiple (2, operands);"
6621 [(set_attr "length" "4")
6622 ; This isn't entirely accurate... It loads as well, but in terms of
6623 ; scheduling the following insn it is better to consider it as a store
6624 (set_attr "type" "store2")]
6629 ;; Compare & branch insns
6630 ;; The range calculations are based as follows:
6631 ;; For forward branches, the address calculation returns the address of
6632 ;; the next instruction. This is 2 beyond the branch instruction.
6633 ;; For backward branches, the address calculation returns the address of
6634 ;; the first instruction in this pattern (cmp). This is 2 before the branch
6635 ;; instruction for the shortest sequence, and 4 before the branch instruction
6636 ;; if we have to jump around an unconditional branch.
6637 ;; To the basic branch range the PC offset must be added (this is +4).
6638 ;; So for forward branches we have
6639 ;; (pos_range - pos_base_offs + pc_offs) = (pos_range - 2 + 4).
6640 ;; And for backward branches we have
6641 ;; (neg_range - neg_base_offs + pc_offs) = (neg_range - (-2 or -4) + 4).
6643 ;; For a 'b' pos_range = 2046, neg_range = -2048 giving (-2040->2048).
6644 ;; For a 'b<cond>' pos_range = 254, neg_range = -256 giving (-250 ->256).
6646 (define_expand "cbranchsi4"
6647 [(set (pc) (if_then_else
6648 (match_operator 0 "arm_comparison_operator"
6649 [(match_operand:SI 1 "s_register_operand" "")
6650 (match_operand:SI 2 "nonmemory_operand" "")])
6651 (label_ref (match_operand 3 "" ""))
6653 "TARGET_THUMB1 || TARGET_32BIT"
6657 if (!arm_add_operand (operands[2], SImode))
6658 operands[2] = force_reg (SImode, operands[2]);
6659 emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6663 if (thumb1_cmpneg_operand (operands[2], SImode))
6665 emit_jump_insn (gen_cbranchsi4_scratch (NULL, operands[1], operands[2],
6666 operands[3], operands[0]));
6669 if (!thumb1_cmp_operand (operands[2], SImode))
6670 operands[2] = force_reg (SImode, operands[2]);
6673 ;; A pattern to recognize a special situation and optimize for it.
6674 ;; On the thumb, zero-extension from memory is preferrable to sign-extension
6675 ;; due to the available addressing modes. Hence, convert a signed comparison
6676 ;; with zero into an unsigned comparison with 127 if possible.
6677 (define_expand "cbranchqi4"
6678 [(set (pc) (if_then_else
6679 (match_operator 0 "lt_ge_comparison_operator"
6680 [(match_operand:QI 1 "memory_operand" "")
6681 (match_operand:QI 2 "const0_operand" "")])
6682 (label_ref (match_operand 3 "" ""))
6687 xops[1] = gen_reg_rtx (SImode);
6688 emit_insn (gen_zero_extendqisi2 (xops[1], operands[1]));
6689 xops[2] = GEN_INT (127);
6690 xops[0] = gen_rtx_fmt_ee (GET_CODE (operands[0]) == GE ? LEU : GTU,
6691 VOIDmode, xops[1], xops[2]);
6692 xops[3] = operands[3];
6693 emit_insn (gen_cbranchsi4 (xops[0], xops[1], xops[2], xops[3]));
6697 (define_expand "cbranchsf4"
6698 [(set (pc) (if_then_else
6699 (match_operator 0 "arm_comparison_operator"
6700 [(match_operand:SF 1 "s_register_operand" "")
6701 (match_operand:SF 2 "arm_float_compare_operand" "")])
6702 (label_ref (match_operand 3 "" ""))
6704 "TARGET_32BIT && TARGET_HARD_FLOAT"
6705 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6706 operands[3])); DONE;"
6709 (define_expand "cbranchdf4"
6710 [(set (pc) (if_then_else
6711 (match_operator 0 "arm_comparison_operator"
6712 [(match_operand:DF 1 "s_register_operand" "")
6713 (match_operand:DF 2 "arm_float_compare_operand" "")])
6714 (label_ref (match_operand 3 "" ""))
6716 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
6717 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6718 operands[3])); DONE;"
6721 ;; this uses the Cirrus DI compare instruction
6722 (define_expand "cbranchdi4"
6723 [(set (pc) (if_then_else
6724 (match_operator 0 "arm_comparison_operator"
6725 [(match_operand:DI 1 "cirrus_fp_register" "")
6726 (match_operand:DI 2 "cirrus_fp_register" "")])
6727 (label_ref (match_operand 3 "" ""))
6729 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
6730 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6731 operands[3])); DONE;"
6734 (define_insn "cbranchsi4_insn"
6735 [(set (pc) (if_then_else
6736 (match_operator 0 "arm_comparison_operator"
6737 [(match_operand:SI 1 "s_register_operand" "l,*h")
6738 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r")])
6739 (label_ref (match_operand 3 "" ""))
6743 rtx t = prev_nonnote_insn (insn);
6746 && INSN_CODE (t) == CODE_FOR_cbranchsi4_insn)
6748 t = XEXP (SET_SRC (PATTERN (t)), 0);
6749 if (!rtx_equal_p (XEXP (t, 0), operands[1])
6750 || !rtx_equal_p (XEXP (t, 1), operands[2]))
6756 output_asm_insn (\"cmp\\t%1, %2\", operands);
6758 switch (get_attr_length (insn))
6760 case 4: return \"b%d0\\t%l3\";
6761 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6762 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6765 [(set (attr "far_jump")
6767 (eq_attr "length" "8")
6768 (const_string "yes")
6769 (const_string "no")))
6770 (set (attr "length")
6772 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6773 (le (minus (match_dup 3) (pc)) (const_int 256)))
6776 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6777 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6782 (define_insn "cbranchsi4_scratch"
6783 [(set (pc) (if_then_else
6784 (match_operator 4 "arm_comparison_operator"
6785 [(match_operand:SI 1 "s_register_operand" "l,0")
6786 (match_operand:SI 2 "thumb1_cmpneg_operand" "L,J")])
6787 (label_ref (match_operand 3 "" ""))
6789 (clobber (match_scratch:SI 0 "=l,l"))]
6792 output_asm_insn (\"add\\t%0, %1, #%n2\", operands);
6794 switch (get_attr_length (insn))
6796 case 4: return \"b%d4\\t%l3\";
6797 case 6: return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6798 default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6801 [(set (attr "far_jump")
6803 (eq_attr "length" "8")
6804 (const_string "yes")
6805 (const_string "no")))
6806 (set (attr "length")
6808 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6809 (le (minus (match_dup 3) (pc)) (const_int 256)))
6812 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6813 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6818 (define_insn "*movsi_cbranchsi4"
6821 (match_operator 3 "arm_comparison_operator"
6822 [(match_operand:SI 1 "s_register_operand" "0,l,l,l")
6824 (label_ref (match_operand 2 "" ""))
6826 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,l,*h,*m")
6830 if (which_alternative == 0)
6831 output_asm_insn (\"cmp\t%0, #0\", operands);
6832 else if (which_alternative == 1)
6833 output_asm_insn (\"sub\t%0, %1, #0\", operands);
6836 output_asm_insn (\"cmp\t%1, #0\", operands);
6837 if (which_alternative == 2)
6838 output_asm_insn (\"mov\t%0, %1\", operands);
6840 output_asm_insn (\"str\t%1, %0\", operands);
6842 switch (get_attr_length (insn) - ((which_alternative > 1) ? 2 : 0))
6844 case 4: return \"b%d3\\t%l2\";
6845 case 6: return \"b%D3\\t.LCB%=\;b\\t%l2\\t%@long jump\\n.LCB%=:\";
6846 default: return \"b%D3\\t.LCB%=\;bl\\t%l2\\t%@far jump\\n.LCB%=:\";
6849 [(set (attr "far_jump")
6851 (ior (and (gt (symbol_ref ("which_alternative"))
6853 (eq_attr "length" "8"))
6854 (eq_attr "length" "10"))
6855 (const_string "yes")
6856 (const_string "no")))
6857 (set (attr "length")
6859 (le (symbol_ref ("which_alternative"))
6862 (and (ge (minus (match_dup 2) (pc)) (const_int -250))
6863 (le (minus (match_dup 2) (pc)) (const_int 256)))
6866 (and (ge (minus (match_dup 2) (pc)) (const_int -2040))
6867 (le (minus (match_dup 2) (pc)) (const_int 2048)))
6871 (and (ge (minus (match_dup 2) (pc)) (const_int -248))
6872 (le (minus (match_dup 2) (pc)) (const_int 256)))
6875 (and (ge (minus (match_dup 2) (pc)) (const_int -2038))
6876 (le (minus (match_dup 2) (pc)) (const_int 2048)))
6882 [(set (match_operand:SI 0 "low_register_operand" "")
6883 (match_operand:SI 1 "low_register_operand" ""))
6885 (if_then_else (match_operator 2 "arm_comparison_operator"
6886 [(match_dup 1) (const_int 0)])
6887 (label_ref (match_operand 3 "" ""))
6892 (if_then_else (match_op_dup 2 [(match_dup 1) (const_int 0)])
6893 (label_ref (match_dup 3))
6895 (set (match_dup 0) (match_dup 1))])]
6899 ;; Sigh! This variant shouldn't be needed, but combine often fails to
6900 ;; merge cases like this because the op1 is a hard register in
6901 ;; CLASS_LIKELY_SPILLED_P.
6903 [(set (match_operand:SI 0 "low_register_operand" "")
6904 (match_operand:SI 1 "low_register_operand" ""))
6906 (if_then_else (match_operator 2 "arm_comparison_operator"
6907 [(match_dup 0) (const_int 0)])
6908 (label_ref (match_operand 3 "" ""))
6913 (if_then_else (match_op_dup 2 [(match_dup 1) (const_int 0)])
6914 (label_ref (match_dup 3))
6916 (set (match_dup 0) (match_dup 1))])]
6920 (define_insn "*negated_cbranchsi4"
6923 (match_operator 0 "equality_operator"
6924 [(match_operand:SI 1 "s_register_operand" "l")
6925 (neg:SI (match_operand:SI 2 "s_register_operand" "l"))])
6926 (label_ref (match_operand 3 "" ""))
6930 output_asm_insn (\"cmn\\t%1, %2\", operands);
6931 switch (get_attr_length (insn))
6933 case 4: return \"b%d0\\t%l3\";
6934 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6935 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6938 [(set (attr "far_jump")
6940 (eq_attr "length" "8")
6941 (const_string "yes")
6942 (const_string "no")))
6943 (set (attr "length")
6945 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6946 (le (minus (match_dup 3) (pc)) (const_int 256)))
6949 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6950 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6955 (define_insn "*tbit_cbranch"
6958 (match_operator 0 "equality_operator"
6959 [(zero_extract:SI (match_operand:SI 1 "s_register_operand" "l")
6961 (match_operand:SI 2 "const_int_operand" "i"))
6963 (label_ref (match_operand 3 "" ""))
6965 (clobber (match_scratch:SI 4 "=l"))]
6970 op[0] = operands[4];
6971 op[1] = operands[1];
6972 op[2] = GEN_INT (32 - 1 - INTVAL (operands[2]));
6974 output_asm_insn (\"lsl\\t%0, %1, %2\", op);
6975 switch (get_attr_length (insn))
6977 case 4: return \"b%d0\\t%l3\";
6978 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6979 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6982 [(set (attr "far_jump")
6984 (eq_attr "length" "8")
6985 (const_string "yes")
6986 (const_string "no")))
6987 (set (attr "length")
6989 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6990 (le (minus (match_dup 3) (pc)) (const_int 256)))
6993 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6994 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6999 (define_insn "*tlobits_cbranch"
7002 (match_operator 0 "equality_operator"
7003 [(zero_extract:SI (match_operand:SI 1 "s_register_operand" "l")
7004 (match_operand:SI 2 "const_int_operand" "i")
7007 (label_ref (match_operand 3 "" ""))
7009 (clobber (match_scratch:SI 4 "=l"))]
7014 op[0] = operands[4];
7015 op[1] = operands[1];
7016 op[2] = GEN_INT (32 - INTVAL (operands[2]));
7018 output_asm_insn (\"lsl\\t%0, %1, %2\", op);
7019 switch (get_attr_length (insn))
7021 case 4: return \"b%d0\\t%l3\";
7022 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
7023 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
7026 [(set (attr "far_jump")
7028 (eq_attr "length" "8")
7029 (const_string "yes")
7030 (const_string "no")))
7031 (set (attr "length")
7033 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
7034 (le (minus (match_dup 3) (pc)) (const_int 256)))
7037 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
7038 (le (minus (match_dup 3) (pc)) (const_int 2048)))
7043 (define_insn "*tstsi3_cbranch"
7046 (match_operator 3 "equality_operator"
7047 [(and:SI (match_operand:SI 0 "s_register_operand" "%l")
7048 (match_operand:SI 1 "s_register_operand" "l"))
7050 (label_ref (match_operand 2 "" ""))
7055 output_asm_insn (\"tst\\t%0, %1\", operands);
7056 switch (get_attr_length (insn))
7058 case 4: return \"b%d3\\t%l2\";
7059 case 6: return \"b%D3\\t.LCB%=\;b\\t%l2\\t%@long jump\\n.LCB%=:\";
7060 default: return \"b%D3\\t.LCB%=\;bl\\t%l2\\t%@far jump\\n.LCB%=:\";
7063 [(set (attr "far_jump")
7065 (eq_attr "length" "8")
7066 (const_string "yes")
7067 (const_string "no")))
7068 (set (attr "length")
7070 (and (ge (minus (match_dup 2) (pc)) (const_int -250))
7071 (le (minus (match_dup 2) (pc)) (const_int 256)))
7074 (and (ge (minus (match_dup 2) (pc)) (const_int -2040))
7075 (le (minus (match_dup 2) (pc)) (const_int 2048)))
7080 (define_insn "*andsi3_cbranch"
7083 (match_operator 5 "equality_operator"
7084 [(and:SI (match_operand:SI 2 "s_register_operand" "%0,1,1,1")
7085 (match_operand:SI 3 "s_register_operand" "l,l,l,l"))
7087 (label_ref (match_operand 4 "" ""))
7089 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
7090 (and:SI (match_dup 2) (match_dup 3)))
7091 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
7095 if (which_alternative == 0)
7096 output_asm_insn (\"and\\t%0, %3\", operands);
7097 else if (which_alternative == 1)
7099 output_asm_insn (\"and\\t%1, %3\", operands);
7100 output_asm_insn (\"mov\\t%0, %1\", operands);
7104 output_asm_insn (\"and\\t%1, %3\", operands);
7105 output_asm_insn (\"str\\t%1, %0\", operands);
7108 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
7110 case 4: return \"b%d5\\t%l4\";
7111 case 6: return \"b%D5\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
7112 default: return \"b%D5\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
7115 [(set (attr "far_jump")
7117 (ior (and (eq (symbol_ref ("which_alternative"))
7119 (eq_attr "length" "8"))
7120 (eq_attr "length" "10"))
7121 (const_string "yes")
7122 (const_string "no")))
7123 (set (attr "length")
7125 (eq (symbol_ref ("which_alternative"))
7128 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
7129 (le (minus (match_dup 4) (pc)) (const_int 256)))
7132 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
7133 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7137 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7138 (le (minus (match_dup 4) (pc)) (const_int 256)))
7141 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7142 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7147 (define_insn "*orrsi3_cbranch_scratch"
7150 (match_operator 4 "equality_operator"
7151 [(ior:SI (match_operand:SI 1 "s_register_operand" "%0")
7152 (match_operand:SI 2 "s_register_operand" "l"))
7154 (label_ref (match_operand 3 "" ""))
7156 (clobber (match_scratch:SI 0 "=l"))]
7160 output_asm_insn (\"orr\\t%0, %2\", operands);
7161 switch (get_attr_length (insn))
7163 case 4: return \"b%d4\\t%l3\";
7164 case 6: return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
7165 default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
7168 [(set (attr "far_jump")
7170 (eq_attr "length" "8")
7171 (const_string "yes")
7172 (const_string "no")))
7173 (set (attr "length")
7175 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
7176 (le (minus (match_dup 3) (pc)) (const_int 256)))
7179 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
7180 (le (minus (match_dup 3) (pc)) (const_int 2048)))
7185 (define_insn "*orrsi3_cbranch"
7188 (match_operator 5 "equality_operator"
7189 [(ior:SI (match_operand:SI 2 "s_register_operand" "%0,1,1,1")
7190 (match_operand:SI 3 "s_register_operand" "l,l,l,l"))
7192 (label_ref (match_operand 4 "" ""))
7194 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
7195 (ior:SI (match_dup 2) (match_dup 3)))
7196 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
7200 if (which_alternative == 0)
7201 output_asm_insn (\"orr\\t%0, %3\", operands);
7202 else if (which_alternative == 1)
7204 output_asm_insn (\"orr\\t%1, %3\", operands);
7205 output_asm_insn (\"mov\\t%0, %1\", operands);
7209 output_asm_insn (\"orr\\t%1, %3\", operands);
7210 output_asm_insn (\"str\\t%1, %0\", operands);
7213 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
7215 case 4: return \"b%d5\\t%l4\";
7216 case 6: return \"b%D5\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
7217 default: return \"b%D5\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
7220 [(set (attr "far_jump")
7222 (ior (and (eq (symbol_ref ("which_alternative"))
7224 (eq_attr "length" "8"))
7225 (eq_attr "length" "10"))
7226 (const_string "yes")
7227 (const_string "no")))
7228 (set (attr "length")
7230 (eq (symbol_ref ("which_alternative"))
7233 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
7234 (le (minus (match_dup 4) (pc)) (const_int 256)))
7237 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
7238 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7242 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7243 (le (minus (match_dup 4) (pc)) (const_int 256)))
7246 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7247 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7252 (define_insn "*xorsi3_cbranch_scratch"
7255 (match_operator 4 "equality_operator"
7256 [(xor:SI (match_operand:SI 1 "s_register_operand" "%0")
7257 (match_operand:SI 2 "s_register_operand" "l"))
7259 (label_ref (match_operand 3 "" ""))
7261 (clobber (match_scratch:SI 0 "=l"))]
7265 output_asm_insn (\"eor\\t%0, %2\", operands);
7266 switch (get_attr_length (insn))
7268 case 4: return \"b%d4\\t%l3\";
7269 case 6: return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
7270 default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
7273 [(set (attr "far_jump")
7275 (eq_attr "length" "8")
7276 (const_string "yes")
7277 (const_string "no")))
7278 (set (attr "length")
7280 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
7281 (le (minus (match_dup 3) (pc)) (const_int 256)))
7284 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
7285 (le (minus (match_dup 3) (pc)) (const_int 2048)))
7290 (define_insn "*xorsi3_cbranch"
7293 (match_operator 5 "equality_operator"
7294 [(xor:SI (match_operand:SI 2 "s_register_operand" "%0,1,1,1")
7295 (match_operand:SI 3 "s_register_operand" "l,l,l,l"))
7297 (label_ref (match_operand 4 "" ""))
7299 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
7300 (xor:SI (match_dup 2) (match_dup 3)))
7301 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
7305 if (which_alternative == 0)
7306 output_asm_insn (\"eor\\t%0, %3\", operands);
7307 else if (which_alternative == 1)
7309 output_asm_insn (\"eor\\t%1, %3\", operands);
7310 output_asm_insn (\"mov\\t%0, %1\", operands);
7314 output_asm_insn (\"eor\\t%1, %3\", operands);
7315 output_asm_insn (\"str\\t%1, %0\", operands);
7318 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
7320 case 4: return \"b%d5\\t%l4\";
7321 case 6: return \"b%D5\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
7322 default: return \"b%D5\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
7325 [(set (attr "far_jump")
7327 (ior (and (eq (symbol_ref ("which_alternative"))
7329 (eq_attr "length" "8"))
7330 (eq_attr "length" "10"))
7331 (const_string "yes")
7332 (const_string "no")))
7333 (set (attr "length")
7335 (eq (symbol_ref ("which_alternative"))
7338 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
7339 (le (minus (match_dup 4) (pc)) (const_int 256)))
7342 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
7343 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7347 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7348 (le (minus (match_dup 4) (pc)) (const_int 256)))
7351 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7352 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7357 (define_insn "*bicsi3_cbranch_scratch"
7360 (match_operator 4 "equality_operator"
7361 [(and:SI (not:SI (match_operand:SI 2 "s_register_operand" "l"))
7362 (match_operand:SI 1 "s_register_operand" "0"))
7364 (label_ref (match_operand 3 "" ""))
7366 (clobber (match_scratch:SI 0 "=l"))]
7370 output_asm_insn (\"bic\\t%0, %2\", operands);
7371 switch (get_attr_length (insn))
7373 case 4: return \"b%d4\\t%l3\";
7374 case 6: return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
7375 default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
7378 [(set (attr "far_jump")
7380 (eq_attr "length" "8")
7381 (const_string "yes")
7382 (const_string "no")))
7383 (set (attr "length")
7385 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
7386 (le (minus (match_dup 3) (pc)) (const_int 256)))
7389 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
7390 (le (minus (match_dup 3) (pc)) (const_int 2048)))
7395 (define_insn "*bicsi3_cbranch"
7398 (match_operator 5 "equality_operator"
7399 [(and:SI (not:SI (match_operand:SI 3 "s_register_operand" "l,l,l,l,l"))
7400 (match_operand:SI 2 "s_register_operand" "0,1,1,1,1"))
7402 (label_ref (match_operand 4 "" ""))
7404 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=!l,l,*?h,*?m,*?m")
7405 (and:SI (not:SI (match_dup 3)) (match_dup 2)))
7406 (clobber (match_scratch:SI 1 "=X,l,l,&l,&l"))]
7410 if (which_alternative == 0)
7411 output_asm_insn (\"bic\\t%0, %3\", operands);
7412 else if (which_alternative <= 2)
7414 output_asm_insn (\"bic\\t%1, %3\", operands);
7415 /* It's ok if OP0 is a lo-reg, even though the mov will set the
7416 conditions again, since we're only testing for equality. */
7417 output_asm_insn (\"mov\\t%0, %1\", operands);
7421 output_asm_insn (\"bic\\t%1, %3\", operands);
7422 output_asm_insn (\"str\\t%1, %0\", operands);
7425 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
7427 case 4: return \"b%d5\\t%l4\";
7428 case 6: return \"b%D5\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
7429 default: return \"b%D5\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
7432 [(set (attr "far_jump")
7434 (ior (and (eq (symbol_ref ("which_alternative"))
7436 (eq_attr "length" "8"))
7437 (eq_attr "length" "10"))
7438 (const_string "yes")
7439 (const_string "no")))
7440 (set (attr "length")
7442 (eq (symbol_ref ("which_alternative"))
7445 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
7446 (le (minus (match_dup 4) (pc)) (const_int 256)))
7449 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
7450 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7454 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7455 (le (minus (match_dup 4) (pc)) (const_int 256)))
7458 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7459 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7464 (define_insn "*cbranchne_decr1"
7466 (if_then_else (match_operator 3 "equality_operator"
7467 [(match_operand:SI 2 "s_register_operand" "l,l,1,l")
7469 (label_ref (match_operand 4 "" ""))
7471 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
7472 (plus:SI (match_dup 2) (const_int -1)))
7473 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
7478 cond[0] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
7480 VOIDmode, operands[2], const1_rtx);
7481 cond[1] = operands[4];
7483 if (which_alternative == 0)
7484 output_asm_insn (\"sub\\t%0, %2, #1\", operands);
7485 else if (which_alternative == 1)
7487 /* We must provide an alternative for a hi reg because reload
7488 cannot handle output reloads on a jump instruction, but we
7489 can't subtract into that. Fortunately a mov from lo to hi
7490 does not clobber the condition codes. */
7491 output_asm_insn (\"sub\\t%1, %2, #1\", operands);
7492 output_asm_insn (\"mov\\t%0, %1\", operands);
7496 /* Similarly, but the target is memory. */
7497 output_asm_insn (\"sub\\t%1, %2, #1\", operands);
7498 output_asm_insn (\"str\\t%1, %0\", operands);
7501 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
7504 output_asm_insn (\"b%d0\\t%l1\", cond);
7507 output_asm_insn (\"b%D0\\t.LCB%=\", cond);
7508 return \"b\\t%l4\\t%@long jump\\n.LCB%=:\";
7510 output_asm_insn (\"b%D0\\t.LCB%=\", cond);
7511 return \"bl\\t%l4\\t%@far jump\\n.LCB%=:\";
7515 [(set (attr "far_jump")
7517 (ior (and (eq (symbol_ref ("which_alternative"))
7519 (eq_attr "length" "8"))
7520 (eq_attr "length" "10"))
7521 (const_string "yes")
7522 (const_string "no")))
7523 (set_attr_alternative "length"
7527 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
7528 (le (minus (match_dup 4) (pc)) (const_int 256)))
7531 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
7532 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7537 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7538 (le (minus (match_dup 4) (pc)) (const_int 256)))
7541 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7542 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7547 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7548 (le (minus (match_dup 4) (pc)) (const_int 256)))
7551 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7552 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7557 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7558 (le (minus (match_dup 4) (pc)) (const_int 256)))
7561 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7562 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7567 (define_insn "*addsi3_cbranch"
7570 (match_operator 4 "arm_comparison_operator"
7572 (match_operand:SI 2 "s_register_operand" "%l,0,*l,1,1,1")
7573 (match_operand:SI 3 "reg_or_int_operand" "lL,IJ,*l,lIJ,lIJ,lIJ"))
7575 (label_ref (match_operand 5 "" ""))
7578 (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,l,*!h,*?h,*?m,*?m")
7579 (plus:SI (match_dup 2) (match_dup 3)))
7580 (clobber (match_scratch:SI 1 "=X,X,l,l,&l,&l"))]
7582 && (GET_CODE (operands[4]) == EQ
7583 || GET_CODE (operands[4]) == NE
7584 || GET_CODE (operands[4]) == GE
7585 || GET_CODE (operands[4]) == LT)"
7590 cond[0] = (which_alternative < 2) ? operands[0] : operands[1];
7591 cond[1] = operands[2];
7592 cond[2] = operands[3];
7594 if (GET_CODE (cond[2]) == CONST_INT && INTVAL (cond[2]) < 0)
7595 output_asm_insn (\"sub\\t%0, %1, #%n2\", cond);
7597 output_asm_insn (\"add\\t%0, %1, %2\", cond);
7599 if (which_alternative >= 2
7600 && which_alternative < 4)
7601 output_asm_insn (\"mov\\t%0, %1\", operands);
7602 else if (which_alternative >= 4)
7603 output_asm_insn (\"str\\t%1, %0\", operands);
7605 switch (get_attr_length (insn) - ((which_alternative >= 3) ? 2 : 0))
7608 return \"b%d4\\t%l5\";
7610 return \"b%D4\\t.LCB%=\;b\\t%l5\\t%@long jump\\n.LCB%=:\";
7612 return \"b%D4\\t.LCB%=\;bl\\t%l5\\t%@far jump\\n.LCB%=:\";
7616 [(set (attr "far_jump")
7618 (ior (and (lt (symbol_ref ("which_alternative"))
7620 (eq_attr "length" "8"))
7621 (eq_attr "length" "10"))
7622 (const_string "yes")
7623 (const_string "no")))
7624 (set (attr "length")
7626 (lt (symbol_ref ("which_alternative"))
7629 (and (ge (minus (match_dup 5) (pc)) (const_int -250))
7630 (le (minus (match_dup 5) (pc)) (const_int 256)))
7633 (and (ge (minus (match_dup 5) (pc)) (const_int -2040))
7634 (le (minus (match_dup 5) (pc)) (const_int 2048)))
7638 (and (ge (minus (match_dup 5) (pc)) (const_int -248))
7639 (le (minus (match_dup 5) (pc)) (const_int 256)))
7642 (and (ge (minus (match_dup 5) (pc)) (const_int -2038))
7643 (le (minus (match_dup 5) (pc)) (const_int 2048)))
7648 (define_insn "*addsi3_cbranch_scratch"
7651 (match_operator 3 "arm_comparison_operator"
7653 (match_operand:SI 1 "s_register_operand" "%l,l,l,0")
7654 (match_operand:SI 2 "reg_or_int_operand" "J,l,L,IJ"))
7656 (label_ref (match_operand 4 "" ""))
7658 (clobber (match_scratch:SI 0 "=X,X,l,l"))]
7660 && (GET_CODE (operands[3]) == EQ
7661 || GET_CODE (operands[3]) == NE
7662 || GET_CODE (operands[3]) == GE
7663 || GET_CODE (operands[3]) == LT)"
7666 switch (which_alternative)
7669 output_asm_insn (\"cmp\t%1, #%n2\", operands);
7672 output_asm_insn (\"cmn\t%1, %2\", operands);
7675 if (INTVAL (operands[2]) < 0)
7676 output_asm_insn (\"sub\t%0, %1, %2\", operands);
7678 output_asm_insn (\"add\t%0, %1, %2\", operands);
7681 if (INTVAL (operands[2]) < 0)
7682 output_asm_insn (\"sub\t%0, %0, %2\", operands);
7684 output_asm_insn (\"add\t%0, %0, %2\", operands);
7688 switch (get_attr_length (insn))
7691 return \"b%d3\\t%l4\";
7693 return \"b%D3\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
7695 return \"b%D3\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
7699 [(set (attr "far_jump")
7701 (eq_attr "length" "8")
7702 (const_string "yes")
7703 (const_string "no")))
7704 (set (attr "length")
7706 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
7707 (le (minus (match_dup 4) (pc)) (const_int 256)))
7710 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
7711 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7716 (define_insn "*subsi3_cbranch"
7719 (match_operator 4 "arm_comparison_operator"
7721 (match_operand:SI 2 "s_register_operand" "l,l,1,l")
7722 (match_operand:SI 3 "s_register_operand" "l,l,l,l"))
7724 (label_ref (match_operand 5 "" ""))
7726 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
7727 (minus:SI (match_dup 2) (match_dup 3)))
7728 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
7730 && (GET_CODE (operands[4]) == EQ
7731 || GET_CODE (operands[4]) == NE
7732 || GET_CODE (operands[4]) == GE
7733 || GET_CODE (operands[4]) == LT)"
7736 if (which_alternative == 0)
7737 output_asm_insn (\"sub\\t%0, %2, %3\", operands);
7738 else if (which_alternative == 1)
7740 /* We must provide an alternative for a hi reg because reload
7741 cannot handle output reloads on a jump instruction, but we
7742 can't subtract into that. Fortunately a mov from lo to hi
7743 does not clobber the condition codes. */
7744 output_asm_insn (\"sub\\t%1, %2, %3\", operands);
7745 output_asm_insn (\"mov\\t%0, %1\", operands);
7749 /* Similarly, but the target is memory. */
7750 output_asm_insn (\"sub\\t%1, %2, %3\", operands);
7751 output_asm_insn (\"str\\t%1, %0\", operands);
7754 switch (get_attr_length (insn) - ((which_alternative != 0) ? 2 : 0))
7757 return \"b%d4\\t%l5\";
7759 return \"b%D4\\t.LCB%=\;b\\t%l5\\t%@long jump\\n.LCB%=:\";
7761 return \"b%D4\\t.LCB%=\;bl\\t%l5\\t%@far jump\\n.LCB%=:\";
7765 [(set (attr "far_jump")
7767 (ior (and (eq (symbol_ref ("which_alternative"))
7769 (eq_attr "length" "8"))
7770 (eq_attr "length" "10"))
7771 (const_string "yes")
7772 (const_string "no")))
7773 (set (attr "length")
7775 (eq (symbol_ref ("which_alternative"))
7778 (and (ge (minus (match_dup 5) (pc)) (const_int -250))
7779 (le (minus (match_dup 5) (pc)) (const_int 256)))
7782 (and (ge (minus (match_dup 5) (pc)) (const_int -2040))
7783 (le (minus (match_dup 5) (pc)) (const_int 2048)))
7787 (and (ge (minus (match_dup 5) (pc)) (const_int -248))
7788 (le (minus (match_dup 5) (pc)) (const_int 256)))
7791 (and (ge (minus (match_dup 5) (pc)) (const_int -2038))
7792 (le (minus (match_dup 5) (pc)) (const_int 2048)))
7797 (define_insn "*subsi3_cbranch_scratch"
7800 (match_operator 0 "arm_comparison_operator"
7801 [(minus:SI (match_operand:SI 1 "register_operand" "l")
7802 (match_operand:SI 2 "nonmemory_operand" "l"))
7804 (label_ref (match_operand 3 "" ""))
7807 && (GET_CODE (operands[0]) == EQ
7808 || GET_CODE (operands[0]) == NE
7809 || GET_CODE (operands[0]) == GE
7810 || GET_CODE (operands[0]) == LT)"
7812 output_asm_insn (\"cmp\\t%1, %2\", operands);
7813 switch (get_attr_length (insn))
7815 case 4: return \"b%d0\\t%l3\";
7816 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
7817 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
7820 [(set (attr "far_jump")
7822 (eq_attr "length" "8")
7823 (const_string "yes")
7824 (const_string "no")))
7825 (set (attr "length")
7827 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
7828 (le (minus (match_dup 3) (pc)) (const_int 256)))
7831 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
7832 (le (minus (match_dup 3) (pc)) (const_int 2048)))
7837 ;; Comparison and test insns
7839 (define_insn "*arm_cmpsi_insn"
7840 [(set (reg:CC CC_REGNUM)
7841 (compare:CC (match_operand:SI 0 "s_register_operand" "r,r")
7842 (match_operand:SI 1 "arm_add_operand" "rI,L")))]
7847 [(set_attr "conds" "set")]
7850 (define_insn "*arm_cmpsi_shiftsi"
7851 [(set (reg:CC CC_REGNUM)
7852 (compare:CC (match_operand:SI 0 "s_register_operand" "r")
7853 (match_operator:SI 3 "shift_operator"
7854 [(match_operand:SI 1 "s_register_operand" "r")
7855 (match_operand:SI 2 "arm_rhs_operand" "rM")])))]
7858 [(set_attr "conds" "set")
7859 (set_attr "shift" "1")
7860 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
7861 (const_string "alu_shift")
7862 (const_string "alu_shift_reg")))]
7865 (define_insn "*arm_cmpsi_shiftsi_swp"
7866 [(set (reg:CC_SWP CC_REGNUM)
7867 (compare:CC_SWP (match_operator:SI 3 "shift_operator"
7868 [(match_operand:SI 1 "s_register_operand" "r")
7869 (match_operand:SI 2 "reg_or_int_operand" "rM")])
7870 (match_operand:SI 0 "s_register_operand" "r")))]
7873 [(set_attr "conds" "set")
7874 (set_attr "shift" "1")
7875 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
7876 (const_string "alu_shift")
7877 (const_string "alu_shift_reg")))]
7880 (define_insn "*arm_cmpsi_negshiftsi_si"
7881 [(set (reg:CC_Z CC_REGNUM)
7883 (neg:SI (match_operator:SI 1 "shift_operator"
7884 [(match_operand:SI 2 "s_register_operand" "r")
7885 (match_operand:SI 3 "reg_or_int_operand" "rM")]))
7886 (match_operand:SI 0 "s_register_operand" "r")))]
7889 [(set_attr "conds" "set")
7890 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
7891 (const_string "alu_shift")
7892 (const_string "alu_shift_reg")))]
7895 ;; Cirrus SF compare instruction
7896 (define_insn "*cirrus_cmpsf"
7897 [(set (reg:CCFP CC_REGNUM)
7898 (compare:CCFP (match_operand:SF 0 "cirrus_fp_register" "v")
7899 (match_operand:SF 1 "cirrus_fp_register" "v")))]
7900 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
7901 "cfcmps%?\\tr15, %V0, %V1"
7902 [(set_attr "type" "mav_farith")
7903 (set_attr "cirrus" "compare")]
7906 ;; Cirrus DF compare instruction
7907 (define_insn "*cirrus_cmpdf"
7908 [(set (reg:CCFP CC_REGNUM)
7909 (compare:CCFP (match_operand:DF 0 "cirrus_fp_register" "v")
7910 (match_operand:DF 1 "cirrus_fp_register" "v")))]
7911 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
7912 "cfcmpd%?\\tr15, %V0, %V1"
7913 [(set_attr "type" "mav_farith")
7914 (set_attr "cirrus" "compare")]
7917 (define_insn "*cirrus_cmpdi"
7918 [(set (reg:CC CC_REGNUM)
7919 (compare:CC (match_operand:DI 0 "cirrus_fp_register" "v")
7920 (match_operand:DI 1 "cirrus_fp_register" "v")))]
7921 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
7922 "cfcmp64%?\\tr15, %V0, %V1"
7923 [(set_attr "type" "mav_farith")
7924 (set_attr "cirrus" "compare")]
7927 ; This insn allows redundant compares to be removed by cse, nothing should
7928 ; ever appear in the output file since (set (reg x) (reg x)) is a no-op that
7929 ; is deleted later on. The match_dup will match the mode here, so that
7930 ; mode changes of the condition codes aren't lost by this even though we don't
7931 ; specify what they are.
7933 (define_insn "*deleted_compare"
7934 [(set (match_operand 0 "cc_register" "") (match_dup 0))]
7936 "\\t%@ deleted compare"
7937 [(set_attr "conds" "set")
7938 (set_attr "length" "0")]
7942 ;; Conditional branch insns
7944 (define_expand "cbranch_cc"
7946 (if_then_else (match_operator 0 "" [(match_operand 1 "" "")
7947 (match_operand 2 "" "")])
7948 (label_ref (match_operand 3 "" ""))
7951 "operands[1] = arm_gen_compare_reg (GET_CODE (operands[0]),
7952 operands[1], operands[2]);
7953 operands[2] = const0_rtx;"
7957 ;; Patterns to match conditional branch insns.
7960 (define_insn "*arm_cond_branch"
7962 (if_then_else (match_operator 1 "arm_comparison_operator"
7963 [(match_operand 2 "cc_register" "") (const_int 0)])
7964 (label_ref (match_operand 0 "" ""))
7968 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7970 arm_ccfsm_state += 2;
7973 return \"b%d1\\t%l0\";
7975 [(set_attr "conds" "use")
7976 (set_attr "type" "branch")]
7979 (define_insn "*arm_cond_branch_reversed"
7981 (if_then_else (match_operator 1 "arm_comparison_operator"
7982 [(match_operand 2 "cc_register" "") (const_int 0)])
7984 (label_ref (match_operand 0 "" ""))))]
7987 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7989 arm_ccfsm_state += 2;
7992 return \"b%D1\\t%l0\";
7994 [(set_attr "conds" "use")
7995 (set_attr "type" "branch")]
8002 (define_expand "cstore_cc"
8003 [(set (match_operand:SI 0 "s_register_operand" "")
8004 (match_operator:SI 1 "" [(match_operand 2 "" "")
8005 (match_operand 3 "" "")]))]
8007 "operands[2] = arm_gen_compare_reg (GET_CODE (operands[1]),
8008 operands[2], operands[3]);
8009 operands[3] = const0_rtx;"
8012 (define_insn "*mov_scc"
8013 [(set (match_operand:SI 0 "s_register_operand" "=r")
8014 (match_operator:SI 1 "arm_comparison_operator"
8015 [(match_operand 2 "cc_register" "") (const_int 0)]))]
8017 "mov%D1\\t%0, #0\;mov%d1\\t%0, #1"
8018 [(set_attr "conds" "use")
8019 (set_attr "length" "8")]
8022 (define_insn "*mov_negscc"
8023 [(set (match_operand:SI 0 "s_register_operand" "=r")
8024 (neg:SI (match_operator:SI 1 "arm_comparison_operator"
8025 [(match_operand 2 "cc_register" "") (const_int 0)])))]
8027 "mov%D1\\t%0, #0\;mvn%d1\\t%0, #0"
8028 [(set_attr "conds" "use")
8029 (set_attr "length" "8")]
8032 (define_insn "*mov_notscc"
8033 [(set (match_operand:SI 0 "s_register_operand" "=r")
8034 (not:SI (match_operator:SI 1 "arm_comparison_operator"
8035 [(match_operand 2 "cc_register" "") (const_int 0)])))]
8037 "mov%D1\\t%0, #0\;mvn%d1\\t%0, #1"
8038 [(set_attr "conds" "use")
8039 (set_attr "length" "8")]
8042 (define_expand "cstoresi4"
8043 [(set (match_operand:SI 0 "s_register_operand" "")
8044 (match_operator:SI 1 "arm_comparison_operator"
8045 [(match_operand:SI 2 "s_register_operand" "")
8046 (match_operand:SI 3 "reg_or_int_operand" "")]))]
8047 "TARGET_32BIT || TARGET_THUMB1"
8049 rtx op3, scratch, scratch2;
8053 if (!arm_add_operand (operands[3], SImode))
8054 operands[3] = force_reg (SImode, operands[3]);
8055 emit_insn (gen_cstore_cc (operands[0], operands[1],
8056 operands[2], operands[3]));
8060 if (operands[3] == const0_rtx)
8062 switch (GET_CODE (operands[1]))
8065 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], operands[2]));
8069 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], operands[2]));
8073 scratch = expand_binop (SImode, add_optab, operands[2], constm1_rtx,
8074 NULL_RTX, 0, OPTAB_WIDEN);
8075 scratch = expand_binop (SImode, ior_optab, operands[2], scratch,
8076 NULL_RTX, 0, OPTAB_WIDEN);
8077 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
8078 operands[0], 1, OPTAB_WIDEN);
8082 scratch = expand_unop (SImode, one_cmpl_optab, operands[2],
8084 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
8085 NULL_RTX, 1, OPTAB_WIDEN);
8089 scratch = expand_binop (SImode, ashr_optab, operands[2],
8090 GEN_INT (31), NULL_RTX, 0, OPTAB_WIDEN);
8091 scratch = expand_binop (SImode, sub_optab, scratch, operands[2],
8092 NULL_RTX, 0, OPTAB_WIDEN);
8093 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31), operands[0],
8097 /* LT is handled by generic code. No need for unsigned with 0. */
8104 switch (GET_CODE (operands[1]))
8107 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
8108 NULL_RTX, 0, OPTAB_WIDEN);
8109 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], scratch));
8113 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
8114 NULL_RTX, 0, OPTAB_WIDEN);
8115 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], scratch));
8119 op3 = force_reg (SImode, operands[3]);
8121 scratch = expand_binop (SImode, lshr_optab, operands[2], GEN_INT (31),
8122 NULL_RTX, 1, OPTAB_WIDEN);
8123 scratch2 = expand_binop (SImode, ashr_optab, op3, GEN_INT (31),
8124 NULL_RTX, 0, OPTAB_WIDEN);
8125 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
8131 if (!thumb1_cmp_operand (op3, SImode))
8132 op3 = force_reg (SImode, op3);
8133 scratch = expand_binop (SImode, ashr_optab, operands[2], GEN_INT (31),
8134 NULL_RTX, 0, OPTAB_WIDEN);
8135 scratch2 = expand_binop (SImode, lshr_optab, op3, GEN_INT (31),
8136 NULL_RTX, 1, OPTAB_WIDEN);
8137 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
8142 op3 = force_reg (SImode, operands[3]);
8143 scratch = force_reg (SImode, const0_rtx);
8144 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
8150 if (!thumb1_cmp_operand (op3, SImode))
8151 op3 = force_reg (SImode, op3);
8152 scratch = force_reg (SImode, const0_rtx);
8153 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
8159 if (!thumb1_cmp_operand (op3, SImode))
8160 op3 = force_reg (SImode, op3);
8161 scratch = gen_reg_rtx (SImode);
8162 emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], operands[2], op3));
8166 op3 = force_reg (SImode, operands[3]);
8167 scratch = gen_reg_rtx (SImode);
8168 emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], op3, operands[2]));
8171 /* No good sequences for GT, LT. */
8178 (define_expand "cstoresf4"
8179 [(set (match_operand:SI 0 "s_register_operand" "")
8180 (match_operator:SI 1 "arm_comparison_operator"
8181 [(match_operand:SF 2 "s_register_operand" "")
8182 (match_operand:SF 3 "arm_float_compare_operand" "")]))]
8183 "TARGET_32BIT && TARGET_HARD_FLOAT"
8184 "emit_insn (gen_cstore_cc (operands[0], operands[1],
8185 operands[2], operands[3])); DONE;"
8188 (define_expand "cstoredf4"
8189 [(set (match_operand:SI 0 "s_register_operand" "")
8190 (match_operator:SI 1 "arm_comparison_operator"
8191 [(match_operand:DF 2 "s_register_operand" "")
8192 (match_operand:DF 3 "arm_float_compare_operand" "")]))]
8193 "TARGET_32BIT && TARGET_HARD_FLOAT"
8194 "emit_insn (gen_cstore_cc (operands[0], operands[1],
8195 operands[2], operands[3])); DONE;"
8198 ;; this uses the Cirrus DI compare instruction
8199 (define_expand "cstoredi4"
8200 [(set (match_operand:SI 0 "s_register_operand" "")
8201 (match_operator:SI 1 "arm_comparison_operator"
8202 [(match_operand:DI 2 "cirrus_fp_register" "")
8203 (match_operand:DI 3 "cirrus_fp_register" "")]))]
8204 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
8205 "emit_insn (gen_cstore_cc (operands[0], operands[1],
8206 operands[2], operands[3])); DONE;"
8210 (define_expand "cstoresi_eq0_thumb1"
8212 [(set (match_operand:SI 0 "s_register_operand" "")
8213 (eq:SI (match_operand:SI 1 "s_register_operand" "")
8215 (clobber (match_dup:SI 2))])]
8217 "operands[2] = gen_reg_rtx (SImode);"
8220 (define_expand "cstoresi_ne0_thumb1"
8222 [(set (match_operand:SI 0 "s_register_operand" "")
8223 (ne:SI (match_operand:SI 1 "s_register_operand" "")
8225 (clobber (match_dup:SI 2))])]
8227 "operands[2] = gen_reg_rtx (SImode);"
8230 (define_insn "*cstoresi_eq0_thumb1_insn"
8231 [(set (match_operand:SI 0 "s_register_operand" "=&l,l")
8232 (eq:SI (match_operand:SI 1 "s_register_operand" "l,0")
8234 (clobber (match_operand:SI 2 "s_register_operand" "=X,l"))]
8237 neg\\t%0, %1\;adc\\t%0, %0, %1
8238 neg\\t%2, %1\;adc\\t%0, %1, %2"
8239 [(set_attr "length" "4")]
8242 (define_insn "*cstoresi_ne0_thumb1_insn"
8243 [(set (match_operand:SI 0 "s_register_operand" "=l")
8244 (ne:SI (match_operand:SI 1 "s_register_operand" "0")
8246 (clobber (match_operand:SI 2 "s_register_operand" "=l"))]
8248 "sub\\t%2, %1, #1\;sbc\\t%0, %1, %2"
8249 [(set_attr "length" "4")]
8252 ;; Used as part of the expansion of thumb ltu and gtu sequences
8253 (define_insn "cstoresi_nltu_thumb1"
8254 [(set (match_operand:SI 0 "s_register_operand" "=l,l")
8255 (neg:SI (ltu:SI (match_operand:SI 1 "s_register_operand" "l,*h")
8256 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r"))))]
8258 "cmp\\t%1, %2\;sbc\\t%0, %0, %0"
8259 [(set_attr "length" "4")]
8262 (define_insn_and_split "cstoresi_ltu_thumb1"
8263 [(set (match_operand:SI 0 "s_register_operand" "=l,l")
8264 (ltu:SI (match_operand:SI 1 "s_register_operand" "l,*h")
8265 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r")))]
8270 (neg:SI (ltu:SI (match_dup 1) (match_dup 2))))
8271 (set (match_dup 0) (neg:SI (match_dup 3)))]
8272 "operands[3] = gen_reg_rtx (SImode);"
8273 [(set_attr "length" "4")]
8276 ;; Used as part of the expansion of thumb les sequence.
8277 (define_insn "thumb1_addsi3_addgeu"
8278 [(set (match_operand:SI 0 "s_register_operand" "=l")
8279 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%0")
8280 (match_operand:SI 2 "s_register_operand" "l"))
8281 (geu:SI (match_operand:SI 3 "s_register_operand" "l")
8282 (match_operand:SI 4 "thumb1_cmp_operand" "lI"))))]
8284 "cmp\\t%3, %4\;adc\\t%0, %1, %2"
8285 [(set_attr "length" "4")]
8289 ;; Conditional move insns
8291 (define_expand "movsicc"
8292 [(set (match_operand:SI 0 "s_register_operand" "")
8293 (if_then_else:SI (match_operand 1 "arm_comparison_operator" "")
8294 (match_operand:SI 2 "arm_not_operand" "")
8295 (match_operand:SI 3 "arm_not_operand" "")))]
8299 enum rtx_code code = GET_CODE (operands[1]);
8302 if (code == UNEQ || code == LTGT)
8305 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
8306 XEXP (operands[1], 1));
8307 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
8311 (define_expand "movsfcc"
8312 [(set (match_operand:SF 0 "s_register_operand" "")
8313 (if_then_else:SF (match_operand 1 "arm_comparison_operator" "")
8314 (match_operand:SF 2 "s_register_operand" "")
8315 (match_operand:SF 3 "nonmemory_operand" "")))]
8316 "TARGET_32BIT && TARGET_HARD_FLOAT"
8319 enum rtx_code code = GET_CODE (operands[1]);
8322 if (code == UNEQ || code == LTGT)
8325 /* When compiling for SOFT_FLOAT, ensure both arms are in registers.
8326 Otherwise, ensure it is a valid FP add operand */
8327 if ((!(TARGET_HARD_FLOAT && TARGET_FPA))
8328 || (!arm_float_add_operand (operands[3], SFmode)))
8329 operands[3] = force_reg (SFmode, operands[3]);
8331 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
8332 XEXP (operands[1], 1));
8333 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
8337 (define_expand "movdfcc"
8338 [(set (match_operand:DF 0 "s_register_operand" "")
8339 (if_then_else:DF (match_operand 1 "arm_comparison_operator" "")
8340 (match_operand:DF 2 "s_register_operand" "")
8341 (match_operand:DF 3 "arm_float_add_operand" "")))]
8342 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP_DOUBLE)"
8345 enum rtx_code code = GET_CODE (operands[1]);
8348 if (code == UNEQ || code == LTGT)
8351 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
8352 XEXP (operands[1], 1));
8353 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
8357 (define_insn "*movsicc_insn"
8358 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r,r,r,r,r")
8360 (match_operator 3 "arm_comparison_operator"
8361 [(match_operand 4 "cc_register" "") (const_int 0)])
8362 (match_operand:SI 1 "arm_not_operand" "0,0,rI,K,rI,rI,K,K")
8363 (match_operand:SI 2 "arm_not_operand" "rI,K,0,0,rI,K,rI,K")))]
8370 mov%d3\\t%0, %1\;mov%D3\\t%0, %2
8371 mov%d3\\t%0, %1\;mvn%D3\\t%0, #%B2
8372 mvn%d3\\t%0, #%B1\;mov%D3\\t%0, %2
8373 mvn%d3\\t%0, #%B1\;mvn%D3\\t%0, #%B2"
8374 [(set_attr "length" "4,4,4,4,8,8,8,8")
8375 (set_attr "conds" "use")]
8378 (define_insn "*movsfcc_soft_insn"
8379 [(set (match_operand:SF 0 "s_register_operand" "=r,r")
8380 (if_then_else:SF (match_operator 3 "arm_comparison_operator"
8381 [(match_operand 4 "cc_register" "") (const_int 0)])
8382 (match_operand:SF 1 "s_register_operand" "0,r")
8383 (match_operand:SF 2 "s_register_operand" "r,0")))]
8384 "TARGET_ARM && TARGET_SOFT_FLOAT"
8388 [(set_attr "conds" "use")]
8392 ;; Jump and linkage insns
8394 (define_expand "jump"
8396 (label_ref (match_operand 0 "" "")))]
8401 (define_insn "*arm_jump"
8403 (label_ref (match_operand 0 "" "")))]
8407 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
8409 arm_ccfsm_state += 2;
8412 return \"b%?\\t%l0\";
8415 [(set_attr "predicable" "yes")]
8418 (define_insn "*thumb_jump"
8420 (label_ref (match_operand 0 "" "")))]
8423 if (get_attr_length (insn) == 2)
8425 return \"bl\\t%l0\\t%@ far jump\";
8427 [(set (attr "far_jump")
8429 (eq_attr "length" "4")
8430 (const_string "yes")
8431 (const_string "no")))
8432 (set (attr "length")
8434 (and (ge (minus (match_dup 0) (pc)) (const_int -2044))
8435 (le (minus (match_dup 0) (pc)) (const_int 2048)))
8440 (define_expand "call"
8441 [(parallel [(call (match_operand 0 "memory_operand" "")
8442 (match_operand 1 "general_operand" ""))
8443 (use (match_operand 2 "" ""))
8444 (clobber (reg:SI LR_REGNUM))])]
8450 /* In an untyped call, we can get NULL for operand 2. */
8451 if (operands[2] == NULL_RTX)
8452 operands[2] = const0_rtx;
8454 /* Decide if we should generate indirect calls by loading the
8455 32-bit address of the callee into a register before performing the
8457 callee = XEXP (operands[0], 0);
8458 if (GET_CODE (callee) == SYMBOL_REF
8459 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
8461 XEXP (operands[0], 0) = force_reg (Pmode, callee);
8463 pat = gen_call_internal (operands[0], operands[1], operands[2]);
8464 arm_emit_call_insn (pat, XEXP (operands[0], 0));
8469 (define_expand "call_internal"
8470 [(parallel [(call (match_operand 0 "memory_operand" "")
8471 (match_operand 1 "general_operand" ""))
8472 (use (match_operand 2 "" ""))
8473 (clobber (reg:SI LR_REGNUM))])])
8475 (define_insn "*call_reg_armv5"
8476 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
8477 (match_operand 1 "" ""))
8478 (use (match_operand 2 "" ""))
8479 (clobber (reg:SI LR_REGNUM))]
8480 "TARGET_ARM && arm_arch5"
8482 [(set_attr "type" "call")]
8485 (define_insn "*call_reg_arm"
8486 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
8487 (match_operand 1 "" ""))
8488 (use (match_operand 2 "" ""))
8489 (clobber (reg:SI LR_REGNUM))]
8490 "TARGET_ARM && !arm_arch5"
8492 return output_call (operands);
8494 ;; length is worst case, normally it is only two
8495 [(set_attr "length" "12")
8496 (set_attr "type" "call")]
8500 ;; Note: not used for armv5+ because the sequence used (ldr pc, ...) is not
8501 ;; considered a function call by the branch predictor of some cores (PR40887).
8502 ;; Falls back to blx rN (*call_reg_armv5).
8504 (define_insn "*call_mem"
8505 [(call (mem:SI (match_operand:SI 0 "call_memory_operand" "m"))
8506 (match_operand 1 "" ""))
8507 (use (match_operand 2 "" ""))
8508 (clobber (reg:SI LR_REGNUM))]
8509 "TARGET_ARM && !arm_arch5"
8511 return output_call_mem (operands);
8513 [(set_attr "length" "12")
8514 (set_attr "type" "call")]
8517 (define_insn "*call_reg_thumb1_v5"
8518 [(call (mem:SI (match_operand:SI 0 "register_operand" "l*r"))
8519 (match_operand 1 "" ""))
8520 (use (match_operand 2 "" ""))
8521 (clobber (reg:SI LR_REGNUM))]
8522 "TARGET_THUMB1 && arm_arch5"
8524 [(set_attr "length" "2")
8525 (set_attr "type" "call")]
8528 (define_insn "*call_reg_thumb1"
8529 [(call (mem:SI (match_operand:SI 0 "register_operand" "l*r"))
8530 (match_operand 1 "" ""))
8531 (use (match_operand 2 "" ""))
8532 (clobber (reg:SI LR_REGNUM))]
8533 "TARGET_THUMB1 && !arm_arch5"
8536 if (!TARGET_CALLER_INTERWORKING)
8537 return thumb_call_via_reg (operands[0]);
8538 else if (operands[1] == const0_rtx)
8539 return \"bl\\t%__interwork_call_via_%0\";
8540 else if (frame_pointer_needed)
8541 return \"bl\\t%__interwork_r7_call_via_%0\";
8543 return \"bl\\t%__interwork_r11_call_via_%0\";
8545 [(set_attr "type" "call")]
8548 (define_expand "call_value"
8549 [(parallel [(set (match_operand 0 "" "")
8550 (call (match_operand 1 "memory_operand" "")
8551 (match_operand 2 "general_operand" "")))
8552 (use (match_operand 3 "" ""))
8553 (clobber (reg:SI LR_REGNUM))])]
8559 /* In an untyped call, we can get NULL for operand 2. */
8560 if (operands[3] == 0)
8561 operands[3] = const0_rtx;
8563 /* Decide if we should generate indirect calls by loading the
8564 32-bit address of the callee into a register before performing the
8566 callee = XEXP (operands[1], 0);
8567 if (GET_CODE (callee) == SYMBOL_REF
8568 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
8570 XEXP (operands[1], 0) = force_reg (Pmode, callee);
8572 pat = gen_call_value_internal (operands[0], operands[1],
8573 operands[2], operands[3]);
8574 arm_emit_call_insn (pat, XEXP (operands[1], 0));
8579 (define_expand "call_value_internal"
8580 [(parallel [(set (match_operand 0 "" "")
8581 (call (match_operand 1 "memory_operand" "")
8582 (match_operand 2 "general_operand" "")))
8583 (use (match_operand 3 "" ""))
8584 (clobber (reg:SI LR_REGNUM))])])
8586 (define_insn "*call_value_reg_armv5"
8587 [(set (match_operand 0 "" "")
8588 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
8589 (match_operand 2 "" "")))
8590 (use (match_operand 3 "" ""))
8591 (clobber (reg:SI LR_REGNUM))]
8592 "TARGET_ARM && arm_arch5"
8594 [(set_attr "type" "call")]
8597 (define_insn "*call_value_reg_arm"
8598 [(set (match_operand 0 "" "")
8599 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
8600 (match_operand 2 "" "")))
8601 (use (match_operand 3 "" ""))
8602 (clobber (reg:SI LR_REGNUM))]
8603 "TARGET_ARM && !arm_arch5"
8605 return output_call (&operands[1]);
8607 [(set_attr "length" "12")
8608 (set_attr "type" "call")]
8611 ;; Note: see *call_mem
8613 (define_insn "*call_value_mem"
8614 [(set (match_operand 0 "" "")
8615 (call (mem:SI (match_operand:SI 1 "call_memory_operand" "m"))
8616 (match_operand 2 "" "")))
8617 (use (match_operand 3 "" ""))
8618 (clobber (reg:SI LR_REGNUM))]
8619 "TARGET_ARM && !arm_arch5 && (!CONSTANT_ADDRESS_P (XEXP (operands[1], 0)))"
8621 return output_call_mem (&operands[1]);
8623 [(set_attr "length" "12")
8624 (set_attr "type" "call")]
8627 (define_insn "*call_value_reg_thumb1_v5"
8628 [(set (match_operand 0 "" "")
8629 (call (mem:SI (match_operand:SI 1 "register_operand" "l*r"))
8630 (match_operand 2 "" "")))
8631 (use (match_operand 3 "" ""))
8632 (clobber (reg:SI LR_REGNUM))]
8633 "TARGET_THUMB1 && arm_arch5"
8635 [(set_attr "length" "2")
8636 (set_attr "type" "call")]
8639 (define_insn "*call_value_reg_thumb1"
8640 [(set (match_operand 0 "" "")
8641 (call (mem:SI (match_operand:SI 1 "register_operand" "l*r"))
8642 (match_operand 2 "" "")))
8643 (use (match_operand 3 "" ""))
8644 (clobber (reg:SI LR_REGNUM))]
8645 "TARGET_THUMB1 && !arm_arch5"
8648 if (!TARGET_CALLER_INTERWORKING)
8649 return thumb_call_via_reg (operands[1]);
8650 else if (operands[2] == const0_rtx)
8651 return \"bl\\t%__interwork_call_via_%1\";
8652 else if (frame_pointer_needed)
8653 return \"bl\\t%__interwork_r7_call_via_%1\";
8655 return \"bl\\t%__interwork_r11_call_via_%1\";
8657 [(set_attr "type" "call")]
8660 ;; Allow calls to SYMBOL_REFs specially as they are not valid general addresses
8661 ;; The 'a' causes the operand to be treated as an address, i.e. no '#' output.
8663 (define_insn "*call_symbol"
8664 [(call (mem:SI (match_operand:SI 0 "" ""))
8665 (match_operand 1 "" ""))
8666 (use (match_operand 2 "" ""))
8667 (clobber (reg:SI LR_REGNUM))]
8669 && (GET_CODE (operands[0]) == SYMBOL_REF)
8670 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
8673 return NEED_PLT_RELOC ? \"bl%?\\t%a0(PLT)\" : \"bl%?\\t%a0\";
8675 [(set_attr "type" "call")]
8678 (define_insn "*call_value_symbol"
8679 [(set (match_operand 0 "" "")
8680 (call (mem:SI (match_operand:SI 1 "" ""))
8681 (match_operand:SI 2 "" "")))
8682 (use (match_operand 3 "" ""))
8683 (clobber (reg:SI LR_REGNUM))]
8685 && (GET_CODE (operands[1]) == SYMBOL_REF)
8686 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
8689 return NEED_PLT_RELOC ? \"bl%?\\t%a1(PLT)\" : \"bl%?\\t%a1\";
8691 [(set_attr "type" "call")]
8694 (define_insn "*call_insn"
8695 [(call (mem:SI (match_operand:SI 0 "" ""))
8696 (match_operand:SI 1 "" ""))
8697 (use (match_operand 2 "" ""))
8698 (clobber (reg:SI LR_REGNUM))]
8700 && GET_CODE (operands[0]) == SYMBOL_REF
8701 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
8703 [(set_attr "length" "4")
8704 (set_attr "type" "call")]
8707 (define_insn "*call_value_insn"
8708 [(set (match_operand 0 "" "")
8709 (call (mem:SI (match_operand 1 "" ""))
8710 (match_operand 2 "" "")))
8711 (use (match_operand 3 "" ""))
8712 (clobber (reg:SI LR_REGNUM))]
8714 && GET_CODE (operands[1]) == SYMBOL_REF
8715 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
8717 [(set_attr "length" "4")
8718 (set_attr "type" "call")]
8721 ;; We may also be able to do sibcalls for Thumb, but it's much harder...
8722 (define_expand "sibcall"
8723 [(parallel [(call (match_operand 0 "memory_operand" "")
8724 (match_operand 1 "general_operand" ""))
8726 (use (match_operand 2 "" ""))])]
8730 if (operands[2] == NULL_RTX)
8731 operands[2] = const0_rtx;
8735 (define_expand "sibcall_value"
8736 [(parallel [(set (match_operand 0 "" "")
8737 (call (match_operand 1 "memory_operand" "")
8738 (match_operand 2 "general_operand" "")))
8740 (use (match_operand 3 "" ""))])]
8744 if (operands[3] == NULL_RTX)
8745 operands[3] = const0_rtx;
8749 (define_insn "*sibcall_insn"
8750 [(call (mem:SI (match_operand:SI 0 "" "X"))
8751 (match_operand 1 "" ""))
8753 (use (match_operand 2 "" ""))]
8754 "TARGET_32BIT && GET_CODE (operands[0]) == SYMBOL_REF"
8756 return NEED_PLT_RELOC ? \"b%?\\t%a0(PLT)\" : \"b%?\\t%a0\";
8758 [(set_attr "type" "call")]
8761 (define_insn "*sibcall_value_insn"
8762 [(set (match_operand 0 "" "")
8763 (call (mem:SI (match_operand:SI 1 "" "X"))
8764 (match_operand 2 "" "")))
8766 (use (match_operand 3 "" ""))]
8767 "TARGET_32BIT && GET_CODE (operands[1]) == SYMBOL_REF"
8769 return NEED_PLT_RELOC ? \"b%?\\t%a1(PLT)\" : \"b%?\\t%a1\";
8771 [(set_attr "type" "call")]
8774 (define_expand "return"
8776 "TARGET_32BIT && USE_RETURN_INSN (FALSE)"
8779 ;; Often the return insn will be the same as loading from memory, so set attr
8780 (define_insn "*arm_return"
8782 "TARGET_ARM && USE_RETURN_INSN (FALSE)"
8785 if (arm_ccfsm_state == 2)
8787 arm_ccfsm_state += 2;
8790 return output_return_instruction (const_true_rtx, TRUE, FALSE);
8792 [(set_attr "type" "load1")
8793 (set_attr "length" "12")
8794 (set_attr "predicable" "yes")]
8797 (define_insn "*cond_return"
8799 (if_then_else (match_operator 0 "arm_comparison_operator"
8800 [(match_operand 1 "cc_register" "") (const_int 0)])
8803 "TARGET_ARM && USE_RETURN_INSN (TRUE)"
8806 if (arm_ccfsm_state == 2)
8808 arm_ccfsm_state += 2;
8811 return output_return_instruction (operands[0], TRUE, FALSE);
8813 [(set_attr "conds" "use")
8814 (set_attr "length" "12")
8815 (set_attr "type" "load1")]
8818 (define_insn "*cond_return_inverted"
8820 (if_then_else (match_operator 0 "arm_comparison_operator"
8821 [(match_operand 1 "cc_register" "") (const_int 0)])
8824 "TARGET_ARM && USE_RETURN_INSN (TRUE)"
8827 if (arm_ccfsm_state == 2)
8829 arm_ccfsm_state += 2;
8832 return output_return_instruction (operands[0], TRUE, TRUE);
8834 [(set_attr "conds" "use")
8835 (set_attr "length" "12")
8836 (set_attr "type" "load1")]
8839 ;; Generate a sequence of instructions to determine if the processor is
8840 ;; in 26-bit or 32-bit mode, and return the appropriate return address
8843 (define_expand "return_addr_mask"
8845 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
8847 (set (match_operand:SI 0 "s_register_operand" "")
8848 (if_then_else:SI (eq (match_dup 1) (const_int 0))
8850 (const_int 67108860)))] ; 0x03fffffc
8853 operands[1] = gen_rtx_REG (CC_NOOVmode, CC_REGNUM);
8856 (define_insn "*check_arch2"
8857 [(set (match_operand:CC_NOOV 0 "cc_register" "")
8858 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
8861 "teq\\t%|r0, %|r0\;teq\\t%|pc, %|pc"
8862 [(set_attr "length" "8")
8863 (set_attr "conds" "set")]
8866 ;; Call subroutine returning any type.
8868 (define_expand "untyped_call"
8869 [(parallel [(call (match_operand 0 "" "")
8871 (match_operand 1 "" "")
8872 (match_operand 2 "" "")])]
8877 rtx par = gen_rtx_PARALLEL (VOIDmode,
8878 rtvec_alloc (XVECLEN (operands[2], 0)));
8879 rtx addr = gen_reg_rtx (Pmode);
8883 emit_move_insn (addr, XEXP (operands[1], 0));
8884 mem = change_address (operands[1], BLKmode, addr);
8886 for (i = 0; i < XVECLEN (operands[2], 0); i++)
8888 rtx src = SET_SRC (XVECEXP (operands[2], 0, i));
8890 /* Default code only uses r0 as a return value, but we could
8891 be using anything up to 4 registers. */
8892 if (REGNO (src) == R0_REGNUM)
8893 src = gen_rtx_REG (TImode, R0_REGNUM);
8895 XVECEXP (par, 0, i) = gen_rtx_EXPR_LIST (VOIDmode, src,
8897 size += GET_MODE_SIZE (GET_MODE (src));
8900 emit_call_insn (GEN_CALL_VALUE (par, operands[0], const0_rtx, NULL,
8905 for (i = 0; i < XVECLEN (par, 0); i++)
8907 HOST_WIDE_INT offset = 0;
8908 rtx reg = XEXP (XVECEXP (par, 0, i), 0);
8911 emit_move_insn (addr, plus_constant (addr, size));
8913 mem = change_address (mem, GET_MODE (reg), NULL);
8914 if (REGNO (reg) == R0_REGNUM)
8916 /* On thumb we have to use a write-back instruction. */
8917 emit_insn (arm_gen_store_multiple (R0_REGNUM, 4, addr, TRUE,
8918 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
8919 size = TARGET_ARM ? 16 : 0;
8923 emit_move_insn (mem, reg);
8924 size = GET_MODE_SIZE (GET_MODE (reg));
8928 /* The optimizer does not know that the call sets the function value
8929 registers we stored in the result block. We avoid problems by
8930 claiming that all hard registers are used and clobbered at this
8932 emit_insn (gen_blockage ());
8938 (define_expand "untyped_return"
8939 [(match_operand:BLK 0 "memory_operand" "")
8940 (match_operand 1 "" "")]
8945 rtx addr = gen_reg_rtx (Pmode);
8949 emit_move_insn (addr, XEXP (operands[0], 0));
8950 mem = change_address (operands[0], BLKmode, addr);
8952 for (i = 0; i < XVECLEN (operands[1], 0); i++)
8954 HOST_WIDE_INT offset = 0;
8955 rtx reg = SET_DEST (XVECEXP (operands[1], 0, i));
8958 emit_move_insn (addr, plus_constant (addr, size));
8960 mem = change_address (mem, GET_MODE (reg), NULL);
8961 if (REGNO (reg) == R0_REGNUM)
8963 /* On thumb we have to use a write-back instruction. */
8964 emit_insn (arm_gen_load_multiple (R0_REGNUM, 4, addr, TRUE,
8965 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
8966 size = TARGET_ARM ? 16 : 0;
8970 emit_move_insn (reg, mem);
8971 size = GET_MODE_SIZE (GET_MODE (reg));
8975 /* Emit USE insns before the return. */
8976 for (i = 0; i < XVECLEN (operands[1], 0); i++)
8977 emit_use (SET_DEST (XVECEXP (operands[1], 0, i)));
8979 /* Construct the return. */
8980 expand_naked_return ();
8986 ;; UNSPEC_VOLATILE is considered to use and clobber all hard registers and
8987 ;; all of memory. This blocks insns from being moved across this point.
8989 (define_insn "blockage"
8990 [(unspec_volatile [(const_int 0)] VUNSPEC_BLOCKAGE)]
8993 [(set_attr "length" "0")
8994 (set_attr "type" "block")]
8997 (define_expand "casesi"
8998 [(match_operand:SI 0 "s_register_operand" "") ; index to jump on
8999 (match_operand:SI 1 "const_int_operand" "") ; lower bound
9000 (match_operand:SI 2 "const_int_operand" "") ; total range
9001 (match_operand:SI 3 "" "") ; table label
9002 (match_operand:SI 4 "" "")] ; Out of range label
9003 "TARGET_32BIT || optimize_size || flag_pic"
9006 enum insn_code code;
9007 if (operands[1] != const0_rtx)
9009 rtx reg = gen_reg_rtx (SImode);
9011 emit_insn (gen_addsi3 (reg, operands[0],
9012 GEN_INT (-INTVAL (operands[1]))));
9017 code = CODE_FOR_arm_casesi_internal;
9018 else if (TARGET_THUMB1)
9019 code = CODE_FOR_thumb1_casesi_internal_pic;
9021 code = CODE_FOR_thumb2_casesi_internal_pic;
9023 code = CODE_FOR_thumb2_casesi_internal;
9025 if (!insn_data[(int) code].operand[1].predicate(operands[2], SImode))
9026 operands[2] = force_reg (SImode, operands[2]);
9028 emit_jump_insn (GEN_FCN ((int) code) (operands[0], operands[2],
9029 operands[3], operands[4]));
9034 ;; The USE in this pattern is needed to tell flow analysis that this is
9035 ;; a CASESI insn. It has no other purpose.
9036 (define_insn "arm_casesi_internal"
9037 [(parallel [(set (pc)
9039 (leu (match_operand:SI 0 "s_register_operand" "r")
9040 (match_operand:SI 1 "arm_rhs_operand" "rI"))
9041 (mem:SI (plus:SI (mult:SI (match_dup 0) (const_int 4))
9042 (label_ref (match_operand 2 "" ""))))
9043 (label_ref (match_operand 3 "" ""))))
9044 (clobber (reg:CC CC_REGNUM))
9045 (use (label_ref (match_dup 2)))])]
9049 return \"cmp\\t%0, %1\;addls\\t%|pc, %|pc, %0, asl #2\;b\\t%l3\";
9050 return \"cmp\\t%0, %1\;ldrls\\t%|pc, [%|pc, %0, asl #2]\;b\\t%l3\";
9052 [(set_attr "conds" "clob")
9053 (set_attr "length" "12")]
9056 (define_expand "thumb1_casesi_internal_pic"
9057 [(match_operand:SI 0 "s_register_operand" "")
9058 (match_operand:SI 1 "thumb1_cmp_operand" "")
9059 (match_operand 2 "" "")
9060 (match_operand 3 "" "")]
9064 rtx test = gen_rtx_GTU (VOIDmode, operands[0], operands[1]);
9065 emit_jump_insn (gen_cbranchsi4 (test, operands[0], operands[1],
9067 reg0 = gen_rtx_REG (SImode, 0);
9068 emit_move_insn (reg0, operands[0]);
9069 emit_jump_insn (gen_thumb1_casesi_dispatch (operands[2]/*, operands[3]*/));
9074 (define_insn "thumb1_casesi_dispatch"
9075 [(parallel [(set (pc) (unspec [(reg:SI 0)
9076 (label_ref (match_operand 0 "" ""))
9077 ;; (label_ref (match_operand 1 "" ""))
9079 UNSPEC_THUMB1_CASESI))
9080 (clobber (reg:SI IP_REGNUM))
9081 (clobber (reg:SI LR_REGNUM))])]
9083 "* return thumb1_output_casesi(operands);"
9084 [(set_attr "length" "4")]
9087 (define_expand "indirect_jump"
9089 (match_operand:SI 0 "s_register_operand" ""))]
9092 /* Thumb-2 doesn't have mov pc, reg. Explicitly set the low bit of the
9093 address and use bx. */
9097 tmp = gen_reg_rtx (SImode);
9098 emit_insn (gen_iorsi3 (tmp, operands[0], GEN_INT(1)));
9104 ;; NB Never uses BX.
9105 (define_insn "*arm_indirect_jump"
9107 (match_operand:SI 0 "s_register_operand" "r"))]
9109 "mov%?\\t%|pc, %0\\t%@ indirect register jump"
9110 [(set_attr "predicable" "yes")]
9113 (define_insn "*load_indirect_jump"
9115 (match_operand:SI 0 "memory_operand" "m"))]
9117 "ldr%?\\t%|pc, %0\\t%@ indirect memory jump"
9118 [(set_attr "type" "load1")
9119 (set_attr "pool_range" "4096")
9120 (set_attr "neg_pool_range" "4084")
9121 (set_attr "predicable" "yes")]
9124 ;; NB Never uses BX.
9125 (define_insn "*thumb1_indirect_jump"
9127 (match_operand:SI 0 "register_operand" "l*r"))]
9130 [(set_attr "conds" "clob")
9131 (set_attr "length" "2")]
9141 if (TARGET_UNIFIED_ASM)
9144 return \"mov%?\\t%|r0, %|r0\\t%@ nop\";
9145 return \"mov\\tr8, r8\";
9147 [(set (attr "length")
9148 (if_then_else (eq_attr "is_thumb" "yes")
9154 ;; Patterns to allow combination of arithmetic, cond code and shifts
9156 (define_insn "*arith_shiftsi"
9157 [(set (match_operand:SI 0 "s_register_operand" "=r")
9158 (match_operator:SI 1 "shiftable_operator"
9159 [(match_operator:SI 3 "shift_operator"
9160 [(match_operand:SI 4 "s_register_operand" "r")
9161 (match_operand:SI 5 "reg_or_int_operand" "rI")])
9162 (match_operand:SI 2 "s_register_operand" "r")]))]
9164 "%i1%?\\t%0, %2, %4%S3"
9165 [(set_attr "predicable" "yes")
9166 (set_attr "shift" "4")
9167 (set (attr "type") (if_then_else (match_operand 5 "const_int_operand" "")
9168 (const_string "alu_shift")
9169 (const_string "alu_shift_reg")))]
9173 [(set (match_operand:SI 0 "s_register_operand" "")
9174 (match_operator:SI 1 "shiftable_operator"
9175 [(match_operator:SI 2 "shiftable_operator"
9176 [(match_operator:SI 3 "shift_operator"
9177 [(match_operand:SI 4 "s_register_operand" "")
9178 (match_operand:SI 5 "reg_or_int_operand" "")])
9179 (match_operand:SI 6 "s_register_operand" "")])
9180 (match_operand:SI 7 "arm_rhs_operand" "")]))
9181 (clobber (match_operand:SI 8 "s_register_operand" ""))]
9184 (match_op_dup 2 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
9187 (match_op_dup 1 [(match_dup 8) (match_dup 7)]))]
9190 (define_insn "*arith_shiftsi_compare0"
9191 [(set (reg:CC_NOOV CC_REGNUM)
9192 (compare:CC_NOOV (match_operator:SI 1 "shiftable_operator"
9193 [(match_operator:SI 3 "shift_operator"
9194 [(match_operand:SI 4 "s_register_operand" "r")
9195 (match_operand:SI 5 "reg_or_int_operand" "rI")])
9196 (match_operand:SI 2 "s_register_operand" "r")])
9198 (set (match_operand:SI 0 "s_register_operand" "=r")
9199 (match_op_dup 1 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
9202 "%i1%.\\t%0, %2, %4%S3"
9203 [(set_attr "conds" "set")
9204 (set_attr "shift" "4")
9205 (set (attr "type") (if_then_else (match_operand 5 "const_int_operand" "")
9206 (const_string "alu_shift")
9207 (const_string "alu_shift_reg")))]
9210 (define_insn "*arith_shiftsi_compare0_scratch"
9211 [(set (reg:CC_NOOV CC_REGNUM)
9212 (compare:CC_NOOV (match_operator:SI 1 "shiftable_operator"
9213 [(match_operator:SI 3 "shift_operator"
9214 [(match_operand:SI 4 "s_register_operand" "r")
9215 (match_operand:SI 5 "reg_or_int_operand" "rI")])
9216 (match_operand:SI 2 "s_register_operand" "r")])
9218 (clobber (match_scratch:SI 0 "=r"))]
9220 "%i1%.\\t%0, %2, %4%S3"
9221 [(set_attr "conds" "set")
9222 (set_attr "shift" "4")
9223 (set (attr "type") (if_then_else (match_operand 5 "const_int_operand" "")
9224 (const_string "alu_shift")
9225 (const_string "alu_shift_reg")))]
9228 (define_insn "*sub_shiftsi"
9229 [(set (match_operand:SI 0 "s_register_operand" "=r")
9230 (minus:SI (match_operand:SI 1 "s_register_operand" "r")
9231 (match_operator:SI 2 "shift_operator"
9232 [(match_operand:SI 3 "s_register_operand" "r")
9233 (match_operand:SI 4 "reg_or_int_operand" "rM")])))]
9235 "sub%?\\t%0, %1, %3%S2"
9236 [(set_attr "predicable" "yes")
9237 (set_attr "shift" "3")
9238 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
9239 (const_string "alu_shift")
9240 (const_string "alu_shift_reg")))]
9243 (define_insn "*sub_shiftsi_compare0"
9244 [(set (reg:CC_NOOV CC_REGNUM)
9246 (minus:SI (match_operand:SI 1 "s_register_operand" "r")
9247 (match_operator:SI 2 "shift_operator"
9248 [(match_operand:SI 3 "s_register_operand" "r")
9249 (match_operand:SI 4 "reg_or_int_operand" "rM")]))
9251 (set (match_operand:SI 0 "s_register_operand" "=r")
9252 (minus:SI (match_dup 1) (match_op_dup 2 [(match_dup 3)
9255 "sub%.\\t%0, %1, %3%S2"
9256 [(set_attr "conds" "set")
9257 (set_attr "shift" "3")
9258 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
9259 (const_string "alu_shift")
9260 (const_string "alu_shift_reg")))]
9263 (define_insn "*sub_shiftsi_compare0_scratch"
9264 [(set (reg:CC_NOOV CC_REGNUM)
9266 (minus:SI (match_operand:SI 1 "s_register_operand" "r")
9267 (match_operator:SI 2 "shift_operator"
9268 [(match_operand:SI 3 "s_register_operand" "r")
9269 (match_operand:SI 4 "reg_or_int_operand" "rM")]))
9271 (clobber (match_scratch:SI 0 "=r"))]
9273 "sub%.\\t%0, %1, %3%S2"
9274 [(set_attr "conds" "set")
9275 (set_attr "shift" "3")
9276 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
9277 (const_string "alu_shift")
9278 (const_string "alu_shift_reg")))]
9283 (define_insn "*and_scc"
9284 [(set (match_operand:SI 0 "s_register_operand" "=r")
9285 (and:SI (match_operator:SI 1 "arm_comparison_operator"
9286 [(match_operand 3 "cc_register" "") (const_int 0)])
9287 (match_operand:SI 2 "s_register_operand" "r")))]
9289 "mov%D1\\t%0, #0\;and%d1\\t%0, %2, #1"
9290 [(set_attr "conds" "use")
9291 (set_attr "length" "8")]
9294 (define_insn "*ior_scc"
9295 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9296 (ior:SI (match_operator:SI 2 "arm_comparison_operator"
9297 [(match_operand 3 "cc_register" "") (const_int 0)])
9298 (match_operand:SI 1 "s_register_operand" "0,?r")))]
9302 mov%D2\\t%0, %1\;orr%d2\\t%0, %1, #1"
9303 [(set_attr "conds" "use")
9304 (set_attr "length" "4,8")]
9307 (define_insn "*compare_scc"
9308 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9309 (match_operator:SI 1 "arm_comparison_operator"
9310 [(match_operand:SI 2 "s_register_operand" "r,r")
9311 (match_operand:SI 3 "arm_add_operand" "rI,L")]))
9312 (clobber (reg:CC CC_REGNUM))]
9315 if (operands[3] == const0_rtx)
9317 if (GET_CODE (operands[1]) == LT)
9318 return \"mov\\t%0, %2, lsr #31\";
9320 if (GET_CODE (operands[1]) == GE)
9321 return \"mvn\\t%0, %2\;mov\\t%0, %0, lsr #31\";
9323 if (GET_CODE (operands[1]) == EQ)
9324 return \"rsbs\\t%0, %2, #1\;movcc\\t%0, #0\";
9327 if (GET_CODE (operands[1]) == NE)
9329 if (which_alternative == 1)
9330 return \"adds\\t%0, %2, #%n3\;movne\\t%0, #1\";
9331 return \"subs\\t%0, %2, %3\;movne\\t%0, #1\";
9333 if (which_alternative == 1)
9334 output_asm_insn (\"cmn\\t%2, #%n3\", operands);
9336 output_asm_insn (\"cmp\\t%2, %3\", operands);
9337 return \"mov%D1\\t%0, #0\;mov%d1\\t%0, #1\";
9339 [(set_attr "conds" "clob")
9340 (set_attr "length" "12")]
9343 (define_insn "*cond_move"
9344 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9345 (if_then_else:SI (match_operator 3 "equality_operator"
9346 [(match_operator 4 "arm_comparison_operator"
9347 [(match_operand 5 "cc_register" "") (const_int 0)])
9349 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
9350 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))]
9353 if (GET_CODE (operands[3]) == NE)
9355 if (which_alternative != 1)
9356 output_asm_insn (\"mov%D4\\t%0, %2\", operands);
9357 if (which_alternative != 0)
9358 output_asm_insn (\"mov%d4\\t%0, %1\", operands);
9361 if (which_alternative != 0)
9362 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9363 if (which_alternative != 1)
9364 output_asm_insn (\"mov%d4\\t%0, %2\", operands);
9367 [(set_attr "conds" "use")
9368 (set_attr "length" "4,4,8")]
9371 (define_insn "*cond_arith"
9372 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9373 (match_operator:SI 5 "shiftable_operator"
9374 [(match_operator:SI 4 "arm_comparison_operator"
9375 [(match_operand:SI 2 "s_register_operand" "r,r")
9376 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
9377 (match_operand:SI 1 "s_register_operand" "0,?r")]))
9378 (clobber (reg:CC CC_REGNUM))]
9381 if (GET_CODE (operands[4]) == LT && operands[3] == const0_rtx)
9382 return \"%i5\\t%0, %1, %2, lsr #31\";
9384 output_asm_insn (\"cmp\\t%2, %3\", operands);
9385 if (GET_CODE (operands[5]) == AND)
9386 output_asm_insn (\"mov%D4\\t%0, #0\", operands);
9387 else if (GET_CODE (operands[5]) == MINUS)
9388 output_asm_insn (\"rsb%D4\\t%0, %1, #0\", operands);
9389 else if (which_alternative != 0)
9390 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9391 return \"%i5%d4\\t%0, %1, #1\";
9393 [(set_attr "conds" "clob")
9394 (set_attr "length" "12")]
9397 (define_insn "*cond_sub"
9398 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9399 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
9400 (match_operator:SI 4 "arm_comparison_operator"
9401 [(match_operand:SI 2 "s_register_operand" "r,r")
9402 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
9403 (clobber (reg:CC CC_REGNUM))]
9406 output_asm_insn (\"cmp\\t%2, %3\", operands);
9407 if (which_alternative != 0)
9408 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9409 return \"sub%d4\\t%0, %1, #1\";
9411 [(set_attr "conds" "clob")
9412 (set_attr "length" "8,12")]
9415 ;; ??? Is it worth using these conditional patterns in Thumb-2 mode?
9416 (define_insn "*cmp_ite0"
9417 [(set (match_operand 6 "dominant_cc_register" "")
9420 (match_operator 4 "arm_comparison_operator"
9421 [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
9422 (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
9423 (match_operator:SI 5 "arm_comparison_operator"
9424 [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
9425 (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")])
9431 static const char * const opcodes[4][2] =
9433 {\"cmp\\t%2, %3\;cmp%d5\\t%0, %1\",
9434 \"cmp\\t%0, %1\;cmp%d4\\t%2, %3\"},
9435 {\"cmp\\t%2, %3\;cmn%d5\\t%0, #%n1\",
9436 \"cmn\\t%0, #%n1\;cmp%d4\\t%2, %3\"},
9437 {\"cmn\\t%2, #%n3\;cmp%d5\\t%0, %1\",
9438 \"cmp\\t%0, %1\;cmn%d4\\t%2, #%n3\"},
9439 {\"cmn\\t%2, #%n3\;cmn%d5\\t%0, #%n1\",
9440 \"cmn\\t%0, #%n1\;cmn%d4\\t%2, #%n3\"}
9443 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9445 return opcodes[which_alternative][swap];
9447 [(set_attr "conds" "set")
9448 (set_attr "length" "8")]
9451 (define_insn "*cmp_ite1"
9452 [(set (match_operand 6 "dominant_cc_register" "")
9455 (match_operator 4 "arm_comparison_operator"
9456 [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
9457 (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
9458 (match_operator:SI 5 "arm_comparison_operator"
9459 [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
9460 (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")])
9466 static const char * const opcodes[4][2] =
9468 {\"cmp\\t%0, %1\;cmp%d4\\t%2, %3\",
9469 \"cmp\\t%2, %3\;cmp%D5\\t%0, %1\"},
9470 {\"cmn\\t%0, #%n1\;cmp%d4\\t%2, %3\",
9471 \"cmp\\t%2, %3\;cmn%D5\\t%0, #%n1\"},
9472 {\"cmp\\t%0, %1\;cmn%d4\\t%2, #%n3\",
9473 \"cmn\\t%2, #%n3\;cmp%D5\\t%0, %1\"},
9474 {\"cmn\\t%0, #%n1\;cmn%d4\\t%2, #%n3\",
9475 \"cmn\\t%2, #%n3\;cmn%D5\\t%0, #%n1\"}
9478 comparison_dominates_p (GET_CODE (operands[5]),
9479 reverse_condition (GET_CODE (operands[4])));
9481 return opcodes[which_alternative][swap];
9483 [(set_attr "conds" "set")
9484 (set_attr "length" "8")]
9487 (define_insn "*cmp_and"
9488 [(set (match_operand 6 "dominant_cc_register" "")
9491 (match_operator 4 "arm_comparison_operator"
9492 [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
9493 (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
9494 (match_operator:SI 5 "arm_comparison_operator"
9495 [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
9496 (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")]))
9501 static const char *const opcodes[4][2] =
9503 {\"cmp\\t%2, %3\;cmp%d5\\t%0, %1\",
9504 \"cmp\\t%0, %1\;cmp%d4\\t%2, %3\"},
9505 {\"cmp\\t%2, %3\;cmn%d5\\t%0, #%n1\",
9506 \"cmn\\t%0, #%n1\;cmp%d4\\t%2, %3\"},
9507 {\"cmn\\t%2, #%n3\;cmp%d5\\t%0, %1\",
9508 \"cmp\\t%0, %1\;cmn%d4\\t%2, #%n3\"},
9509 {\"cmn\\t%2, #%n3\;cmn%d5\\t%0, #%n1\",
9510 \"cmn\\t%0, #%n1\;cmn%d4\\t%2, #%n3\"}
9513 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9515 return opcodes[which_alternative][swap];
9517 [(set_attr "conds" "set")
9518 (set_attr "predicable" "no")
9519 (set_attr "length" "8")]
9522 (define_insn "*cmp_ior"
9523 [(set (match_operand 6 "dominant_cc_register" "")
9526 (match_operator 4 "arm_comparison_operator"
9527 [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
9528 (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
9529 (match_operator:SI 5 "arm_comparison_operator"
9530 [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
9531 (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")]))
9536 static const char *const opcodes[4][2] =
9538 {\"cmp\\t%0, %1\;cmp%D4\\t%2, %3\",
9539 \"cmp\\t%2, %3\;cmp%D5\\t%0, %1\"},
9540 {\"cmn\\t%0, #%n1\;cmp%D4\\t%2, %3\",
9541 \"cmp\\t%2, %3\;cmn%D5\\t%0, #%n1\"},
9542 {\"cmp\\t%0, %1\;cmn%D4\\t%2, #%n3\",
9543 \"cmn\\t%2, #%n3\;cmp%D5\\t%0, %1\"},
9544 {\"cmn\\t%0, #%n1\;cmn%D4\\t%2, #%n3\",
9545 \"cmn\\t%2, #%n3\;cmn%D5\\t%0, #%n1\"}
9548 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9550 return opcodes[which_alternative][swap];
9553 [(set_attr "conds" "set")
9554 (set_attr "length" "8")]
9557 (define_insn_and_split "*ior_scc_scc"
9558 [(set (match_operand:SI 0 "s_register_operand" "=r")
9559 (ior:SI (match_operator:SI 3 "arm_comparison_operator"
9560 [(match_operand:SI 1 "s_register_operand" "r")
9561 (match_operand:SI 2 "arm_add_operand" "rIL")])
9562 (match_operator:SI 6 "arm_comparison_operator"
9563 [(match_operand:SI 4 "s_register_operand" "r")
9564 (match_operand:SI 5 "arm_add_operand" "rIL")])))
9565 (clobber (reg:CC CC_REGNUM))]
9567 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_OR_Y)
9570 "TARGET_ARM && reload_completed"
9574 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9575 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9577 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
9579 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
9582 [(set_attr "conds" "clob")
9583 (set_attr "length" "16")])
9585 ; If the above pattern is followed by a CMP insn, then the compare is
9586 ; redundant, since we can rework the conditional instruction that follows.
9587 (define_insn_and_split "*ior_scc_scc_cmp"
9588 [(set (match_operand 0 "dominant_cc_register" "")
9589 (compare (ior:SI (match_operator:SI 3 "arm_comparison_operator"
9590 [(match_operand:SI 1 "s_register_operand" "r")
9591 (match_operand:SI 2 "arm_add_operand" "rIL")])
9592 (match_operator:SI 6 "arm_comparison_operator"
9593 [(match_operand:SI 4 "s_register_operand" "r")
9594 (match_operand:SI 5 "arm_add_operand" "rIL")]))
9596 (set (match_operand:SI 7 "s_register_operand" "=r")
9597 (ior:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9598 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
9601 "TARGET_ARM && reload_completed"
9605 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9606 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9608 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
9610 [(set_attr "conds" "set")
9611 (set_attr "length" "16")])
9613 (define_insn_and_split "*and_scc_scc"
9614 [(set (match_operand:SI 0 "s_register_operand" "=r")
9615 (and:SI (match_operator:SI 3 "arm_comparison_operator"
9616 [(match_operand:SI 1 "s_register_operand" "r")
9617 (match_operand:SI 2 "arm_add_operand" "rIL")])
9618 (match_operator:SI 6 "arm_comparison_operator"
9619 [(match_operand:SI 4 "s_register_operand" "r")
9620 (match_operand:SI 5 "arm_add_operand" "rIL")])))
9621 (clobber (reg:CC CC_REGNUM))]
9623 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9626 "TARGET_ARM && reload_completed
9627 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9632 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9633 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9635 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
9637 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
9640 [(set_attr "conds" "clob")
9641 (set_attr "length" "16")])
9643 ; If the above pattern is followed by a CMP insn, then the compare is
9644 ; redundant, since we can rework the conditional instruction that follows.
9645 (define_insn_and_split "*and_scc_scc_cmp"
9646 [(set (match_operand 0 "dominant_cc_register" "")
9647 (compare (and:SI (match_operator:SI 3 "arm_comparison_operator"
9648 [(match_operand:SI 1 "s_register_operand" "r")
9649 (match_operand:SI 2 "arm_add_operand" "rIL")])
9650 (match_operator:SI 6 "arm_comparison_operator"
9651 [(match_operand:SI 4 "s_register_operand" "r")
9652 (match_operand:SI 5 "arm_add_operand" "rIL")]))
9654 (set (match_operand:SI 7 "s_register_operand" "=r")
9655 (and:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9656 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
9659 "TARGET_ARM && reload_completed"
9663 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9664 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9666 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
9668 [(set_attr "conds" "set")
9669 (set_attr "length" "16")])
9671 ;; If there is no dominance in the comparison, then we can still save an
9672 ;; instruction in the AND case, since we can know that the second compare
9673 ;; need only zero the value if false (if true, then the value is already
9675 (define_insn_and_split "*and_scc_scc_nodom"
9676 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
9677 (and:SI (match_operator:SI 3 "arm_comparison_operator"
9678 [(match_operand:SI 1 "s_register_operand" "r,r,0")
9679 (match_operand:SI 2 "arm_add_operand" "rIL,0,rIL")])
9680 (match_operator:SI 6 "arm_comparison_operator"
9681 [(match_operand:SI 4 "s_register_operand" "r,r,r")
9682 (match_operand:SI 5 "arm_add_operand" "rIL,rIL,rIL")])))
9683 (clobber (reg:CC CC_REGNUM))]
9685 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9688 "TARGET_ARM && reload_completed"
9689 [(parallel [(set (match_dup 0)
9690 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))
9691 (clobber (reg:CC CC_REGNUM))])
9692 (set (match_dup 7) (match_op_dup 8 [(match_dup 4) (match_dup 5)]))
9694 (if_then_else:SI (match_op_dup 6 [(match_dup 7) (const_int 0)])
9697 "operands[7] = gen_rtx_REG (SELECT_CC_MODE (GET_CODE (operands[6]),
9698 operands[4], operands[5]),
9700 operands[8] = gen_rtx_COMPARE (GET_MODE (operands[7]), operands[4],
9702 [(set_attr "conds" "clob")
9703 (set_attr "length" "20")])
9706 [(set (reg:CC_NOOV CC_REGNUM)
9707 (compare:CC_NOOV (ior:SI
9708 (and:SI (match_operand:SI 0 "s_register_operand" "")
9710 (match_operator:SI 1 "arm_comparison_operator"
9711 [(match_operand:SI 2 "s_register_operand" "")
9712 (match_operand:SI 3 "arm_add_operand" "")]))
9714 (clobber (match_operand:SI 4 "s_register_operand" ""))]
9717 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
9719 (set (reg:CC_NOOV CC_REGNUM)
9720 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
9725 [(set (reg:CC_NOOV CC_REGNUM)
9726 (compare:CC_NOOV (ior:SI
9727 (match_operator:SI 1 "arm_comparison_operator"
9728 [(match_operand:SI 2 "s_register_operand" "")
9729 (match_operand:SI 3 "arm_add_operand" "")])
9730 (and:SI (match_operand:SI 0 "s_register_operand" "")
9733 (clobber (match_operand:SI 4 "s_register_operand" ""))]
9736 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
9738 (set (reg:CC_NOOV CC_REGNUM)
9739 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
9742 ;; ??? The conditional patterns above need checking for Thumb-2 usefulness
9744 (define_insn "*negscc"
9745 [(set (match_operand:SI 0 "s_register_operand" "=r")
9746 (neg:SI (match_operator 3 "arm_comparison_operator"
9747 [(match_operand:SI 1 "s_register_operand" "r")
9748 (match_operand:SI 2 "arm_rhs_operand" "rI")])))
9749 (clobber (reg:CC CC_REGNUM))]
9752 if (GET_CODE (operands[3]) == LT && operands[2] == const0_rtx)
9753 return \"mov\\t%0, %1, asr #31\";
9755 if (GET_CODE (operands[3]) == NE)
9756 return \"subs\\t%0, %1, %2\;mvnne\\t%0, #0\";
9758 output_asm_insn (\"cmp\\t%1, %2\", operands);
9759 output_asm_insn (\"mov%D3\\t%0, #0\", operands);
9760 return \"mvn%d3\\t%0, #0\";
9762 [(set_attr "conds" "clob")
9763 (set_attr "length" "12")]
9766 (define_insn "movcond"
9767 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9769 (match_operator 5 "arm_comparison_operator"
9770 [(match_operand:SI 3 "s_register_operand" "r,r,r")
9771 (match_operand:SI 4 "arm_add_operand" "rIL,rIL,rIL")])
9772 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
9773 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
9774 (clobber (reg:CC CC_REGNUM))]
9777 if (GET_CODE (operands[5]) == LT
9778 && (operands[4] == const0_rtx))
9780 if (which_alternative != 1 && GET_CODE (operands[1]) == REG)
9782 if (operands[2] == const0_rtx)
9783 return \"and\\t%0, %1, %3, asr #31\";
9784 return \"ands\\t%0, %1, %3, asr #32\;movcc\\t%0, %2\";
9786 else if (which_alternative != 0 && GET_CODE (operands[2]) == REG)
9788 if (operands[1] == const0_rtx)
9789 return \"bic\\t%0, %2, %3, asr #31\";
9790 return \"bics\\t%0, %2, %3, asr #32\;movcs\\t%0, %1\";
9792 /* The only case that falls through to here is when both ops 1 & 2
9796 if (GET_CODE (operands[5]) == GE
9797 && (operands[4] == const0_rtx))
9799 if (which_alternative != 1 && GET_CODE (operands[1]) == REG)
9801 if (operands[2] == const0_rtx)
9802 return \"bic\\t%0, %1, %3, asr #31\";
9803 return \"bics\\t%0, %1, %3, asr #32\;movcs\\t%0, %2\";
9805 else if (which_alternative != 0 && GET_CODE (operands[2]) == REG)
9807 if (operands[1] == const0_rtx)
9808 return \"and\\t%0, %2, %3, asr #31\";
9809 return \"ands\\t%0, %2, %3, asr #32\;movcc\\t%0, %1\";
9811 /* The only case that falls through to here is when both ops 1 & 2
9814 if (GET_CODE (operands[4]) == CONST_INT
9815 && !const_ok_for_arm (INTVAL (operands[4])))
9816 output_asm_insn (\"cmn\\t%3, #%n4\", operands);
9818 output_asm_insn (\"cmp\\t%3, %4\", operands);
9819 if (which_alternative != 0)
9820 output_asm_insn (\"mov%d5\\t%0, %1\", operands);
9821 if (which_alternative != 1)
9822 output_asm_insn (\"mov%D5\\t%0, %2\", operands);
9825 [(set_attr "conds" "clob")
9826 (set_attr "length" "8,8,12")]
9829 ;; ??? The patterns below need checking for Thumb-2 usefulness.
9831 (define_insn "*ifcompare_plus_move"
9832 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9833 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9834 [(match_operand:SI 4 "s_register_operand" "r,r")
9835 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9837 (match_operand:SI 2 "s_register_operand" "r,r")
9838 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))
9839 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
9840 (clobber (reg:CC CC_REGNUM))]
9843 [(set_attr "conds" "clob")
9844 (set_attr "length" "8,12")]
9847 (define_insn "*if_plus_move"
9848 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
9850 (match_operator 4 "arm_comparison_operator"
9851 [(match_operand 5 "cc_register" "") (const_int 0)])
9853 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
9854 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))
9855 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")))]
9859 sub%d4\\t%0, %2, #%n3
9860 add%d4\\t%0, %2, %3\;mov%D4\\t%0, %1
9861 sub%d4\\t%0, %2, #%n3\;mov%D4\\t%0, %1"
9862 [(set_attr "conds" "use")
9863 (set_attr "length" "4,4,8,8")
9864 (set_attr "type" "*,*,*,*")]
9867 (define_insn "*ifcompare_move_plus"
9868 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9869 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9870 [(match_operand:SI 4 "s_register_operand" "r,r")
9871 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9872 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9874 (match_operand:SI 2 "s_register_operand" "r,r")
9875 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))))
9876 (clobber (reg:CC CC_REGNUM))]
9879 [(set_attr "conds" "clob")
9880 (set_attr "length" "8,12")]
9883 (define_insn "*if_move_plus"
9884 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
9886 (match_operator 4 "arm_comparison_operator"
9887 [(match_operand 5 "cc_register" "") (const_int 0)])
9888 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")
9890 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
9891 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))))]
9895 sub%D4\\t%0, %2, #%n3
9896 add%D4\\t%0, %2, %3\;mov%d4\\t%0, %1
9897 sub%D4\\t%0, %2, #%n3\;mov%d4\\t%0, %1"
9898 [(set_attr "conds" "use")
9899 (set_attr "length" "4,4,8,8")
9900 (set_attr "type" "*,*,*,*")]
9903 (define_insn "*ifcompare_arith_arith"
9904 [(set (match_operand:SI 0 "s_register_operand" "=r")
9905 (if_then_else:SI (match_operator 9 "arm_comparison_operator"
9906 [(match_operand:SI 5 "s_register_operand" "r")
9907 (match_operand:SI 6 "arm_add_operand" "rIL")])
9908 (match_operator:SI 8 "shiftable_operator"
9909 [(match_operand:SI 1 "s_register_operand" "r")
9910 (match_operand:SI 2 "arm_rhs_operand" "rI")])
9911 (match_operator:SI 7 "shiftable_operator"
9912 [(match_operand:SI 3 "s_register_operand" "r")
9913 (match_operand:SI 4 "arm_rhs_operand" "rI")])))
9914 (clobber (reg:CC CC_REGNUM))]
9917 [(set_attr "conds" "clob")
9918 (set_attr "length" "12")]
9921 (define_insn "*if_arith_arith"
9922 [(set (match_operand:SI 0 "s_register_operand" "=r")
9923 (if_then_else:SI (match_operator 5 "arm_comparison_operator"
9924 [(match_operand 8 "cc_register" "") (const_int 0)])
9925 (match_operator:SI 6 "shiftable_operator"
9926 [(match_operand:SI 1 "s_register_operand" "r")
9927 (match_operand:SI 2 "arm_rhs_operand" "rI")])
9928 (match_operator:SI 7 "shiftable_operator"
9929 [(match_operand:SI 3 "s_register_operand" "r")
9930 (match_operand:SI 4 "arm_rhs_operand" "rI")])))]
9932 "%I6%d5\\t%0, %1, %2\;%I7%D5\\t%0, %3, %4"
9933 [(set_attr "conds" "use")
9934 (set_attr "length" "8")]
9937 (define_insn "*ifcompare_arith_move"
9938 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9939 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9940 [(match_operand:SI 2 "s_register_operand" "r,r")
9941 (match_operand:SI 3 "arm_add_operand" "rIL,rIL")])
9942 (match_operator:SI 7 "shiftable_operator"
9943 [(match_operand:SI 4 "s_register_operand" "r,r")
9944 (match_operand:SI 5 "arm_rhs_operand" "rI,rI")])
9945 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
9946 (clobber (reg:CC CC_REGNUM))]
9949 /* If we have an operation where (op x 0) is the identity operation and
9950 the conditional operator is LT or GE and we are comparing against zero and
9951 everything is in registers then we can do this in two instructions. */
9952 if (operands[3] == const0_rtx
9953 && GET_CODE (operands[7]) != AND
9954 && GET_CODE (operands[5]) == REG
9955 && GET_CODE (operands[1]) == REG
9956 && REGNO (operands[1]) == REGNO (operands[4])
9957 && REGNO (operands[4]) != REGNO (operands[0]))
9959 if (GET_CODE (operands[6]) == LT)
9960 return \"and\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
9961 else if (GET_CODE (operands[6]) == GE)
9962 return \"bic\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
9964 if (GET_CODE (operands[3]) == CONST_INT
9965 && !const_ok_for_arm (INTVAL (operands[3])))
9966 output_asm_insn (\"cmn\\t%2, #%n3\", operands);
9968 output_asm_insn (\"cmp\\t%2, %3\", operands);
9969 output_asm_insn (\"%I7%d6\\t%0, %4, %5\", operands);
9970 if (which_alternative != 0)
9971 return \"mov%D6\\t%0, %1\";
9974 [(set_attr "conds" "clob")
9975 (set_attr "length" "8,12")]
9978 (define_insn "*if_arith_move"
9979 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9980 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
9981 [(match_operand 6 "cc_register" "") (const_int 0)])
9982 (match_operator:SI 5 "shiftable_operator"
9983 [(match_operand:SI 2 "s_register_operand" "r,r")
9984 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
9985 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))]
9989 %I5%d4\\t%0, %2, %3\;mov%D4\\t%0, %1"
9990 [(set_attr "conds" "use")
9991 (set_attr "length" "4,8")
9992 (set_attr "type" "*,*")]
9995 (define_insn "*ifcompare_move_arith"
9996 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9997 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9998 [(match_operand:SI 4 "s_register_operand" "r,r")
9999 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
10000 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10001 (match_operator:SI 7 "shiftable_operator"
10002 [(match_operand:SI 2 "s_register_operand" "r,r")
10003 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
10004 (clobber (reg:CC CC_REGNUM))]
10007 /* If we have an operation where (op x 0) is the identity operation and
10008 the conditional operator is LT or GE and we are comparing against zero and
10009 everything is in registers then we can do this in two instructions */
10010 if (operands[5] == const0_rtx
10011 && GET_CODE (operands[7]) != AND
10012 && GET_CODE (operands[3]) == REG
10013 && GET_CODE (operands[1]) == REG
10014 && REGNO (operands[1]) == REGNO (operands[2])
10015 && REGNO (operands[2]) != REGNO (operands[0]))
10017 if (GET_CODE (operands[6]) == GE)
10018 return \"and\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
10019 else if (GET_CODE (operands[6]) == LT)
10020 return \"bic\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
10023 if (GET_CODE (operands[5]) == CONST_INT
10024 && !const_ok_for_arm (INTVAL (operands[5])))
10025 output_asm_insn (\"cmn\\t%4, #%n5\", operands);
10027 output_asm_insn (\"cmp\\t%4, %5\", operands);
10029 if (which_alternative != 0)
10030 output_asm_insn (\"mov%d6\\t%0, %1\", operands);
10031 return \"%I7%D6\\t%0, %2, %3\";
10033 [(set_attr "conds" "clob")
10034 (set_attr "length" "8,12")]
10037 (define_insn "*if_move_arith"
10038 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10040 (match_operator 4 "arm_comparison_operator"
10041 [(match_operand 6 "cc_register" "") (const_int 0)])
10042 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10043 (match_operator:SI 5 "shiftable_operator"
10044 [(match_operand:SI 2 "s_register_operand" "r,r")
10045 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))]
10048 %I5%D4\\t%0, %2, %3
10049 %I5%D4\\t%0, %2, %3\;mov%d4\\t%0, %1"
10050 [(set_attr "conds" "use")
10051 (set_attr "length" "4,8")
10052 (set_attr "type" "*,*")]
10055 (define_insn "*ifcompare_move_not"
10056 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10058 (match_operator 5 "arm_comparison_operator"
10059 [(match_operand:SI 3 "s_register_operand" "r,r")
10060 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10061 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
10063 (match_operand:SI 2 "s_register_operand" "r,r"))))
10064 (clobber (reg:CC CC_REGNUM))]
10067 [(set_attr "conds" "clob")
10068 (set_attr "length" "8,12")]
10071 (define_insn "*if_move_not"
10072 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10074 (match_operator 4 "arm_comparison_operator"
10075 [(match_operand 3 "cc_register" "") (const_int 0)])
10076 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
10077 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
10081 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2
10082 mvn%d4\\t%0, #%B1\;mvn%D4\\t%0, %2"
10083 [(set_attr "conds" "use")
10084 (set_attr "length" "4,8,8")]
10087 (define_insn "*ifcompare_not_move"
10088 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10090 (match_operator 5 "arm_comparison_operator"
10091 [(match_operand:SI 3 "s_register_operand" "r,r")
10092 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10094 (match_operand:SI 2 "s_register_operand" "r,r"))
10095 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
10096 (clobber (reg:CC CC_REGNUM))]
10099 [(set_attr "conds" "clob")
10100 (set_attr "length" "8,12")]
10103 (define_insn "*if_not_move"
10104 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10106 (match_operator 4 "arm_comparison_operator"
10107 [(match_operand 3 "cc_register" "") (const_int 0)])
10108 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
10109 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
10113 mov%D4\\t%0, %1\;mvn%d4\\t%0, %2
10114 mvn%D4\\t%0, #%B1\;mvn%d4\\t%0, %2"
10115 [(set_attr "conds" "use")
10116 (set_attr "length" "4,8,8")]
10119 (define_insn "*ifcompare_shift_move"
10120 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10122 (match_operator 6 "arm_comparison_operator"
10123 [(match_operand:SI 4 "s_register_operand" "r,r")
10124 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
10125 (match_operator:SI 7 "shift_operator"
10126 [(match_operand:SI 2 "s_register_operand" "r,r")
10127 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])
10128 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
10129 (clobber (reg:CC CC_REGNUM))]
10132 [(set_attr "conds" "clob")
10133 (set_attr "length" "8,12")]
10136 (define_insn "*if_shift_move"
10137 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10139 (match_operator 5 "arm_comparison_operator"
10140 [(match_operand 6 "cc_register" "") (const_int 0)])
10141 (match_operator:SI 4 "shift_operator"
10142 [(match_operand:SI 2 "s_register_operand" "r,r,r")
10143 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])
10144 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
10148 mov%D5\\t%0, %1\;mov%d5\\t%0, %2%S4
10149 mvn%D5\\t%0, #%B1\;mov%d5\\t%0, %2%S4"
10150 [(set_attr "conds" "use")
10151 (set_attr "shift" "2")
10152 (set_attr "length" "4,8,8")
10153 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
10154 (const_string "alu_shift")
10155 (const_string "alu_shift_reg")))]
10158 (define_insn "*ifcompare_move_shift"
10159 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10161 (match_operator 6 "arm_comparison_operator"
10162 [(match_operand:SI 4 "s_register_operand" "r,r")
10163 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
10164 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
10165 (match_operator:SI 7 "shift_operator"
10166 [(match_operand:SI 2 "s_register_operand" "r,r")
10167 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])))
10168 (clobber (reg:CC CC_REGNUM))]
10171 [(set_attr "conds" "clob")
10172 (set_attr "length" "8,12")]
10175 (define_insn "*if_move_shift"
10176 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10178 (match_operator 5 "arm_comparison_operator"
10179 [(match_operand 6 "cc_register" "") (const_int 0)])
10180 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
10181 (match_operator:SI 4 "shift_operator"
10182 [(match_operand:SI 2 "s_register_operand" "r,r,r")
10183 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])))]
10187 mov%d5\\t%0, %1\;mov%D5\\t%0, %2%S4
10188 mvn%d5\\t%0, #%B1\;mov%D5\\t%0, %2%S4"
10189 [(set_attr "conds" "use")
10190 (set_attr "shift" "2")
10191 (set_attr "length" "4,8,8")
10192 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
10193 (const_string "alu_shift")
10194 (const_string "alu_shift_reg")))]
10197 (define_insn "*ifcompare_shift_shift"
10198 [(set (match_operand:SI 0 "s_register_operand" "=r")
10200 (match_operator 7 "arm_comparison_operator"
10201 [(match_operand:SI 5 "s_register_operand" "r")
10202 (match_operand:SI 6 "arm_add_operand" "rIL")])
10203 (match_operator:SI 8 "shift_operator"
10204 [(match_operand:SI 1 "s_register_operand" "r")
10205 (match_operand:SI 2 "arm_rhs_operand" "rM")])
10206 (match_operator:SI 9 "shift_operator"
10207 [(match_operand:SI 3 "s_register_operand" "r")
10208 (match_operand:SI 4 "arm_rhs_operand" "rM")])))
10209 (clobber (reg:CC CC_REGNUM))]
10212 [(set_attr "conds" "clob")
10213 (set_attr "length" "12")]
10216 (define_insn "*if_shift_shift"
10217 [(set (match_operand:SI 0 "s_register_operand" "=r")
10219 (match_operator 5 "arm_comparison_operator"
10220 [(match_operand 8 "cc_register" "") (const_int 0)])
10221 (match_operator:SI 6 "shift_operator"
10222 [(match_operand:SI 1 "s_register_operand" "r")
10223 (match_operand:SI 2 "arm_rhs_operand" "rM")])
10224 (match_operator:SI 7 "shift_operator"
10225 [(match_operand:SI 3 "s_register_operand" "r")
10226 (match_operand:SI 4 "arm_rhs_operand" "rM")])))]
10228 "mov%d5\\t%0, %1%S6\;mov%D5\\t%0, %3%S7"
10229 [(set_attr "conds" "use")
10230 (set_attr "shift" "1")
10231 (set_attr "length" "8")
10232 (set (attr "type") (if_then_else
10233 (and (match_operand 2 "const_int_operand" "")
10234 (match_operand 4 "const_int_operand" ""))
10235 (const_string "alu_shift")
10236 (const_string "alu_shift_reg")))]
10239 (define_insn "*ifcompare_not_arith"
10240 [(set (match_operand:SI 0 "s_register_operand" "=r")
10242 (match_operator 6 "arm_comparison_operator"
10243 [(match_operand:SI 4 "s_register_operand" "r")
10244 (match_operand:SI 5 "arm_add_operand" "rIL")])
10245 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
10246 (match_operator:SI 7 "shiftable_operator"
10247 [(match_operand:SI 2 "s_register_operand" "r")
10248 (match_operand:SI 3 "arm_rhs_operand" "rI")])))
10249 (clobber (reg:CC CC_REGNUM))]
10252 [(set_attr "conds" "clob")
10253 (set_attr "length" "12")]
10256 (define_insn "*if_not_arith"
10257 [(set (match_operand:SI 0 "s_register_operand" "=r")
10259 (match_operator 5 "arm_comparison_operator"
10260 [(match_operand 4 "cc_register" "") (const_int 0)])
10261 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
10262 (match_operator:SI 6 "shiftable_operator"
10263 [(match_operand:SI 2 "s_register_operand" "r")
10264 (match_operand:SI 3 "arm_rhs_operand" "rI")])))]
10266 "mvn%d5\\t%0, %1\;%I6%D5\\t%0, %2, %3"
10267 [(set_attr "conds" "use")
10268 (set_attr "length" "8")]
10271 (define_insn "*ifcompare_arith_not"
10272 [(set (match_operand:SI 0 "s_register_operand" "=r")
10274 (match_operator 6 "arm_comparison_operator"
10275 [(match_operand:SI 4 "s_register_operand" "r")
10276 (match_operand:SI 5 "arm_add_operand" "rIL")])
10277 (match_operator:SI 7 "shiftable_operator"
10278 [(match_operand:SI 2 "s_register_operand" "r")
10279 (match_operand:SI 3 "arm_rhs_operand" "rI")])
10280 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))
10281 (clobber (reg:CC CC_REGNUM))]
10284 [(set_attr "conds" "clob")
10285 (set_attr "length" "12")]
10288 (define_insn "*if_arith_not"
10289 [(set (match_operand:SI 0 "s_register_operand" "=r")
10291 (match_operator 5 "arm_comparison_operator"
10292 [(match_operand 4 "cc_register" "") (const_int 0)])
10293 (match_operator:SI 6 "shiftable_operator"
10294 [(match_operand:SI 2 "s_register_operand" "r")
10295 (match_operand:SI 3 "arm_rhs_operand" "rI")])
10296 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))]
10298 "mvn%D5\\t%0, %1\;%I6%d5\\t%0, %2, %3"
10299 [(set_attr "conds" "use")
10300 (set_attr "length" "8")]
10303 (define_insn "*ifcompare_neg_move"
10304 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10306 (match_operator 5 "arm_comparison_operator"
10307 [(match_operand:SI 3 "s_register_operand" "r,r")
10308 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10309 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))
10310 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
10311 (clobber (reg:CC CC_REGNUM))]
10314 [(set_attr "conds" "clob")
10315 (set_attr "length" "8,12")]
10318 (define_insn "*if_neg_move"
10319 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10321 (match_operator 4 "arm_comparison_operator"
10322 [(match_operand 3 "cc_register" "") (const_int 0)])
10323 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
10324 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
10327 rsb%d4\\t%0, %2, #0
10328 mov%D4\\t%0, %1\;rsb%d4\\t%0, %2, #0
10329 mvn%D4\\t%0, #%B1\;rsb%d4\\t%0, %2, #0"
10330 [(set_attr "conds" "use")
10331 (set_attr "length" "4,8,8")]
10334 (define_insn "*ifcompare_move_neg"
10335 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10337 (match_operator 5 "arm_comparison_operator"
10338 [(match_operand:SI 3 "s_register_operand" "r,r")
10339 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10340 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
10341 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))))
10342 (clobber (reg:CC CC_REGNUM))]
10345 [(set_attr "conds" "clob")
10346 (set_attr "length" "8,12")]
10349 (define_insn "*if_move_neg"
10350 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10352 (match_operator 4 "arm_comparison_operator"
10353 [(match_operand 3 "cc_register" "") (const_int 0)])
10354 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
10355 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
10358 rsb%D4\\t%0, %2, #0
10359 mov%d4\\t%0, %1\;rsb%D4\\t%0, %2, #0
10360 mvn%d4\\t%0, #%B1\;rsb%D4\\t%0, %2, #0"
10361 [(set_attr "conds" "use")
10362 (set_attr "length" "4,8,8")]
10365 (define_insn "*arith_adjacentmem"
10366 [(set (match_operand:SI 0 "s_register_operand" "=r")
10367 (match_operator:SI 1 "shiftable_operator"
10368 [(match_operand:SI 2 "memory_operand" "m")
10369 (match_operand:SI 3 "memory_operand" "m")]))
10370 (clobber (match_scratch:SI 4 "=r"))]
10371 "TARGET_ARM && adjacent_mem_locations (operands[2], operands[3])"
10377 HOST_WIDE_INT val1 = 0, val2 = 0;
10379 if (REGNO (operands[0]) > REGNO (operands[4]))
10381 ldm[1] = operands[4];
10382 ldm[2] = operands[0];
10386 ldm[1] = operands[0];
10387 ldm[2] = operands[4];
10390 base_reg = XEXP (operands[2], 0);
10392 if (!REG_P (base_reg))
10394 val1 = INTVAL (XEXP (base_reg, 1));
10395 base_reg = XEXP (base_reg, 0);
10398 if (!REG_P (XEXP (operands[3], 0)))
10399 val2 = INTVAL (XEXP (XEXP (operands[3], 0), 1));
10401 arith[0] = operands[0];
10402 arith[3] = operands[1];
10416 if (val1 !=0 && val2 != 0)
10420 if (val1 == 4 || val2 == 4)
10421 /* Other val must be 8, since we know they are adjacent and neither
10423 output_asm_insn (\"ldm%(ib%)\\t%0, {%1, %2}\", ldm);
10424 else if (const_ok_for_arm (val1) || const_ok_for_arm (-val1))
10426 ldm[0] = ops[0] = operands[4];
10428 ops[2] = GEN_INT (val1);
10429 output_add_immediate (ops);
10431 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
10433 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
10437 /* Offset is out of range for a single add, so use two ldr. */
10440 ops[2] = GEN_INT (val1);
10441 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
10443 ops[2] = GEN_INT (val2);
10444 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
10447 else if (val1 != 0)
10450 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
10452 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
10457 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
10459 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
10461 output_asm_insn (\"%I3%?\\t%0, %1, %2\", arith);
10464 [(set_attr "length" "12")
10465 (set_attr "predicable" "yes")
10466 (set_attr "type" "load1")]
10469 ; This pattern is never tried by combine, so do it as a peephole
10472 [(set (match_operand:SI 0 "arm_general_register_operand" "")
10473 (match_operand:SI 1 "arm_general_register_operand" ""))
10474 (set (reg:CC CC_REGNUM)
10475 (compare:CC (match_dup 1) (const_int 0)))]
10477 [(parallel [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 1) (const_int 0)))
10478 (set (match_dup 0) (match_dup 1))])]
10482 ; Peepholes to spot possible load- and store-multiples, if the ordering is
10483 ; reversed, check that the memory references aren't volatile.
10486 [(set (match_operand:SI 0 "s_register_operand" "=rk")
10487 (match_operand:SI 4 "memory_operand" "m"))
10488 (set (match_operand:SI 1 "s_register_operand" "=rk")
10489 (match_operand:SI 5 "memory_operand" "m"))
10490 (set (match_operand:SI 2 "s_register_operand" "=rk")
10491 (match_operand:SI 6 "memory_operand" "m"))
10492 (set (match_operand:SI 3 "s_register_operand" "=rk")
10493 (match_operand:SI 7 "memory_operand" "m"))]
10494 "TARGET_ARM && load_multiple_sequence (operands, 4, NULL, NULL, NULL)"
10496 return emit_ldm_seq (operands, 4);
10501 [(set (match_operand:SI 0 "s_register_operand" "=rk")
10502 (match_operand:SI 3 "memory_operand" "m"))
10503 (set (match_operand:SI 1 "s_register_operand" "=rk")
10504 (match_operand:SI 4 "memory_operand" "m"))
10505 (set (match_operand:SI 2 "s_register_operand" "=rk")
10506 (match_operand:SI 5 "memory_operand" "m"))]
10507 "TARGET_ARM && load_multiple_sequence (operands, 3, NULL, NULL, NULL)"
10509 return emit_ldm_seq (operands, 3);
10514 [(set (match_operand:SI 0 "s_register_operand" "=rk")
10515 (match_operand:SI 2 "memory_operand" "m"))
10516 (set (match_operand:SI 1 "s_register_operand" "=rk")
10517 (match_operand:SI 3 "memory_operand" "m"))]
10518 "TARGET_ARM && load_multiple_sequence (operands, 2, NULL, NULL, NULL)"
10520 return emit_ldm_seq (operands, 2);
10525 [(set (match_operand:SI 4 "memory_operand" "=m")
10526 (match_operand:SI 0 "s_register_operand" "rk"))
10527 (set (match_operand:SI 5 "memory_operand" "=m")
10528 (match_operand:SI 1 "s_register_operand" "rk"))
10529 (set (match_operand:SI 6 "memory_operand" "=m")
10530 (match_operand:SI 2 "s_register_operand" "rk"))
10531 (set (match_operand:SI 7 "memory_operand" "=m")
10532 (match_operand:SI 3 "s_register_operand" "rk"))]
10533 "TARGET_ARM && store_multiple_sequence (operands, 4, NULL, NULL, NULL)"
10535 return emit_stm_seq (operands, 4);
10540 [(set (match_operand:SI 3 "memory_operand" "=m")
10541 (match_operand:SI 0 "s_register_operand" "rk"))
10542 (set (match_operand:SI 4 "memory_operand" "=m")
10543 (match_operand:SI 1 "s_register_operand" "rk"))
10544 (set (match_operand:SI 5 "memory_operand" "=m")
10545 (match_operand:SI 2 "s_register_operand" "rk"))]
10546 "TARGET_ARM && store_multiple_sequence (operands, 3, NULL, NULL, NULL)"
10548 return emit_stm_seq (operands, 3);
10553 [(set (match_operand:SI 2 "memory_operand" "=m")
10554 (match_operand:SI 0 "s_register_operand" "rk"))
10555 (set (match_operand:SI 3 "memory_operand" "=m")
10556 (match_operand:SI 1 "s_register_operand" "rk"))]
10557 "TARGET_ARM && store_multiple_sequence (operands, 2, NULL, NULL, NULL)"
10559 return emit_stm_seq (operands, 2);
10564 [(set (match_operand:SI 0 "s_register_operand" "")
10565 (and:SI (ge:SI (match_operand:SI 1 "s_register_operand" "")
10567 (neg:SI (match_operator:SI 2 "arm_comparison_operator"
10568 [(match_operand:SI 3 "s_register_operand" "")
10569 (match_operand:SI 4 "arm_rhs_operand" "")]))))
10570 (clobber (match_operand:SI 5 "s_register_operand" ""))]
10572 [(set (match_dup 5) (not:SI (ashiftrt:SI (match_dup 1) (const_int 31))))
10573 (set (match_dup 0) (and:SI (match_op_dup 2 [(match_dup 3) (match_dup 4)])
10578 ;; This split can be used because CC_Z mode implies that the following
10579 ;; branch will be an equality, or an unsigned inequality, so the sign
10580 ;; extension is not needed.
10583 [(set (reg:CC_Z CC_REGNUM)
10585 (ashift:SI (subreg:SI (match_operand:QI 0 "memory_operand" "") 0)
10587 (match_operand 1 "const_int_operand" "")))
10588 (clobber (match_scratch:SI 2 ""))]
10590 && (((unsigned HOST_WIDE_INT) INTVAL (operands[1]))
10591 == (((unsigned HOST_WIDE_INT) INTVAL (operands[1])) >> 24) << 24)"
10592 [(set (match_dup 2) (zero_extend:SI (match_dup 0)))
10593 (set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 1)))]
10595 operands[1] = GEN_INT (((unsigned long) INTVAL (operands[1])) >> 24);
10598 ;; ??? Check the patterns above for Thumb-2 usefulness
10600 (define_expand "prologue"
10601 [(clobber (const_int 0))]
10604 arm_expand_prologue ();
10606 thumb1_expand_prologue ();
10611 (define_expand "epilogue"
10612 [(clobber (const_int 0))]
10615 if (crtl->calls_eh_return)
10616 emit_insn (gen_prologue_use (gen_rtx_REG (Pmode, 2)));
10618 thumb1_expand_epilogue ();
10619 else if (USE_RETURN_INSN (FALSE))
10621 emit_jump_insn (gen_return ());
10624 emit_jump_insn (gen_rtx_UNSPEC_VOLATILE (VOIDmode,
10626 gen_rtx_RETURN (VOIDmode)),
10627 VUNSPEC_EPILOGUE));
10632 ;; Note - although unspec_volatile's USE all hard registers,
10633 ;; USEs are ignored after relaod has completed. Thus we need
10634 ;; to add an unspec of the link register to ensure that flow
10635 ;; does not think that it is unused by the sibcall branch that
10636 ;; will replace the standard function epilogue.
10637 (define_insn "sibcall_epilogue"
10638 [(parallel [(unspec:SI [(reg:SI LR_REGNUM)] UNSPEC_PROLOGUE_USE)
10639 (unspec_volatile [(return)] VUNSPEC_EPILOGUE)])]
10642 if (use_return_insn (FALSE, next_nonnote_insn (insn)))
10643 return output_return_instruction (const_true_rtx, FALSE, FALSE);
10644 return arm_output_epilogue (next_nonnote_insn (insn));
10646 ;; Length is absolute worst case
10647 [(set_attr "length" "44")
10648 (set_attr "type" "block")
10649 ;; We don't clobber the conditions, but the potential length of this
10650 ;; operation is sufficient to make conditionalizing the sequence
10651 ;; unlikely to be profitable.
10652 (set_attr "conds" "clob")]
10655 (define_insn "*epilogue_insns"
10656 [(unspec_volatile [(return)] VUNSPEC_EPILOGUE)]
10660 return arm_output_epilogue (NULL);
10661 else /* TARGET_THUMB1 */
10662 return thumb_unexpanded_epilogue ();
10664 ; Length is absolute worst case
10665 [(set_attr "length" "44")
10666 (set_attr "type" "block")
10667 ;; We don't clobber the conditions, but the potential length of this
10668 ;; operation is sufficient to make conditionalizing the sequence
10669 ;; unlikely to be profitable.
10670 (set_attr "conds" "clob")]
10673 (define_expand "eh_epilogue"
10674 [(use (match_operand:SI 0 "register_operand" ""))
10675 (use (match_operand:SI 1 "register_operand" ""))
10676 (use (match_operand:SI 2 "register_operand" ""))]
10680 cfun->machine->eh_epilogue_sp_ofs = operands[1];
10681 if (GET_CODE (operands[2]) != REG || REGNO (operands[2]) != 2)
10683 rtx ra = gen_rtx_REG (Pmode, 2);
10685 emit_move_insn (ra, operands[2]);
10688 /* This is a hack -- we may have crystalized the function type too
10690 cfun->machine->func_type = 0;
10694 ;; This split is only used during output to reduce the number of patterns
10695 ;; that need assembler instructions adding to them. We allowed the setting
10696 ;; of the conditions to be implicit during rtl generation so that
10697 ;; the conditional compare patterns would work. However this conflicts to
10698 ;; some extent with the conditional data operations, so we have to split them
10701 ;; ??? Need to audit these splitters for Thumb-2. Why isn't normal
10702 ;; conditional execution sufficient?
10705 [(set (match_operand:SI 0 "s_register_operand" "")
10706 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10707 [(match_operand 2 "" "") (match_operand 3 "" "")])
10709 (match_operand 4 "" "")))
10710 (clobber (reg:CC CC_REGNUM))]
10711 "TARGET_ARM && reload_completed"
10712 [(set (match_dup 5) (match_dup 6))
10713 (cond_exec (match_dup 7)
10714 (set (match_dup 0) (match_dup 4)))]
10717 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10718 operands[2], operands[3]);
10719 enum rtx_code rc = GET_CODE (operands[1]);
10721 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
10722 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10723 if (mode == CCFPmode || mode == CCFPEmode)
10724 rc = reverse_condition_maybe_unordered (rc);
10726 rc = reverse_condition (rc);
10728 operands[7] = gen_rtx_fmt_ee (rc, VOIDmode, operands[5], const0_rtx);
10733 [(set (match_operand:SI 0 "s_register_operand" "")
10734 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10735 [(match_operand 2 "" "") (match_operand 3 "" "")])
10736 (match_operand 4 "" "")
10738 (clobber (reg:CC CC_REGNUM))]
10739 "TARGET_ARM && reload_completed"
10740 [(set (match_dup 5) (match_dup 6))
10741 (cond_exec (match_op_dup 1 [(match_dup 5) (const_int 0)])
10742 (set (match_dup 0) (match_dup 4)))]
10745 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10746 operands[2], operands[3]);
10748 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
10749 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10754 [(set (match_operand:SI 0 "s_register_operand" "")
10755 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10756 [(match_operand 2 "" "") (match_operand 3 "" "")])
10757 (match_operand 4 "" "")
10758 (match_operand 5 "" "")))
10759 (clobber (reg:CC CC_REGNUM))]
10760 "TARGET_ARM && reload_completed"
10761 [(set (match_dup 6) (match_dup 7))
10762 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
10763 (set (match_dup 0) (match_dup 4)))
10764 (cond_exec (match_dup 8)
10765 (set (match_dup 0) (match_dup 5)))]
10768 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10769 operands[2], operands[3]);
10770 enum rtx_code rc = GET_CODE (operands[1]);
10772 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10773 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10774 if (mode == CCFPmode || mode == CCFPEmode)
10775 rc = reverse_condition_maybe_unordered (rc);
10777 rc = reverse_condition (rc);
10779 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
10784 [(set (match_operand:SI 0 "s_register_operand" "")
10785 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10786 [(match_operand:SI 2 "s_register_operand" "")
10787 (match_operand:SI 3 "arm_add_operand" "")])
10788 (match_operand:SI 4 "arm_rhs_operand" "")
10790 (match_operand:SI 5 "s_register_operand" ""))))
10791 (clobber (reg:CC CC_REGNUM))]
10792 "TARGET_ARM && reload_completed"
10793 [(set (match_dup 6) (match_dup 7))
10794 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
10795 (set (match_dup 0) (match_dup 4)))
10796 (cond_exec (match_dup 8)
10797 (set (match_dup 0) (not:SI (match_dup 5))))]
10800 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10801 operands[2], operands[3]);
10802 enum rtx_code rc = GET_CODE (operands[1]);
10804 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10805 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10806 if (mode == CCFPmode || mode == CCFPEmode)
10807 rc = reverse_condition_maybe_unordered (rc);
10809 rc = reverse_condition (rc);
10811 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
10815 (define_insn "*cond_move_not"
10816 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10817 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
10818 [(match_operand 3 "cc_register" "") (const_int 0)])
10819 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10821 (match_operand:SI 2 "s_register_operand" "r,r"))))]
10825 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2"
10826 [(set_attr "conds" "use")
10827 (set_attr "length" "4,8")]
10830 ;; The next two patterns occur when an AND operation is followed by a
10831 ;; scc insn sequence
10833 (define_insn "*sign_extract_onebit"
10834 [(set (match_operand:SI 0 "s_register_operand" "=r")
10835 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
10837 (match_operand:SI 2 "const_int_operand" "n")))
10838 (clobber (reg:CC CC_REGNUM))]
10841 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
10842 output_asm_insn (\"ands\\t%0, %1, %2\", operands);
10843 return \"mvnne\\t%0, #0\";
10845 [(set_attr "conds" "clob")
10846 (set_attr "length" "8")]
10849 (define_insn "*not_signextract_onebit"
10850 [(set (match_operand:SI 0 "s_register_operand" "=r")
10852 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
10854 (match_operand:SI 2 "const_int_operand" "n"))))
10855 (clobber (reg:CC CC_REGNUM))]
10858 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
10859 output_asm_insn (\"tst\\t%1, %2\", operands);
10860 output_asm_insn (\"mvneq\\t%0, #0\", operands);
10861 return \"movne\\t%0, #0\";
10863 [(set_attr "conds" "clob")
10864 (set_attr "length" "12")]
10866 ;; ??? The above patterns need auditing for Thumb-2
10868 ;; Push multiple registers to the stack. Registers are in parallel (use ...)
10869 ;; expressions. For simplicity, the first register is also in the unspec
10871 (define_insn "*push_multi"
10872 [(match_parallel 2 "multi_register_push"
10873 [(set (match_operand:BLK 0 "memory_operand" "=m")
10874 (unspec:BLK [(match_operand:SI 1 "s_register_operand" "r")]
10875 UNSPEC_PUSH_MULT))])]
10879 int num_saves = XVECLEN (operands[2], 0);
10881 /* For the StrongARM at least it is faster to
10882 use STR to store only a single register.
10883 In Thumb mode always use push, and the assembler will pick
10884 something appropriate. */
10885 if (num_saves == 1 && TARGET_ARM)
10886 output_asm_insn (\"str\\t%1, [%m0, #-4]!\", operands);
10893 strcpy (pattern, \"stmfd\\t%m0!, {%1\");
10895 strcpy (pattern, \"push\\t{%1\");
10897 for (i = 1; i < num_saves; i++)
10899 strcat (pattern, \", %|\");
10901 reg_names[REGNO (XEXP (XVECEXP (operands[2], 0, i), 0))]);
10904 strcat (pattern, \"}\");
10905 output_asm_insn (pattern, operands);
10910 [(set_attr "type" "store4")]
10913 (define_insn "stack_tie"
10914 [(set (mem:BLK (scratch))
10915 (unspec:BLK [(match_operand:SI 0 "s_register_operand" "rk")
10916 (match_operand:SI 1 "s_register_operand" "rk")]
10920 [(set_attr "length" "0")]
10923 ;; Similarly for the floating point registers
10924 (define_insn "*push_fp_multi"
10925 [(match_parallel 2 "multi_register_push"
10926 [(set (match_operand:BLK 0 "memory_operand" "=m")
10927 (unspec:BLK [(match_operand:XF 1 "f_register_operand" "f")]
10928 UNSPEC_PUSH_MULT))])]
10929 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
10934 sprintf (pattern, \"sfmfd\\t%%1, %d, [%%m0]!\", XVECLEN (operands[2], 0));
10935 output_asm_insn (pattern, operands);
10938 [(set_attr "type" "f_store")]
10941 ;; Special patterns for dealing with the constant pool
10943 (define_insn "align_4"
10944 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN)]
10947 assemble_align (32);
10952 (define_insn "align_8"
10953 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN8)]
10956 assemble_align (64);
10961 (define_insn "consttable_end"
10962 [(unspec_volatile [(const_int 0)] VUNSPEC_POOL_END)]
10965 making_const_table = FALSE;
10970 (define_insn "consttable_1"
10971 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_1)]
10974 making_const_table = TRUE;
10975 assemble_integer (operands[0], 1, BITS_PER_WORD, 1);
10976 assemble_zeros (3);
10979 [(set_attr "length" "4")]
10982 (define_insn "consttable_2"
10983 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_2)]
10986 making_const_table = TRUE;
10987 gcc_assert (GET_MODE_CLASS (GET_MODE (operands[0])) != MODE_FLOAT);
10988 assemble_integer (operands[0], 2, BITS_PER_WORD, 1);
10989 assemble_zeros (2);
10992 [(set_attr "length" "4")]
10995 (define_insn "consttable_4"
10996 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_4)]
11000 rtx x = operands[0];
11001 making_const_table = TRUE;
11002 switch (GET_MODE_CLASS (GET_MODE (x)))
11005 if (GET_MODE (x) == HFmode)
11006 arm_emit_fp16_const (x);
11010 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
11011 assemble_real (r, GET_MODE (x), BITS_PER_WORD);
11015 /* XXX: Sometimes gcc does something really dumb and ends up with
11016 a HIGH in a constant pool entry, usually because it's trying to
11017 load into a VFP register. We know this will always be used in
11018 combination with a LO_SUM which ignores the high bits, so just
11019 strip off the HIGH. */
11020 if (GET_CODE (x) == HIGH)
11022 assemble_integer (x, 4, BITS_PER_WORD, 1);
11023 mark_symbol_refs_as_used (x);
11028 [(set_attr "length" "4")]
11031 (define_insn "consttable_8"
11032 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_8)]
11036 making_const_table = TRUE;
11037 switch (GET_MODE_CLASS (GET_MODE (operands[0])))
11042 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[0]);
11043 assemble_real (r, GET_MODE (operands[0]), BITS_PER_WORD);
11047 assemble_integer (operands[0], 8, BITS_PER_WORD, 1);
11052 [(set_attr "length" "8")]
11055 (define_insn "consttable_16"
11056 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_16)]
11060 making_const_table = TRUE;
11061 switch (GET_MODE_CLASS (GET_MODE (operands[0])))
11066 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[0]);
11067 assemble_real (r, GET_MODE (operands[0]), BITS_PER_WORD);
11071 assemble_integer (operands[0], 16, BITS_PER_WORD, 1);
11076 [(set_attr "length" "16")]
11079 ;; Miscellaneous Thumb patterns
11081 (define_expand "tablejump"
11082 [(parallel [(set (pc) (match_operand:SI 0 "register_operand" ""))
11083 (use (label_ref (match_operand 1 "" "")))])]
11088 /* Hopefully, CSE will eliminate this copy. */
11089 rtx reg1 = copy_addr_to_reg (gen_rtx_LABEL_REF (Pmode, operands[1]));
11090 rtx reg2 = gen_reg_rtx (SImode);
11092 emit_insn (gen_addsi3 (reg2, operands[0], reg1));
11093 operands[0] = reg2;
11098 ;; NB never uses BX.
11099 (define_insn "*thumb1_tablejump"
11100 [(set (pc) (match_operand:SI 0 "register_operand" "l*r"))
11101 (use (label_ref (match_operand 1 "" "")))]
11104 [(set_attr "length" "2")]
11107 ;; V5 Instructions,
11109 (define_insn "clzsi2"
11110 [(set (match_operand:SI 0 "s_register_operand" "=r")
11111 (clz:SI (match_operand:SI 1 "s_register_operand" "r")))]
11112 "TARGET_32BIT && arm_arch5"
11114 [(set_attr "predicable" "yes")
11115 (set_attr "insn" "clz")])
11117 (define_insn "rbitsi2"
11118 [(set (match_operand:SI 0 "s_register_operand" "=r")
11119 (unspec:SI [(match_operand:SI 1 "s_register_operand" "r")] UNSPEC_RBIT))]
11120 "TARGET_32BIT && arm_arch_thumb2"
11122 [(set_attr "predicable" "yes")
11123 (set_attr "insn" "clz")])
11125 (define_expand "ctzsi2"
11126 [(set (match_operand:SI 0 "s_register_operand" "")
11127 (ctz:SI (match_operand:SI 1 "s_register_operand" "")))]
11128 "TARGET_32BIT && arm_arch_thumb2"
11131 rtx tmp = gen_reg_rtx (SImode);
11132 emit_insn (gen_rbitsi2 (tmp, operands[1]));
11133 emit_insn (gen_clzsi2 (operands[0], tmp));
11139 ;; V5E instructions.
11141 (define_insn "prefetch"
11142 [(prefetch (match_operand:SI 0 "address_operand" "p")
11143 (match_operand:SI 1 "" "")
11144 (match_operand:SI 2 "" ""))]
11145 "TARGET_32BIT && arm_arch5e"
11148 ;; General predication pattern
11151 [(match_operator 0 "arm_comparison_operator"
11152 [(match_operand 1 "cc_register" "")
11158 (define_insn "prologue_use"
11159 [(unspec:SI [(match_operand:SI 0 "register_operand" "")] UNSPEC_PROLOGUE_USE)]
11161 "%@ %0 needed for prologue"
11162 [(set_attr "length" "0")]
11166 ;; Patterns for exception handling
11168 (define_expand "eh_return"
11169 [(use (match_operand 0 "general_operand" ""))]
11174 emit_insn (gen_arm_eh_return (operands[0]));
11176 emit_insn (gen_thumb_eh_return (operands[0]));
11181 ;; We can't expand this before we know where the link register is stored.
11182 (define_insn_and_split "arm_eh_return"
11183 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "r")]
11185 (clobber (match_scratch:SI 1 "=&r"))]
11188 "&& reload_completed"
11192 arm_set_return_address (operands[0], operands[1]);
11197 (define_insn_and_split "thumb_eh_return"
11198 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "l")]
11200 (clobber (match_scratch:SI 1 "=&l"))]
11203 "&& reload_completed"
11207 thumb_set_return_address (operands[0], operands[1]);
11215 (define_insn "load_tp_hard"
11216 [(set (match_operand:SI 0 "register_operand" "=r")
11217 (unspec:SI [(const_int 0)] UNSPEC_TLS))]
11219 "mrc%?\\tp15, 0, %0, c13, c0, 3\\t@ load_tp_hard"
11220 [(set_attr "predicable" "yes")]
11223 ;; Doesn't clobber R1-R3. Must use r0 for the first operand.
11224 (define_insn "load_tp_soft"
11225 [(set (reg:SI 0) (unspec:SI [(const_int 0)] UNSPEC_TLS))
11226 (clobber (reg:SI LR_REGNUM))
11227 (clobber (reg:SI IP_REGNUM))
11228 (clobber (reg:CC CC_REGNUM))]
11230 "bl\\t__aeabi_read_tp\\t@ load_tp_soft"
11231 [(set_attr "conds" "clob")]
11234 (define_insn "*arm_movtas_ze"
11235 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
11238 (match_operand:SI 1 "const_int_operand" ""))]
11241 [(set_attr "predicable" "yes")
11242 (set_attr "length" "4")]
11245 (define_insn "arm_rev"
11246 [(set (match_operand:SI 0 "s_register_operand" "=r")
11247 (bswap:SI (match_operand:SI 1 "s_register_operand" "r")))]
11248 "TARGET_EITHER && arm_arch6"
11250 [(set (attr "length")
11251 (if_then_else (eq_attr "is_thumb" "yes")
11256 (define_expand "arm_legacy_rev"
11257 [(set (match_operand:SI 2 "s_register_operand" "")
11258 (xor:SI (rotatert:SI (match_operand:SI 1 "s_register_operand" "")
11262 (lshiftrt:SI (match_dup 2)
11264 (set (match_operand:SI 3 "s_register_operand" "")
11265 (rotatert:SI (match_dup 1)
11268 (and:SI (match_dup 2)
11269 (const_int -65281)))
11270 (set (match_operand:SI 0 "s_register_operand" "")
11271 (xor:SI (match_dup 3)
11277 ;; Reuse temporaries to keep register pressure down.
11278 (define_expand "thumb_legacy_rev"
11279 [(set (match_operand:SI 2 "s_register_operand" "")
11280 (ashift:SI (match_operand:SI 1 "s_register_operand" "")
11282 (set (match_operand:SI 3 "s_register_operand" "")
11283 (lshiftrt:SI (match_dup 1)
11286 (ior:SI (match_dup 3)
11288 (set (match_operand:SI 4 "s_register_operand" "")
11290 (set (match_operand:SI 5 "s_register_operand" "")
11291 (rotatert:SI (match_dup 1)
11294 (ashift:SI (match_dup 5)
11297 (lshiftrt:SI (match_dup 5)
11300 (ior:SI (match_dup 5)
11303 (rotatert:SI (match_dup 5)
11305 (set (match_operand:SI 0 "s_register_operand" "")
11306 (ior:SI (match_dup 5)
11312 (define_expand "bswapsi2"
11313 [(set (match_operand:SI 0 "s_register_operand" "=r")
11314 (bswap:SI (match_operand:SI 1 "s_register_operand" "r")))]
11319 if (!optimize_size)
11321 rtx op2 = gen_reg_rtx (SImode);
11322 rtx op3 = gen_reg_rtx (SImode);
11326 rtx op4 = gen_reg_rtx (SImode);
11327 rtx op5 = gen_reg_rtx (SImode);
11329 emit_insn (gen_thumb_legacy_rev (operands[0], operands[1],
11330 op2, op3, op4, op5));
11334 emit_insn (gen_arm_legacy_rev (operands[0], operands[1],
11346 ;; Load the FPA co-processor patterns
11348 ;; Load the Maverick co-processor patterns
11349 (include "cirrus.md")
11350 ;; Vector bits common to IWMMXT and Neon
11351 (include "vec-common.md")
11352 ;; Load the Intel Wireless Multimedia Extension patterns
11353 (include "iwmmxt.md")
11354 ;; Load the VFP co-processor patterns
11356 ;; Thumb-2 patterns
11357 (include "thumb2.md")
11359 (include "neon.md")