1 ;;- Machine description for ARM for GNU compiler
2 ;; Copyright 1991, 1993, 1994, 1995, 1996, 1996, 1997, 1998, 1999, 2000,
3 ;; 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
4 ;; Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
5 ;; and Martin Simmons (@harleqn.co.uk).
6 ;; More major hacks by Richard Earnshaw (rearnsha@arm.com).
8 ;; This file is part of GCC.
10 ;; GCC is free software; you can redistribute it and/or modify it
11 ;; under the terms of the GNU General Public License as published
12 ;; by the Free Software Foundation; either version 2, or (at your
13 ;; option) any later version.
15 ;; GCC is distributed in the hope that it will be useful, but WITHOUT
16 ;; ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
17 ;; or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
18 ;; License for more details.
20 ;; You should have received a copy of the GNU General Public License
21 ;; along with GCC; see the file COPYING. If not, write to
22 ;; the Free Software Foundation, 51 Franklin Street, Fifth Floor,
23 ;; Boston, MA 02110-1301, USA.
25 ;;- See file "rtl.def" for documentation on define_insn, match_*, et. al.
28 ;;---------------------------------------------------------------------------
33 [(R0_REGNUM 0) ; First CORE register
34 (IP_REGNUM 12) ; Scratch register
35 (SP_REGNUM 13) ; Stack pointer
36 (LR_REGNUM 14) ; Return address register
37 (PC_REGNUM 15) ; Program counter
38 (CC_REGNUM 24) ; Condition code pseudo register
39 (LAST_ARM_REGNUM 15) ;
40 (FPA_F0_REGNUM 16) ; FIRST_FPA_REGNUM
41 (FPA_F7_REGNUM 23) ; LAST_FPA_REGNUM
44 ;; 3rd operand to select_dominance_cc_mode
53 ;; Note: sin and cos are no-longer used.
56 [(UNSPEC_SIN 0) ; `sin' operation (MODE_FLOAT):
57 ; operand 0 is the result,
58 ; operand 1 the parameter.
59 (UNPSEC_COS 1) ; `cos' operation (MODE_FLOAT):
60 ; operand 0 is the result,
61 ; operand 1 the parameter.
62 (UNSPEC_PUSH_MULT 2) ; `push multiple' operation:
63 ; operand 0 is the first register,
64 ; subsequent registers are in parallel (use ...)
66 (UNSPEC_PIC_SYM 3) ; A symbol that has been treated properly for pic
67 ; usage, that is, we will add the pic_register
68 ; value to it before trying to dereference it.
69 (UNSPEC_PIC_BASE 4) ; Adding the PC value to the offset to the
70 ; GLOBAL_OFFSET_TABLE. The operation is fully
71 ; described by the RTL but must be wrapped to
72 ; prevent combine from trying to rip it apart.
73 (UNSPEC_PRLG_STK 5) ; A special barrier that prevents frame accesses
74 ; being scheduled before the stack adjustment insn.
75 (UNSPEC_PROLOGUE_USE 6) ; As USE insns are not meaningful after reload,
76 ; this unspec is used to prevent the deletion of
77 ; instructions setting registers for EH handling
78 ; and stack frame generation. Operand 0 is the
80 (UNSPEC_CHECK_ARCH 7); Set CCs to indicate 26-bit or 32-bit mode.
81 (UNSPEC_WSHUFH 8) ; Used by the intrinsic form of the iWMMXt WSHUFH instruction.
82 (UNSPEC_WACC 9) ; Used by the intrinsic form of the iWMMXt WACC instruction.
83 (UNSPEC_TMOVMSK 10) ; Used by the intrinsic form of the iWMMXt TMOVMSK instruction.
84 (UNSPEC_WSAD 11) ; Used by the intrinsic form of the iWMMXt WSAD instruction.
85 (UNSPEC_WSADZ 12) ; Used by the intrinsic form of the iWMMXt WSADZ instruction.
86 (UNSPEC_WMACS 13) ; Used by the intrinsic form of the iWMMXt WMACS instruction.
87 (UNSPEC_WMACU 14) ; Used by the intrinsic form of the iWMMXt WMACU instruction.
88 (UNSPEC_WMACSZ 15) ; Used by the intrinsic form of the iWMMXt WMACSZ instruction.
89 (UNSPEC_WMACUZ 16) ; Used by the intrinsic form of the iWMMXt WMACUZ instruction.
90 (UNSPEC_CLRDI 17) ; Used by the intrinsic form of the iWMMXt CLRDI instruction.
91 (UNSPEC_WMADDS 18) ; Used by the intrinsic form of the iWMMXt WMADDS instruction.
92 (UNSPEC_WMADDU 19) ; Used by the intrinsic form of the iWMMXt WMADDU instruction.
96 ;; UNSPEC_VOLATILE Usage:
99 [(VUNSPEC_BLOCKAGE 0) ; `blockage' insn to prevent scheduling across an
101 (VUNSPEC_EPILOGUE 1) ; `epilogue' insn, used to represent any part of the
102 ; instruction epilogue sequence that isn't expanded
103 ; into normal RTL. Used for both normal and sibcall
105 (VUNSPEC_ALIGN 2) ; `align' insn. Used at the head of a minipool table
106 ; for inlined constants.
107 (VUNSPEC_POOL_END 3) ; `end-of-table'. Used to mark the end of a minipool
109 (VUNSPEC_POOL_1 4) ; `pool-entry(1)'. An entry in the constant pool for
111 (VUNSPEC_POOL_2 5) ; `pool-entry(2)'. An entry in the constant pool for
113 (VUNSPEC_POOL_4 6) ; `pool-entry(4)'. An entry in the constant pool for
115 (VUNSPEC_POOL_8 7) ; `pool-entry(8)'. An entry in the constant pool for
117 (VUNSPEC_TMRC 8) ; Used by the iWMMXt TMRC instruction.
118 (VUNSPEC_TMCR 9) ; Used by the iWMMXt TMCR instruction.
119 (VUNSPEC_ALIGN8 10) ; 8-byte alignment version of VUNSPEC_ALIGN
120 (VUNSPEC_WCMP_EQ 11) ; Used by the iWMMXt WCMPEQ instructions
121 (VUNSPEC_WCMP_GTU 12) ; Used by the iWMMXt WCMPGTU instructions
122 (VUNSPEC_WCMP_GT 13) ; Used by the iwMMXT WCMPGT instructions
123 (VUNSPEC_EH_RETURN 20); Use to override the return address for exception
128 ;;---------------------------------------------------------------------------
131 ; IS_THUMB is set to 'yes' when we are generating Thumb code, and 'no' when
132 ; generating ARM code. This is used to control the length of some insn
133 ; patterns that share the same RTL in both ARM and Thumb code.
134 (define_attr "is_thumb" "no,yes" (const (symbol_ref "thumb_code")))
136 ; IS_STRONGARM is set to 'yes' when compiling for StrongARM, it affects
137 ; scheduling decisions for the load unit and the multiplier.
138 (define_attr "is_strongarm" "no,yes" (const (symbol_ref "arm_tune_strongarm")))
140 ; IS_XSCALE is set to 'yes' when compiling for XScale.
141 (define_attr "is_xscale" "no,yes" (const (symbol_ref "arm_tune_xscale")))
143 ;; Operand number of an input operand that is shifted. Zero if the
144 ;; given instruction does not shift one of its input operands.
145 (define_attr "shift" "" (const_int 0))
147 ; Floating Point Unit. If we only have floating point emulation, then there
148 ; is no point in scheduling the floating point insns. (Well, for best
149 ; performance we should try and group them together).
150 (define_attr "fpu" "none,fpa,fpe2,fpe3,maverick,vfp"
151 (const (symbol_ref "arm_fpu_attr")))
153 ; LENGTH of an instruction (in bytes)
154 (define_attr "length" "" (const_int 4))
156 ; POOL_RANGE is how far away from a constant pool entry that this insn
157 ; can be placed. If the distance is zero, then this insn will never
158 ; reference the pool.
159 ; NEG_POOL_RANGE is nonzero for insns that can reference a constant pool entry
160 ; before its address.
161 (define_attr "pool_range" "" (const_int 0))
162 (define_attr "neg_pool_range" "" (const_int 0))
164 ; An assembler sequence may clobber the condition codes without us knowing.
165 ; If such an insn references the pool, then we have no way of knowing how,
166 ; so use the most conservative value for pool_range.
167 (define_asm_attributes
168 [(set_attr "conds" "clob")
169 (set_attr "length" "4")
170 (set_attr "pool_range" "250")])
172 ;; The instruction used to implement a particular pattern. This
173 ;; information is used by pipeline descriptions to provide accurate
174 ;; scheduling information.
177 "smulxy,smlaxy,smlalxy,smulwy,smlawx,mul,muls,mla,mlas,umull,umulls,umlal,umlals,smull,smulls,smlal,smlals,smlawy,smuad,smuadx,smlad,smladx,smusd,smusdx,smlsd,smlsdx,smmul,smmulr,other"
178 (const_string "other"))
180 ; TYPE attribute is used to detect floating point instructions which, if
181 ; running on a co-processor can run in parallel with other, basic instructions
182 ; If write-buffer scheduling is enabled then it can also be used in the
183 ; scheduling of writes.
185 ; Classification of each insn
186 ; alu any alu instruction that doesn't hit memory or fp
187 ; regs or have a shifted source operand
188 ; alu_shift any data instruction that doesn't hit memory or fp
189 ; regs, but has a source operand shifted by a constant
190 ; alu_shift_reg any data instruction that doesn't hit memory or fp
191 ; regs, but has a source operand shifted by a register value
192 ; mult a multiply instruction
193 ; block blockage insn, this blocks all functional units
194 ; float a floating point arithmetic operation (subject to expansion)
195 ; fdivd DFmode floating point division
196 ; fdivs SFmode floating point division
197 ; fmul Floating point multiply
198 ; ffmul Fast floating point multiply
199 ; farith Floating point arithmetic (4 cycle)
200 ; ffarith Fast floating point arithmetic (2 cycle)
201 ; float_em a floating point arithmetic operation that is normally emulated
202 ; even on a machine with an fpa.
203 ; f_load a floating point load from memory
204 ; f_store a floating point store to memory
205 ; f_load[sd] single/double load from memory
206 ; f_store[sd] single/double store to memory
207 ; f_flag a transfer of co-processor flags to the CPSR
208 ; f_mem_r a transfer of a floating point register to a real reg via mem
209 ; r_mem_f the reverse of f_mem_r
210 ; f_2_r fast transfer float to arm (no memory needed)
211 ; r_2_f fast transfer arm to float
212 ; f_cvt convert floating<->integral
214 ; call a subroutine call
215 ; load_byte load byte(s) from memory to arm registers
216 ; load1 load 1 word from memory to arm registers
217 ; load2 load 2 words from memory to arm registers
218 ; load3 load 3 words from memory to arm registers
219 ; load4 load 4 words from memory to arm registers
220 ; store store 1 word to memory from arm registers
221 ; store2 store 2 words
222 ; store3 store 3 words
223 ; store4 store 4 (or more) words
224 ; Additions for Cirrus Maverick co-processor:
225 ; mav_farith Floating point arithmetic (4 cycle)
226 ; mav_dmult Double multiplies (7 cycle)
229 "alu,alu_shift,alu_shift_reg,mult,block,float,fdivx,fdivd,fdivs,fmul,ffmul,farith,ffarith,f_flag,float_em,f_load,f_store,f_loads,f_loadd,f_stores,f_stored,f_mem_r,r_mem_f,f_2_r,r_2_f,f_cvt,branch,call,load_byte,load1,load2,load3,load4,store1,store2,store3,store4,mav_farith,mav_dmult"
231 (eq_attr "insn" "smulxy,smlaxy,smlalxy,smulwy,smlawx,mul,muls,mla,mlas,umull,umulls,umlal,umlals,smull,smulls,smlal,smlals")
232 (const_string "mult")
233 (const_string "alu")))
235 ; Load scheduling, set from the arm_ld_sched variable
236 ; initialized by arm_override_options()
237 (define_attr "ldsched" "no,yes" (const (symbol_ref "arm_ld_sched")))
239 ; condition codes: this one is used by final_prescan_insn to speed up
240 ; conditionalizing instructions. It saves having to scan the rtl to see if
241 ; it uses or alters the condition codes.
243 ; USE means that the condition codes are used by the insn in the process of
244 ; outputting code, this means (at present) that we can't use the insn in
247 ; SET means that the purpose of the insn is to set the condition codes in a
248 ; well defined manner.
250 ; CLOB means that the condition codes are altered in an undefined manner, if
251 ; they are altered at all
253 ; JUMP_CLOB is used when the condition cannot be represented by a single
254 ; instruction (UNEQ and LTGT). These cannot be predicated.
256 ; NOCOND means that the condition codes are neither altered nor affect the
257 ; output of this insn
259 (define_attr "conds" "use,set,clob,jump_clob,nocond"
260 (if_then_else (eq_attr "type" "call")
261 (const_string "clob")
262 (const_string "nocond")))
264 ; Predicable means that the insn can be conditionally executed based on
265 ; an automatically added predicate (additional patterns are generated by
266 ; gen...). We default to 'no' because no Thumb patterns match this rule
267 ; and not all ARM patterns do.
268 (define_attr "predicable" "no,yes" (const_string "no"))
270 ; Only model the write buffer for ARM6 and ARM7. Earlier processors don't
271 ; have one. Later ones, such as StrongARM, have write-back caches, so don't
272 ; suffer blockages enough to warrant modelling this (and it can adversely
273 ; affect the schedule).
274 (define_attr "model_wbuf" "no,yes" (const (symbol_ref "arm_tune_wbuf")))
276 ; WRITE_CONFLICT implies that a read following an unrelated write is likely
277 ; to stall the processor. Used with model_wbuf above.
278 (define_attr "write_conflict" "no,yes"
279 (if_then_else (eq_attr "type"
280 "block,float_em,f_load,f_store,f_mem_r,r_mem_f,call,load1")
282 (const_string "no")))
284 ; Classify the insns into those that take one cycle and those that take more
285 ; than one on the main cpu execution unit.
286 (define_attr "core_cycles" "single,multi"
287 (if_then_else (eq_attr "type"
288 "alu,alu_shift,float,fdivx,fdivd,fdivs,fmul,ffmul,farith,ffarith")
289 (const_string "single")
290 (const_string "multi")))
292 ;; FAR_JUMP is "yes" if a BL instruction is used to generate a branch to a
293 ;; distant label. Only applicable to Thumb code.
294 (define_attr "far_jump" "yes,no" (const_string "no"))
297 ;;---------------------------------------------------------------------------
300 ; A list of modes that are exactly 64 bits in size. We use this to expand
301 ; some splits that are the same for all modes when operating on ARM
303 (define_mode_macro ANY64 [DI DF V8QI V4HI V2SI V2SF])
305 ;;---------------------------------------------------------------------------
308 (include "predicates.md")
310 ;;---------------------------------------------------------------------------
311 ;; Pipeline descriptions
313 ;; Processor type. This is created automatically from arm-cores.def.
314 (include "arm-tune.md")
316 ;; True if the generic scheduling description should be used.
318 (define_attr "generic_sched" "yes,no"
320 (eq_attr "tune" "arm926ejs,arm1020e,arm1026ejs,arm1136js,arm1136jfs")
322 (const_string "yes"))))
324 (define_attr "generic_vfp" "yes,no"
326 (and (eq_attr "fpu" "vfp")
327 (eq_attr "tune" "!arm1020e,arm1022e"))
329 (const_string "no"))))
331 (include "arm-generic.md")
332 (include "arm926ejs.md")
333 (include "arm1020e.md")
334 (include "arm1026ejs.md")
335 (include "arm1136jfs.md")
338 ;;---------------------------------------------------------------------------
343 ;; Note: For DImode insns, there is normally no reason why operands should
344 ;; not be in the same register, what we don't want is for something being
345 ;; written to partially overlap something that is an input.
346 ;; Cirrus 64bit additions should not be split because we have a native
347 ;; 64bit addition instructions.
349 (define_expand "adddi3"
351 [(set (match_operand:DI 0 "s_register_operand" "")
352 (plus:DI (match_operand:DI 1 "s_register_operand" "")
353 (match_operand:DI 2 "s_register_operand" "")))
354 (clobber (reg:CC CC_REGNUM))])]
357 if (TARGET_HARD_FLOAT && TARGET_MAVERICK)
359 if (!cirrus_fp_register (operands[0], DImode))
360 operands[0] = force_reg (DImode, operands[0]);
361 if (!cirrus_fp_register (operands[1], DImode))
362 operands[1] = force_reg (DImode, operands[1]);
363 emit_insn (gen_cirrus_adddi3 (operands[0], operands[1], operands[2]));
369 if (GET_CODE (operands[1]) != REG)
370 operands[1] = force_reg (SImode, operands[1]);
371 if (GET_CODE (operands[2]) != REG)
372 operands[2] = force_reg (SImode, operands[2]);
377 (define_insn "*thumb_adddi3"
378 [(set (match_operand:DI 0 "register_operand" "=l")
379 (plus:DI (match_operand:DI 1 "register_operand" "%0")
380 (match_operand:DI 2 "register_operand" "l")))
381 (clobber (reg:CC CC_REGNUM))
384 "add\\t%Q0, %Q0, %Q2\;adc\\t%R0, %R0, %R2"
385 [(set_attr "length" "4")]
388 (define_insn_and_split "*arm_adddi3"
389 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
390 (plus:DI (match_operand:DI 1 "s_register_operand" "%0, 0")
391 (match_operand:DI 2 "s_register_operand" "r, 0")))
392 (clobber (reg:CC CC_REGNUM))]
393 "TARGET_ARM && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)"
395 "TARGET_ARM && reload_completed"
396 [(parallel [(set (reg:CC_C CC_REGNUM)
397 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
399 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
400 (set (match_dup 3) (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
401 (plus:SI (match_dup 4) (match_dup 5))))]
404 operands[3] = gen_highpart (SImode, operands[0]);
405 operands[0] = gen_lowpart (SImode, operands[0]);
406 operands[4] = gen_highpart (SImode, operands[1]);
407 operands[1] = gen_lowpart (SImode, operands[1]);
408 operands[5] = gen_highpart (SImode, operands[2]);
409 operands[2] = gen_lowpart (SImode, operands[2]);
411 [(set_attr "conds" "clob")
412 (set_attr "length" "8")]
415 (define_insn_and_split "*adddi_sesidi_di"
416 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
417 (plus:DI (sign_extend:DI
418 (match_operand:SI 2 "s_register_operand" "r,r"))
419 (match_operand:DI 1 "s_register_operand" "r,0")))
420 (clobber (reg:CC CC_REGNUM))]
421 "TARGET_ARM && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)"
423 "TARGET_ARM && reload_completed"
424 [(parallel [(set (reg:CC_C CC_REGNUM)
425 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
427 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
428 (set (match_dup 3) (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
429 (plus:SI (ashiftrt:SI (match_dup 2)
434 operands[3] = gen_highpart (SImode, operands[0]);
435 operands[0] = gen_lowpart (SImode, operands[0]);
436 operands[4] = gen_highpart (SImode, operands[1]);
437 operands[1] = gen_lowpart (SImode, operands[1]);
438 operands[2] = gen_lowpart (SImode, operands[2]);
440 [(set_attr "conds" "clob")
441 (set_attr "length" "8")]
444 (define_insn_and_split "*adddi_zesidi_di"
445 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
446 (plus:DI (zero_extend:DI
447 (match_operand:SI 2 "s_register_operand" "r,r"))
448 (match_operand:DI 1 "s_register_operand" "r,0")))
449 (clobber (reg:CC CC_REGNUM))]
450 "TARGET_ARM && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)"
452 "TARGET_ARM && reload_completed"
453 [(parallel [(set (reg:CC_C CC_REGNUM)
454 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
456 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
457 (set (match_dup 3) (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
458 (plus:SI (match_dup 4) (const_int 0))))]
461 operands[3] = gen_highpart (SImode, operands[0]);
462 operands[0] = gen_lowpart (SImode, operands[0]);
463 operands[4] = gen_highpart (SImode, operands[1]);
464 operands[1] = gen_lowpart (SImode, operands[1]);
465 operands[2] = gen_lowpart (SImode, operands[2]);
467 [(set_attr "conds" "clob")
468 (set_attr "length" "8")]
471 (define_expand "addsi3"
472 [(set (match_operand:SI 0 "s_register_operand" "")
473 (plus:SI (match_operand:SI 1 "s_register_operand" "")
474 (match_operand:SI 2 "reg_or_int_operand" "")))]
477 if (TARGET_ARM && GET_CODE (operands[2]) == CONST_INT)
479 arm_split_constant (PLUS, SImode, NULL_RTX,
480 INTVAL (operands[2]), operands[0], operands[1],
481 optimize && !no_new_pseudos);
487 ; If there is a scratch available, this will be faster than synthesizing the
490 [(match_scratch:SI 3 "r")
491 (set (match_operand:SI 0 "arm_general_register_operand" "")
492 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
493 (match_operand:SI 2 "const_int_operand" "")))]
495 !(const_ok_for_arm (INTVAL (operands[2]))
496 || const_ok_for_arm (-INTVAL (operands[2])))
497 && const_ok_for_arm (~INTVAL (operands[2]))"
498 [(set (match_dup 3) (match_dup 2))
499 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))]
503 (define_insn_and_split "*arm_addsi3"
504 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
505 (plus:SI (match_operand:SI 1 "s_register_operand" "%r,r,r")
506 (match_operand:SI 2 "reg_or_int_operand" "rI,L,?n")))]
513 GET_CODE (operands[2]) == CONST_INT
514 && !(const_ok_for_arm (INTVAL (operands[2]))
515 || const_ok_for_arm (-INTVAL (operands[2])))"
516 [(clobber (const_int 0))]
518 arm_split_constant (PLUS, SImode, curr_insn,
519 INTVAL (operands[2]), operands[0],
523 [(set_attr "length" "4,4,16")
524 (set_attr "predicable" "yes")]
527 ;; Register group 'k' is a single register group containing only the stack
528 ;; register. Trying to reload it will always fail catastrophically,
529 ;; so never allow those alternatives to match if reloading is needed.
531 (define_insn "*thumb_addsi3"
532 [(set (match_operand:SI 0 "register_operand" "=l,l,l,*r,*h,l,!k")
533 (plus:SI (match_operand:SI 1 "register_operand" "%0,0,l,*0,*0,!k,!k")
534 (match_operand:SI 2 "nonmemory_operand" "I,J,lL,*h,*r,!M,!O")))]
537 static const char * const asms[] =
539 \"add\\t%0, %0, %2\",
540 \"sub\\t%0, %0, #%n2\",
541 \"add\\t%0, %1, %2\",
542 \"add\\t%0, %0, %2\",
543 \"add\\t%0, %0, %2\",
544 \"add\\t%0, %1, %2\",
547 if ((which_alternative == 2 || which_alternative == 6)
548 && GET_CODE (operands[2]) == CONST_INT
549 && INTVAL (operands[2]) < 0)
550 return \"sub\\t%0, %1, #%n2\";
551 return asms[which_alternative];
553 [(set_attr "length" "2")]
556 ;; Reloading and elimination of the frame pointer can
557 ;; sometimes cause this optimization to be missed.
559 [(set (match_operand:SI 0 "arm_general_register_operand" "")
560 (match_operand:SI 1 "const_int_operand" ""))
562 (plus:SI (match_dup 0) (reg:SI SP_REGNUM)))]
564 && (unsigned HOST_WIDE_INT) (INTVAL (operands[1])) < 1024
565 && (INTVAL (operands[1]) & 3) == 0"
566 [(set (match_dup 0) (plus:SI (reg:SI SP_REGNUM) (match_dup 1)))]
570 (define_insn "*addsi3_compare0"
571 [(set (reg:CC_NOOV CC_REGNUM)
573 (plus:SI (match_operand:SI 1 "s_register_operand" "r, r")
574 (match_operand:SI 2 "arm_add_operand" "rI,L"))
576 (set (match_operand:SI 0 "s_register_operand" "=r,r")
577 (plus:SI (match_dup 1) (match_dup 2)))]
581 sub%?s\\t%0, %1, #%n2"
582 [(set_attr "conds" "set")]
585 (define_insn "*addsi3_compare0_scratch"
586 [(set (reg:CC_NOOV CC_REGNUM)
588 (plus:SI (match_operand:SI 0 "s_register_operand" "r, r")
589 (match_operand:SI 1 "arm_add_operand" "rI,L"))
595 [(set_attr "conds" "set")]
598 (define_insn "*compare_negsi_si"
599 [(set (reg:CC_Z CC_REGNUM)
601 (neg:SI (match_operand:SI 0 "s_register_operand" "r"))
602 (match_operand:SI 1 "s_register_operand" "r")))]
605 [(set_attr "conds" "set")]
608 ;; This is the canonicalization of addsi3_compare0_for_combiner when the
609 ;; addend is a constant.
610 (define_insn "*cmpsi2_addneg"
611 [(set (reg:CC CC_REGNUM)
613 (match_operand:SI 1 "s_register_operand" "r,r")
614 (match_operand:SI 2 "arm_addimm_operand" "I,L")))
615 (set (match_operand:SI 0 "s_register_operand" "=r,r")
616 (plus:SI (match_dup 1)
617 (match_operand:SI 3 "arm_addimm_operand" "L,I")))]
618 "TARGET_ARM && INTVAL (operands[2]) == -INTVAL (operands[3])"
621 add%?s\\t%0, %1, #%n2"
622 [(set_attr "conds" "set")]
625 ;; Convert the sequence
627 ;; cmn rd, #1 (equivalent to cmp rd, #-1)
631 ;; bcs dest ((unsigned)rn >= 1)
632 ;; similarly for the beq variant using bcc.
633 ;; This is a common looping idiom (while (n--))
635 [(set (match_operand:SI 0 "arm_general_register_operand" "")
636 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
638 (set (match_operand 2 "cc_register" "")
639 (compare (match_dup 0) (const_int -1)))
641 (if_then_else (match_operator 3 "equality_operator"
642 [(match_dup 2) (const_int 0)])
643 (match_operand 4 "" "")
644 (match_operand 5 "" "")))]
645 "TARGET_ARM && peep2_reg_dead_p (3, operands[2])"
649 (match_dup 1) (const_int 1)))
650 (set (match_dup 0) (plus:SI (match_dup 1) (const_int -1)))])
652 (if_then_else (match_op_dup 3 [(match_dup 2) (const_int 0)])
655 "operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
656 operands[3] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
659 operands[2], const0_rtx);"
662 ;; The next four insns work because they compare the result with one of
663 ;; the operands, and we know that the use of the condition code is
664 ;; either GEU or LTU, so we can use the carry flag from the addition
665 ;; instead of doing the compare a second time.
666 (define_insn "*addsi3_compare_op1"
667 [(set (reg:CC_C CC_REGNUM)
669 (plus:SI (match_operand:SI 1 "s_register_operand" "r,r")
670 (match_operand:SI 2 "arm_add_operand" "rI,L"))
672 (set (match_operand:SI 0 "s_register_operand" "=r,r")
673 (plus:SI (match_dup 1) (match_dup 2)))]
677 sub%?s\\t%0, %1, #%n2"
678 [(set_attr "conds" "set")]
681 (define_insn "*addsi3_compare_op2"
682 [(set (reg:CC_C CC_REGNUM)
684 (plus:SI (match_operand:SI 1 "s_register_operand" "r,r")
685 (match_operand:SI 2 "arm_add_operand" "rI,L"))
687 (set (match_operand:SI 0 "s_register_operand" "=r,r")
688 (plus:SI (match_dup 1) (match_dup 2)))]
692 sub%?s\\t%0, %1, #%n2"
693 [(set_attr "conds" "set")]
696 (define_insn "*compare_addsi2_op0"
697 [(set (reg:CC_C CC_REGNUM)
699 (plus:SI (match_operand:SI 0 "s_register_operand" "r,r")
700 (match_operand:SI 1 "arm_add_operand" "rI,L"))
706 [(set_attr "conds" "set")]
709 (define_insn "*compare_addsi2_op1"
710 [(set (reg:CC_C CC_REGNUM)
712 (plus:SI (match_operand:SI 0 "s_register_operand" "r,r")
713 (match_operand:SI 1 "arm_add_operand" "rI,L"))
719 [(set_attr "conds" "set")]
722 (define_insn "*addsi3_carryin"
723 [(set (match_operand:SI 0 "s_register_operand" "=r")
724 (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
725 (plus:SI (match_operand:SI 1 "s_register_operand" "r")
726 (match_operand:SI 2 "arm_rhs_operand" "rI"))))]
729 [(set_attr "conds" "use")]
732 (define_insn "*addsi3_carryin_shift"
733 [(set (match_operand:SI 0 "s_register_operand" "=r")
734 (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
736 (match_operator:SI 2 "shift_operator"
737 [(match_operand:SI 3 "s_register_operand" "r")
738 (match_operand:SI 4 "reg_or_int_operand" "rM")])
739 (match_operand:SI 1 "s_register_operand" "r"))))]
741 "adc%?\\t%0, %1, %3%S2"
742 [(set_attr "conds" "use")
743 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
744 (const_string "alu_shift")
745 (const_string "alu_shift_reg")))]
748 (define_insn "*addsi3_carryin_alt1"
749 [(set (match_operand:SI 0 "s_register_operand" "=r")
750 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "r")
751 (match_operand:SI 2 "arm_rhs_operand" "rI"))
752 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
755 [(set_attr "conds" "use")]
758 (define_insn "*addsi3_carryin_alt2"
759 [(set (match_operand:SI 0 "s_register_operand" "=r")
760 (plus:SI (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
761 (match_operand:SI 1 "s_register_operand" "r"))
762 (match_operand:SI 2 "arm_rhs_operand" "rI")))]
765 [(set_attr "conds" "use")]
768 (define_insn "*addsi3_carryin_alt3"
769 [(set (match_operand:SI 0 "s_register_operand" "=r")
770 (plus:SI (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
771 (match_operand:SI 2 "arm_rhs_operand" "rI"))
772 (match_operand:SI 1 "s_register_operand" "r")))]
775 [(set_attr "conds" "use")]
778 (define_insn "incscc"
779 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
780 (plus:SI (match_operator:SI 2 "arm_comparison_operator"
781 [(match_operand:CC 3 "cc_register" "") (const_int 0)])
782 (match_operand:SI 1 "s_register_operand" "0,?r")))]
786 mov%D2\\t%0, %1\;add%d2\\t%0, %1, #1"
787 [(set_attr "conds" "use")
788 (set_attr "length" "4,8")]
791 ; transform ((x << y) - 1) to ~(~(x-1) << y) Where X is a constant.
793 [(set (match_operand:SI 0 "s_register_operand" "")
794 (plus:SI (ashift:SI (match_operand:SI 1 "const_int_operand" "")
795 (match_operand:SI 2 "s_register_operand" ""))
797 (clobber (match_operand:SI 3 "s_register_operand" ""))]
799 [(set (match_dup 3) (match_dup 1))
800 (set (match_dup 0) (not:SI (ashift:SI (match_dup 3) (match_dup 2))))]
802 operands[1] = GEN_INT (~(INTVAL (operands[1]) - 1));
805 (define_expand "addsf3"
806 [(set (match_operand:SF 0 "s_register_operand" "")
807 (plus:SF (match_operand:SF 1 "s_register_operand" "")
808 (match_operand:SF 2 "arm_float_add_operand" "")))]
809 "TARGET_ARM && TARGET_HARD_FLOAT"
812 && !cirrus_fp_register (operands[2], SFmode))
813 operands[2] = force_reg (SFmode, operands[2]);
816 (define_expand "adddf3"
817 [(set (match_operand:DF 0 "s_register_operand" "")
818 (plus:DF (match_operand:DF 1 "s_register_operand" "")
819 (match_operand:DF 2 "arm_float_add_operand" "")))]
820 "TARGET_ARM && TARGET_HARD_FLOAT"
823 && !cirrus_fp_register (operands[2], DFmode))
824 operands[2] = force_reg (DFmode, operands[2]);
827 (define_expand "subdi3"
829 [(set (match_operand:DI 0 "s_register_operand" "")
830 (minus:DI (match_operand:DI 1 "s_register_operand" "")
831 (match_operand:DI 2 "s_register_operand" "")))
832 (clobber (reg:CC CC_REGNUM))])]
835 if (TARGET_HARD_FLOAT && TARGET_MAVERICK
837 && cirrus_fp_register (operands[0], DImode)
838 && cirrus_fp_register (operands[1], DImode))
840 emit_insn (gen_cirrus_subdi3 (operands[0], operands[1], operands[2]));
846 if (GET_CODE (operands[1]) != REG)
847 operands[1] = force_reg (SImode, operands[1]);
848 if (GET_CODE (operands[2]) != REG)
849 operands[2] = force_reg (SImode, operands[2]);
854 (define_insn "*arm_subdi3"
855 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r,&r")
856 (minus:DI (match_operand:DI 1 "s_register_operand" "0,r,0")
857 (match_operand:DI 2 "s_register_operand" "r,0,0")))
858 (clobber (reg:CC CC_REGNUM))]
860 "subs\\t%Q0, %Q1, %Q2\;sbc\\t%R0, %R1, %R2"
861 [(set_attr "conds" "clob")
862 (set_attr "length" "8")]
865 (define_insn "*thumb_subdi3"
866 [(set (match_operand:DI 0 "register_operand" "=l")
867 (minus:DI (match_operand:DI 1 "register_operand" "0")
868 (match_operand:DI 2 "register_operand" "l")))
869 (clobber (reg:CC CC_REGNUM))]
871 "sub\\t%Q0, %Q0, %Q2\;sbc\\t%R0, %R0, %R2"
872 [(set_attr "length" "4")]
875 (define_insn "*subdi_di_zesidi"
876 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
877 (minus:DI (match_operand:DI 1 "s_register_operand" "?r,0")
879 (match_operand:SI 2 "s_register_operand" "r,r"))))
880 (clobber (reg:CC CC_REGNUM))]
882 "subs\\t%Q0, %Q1, %2\;sbc\\t%R0, %R1, #0"
883 [(set_attr "conds" "clob")
884 (set_attr "length" "8")]
887 (define_insn "*subdi_di_sesidi"
888 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
889 (minus:DI (match_operand:DI 1 "s_register_operand" "r,0")
891 (match_operand:SI 2 "s_register_operand" "r,r"))))
892 (clobber (reg:CC CC_REGNUM))]
894 "subs\\t%Q0, %Q1, %2\;sbc\\t%R0, %R1, %2, asr #31"
895 [(set_attr "conds" "clob")
896 (set_attr "length" "8")]
899 (define_insn "*subdi_zesidi_di"
900 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
901 (minus:DI (zero_extend:DI
902 (match_operand:SI 2 "s_register_operand" "r,r"))
903 (match_operand:DI 1 "s_register_operand" "?r,0")))
904 (clobber (reg:CC CC_REGNUM))]
906 "rsbs\\t%Q0, %Q1, %2\;rsc\\t%R0, %R1, #0"
907 [(set_attr "conds" "clob")
908 (set_attr "length" "8")]
911 (define_insn "*subdi_sesidi_di"
912 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
913 (minus:DI (sign_extend:DI
914 (match_operand:SI 2 "s_register_operand" "r,r"))
915 (match_operand:DI 1 "s_register_operand" "?r,0")))
916 (clobber (reg:CC CC_REGNUM))]
918 "rsbs\\t%Q0, %Q1, %2\;rsc\\t%R0, %R1, %2, asr #31"
919 [(set_attr "conds" "clob")
920 (set_attr "length" "8")]
923 (define_insn "*subdi_zesidi_zesidi"
924 [(set (match_operand:DI 0 "s_register_operand" "=r")
925 (minus:DI (zero_extend:DI
926 (match_operand:SI 1 "s_register_operand" "r"))
928 (match_operand:SI 2 "s_register_operand" "r"))))
929 (clobber (reg:CC CC_REGNUM))]
931 "subs\\t%Q0, %1, %2\;rsc\\t%R0, %1, %1"
932 [(set_attr "conds" "clob")
933 (set_attr "length" "8")]
936 (define_expand "subsi3"
937 [(set (match_operand:SI 0 "s_register_operand" "")
938 (minus:SI (match_operand:SI 1 "reg_or_int_operand" "")
939 (match_operand:SI 2 "s_register_operand" "")))]
942 if (GET_CODE (operands[1]) == CONST_INT)
946 arm_split_constant (MINUS, SImode, NULL_RTX,
947 INTVAL (operands[1]), operands[0],
948 operands[2], optimize && !no_new_pseudos);
951 else /* TARGET_THUMB */
952 operands[1] = force_reg (SImode, operands[1]);
957 (define_insn "*thumb_subsi3_insn"
958 [(set (match_operand:SI 0 "register_operand" "=l")
959 (minus:SI (match_operand:SI 1 "register_operand" "l")
960 (match_operand:SI 2 "register_operand" "l")))]
963 [(set_attr "length" "2")]
966 (define_insn_and_split "*arm_subsi3_insn"
967 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
968 (minus:SI (match_operand:SI 1 "reg_or_int_operand" "rI,?n")
969 (match_operand:SI 2 "s_register_operand" "r,r")))]
975 && GET_CODE (operands[1]) == CONST_INT
976 && !const_ok_for_arm (INTVAL (operands[1]))"
977 [(clobber (const_int 0))]
979 arm_split_constant (MINUS, SImode, curr_insn,
980 INTVAL (operands[1]), operands[0], operands[2], 0);
983 [(set_attr "length" "4,16")
984 (set_attr "predicable" "yes")]
988 [(match_scratch:SI 3 "r")
989 (set (match_operand:SI 0 "arm_general_register_operand" "")
990 (minus:SI (match_operand:SI 1 "const_int_operand" "")
991 (match_operand:SI 2 "arm_general_register_operand" "")))]
993 && !const_ok_for_arm (INTVAL (operands[1]))
994 && const_ok_for_arm (~INTVAL (operands[1]))"
995 [(set (match_dup 3) (match_dup 1))
996 (set (match_dup 0) (minus:SI (match_dup 3) (match_dup 2)))]
1000 (define_insn "*subsi3_compare0"
1001 [(set (reg:CC_NOOV CC_REGNUM)
1003 (minus:SI (match_operand:SI 1 "arm_rhs_operand" "r,I")
1004 (match_operand:SI 2 "arm_rhs_operand" "rI,r"))
1006 (set (match_operand:SI 0 "s_register_operand" "=r,r")
1007 (minus:SI (match_dup 1) (match_dup 2)))]
1011 rsb%?s\\t%0, %2, %1"
1012 [(set_attr "conds" "set")]
1015 (define_insn "decscc"
1016 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1017 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
1018 (match_operator:SI 2 "arm_comparison_operator"
1019 [(match_operand 3 "cc_register" "") (const_int 0)])))]
1023 mov%D2\\t%0, %1\;sub%d2\\t%0, %1, #1"
1024 [(set_attr "conds" "use")
1025 (set_attr "length" "*,8")]
1028 (define_expand "subsf3"
1029 [(set (match_operand:SF 0 "s_register_operand" "")
1030 (minus:SF (match_operand:SF 1 "arm_float_rhs_operand" "")
1031 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1032 "TARGET_ARM && TARGET_HARD_FLOAT"
1034 if (TARGET_MAVERICK)
1036 if (!cirrus_fp_register (operands[1], SFmode))
1037 operands[1] = force_reg (SFmode, operands[1]);
1038 if (!cirrus_fp_register (operands[2], SFmode))
1039 operands[2] = force_reg (SFmode, operands[2]);
1043 (define_expand "subdf3"
1044 [(set (match_operand:DF 0 "s_register_operand" "")
1045 (minus:DF (match_operand:DF 1 "arm_float_rhs_operand" "")
1046 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1047 "TARGET_ARM && TARGET_HARD_FLOAT"
1049 if (TARGET_MAVERICK)
1051 if (!cirrus_fp_register (operands[1], DFmode))
1052 operands[1] = force_reg (DFmode, operands[1]);
1053 if (!cirrus_fp_register (operands[2], DFmode))
1054 operands[2] = force_reg (DFmode, operands[2]);
1059 ;; Multiplication insns
1061 (define_expand "mulsi3"
1062 [(set (match_operand:SI 0 "s_register_operand" "")
1063 (mult:SI (match_operand:SI 2 "s_register_operand" "")
1064 (match_operand:SI 1 "s_register_operand" "")))]
1069 ;; Use `&' and then `0' to prevent the operands 0 and 1 being the same
1070 (define_insn "*arm_mulsi3"
1071 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1072 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r")
1073 (match_operand:SI 1 "s_register_operand" "%?r,0")))]
1075 "mul%?\\t%0, %2, %1"
1076 [(set_attr "insn" "mul")
1077 (set_attr "predicable" "yes")]
1080 ; Unfortunately with the Thumb the '&'/'0' trick can fails when operands
1081 ; 1 and 2; are the same, because reload will make operand 0 match
1082 ; operand 1 without realizing that this conflicts with operand 2. We fix
1083 ; this by adding another alternative to match this case, and then `reload'
1084 ; it ourselves. This alternative must come first.
1085 (define_insn "*thumb_mulsi3"
1086 [(set (match_operand:SI 0 "register_operand" "=&l,&l,&l")
1087 (mult:SI (match_operand:SI 1 "register_operand" "%l,*h,0")
1088 (match_operand:SI 2 "register_operand" "l,l,l")))]
1091 if (which_alternative < 2)
1092 return \"mov\\t%0, %1\;mul\\t%0, %2\";
1094 return \"mul\\t%0, %2\";
1096 [(set_attr "length" "4,4,2")
1097 (set_attr "insn" "mul")]
1100 (define_insn "*mulsi3_compare0"
1101 [(set (reg:CC_NOOV CC_REGNUM)
1102 (compare:CC_NOOV (mult:SI
1103 (match_operand:SI 2 "s_register_operand" "r,r")
1104 (match_operand:SI 1 "s_register_operand" "%?r,0"))
1106 (set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1107 (mult:SI (match_dup 2) (match_dup 1)))]
1109 "mul%?s\\t%0, %2, %1"
1110 [(set_attr "conds" "set")
1111 (set_attr "insn" "muls")]
1114 (define_insn "*mulsi_compare0_scratch"
1115 [(set (reg:CC_NOOV CC_REGNUM)
1116 (compare:CC_NOOV (mult:SI
1117 (match_operand:SI 2 "s_register_operand" "r,r")
1118 (match_operand:SI 1 "s_register_operand" "%?r,0"))
1120 (clobber (match_scratch:SI 0 "=&r,&r"))]
1122 "mul%?s\\t%0, %2, %1"
1123 [(set_attr "conds" "set")
1124 (set_attr "insn" "muls")]
1127 ;; Unnamed templates to match MLA instruction.
1129 (define_insn "*mulsi3addsi"
1130 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
1132 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1133 (match_operand:SI 1 "s_register_operand" "%r,0,r,0"))
1134 (match_operand:SI 3 "s_register_operand" "?r,r,0,0")))]
1136 "mla%?\\t%0, %2, %1, %3"
1137 [(set_attr "insn" "mla")
1138 (set_attr "predicable" "yes")]
1141 (define_insn "*mulsi3addsi_compare0"
1142 [(set (reg:CC_NOOV CC_REGNUM)
1145 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1146 (match_operand:SI 1 "s_register_operand" "%r,0,r,0"))
1147 (match_operand:SI 3 "s_register_operand" "?r,r,0,0"))
1149 (set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
1150 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
1153 "mla%?s\\t%0, %2, %1, %3"
1154 [(set_attr "conds" "set")
1155 (set_attr "insn" "mlas")]
1158 (define_insn "*mulsi3addsi_compare0_scratch"
1159 [(set (reg:CC_NOOV CC_REGNUM)
1162 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1163 (match_operand:SI 1 "s_register_operand" "%r,0,r,0"))
1164 (match_operand:SI 3 "s_register_operand" "?r,r,0,0"))
1166 (clobber (match_scratch:SI 0 "=&r,&r,&r,&r"))]
1168 "mla%?s\\t%0, %2, %1, %3"
1169 [(set_attr "conds" "set")
1170 (set_attr "insn" "mlas")]
1173 ;; Unnamed template to match long long multiply-accumulate (smlal)
1175 (define_insn "*mulsidi3adddi"
1176 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1179 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "%r"))
1180 (sign_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1181 (match_operand:DI 1 "s_register_operand" "0")))]
1182 "TARGET_ARM && arm_arch3m"
1183 "smlal%?\\t%Q0, %R0, %3, %2"
1184 [(set_attr "insn" "smlal")
1185 (set_attr "predicable" "yes")]
1188 (define_insn "mulsidi3"
1189 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1191 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "%r"))
1192 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1193 "TARGET_ARM && arm_arch3m"
1194 "smull%?\\t%Q0, %R0, %1, %2"
1195 [(set_attr "insn" "smull")
1196 (set_attr "predicable" "yes")]
1199 (define_insn "umulsidi3"
1200 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1202 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "%r"))
1203 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1204 "TARGET_ARM && arm_arch3m"
1205 "umull%?\\t%Q0, %R0, %1, %2"
1206 [(set_attr "insn" "umull")
1207 (set_attr "predicable" "yes")]
1210 ;; Unnamed template to match long long unsigned multiply-accumulate (umlal)
1212 (define_insn "*umulsidi3adddi"
1213 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1216 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "%r"))
1217 (zero_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1218 (match_operand:DI 1 "s_register_operand" "0")))]
1219 "TARGET_ARM && arm_arch3m"
1220 "umlal%?\\t%Q0, %R0, %3, %2"
1221 [(set_attr "insn" "umlal")
1222 (set_attr "predicable" "yes")]
1225 (define_insn "smulsi3_highpart"
1226 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1230 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "%r,0"))
1231 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r,r")))
1233 (clobber (match_scratch:SI 3 "=&r,&r"))]
1234 "TARGET_ARM && arm_arch3m"
1235 "smull%?\\t%3, %0, %2, %1"
1236 [(set_attr "insn" "smull")
1237 (set_attr "predicable" "yes")]
1240 (define_insn "umulsi3_highpart"
1241 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1245 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "%r,0"))
1246 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r,r")))
1248 (clobber (match_scratch:SI 3 "=&r,&r"))]
1249 "TARGET_ARM && arm_arch3m"
1250 "umull%?\\t%3, %0, %2, %1"
1251 [(set_attr "insn" "umull")
1252 (set_attr "predicable" "yes")]
1255 (define_insn "mulhisi3"
1256 [(set (match_operand:SI 0 "s_register_operand" "=r")
1257 (mult:SI (sign_extend:SI
1258 (match_operand:HI 1 "s_register_operand" "%r"))
1260 (match_operand:HI 2 "s_register_operand" "r"))))]
1261 "TARGET_ARM && arm_arch5e"
1262 "smulbb%?\\t%0, %1, %2"
1263 [(set_attr "insn" "smulxy")
1264 (set_attr "predicable" "yes")]
1267 (define_insn "*mulhisi3tb"
1268 [(set (match_operand:SI 0 "s_register_operand" "=r")
1269 (mult:SI (ashiftrt:SI
1270 (match_operand:SI 1 "s_register_operand" "r")
1273 (match_operand:HI 2 "s_register_operand" "r"))))]
1274 "TARGET_ARM && arm_arch5e"
1275 "smultb%?\\t%0, %1, %2"
1276 [(set_attr "insn" "smulxy")
1277 (set_attr "predicable" "yes")]
1280 (define_insn "*mulhisi3bt"
1281 [(set (match_operand:SI 0 "s_register_operand" "=r")
1282 (mult:SI (sign_extend:SI
1283 (match_operand:HI 1 "s_register_operand" "r"))
1285 (match_operand:SI 2 "s_register_operand" "r")
1287 "TARGET_ARM && arm_arch5e"
1288 "smulbt%?\\t%0, %1, %2"
1289 [(set_attr "insn" "smulxy")
1290 (set_attr "predicable" "yes")]
1293 (define_insn "*mulhisi3tt"
1294 [(set (match_operand:SI 0 "s_register_operand" "=r")
1295 (mult:SI (ashiftrt:SI
1296 (match_operand:SI 1 "s_register_operand" "r")
1299 (match_operand:SI 2 "s_register_operand" "r")
1301 "TARGET_ARM && arm_arch5e"
1302 "smultt%?\\t%0, %1, %2"
1303 [(set_attr "insn" "smulxy")
1304 (set_attr "predicable" "yes")]
1307 (define_insn "*mulhisi3addsi"
1308 [(set (match_operand:SI 0 "s_register_operand" "=r")
1309 (plus:SI (match_operand:SI 1 "s_register_operand" "r")
1310 (mult:SI (sign_extend:SI
1311 (match_operand:HI 2 "s_register_operand" "%r"))
1313 (match_operand:HI 3 "s_register_operand" "r")))))]
1314 "TARGET_ARM && arm_arch5e"
1315 "smlabb%?\\t%0, %2, %3, %1"
1316 [(set_attr "insn" "smlaxy")
1317 (set_attr "predicable" "yes")]
1320 (define_insn "*mulhidi3adddi"
1321 [(set (match_operand:DI 0 "s_register_operand" "=r")
1323 (match_operand:DI 1 "s_register_operand" "0")
1324 (mult:DI (sign_extend:DI
1325 (match_operand:HI 2 "s_register_operand" "%r"))
1327 (match_operand:HI 3 "s_register_operand" "r")))))]
1328 "TARGET_ARM && arm_arch5e"
1329 "smlalbb%?\\t%Q0, %R0, %2, %3"
1330 [(set_attr "insn" "smlalxy")
1331 (set_attr "predicable" "yes")])
1333 (define_expand "mulsf3"
1334 [(set (match_operand:SF 0 "s_register_operand" "")
1335 (mult:SF (match_operand:SF 1 "s_register_operand" "")
1336 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1337 "TARGET_ARM && TARGET_HARD_FLOAT"
1340 && !cirrus_fp_register (operands[2], SFmode))
1341 operands[2] = force_reg (SFmode, operands[2]);
1344 (define_expand "muldf3"
1345 [(set (match_operand:DF 0 "s_register_operand" "")
1346 (mult:DF (match_operand:DF 1 "s_register_operand" "")
1347 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1348 "TARGET_ARM && TARGET_HARD_FLOAT"
1351 && !cirrus_fp_register (operands[2], DFmode))
1352 operands[2] = force_reg (DFmode, operands[2]);
1357 (define_expand "divsf3"
1358 [(set (match_operand:SF 0 "s_register_operand" "")
1359 (div:SF (match_operand:SF 1 "arm_float_rhs_operand" "")
1360 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1361 "TARGET_ARM && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
1364 (define_expand "divdf3"
1365 [(set (match_operand:DF 0 "s_register_operand" "")
1366 (div:DF (match_operand:DF 1 "arm_float_rhs_operand" "")
1367 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1368 "TARGET_ARM && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
1373 (define_expand "modsf3"
1374 [(set (match_operand:SF 0 "s_register_operand" "")
1375 (mod:SF (match_operand:SF 1 "s_register_operand" "")
1376 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1377 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_FPA"
1380 (define_expand "moddf3"
1381 [(set (match_operand:DF 0 "s_register_operand" "")
1382 (mod:DF (match_operand:DF 1 "s_register_operand" "")
1383 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1384 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_FPA"
1387 ;; Boolean and,ior,xor insns
1389 ;; Split up double word logical operations
1391 ;; Split up simple DImode logical operations. Simply perform the logical
1392 ;; operation on the upper and lower halves of the registers.
1394 [(set (match_operand:DI 0 "s_register_operand" "")
1395 (match_operator:DI 6 "logical_binary_operator"
1396 [(match_operand:DI 1 "s_register_operand" "")
1397 (match_operand:DI 2 "s_register_operand" "")]))]
1398 "TARGET_ARM && reload_completed && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
1399 [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
1400 (set (match_dup 3) (match_op_dup:SI 6 [(match_dup 4) (match_dup 5)]))]
1403 operands[3] = gen_highpart (SImode, operands[0]);
1404 operands[0] = gen_lowpart (SImode, operands[0]);
1405 operands[4] = gen_highpart (SImode, operands[1]);
1406 operands[1] = gen_lowpart (SImode, operands[1]);
1407 operands[5] = gen_highpart (SImode, operands[2]);
1408 operands[2] = gen_lowpart (SImode, operands[2]);
1413 [(set (match_operand:DI 0 "s_register_operand" "")
1414 (match_operator:DI 6 "logical_binary_operator"
1415 [(sign_extend:DI (match_operand:SI 2 "s_register_operand" ""))
1416 (match_operand:DI 1 "s_register_operand" "")]))]
1417 "TARGET_ARM && reload_completed"
1418 [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
1419 (set (match_dup 3) (match_op_dup:SI 6
1420 [(ashiftrt:SI (match_dup 2) (const_int 31))
1424 operands[3] = gen_highpart (SImode, operands[0]);
1425 operands[0] = gen_lowpart (SImode, operands[0]);
1426 operands[4] = gen_highpart (SImode, operands[1]);
1427 operands[1] = gen_lowpart (SImode, operands[1]);
1428 operands[5] = gen_highpart (SImode, operands[2]);
1429 operands[2] = gen_lowpart (SImode, operands[2]);
1433 ;; The zero extend of operand 2 means we can just copy the high part of
1434 ;; operand1 into operand0.
1436 [(set (match_operand:DI 0 "s_register_operand" "")
1438 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))
1439 (match_operand:DI 1 "s_register_operand" "")))]
1440 "TARGET_ARM && operands[0] != operands[1] && reload_completed"
1441 [(set (match_dup 0) (ior:SI (match_dup 1) (match_dup 2)))
1442 (set (match_dup 3) (match_dup 4))]
1445 operands[4] = gen_highpart (SImode, operands[1]);
1446 operands[3] = gen_highpart (SImode, operands[0]);
1447 operands[0] = gen_lowpart (SImode, operands[0]);
1448 operands[1] = gen_lowpart (SImode, operands[1]);
1452 ;; The zero extend of operand 2 means we can just copy the high part of
1453 ;; operand1 into operand0.
1455 [(set (match_operand:DI 0 "s_register_operand" "")
1457 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))
1458 (match_operand:DI 1 "s_register_operand" "")))]
1459 "TARGET_ARM && operands[0] != operands[1] && reload_completed"
1460 [(set (match_dup 0) (xor:SI (match_dup 1) (match_dup 2)))
1461 (set (match_dup 3) (match_dup 4))]
1464 operands[4] = gen_highpart (SImode, operands[1]);
1465 operands[3] = gen_highpart (SImode, operands[0]);
1466 operands[0] = gen_lowpart (SImode, operands[0]);
1467 operands[1] = gen_lowpart (SImode, operands[1]);
1471 (define_insn "anddi3"
1472 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1473 (and:DI (match_operand:DI 1 "s_register_operand" "%0,r")
1474 (match_operand:DI 2 "s_register_operand" "r,r")))]
1475 "TARGET_ARM && ! TARGET_IWMMXT"
1477 [(set_attr "length" "8")]
1480 (define_insn_and_split "*anddi_zesidi_di"
1481 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1482 (and:DI (zero_extend:DI
1483 (match_operand:SI 2 "s_register_operand" "r,r"))
1484 (match_operand:DI 1 "s_register_operand" "?r,0")))]
1487 "TARGET_ARM && reload_completed"
1488 ; The zero extend of operand 2 clears the high word of the output
1490 [(set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))
1491 (set (match_dup 3) (const_int 0))]
1494 operands[3] = gen_highpart (SImode, operands[0]);
1495 operands[0] = gen_lowpart (SImode, operands[0]);
1496 operands[1] = gen_lowpart (SImode, operands[1]);
1498 [(set_attr "length" "8")]
1501 (define_insn "*anddi_sesdi_di"
1502 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1503 (and:DI (sign_extend:DI
1504 (match_operand:SI 2 "s_register_operand" "r,r"))
1505 (match_operand:DI 1 "s_register_operand" "?r,0")))]
1508 [(set_attr "length" "8")]
1511 (define_expand "andsi3"
1512 [(set (match_operand:SI 0 "s_register_operand" "")
1513 (and:SI (match_operand:SI 1 "s_register_operand" "")
1514 (match_operand:SI 2 "reg_or_int_operand" "")))]
1519 if (GET_CODE (operands[2]) == CONST_INT)
1521 arm_split_constant (AND, SImode, NULL_RTX,
1522 INTVAL (operands[2]), operands[0],
1523 operands[1], optimize && !no_new_pseudos);
1528 else /* TARGET_THUMB */
1530 if (GET_CODE (operands[2]) != CONST_INT)
1531 operands[2] = force_reg (SImode, operands[2]);
1536 if (((unsigned HOST_WIDE_INT) ~INTVAL (operands[2])) < 256)
1538 operands[2] = force_reg (SImode,
1539 GEN_INT (~INTVAL (operands[2])));
1541 emit_insn (gen_bicsi3 (operands[0], operands[2], operands[1]));
1546 for (i = 9; i <= 31; i++)
1548 if ((((HOST_WIDE_INT) 1) << i) - 1 == INTVAL (operands[2]))
1550 emit_insn (gen_extzv (operands[0], operands[1], GEN_INT (i),
1554 else if ((((HOST_WIDE_INT) 1) << i) - 1
1555 == ~INTVAL (operands[2]))
1557 rtx shift = GEN_INT (i);
1558 rtx reg = gen_reg_rtx (SImode);
1560 emit_insn (gen_lshrsi3 (reg, operands[1], shift));
1561 emit_insn (gen_ashlsi3 (operands[0], reg, shift));
1567 operands[2] = force_reg (SImode, operands[2]);
1573 (define_insn_and_split "*arm_andsi3_insn"
1574 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1575 (and:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
1576 (match_operand:SI 2 "reg_or_int_operand" "rI,K,?n")))]
1580 bic%?\\t%0, %1, #%B2
1583 && GET_CODE (operands[2]) == CONST_INT
1584 && !(const_ok_for_arm (INTVAL (operands[2]))
1585 || const_ok_for_arm (~INTVAL (operands[2])))"
1586 [(clobber (const_int 0))]
1588 arm_split_constant (AND, SImode, curr_insn,
1589 INTVAL (operands[2]), operands[0], operands[1], 0);
1592 [(set_attr "length" "4,4,16")
1593 (set_attr "predicable" "yes")]
1596 (define_insn "*thumb_andsi3_insn"
1597 [(set (match_operand:SI 0 "register_operand" "=l")
1598 (and:SI (match_operand:SI 1 "register_operand" "%0")
1599 (match_operand:SI 2 "register_operand" "l")))]
1602 [(set_attr "length" "2")]
1605 (define_insn "*andsi3_compare0"
1606 [(set (reg:CC_NOOV CC_REGNUM)
1608 (and:SI (match_operand:SI 1 "s_register_operand" "r,r")
1609 (match_operand:SI 2 "arm_not_operand" "rI,K"))
1611 (set (match_operand:SI 0 "s_register_operand" "=r,r")
1612 (and:SI (match_dup 1) (match_dup 2)))]
1616 bic%?s\\t%0, %1, #%B2"
1617 [(set_attr "conds" "set")]
1620 (define_insn "*andsi3_compare0_scratch"
1621 [(set (reg:CC_NOOV CC_REGNUM)
1623 (and:SI (match_operand:SI 0 "s_register_operand" "r,r")
1624 (match_operand:SI 1 "arm_not_operand" "rI,K"))
1626 (clobber (match_scratch:SI 2 "=X,r"))]
1630 bic%?s\\t%2, %0, #%B1"
1631 [(set_attr "conds" "set")]
1634 (define_insn "*zeroextractsi_compare0_scratch"
1635 [(set (reg:CC_NOOV CC_REGNUM)
1636 (compare:CC_NOOV (zero_extract:SI
1637 (match_operand:SI 0 "s_register_operand" "r")
1638 (match_operand 1 "const_int_operand" "n")
1639 (match_operand 2 "const_int_operand" "n"))
1642 && (INTVAL (operands[2]) >= 0 && INTVAL (operands[2]) < 32
1643 && INTVAL (operands[1]) > 0
1644 && INTVAL (operands[1]) + (INTVAL (operands[2]) & 1) <= 8
1645 && INTVAL (operands[1]) + INTVAL (operands[2]) <= 32)"
1647 operands[1] = GEN_INT (((1 << INTVAL (operands[1])) - 1)
1648 << INTVAL (operands[2]));
1649 output_asm_insn (\"tst%?\\t%0, %1\", operands);
1652 [(set_attr "conds" "set")]
1655 (define_insn_and_split "*ne_zeroextractsi"
1656 [(set (match_operand:SI 0 "s_register_operand" "=r")
1657 (ne:SI (zero_extract:SI
1658 (match_operand:SI 1 "s_register_operand" "r")
1659 (match_operand:SI 2 "const_int_operand" "n")
1660 (match_operand:SI 3 "const_int_operand" "n"))
1662 (clobber (reg:CC CC_REGNUM))]
1664 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
1665 && INTVAL (operands[2]) > 0
1666 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
1667 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
1670 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
1671 && INTVAL (operands[2]) > 0
1672 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
1673 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
1674 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
1675 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
1677 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
1679 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
1680 (match_dup 0) (const_int 1)))]
1682 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
1683 << INTVAL (operands[3]));
1685 [(set_attr "conds" "clob")
1686 (set_attr "length" "8")]
1689 (define_insn_and_split "*ne_zeroextractsi_shifted"
1690 [(set (match_operand:SI 0 "s_register_operand" "=r")
1691 (ne:SI (zero_extract:SI
1692 (match_operand:SI 1 "s_register_operand" "r")
1693 (match_operand:SI 2 "const_int_operand" "n")
1696 (clobber (reg:CC CC_REGNUM))]
1700 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
1701 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
1703 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
1705 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
1706 (match_dup 0) (const_int 1)))]
1708 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
1710 [(set_attr "conds" "clob")
1711 (set_attr "length" "8")]
1714 (define_insn_and_split "*ite_ne_zeroextractsi"
1715 [(set (match_operand:SI 0 "s_register_operand" "=r")
1716 (if_then_else:SI (ne (zero_extract:SI
1717 (match_operand:SI 1 "s_register_operand" "r")
1718 (match_operand:SI 2 "const_int_operand" "n")
1719 (match_operand:SI 3 "const_int_operand" "n"))
1721 (match_operand:SI 4 "arm_not_operand" "rIK")
1723 (clobber (reg:CC CC_REGNUM))]
1725 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
1726 && INTVAL (operands[2]) > 0
1727 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
1728 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
1729 && !reg_overlap_mentioned_p (operands[0], operands[4])"
1732 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
1733 && INTVAL (operands[2]) > 0
1734 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
1735 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
1736 && !reg_overlap_mentioned_p (operands[0], operands[4])"
1737 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
1738 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
1740 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
1742 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
1743 (match_dup 0) (match_dup 4)))]
1745 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
1746 << INTVAL (operands[3]));
1748 [(set_attr "conds" "clob")
1749 (set_attr "length" "8")]
1752 (define_insn_and_split "*ite_ne_zeroextractsi_shifted"
1753 [(set (match_operand:SI 0 "s_register_operand" "=r")
1754 (if_then_else:SI (ne (zero_extract:SI
1755 (match_operand:SI 1 "s_register_operand" "r")
1756 (match_operand:SI 2 "const_int_operand" "n")
1759 (match_operand:SI 3 "arm_not_operand" "rIK")
1761 (clobber (reg:CC CC_REGNUM))]
1762 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
1764 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
1765 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
1766 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
1768 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
1770 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
1771 (match_dup 0) (match_dup 3)))]
1773 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
1775 [(set_attr "conds" "clob")
1776 (set_attr "length" "8")]
1780 [(set (match_operand:SI 0 "s_register_operand" "")
1781 (zero_extract:SI (match_operand:SI 1 "s_register_operand" "")
1782 (match_operand:SI 2 "const_int_operand" "")
1783 (match_operand:SI 3 "const_int_operand" "")))
1784 (clobber (match_operand:SI 4 "s_register_operand" ""))]
1786 [(set (match_dup 4) (ashift:SI (match_dup 1) (match_dup 2)))
1787 (set (match_dup 0) (lshiftrt:SI (match_dup 4) (match_dup 3)))]
1789 HOST_WIDE_INT temp = INTVAL (operands[2]);
1791 operands[2] = GEN_INT (32 - temp - INTVAL (operands[3]));
1792 operands[3] = GEN_INT (32 - temp);
1797 [(set (match_operand:SI 0 "s_register_operand" "")
1798 (match_operator:SI 1 "shiftable_operator"
1799 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
1800 (match_operand:SI 3 "const_int_operand" "")
1801 (match_operand:SI 4 "const_int_operand" ""))
1802 (match_operand:SI 5 "s_register_operand" "")]))
1803 (clobber (match_operand:SI 6 "s_register_operand" ""))]
1805 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
1808 [(lshiftrt:SI (match_dup 6) (match_dup 4))
1811 HOST_WIDE_INT temp = INTVAL (operands[3]);
1813 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
1814 operands[4] = GEN_INT (32 - temp);
1819 [(set (match_operand:SI 0 "s_register_operand" "")
1820 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "")
1821 (match_operand:SI 2 "const_int_operand" "")
1822 (match_operand:SI 3 "const_int_operand" "")))]
1824 [(set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))
1825 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (match_dup 3)))]
1827 HOST_WIDE_INT temp = INTVAL (operands[2]);
1829 operands[2] = GEN_INT (32 - temp - INTVAL (operands[3]));
1830 operands[3] = GEN_INT (32 - temp);
1835 [(set (match_operand:SI 0 "s_register_operand" "")
1836 (match_operator:SI 1 "shiftable_operator"
1837 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
1838 (match_operand:SI 3 "const_int_operand" "")
1839 (match_operand:SI 4 "const_int_operand" ""))
1840 (match_operand:SI 5 "s_register_operand" "")]))
1841 (clobber (match_operand:SI 6 "s_register_operand" ""))]
1843 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
1846 [(ashiftrt:SI (match_dup 6) (match_dup 4))
1849 HOST_WIDE_INT temp = INTVAL (operands[3]);
1851 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
1852 operands[4] = GEN_INT (32 - temp);
1856 ;;; ??? This pattern is bogus. If operand3 has bits outside the range
1857 ;;; represented by the bitfield, then this will produce incorrect results.
1858 ;;; Somewhere, the value needs to be truncated. On targets like the m68k,
1859 ;;; which have a real bit-field insert instruction, the truncation happens
1860 ;;; in the bit-field insert instruction itself. Since arm does not have a
1861 ;;; bit-field insert instruction, we would have to emit code here to truncate
1862 ;;; the value before we insert. This loses some of the advantage of having
1863 ;;; this insv pattern, so this pattern needs to be reevalutated.
1865 (define_expand "insv"
1866 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "")
1867 (match_operand:SI 1 "general_operand" "")
1868 (match_operand:SI 2 "general_operand" ""))
1869 (match_operand:SI 3 "reg_or_int_operand" ""))]
1873 int start_bit = INTVAL (operands[2]);
1874 int width = INTVAL (operands[1]);
1875 HOST_WIDE_INT mask = (((HOST_WIDE_INT)1) << width) - 1;
1876 rtx target, subtarget;
1878 target = operands[0];
1879 /* Avoid using a subreg as a subtarget, and avoid writing a paradoxical
1880 subreg as the final target. */
1881 if (GET_CODE (target) == SUBREG)
1883 subtarget = gen_reg_rtx (SImode);
1884 if (GET_MODE_SIZE (GET_MODE (SUBREG_REG (target)))
1885 < GET_MODE_SIZE (SImode))
1886 target = SUBREG_REG (target);
1891 if (GET_CODE (operands[3]) == CONST_INT)
1893 /* Since we are inserting a known constant, we may be able to
1894 reduce the number of bits that we have to clear so that
1895 the mask becomes simple. */
1896 /* ??? This code does not check to see if the new mask is actually
1897 simpler. It may not be. */
1898 rtx op1 = gen_reg_rtx (SImode);
1899 /* ??? Truncate operand3 to fit in the bitfield. See comment before
1900 start of this pattern. */
1901 HOST_WIDE_INT op3_value = mask & INTVAL (operands[3]);
1902 HOST_WIDE_INT mask2 = ((mask & ~op3_value) << start_bit);
1904 emit_insn (gen_andsi3 (op1, operands[0],
1905 gen_int_mode (~mask2, SImode)));
1906 emit_insn (gen_iorsi3 (subtarget, op1,
1907 gen_int_mode (op3_value << start_bit, SImode)));
1909 else if (start_bit == 0
1910 && !(const_ok_for_arm (mask)
1911 || const_ok_for_arm (~mask)))
1913 /* A Trick, since we are setting the bottom bits in the word,
1914 we can shift operand[3] up, operand[0] down, OR them together
1915 and rotate the result back again. This takes 3 insns, and
1916 the third might be mergeable into another op. */
1917 /* The shift up copes with the possibility that operand[3] is
1918 wider than the bitfield. */
1919 rtx op0 = gen_reg_rtx (SImode);
1920 rtx op1 = gen_reg_rtx (SImode);
1922 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
1923 emit_insn (gen_lshrsi3 (op1, operands[0], operands[1]));
1924 emit_insn (gen_iorsi3 (op1, op1, op0));
1925 emit_insn (gen_rotlsi3 (subtarget, op1, operands[1]));
1927 else if ((width + start_bit == 32)
1928 && !(const_ok_for_arm (mask)
1929 || const_ok_for_arm (~mask)))
1931 /* Similar trick, but slightly less efficient. */
1933 rtx op0 = gen_reg_rtx (SImode);
1934 rtx op1 = gen_reg_rtx (SImode);
1936 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
1937 emit_insn (gen_ashlsi3 (op1, operands[0], operands[1]));
1938 emit_insn (gen_lshrsi3 (op1, op1, operands[1]));
1939 emit_insn (gen_iorsi3 (subtarget, op1, op0));
1943 rtx op0 = gen_int_mode (mask, SImode);
1944 rtx op1 = gen_reg_rtx (SImode);
1945 rtx op2 = gen_reg_rtx (SImode);
1947 if (!(const_ok_for_arm (mask) || const_ok_for_arm (~mask)))
1949 rtx tmp = gen_reg_rtx (SImode);
1951 emit_insn (gen_movsi (tmp, op0));
1955 /* Mask out any bits in operand[3] that are not needed. */
1956 emit_insn (gen_andsi3 (op1, operands[3], op0));
1958 if (GET_CODE (op0) == CONST_INT
1959 && (const_ok_for_arm (mask << start_bit)
1960 || const_ok_for_arm (~(mask << start_bit))))
1962 op0 = gen_int_mode (~(mask << start_bit), SImode);
1963 emit_insn (gen_andsi3 (op2, operands[0], op0));
1967 if (GET_CODE (op0) == CONST_INT)
1969 rtx tmp = gen_reg_rtx (SImode);
1971 emit_insn (gen_movsi (tmp, op0));
1976 emit_insn (gen_ashlsi3 (op0, op0, operands[2]));
1978 emit_insn (gen_andsi_notsi_si (op2, operands[0], op0));
1982 emit_insn (gen_ashlsi3 (op1, op1, operands[2]));
1984 emit_insn (gen_iorsi3 (subtarget, op1, op2));
1987 if (subtarget != target)
1989 /* If TARGET is still a SUBREG, then it must be wider than a word,
1990 so we must be careful only to set the subword we were asked to. */
1991 if (GET_CODE (target) == SUBREG)
1992 emit_move_insn (target, subtarget);
1994 emit_move_insn (target, gen_lowpart (GET_MODE (target), subtarget));
2001 ; constants for op 2 will never be given to these patterns.
2002 (define_insn_and_split "*anddi_notdi_di"
2003 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2004 (and:DI (not:DI (match_operand:DI 1 "s_register_operand" "r,0"))
2005 (match_operand:DI 2 "s_register_operand" "0,r")))]
2008 "TARGET_ARM && reload_completed && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
2009 [(set (match_dup 0) (and:SI (not:SI (match_dup 1)) (match_dup 2)))
2010 (set (match_dup 3) (and:SI (not:SI (match_dup 4)) (match_dup 5)))]
2013 operands[3] = gen_highpart (SImode, operands[0]);
2014 operands[0] = gen_lowpart (SImode, operands[0]);
2015 operands[4] = gen_highpart (SImode, operands[1]);
2016 operands[1] = gen_lowpart (SImode, operands[1]);
2017 operands[5] = gen_highpart (SImode, operands[2]);
2018 operands[2] = gen_lowpart (SImode, operands[2]);
2020 [(set_attr "length" "8")
2021 (set_attr "predicable" "yes")]
2024 (define_insn_and_split "*anddi_notzesidi_di"
2025 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2026 (and:DI (not:DI (zero_extend:DI
2027 (match_operand:SI 2 "s_register_operand" "r,r")))
2028 (match_operand:DI 1 "s_register_operand" "0,?r")))]
2031 bic%?\\t%Q0, %Q1, %2
2033 ; (not (zero_extend ...)) allows us to just copy the high word from
2034 ; operand1 to operand0.
2037 && operands[0] != operands[1]"
2038 [(set (match_dup 0) (and:SI (not:SI (match_dup 2)) (match_dup 1)))
2039 (set (match_dup 3) (match_dup 4))]
2042 operands[3] = gen_highpart (SImode, operands[0]);
2043 operands[0] = gen_lowpart (SImode, operands[0]);
2044 operands[4] = gen_highpart (SImode, operands[1]);
2045 operands[1] = gen_lowpart (SImode, operands[1]);
2047 [(set_attr "length" "4,8")
2048 (set_attr "predicable" "yes")]
2051 (define_insn_and_split "*anddi_notsesidi_di"
2052 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2053 (and:DI (not:DI (sign_extend:DI
2054 (match_operand:SI 2 "s_register_operand" "r,r")))
2055 (match_operand:DI 1 "s_register_operand" "0,r")))]
2058 "TARGET_ARM && reload_completed"
2059 [(set (match_dup 0) (and:SI (not:SI (match_dup 2)) (match_dup 1)))
2060 (set (match_dup 3) (and:SI (not:SI
2061 (ashiftrt:SI (match_dup 2) (const_int 31)))
2065 operands[3] = gen_highpart (SImode, operands[0]);
2066 operands[0] = gen_lowpart (SImode, operands[0]);
2067 operands[4] = gen_highpart (SImode, operands[1]);
2068 operands[1] = gen_lowpart (SImode, operands[1]);
2070 [(set_attr "length" "8")
2071 (set_attr "predicable" "yes")]
2074 (define_insn "andsi_notsi_si"
2075 [(set (match_operand:SI 0 "s_register_operand" "=r")
2076 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2077 (match_operand:SI 1 "s_register_operand" "r")))]
2079 "bic%?\\t%0, %1, %2"
2080 [(set_attr "predicable" "yes")]
2083 (define_insn "bicsi3"
2084 [(set (match_operand:SI 0 "register_operand" "=l")
2085 (and:SI (not:SI (match_operand:SI 1 "register_operand" "l"))
2086 (match_operand:SI 2 "register_operand" "0")))]
2089 [(set_attr "length" "2")]
2092 (define_insn "andsi_not_shiftsi_si"
2093 [(set (match_operand:SI 0 "s_register_operand" "=r")
2094 (and:SI (not:SI (match_operator:SI 4 "shift_operator"
2095 [(match_operand:SI 2 "s_register_operand" "r")
2096 (match_operand:SI 3 "arm_rhs_operand" "rM")]))
2097 (match_operand:SI 1 "s_register_operand" "r")))]
2099 "bic%?\\t%0, %1, %2%S4"
2100 [(set_attr "predicable" "yes")
2101 (set_attr "shift" "2")
2102 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
2103 (const_string "alu_shift")
2104 (const_string "alu_shift_reg")))]
2107 (define_insn "*andsi_notsi_si_compare0"
2108 [(set (reg:CC_NOOV CC_REGNUM)
2110 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2111 (match_operand:SI 1 "s_register_operand" "r"))
2113 (set (match_operand:SI 0 "s_register_operand" "=r")
2114 (and:SI (not:SI (match_dup 2)) (match_dup 1)))]
2116 "bic%?s\\t%0, %1, %2"
2117 [(set_attr "conds" "set")]
2120 (define_insn "*andsi_notsi_si_compare0_scratch"
2121 [(set (reg:CC_NOOV CC_REGNUM)
2123 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2124 (match_operand:SI 1 "s_register_operand" "r"))
2126 (clobber (match_scratch:SI 0 "=r"))]
2128 "bic%?s\\t%0, %1, %2"
2129 [(set_attr "conds" "set")]
2132 (define_insn "iordi3"
2133 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2134 (ior:DI (match_operand:DI 1 "s_register_operand" "%0,r")
2135 (match_operand:DI 2 "s_register_operand" "r,r")))]
2136 "TARGET_ARM && ! TARGET_IWMMXT"
2138 [(set_attr "length" "8")
2139 (set_attr "predicable" "yes")]
2142 (define_insn "*iordi_zesidi_di"
2143 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2144 (ior:DI (zero_extend:DI
2145 (match_operand:SI 2 "s_register_operand" "r,r"))
2146 (match_operand:DI 1 "s_register_operand" "0,?r")))]
2149 orr%?\\t%Q0, %Q1, %2
2151 [(set_attr "length" "4,8")
2152 (set_attr "predicable" "yes")]
2155 (define_insn "*iordi_sesidi_di"
2156 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2157 (ior:DI (sign_extend:DI
2158 (match_operand:SI 2 "s_register_operand" "r,r"))
2159 (match_operand:DI 1 "s_register_operand" "?r,0")))]
2162 [(set_attr "length" "8")
2163 (set_attr "predicable" "yes")]
2166 (define_expand "iorsi3"
2167 [(set (match_operand:SI 0 "s_register_operand" "")
2168 (ior:SI (match_operand:SI 1 "s_register_operand" "")
2169 (match_operand:SI 2 "reg_or_int_operand" "")))]
2172 if (GET_CODE (operands[2]) == CONST_INT)
2176 arm_split_constant (IOR, SImode, NULL_RTX,
2177 INTVAL (operands[2]), operands[0], operands[1],
2178 optimize && !no_new_pseudos);
2181 else /* TARGET_THUMB */
2182 operands [2] = force_reg (SImode, operands [2]);
2187 (define_insn_and_split "*arm_iorsi3"
2188 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
2189 (ior:SI (match_operand:SI 1 "s_register_operand" "r,r")
2190 (match_operand:SI 2 "reg_or_int_operand" "rI,?n")))]
2196 && GET_CODE (operands[2]) == CONST_INT
2197 && !const_ok_for_arm (INTVAL (operands[2]))"
2198 [(clobber (const_int 0))]
2200 arm_split_constant (IOR, SImode, curr_insn,
2201 INTVAL (operands[2]), operands[0], operands[1], 0);
2204 [(set_attr "length" "4,16")
2205 (set_attr "predicable" "yes")]
2208 (define_insn "*thumb_iorsi3"
2209 [(set (match_operand:SI 0 "register_operand" "=l")
2210 (ior:SI (match_operand:SI 1 "register_operand" "%0")
2211 (match_operand:SI 2 "register_operand" "l")))]
2214 [(set_attr "length" "2")]
2218 [(match_scratch:SI 3 "r")
2219 (set (match_operand:SI 0 "arm_general_register_operand" "")
2220 (ior:SI (match_operand:SI 1 "arm_general_register_operand" "")
2221 (match_operand:SI 2 "const_int_operand" "")))]
2223 && !const_ok_for_arm (INTVAL (operands[2]))
2224 && const_ok_for_arm (~INTVAL (operands[2]))"
2225 [(set (match_dup 3) (match_dup 2))
2226 (set (match_dup 0) (ior:SI (match_dup 1) (match_dup 3)))]
2230 (define_insn "*iorsi3_compare0"
2231 [(set (reg:CC_NOOV CC_REGNUM)
2232 (compare:CC_NOOV (ior:SI (match_operand:SI 1 "s_register_operand" "%r")
2233 (match_operand:SI 2 "arm_rhs_operand" "rI"))
2235 (set (match_operand:SI 0 "s_register_operand" "=r")
2236 (ior:SI (match_dup 1) (match_dup 2)))]
2238 "orr%?s\\t%0, %1, %2"
2239 [(set_attr "conds" "set")]
2242 (define_insn "*iorsi3_compare0_scratch"
2243 [(set (reg:CC_NOOV CC_REGNUM)
2244 (compare:CC_NOOV (ior:SI (match_operand:SI 1 "s_register_operand" "%r")
2245 (match_operand:SI 2 "arm_rhs_operand" "rI"))
2247 (clobber (match_scratch:SI 0 "=r"))]
2249 "orr%?s\\t%0, %1, %2"
2250 [(set_attr "conds" "set")]
2253 (define_insn "xordi3"
2254 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2255 (xor:DI (match_operand:DI 1 "s_register_operand" "%0,r")
2256 (match_operand:DI 2 "s_register_operand" "r,r")))]
2257 "TARGET_ARM && !TARGET_IWMMXT"
2259 [(set_attr "length" "8")
2260 (set_attr "predicable" "yes")]
2263 (define_insn "*xordi_zesidi_di"
2264 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2265 (xor:DI (zero_extend:DI
2266 (match_operand:SI 2 "s_register_operand" "r,r"))
2267 (match_operand:DI 1 "s_register_operand" "0,?r")))]
2270 eor%?\\t%Q0, %Q1, %2
2272 [(set_attr "length" "4,8")
2273 (set_attr "predicable" "yes")]
2276 (define_insn "*xordi_sesidi_di"
2277 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2278 (xor:DI (sign_extend:DI
2279 (match_operand:SI 2 "s_register_operand" "r,r"))
2280 (match_operand:DI 1 "s_register_operand" "?r,0")))]
2283 [(set_attr "length" "8")
2284 (set_attr "predicable" "yes")]
2287 (define_expand "xorsi3"
2288 [(set (match_operand:SI 0 "s_register_operand" "")
2289 (xor:SI (match_operand:SI 1 "s_register_operand" "")
2290 (match_operand:SI 2 "arm_rhs_operand" "")))]
2293 if (GET_CODE (operands[2]) == CONST_INT)
2294 operands[2] = force_reg (SImode, operands[2]);
2298 (define_insn "*arm_xorsi3"
2299 [(set (match_operand:SI 0 "s_register_operand" "=r")
2300 (xor:SI (match_operand:SI 1 "s_register_operand" "r")
2301 (match_operand:SI 2 "arm_rhs_operand" "rI")))]
2303 "eor%?\\t%0, %1, %2"
2304 [(set_attr "predicable" "yes")]
2307 (define_insn "*thumb_xorsi3"
2308 [(set (match_operand:SI 0 "register_operand" "=l")
2309 (xor:SI (match_operand:SI 1 "register_operand" "%0")
2310 (match_operand:SI 2 "register_operand" "l")))]
2313 [(set_attr "length" "2")]
2316 (define_insn "*xorsi3_compare0"
2317 [(set (reg:CC_NOOV CC_REGNUM)
2318 (compare:CC_NOOV (xor:SI (match_operand:SI 1 "s_register_operand" "r")
2319 (match_operand:SI 2 "arm_rhs_operand" "rI"))
2321 (set (match_operand:SI 0 "s_register_operand" "=r")
2322 (xor:SI (match_dup 1) (match_dup 2)))]
2324 "eor%?s\\t%0, %1, %2"
2325 [(set_attr "conds" "set")]
2328 (define_insn "*xorsi3_compare0_scratch"
2329 [(set (reg:CC_NOOV CC_REGNUM)
2330 (compare:CC_NOOV (xor:SI (match_operand:SI 0 "s_register_operand" "r")
2331 (match_operand:SI 1 "arm_rhs_operand" "rI"))
2335 [(set_attr "conds" "set")]
2338 ; By splitting (IOR (AND (NOT A) (NOT B)) C) as D = AND (IOR A B) (NOT C),
2339 ; (NOT D) we can sometimes merge the final NOT into one of the following
2343 [(set (match_operand:SI 0 "s_register_operand" "")
2344 (ior:SI (and:SI (not:SI (match_operand:SI 1 "s_register_operand" ""))
2345 (not:SI (match_operand:SI 2 "arm_rhs_operand" "")))
2346 (match_operand:SI 3 "arm_rhs_operand" "")))
2347 (clobber (match_operand:SI 4 "s_register_operand" ""))]
2349 [(set (match_dup 4) (and:SI (ior:SI (match_dup 1) (match_dup 2))
2350 (not:SI (match_dup 3))))
2351 (set (match_dup 0) (not:SI (match_dup 4)))]
2355 (define_insn "*andsi_iorsi3_notsi"
2356 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
2357 (and:SI (ior:SI (match_operand:SI 1 "s_register_operand" "r,r,0")
2358 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI"))
2359 (not:SI (match_operand:SI 3 "arm_rhs_operand" "rI,rI,rI"))))]
2361 "orr%?\\t%0, %1, %2\;bic%?\\t%0, %0, %3"
2362 [(set_attr "length" "8")
2363 (set_attr "predicable" "yes")]
2367 [(set (match_operand:SI 0 "s_register_operand" "")
2368 (match_operator:SI 1 "logical_binary_operator"
2369 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2370 (match_operand:SI 3 "const_int_operand" "")
2371 (match_operand:SI 4 "const_int_operand" ""))
2372 (match_operator:SI 9 "logical_binary_operator"
2373 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2374 (match_operand:SI 6 "const_int_operand" ""))
2375 (match_operand:SI 7 "s_register_operand" "")])]))
2376 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2378 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2379 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2382 [(ashift:SI (match_dup 2) (match_dup 4))
2386 [(lshiftrt:SI (match_dup 8) (match_dup 6))
2389 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2393 [(set (match_operand:SI 0 "s_register_operand" "")
2394 (match_operator:SI 1 "logical_binary_operator"
2395 [(match_operator:SI 9 "logical_binary_operator"
2396 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2397 (match_operand:SI 6 "const_int_operand" ""))
2398 (match_operand:SI 7 "s_register_operand" "")])
2399 (zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2400 (match_operand:SI 3 "const_int_operand" "")
2401 (match_operand:SI 4 "const_int_operand" ""))]))
2402 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2404 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2405 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2408 [(ashift:SI (match_dup 2) (match_dup 4))
2412 [(lshiftrt:SI (match_dup 8) (match_dup 6))
2415 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2419 [(set (match_operand:SI 0 "s_register_operand" "")
2420 (match_operator:SI 1 "logical_binary_operator"
2421 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2422 (match_operand:SI 3 "const_int_operand" "")
2423 (match_operand:SI 4 "const_int_operand" ""))
2424 (match_operator:SI 9 "logical_binary_operator"
2425 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2426 (match_operand:SI 6 "const_int_operand" ""))
2427 (match_operand:SI 7 "s_register_operand" "")])]))
2428 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2430 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2431 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2434 [(ashift:SI (match_dup 2) (match_dup 4))
2438 [(ashiftrt:SI (match_dup 8) (match_dup 6))
2441 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2445 [(set (match_operand:SI 0 "s_register_operand" "")
2446 (match_operator:SI 1 "logical_binary_operator"
2447 [(match_operator:SI 9 "logical_binary_operator"
2448 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2449 (match_operand:SI 6 "const_int_operand" ""))
2450 (match_operand:SI 7 "s_register_operand" "")])
2451 (sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2452 (match_operand:SI 3 "const_int_operand" "")
2453 (match_operand:SI 4 "const_int_operand" ""))]))
2454 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2456 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2457 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2460 [(ashift:SI (match_dup 2) (match_dup 4))
2464 [(ashiftrt:SI (match_dup 8) (match_dup 6))
2467 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2471 ;; Minimum and maximum insns
2473 (define_insn "smaxsi3"
2474 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
2475 (smax:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
2476 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
2477 (clobber (reg:CC CC_REGNUM))]
2480 cmp\\t%1, %2\;movlt\\t%0, %2
2481 cmp\\t%1, %2\;movge\\t%0, %1
2482 cmp\\t%1, %2\;movge\\t%0, %1\;movlt\\t%0, %2"
2483 [(set_attr "conds" "clob")
2484 (set_attr "length" "8,8,12")]
2487 (define_insn "sminsi3"
2488 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
2489 (smin:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
2490 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
2491 (clobber (reg:CC CC_REGNUM))]
2494 cmp\\t%1, %2\;movge\\t%0, %2
2495 cmp\\t%1, %2\;movlt\\t%0, %1
2496 cmp\\t%1, %2\;movlt\\t%0, %1\;movge\\t%0, %2"
2497 [(set_attr "conds" "clob")
2498 (set_attr "length" "8,8,12")]
2501 (define_insn "umaxsi3"
2502 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
2503 (umax:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
2504 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
2505 (clobber (reg:CC CC_REGNUM))]
2508 cmp\\t%1, %2\;movcc\\t%0, %2
2509 cmp\\t%1, %2\;movcs\\t%0, %1
2510 cmp\\t%1, %2\;movcs\\t%0, %1\;movcc\\t%0, %2"
2511 [(set_attr "conds" "clob")
2512 (set_attr "length" "8,8,12")]
2515 (define_insn "uminsi3"
2516 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
2517 (umin:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
2518 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
2519 (clobber (reg:CC CC_REGNUM))]
2522 cmp\\t%1, %2\;movcs\\t%0, %2
2523 cmp\\t%1, %2\;movcc\\t%0, %1
2524 cmp\\t%1, %2\;movcc\\t%0, %1\;movcs\\t%0, %2"
2525 [(set_attr "conds" "clob")
2526 (set_attr "length" "8,8,12")]
2529 (define_insn "*store_minmaxsi"
2530 [(set (match_operand:SI 0 "memory_operand" "=m")
2531 (match_operator:SI 3 "minmax_operator"
2532 [(match_operand:SI 1 "s_register_operand" "r")
2533 (match_operand:SI 2 "s_register_operand" "r")]))
2534 (clobber (reg:CC CC_REGNUM))]
2537 operands[3] = gen_rtx_fmt_ee (minmax_code (operands[3]), SImode,
2538 operands[1], operands[2]);
2539 output_asm_insn (\"cmp\\t%1, %2\", operands);
2540 output_asm_insn (\"str%d3\\t%1, %0\", operands);
2541 output_asm_insn (\"str%D3\\t%2, %0\", operands);
2544 [(set_attr "conds" "clob")
2545 (set_attr "length" "12")
2546 (set_attr "type" "store1")]
2549 ; Reject the frame pointer in operand[1], since reloading this after
2550 ; it has been eliminated can cause carnage.
2551 (define_insn "*minmax_arithsi"
2552 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
2553 (match_operator:SI 4 "shiftable_operator"
2554 [(match_operator:SI 5 "minmax_operator"
2555 [(match_operand:SI 2 "s_register_operand" "r,r")
2556 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
2557 (match_operand:SI 1 "s_register_operand" "0,?r")]))
2558 (clobber (reg:CC CC_REGNUM))]
2559 "TARGET_ARM && !arm_eliminable_register (operands[1])"
2562 enum rtx_code code = GET_CODE (operands[4]);
2564 operands[5] = gen_rtx_fmt_ee (minmax_code (operands[5]), SImode,
2565 operands[2], operands[3]);
2566 output_asm_insn (\"cmp\\t%2, %3\", operands);
2567 output_asm_insn (\"%i4%d5\\t%0, %1, %2\", operands);
2568 if (which_alternative != 0 || operands[3] != const0_rtx
2569 || (code != PLUS && code != MINUS && code != IOR && code != XOR))
2570 output_asm_insn (\"%i4%D5\\t%0, %1, %3\", operands);
2573 [(set_attr "conds" "clob")
2574 (set_attr "length" "12")]
2578 ;; Shift and rotation insns
2580 (define_expand "ashldi3"
2581 [(set (match_operand:DI 0 "s_register_operand" "")
2582 (ashift:DI (match_operand:DI 1 "s_register_operand" "")
2583 (match_operand:SI 2 "reg_or_int_operand" "")))]
2586 if (GET_CODE (operands[2]) == CONST_INT)
2588 if ((HOST_WIDE_INT) INTVAL (operands[2]) == 1)
2590 emit_insn (gen_arm_ashldi3_1bit (operands[0], operands[1]));
2593 /* Ideally we shouldn't fail here if we could know that operands[1]
2594 ends up already living in an iwmmxt register. Otherwise it's
2595 cheaper to have the alternate code being generated than moving
2596 values to iwmmxt regs and back. */
2599 else if (!TARGET_REALLY_IWMMXT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK))
2604 (define_insn "arm_ashldi3_1bit"
2605 [(set (match_operand:DI 0 "s_register_operand" "=&r,r")
2606 (ashift:DI (match_operand:DI 1 "s_register_operand" "?r,0")
2608 (clobber (reg:CC CC_REGNUM))]
2610 "movs\\t%Q0, %Q1, asl #1\;adc\\t%R0, %R1, %R1"
2611 [(set_attr "conds" "clob")
2612 (set_attr "length" "8")]
2615 (define_expand "ashlsi3"
2616 [(set (match_operand:SI 0 "s_register_operand" "")
2617 (ashift:SI (match_operand:SI 1 "s_register_operand" "")
2618 (match_operand:SI 2 "arm_rhs_operand" "")))]
2621 if (GET_CODE (operands[2]) == CONST_INT
2622 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
2624 emit_insn (gen_movsi (operands[0], const0_rtx));
2630 (define_insn "*thumb_ashlsi3"
2631 [(set (match_operand:SI 0 "register_operand" "=l,l")
2632 (ashift:SI (match_operand:SI 1 "register_operand" "l,0")
2633 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
2636 [(set_attr "length" "2")]
2639 (define_expand "ashrdi3"
2640 [(set (match_operand:DI 0 "s_register_operand" "")
2641 (ashiftrt:DI (match_operand:DI 1 "s_register_operand" "")
2642 (match_operand:SI 2 "reg_or_int_operand" "")))]
2645 if (GET_CODE (operands[2]) == CONST_INT)
2647 if ((HOST_WIDE_INT) INTVAL (operands[2]) == 1)
2649 emit_insn (gen_arm_ashrdi3_1bit (operands[0], operands[1]));
2652 /* Ideally we shouldn't fail here if we could know that operands[1]
2653 ends up already living in an iwmmxt register. Otherwise it's
2654 cheaper to have the alternate code being generated than moving
2655 values to iwmmxt regs and back. */
2658 else if (!TARGET_REALLY_IWMMXT)
2663 (define_insn "arm_ashrdi3_1bit"
2664 [(set (match_operand:DI 0 "s_register_operand" "=&r,r")
2665 (ashiftrt:DI (match_operand:DI 1 "s_register_operand" "?r,0")
2667 (clobber (reg:CC CC_REGNUM))]
2669 "movs\\t%R0, %R1, asr #1\;mov\\t%Q0, %Q1, rrx"
2670 [(set_attr "conds" "clob")
2671 (set_attr "length" "8")]
2674 (define_expand "ashrsi3"
2675 [(set (match_operand:SI 0 "s_register_operand" "")
2676 (ashiftrt:SI (match_operand:SI 1 "s_register_operand" "")
2677 (match_operand:SI 2 "arm_rhs_operand" "")))]
2680 if (GET_CODE (operands[2]) == CONST_INT
2681 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
2682 operands[2] = GEN_INT (31);
2686 (define_insn "*thumb_ashrsi3"
2687 [(set (match_operand:SI 0 "register_operand" "=l,l")
2688 (ashiftrt:SI (match_operand:SI 1 "register_operand" "l,0")
2689 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
2692 [(set_attr "length" "2")]
2695 (define_expand "lshrdi3"
2696 [(set (match_operand:DI 0 "s_register_operand" "")
2697 (lshiftrt:DI (match_operand:DI 1 "s_register_operand" "")
2698 (match_operand:SI 2 "reg_or_int_operand" "")))]
2701 if (GET_CODE (operands[2]) == CONST_INT)
2703 if ((HOST_WIDE_INT) INTVAL (operands[2]) == 1)
2705 emit_insn (gen_arm_lshrdi3_1bit (operands[0], operands[1]));
2708 /* Ideally we shouldn't fail here if we could know that operands[1]
2709 ends up already living in an iwmmxt register. Otherwise it's
2710 cheaper to have the alternate code being generated than moving
2711 values to iwmmxt regs and back. */
2714 else if (!TARGET_REALLY_IWMMXT)
2719 (define_insn "arm_lshrdi3_1bit"
2720 [(set (match_operand:DI 0 "s_register_operand" "=&r,r")
2721 (lshiftrt:DI (match_operand:DI 1 "s_register_operand" "?r,0")
2723 (clobber (reg:CC CC_REGNUM))]
2725 "movs\\t%R0, %R1, lsr #1\;mov\\t%Q0, %Q1, rrx"
2726 [(set_attr "conds" "clob")
2727 (set_attr "length" "8")]
2730 (define_expand "lshrsi3"
2731 [(set (match_operand:SI 0 "s_register_operand" "")
2732 (lshiftrt:SI (match_operand:SI 1 "s_register_operand" "")
2733 (match_operand:SI 2 "arm_rhs_operand" "")))]
2736 if (GET_CODE (operands[2]) == CONST_INT
2737 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
2739 emit_insn (gen_movsi (operands[0], const0_rtx));
2745 (define_insn "*thumb_lshrsi3"
2746 [(set (match_operand:SI 0 "register_operand" "=l,l")
2747 (lshiftrt:SI (match_operand:SI 1 "register_operand" "l,0")
2748 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
2751 [(set_attr "length" "2")]
2754 (define_expand "rotlsi3"
2755 [(set (match_operand:SI 0 "s_register_operand" "")
2756 (rotatert:SI (match_operand:SI 1 "s_register_operand" "")
2757 (match_operand:SI 2 "reg_or_int_operand" "")))]
2760 if (GET_CODE (operands[2]) == CONST_INT)
2761 operands[2] = GEN_INT ((32 - INTVAL (operands[2])) % 32);
2764 rtx reg = gen_reg_rtx (SImode);
2765 emit_insn (gen_subsi3 (reg, GEN_INT (32), operands[2]));
2771 (define_expand "rotrsi3"
2772 [(set (match_operand:SI 0 "s_register_operand" "")
2773 (rotatert:SI (match_operand:SI 1 "s_register_operand" "")
2774 (match_operand:SI 2 "arm_rhs_operand" "")))]
2779 if (GET_CODE (operands[2]) == CONST_INT
2780 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
2781 operands[2] = GEN_INT (INTVAL (operands[2]) % 32);
2783 else /* TARGET_THUMB */
2785 if (GET_CODE (operands [2]) == CONST_INT)
2786 operands [2] = force_reg (SImode, operands[2]);
2791 (define_insn "*thumb_rotrsi3"
2792 [(set (match_operand:SI 0 "register_operand" "=l")
2793 (rotatert:SI (match_operand:SI 1 "register_operand" "0")
2794 (match_operand:SI 2 "register_operand" "l")))]
2797 [(set_attr "length" "2")]
2800 (define_insn "*arm_shiftsi3"
2801 [(set (match_operand:SI 0 "s_register_operand" "=r")
2802 (match_operator:SI 3 "shift_operator"
2803 [(match_operand:SI 1 "s_register_operand" "r")
2804 (match_operand:SI 2 "reg_or_int_operand" "rM")]))]
2807 [(set_attr "predicable" "yes")
2808 (set_attr "shift" "1")
2809 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
2810 (const_string "alu_shift")
2811 (const_string "alu_shift_reg")))]
2814 (define_insn "*shiftsi3_compare0"
2815 [(set (reg:CC_NOOV CC_REGNUM)
2816 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
2817 [(match_operand:SI 1 "s_register_operand" "r")
2818 (match_operand:SI 2 "arm_rhs_operand" "rM")])
2820 (set (match_operand:SI 0 "s_register_operand" "=r")
2821 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))]
2823 "mov%?s\\t%0, %1%S3"
2824 [(set_attr "conds" "set")
2825 (set_attr "shift" "1")
2826 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
2827 (const_string "alu_shift")
2828 (const_string "alu_shift_reg")))]
2831 (define_insn "*shiftsi3_compare0_scratch"
2832 [(set (reg:CC_NOOV CC_REGNUM)
2833 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
2834 [(match_operand:SI 1 "s_register_operand" "r")
2835 (match_operand:SI 2 "arm_rhs_operand" "rM")])
2837 (clobber (match_scratch:SI 0 "=r"))]
2839 "mov%?s\\t%0, %1%S3"
2840 [(set_attr "conds" "set")
2841 (set_attr "shift" "1")]
2844 (define_insn "*notsi_shiftsi"
2845 [(set (match_operand:SI 0 "s_register_operand" "=r")
2846 (not:SI (match_operator:SI 3 "shift_operator"
2847 [(match_operand:SI 1 "s_register_operand" "r")
2848 (match_operand:SI 2 "arm_rhs_operand" "rM")])))]
2851 [(set_attr "predicable" "yes")
2852 (set_attr "shift" "1")
2853 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
2854 (const_string "alu_shift")
2855 (const_string "alu_shift_reg")))]
2858 (define_insn "*notsi_shiftsi_compare0"
2859 [(set (reg:CC_NOOV CC_REGNUM)
2860 (compare:CC_NOOV (not:SI (match_operator:SI 3 "shift_operator"
2861 [(match_operand:SI 1 "s_register_operand" "r")
2862 (match_operand:SI 2 "arm_rhs_operand" "rM")]))
2864 (set (match_operand:SI 0 "s_register_operand" "=r")
2865 (not:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])))]
2867 "mvn%?s\\t%0, %1%S3"
2868 [(set_attr "conds" "set")
2869 (set_attr "shift" "1")
2870 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
2871 (const_string "alu_shift")
2872 (const_string "alu_shift_reg")))]
2875 (define_insn "*not_shiftsi_compare0_scratch"
2876 [(set (reg:CC_NOOV CC_REGNUM)
2877 (compare:CC_NOOV (not:SI (match_operator:SI 3 "shift_operator"
2878 [(match_operand:SI 1 "s_register_operand" "r")
2879 (match_operand:SI 2 "arm_rhs_operand" "rM")]))
2881 (clobber (match_scratch:SI 0 "=r"))]
2883 "mvn%?s\\t%0, %1%S3"
2884 [(set_attr "conds" "set")
2885 (set_attr "shift" "1")
2886 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
2887 (const_string "alu_shift")
2888 (const_string "alu_shift_reg")))]
2891 ;; We don't really have extzv, but defining this using shifts helps
2892 ;; to reduce register pressure later on.
2894 (define_expand "extzv"
2896 (ashift:SI (match_operand:SI 1 "register_operand" "")
2897 (match_operand:SI 2 "const_int_operand" "")))
2898 (set (match_operand:SI 0 "register_operand" "")
2899 (lshiftrt:SI (match_dup 4)
2900 (match_operand:SI 3 "const_int_operand" "")))]
2904 HOST_WIDE_INT lshift = 32 - INTVAL (operands[2]) - INTVAL (operands[3]);
2905 HOST_WIDE_INT rshift = 32 - INTVAL (operands[2]);
2907 operands[3] = GEN_INT (rshift);
2911 emit_insn (gen_lshrsi3 (operands[0], operands[1], operands[3]));
2915 operands[2] = GEN_INT (lshift);
2916 operands[4] = gen_reg_rtx (SImode);
2921 ;; Unary arithmetic insns
2923 (define_expand "negdi2"
2925 [(set (match_operand:DI 0 "s_register_operand" "")
2926 (neg:DI (match_operand:DI 1 "s_register_operand" "")))
2927 (clobber (reg:CC CC_REGNUM))])]
2932 if (GET_CODE (operands[1]) != REG)
2933 operands[1] = force_reg (SImode, operands[1]);
2938 ;; The constraints here are to prevent a *partial* overlap (where %Q0 == %R1).
2939 ;; The second alternative is to allow the common case of a *full* overlap.
2940 (define_insn "*arm_negdi2"
2941 [(set (match_operand:DI 0 "s_register_operand" "=&r,r")
2942 (neg:DI (match_operand:DI 1 "s_register_operand" "?r,0")))
2943 (clobber (reg:CC CC_REGNUM))]
2945 "rsbs\\t%Q0, %Q1, #0\;rsc\\t%R0, %R1, #0"
2946 [(set_attr "conds" "clob")
2947 (set_attr "length" "8")]
2950 (define_insn "*thumb_negdi2"
2951 [(set (match_operand:DI 0 "register_operand" "=&l")
2952 (neg:DI (match_operand:DI 1 "register_operand" "l")))
2953 (clobber (reg:CC CC_REGNUM))]
2955 "mov\\t%R0, #0\;neg\\t%Q0, %Q1\;sbc\\t%R0, %R1"
2956 [(set_attr "length" "6")]
2959 (define_expand "negsi2"
2960 [(set (match_operand:SI 0 "s_register_operand" "")
2961 (neg:SI (match_operand:SI 1 "s_register_operand" "")))]
2966 (define_insn "*arm_negsi2"
2967 [(set (match_operand:SI 0 "s_register_operand" "=r")
2968 (neg:SI (match_operand:SI 1 "s_register_operand" "r")))]
2970 "rsb%?\\t%0, %1, #0"
2971 [(set_attr "predicable" "yes")]
2974 (define_insn "*thumb_negsi2"
2975 [(set (match_operand:SI 0 "register_operand" "=l")
2976 (neg:SI (match_operand:SI 1 "register_operand" "l")))]
2979 [(set_attr "length" "2")]
2982 (define_expand "negsf2"
2983 [(set (match_operand:SF 0 "s_register_operand" "")
2984 (neg:SF (match_operand:SF 1 "s_register_operand" "")))]
2985 "TARGET_ARM && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
2989 (define_expand "negdf2"
2990 [(set (match_operand:DF 0 "s_register_operand" "")
2991 (neg:DF (match_operand:DF 1 "s_register_operand" "")))]
2992 "TARGET_ARM && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
2995 ;; abssi2 doesn't really clobber the condition codes if a different register
2996 ;; is being set. To keep things simple, assume during rtl manipulations that
2997 ;; it does, but tell the final scan operator the truth. Similarly for
3000 (define_expand "abssi2"
3002 [(set (match_operand:SI 0 "s_register_operand" "")
3003 (abs:SI (match_operand:SI 1 "s_register_operand" "")))
3004 (clobber (reg:CC CC_REGNUM))])]
3008 (define_insn "*arm_abssi2"
3009 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
3010 (abs:SI (match_operand:SI 1 "s_register_operand" "0,r")))
3011 (clobber (reg:CC CC_REGNUM))]
3014 cmp\\t%0, #0\;rsblt\\t%0, %0, #0
3015 eor%?\\t%0, %1, %1, asr #31\;sub%?\\t%0, %0, %1, asr #31"
3016 [(set_attr "conds" "clob,*")
3017 (set_attr "shift" "1")
3018 ;; predicable can't be set based on the variant, so left as no
3019 (set_attr "length" "8")]
3022 (define_insn "*neg_abssi2"
3023 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
3024 (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "0,r"))))
3025 (clobber (reg:CC CC_REGNUM))]
3028 cmp\\t%0, #0\;rsbgt\\t%0, %0, #0
3029 eor%?\\t%0, %1, %1, asr #31\;rsb%?\\t%0, %0, %1, asr #31"
3030 [(set_attr "conds" "clob,*")
3031 (set_attr "shift" "1")
3032 ;; predicable can't be set based on the variant, so left as no
3033 (set_attr "length" "8")]
3036 (define_expand "abssf2"
3037 [(set (match_operand:SF 0 "s_register_operand" "")
3038 (abs:SF (match_operand:SF 1 "s_register_operand" "")))]
3039 "TARGET_ARM && TARGET_HARD_FLOAT"
3042 (define_expand "absdf2"
3043 [(set (match_operand:DF 0 "s_register_operand" "")
3044 (abs:DF (match_operand:DF 1 "s_register_operand" "")))]
3045 "TARGET_ARM && TARGET_HARD_FLOAT"
3048 (define_expand "sqrtsf2"
3049 [(set (match_operand:SF 0 "s_register_operand" "")
3050 (sqrt:SF (match_operand:SF 1 "s_register_operand" "")))]
3051 "TARGET_ARM && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
3054 (define_expand "sqrtdf2"
3055 [(set (match_operand:DF 0 "s_register_operand" "")
3056 (sqrt:DF (match_operand:DF 1 "s_register_operand" "")))]
3057 "TARGET_ARM && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
3060 (define_insn_and_split "one_cmpldi2"
3061 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
3062 (not:DI (match_operand:DI 1 "s_register_operand" "?r,0")))]
3065 "TARGET_ARM && reload_completed"
3066 [(set (match_dup 0) (not:SI (match_dup 1)))
3067 (set (match_dup 2) (not:SI (match_dup 3)))]
3070 operands[2] = gen_highpart (SImode, operands[0]);
3071 operands[0] = gen_lowpart (SImode, operands[0]);
3072 operands[3] = gen_highpart (SImode, operands[1]);
3073 operands[1] = gen_lowpart (SImode, operands[1]);
3075 [(set_attr "length" "8")
3076 (set_attr "predicable" "yes")]
3079 (define_expand "one_cmplsi2"
3080 [(set (match_operand:SI 0 "s_register_operand" "")
3081 (not:SI (match_operand:SI 1 "s_register_operand" "")))]
3086 (define_insn "*arm_one_cmplsi2"
3087 [(set (match_operand:SI 0 "s_register_operand" "=r")
3088 (not:SI (match_operand:SI 1 "s_register_operand" "r")))]
3091 [(set_attr "predicable" "yes")]
3094 (define_insn "*thumb_one_cmplsi2"
3095 [(set (match_operand:SI 0 "register_operand" "=l")
3096 (not:SI (match_operand:SI 1 "register_operand" "l")))]
3099 [(set_attr "length" "2")]
3102 (define_insn "*notsi_compare0"
3103 [(set (reg:CC_NOOV CC_REGNUM)
3104 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
3106 (set (match_operand:SI 0 "s_register_operand" "=r")
3107 (not:SI (match_dup 1)))]
3110 [(set_attr "conds" "set")]
3113 (define_insn "*notsi_compare0_scratch"
3114 [(set (reg:CC_NOOV CC_REGNUM)
3115 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
3117 (clobber (match_scratch:SI 0 "=r"))]
3120 [(set_attr "conds" "set")]
3123 ;; Fixed <--> Floating conversion insns
3125 (define_expand "floatsisf2"
3126 [(set (match_operand:SF 0 "s_register_operand" "")
3127 (float:SF (match_operand:SI 1 "s_register_operand" "")))]
3128 "TARGET_ARM && TARGET_HARD_FLOAT"
3130 if (TARGET_MAVERICK)
3132 emit_insn (gen_cirrus_floatsisf2 (operands[0], operands[1]));
3137 (define_expand "floatsidf2"
3138 [(set (match_operand:DF 0 "s_register_operand" "")
3139 (float:DF (match_operand:SI 1 "s_register_operand" "")))]
3140 "TARGET_ARM && TARGET_HARD_FLOAT"
3142 if (TARGET_MAVERICK)
3144 emit_insn (gen_cirrus_floatsidf2 (operands[0], operands[1]));
3149 (define_expand "fix_truncsfsi2"
3150 [(set (match_operand:SI 0 "s_register_operand" "")
3151 (fix:SI (fix:SF (match_operand:SF 1 "s_register_operand" ""))))]
3152 "TARGET_ARM && TARGET_HARD_FLOAT"
3154 if (TARGET_MAVERICK)
3156 if (!cirrus_fp_register (operands[0], SImode))
3157 operands[0] = force_reg (SImode, operands[0]);
3158 if (!cirrus_fp_register (operands[1], SFmode))
3159 operands[1] = force_reg (SFmode, operands[0]);
3160 emit_insn (gen_cirrus_truncsfsi2 (operands[0], operands[1]));
3165 (define_expand "fix_truncdfsi2"
3166 [(set (match_operand:SI 0 "s_register_operand" "")
3167 (fix:SI (fix:DF (match_operand:DF 1 "s_register_operand" ""))))]
3168 "TARGET_ARM && TARGET_HARD_FLOAT"
3170 if (TARGET_MAVERICK)
3172 if (!cirrus_fp_register (operands[1], DFmode))
3173 operands[1] = force_reg (DFmode, operands[0]);
3174 emit_insn (gen_cirrus_truncdfsi2 (operands[0], operands[1]));
3181 (define_expand "truncdfsf2"
3182 [(set (match_operand:SF 0 "s_register_operand" "")
3184 (match_operand:DF 1 "s_register_operand" "")))]
3185 "TARGET_ARM && TARGET_HARD_FLOAT"
3189 ;; Zero and sign extension instructions.
3191 (define_insn "zero_extendsidi2"
3192 [(set (match_operand:DI 0 "s_register_operand" "=r")
3193 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r")))]
3196 if (REGNO (operands[1])
3197 != REGNO (operands[0]) + (WORDS_BIG_ENDIAN ? 1 : 0))
3198 output_asm_insn (\"mov%?\\t%Q0, %1\", operands);
3199 return \"mov%?\\t%R0, #0\";
3201 [(set_attr "length" "8")
3202 (set_attr "predicable" "yes")]
3205 (define_insn "zero_extendqidi2"
3206 [(set (match_operand:DI 0 "s_register_operand" "=r,r")
3207 (zero_extend:DI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
3210 and%?\\t%Q0, %1, #255\;mov%?\\t%R0, #0
3211 ldr%?b\\t%Q0, %1\;mov%?\\t%R0, #0"
3212 [(set_attr "length" "8")
3213 (set_attr "predicable" "yes")
3214 (set_attr "type" "*,load_byte")
3215 (set_attr "pool_range" "*,4092")
3216 (set_attr "neg_pool_range" "*,4084")]
3219 (define_insn "extendsidi2"
3220 [(set (match_operand:DI 0 "s_register_operand" "=r")
3221 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r")))]
3224 if (REGNO (operands[1])
3225 != REGNO (operands[0]) + (WORDS_BIG_ENDIAN ? 1 : 0))
3226 output_asm_insn (\"mov%?\\t%Q0, %1\", operands);
3227 return \"mov%?\\t%R0, %Q0, asr #31\";
3229 [(set_attr "length" "8")
3230 (set_attr "shift" "1")
3231 (set_attr "predicable" "yes")]
3234 (define_expand "zero_extendhisi2"
3236 (ashift:SI (match_operand:HI 1 "nonimmediate_operand" "")
3238 (set (match_operand:SI 0 "s_register_operand" "")
3239 (lshiftrt:SI (match_dup 2) (const_int 16)))]
3243 if ((TARGET_THUMB || arm_arch4) && GET_CODE (operands[1]) == MEM)
3245 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
3246 gen_rtx_ZERO_EXTEND (SImode, operands[1])));
3250 if (TARGET_ARM && GET_CODE (operands[1]) == MEM)
3252 emit_insn (gen_movhi_bytes (operands[0], operands[1]));
3256 if (!s_register_operand (operands[1], HImode))
3257 operands[1] = copy_to_mode_reg (HImode, operands[1]);
3261 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
3262 gen_rtx_ZERO_EXTEND (SImode, operands[1])));
3266 operands[1] = gen_lowpart (SImode, operands[1]);
3267 operands[2] = gen_reg_rtx (SImode);
3271 (define_insn "*thumb_zero_extendhisi2"
3272 [(set (match_operand:SI 0 "register_operand" "=l")
3273 (zero_extend:SI (match_operand:HI 1 "memory_operand" "m")))]
3274 "TARGET_THUMB && !arm_arch6"
3276 rtx mem = XEXP (operands[1], 0);
3278 if (GET_CODE (mem) == CONST)
3279 mem = XEXP (mem, 0);
3281 if (GET_CODE (mem) == LABEL_REF)
3282 return \"ldr\\t%0, %1\";
3284 if (GET_CODE (mem) == PLUS)
3286 rtx a = XEXP (mem, 0);
3287 rtx b = XEXP (mem, 1);
3289 /* This can happen due to bugs in reload. */
3290 if (GET_CODE (a) == REG && REGNO (a) == SP_REGNUM)
3293 ops[0] = operands[0];
3296 output_asm_insn (\"mov %0, %1\", ops);
3298 XEXP (mem, 0) = operands[0];
3301 else if ( GET_CODE (a) == LABEL_REF
3302 && GET_CODE (b) == CONST_INT)
3303 return \"ldr\\t%0, %1\";
3306 return \"ldrh\\t%0, %1\";
3308 [(set_attr "length" "4")
3309 (set_attr "type" "load_byte")
3310 (set_attr "pool_range" "60")]
3313 (define_insn "*thumb_zero_extendhisi2_v6"
3314 [(set (match_operand:SI 0 "register_operand" "=l,l")
3315 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "l,m")))]
3316 "TARGET_THUMB && arm_arch6"
3320 if (which_alternative == 0)
3321 return \"uxth\\t%0, %1\";
3323 mem = XEXP (operands[1], 0);
3325 if (GET_CODE (mem) == CONST)
3326 mem = XEXP (mem, 0);
3328 if (GET_CODE (mem) == LABEL_REF)
3329 return \"ldr\\t%0, %1\";
3331 if (GET_CODE (mem) == PLUS)
3333 rtx a = XEXP (mem, 0);
3334 rtx b = XEXP (mem, 1);
3336 /* This can happen due to bugs in reload. */
3337 if (GET_CODE (a) == REG && REGNO (a) == SP_REGNUM)
3340 ops[0] = operands[0];
3343 output_asm_insn (\"mov %0, %1\", ops);
3345 XEXP (mem, 0) = operands[0];
3348 else if ( GET_CODE (a) == LABEL_REF
3349 && GET_CODE (b) == CONST_INT)
3350 return \"ldr\\t%0, %1\";
3353 return \"ldrh\\t%0, %1\";
3355 [(set_attr "length" "2,4")
3356 (set_attr "type" "alu_shift,load_byte")
3357 (set_attr "pool_range" "*,60")]
3360 (define_insn "*arm_zero_extendhisi2"
3361 [(set (match_operand:SI 0 "s_register_operand" "=r")
3362 (zero_extend:SI (match_operand:HI 1 "memory_operand" "m")))]
3363 "TARGET_ARM && arm_arch4 && !arm_arch6"
3365 [(set_attr "type" "load_byte")
3366 (set_attr "predicable" "yes")
3367 (set_attr "pool_range" "256")
3368 (set_attr "neg_pool_range" "244")]
3371 (define_insn "*arm_zero_extendhisi2_v6"
3372 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3373 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
3374 "TARGET_ARM && arm_arch6"
3378 [(set_attr "type" "alu_shift,load_byte")
3379 (set_attr "predicable" "yes")
3380 (set_attr "pool_range" "*,256")
3381 (set_attr "neg_pool_range" "*,244")]
3384 (define_insn "*arm_zero_extendhisi2addsi"
3385 [(set (match_operand:SI 0 "s_register_operand" "=r")
3386 (plus:SI (zero_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
3387 (match_operand:SI 2 "s_register_operand" "r")))]
3388 "TARGET_ARM && arm_arch6"
3389 "uxtah%?\\t%0, %2, %1"
3390 [(set_attr "type" "alu_shift")
3391 (set_attr "predicable" "yes")]
3394 (define_expand "zero_extendqisi2"
3395 [(set (match_operand:SI 0 "s_register_operand" "")
3396 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "")))]
3399 if (!arm_arch6 && GET_CODE (operands[1]) != MEM)
3403 emit_insn (gen_andsi3 (operands[0],
3404 gen_lowpart (SImode, operands[1]),
3407 else /* TARGET_THUMB */
3409 rtx temp = gen_reg_rtx (SImode);
3412 operands[1] = copy_to_mode_reg (QImode, operands[1]);
3413 operands[1] = gen_lowpart (SImode, operands[1]);
3416 ops[1] = operands[1];
3417 ops[2] = GEN_INT (24);
3419 emit_insn (gen_rtx_SET (VOIDmode, ops[0],
3420 gen_rtx_ASHIFT (SImode, ops[1], ops[2])));
3422 ops[0] = operands[0];
3424 ops[2] = GEN_INT (24);
3426 emit_insn (gen_rtx_SET (VOIDmode, ops[0],
3427 gen_rtx_LSHIFTRT (SImode, ops[1], ops[2])));
3434 (define_insn "*thumb_zero_extendqisi2"
3435 [(set (match_operand:SI 0 "register_operand" "=l")
3436 (zero_extend:SI (match_operand:QI 1 "memory_operand" "m")))]
3437 "TARGET_THUMB && !arm_arch6"
3439 [(set_attr "length" "2")
3440 (set_attr "type" "load_byte")
3441 (set_attr "pool_range" "32")]
3444 (define_insn "*thumb_zero_extendqisi2_v6"
3445 [(set (match_operand:SI 0 "register_operand" "=l,l")
3446 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,m")))]
3447 "TARGET_THUMB && arm_arch6"
3451 [(set_attr "length" "2,2")
3452 (set_attr "type" "alu_shift,load_byte")
3453 (set_attr "pool_range" "*,32")]
3456 (define_insn "*arm_zero_extendqisi2"
3457 [(set (match_operand:SI 0 "s_register_operand" "=r")
3458 (zero_extend:SI (match_operand:QI 1 "memory_operand" "m")))]
3459 "TARGET_ARM && !arm_arch6"
3460 "ldr%?b\\t%0, %1\\t%@ zero_extendqisi2"
3461 [(set_attr "type" "load_byte")
3462 (set_attr "predicable" "yes")
3463 (set_attr "pool_range" "4096")
3464 (set_attr "neg_pool_range" "4084")]
3467 (define_insn "*arm_zero_extendqisi2_v6"
3468 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3469 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
3470 "TARGET_ARM && arm_arch6"
3473 ldr%?b\\t%0, %1\\t%@ zero_extendqisi2"
3474 [(set_attr "type" "alu_shift,load_byte")
3475 (set_attr "predicable" "yes")
3476 (set_attr "pool_range" "*,4096")
3477 (set_attr "neg_pool_range" "*,4084")]
3480 (define_insn "*arm_zero_extendqisi2addsi"
3481 [(set (match_operand:SI 0 "s_register_operand" "=r")
3482 (plus:SI (zero_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
3483 (match_operand:SI 2 "s_register_operand" "r")))]
3484 "TARGET_ARM && arm_arch6"
3485 "uxtab%?\\t%0, %2, %1"
3486 [(set_attr "predicable" "yes")
3487 (set_attr "type" "alu_shift")]
3491 [(set (match_operand:SI 0 "s_register_operand" "")
3492 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 0)))
3493 (clobber (match_operand:SI 2 "s_register_operand" ""))]
3494 "TARGET_ARM && (GET_CODE (operands[1]) != MEM) && ! BYTES_BIG_ENDIAN"
3495 [(set (match_dup 2) (match_dup 1))
3496 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
3500 (define_insn "*compareqi_eq0"
3501 [(set (reg:CC_Z CC_REGNUM)
3502 (compare:CC_Z (match_operand:QI 0 "s_register_operand" "r")
3506 [(set_attr "conds" "set")]
3509 (define_expand "extendhisi2"
3511 (ashift:SI (match_operand:HI 1 "nonimmediate_operand" "")
3513 (set (match_operand:SI 0 "s_register_operand" "")
3514 (ashiftrt:SI (match_dup 2)
3519 if (GET_CODE (operands[1]) == MEM)
3523 emit_insn (gen_thumb_extendhisi2 (operands[0], operands[1]));
3528 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
3529 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
3534 if (TARGET_ARM && GET_CODE (operands[1]) == MEM)
3536 emit_insn (gen_extendhisi2_mem (operands[0], operands[1]));
3540 if (!s_register_operand (operands[1], HImode))
3541 operands[1] = copy_to_mode_reg (HImode, operands[1]);
3546 emit_insn (gen_thumb_extendhisi2 (operands[0], operands[1]));
3548 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
3549 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
3554 operands[1] = gen_lowpart (SImode, operands[1]);
3555 operands[2] = gen_reg_rtx (SImode);
3559 (define_insn "thumb_extendhisi2"
3560 [(set (match_operand:SI 0 "register_operand" "=l")
3561 (sign_extend:SI (match_operand:HI 1 "memory_operand" "m")))
3562 (clobber (match_scratch:SI 2 "=&l"))]
3563 "TARGET_THUMB && !arm_arch6"
3567 rtx mem = XEXP (operands[1], 0);
3569 /* This code used to try to use 'V', and fix the address only if it was
3570 offsettable, but this fails for e.g. REG+48 because 48 is outside the
3571 range of QImode offsets, and offsettable_address_p does a QImode
3574 if (GET_CODE (mem) == CONST)
3575 mem = XEXP (mem, 0);
3577 if (GET_CODE (mem) == LABEL_REF)
3578 return \"ldr\\t%0, %1\";
3580 if (GET_CODE (mem) == PLUS)
3582 rtx a = XEXP (mem, 0);
3583 rtx b = XEXP (mem, 1);
3585 if (GET_CODE (a) == LABEL_REF
3586 && GET_CODE (b) == CONST_INT)
3587 return \"ldr\\t%0, %1\";
3589 if (GET_CODE (b) == REG)
3590 return \"ldrsh\\t%0, %1\";
3598 ops[2] = const0_rtx;
3601 gcc_assert (GET_CODE (ops[1]) == REG);
3603 ops[0] = operands[0];
3604 ops[3] = operands[2];
3605 output_asm_insn (\"mov\\t%3, %2\;ldrsh\\t%0, [%1, %3]\", ops);
3608 [(set_attr "length" "4")
3609 (set_attr "type" "load_byte")
3610 (set_attr "pool_range" "1020")]
3613 ;; We used to have an early-clobber on the scratch register here.
3614 ;; However, there's a bug somewhere in reload which means that this
3615 ;; can be partially ignored during spill allocation if the memory
3616 ;; address also needs reloading; this causes us to die later on when
3617 ;; we try to verify the operands. Fortunately, we don't really need
3618 ;; the early-clobber: we can always use operand 0 if operand 2
3619 ;; overlaps the address.
3620 (define_insn "*thumb_extendhisi2_insn_v6"
3621 [(set (match_operand:SI 0 "register_operand" "=l,l")
3622 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "l,m")))
3623 (clobber (match_scratch:SI 2 "=X,l"))]
3624 "TARGET_THUMB && arm_arch6"
3630 if (which_alternative == 0)
3631 return \"sxth\\t%0, %1\";
3633 mem = XEXP (operands[1], 0);
3635 /* This code used to try to use 'V', and fix the address only if it was
3636 offsettable, but this fails for e.g. REG+48 because 48 is outside the
3637 range of QImode offsets, and offsettable_address_p does a QImode
3640 if (GET_CODE (mem) == CONST)
3641 mem = XEXP (mem, 0);
3643 if (GET_CODE (mem) == LABEL_REF)
3644 return \"ldr\\t%0, %1\";
3646 if (GET_CODE (mem) == PLUS)
3648 rtx a = XEXP (mem, 0);
3649 rtx b = XEXP (mem, 1);
3651 if (GET_CODE (a) == LABEL_REF
3652 && GET_CODE (b) == CONST_INT)
3653 return \"ldr\\t%0, %1\";
3655 if (GET_CODE (b) == REG)
3656 return \"ldrsh\\t%0, %1\";
3664 ops[2] = const0_rtx;
3667 gcc_assert (GET_CODE (ops[1]) == REG);
3669 ops[0] = operands[0];
3670 if (reg_mentioned_p (operands[2], ops[1]))
3673 ops[3] = operands[2];
3674 output_asm_insn (\"mov\\t%3, %2\;ldrsh\\t%0, [%1, %3]\", ops);
3677 [(set_attr "length" "2,4")
3678 (set_attr "type" "alu_shift,load_byte")
3679 (set_attr "pool_range" "*,1020")]
3682 (define_expand "extendhisi2_mem"
3683 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
3685 (zero_extend:SI (match_dup 7)))
3686 (set (match_dup 6) (ashift:SI (match_dup 4) (const_int 24)))
3687 (set (match_operand:SI 0 "" "")
3688 (ior:SI (ashiftrt:SI (match_dup 6) (const_int 16)) (match_dup 5)))]
3693 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
3695 mem1 = change_address (operands[1], QImode, addr);
3696 mem2 = change_address (operands[1], QImode, plus_constant (addr, 1));
3697 operands[0] = gen_lowpart (SImode, operands[0]);
3699 operands[2] = gen_reg_rtx (SImode);
3700 operands[3] = gen_reg_rtx (SImode);
3701 operands[6] = gen_reg_rtx (SImode);
3704 if (BYTES_BIG_ENDIAN)
3706 operands[4] = operands[2];
3707 operands[5] = operands[3];
3711 operands[4] = operands[3];
3712 operands[5] = operands[2];
3717 (define_insn "*arm_extendhisi2"
3718 [(set (match_operand:SI 0 "s_register_operand" "=r")
3719 (sign_extend:SI (match_operand:HI 1 "memory_operand" "m")))]
3720 "TARGET_ARM && arm_arch4 && !arm_arch6"
3722 [(set_attr "type" "load_byte")
3723 (set_attr "predicable" "yes")
3724 (set_attr "pool_range" "256")
3725 (set_attr "neg_pool_range" "244")]
3728 (define_insn "*arm_extendhisi2_v6"
3729 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3730 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
3731 "TARGET_ARM && arm_arch6"
3735 [(set_attr "type" "alu_shift,load_byte")
3736 (set_attr "predicable" "yes")
3737 (set_attr "pool_range" "*,256")
3738 (set_attr "neg_pool_range" "*,244")]
3741 (define_insn "*arm_extendhisi2addsi"
3742 [(set (match_operand:SI 0 "s_register_operand" "=r")
3743 (plus:SI (sign_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
3744 (match_operand:SI 2 "s_register_operand" "r")))]
3745 "TARGET_ARM && arm_arch6"
3746 "sxtah%?\\t%0, %2, %1"
3749 (define_expand "extendqihi2"
3751 (ashift:SI (match_operand:QI 1 "general_operand" "")
3753 (set (match_operand:HI 0 "s_register_operand" "")
3754 (ashiftrt:SI (match_dup 2)
3759 if (arm_arch4 && GET_CODE (operands[1]) == MEM)
3761 emit_insn (gen_rtx_SET (VOIDmode,
3763 gen_rtx_SIGN_EXTEND (HImode, operands[1])));
3766 if (!s_register_operand (operands[1], QImode))
3767 operands[1] = copy_to_mode_reg (QImode, operands[1]);
3768 operands[0] = gen_lowpart (SImode, operands[0]);
3769 operands[1] = gen_lowpart (SImode, operands[1]);
3770 operands[2] = gen_reg_rtx (SImode);
3774 (define_insn "*extendqihi_insn"
3775 [(set (match_operand:HI 0 "s_register_operand" "=r")
3776 (sign_extend:HI (match_operand:QI 1 "memory_operand" "Uq")))]
3777 "TARGET_ARM && arm_arch4"
3779 [(set_attr "type" "load_byte")
3780 (set_attr "predicable" "yes")
3781 (set_attr "pool_range" "256")
3782 (set_attr "neg_pool_range" "244")]
3785 (define_expand "extendqisi2"
3787 (ashift:SI (match_operand:QI 1 "general_operand" "")
3789 (set (match_operand:SI 0 "s_register_operand" "")
3790 (ashiftrt:SI (match_dup 2)
3795 if ((TARGET_THUMB || arm_arch4) && GET_CODE (operands[1]) == MEM)
3797 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
3798 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
3802 if (!s_register_operand (operands[1], QImode))
3803 operands[1] = copy_to_mode_reg (QImode, operands[1]);
3807 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
3808 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
3812 operands[1] = gen_lowpart (SImode, operands[1]);
3813 operands[2] = gen_reg_rtx (SImode);
3817 (define_insn "*arm_extendqisi"
3818 [(set (match_operand:SI 0 "s_register_operand" "=r")
3819 (sign_extend:SI (match_operand:QI 1 "memory_operand" "Uq")))]
3820 "TARGET_ARM && arm_arch4 && !arm_arch6"
3822 [(set_attr "type" "load_byte")
3823 (set_attr "predicable" "yes")
3824 (set_attr "pool_range" "256")
3825 (set_attr "neg_pool_range" "244")]
3828 (define_insn "*arm_extendqisi_v6"
3829 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3830 (sign_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,Uq")))]
3831 "TARGET_ARM && arm_arch6"
3835 [(set_attr "type" "alu_shift,load_byte")
3836 (set_attr "predicable" "yes")
3837 (set_attr "pool_range" "*,256")
3838 (set_attr "neg_pool_range" "*,244")]
3841 (define_insn "*arm_extendqisi2addsi"
3842 [(set (match_operand:SI 0 "s_register_operand" "=r")
3843 (plus:SI (sign_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
3844 (match_operand:SI 2 "s_register_operand" "r")))]
3845 "TARGET_ARM && arm_arch6"
3846 "sxtab%?\\t%0, %2, %1"
3847 [(set_attr "type" "alu_shift")
3848 (set_attr "predicable" "yes")]
3851 (define_insn "*thumb_extendqisi2"
3852 [(set (match_operand:SI 0 "register_operand" "=l,l")
3853 (sign_extend:SI (match_operand:QI 1 "memory_operand" "V,m")))]
3854 "TARGET_THUMB && !arm_arch6"
3858 rtx mem = XEXP (operands[1], 0);
3860 if (GET_CODE (mem) == CONST)
3861 mem = XEXP (mem, 0);
3863 if (GET_CODE (mem) == LABEL_REF)
3864 return \"ldr\\t%0, %1\";
3866 if (GET_CODE (mem) == PLUS
3867 && GET_CODE (XEXP (mem, 0)) == LABEL_REF)
3868 return \"ldr\\t%0, %1\";
3870 if (which_alternative == 0)
3871 return \"ldrsb\\t%0, %1\";
3873 ops[0] = operands[0];
3875 if (GET_CODE (mem) == PLUS)
3877 rtx a = XEXP (mem, 0);
3878 rtx b = XEXP (mem, 1);
3883 if (GET_CODE (a) == REG)
3885 if (GET_CODE (b) == REG)
3886 output_asm_insn (\"ldrsb\\t%0, [%1, %2]\", ops);
3887 else if (REGNO (a) == REGNO (ops[0]))
3889 output_asm_insn (\"ldrb\\t%0, [%1, %2]\", ops);
3890 output_asm_insn (\"lsl\\t%0, %0, #24\", ops);
3891 output_asm_insn (\"asr\\t%0, %0, #24\", ops);
3894 output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
3898 gcc_assert (GET_CODE (b) == REG);
3899 if (REGNO (b) == REGNO (ops[0]))
3901 output_asm_insn (\"ldrb\\t%0, [%2, %1]\", ops);
3902 output_asm_insn (\"lsl\\t%0, %0, #24\", ops);
3903 output_asm_insn (\"asr\\t%0, %0, #24\", ops);
3906 output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
3909 else if (GET_CODE (mem) == REG && REGNO (ops[0]) == REGNO (mem))
3911 output_asm_insn (\"ldrb\\t%0, [%0, #0]\", ops);
3912 output_asm_insn (\"lsl\\t%0, %0, #24\", ops);
3913 output_asm_insn (\"asr\\t%0, %0, #24\", ops);
3918 ops[2] = const0_rtx;
3920 output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
3924 [(set_attr "length" "2,6")
3925 (set_attr "type" "load_byte,load_byte")
3926 (set_attr "pool_range" "32,32")]
3929 (define_insn "*thumb_extendqisi2_v6"
3930 [(set (match_operand:SI 0 "register_operand" "=l,l,l")
3931 (sign_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,V,m")))]
3932 "TARGET_THUMB && arm_arch6"
3938 if (which_alternative == 0)
3939 return \"sxtb\\t%0, %1\";
3941 mem = XEXP (operands[1], 0);
3943 if (GET_CODE (mem) == CONST)
3944 mem = XEXP (mem, 0);
3946 if (GET_CODE (mem) == LABEL_REF)
3947 return \"ldr\\t%0, %1\";
3949 if (GET_CODE (mem) == PLUS
3950 && GET_CODE (XEXP (mem, 0)) == LABEL_REF)
3951 return \"ldr\\t%0, %1\";
3953 if (which_alternative == 0)
3954 return \"ldrsb\\t%0, %1\";
3956 ops[0] = operands[0];
3958 if (GET_CODE (mem) == PLUS)
3960 rtx a = XEXP (mem, 0);
3961 rtx b = XEXP (mem, 1);
3966 if (GET_CODE (a) == REG)
3968 if (GET_CODE (b) == REG)
3969 output_asm_insn (\"ldrsb\\t%0, [%1, %2]\", ops);
3970 else if (REGNO (a) == REGNO (ops[0]))
3972 output_asm_insn (\"ldrb\\t%0, [%1, %2]\", ops);
3973 output_asm_insn (\"sxtb\\t%0, %0\", ops);
3976 output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
3980 gcc_assert (GET_CODE (b) == REG);
3981 if (REGNO (b) == REGNO (ops[0]))
3983 output_asm_insn (\"ldrb\\t%0, [%2, %1]\", ops);
3984 output_asm_insn (\"sxtb\\t%0, %0\", ops);
3987 output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
3990 else if (GET_CODE (mem) == REG && REGNO (ops[0]) == REGNO (mem))
3992 output_asm_insn (\"ldrb\\t%0, [%0, #0]\", ops);
3993 output_asm_insn (\"sxtb\\t%0, %0\", ops);
3998 ops[2] = const0_rtx;
4000 output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
4004 [(set_attr "length" "2,2,4")
4005 (set_attr "type" "alu_shift,load_byte,load_byte")
4006 (set_attr "pool_range" "*,32,32")]
4009 (define_expand "extendsfdf2"
4010 [(set (match_operand:DF 0 "s_register_operand" "")
4011 (float_extend:DF (match_operand:SF 1 "s_register_operand" "")))]
4012 "TARGET_ARM && TARGET_HARD_FLOAT"
4016 ;; Move insns (including loads and stores)
4018 ;; XXX Just some ideas about movti.
4019 ;; I don't think these are a good idea on the arm, there just aren't enough
4021 ;;(define_expand "loadti"
4022 ;; [(set (match_operand:TI 0 "s_register_operand" "")
4023 ;; (mem:TI (match_operand:SI 1 "address_operand" "")))]
4026 ;;(define_expand "storeti"
4027 ;; [(set (mem:TI (match_operand:TI 0 "address_operand" ""))
4028 ;; (match_operand:TI 1 "s_register_operand" ""))]
4031 ;;(define_expand "movti"
4032 ;; [(set (match_operand:TI 0 "general_operand" "")
4033 ;; (match_operand:TI 1 "general_operand" ""))]
4039 ;; if (GET_CODE (operands[0]) == MEM && GET_CODE (operands[1]) == MEM)
4040 ;; operands[1] = copy_to_reg (operands[1]);
4041 ;; if (GET_CODE (operands[0]) == MEM)
4042 ;; insn = gen_storeti (XEXP (operands[0], 0), operands[1]);
4043 ;; else if (GET_CODE (operands[1]) == MEM)
4044 ;; insn = gen_loadti (operands[0], XEXP (operands[1], 0));
4048 ;; emit_insn (insn);
4052 ;; Recognize garbage generated above.
4055 ;; [(set (match_operand:TI 0 "general_operand" "=r,r,r,<,>,m")
4056 ;; (match_operand:TI 1 "general_operand" "<,>,m,r,r,r"))]
4060 ;; register mem = (which_alternative < 3);
4061 ;; register const char *template;
4063 ;; operands[mem] = XEXP (operands[mem], 0);
4064 ;; switch (which_alternative)
4066 ;; case 0: template = \"ldmdb\\t%1!, %M0\"; break;
4067 ;; case 1: template = \"ldmia\\t%1!, %M0\"; break;
4068 ;; case 2: template = \"ldmia\\t%1, %M0\"; break;
4069 ;; case 3: template = \"stmdb\\t%0!, %M1\"; break;
4070 ;; case 4: template = \"stmia\\t%0!, %M1\"; break;
4071 ;; case 5: template = \"stmia\\t%0, %M1\"; break;
4073 ;; output_asm_insn (template, operands);
4077 (define_expand "movdi"
4078 [(set (match_operand:DI 0 "general_operand" "")
4079 (match_operand:DI 1 "general_operand" ""))]
4084 if (!no_new_pseudos)
4086 if (GET_CODE (operands[0]) != REG)
4087 operands[1] = force_reg (DImode, operands[1]);
4093 (define_insn "*arm_movdi"
4094 [(set (match_operand:DI 0 "nonimmediate_di_operand" "=r, r, r, r, m")
4095 (match_operand:DI 1 "di_operand" "rDa,Db,Dc,mi,r"))]
4097 && !(TARGET_HARD_FLOAT && (TARGET_MAVERICK || TARGET_VFP))
4100 switch (which_alternative)
4107 return output_move_double (operands);
4110 [(set_attr "length" "8,12,16,8,8")
4111 (set_attr "type" "*,*,*,load2,store2")
4112 (set_attr "pool_range" "*,*,*,1020,*")
4113 (set_attr "neg_pool_range" "*,*,*,1008,*")]
4117 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
4118 (match_operand:ANY64 1 "const_double_operand" ""))]
4121 && (arm_const_double_inline_cost (operands[1])
4122 <= ((optimize_size || arm_ld_sched) ? 3 : 4))"
4125 arm_split_constant (SET, SImode, curr_insn,
4126 INTVAL (gen_lowpart (SImode, operands[1])),
4127 gen_lowpart (SImode, operands[0]), NULL_RTX, 0);
4128 arm_split_constant (SET, SImode, curr_insn,
4129 INTVAL (gen_highpart_mode (SImode,
4130 GET_MODE (operands[0]),
4132 gen_highpart (SImode, operands[0]), NULL_RTX, 0);
4137 ; If optimizing for size, or if we have load delay slots, then
4138 ; we want to split the constant into two separate operations.
4139 ; In both cases this may split a trivial part into a single data op
4140 ; leaving a single complex constant to load. We can also get longer
4141 ; offsets in a LDR which means we get better chances of sharing the pool
4142 ; entries. Finally, we can normally do a better job of scheduling
4143 ; LDR instructions than we can with LDM.
4144 ; This pattern will only match if the one above did not.
4146 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
4147 (match_operand:ANY64 1 "const_double_operand" ""))]
4148 "TARGET_ARM && reload_completed
4149 && arm_const_double_by_parts (operands[1])"
4150 [(set (match_dup 0) (match_dup 1))
4151 (set (match_dup 2) (match_dup 3))]
4153 operands[2] = gen_highpart (SImode, operands[0]);
4154 operands[3] = gen_highpart_mode (SImode, GET_MODE (operands[0]),
4156 operands[0] = gen_lowpart (SImode, operands[0]);
4157 operands[1] = gen_lowpart (SImode, operands[1]);
4162 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
4163 (match_operand:ANY64 1 "arm_general_register_operand" ""))]
4164 "TARGET_EITHER && reload_completed"
4165 [(set (match_dup 0) (match_dup 1))
4166 (set (match_dup 2) (match_dup 3))]
4168 operands[2] = gen_highpart (SImode, operands[0]);
4169 operands[3] = gen_highpart (SImode, operands[1]);
4170 operands[0] = gen_lowpart (SImode, operands[0]);
4171 operands[1] = gen_lowpart (SImode, operands[1]);
4173 /* Handle a partial overlap. */
4174 if (rtx_equal_p (operands[0], operands[3]))
4176 rtx tmp0 = operands[0];
4177 rtx tmp1 = operands[1];
4179 operands[0] = operands[2];
4180 operands[1] = operands[3];
4187 ;; We can't actually do base+index doubleword loads if the index and
4188 ;; destination overlap. Split here so that we at least have chance to
4191 [(set (match_operand:DI 0 "s_register_operand" "")
4192 (mem:DI (plus:SI (match_operand:SI 1 "s_register_operand" "")
4193 (match_operand:SI 2 "s_register_operand" ""))))]
4195 && reg_overlap_mentioned_p (operands[0], operands[1])
4196 && reg_overlap_mentioned_p (operands[0], operands[2])"
4198 (plus:SI (match_dup 1)
4201 (mem:DI (match_dup 4)))]
4203 operands[4] = gen_rtx_REG (SImode, REGNO(operands[0]));
4207 ;;; ??? This should have alternatives for constants.
4208 ;;; ??? This was originally identical to the movdf_insn pattern.
4209 ;;; ??? The 'i' constraint looks funny, but it should always be replaced by
4210 ;;; thumb_reorg with a memory reference.
4211 (define_insn "*thumb_movdi_insn"
4212 [(set (match_operand:DI 0 "nonimmediate_operand" "=l,l,l,l,>,l, m,*r")
4213 (match_operand:DI 1 "general_operand" "l, I,J,>,l,mi,l,*r"))]
4215 && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)
4216 && ( register_operand (operands[0], DImode)
4217 || register_operand (operands[1], DImode))"
4220 switch (which_alternative)
4224 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
4225 return \"add\\t%0, %1, #0\;add\\t%H0, %H1, #0\";
4226 return \"add\\t%H0, %H1, #0\;add\\t%0, %1, #0\";
4228 return \"mov\\t%Q0, %1\;mov\\t%R0, #0\";
4230 operands[1] = GEN_INT (- INTVAL (operands[1]));
4231 return \"mov\\t%Q0, %1\;neg\\t%Q0, %Q0\;asr\\t%R0, %Q0, #31\";
4233 return \"ldmia\\t%1, {%0, %H0}\";
4235 return \"stmia\\t%0, {%1, %H1}\";
4237 return thumb_load_double_from_address (operands);
4239 operands[2] = gen_rtx_MEM (SImode,
4240 plus_constant (XEXP (operands[0], 0), 4));
4241 output_asm_insn (\"str\\t%1, %0\;str\\t%H1, %2\", operands);
4244 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
4245 return \"mov\\t%0, %1\;mov\\t%H0, %H1\";
4246 return \"mov\\t%H0, %H1\;mov\\t%0, %1\";
4249 [(set_attr "length" "4,4,6,2,2,6,4,4")
4250 (set_attr "type" "*,*,*,load2,store2,load2,store2,*")
4251 (set_attr "pool_range" "*,*,*,*,*,1020,*,*")]
4254 (define_expand "movsi"
4255 [(set (match_operand:SI 0 "general_operand" "")
4256 (match_operand:SI 1 "general_operand" ""))]
4261 /* Everything except mem = const or mem = mem can be done easily. */
4262 if (GET_CODE (operands[0]) == MEM)
4263 operands[1] = force_reg (SImode, operands[1]);
4264 if (arm_general_register_operand (operands[0], SImode)
4265 && GET_CODE (operands[1]) == CONST_INT
4266 && !(const_ok_for_arm (INTVAL (operands[1]))
4267 || const_ok_for_arm (~INTVAL (operands[1]))))
4269 arm_split_constant (SET, SImode, NULL_RTX,
4270 INTVAL (operands[1]), operands[0], NULL_RTX,
4271 optimize && !no_new_pseudos);
4275 else /* TARGET_THUMB.... */
4277 if (!no_new_pseudos)
4279 if (GET_CODE (operands[0]) != REG)
4280 operands[1] = force_reg (SImode, operands[1]);
4285 && (CONSTANT_P (operands[1])
4286 || symbol_mentioned_p (operands[1])
4287 || label_mentioned_p (operands[1])))
4288 operands[1] = legitimize_pic_address (operands[1], SImode,
4289 (no_new_pseudos ? operands[0] : 0));
4293 (define_insn "*arm_movsi_insn"
4294 [(set (match_operand:SI 0 "nonimmediate_operand" "=r,r,r, m")
4295 (match_operand:SI 1 "general_operand" "rI,K,mi,r"))]
4296 "TARGET_ARM && ! TARGET_IWMMXT
4297 && !(TARGET_HARD_FLOAT && TARGET_VFP)
4298 && ( register_operand (operands[0], SImode)
4299 || register_operand (operands[1], SImode))"
4305 [(set_attr "type" "*,*,load1,store1")
4306 (set_attr "predicable" "yes")
4307 (set_attr "pool_range" "*,*,4096,*")
4308 (set_attr "neg_pool_range" "*,*,4084,*")]
4312 [(set (match_operand:SI 0 "arm_general_register_operand" "")
4313 (match_operand:SI 1 "const_int_operand" ""))]
4315 && (!(const_ok_for_arm (INTVAL (operands[1]))
4316 || const_ok_for_arm (~INTVAL (operands[1]))))"
4317 [(clobber (const_int 0))]
4319 arm_split_constant (SET, SImode, NULL_RTX,
4320 INTVAL (operands[1]), operands[0], NULL_RTX, 0);
4325 (define_insn "*thumb_movsi_insn"
4326 [(set (match_operand:SI 0 "nonimmediate_operand" "=l,l,l,l,l,>,l, m,*lh")
4327 (match_operand:SI 1 "general_operand" "l, I,J,K,>,l,mi,l,*lh"))]
4329 && ( register_operand (operands[0], SImode)
4330 || register_operand (operands[1], SImode))"
4341 [(set_attr "length" "2,2,4,4,2,2,2,2,2")
4342 (set_attr "type" "*,*,*,*,load1,store1,load1,store1,*")
4343 (set_attr "pool_range" "*,*,*,*,*,*,1020,*,*")]
4347 [(set (match_operand:SI 0 "register_operand" "")
4348 (match_operand:SI 1 "const_int_operand" ""))]
4349 "TARGET_THUMB && CONST_OK_FOR_THUMB_LETTER (INTVAL (operands[1]), 'J')"
4350 [(set (match_dup 0) (match_dup 1))
4351 (set (match_dup 0) (neg:SI (match_dup 0)))]
4352 "operands[1] = GEN_INT (- INTVAL (operands[1]));"
4356 [(set (match_operand:SI 0 "register_operand" "")
4357 (match_operand:SI 1 "const_int_operand" ""))]
4358 "TARGET_THUMB && CONST_OK_FOR_THUMB_LETTER (INTVAL (operands[1]), 'K')"
4359 [(set (match_dup 0) (match_dup 1))
4360 (set (match_dup 0) (ashift:SI (match_dup 0) (match_dup 2)))]
4363 unsigned HOST_WIDE_INT val = INTVAL (operands[1]);
4364 unsigned HOST_WIDE_INT mask = 0xff;
4367 for (i = 0; i < 25; i++)
4368 if ((val & (mask << i)) == val)
4371 /* Shouldn't happen, but we don't want to split if the shift is zero. */
4375 operands[1] = GEN_INT (val >> i);
4376 operands[2] = GEN_INT (i);
4380 ;; When generating pic, we need to load the symbol offset into a register.
4381 ;; So that the optimizer does not confuse this with a normal symbol load
4382 ;; we use an unspec. The offset will be loaded from a constant pool entry,
4383 ;; since that is the only type of relocation we can use.
4385 ;; The rather odd constraints on the following are to force reload to leave
4386 ;; the insn alone, and to force the minipool generation pass to then move
4387 ;; the GOT symbol to memory.
4389 (define_insn "pic_load_addr_arm"
4390 [(set (match_operand:SI 0 "s_register_operand" "=r")
4391 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
4392 "TARGET_ARM && flag_pic"
4394 [(set_attr "type" "load1")
4395 (set (attr "pool_range") (const_int 4096))
4396 (set (attr "neg_pool_range") (const_int 4084))]
4399 (define_insn "pic_load_addr_thumb"
4400 [(set (match_operand:SI 0 "s_register_operand" "=l")
4401 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
4402 "TARGET_THUMB && flag_pic"
4404 [(set_attr "type" "load1")
4405 (set (attr "pool_range") (const_int 1024))]
4408 ;; This variant is used for AOF assembly, since it needs to mention the
4409 ;; pic register in the rtl.
4410 (define_expand "pic_load_addr_based"
4411 [(set (match_operand:SI 0 "s_register_operand" "")
4412 (unspec:SI [(match_operand 1 "" "") (match_dup 2)] UNSPEC_PIC_SYM))]
4413 "TARGET_ARM && flag_pic"
4414 "operands[2] = pic_offset_table_rtx;"
4417 (define_insn "*pic_load_addr_based_insn"
4418 [(set (match_operand:SI 0 "s_register_operand" "=r")
4419 (unspec:SI [(match_operand 1 "" "")
4420 (match_operand 2 "s_register_operand" "r")]
4422 "TARGET_EITHER && flag_pic && operands[2] == pic_offset_table_rtx"
4424 #ifdef AOF_ASSEMBLER
4425 operands[1] = aof_pic_entry (operands[1]);
4427 output_asm_insn (\"ldr%?\\t%0, %a1\", operands);
4430 [(set_attr "type" "load1")
4431 (set (attr "pool_range")
4432 (if_then_else (eq_attr "is_thumb" "yes")
4435 (set (attr "neg_pool_range")
4436 (if_then_else (eq_attr "is_thumb" "yes")
4441 (define_insn "pic_add_dot_plus_four"
4442 [(set (match_operand:SI 0 "register_operand" "+r")
4443 (unspec:SI [(plus:SI (match_dup 0)
4444 (const (plus:SI (pc) (const_int 4))))]
4446 (use (label_ref (match_operand 1 "" "")))]
4447 "TARGET_THUMB && flag_pic"
4449 (*targetm.asm_out.internal_label) (asm_out_file, \"L\",
4450 CODE_LABEL_NUMBER (operands[1]));
4451 return \"add\\t%0, %|pc\";
4453 [(set_attr "length" "2")]
4456 (define_insn "pic_add_dot_plus_eight"
4457 [(set (match_operand:SI 0 "register_operand" "+r")
4458 (unspec:SI [(plus:SI (match_dup 0)
4459 (const (plus:SI (pc) (const_int 8))))]
4461 (use (label_ref (match_operand 1 "" "")))]
4462 "TARGET_ARM && flag_pic"
4464 (*targetm.asm_out.internal_label) (asm_out_file, \"L\",
4465 CODE_LABEL_NUMBER (operands[1]));
4466 return \"add%?\\t%0, %|pc, %0\";
4468 [(set_attr "predicable" "yes")]
4471 (define_expand "builtin_setjmp_receiver"
4472 [(label_ref (match_operand 0 "" ""))]
4476 /* r3 is clobbered by set/longjmp, so we can use it as a scratch
4478 arm_load_pic_register (3);
4482 ;; If copying one reg to another we can set the condition codes according to
4483 ;; its value. Such a move is common after a return from subroutine and the
4484 ;; result is being tested against zero.
4486 (define_insn "*movsi_compare0"
4487 [(set (reg:CC CC_REGNUM)
4488 (compare:CC (match_operand:SI 1 "s_register_operand" "0,r")
4490 (set (match_operand:SI 0 "s_register_operand" "=r,r")
4495 sub%?s\\t%0, %1, #0"
4496 [(set_attr "conds" "set")]
4499 ;; Subroutine to store a half word from a register into memory.
4500 ;; Operand 0 is the source register (HImode)
4501 ;; Operand 1 is the destination address in a register (SImode)
4503 ;; In both this routine and the next, we must be careful not to spill
4504 ;; a memory address of reg+large_const into a separate PLUS insn, since this
4505 ;; can generate unrecognizable rtl.
4507 (define_expand "storehi"
4508 [;; store the low byte
4509 (set (match_operand 1 "" "") (match_dup 3))
4510 ;; extract the high byte
4512 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
4513 ;; store the high byte
4514 (set (match_dup 4) (match_dup 5))]
4518 rtx op1 = operands[1];
4519 rtx addr = XEXP (op1, 0);
4520 enum rtx_code code = GET_CODE (addr);
4522 if ((code == PLUS && GET_CODE (XEXP (addr, 1)) != CONST_INT)
4524 op1 = replace_equiv_address (operands[1], force_reg (SImode, addr));
4526 operands[4] = adjust_address (op1, QImode, 1);
4527 operands[1] = adjust_address (operands[1], QImode, 0);
4528 operands[3] = gen_lowpart (QImode, operands[0]);
4529 operands[0] = gen_lowpart (SImode, operands[0]);
4530 operands[2] = gen_reg_rtx (SImode);
4531 operands[5] = gen_lowpart (QImode, operands[2]);
4535 (define_expand "storehi_bigend"
4536 [(set (match_dup 4) (match_dup 3))
4538 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
4539 (set (match_operand 1 "" "") (match_dup 5))]
4543 rtx op1 = operands[1];
4544 rtx addr = XEXP (op1, 0);
4545 enum rtx_code code = GET_CODE (addr);
4547 if ((code == PLUS && GET_CODE (XEXP (addr, 1)) != CONST_INT)
4549 op1 = replace_equiv_address (op1, force_reg (SImode, addr));
4551 operands[4] = adjust_address (op1, QImode, 1);
4552 operands[1] = adjust_address (operands[1], QImode, 0);
4553 operands[3] = gen_lowpart (QImode, operands[0]);
4554 operands[0] = gen_lowpart (SImode, operands[0]);
4555 operands[2] = gen_reg_rtx (SImode);
4556 operands[5] = gen_lowpart (QImode, operands[2]);
4560 ;; Subroutine to store a half word integer constant into memory.
4561 (define_expand "storeinthi"
4562 [(set (match_operand 0 "" "")
4563 (match_operand 1 "" ""))
4564 (set (match_dup 3) (match_dup 2))]
4568 HOST_WIDE_INT value = INTVAL (operands[1]);
4569 rtx addr = XEXP (operands[0], 0);
4570 rtx op0 = operands[0];
4571 enum rtx_code code = GET_CODE (addr);
4573 if ((code == PLUS && GET_CODE (XEXP (addr, 1)) != CONST_INT)
4575 op0 = replace_equiv_address (op0, force_reg (SImode, addr));
4577 operands[1] = gen_reg_rtx (SImode);
4578 if (BYTES_BIG_ENDIAN)
4580 emit_insn (gen_movsi (operands[1], GEN_INT ((value >> 8) & 255)));
4581 if ((value & 255) == ((value >> 8) & 255))
4582 operands[2] = operands[1];
4585 operands[2] = gen_reg_rtx (SImode);
4586 emit_insn (gen_movsi (operands[2], GEN_INT (value & 255)));
4591 emit_insn (gen_movsi (operands[1], GEN_INT (value & 255)));
4592 if ((value & 255) == ((value >> 8) & 255))
4593 operands[2] = operands[1];
4596 operands[2] = gen_reg_rtx (SImode);
4597 emit_insn (gen_movsi (operands[2], GEN_INT ((value >> 8) & 255)));
4601 operands[3] = adjust_address (op0, QImode, 1);
4602 operands[0] = adjust_address (operands[0], QImode, 0);
4603 operands[2] = gen_lowpart (QImode, operands[2]);
4604 operands[1] = gen_lowpart (QImode, operands[1]);
4608 (define_expand "storehi_single_op"
4609 [(set (match_operand:HI 0 "memory_operand" "")
4610 (match_operand:HI 1 "general_operand" ""))]
4611 "TARGET_ARM && arm_arch4"
4613 if (!s_register_operand (operands[1], HImode))
4614 operands[1] = copy_to_mode_reg (HImode, operands[1]);
4618 (define_expand "movhi"
4619 [(set (match_operand:HI 0 "general_operand" "")
4620 (match_operand:HI 1 "general_operand" ""))]
4625 if (!no_new_pseudos)
4627 if (GET_CODE (operands[0]) == MEM)
4631 emit_insn (gen_storehi_single_op (operands[0], operands[1]));
4634 if (GET_CODE (operands[1]) == CONST_INT)
4635 emit_insn (gen_storeinthi (operands[0], operands[1]));
4638 if (GET_CODE (operands[1]) == MEM)
4639 operands[1] = force_reg (HImode, operands[1]);
4640 if (BYTES_BIG_ENDIAN)
4641 emit_insn (gen_storehi_bigend (operands[1], operands[0]));
4643 emit_insn (gen_storehi (operands[1], operands[0]));
4647 /* Sign extend a constant, and keep it in an SImode reg. */
4648 else if (GET_CODE (operands[1]) == CONST_INT)
4650 rtx reg = gen_reg_rtx (SImode);
4651 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
4653 /* If the constant is already valid, leave it alone. */
4654 if (!const_ok_for_arm (val))
4656 /* If setting all the top bits will make the constant
4657 loadable in a single instruction, then set them.
4658 Otherwise, sign extend the number. */
4660 if (const_ok_for_arm (~(val | ~0xffff)))
4662 else if (val & 0x8000)
4666 emit_insn (gen_movsi (reg, GEN_INT (val)));
4667 operands[1] = gen_lowpart (HImode, reg);
4669 else if (arm_arch4 && optimize && !no_new_pseudos
4670 && GET_CODE (operands[1]) == MEM)
4672 rtx reg = gen_reg_rtx (SImode);
4674 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
4675 operands[1] = gen_lowpart (HImode, reg);
4677 else if (!arm_arch4)
4679 if (GET_CODE (operands[1]) == MEM)
4682 rtx offset = const0_rtx;
4683 rtx reg = gen_reg_rtx (SImode);
4685 if ((GET_CODE (base = XEXP (operands[1], 0)) == REG
4686 || (GET_CODE (base) == PLUS
4687 && (GET_CODE (offset = XEXP (base, 1))
4689 && ((INTVAL(offset) & 1) != 1)
4690 && GET_CODE (base = XEXP (base, 0)) == REG))
4691 && REGNO_POINTER_ALIGN (REGNO (base)) >= 32)
4695 new = widen_memory_access (operands[1], SImode,
4696 ((INTVAL (offset) & ~3)
4697 - INTVAL (offset)));
4698 emit_insn (gen_movsi (reg, new));
4699 if (((INTVAL (offset) & 2) != 0)
4700 ^ (BYTES_BIG_ENDIAN ? 1 : 0))
4702 rtx reg2 = gen_reg_rtx (SImode);
4704 emit_insn (gen_lshrsi3 (reg2, reg, GEN_INT (16)));
4709 emit_insn (gen_movhi_bytes (reg, operands[1]));
4711 operands[1] = gen_lowpart (HImode, reg);
4715 /* Handle loading a large integer during reload. */
4716 else if (GET_CODE (operands[1]) == CONST_INT
4717 && !const_ok_for_arm (INTVAL (operands[1]))
4718 && !const_ok_for_arm (~INTVAL (operands[1])))
4720 /* Writing a constant to memory needs a scratch, which should
4721 be handled with SECONDARY_RELOADs. */
4722 gcc_assert (GET_CODE (operands[0]) == REG);
4724 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
4725 emit_insn (gen_movsi (operands[0], operands[1]));
4729 else /* TARGET_THUMB */
4731 if (!no_new_pseudos)
4733 if (GET_CODE (operands[1]) == CONST_INT)
4735 rtx reg = gen_reg_rtx (SImode);
4737 emit_insn (gen_movsi (reg, operands[1]));
4738 operands[1] = gen_lowpart (HImode, reg);
4741 /* ??? We shouldn't really get invalid addresses here, but this can
4742 happen if we are passed a SP (never OK for HImode/QImode) or
4743 virtual register (rejected by GO_IF_LEGITIMATE_ADDRESS for
4744 HImode/QImode) relative address. */
4745 /* ??? This should perhaps be fixed elsewhere, for instance, in
4746 fixup_stack_1, by checking for other kinds of invalid addresses,
4747 e.g. a bare reference to a virtual register. This may confuse the
4748 alpha though, which must handle this case differently. */
4749 if (GET_CODE (operands[0]) == MEM
4750 && !memory_address_p (GET_MODE (operands[0]),
4751 XEXP (operands[0], 0)))
4753 = replace_equiv_address (operands[0],
4754 copy_to_reg (XEXP (operands[0], 0)));
4756 if (GET_CODE (operands[1]) == MEM
4757 && !memory_address_p (GET_MODE (operands[1]),
4758 XEXP (operands[1], 0)))
4760 = replace_equiv_address (operands[1],
4761 copy_to_reg (XEXP (operands[1], 0)));
4763 if (GET_CODE (operands[1]) == MEM && optimize > 0)
4765 rtx reg = gen_reg_rtx (SImode);
4767 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
4768 operands[1] = gen_lowpart (HImode, reg);
4771 if (GET_CODE (operands[0]) == MEM)
4772 operands[1] = force_reg (HImode, operands[1]);
4774 else if (GET_CODE (operands[1]) == CONST_INT
4775 && !CONST_OK_FOR_THUMB_LETTER (INTVAL (operands[1]), 'I'))
4777 /* Handle loading a large integer during reload. */
4779 /* Writing a constant to memory needs a scratch, which should
4780 be handled with SECONDARY_RELOADs. */
4781 gcc_assert (GET_CODE (operands[0]) == REG);
4783 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
4784 emit_insn (gen_movsi (operands[0], operands[1]));
4791 (define_insn "*thumb_movhi_insn"
4792 [(set (match_operand:HI 0 "nonimmediate_operand" "=l,l,m,*r,*h,l")
4793 (match_operand:HI 1 "general_operand" "l,m,l,*h,*r,I"))]
4795 && ( register_operand (operands[0], HImode)
4796 || register_operand (operands[1], HImode))"
4798 switch (which_alternative)
4800 case 0: return \"add %0, %1, #0\";
4801 case 2: return \"strh %1, %0\";
4802 case 3: return \"mov %0, %1\";
4803 case 4: return \"mov %0, %1\";
4804 case 5: return \"mov %0, %1\";
4805 default: gcc_unreachable ();
4807 /* The stack pointer can end up being taken as an index register.
4808 Catch this case here and deal with it. */
4809 if (GET_CODE (XEXP (operands[1], 0)) == PLUS
4810 && GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == REG
4811 && REGNO (XEXP (XEXP (operands[1], 0), 0)) == SP_REGNUM)
4814 ops[0] = operands[0];
4815 ops[1] = XEXP (XEXP (operands[1], 0), 0);
4817 output_asm_insn (\"mov %0, %1\", ops);
4819 XEXP (XEXP (operands[1], 0), 0) = operands[0];
4822 return \"ldrh %0, %1\";
4824 [(set_attr "length" "2,4,2,2,2,2")
4825 (set_attr "type" "*,load1,store1,*,*,*")]
4829 (define_expand "movhi_bytes"
4830 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
4832 (zero_extend:SI (match_dup 6)))
4833 (set (match_operand:SI 0 "" "")
4834 (ior:SI (ashift:SI (match_dup 4) (const_int 8)) (match_dup 5)))]
4839 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
4841 mem1 = change_address (operands[1], QImode, addr);
4842 mem2 = change_address (operands[1], QImode, plus_constant (addr, 1));
4843 operands[0] = gen_lowpart (SImode, operands[0]);
4845 operands[2] = gen_reg_rtx (SImode);
4846 operands[3] = gen_reg_rtx (SImode);
4849 if (BYTES_BIG_ENDIAN)
4851 operands[4] = operands[2];
4852 operands[5] = operands[3];
4856 operands[4] = operands[3];
4857 operands[5] = operands[2];
4862 (define_expand "movhi_bigend"
4864 (rotate:SI (subreg:SI (match_operand:HI 1 "memory_operand" "") 0)
4867 (ashiftrt:SI (match_dup 2) (const_int 16)))
4868 (set (match_operand:HI 0 "s_register_operand" "")
4872 operands[2] = gen_reg_rtx (SImode);
4873 operands[3] = gen_reg_rtx (SImode);
4874 operands[4] = gen_lowpart (HImode, operands[3]);
4878 ;; Pattern to recognize insn generated default case above
4879 (define_insn "*movhi_insn_arch4"
4880 [(set (match_operand:HI 0 "nonimmediate_operand" "=r,r,m,r")
4881 (match_operand:HI 1 "general_operand" "rI,K,r,m"))]
4884 && (GET_CODE (operands[1]) != CONST_INT
4885 || const_ok_for_arm (INTVAL (operands[1]))
4886 || const_ok_for_arm (~INTVAL (operands[1])))"
4888 mov%?\\t%0, %1\\t%@ movhi
4889 mvn%?\\t%0, #%B1\\t%@ movhi
4890 str%?h\\t%1, %0\\t%@ movhi
4891 ldr%?h\\t%0, %1\\t%@ movhi"
4892 [(set_attr "type" "*,*,store1,load1")
4893 (set_attr "predicable" "yes")
4894 (set_attr "pool_range" "*,*,*,256")
4895 (set_attr "neg_pool_range" "*,*,*,244")]
4898 (define_insn "*movhi_bytes"
4899 [(set (match_operand:HI 0 "s_register_operand" "=r,r")
4900 (match_operand:HI 1 "arm_rhs_operand" "rI,K"))]
4903 mov%?\\t%0, %1\\t%@ movhi
4904 mvn%?\\t%0, #%B1\\t%@ movhi"
4905 [(set_attr "predicable" "yes")]
4908 (define_expand "thumb_movhi_clobber"
4909 [(set (match_operand:HI 0 "memory_operand" "")
4910 (match_operand:HI 1 "register_operand" ""))
4911 (clobber (match_operand:DI 2 "register_operand" ""))]
4914 if (strict_memory_address_p (HImode, XEXP (operands[0], 0))
4915 && REGNO (operands[1]) <= LAST_LO_REGNUM)
4917 emit_insn (gen_movhi (operands[0], operands[1]));
4920 /* XXX Fixme, need to handle other cases here as well. */
4925 ;; We use a DImode scratch because we may occasionally need an additional
4926 ;; temporary if the address isn't offsettable -- push_reload doesn't seem
4927 ;; to take any notice of the "o" constraints on reload_memory_operand operand.
4928 (define_expand "reload_outhi"
4929 [(parallel [(match_operand:HI 0 "arm_reload_memory_operand" "=o")
4930 (match_operand:HI 1 "s_register_operand" "r")
4931 (match_operand:DI 2 "s_register_operand" "=&l")])]
4934 arm_reload_out_hi (operands);
4936 thumb_reload_out_hi (operands);
4941 (define_expand "reload_inhi"
4942 [(parallel [(match_operand:HI 0 "s_register_operand" "=r")
4943 (match_operand:HI 1 "arm_reload_memory_operand" "o")
4944 (match_operand:DI 2 "s_register_operand" "=&r")])]
4948 arm_reload_in_hi (operands);
4950 thumb_reload_out_hi (operands);
4954 (define_expand "movqi"
4955 [(set (match_operand:QI 0 "general_operand" "")
4956 (match_operand:QI 1 "general_operand" ""))]
4959 /* Everything except mem = const or mem = mem can be done easily */
4961 if (!no_new_pseudos)
4963 if (GET_CODE (operands[1]) == CONST_INT)
4965 rtx reg = gen_reg_rtx (SImode);
4967 emit_insn (gen_movsi (reg, operands[1]));
4968 operands[1] = gen_lowpart (QImode, reg);
4973 /* ??? We shouldn't really get invalid addresses here, but this can
4974 happen if we are passed a SP (never OK for HImode/QImode) or
4975 virtual register (rejected by GO_IF_LEGITIMATE_ADDRESS for
4976 HImode/QImode) relative address. */
4977 /* ??? This should perhaps be fixed elsewhere, for instance, in
4978 fixup_stack_1, by checking for other kinds of invalid addresses,
4979 e.g. a bare reference to a virtual register. This may confuse the
4980 alpha though, which must handle this case differently. */
4981 if (GET_CODE (operands[0]) == MEM
4982 && !memory_address_p (GET_MODE (operands[0]),
4983 XEXP (operands[0], 0)))
4985 = replace_equiv_address (operands[0],
4986 copy_to_reg (XEXP (operands[0], 0)));
4987 if (GET_CODE (operands[1]) == MEM
4988 && !memory_address_p (GET_MODE (operands[1]),
4989 XEXP (operands[1], 0)))
4991 = replace_equiv_address (operands[1],
4992 copy_to_reg (XEXP (operands[1], 0)));
4995 if (GET_CODE (operands[1]) == MEM && optimize > 0)
4997 rtx reg = gen_reg_rtx (SImode);
4999 emit_insn (gen_zero_extendqisi2 (reg, operands[1]));
5000 operands[1] = gen_lowpart (QImode, reg);
5003 if (GET_CODE (operands[0]) == MEM)
5004 operands[1] = force_reg (QImode, operands[1]);
5006 else if (TARGET_THUMB
5007 && GET_CODE (operands[1]) == CONST_INT
5008 && !CONST_OK_FOR_LETTER_P (INTVAL (operands[1]), 'I'))
5010 /* Handle loading a large integer during reload. */
5012 /* Writing a constant to memory needs a scratch, which should
5013 be handled with SECONDARY_RELOADs. */
5014 gcc_assert (GET_CODE (operands[0]) == REG);
5016 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5017 emit_insn (gen_movsi (operands[0], operands[1]));
5024 (define_insn "*arm_movqi_insn"
5025 [(set (match_operand:QI 0 "nonimmediate_operand" "=r,r,r,m")
5026 (match_operand:QI 1 "general_operand" "rI,K,m,r"))]
5028 && ( register_operand (operands[0], QImode)
5029 || register_operand (operands[1], QImode))"
5035 [(set_attr "type" "*,*,load1,store1")
5036 (set_attr "predicable" "yes")]
5039 (define_insn "*thumb_movqi_insn"
5040 [(set (match_operand:QI 0 "nonimmediate_operand" "=l,l,m,*r,*h,l")
5041 (match_operand:QI 1 "general_operand" "l, m,l,*h,*r,I"))]
5043 && ( register_operand (operands[0], QImode)
5044 || register_operand (operands[1], QImode))"
5052 [(set_attr "length" "2")
5053 (set_attr "type" "*,load1,store1,*,*,*")
5054 (set_attr "pool_range" "*,32,*,*,*,*")]
5057 (define_expand "movsf"
5058 [(set (match_operand:SF 0 "general_operand" "")
5059 (match_operand:SF 1 "general_operand" ""))]
5064 if (GET_CODE (operands[0]) == MEM)
5065 operands[1] = force_reg (SFmode, operands[1]);
5067 else /* TARGET_THUMB */
5069 if (!no_new_pseudos)
5071 if (GET_CODE (operands[0]) != REG)
5072 operands[1] = force_reg (SFmode, operands[1]);
5079 [(set (match_operand:SF 0 "nonimmediate_operand" "")
5080 (match_operand:SF 1 "immediate_operand" ""))]
5082 && !(TARGET_HARD_FLOAT && TARGET_FPA)
5084 && GET_CODE (operands[1]) == CONST_DOUBLE"
5085 [(set (match_dup 2) (match_dup 3))]
5087 operands[2] = gen_lowpart (SImode, operands[0]);
5088 operands[3] = gen_lowpart (SImode, operands[1]);
5089 if (operands[2] == 0 || operands[3] == 0)
5094 (define_insn "*arm_movsf_soft_insn"
5095 [(set (match_operand:SF 0 "nonimmediate_operand" "=r,r,m")
5096 (match_operand:SF 1 "general_operand" "r,mE,r"))]
5098 && TARGET_SOFT_FLOAT
5099 && (GET_CODE (operands[0]) != MEM
5100 || register_operand (operands[1], SFmode))"
5103 ldr%?\\t%0, %1\\t%@ float
5104 str%?\\t%1, %0\\t%@ float"
5105 [(set_attr "length" "4,4,4")
5106 (set_attr "predicable" "yes")
5107 (set_attr "type" "*,load1,store1")
5108 (set_attr "pool_range" "*,4096,*")
5109 (set_attr "neg_pool_range" "*,4084,*")]
5112 ;;; ??? This should have alternatives for constants.
5113 (define_insn "*thumb_movsf_insn"
5114 [(set (match_operand:SF 0 "nonimmediate_operand" "=l,l,>,l, m,*r,*h")
5115 (match_operand:SF 1 "general_operand" "l, >,l,mF,l,*h,*r"))]
5117 && ( register_operand (operands[0], SFmode)
5118 || register_operand (operands[1], SFmode))"
5127 [(set_attr "length" "2")
5128 (set_attr "type" "*,load1,store1,load1,store1,*,*")
5129 (set_attr "pool_range" "*,*,*,1020,*,*,*")]
5132 (define_expand "movdf"
5133 [(set (match_operand:DF 0 "general_operand" "")
5134 (match_operand:DF 1 "general_operand" ""))]
5139 if (GET_CODE (operands[0]) == MEM)
5140 operands[1] = force_reg (DFmode, operands[1]);
5142 else /* TARGET_THUMB */
5144 if (!no_new_pseudos)
5146 if (GET_CODE (operands[0]) != REG)
5147 operands[1] = force_reg (DFmode, operands[1]);
5153 ;; Reloading a df mode value stored in integer regs to memory can require a
5155 (define_expand "reload_outdf"
5156 [(match_operand:DF 0 "arm_reload_memory_operand" "=o")
5157 (match_operand:DF 1 "s_register_operand" "r")
5158 (match_operand:SI 2 "s_register_operand" "=&r")]
5162 enum rtx_code code = GET_CODE (XEXP (operands[0], 0));
5165 operands[2] = XEXP (operands[0], 0);
5166 else if (code == POST_INC || code == PRE_DEC)
5168 operands[0] = gen_rtx_SUBREG (DImode, operands[0], 0);
5169 operands[1] = gen_rtx_SUBREG (DImode, operands[1], 0);
5170 emit_insn (gen_movdi (operands[0], operands[1]));
5173 else if (code == PRE_INC)
5175 rtx reg = XEXP (XEXP (operands[0], 0), 0);
5177 emit_insn (gen_addsi3 (reg, reg, GEN_INT (8)));
5180 else if (code == POST_DEC)
5181 operands[2] = XEXP (XEXP (operands[0], 0), 0);
5183 emit_insn (gen_addsi3 (operands[2], XEXP (XEXP (operands[0], 0), 0),
5184 XEXP (XEXP (operands[0], 0), 1)));
5186 emit_insn (gen_rtx_SET (VOIDmode,
5187 replace_equiv_address (operands[0], operands[2]),
5190 if (code == POST_DEC)
5191 emit_insn (gen_addsi3 (operands[2], operands[2], GEN_INT (-8)));
5197 (define_insn "*movdf_soft_insn"
5198 [(set (match_operand:DF 0 "nonimmediate_soft_df_operand" "=r,r,r,r,m")
5199 (match_operand:DF 1 "soft_df_operand" "rDa,Db,Dc,mF,r"))]
5200 "TARGET_ARM && TARGET_SOFT_FLOAT
5203 switch (which_alternative)
5210 return output_move_double (operands);
5213 [(set_attr "length" "8,12,16,8,8")
5214 (set_attr "type" "*,*,*,load2,store2")
5215 (set_attr "pool_range" "1020")
5216 (set_attr "neg_pool_range" "1008")]
5219 ;;; ??? This should have alternatives for constants.
5220 ;;; ??? This was originally identical to the movdi_insn pattern.
5221 ;;; ??? The 'F' constraint looks funny, but it should always be replaced by
5222 ;;; thumb_reorg with a memory reference.
5223 (define_insn "*thumb_movdf_insn"
5224 [(set (match_operand:DF 0 "nonimmediate_operand" "=l,l,>,l, m,*r")
5225 (match_operand:DF 1 "general_operand" "l, >,l,mF,l,*r"))]
5227 && ( register_operand (operands[0], DFmode)
5228 || register_operand (operands[1], DFmode))"
5230 switch (which_alternative)
5234 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
5235 return \"add\\t%0, %1, #0\;add\\t%H0, %H1, #0\";
5236 return \"add\\t%H0, %H1, #0\;add\\t%0, %1, #0\";
5238 return \"ldmia\\t%1, {%0, %H0}\";
5240 return \"stmia\\t%0, {%1, %H1}\";
5242 return thumb_load_double_from_address (operands);
5244 operands[2] = gen_rtx_MEM (SImode,
5245 plus_constant (XEXP (operands[0], 0), 4));
5246 output_asm_insn (\"str\\t%1, %0\;str\\t%H1, %2\", operands);
5249 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
5250 return \"mov\\t%0, %1\;mov\\t%H0, %H1\";
5251 return \"mov\\t%H0, %H1\;mov\\t%0, %1\";
5254 [(set_attr "length" "4,2,2,6,4,4")
5255 (set_attr "type" "*,load2,store2,load2,store2,*")
5256 (set_attr "pool_range" "*,*,*,1020,*,*")]
5259 (define_expand "movxf"
5260 [(set (match_operand:XF 0 "general_operand" "")
5261 (match_operand:XF 1 "general_operand" ""))]
5262 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_FPA"
5264 if (GET_CODE (operands[0]) == MEM)
5265 operands[1] = force_reg (XFmode, operands[1]);
5270 (define_expand "movv2si"
5271 [(set (match_operand:V2SI 0 "nonimmediate_operand" "")
5272 (match_operand:V2SI 1 "general_operand" ""))]
5273 "TARGET_REALLY_IWMMXT"
5277 (define_expand "movv4hi"
5278 [(set (match_operand:V4HI 0 "nonimmediate_operand" "")
5279 (match_operand:V4HI 1 "general_operand" ""))]
5280 "TARGET_REALLY_IWMMXT"
5284 (define_expand "movv8qi"
5285 [(set (match_operand:V8QI 0 "nonimmediate_operand" "")
5286 (match_operand:V8QI 1 "general_operand" ""))]
5287 "TARGET_REALLY_IWMMXT"
5292 ;; load- and store-multiple insns
5293 ;; The arm can load/store any set of registers, provided that they are in
5294 ;; ascending order; but that is beyond GCC so stick with what it knows.
5296 (define_expand "load_multiple"
5297 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
5298 (match_operand:SI 1 "" ""))
5299 (use (match_operand:SI 2 "" ""))])]
5302 HOST_WIDE_INT offset = 0;
5304 /* Support only fixed point registers. */
5305 if (GET_CODE (operands[2]) != CONST_INT
5306 || INTVAL (operands[2]) > 14
5307 || INTVAL (operands[2]) < 2
5308 || GET_CODE (operands[1]) != MEM
5309 || GET_CODE (operands[0]) != REG
5310 || REGNO (operands[0]) > (LAST_ARM_REGNUM - 1)
5311 || REGNO (operands[0]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
5315 = arm_gen_load_multiple (REGNO (operands[0]), INTVAL (operands[2]),
5316 force_reg (SImode, XEXP (operands[1], 0)),
5317 TRUE, FALSE, operands[1], &offset);
5320 ;; Load multiple with write-back
5322 (define_insn "*ldmsi_postinc4"
5323 [(match_parallel 0 "load_multiple_operation"
5324 [(set (match_operand:SI 1 "s_register_operand" "=r")
5325 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
5327 (set (match_operand:SI 3 "arm_hard_register_operand" "")
5328 (mem:SI (match_dup 2)))
5329 (set (match_operand:SI 4 "arm_hard_register_operand" "")
5330 (mem:SI (plus:SI (match_dup 2) (const_int 4))))
5331 (set (match_operand:SI 5 "arm_hard_register_operand" "")
5332 (mem:SI (plus:SI (match_dup 2) (const_int 8))))
5333 (set (match_operand:SI 6 "arm_hard_register_operand" "")
5334 (mem:SI (plus:SI (match_dup 2) (const_int 12))))])]
5335 "TARGET_ARM && XVECLEN (operands[0], 0) == 5"
5336 "ldm%?ia\\t%1!, {%3, %4, %5, %6}"
5337 [(set_attr "type" "load4")
5338 (set_attr "predicable" "yes")]
5341 (define_insn "*ldmsi_postinc4_thumb"
5342 [(match_parallel 0 "load_multiple_operation"
5343 [(set (match_operand:SI 1 "s_register_operand" "=l")
5344 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
5346 (set (match_operand:SI 3 "arm_hard_register_operand" "")
5347 (mem:SI (match_dup 2)))
5348 (set (match_operand:SI 4 "arm_hard_register_operand" "")
5349 (mem:SI (plus:SI (match_dup 2) (const_int 4))))
5350 (set (match_operand:SI 5 "arm_hard_register_operand" "")
5351 (mem:SI (plus:SI (match_dup 2) (const_int 8))))
5352 (set (match_operand:SI 6 "arm_hard_register_operand" "")
5353 (mem:SI (plus:SI (match_dup 2) (const_int 12))))])]
5354 "TARGET_THUMB && XVECLEN (operands[0], 0) == 5"
5355 "ldmia\\t%1!, {%3, %4, %5, %6}"
5356 [(set_attr "type" "load4")]
5359 (define_insn "*ldmsi_postinc3"
5360 [(match_parallel 0 "load_multiple_operation"
5361 [(set (match_operand:SI 1 "s_register_operand" "=r")
5362 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
5364 (set (match_operand:SI 3 "arm_hard_register_operand" "")
5365 (mem:SI (match_dup 2)))
5366 (set (match_operand:SI 4 "arm_hard_register_operand" "")
5367 (mem:SI (plus:SI (match_dup 2) (const_int 4))))
5368 (set (match_operand:SI 5 "arm_hard_register_operand" "")
5369 (mem:SI (plus:SI (match_dup 2) (const_int 8))))])]
5370 "TARGET_ARM && XVECLEN (operands[0], 0) == 4"
5371 "ldm%?ia\\t%1!, {%3, %4, %5}"
5372 [(set_attr "type" "load3")
5373 (set_attr "predicable" "yes")]
5376 (define_insn "*ldmsi_postinc2"
5377 [(match_parallel 0 "load_multiple_operation"
5378 [(set (match_operand:SI 1 "s_register_operand" "=r")
5379 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
5381 (set (match_operand:SI 3 "arm_hard_register_operand" "")
5382 (mem:SI (match_dup 2)))
5383 (set (match_operand:SI 4 "arm_hard_register_operand" "")
5384 (mem:SI (plus:SI (match_dup 2) (const_int 4))))])]
5385 "TARGET_ARM && XVECLEN (operands[0], 0) == 3"
5386 "ldm%?ia\\t%1!, {%3, %4}"
5387 [(set_attr "type" "load2")
5388 (set_attr "predicable" "yes")]
5391 ;; Ordinary load multiple
5393 (define_insn "*ldmsi4"
5394 [(match_parallel 0 "load_multiple_operation"
5395 [(set (match_operand:SI 2 "arm_hard_register_operand" "")
5396 (mem:SI (match_operand:SI 1 "s_register_operand" "r")))
5397 (set (match_operand:SI 3 "arm_hard_register_operand" "")
5398 (mem:SI (plus:SI (match_dup 1) (const_int 4))))
5399 (set (match_operand:SI 4 "arm_hard_register_operand" "")
5400 (mem:SI (plus:SI (match_dup 1) (const_int 8))))
5401 (set (match_operand:SI 5 "arm_hard_register_operand" "")
5402 (mem:SI (plus:SI (match_dup 1) (const_int 12))))])]
5403 "TARGET_ARM && XVECLEN (operands[0], 0) == 4"
5404 "ldm%?ia\\t%1, {%2, %3, %4, %5}"
5405 [(set_attr "type" "load4")
5406 (set_attr "predicable" "yes")]
5409 (define_insn "*ldmsi3"
5410 [(match_parallel 0 "load_multiple_operation"
5411 [(set (match_operand:SI 2 "arm_hard_register_operand" "")
5412 (mem:SI (match_operand:SI 1 "s_register_operand" "r")))
5413 (set (match_operand:SI 3 "arm_hard_register_operand" "")
5414 (mem:SI (plus:SI (match_dup 1) (const_int 4))))
5415 (set (match_operand:SI 4 "arm_hard_register_operand" "")
5416 (mem:SI (plus:SI (match_dup 1) (const_int 8))))])]
5417 "TARGET_ARM && XVECLEN (operands[0], 0) == 3"
5418 "ldm%?ia\\t%1, {%2, %3, %4}"
5419 [(set_attr "type" "load3")
5420 (set_attr "predicable" "yes")]
5423 (define_insn "*ldmsi2"
5424 [(match_parallel 0 "load_multiple_operation"
5425 [(set (match_operand:SI 2 "arm_hard_register_operand" "")
5426 (mem:SI (match_operand:SI 1 "s_register_operand" "r")))
5427 (set (match_operand:SI 3 "arm_hard_register_operand" "")
5428 (mem:SI (plus:SI (match_dup 1) (const_int 4))))])]
5429 "TARGET_ARM && XVECLEN (operands[0], 0) == 2"
5430 "ldm%?ia\\t%1, {%2, %3}"
5431 [(set_attr "type" "load2")
5432 (set_attr "predicable" "yes")]
5435 (define_expand "store_multiple"
5436 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
5437 (match_operand:SI 1 "" ""))
5438 (use (match_operand:SI 2 "" ""))])]
5441 HOST_WIDE_INT offset = 0;
5443 /* Support only fixed point registers. */
5444 if (GET_CODE (operands[2]) != CONST_INT
5445 || INTVAL (operands[2]) > 14
5446 || INTVAL (operands[2]) < 2
5447 || GET_CODE (operands[1]) != REG
5448 || GET_CODE (operands[0]) != MEM
5449 || REGNO (operands[1]) > (LAST_ARM_REGNUM - 1)
5450 || REGNO (operands[1]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
5454 = arm_gen_store_multiple (REGNO (operands[1]), INTVAL (operands[2]),
5455 force_reg (SImode, XEXP (operands[0], 0)),
5456 TRUE, FALSE, operands[0], &offset);
5459 ;; Store multiple with write-back
5461 (define_insn "*stmsi_postinc4"
5462 [(match_parallel 0 "store_multiple_operation"
5463 [(set (match_operand:SI 1 "s_register_operand" "=r")
5464 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
5466 (set (mem:SI (match_dup 2))
5467 (match_operand:SI 3 "arm_hard_register_operand" ""))
5468 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
5469 (match_operand:SI 4 "arm_hard_register_operand" ""))
5470 (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
5471 (match_operand:SI 5 "arm_hard_register_operand" ""))
5472 (set (mem:SI (plus:SI (match_dup 2) (const_int 12)))
5473 (match_operand:SI 6 "arm_hard_register_operand" ""))])]
5474 "TARGET_ARM && XVECLEN (operands[0], 0) == 5"
5475 "stm%?ia\\t%1!, {%3, %4, %5, %6}"
5476 [(set_attr "predicable" "yes")
5477 (set_attr "type" "store4")]
5480 (define_insn "*stmsi_postinc4_thumb"
5481 [(match_parallel 0 "store_multiple_operation"
5482 [(set (match_operand:SI 1 "s_register_operand" "=l")
5483 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
5485 (set (mem:SI (match_dup 2))
5486 (match_operand:SI 3 "arm_hard_register_operand" ""))
5487 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
5488 (match_operand:SI 4 "arm_hard_register_operand" ""))
5489 (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
5490 (match_operand:SI 5 "arm_hard_register_operand" ""))
5491 (set (mem:SI (plus:SI (match_dup 2) (const_int 12)))
5492 (match_operand:SI 6 "arm_hard_register_operand" ""))])]
5493 "TARGET_THUMB && XVECLEN (operands[0], 0) == 5"
5494 "stmia\\t%1!, {%3, %4, %5, %6}"
5495 [(set_attr "type" "store4")]
5498 (define_insn "*stmsi_postinc3"
5499 [(match_parallel 0 "store_multiple_operation"
5500 [(set (match_operand:SI 1 "s_register_operand" "=r")
5501 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
5503 (set (mem:SI (match_dup 2))
5504 (match_operand:SI 3 "arm_hard_register_operand" ""))
5505 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
5506 (match_operand:SI 4 "arm_hard_register_operand" ""))
5507 (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
5508 (match_operand:SI 5 "arm_hard_register_operand" ""))])]
5509 "TARGET_ARM && XVECLEN (operands[0], 0) == 4"
5510 "stm%?ia\\t%1!, {%3, %4, %5}"
5511 [(set_attr "predicable" "yes")
5512 (set_attr "type" "store3")]
5515 (define_insn "*stmsi_postinc2"
5516 [(match_parallel 0 "store_multiple_operation"
5517 [(set (match_operand:SI 1 "s_register_operand" "=r")
5518 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
5520 (set (mem:SI (match_dup 2))
5521 (match_operand:SI 3 "arm_hard_register_operand" ""))
5522 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
5523 (match_operand:SI 4 "arm_hard_register_operand" ""))])]
5524 "TARGET_ARM && XVECLEN (operands[0], 0) == 3"
5525 "stm%?ia\\t%1!, {%3, %4}"
5526 [(set_attr "predicable" "yes")
5527 (set_attr "type" "store2")]
5530 ;; Ordinary store multiple
5532 (define_insn "*stmsi4"
5533 [(match_parallel 0 "store_multiple_operation"
5534 [(set (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
5535 (match_operand:SI 2 "arm_hard_register_operand" ""))
5536 (set (mem:SI (plus:SI (match_dup 1) (const_int 4)))
5537 (match_operand:SI 3 "arm_hard_register_operand" ""))
5538 (set (mem:SI (plus:SI (match_dup 1) (const_int 8)))
5539 (match_operand:SI 4 "arm_hard_register_operand" ""))
5540 (set (mem:SI (plus:SI (match_dup 1) (const_int 12)))
5541 (match_operand:SI 5 "arm_hard_register_operand" ""))])]
5542 "TARGET_ARM && XVECLEN (operands[0], 0) == 4"
5543 "stm%?ia\\t%1, {%2, %3, %4, %5}"
5544 [(set_attr "predicable" "yes")
5545 (set_attr "type" "store4")]
5548 (define_insn "*stmsi3"
5549 [(match_parallel 0 "store_multiple_operation"
5550 [(set (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
5551 (match_operand:SI 2 "arm_hard_register_operand" ""))
5552 (set (mem:SI (plus:SI (match_dup 1) (const_int 4)))
5553 (match_operand:SI 3 "arm_hard_register_operand" ""))
5554 (set (mem:SI (plus:SI (match_dup 1) (const_int 8)))
5555 (match_operand:SI 4 "arm_hard_register_operand" ""))])]
5556 "TARGET_ARM && XVECLEN (operands[0], 0) == 3"
5557 "stm%?ia\\t%1, {%2, %3, %4}"
5558 [(set_attr "predicable" "yes")
5559 (set_attr "type" "store3")]
5562 (define_insn "*stmsi2"
5563 [(match_parallel 0 "store_multiple_operation"
5564 [(set (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
5565 (match_operand:SI 2 "arm_hard_register_operand" ""))
5566 (set (mem:SI (plus:SI (match_dup 1) (const_int 4)))
5567 (match_operand:SI 3 "arm_hard_register_operand" ""))])]
5568 "TARGET_ARM && XVECLEN (operands[0], 0) == 2"
5569 "stm%?ia\\t%1, {%2, %3}"
5570 [(set_attr "predicable" "yes")
5571 (set_attr "type" "store2")]
5574 ;; Move a block of memory if it is word aligned and MORE than 2 words long.
5575 ;; We could let this apply for blocks of less than this, but it clobbers so
5576 ;; many registers that there is then probably a better way.
5578 (define_expand "movmemqi"
5579 [(match_operand:BLK 0 "general_operand" "")
5580 (match_operand:BLK 1 "general_operand" "")
5581 (match_operand:SI 2 "const_int_operand" "")
5582 (match_operand:SI 3 "const_int_operand" "")]
5587 if (arm_gen_movmemqi (operands))
5591 else /* TARGET_THUMB */
5593 if ( INTVAL (operands[3]) != 4
5594 || INTVAL (operands[2]) > 48)
5597 thumb_expand_movmemqi (operands);
5603 ;; Thumb block-move insns
5605 (define_insn "movmem12b"
5606 [(set (mem:SI (match_operand:SI 2 "register_operand" "0"))
5607 (mem:SI (match_operand:SI 3 "register_operand" "1")))
5608 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
5609 (mem:SI (plus:SI (match_dup 3) (const_int 4))))
5610 (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
5611 (mem:SI (plus:SI (match_dup 3) (const_int 8))))
5612 (set (match_operand:SI 0 "register_operand" "=l")
5613 (plus:SI (match_dup 2) (const_int 12)))
5614 (set (match_operand:SI 1 "register_operand" "=l")
5615 (plus:SI (match_dup 3) (const_int 12)))
5616 (clobber (match_scratch:SI 4 "=&l"))
5617 (clobber (match_scratch:SI 5 "=&l"))
5618 (clobber (match_scratch:SI 6 "=&l"))]
5620 "* return thumb_output_move_mem_multiple (3, operands);"
5621 [(set_attr "length" "4")
5622 ; This isn't entirely accurate... It loads as well, but in terms of
5623 ; scheduling the following insn it is better to consider it as a store
5624 (set_attr "type" "store3")]
5627 (define_insn "movmem8b"
5628 [(set (mem:SI (match_operand:SI 2 "register_operand" "0"))
5629 (mem:SI (match_operand:SI 3 "register_operand" "1")))
5630 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
5631 (mem:SI (plus:SI (match_dup 3) (const_int 4))))
5632 (set (match_operand:SI 0 "register_operand" "=l")
5633 (plus:SI (match_dup 2) (const_int 8)))
5634 (set (match_operand:SI 1 "register_operand" "=l")
5635 (plus:SI (match_dup 3) (const_int 8)))
5636 (clobber (match_scratch:SI 4 "=&l"))
5637 (clobber (match_scratch:SI 5 "=&l"))]
5639 "* return thumb_output_move_mem_multiple (2, operands);"
5640 [(set_attr "length" "4")
5641 ; This isn't entirely accurate... It loads as well, but in terms of
5642 ; scheduling the following insn it is better to consider it as a store
5643 (set_attr "type" "store2")]
5648 ;; Compare & branch insns
5649 ;; The range calculations are based as follows:
5650 ;; For forward branches, the address calculation returns the address of
5651 ;; the next instruction. This is 2 beyond the branch instruction.
5652 ;; For backward branches, the address calculation returns the address of
5653 ;; the first instruction in this pattern (cmp). This is 2 before the branch
5654 ;; instruction for the shortest sequence, and 4 before the branch instruction
5655 ;; if we have to jump around an unconditional branch.
5656 ;; To the basic branch range the PC offset must be added (this is +4).
5657 ;; So for forward branches we have
5658 ;; (pos_range - pos_base_offs + pc_offs) = (pos_range - 2 + 4).
5659 ;; And for backward branches we have
5660 ;; (neg_range - neg_base_offs + pc_offs) = (neg_range - (-2 or -4) + 4).
5662 ;; For a 'b' pos_range = 2046, neg_range = -2048 giving (-2040->2048).
5663 ;; For a 'b<cond>' pos_range = 254, neg_range = -256 giving (-250 ->256).
5665 (define_expand "cbranchsi4"
5666 [(set (pc) (if_then_else
5667 (match_operator 0 "arm_comparison_operator"
5668 [(match_operand:SI 1 "s_register_operand" "")
5669 (match_operand:SI 2 "nonmemory_operand" "")])
5670 (label_ref (match_operand 3 "" ""))
5674 if (thumb_cmpneg_operand (operands[2], SImode))
5676 emit_jump_insn (gen_cbranchsi4_scratch (NULL, operands[1], operands[2],
5677 operands[3], operands[0]));
5680 if (!thumb_cmp_operand (operands[2], SImode))
5681 operands[2] = force_reg (SImode, operands[2]);
5684 (define_insn "*cbranchsi4_insn"
5685 [(set (pc) (if_then_else
5686 (match_operator 0 "arm_comparison_operator"
5687 [(match_operand:SI 1 "s_register_operand" "l,*h")
5688 (match_operand:SI 2 "thumb_cmp_operand" "lI*h,*r")])
5689 (label_ref (match_operand 3 "" ""))
5693 output_asm_insn (\"cmp\\t%1, %2\", operands);
5695 switch (get_attr_length (insn))
5697 case 4: return \"b%d0\\t%l3\";
5698 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
5699 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
5702 [(set (attr "far_jump")
5704 (eq_attr "length" "8")
5705 (const_string "yes")
5706 (const_string "no")))
5707 (set (attr "length")
5709 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
5710 (le (minus (match_dup 3) (pc)) (const_int 256)))
5713 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
5714 (le (minus (match_dup 3) (pc)) (const_int 2048)))
5719 (define_insn "cbranchsi4_scratch"
5720 [(set (pc) (if_then_else
5721 (match_operator 4 "arm_comparison_operator"
5722 [(match_operand:SI 1 "s_register_operand" "l,0")
5723 (match_operand:SI 2 "thumb_cmpneg_operand" "L,J")])
5724 (label_ref (match_operand 3 "" ""))
5726 (clobber (match_scratch:SI 0 "=l,l"))]
5729 output_asm_insn (\"add\\t%0, %1, #%n2\", operands);
5731 switch (get_attr_length (insn))
5733 case 4: return \"b%d4\\t%l3\";
5734 case 6: return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
5735 default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
5738 [(set (attr "far_jump")
5740 (eq_attr "length" "8")
5741 (const_string "yes")
5742 (const_string "no")))
5743 (set (attr "length")
5745 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
5746 (le (minus (match_dup 3) (pc)) (const_int 256)))
5749 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
5750 (le (minus (match_dup 3) (pc)) (const_int 2048)))
5754 (define_insn "*movsi_cbranchsi4"
5757 (match_operator 3 "arm_comparison_operator"
5758 [(match_operand:SI 1 "s_register_operand" "0,l,l,l")
5760 (label_ref (match_operand 2 "" ""))
5762 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,l,*h,*m")
5766 if (which_alternative == 0)
5767 output_asm_insn (\"cmp\t%0, #0\", operands);
5768 else if (which_alternative == 1)
5769 output_asm_insn (\"sub\t%0, %1, #0\", operands);
5772 output_asm_insn (\"cmp\t%1, #0\", operands);
5773 if (which_alternative == 2)
5774 output_asm_insn (\"mov\t%0, %1\", operands);
5776 output_asm_insn (\"str\t%1, %0\", operands);
5778 switch (get_attr_length (insn) - ((which_alternative > 1) ? 2 : 0))
5780 case 4: return \"b%d3\\t%l2\";
5781 case 6: return \"b%D3\\t.LCB%=\;b\\t%l2\\t%@long jump\\n.LCB%=:\";
5782 default: return \"b%D3\\t.LCB%=\;bl\\t%l2\\t%@far jump\\n.LCB%=:\";
5785 [(set (attr "far_jump")
5787 (ior (and (gt (symbol_ref ("which_alternative"))
5789 (eq_attr "length" "8"))
5790 (eq_attr "length" "10"))
5791 (const_string "yes")
5792 (const_string "no")))
5793 (set (attr "length")
5795 (le (symbol_ref ("which_alternative"))
5798 (and (ge (minus (match_dup 2) (pc)) (const_int -250))
5799 (le (minus (match_dup 2) (pc)) (const_int 256)))
5802 (and (ge (minus (match_dup 2) (pc)) (const_int -2040))
5803 (le (minus (match_dup 2) (pc)) (const_int 2048)))
5807 (and (ge (minus (match_dup 2) (pc)) (const_int -248))
5808 (le (minus (match_dup 2) (pc)) (const_int 256)))
5811 (and (ge (minus (match_dup 2) (pc)) (const_int -2038))
5812 (le (minus (match_dup 2) (pc)) (const_int 2048)))
5817 (define_insn "*negated_cbranchsi4"
5820 (match_operator 0 "equality_operator"
5821 [(match_operand:SI 1 "s_register_operand" "l")
5822 (neg:SI (match_operand:SI 2 "s_register_operand" "l"))])
5823 (label_ref (match_operand 3 "" ""))
5827 output_asm_insn (\"cmn\\t%1, %2\", operands);
5828 switch (get_attr_length (insn))
5830 case 4: return \"b%d0\\t%l3\";
5831 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
5832 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
5835 [(set (attr "far_jump")
5837 (eq_attr "length" "8")
5838 (const_string "yes")
5839 (const_string "no")))
5840 (set (attr "length")
5842 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
5843 (le (minus (match_dup 3) (pc)) (const_int 256)))
5846 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
5847 (le (minus (match_dup 3) (pc)) (const_int 2048)))
5852 (define_insn "*tbit_cbranch"
5855 (match_operator 0 "equality_operator"
5856 [(zero_extract:SI (match_operand:SI 1 "s_register_operand" "l")
5858 (match_operand:SI 2 "const_int_operand" "i"))
5860 (label_ref (match_operand 3 "" ""))
5862 (clobber (match_scratch:SI 4 "=l"))]
5867 op[0] = operands[4];
5868 op[1] = operands[1];
5869 op[2] = GEN_INT (32 - 1 - INTVAL (operands[2]));
5871 output_asm_insn (\"lsl\\t%0, %1, %2\", op);
5872 switch (get_attr_length (insn))
5874 case 4: return \"b%d0\\t%l3\";
5875 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
5876 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
5879 [(set (attr "far_jump")
5881 (eq_attr "length" "8")
5882 (const_string "yes")
5883 (const_string "no")))
5884 (set (attr "length")
5886 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
5887 (le (minus (match_dup 3) (pc)) (const_int 256)))
5890 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
5891 (le (minus (match_dup 3) (pc)) (const_int 2048)))
5896 (define_insn "*tstsi3_cbranch"
5899 (match_operator 3 "equality_operator"
5900 [(and:SI (match_operand:SI 0 "s_register_operand" "%l")
5901 (match_operand:SI 1 "s_register_operand" "l"))
5903 (label_ref (match_operand 2 "" ""))
5908 output_asm_insn (\"tst\\t%0, %1\", operands);
5909 switch (get_attr_length (insn))
5911 case 4: return \"b%d3\\t%l2\";
5912 case 6: return \"b%D3\\t.LCB%=\;b\\t%l2\\t%@long jump\\n.LCB%=:\";
5913 default: return \"b%D3\\t.LCB%=\;bl\\t%l2\\t%@far jump\\n.LCB%=:\";
5916 [(set (attr "far_jump")
5918 (eq_attr "length" "8")
5919 (const_string "yes")
5920 (const_string "no")))
5921 (set (attr "length")
5923 (and (ge (minus (match_dup 2) (pc)) (const_int -250))
5924 (le (minus (match_dup 2) (pc)) (const_int 256)))
5927 (and (ge (minus (match_dup 2) (pc)) (const_int -2040))
5928 (le (minus (match_dup 2) (pc)) (const_int 2048)))
5933 (define_insn "*andsi3_cbranch"
5936 (match_operator 5 "equality_operator"
5937 [(and:SI (match_operand:SI 2 "s_register_operand" "%0,1,1,1")
5938 (match_operand:SI 3 "s_register_operand" "l,l,l,l"))
5940 (label_ref (match_operand 4 "" ""))
5942 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
5943 (and:SI (match_dup 2) (match_dup 3)))
5944 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
5948 if (which_alternative == 0)
5949 output_asm_insn (\"and\\t%0, %3\", operands);
5950 else if (which_alternative == 1)
5952 output_asm_insn (\"and\\t%1, %3\", operands);
5953 output_asm_insn (\"mov\\t%0, %1\", operands);
5957 output_asm_insn (\"and\\t%1, %3\", operands);
5958 output_asm_insn (\"str\\t%1, %0\", operands);
5961 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
5963 case 4: return \"b%d5\\t%l4\";
5964 case 6: return \"b%D5\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
5965 default: return \"b%D5\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
5968 [(set (attr "far_jump")
5970 (ior (and (eq (symbol_ref ("which_alternative"))
5972 (eq_attr "length" "8"))
5973 (eq_attr "length" "10"))
5974 (const_string "yes")
5975 (const_string "no")))
5976 (set (attr "length")
5978 (eq (symbol_ref ("which_alternative"))
5981 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
5982 (le (minus (match_dup 4) (pc)) (const_int 256)))
5985 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
5986 (le (minus (match_dup 4) (pc)) (const_int 2048)))
5990 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
5991 (le (minus (match_dup 4) (pc)) (const_int 256)))
5994 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
5995 (le (minus (match_dup 4) (pc)) (const_int 2048)))
6000 (define_insn "*orrsi3_cbranch_scratch"
6003 (match_operator 4 "equality_operator"
6004 [(ior:SI (match_operand:SI 1 "s_register_operand" "%0")
6005 (match_operand:SI 2 "s_register_operand" "l"))
6007 (label_ref (match_operand 3 "" ""))
6009 (clobber (match_scratch:SI 0 "=l"))]
6013 output_asm_insn (\"orr\\t%0, %2\", operands);
6014 switch (get_attr_length (insn))
6016 case 4: return \"b%d4\\t%l3\";
6017 case 6: return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6018 default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6021 [(set (attr "far_jump")
6023 (eq_attr "length" "8")
6024 (const_string "yes")
6025 (const_string "no")))
6026 (set (attr "length")
6028 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6029 (le (minus (match_dup 3) (pc)) (const_int 256)))
6032 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6033 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6038 (define_insn "*orrsi3_cbranch"
6041 (match_operator 5 "equality_operator"
6042 [(ior:SI (match_operand:SI 2 "s_register_operand" "%0,1,1,1")
6043 (match_operand:SI 3 "s_register_operand" "l,l,l,l"))
6045 (label_ref (match_operand 4 "" ""))
6047 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
6048 (ior:SI (match_dup 2) (match_dup 3)))
6049 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
6053 if (which_alternative == 0)
6054 output_asm_insn (\"orr\\t%0, %3\", operands);
6055 else if (which_alternative == 1)
6057 output_asm_insn (\"orr\\t%1, %3\", operands);
6058 output_asm_insn (\"mov\\t%0, %1\", operands);
6062 output_asm_insn (\"orr\\t%1, %3\", operands);
6063 output_asm_insn (\"str\\t%1, %0\", operands);
6066 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
6068 case 4: return \"b%d5\\t%l4\";
6069 case 6: return \"b%D5\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
6070 default: return \"b%D5\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
6073 [(set (attr "far_jump")
6075 (ior (and (eq (symbol_ref ("which_alternative"))
6077 (eq_attr "length" "8"))
6078 (eq_attr "length" "10"))
6079 (const_string "yes")
6080 (const_string "no")))
6081 (set (attr "length")
6083 (eq (symbol_ref ("which_alternative"))
6086 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
6087 (le (minus (match_dup 4) (pc)) (const_int 256)))
6090 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
6091 (le (minus (match_dup 4) (pc)) (const_int 2048)))
6095 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
6096 (le (minus (match_dup 4) (pc)) (const_int 256)))
6099 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
6100 (le (minus (match_dup 4) (pc)) (const_int 2048)))
6105 (define_insn "*xorsi3_cbranch_scratch"
6108 (match_operator 4 "equality_operator"
6109 [(xor:SI (match_operand:SI 1 "s_register_operand" "%0")
6110 (match_operand:SI 2 "s_register_operand" "l"))
6112 (label_ref (match_operand 3 "" ""))
6114 (clobber (match_scratch:SI 0 "=l"))]
6118 output_asm_insn (\"eor\\t%0, %2\", operands);
6119 switch (get_attr_length (insn))
6121 case 4: return \"b%d4\\t%l3\";
6122 case 6: return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6123 default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6126 [(set (attr "far_jump")
6128 (eq_attr "length" "8")
6129 (const_string "yes")
6130 (const_string "no")))
6131 (set (attr "length")
6133 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6134 (le (minus (match_dup 3) (pc)) (const_int 256)))
6137 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6138 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6143 (define_insn "*xorsi3_cbranch"
6146 (match_operator 5 "equality_operator"
6147 [(xor:SI (match_operand:SI 2 "s_register_operand" "%0,1,1,1")
6148 (match_operand:SI 3 "s_register_operand" "l,l,l,l"))
6150 (label_ref (match_operand 4 "" ""))
6152 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
6153 (xor:SI (match_dup 2) (match_dup 3)))
6154 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
6158 if (which_alternative == 0)
6159 output_asm_insn (\"eor\\t%0, %3\", operands);
6160 else if (which_alternative == 1)
6162 output_asm_insn (\"eor\\t%1, %3\", operands);
6163 output_asm_insn (\"mov\\t%0, %1\", operands);
6167 output_asm_insn (\"eor\\t%1, %3\", operands);
6168 output_asm_insn (\"str\\t%1, %0\", operands);
6171 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
6173 case 4: return \"b%d5\\t%l4\";
6174 case 6: return \"b%D5\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
6175 default: return \"b%D5\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
6178 [(set (attr "far_jump")
6180 (ior (and (eq (symbol_ref ("which_alternative"))
6182 (eq_attr "length" "8"))
6183 (eq_attr "length" "10"))
6184 (const_string "yes")
6185 (const_string "no")))
6186 (set (attr "length")
6188 (eq (symbol_ref ("which_alternative"))
6191 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
6192 (le (minus (match_dup 4) (pc)) (const_int 256)))
6195 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
6196 (le (minus (match_dup 4) (pc)) (const_int 2048)))
6200 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
6201 (le (minus (match_dup 4) (pc)) (const_int 256)))
6204 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
6205 (le (minus (match_dup 4) (pc)) (const_int 2048)))
6210 (define_insn "*bicsi3_cbranch_scratch"
6213 (match_operator 4 "equality_operator"
6214 [(and:SI (not:SI (match_operand:SI 2 "s_register_operand" "l"))
6215 (match_operand:SI 1 "s_register_operand" "0"))
6217 (label_ref (match_operand 3 "" ""))
6219 (clobber (match_scratch:SI 0 "=l"))]
6223 output_asm_insn (\"bic\\t%0, %2\", operands);
6224 switch (get_attr_length (insn))
6226 case 4: return \"b%d4\\t%l3\";
6227 case 6: return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6228 default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6231 [(set (attr "far_jump")
6233 (eq_attr "length" "8")
6234 (const_string "yes")
6235 (const_string "no")))
6236 (set (attr "length")
6238 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6239 (le (minus (match_dup 3) (pc)) (const_int 256)))
6242 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6243 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6248 (define_insn "*bicsi3_cbranch"
6251 (match_operator 5 "equality_operator"
6252 [(and:SI (not:SI (match_operand:SI 3 "s_register_operand" "l,l,l,l,l"))
6253 (match_operand:SI 2 "s_register_operand" "0,1,1,1,1"))
6255 (label_ref (match_operand 4 "" ""))
6257 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=!l,l,*?h,*?m,*?m")
6258 (and:SI (not:SI (match_dup 3)) (match_dup 2)))
6259 (clobber (match_scratch:SI 1 "=X,l,l,&l,&l"))]
6263 if (which_alternative == 0)
6264 output_asm_insn (\"bic\\t%0, %3\", operands);
6265 else if (which_alternative <= 2)
6267 output_asm_insn (\"bic\\t%1, %3\", operands);
6268 /* It's ok if OP0 is a lo-reg, even though the mov will set the
6269 conditions again, since we're only testing for equality. */
6270 output_asm_insn (\"mov\\t%0, %1\", operands);
6274 output_asm_insn (\"bic\\t%1, %3\", operands);
6275 output_asm_insn (\"str\\t%1, %0\", operands);
6278 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
6280 case 4: return \"b%d5\\t%l4\";
6281 case 6: return \"b%D5\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
6282 default: return \"b%D5\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
6285 [(set (attr "far_jump")
6287 (ior (and (eq (symbol_ref ("which_alternative"))
6289 (eq_attr "length" "8"))
6290 (eq_attr "length" "10"))
6291 (const_string "yes")
6292 (const_string "no")))
6293 (set (attr "length")
6295 (eq (symbol_ref ("which_alternative"))
6298 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
6299 (le (minus (match_dup 4) (pc)) (const_int 256)))
6302 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
6303 (le (minus (match_dup 4) (pc)) (const_int 2048)))
6307 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
6308 (le (minus (match_dup 4) (pc)) (const_int 256)))
6311 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
6312 (le (minus (match_dup 4) (pc)) (const_int 2048)))
6317 (define_insn "*cbranchne_decr1"
6319 (if_then_else (match_operator 3 "equality_operator"
6320 [(match_operand:SI 2 "s_register_operand" "l,l,1,l")
6322 (label_ref (match_operand 4 "" ""))
6324 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
6325 (plus:SI (match_dup 2) (const_int -1)))
6326 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
6331 cond[0] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
6333 VOIDmode, operands[2], const1_rtx);
6334 cond[1] = operands[4];
6336 if (which_alternative == 0)
6337 output_asm_insn (\"sub\\t%0, %2, #1\", operands);
6338 else if (which_alternative == 1)
6340 /* We must provide an alternative for a hi reg because reload
6341 cannot handle output reloads on a jump instruction, but we
6342 can't subtract into that. Fortunately a mov from lo to hi
6343 does not clobber the condition codes. */
6344 output_asm_insn (\"sub\\t%1, %2, #1\", operands);
6345 output_asm_insn (\"mov\\t%0, %1\", operands);
6349 /* Similarly, but the target is memory. */
6350 output_asm_insn (\"sub\\t%1, %2, #1\", operands);
6351 output_asm_insn (\"str\\t%1, %0\", operands);
6354 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
6357 output_asm_insn (\"b%d0\\t%l1\", cond);
6360 output_asm_insn (\"b%D0\\t.LCB%=\", cond);
6361 return \"b\\t%l4\\t%@long jump\\n.LCB%=:\";
6363 output_asm_insn (\"b%D0\\t.LCB%=\", cond);
6364 return \"bl\\t%l4\\t%@far jump\\n.LCB%=:\";
6368 [(set (attr "far_jump")
6370 (ior (and (eq (symbol_ref ("which_alternative"))
6372 (eq_attr "length" "8"))
6373 (eq_attr "length" "10"))
6374 (const_string "yes")
6375 (const_string "no")))
6376 (set_attr_alternative "length"
6380 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
6381 (le (minus (match_dup 4) (pc)) (const_int 256)))
6384 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
6385 (le (minus (match_dup 4) (pc)) (const_int 2048)))
6390 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
6391 (le (minus (match_dup 4) (pc)) (const_int 256)))
6394 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
6395 (le (minus (match_dup 4) (pc)) (const_int 2048)))
6400 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
6401 (le (minus (match_dup 4) (pc)) (const_int 256)))
6404 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
6405 (le (minus (match_dup 4) (pc)) (const_int 2048)))
6410 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
6411 (le (minus (match_dup 4) (pc)) (const_int 256)))
6414 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
6415 (le (minus (match_dup 4) (pc)) (const_int 2048)))
6420 (define_insn "*addsi3_cbranch"
6423 (match_operator 4 "comparison_operator"
6425 (match_operand:SI 2 "s_register_operand" "%l,0,*0,1,1,1")
6426 (match_operand:SI 3 "reg_or_int_operand" "lL,IJ,*r,lIJ,lIJ,lIJ"))
6428 (label_ref (match_operand 5 "" ""))
6431 (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,l,*!h,*?h,*?m,*?m")
6432 (plus:SI (match_dup 2) (match_dup 3)))
6433 (clobber (match_scratch:SI 1 "=X,X,X,l,&l,&l"))]
6435 && (GET_CODE (operands[4]) == EQ
6436 || GET_CODE (operands[4]) == NE
6437 || GET_CODE (operands[4]) == GE
6438 || GET_CODE (operands[4]) == LT)"
6444 cond[0] = (which_alternative < 3) ? operands[0] : operands[1];
6445 cond[1] = operands[2];
6446 cond[2] = operands[3];
6448 if (GET_CODE (cond[2]) == CONST_INT && INTVAL (cond[2]) < 0)
6449 output_asm_insn (\"sub\\t%0, %1, #%n2\", cond);
6451 output_asm_insn (\"add\\t%0, %1, %2\", cond);
6453 if (which_alternative >= 3
6454 && which_alternative < 4)
6455 output_asm_insn (\"mov\\t%0, %1\", operands);
6456 else if (which_alternative >= 4)
6457 output_asm_insn (\"str\\t%1, %0\", operands);
6459 switch (get_attr_length (insn) - ((which_alternative >= 3) ? 2 : 0))
6462 return \"b%d4\\t%l5\";
6464 return \"b%D4\\t.LCB%=\;b\\t%l5\\t%@long jump\\n.LCB%=:\";
6466 return \"b%D4\\t.LCB%=\;bl\\t%l5\\t%@far jump\\n.LCB%=:\";
6470 [(set (attr "far_jump")
6472 (ior (and (lt (symbol_ref ("which_alternative"))
6474 (eq_attr "length" "8"))
6475 (eq_attr "length" "10"))
6476 (const_string "yes")
6477 (const_string "no")))
6478 (set (attr "length")
6480 (lt (symbol_ref ("which_alternative"))
6483 (and (ge (minus (match_dup 5) (pc)) (const_int -250))
6484 (le (minus (match_dup 5) (pc)) (const_int 256)))
6487 (and (ge (minus (match_dup 5) (pc)) (const_int -2040))
6488 (le (minus (match_dup 5) (pc)) (const_int 2048)))
6492 (and (ge (minus (match_dup 5) (pc)) (const_int -248))
6493 (le (minus (match_dup 5) (pc)) (const_int 256)))
6496 (and (ge (minus (match_dup 5) (pc)) (const_int -2038))
6497 (le (minus (match_dup 5) (pc)) (const_int 2048)))
6502 (define_insn "*addsi3_cbranch_scratch"
6505 (match_operator 3 "comparison_operator"
6507 (match_operand:SI 1 "s_register_operand" "%l,l,l,0")
6508 (match_operand:SI 2 "reg_or_int_operand" "J,l,L,IJ"))
6510 (label_ref (match_operand 4 "" ""))
6512 (clobber (match_scratch:SI 0 "=X,X,l,l"))]
6514 && (GET_CODE (operands[3]) == EQ
6515 || GET_CODE (operands[3]) == NE
6516 || GET_CODE (operands[3]) == GE
6517 || GET_CODE (operands[3]) == LT)"
6520 switch (which_alternative)
6523 output_asm_insn (\"cmp\t%1, #%n2\", operands);
6526 output_asm_insn (\"cmn\t%1, %2\", operands);
6529 if (INTVAL (operands[2]) < 0)
6530 output_asm_insn (\"sub\t%0, %1, %2\", operands);
6532 output_asm_insn (\"add\t%0, %1, %2\", operands);
6535 if (INTVAL (operands[2]) < 0)
6536 output_asm_insn (\"sub\t%0, %0, %2\", operands);
6538 output_asm_insn (\"add\t%0, %0, %2\", operands);
6542 switch (get_attr_length (insn))
6545 return \"b%d3\\t%l4\";
6547 return \"b%D3\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
6549 return \"b%D3\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
6553 [(set (attr "far_jump")
6555 (eq_attr "length" "8")
6556 (const_string "yes")
6557 (const_string "no")))
6558 (set (attr "length")
6560 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
6561 (le (minus (match_dup 4) (pc)) (const_int 256)))
6564 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
6565 (le (minus (match_dup 4) (pc)) (const_int 2048)))
6570 (define_insn "*subsi3_cbranch"
6573 (match_operator 4 "comparison_operator"
6575 (match_operand:SI 2 "s_register_operand" "l,l,1,l")
6576 (match_operand:SI 3 "s_register_operand" "l,l,l,l"))
6578 (label_ref (match_operand 5 "" ""))
6580 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
6581 (minus:SI (match_dup 2) (match_dup 3)))
6582 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
6584 && (GET_CODE (operands[4]) == EQ
6585 || GET_CODE (operands[4]) == NE
6586 || GET_CODE (operands[4]) == GE
6587 || GET_CODE (operands[4]) == LT)"
6590 if (which_alternative == 0)
6591 output_asm_insn (\"sub\\t%0, %2, %3\", operands);
6592 else if (which_alternative == 1)
6594 /* We must provide an alternative for a hi reg because reload
6595 cannot handle output reloads on a jump instruction, but we
6596 can't subtract into that. Fortunately a mov from lo to hi
6597 does not clobber the condition codes. */
6598 output_asm_insn (\"sub\\t%1, %2, %3\", operands);
6599 output_asm_insn (\"mov\\t%0, %1\", operands);
6603 /* Similarly, but the target is memory. */
6604 output_asm_insn (\"sub\\t%1, %2, %3\", operands);
6605 output_asm_insn (\"str\\t%1, %0\", operands);
6608 switch (get_attr_length (insn) - ((which_alternative != 0) ? 2 : 0))
6611 return \"b%d4\\t%l5\";
6613 return \"b%D4\\t.LCB%=\;b\\t%l5\\t%@long jump\\n.LCB%=:\";
6615 return \"b%D4\\t.LCB%=\;bl\\t%l5\\t%@far jump\\n.LCB%=:\";
6619 [(set (attr "far_jump")
6621 (ior (and (eq (symbol_ref ("which_alternative"))
6623 (eq_attr "length" "8"))
6624 (eq_attr "length" "10"))
6625 (const_string "yes")
6626 (const_string "no")))
6627 (set (attr "length")
6629 (eq (symbol_ref ("which_alternative"))
6632 (and (ge (minus (match_dup 5) (pc)) (const_int -250))
6633 (le (minus (match_dup 5) (pc)) (const_int 256)))
6636 (and (ge (minus (match_dup 5) (pc)) (const_int -2040))
6637 (le (minus (match_dup 5) (pc)) (const_int 2048)))
6641 (and (ge (minus (match_dup 5) (pc)) (const_int -248))
6642 (le (minus (match_dup 5) (pc)) (const_int 256)))
6645 (and (ge (minus (match_dup 5) (pc)) (const_int -2038))
6646 (le (minus (match_dup 5) (pc)) (const_int 2048)))
6651 (define_insn "*subsi3_cbranch_scratch"
6654 (match_operator 0 "arm_comparison_operator"
6655 [(minus:SI (match_operand:SI 1 "register_operand" "l")
6656 (match_operand:SI 2 "nonmemory_operand" "l"))
6658 (label_ref (match_operand 3 "" ""))
6661 && (GET_CODE (operands[0]) == EQ
6662 || GET_CODE (operands[0]) == NE
6663 || GET_CODE (operands[0]) == GE
6664 || GET_CODE (operands[0]) == LT)"
6666 output_asm_insn (\"cmp\\t%1, %2\", operands);
6667 switch (get_attr_length (insn))
6669 case 4: return \"b%d0\\t%l3\";
6670 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6671 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6674 [(set (attr "far_jump")
6676 (eq_attr "length" "8")
6677 (const_string "yes")
6678 (const_string "no")))
6679 (set (attr "length")
6681 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6682 (le (minus (match_dup 3) (pc)) (const_int 256)))
6685 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6686 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6691 ;; Comparison and test insns
6693 (define_expand "cmpsi"
6694 [(match_operand:SI 0 "s_register_operand" "")
6695 (match_operand:SI 1 "arm_add_operand" "")]
6698 arm_compare_op0 = operands[0];
6699 arm_compare_op1 = operands[1];
6704 (define_expand "cmpsf"
6705 [(match_operand:SF 0 "s_register_operand" "")
6706 (match_operand:SF 1 "arm_float_compare_operand" "")]
6707 "TARGET_ARM && TARGET_HARD_FLOAT"
6709 arm_compare_op0 = operands[0];
6710 arm_compare_op1 = operands[1];
6715 (define_expand "cmpdf"
6716 [(match_operand:DF 0 "s_register_operand" "")
6717 (match_operand:DF 1 "arm_float_compare_operand" "")]
6718 "TARGET_ARM && TARGET_HARD_FLOAT"
6720 arm_compare_op0 = operands[0];
6721 arm_compare_op1 = operands[1];
6726 (define_insn "*arm_cmpsi_insn"
6727 [(set (reg:CC CC_REGNUM)
6728 (compare:CC (match_operand:SI 0 "s_register_operand" "r,r")
6729 (match_operand:SI 1 "arm_add_operand" "rI,L")))]
6734 [(set_attr "conds" "set")]
6737 (define_insn "*cmpsi_shiftsi"
6738 [(set (reg:CC CC_REGNUM)
6739 (compare:CC (match_operand:SI 0 "s_register_operand" "r")
6740 (match_operator:SI 3 "shift_operator"
6741 [(match_operand:SI 1 "s_register_operand" "r")
6742 (match_operand:SI 2 "arm_rhs_operand" "rM")])))]
6745 [(set_attr "conds" "set")
6746 (set_attr "shift" "1")
6747 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
6748 (const_string "alu_shift")
6749 (const_string "alu_shift_reg")))]
6752 (define_insn "*cmpsi_shiftsi_swp"
6753 [(set (reg:CC_SWP CC_REGNUM)
6754 (compare:CC_SWP (match_operator:SI 3 "shift_operator"
6755 [(match_operand:SI 1 "s_register_operand" "r")
6756 (match_operand:SI 2 "reg_or_int_operand" "rM")])
6757 (match_operand:SI 0 "s_register_operand" "r")))]
6760 [(set_attr "conds" "set")
6761 (set_attr "shift" "1")
6762 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
6763 (const_string "alu_shift")
6764 (const_string "alu_shift_reg")))]
6767 (define_insn "*cmpsi_negshiftsi_si"
6768 [(set (reg:CC_Z CC_REGNUM)
6770 (neg:SI (match_operator:SI 1 "shift_operator"
6771 [(match_operand:SI 2 "s_register_operand" "r")
6772 (match_operand:SI 3 "reg_or_int_operand" "rM")]))
6773 (match_operand:SI 0 "s_register_operand" "r")))]
6776 [(set_attr "conds" "set")
6777 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
6778 (const_string "alu_shift")
6779 (const_string "alu_shift_reg")))]
6782 ;; Cirrus SF compare instruction
6783 (define_insn "*cirrus_cmpsf"
6784 [(set (reg:CCFP CC_REGNUM)
6785 (compare:CCFP (match_operand:SF 0 "cirrus_fp_register" "v")
6786 (match_operand:SF 1 "cirrus_fp_register" "v")))]
6787 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
6788 "cfcmps%?\\tr15, %V0, %V1"
6789 [(set_attr "type" "mav_farith")
6790 (set_attr "cirrus" "compare")]
6793 ;; Cirrus DF compare instruction
6794 (define_insn "*cirrus_cmpdf"
6795 [(set (reg:CCFP CC_REGNUM)
6796 (compare:CCFP (match_operand:DF 0 "cirrus_fp_register" "v")
6797 (match_operand:DF 1 "cirrus_fp_register" "v")))]
6798 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
6799 "cfcmpd%?\\tr15, %V0, %V1"
6800 [(set_attr "type" "mav_farith")
6801 (set_attr "cirrus" "compare")]
6804 ;; Cirrus DI compare instruction
6805 (define_expand "cmpdi"
6806 [(match_operand:DI 0 "cirrus_fp_register" "")
6807 (match_operand:DI 1 "cirrus_fp_register" "")]
6808 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
6810 arm_compare_op0 = operands[0];
6811 arm_compare_op1 = operands[1];
6815 (define_insn "*cirrus_cmpdi"
6816 [(set (reg:CC CC_REGNUM)
6817 (compare:CC (match_operand:DI 0 "cirrus_fp_register" "v")
6818 (match_operand:DI 1 "cirrus_fp_register" "v")))]
6819 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
6820 "cfcmp64%?\\tr15, %V0, %V1"
6821 [(set_attr "type" "mav_farith")
6822 (set_attr "cirrus" "compare")]
6825 ; This insn allows redundant compares to be removed by cse, nothing should
6826 ; ever appear in the output file since (set (reg x) (reg x)) is a no-op that
6827 ; is deleted later on. The match_dup will match the mode here, so that
6828 ; mode changes of the condition codes aren't lost by this even though we don't
6829 ; specify what they are.
6831 (define_insn "*deleted_compare"
6832 [(set (match_operand 0 "cc_register" "") (match_dup 0))]
6834 "\\t%@ deleted compare"
6835 [(set_attr "conds" "set")
6836 (set_attr "length" "0")]
6840 ;; Conditional branch insns
6842 (define_expand "beq"
6844 (if_then_else (eq (match_dup 1) (const_int 0))
6845 (label_ref (match_operand 0 "" ""))
6848 "operands[1] = arm_gen_compare_reg (EQ, arm_compare_op0, arm_compare_op1);"
6851 (define_expand "bne"
6853 (if_then_else (ne (match_dup 1) (const_int 0))
6854 (label_ref (match_operand 0 "" ""))
6857 "operands[1] = arm_gen_compare_reg (NE, arm_compare_op0, arm_compare_op1);"
6860 (define_expand "bgt"
6862 (if_then_else (gt (match_dup 1) (const_int 0))
6863 (label_ref (match_operand 0 "" ""))
6866 "operands[1] = arm_gen_compare_reg (GT, arm_compare_op0, arm_compare_op1);"
6869 (define_expand "ble"
6871 (if_then_else (le (match_dup 1) (const_int 0))
6872 (label_ref (match_operand 0 "" ""))
6875 "operands[1] = arm_gen_compare_reg (LE, arm_compare_op0, arm_compare_op1);"
6878 (define_expand "bge"
6880 (if_then_else (ge (match_dup 1) (const_int 0))
6881 (label_ref (match_operand 0 "" ""))
6884 "operands[1] = arm_gen_compare_reg (GE, arm_compare_op0, arm_compare_op1);"
6887 (define_expand "blt"
6889 (if_then_else (lt (match_dup 1) (const_int 0))
6890 (label_ref (match_operand 0 "" ""))
6893 "operands[1] = arm_gen_compare_reg (LT, arm_compare_op0, arm_compare_op1);"
6896 (define_expand "bgtu"
6898 (if_then_else (gtu (match_dup 1) (const_int 0))
6899 (label_ref (match_operand 0 "" ""))
6902 "operands[1] = arm_gen_compare_reg (GTU, arm_compare_op0, arm_compare_op1);"
6905 (define_expand "bleu"
6907 (if_then_else (leu (match_dup 1) (const_int 0))
6908 (label_ref (match_operand 0 "" ""))
6911 "operands[1] = arm_gen_compare_reg (LEU, arm_compare_op0, arm_compare_op1);"
6914 (define_expand "bgeu"
6916 (if_then_else (geu (match_dup 1) (const_int 0))
6917 (label_ref (match_operand 0 "" ""))
6920 "operands[1] = arm_gen_compare_reg (GEU, arm_compare_op0, arm_compare_op1);"
6923 (define_expand "bltu"
6925 (if_then_else (ltu (match_dup 1) (const_int 0))
6926 (label_ref (match_operand 0 "" ""))
6929 "operands[1] = arm_gen_compare_reg (LTU, arm_compare_op0, arm_compare_op1);"
6932 (define_expand "bunordered"
6934 (if_then_else (unordered (match_dup 1) (const_int 0))
6935 (label_ref (match_operand 0 "" ""))
6937 "TARGET_ARM && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
6938 "operands[1] = arm_gen_compare_reg (UNORDERED, arm_compare_op0,
6942 (define_expand "bordered"
6944 (if_then_else (ordered (match_dup 1) (const_int 0))
6945 (label_ref (match_operand 0 "" ""))
6947 "TARGET_ARM && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
6948 "operands[1] = arm_gen_compare_reg (ORDERED, arm_compare_op0,
6952 (define_expand "bungt"
6954 (if_then_else (ungt (match_dup 1) (const_int 0))
6955 (label_ref (match_operand 0 "" ""))
6957 "TARGET_ARM && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
6958 "operands[1] = arm_gen_compare_reg (UNGT, arm_compare_op0, arm_compare_op1);"
6961 (define_expand "bunlt"
6963 (if_then_else (unlt (match_dup 1) (const_int 0))
6964 (label_ref (match_operand 0 "" ""))
6966 "TARGET_ARM && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
6967 "operands[1] = arm_gen_compare_reg (UNLT, arm_compare_op0, arm_compare_op1);"
6970 (define_expand "bunge"
6972 (if_then_else (unge (match_dup 1) (const_int 0))
6973 (label_ref (match_operand 0 "" ""))
6975 "TARGET_ARM && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
6976 "operands[1] = arm_gen_compare_reg (UNGE, arm_compare_op0, arm_compare_op1);"
6979 (define_expand "bunle"
6981 (if_then_else (unle (match_dup 1) (const_int 0))
6982 (label_ref (match_operand 0 "" ""))
6984 "TARGET_ARM && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
6985 "operands[1] = arm_gen_compare_reg (UNLE, arm_compare_op0, arm_compare_op1);"
6988 ;; The following two patterns need two branch instructions, since there is
6989 ;; no single instruction that will handle all cases.
6990 (define_expand "buneq"
6992 (if_then_else (uneq (match_dup 1) (const_int 0))
6993 (label_ref (match_operand 0 "" ""))
6995 "TARGET_ARM && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
6996 "operands[1] = arm_gen_compare_reg (UNEQ, arm_compare_op0, arm_compare_op1);"
6999 (define_expand "bltgt"
7001 (if_then_else (ltgt (match_dup 1) (const_int 0))
7002 (label_ref (match_operand 0 "" ""))
7004 "TARGET_ARM && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7005 "operands[1] = arm_gen_compare_reg (LTGT, arm_compare_op0, arm_compare_op1);"
7009 ;; Patterns to match conditional branch insns.
7012 ; Special pattern to match UNEQ.
7013 (define_insn "*arm_buneq"
7015 (if_then_else (uneq (match_operand 1 "cc_register" "") (const_int 0))
7016 (label_ref (match_operand 0 "" ""))
7018 "TARGET_ARM && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7020 gcc_assert (!arm_ccfsm_state);
7022 return \"bvs\\t%l0\;beq\\t%l0\";
7024 [(set_attr "conds" "jump_clob")
7025 (set_attr "length" "8")]
7028 ; Special pattern to match LTGT.
7029 (define_insn "*arm_bltgt"
7031 (if_then_else (ltgt (match_operand 1 "cc_register" "") (const_int 0))
7032 (label_ref (match_operand 0 "" ""))
7034 "TARGET_ARM && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7036 gcc_assert (!arm_ccfsm_state);
7038 return \"bmi\\t%l0\;bgt\\t%l0\";
7040 [(set_attr "conds" "jump_clob")
7041 (set_attr "length" "8")]
7044 (define_insn "*arm_cond_branch"
7046 (if_then_else (match_operator 1 "arm_comparison_operator"
7047 [(match_operand 2 "cc_register" "") (const_int 0)])
7048 (label_ref (match_operand 0 "" ""))
7052 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7054 arm_ccfsm_state += 2;
7057 return \"b%d1\\t%l0\";
7059 [(set_attr "conds" "use")
7060 (set_attr "type" "branch")]
7063 ; Special pattern to match reversed UNEQ.
7064 (define_insn "*arm_buneq_reversed"
7066 (if_then_else (uneq (match_operand 1 "cc_register" "") (const_int 0))
7068 (label_ref (match_operand 0 "" ""))))]
7069 "TARGET_ARM && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7071 gcc_assert (!arm_ccfsm_state);
7073 return \"bmi\\t%l0\;bgt\\t%l0\";
7075 [(set_attr "conds" "jump_clob")
7076 (set_attr "length" "8")]
7079 ; Special pattern to match reversed LTGT.
7080 (define_insn "*arm_bltgt_reversed"
7082 (if_then_else (ltgt (match_operand 1 "cc_register" "") (const_int 0))
7084 (label_ref (match_operand 0 "" ""))))]
7085 "TARGET_ARM && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7087 gcc_assert (!arm_ccfsm_state);
7089 return \"bvs\\t%l0\;beq\\t%l0\";
7091 [(set_attr "conds" "jump_clob")
7092 (set_attr "length" "8")]
7095 (define_insn "*arm_cond_branch_reversed"
7097 (if_then_else (match_operator 1 "arm_comparison_operator"
7098 [(match_operand 2 "cc_register" "") (const_int 0)])
7100 (label_ref (match_operand 0 "" ""))))]
7103 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7105 arm_ccfsm_state += 2;
7108 return \"b%D1\\t%l0\";
7110 [(set_attr "conds" "use")
7111 (set_attr "type" "branch")]
7118 (define_expand "seq"
7119 [(set (match_operand:SI 0 "s_register_operand" "")
7120 (eq:SI (match_dup 1) (const_int 0)))]
7122 "operands[1] = arm_gen_compare_reg (EQ, arm_compare_op0, arm_compare_op1);"
7125 (define_expand "sne"
7126 [(set (match_operand:SI 0 "s_register_operand" "")
7127 (ne:SI (match_dup 1) (const_int 0)))]
7129 "operands[1] = arm_gen_compare_reg (NE, arm_compare_op0, arm_compare_op1);"
7132 (define_expand "sgt"
7133 [(set (match_operand:SI 0 "s_register_operand" "")
7134 (gt:SI (match_dup 1) (const_int 0)))]
7136 "operands[1] = arm_gen_compare_reg (GT, arm_compare_op0, arm_compare_op1);"
7139 (define_expand "sle"
7140 [(set (match_operand:SI 0 "s_register_operand" "")
7141 (le:SI (match_dup 1) (const_int 0)))]
7143 "operands[1] = arm_gen_compare_reg (LE, arm_compare_op0, arm_compare_op1);"
7146 (define_expand "sge"
7147 [(set (match_operand:SI 0 "s_register_operand" "")
7148 (ge:SI (match_dup 1) (const_int 0)))]
7150 "operands[1] = arm_gen_compare_reg (GE, arm_compare_op0, arm_compare_op1);"
7153 (define_expand "slt"
7154 [(set (match_operand:SI 0 "s_register_operand" "")
7155 (lt:SI (match_dup 1) (const_int 0)))]
7157 "operands[1] = arm_gen_compare_reg (LT, arm_compare_op0, arm_compare_op1);"
7160 (define_expand "sgtu"
7161 [(set (match_operand:SI 0 "s_register_operand" "")
7162 (gtu:SI (match_dup 1) (const_int 0)))]
7164 "operands[1] = arm_gen_compare_reg (GTU, arm_compare_op0, arm_compare_op1);"
7167 (define_expand "sleu"
7168 [(set (match_operand:SI 0 "s_register_operand" "")
7169 (leu:SI (match_dup 1) (const_int 0)))]
7171 "operands[1] = arm_gen_compare_reg (LEU, arm_compare_op0, arm_compare_op1);"
7174 (define_expand "sgeu"
7175 [(set (match_operand:SI 0 "s_register_operand" "")
7176 (geu:SI (match_dup 1) (const_int 0)))]
7178 "operands[1] = arm_gen_compare_reg (GEU, arm_compare_op0, arm_compare_op1);"
7181 (define_expand "sltu"
7182 [(set (match_operand:SI 0 "s_register_operand" "")
7183 (ltu:SI (match_dup 1) (const_int 0)))]
7185 "operands[1] = arm_gen_compare_reg (LTU, arm_compare_op0, arm_compare_op1);"
7188 (define_expand "sunordered"
7189 [(set (match_operand:SI 0 "s_register_operand" "")
7190 (unordered:SI (match_dup 1) (const_int 0)))]
7191 "TARGET_ARM && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7192 "operands[1] = arm_gen_compare_reg (UNORDERED, arm_compare_op0,
7196 (define_expand "sordered"
7197 [(set (match_operand:SI 0 "s_register_operand" "")
7198 (ordered:SI (match_dup 1) (const_int 0)))]
7199 "TARGET_ARM && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7200 "operands[1] = arm_gen_compare_reg (ORDERED, arm_compare_op0,
7204 (define_expand "sungt"
7205 [(set (match_operand:SI 0 "s_register_operand" "")
7206 (ungt:SI (match_dup 1) (const_int 0)))]
7207 "TARGET_ARM && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7208 "operands[1] = arm_gen_compare_reg (UNGT, arm_compare_op0,
7212 (define_expand "sunge"
7213 [(set (match_operand:SI 0 "s_register_operand" "")
7214 (unge:SI (match_dup 1) (const_int 0)))]
7215 "TARGET_ARM && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7216 "operands[1] = arm_gen_compare_reg (UNGE, arm_compare_op0,
7220 (define_expand "sunlt"
7221 [(set (match_operand:SI 0 "s_register_operand" "")
7222 (unlt:SI (match_dup 1) (const_int 0)))]
7223 "TARGET_ARM && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7224 "operands[1] = arm_gen_compare_reg (UNLT, arm_compare_op0,
7228 (define_expand "sunle"
7229 [(set (match_operand:SI 0 "s_register_operand" "")
7230 (unle:SI (match_dup 1) (const_int 0)))]
7231 "TARGET_ARM && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7232 "operands[1] = arm_gen_compare_reg (UNLE, arm_compare_op0,
7236 ;;; DO NOT add patterns for SUNEQ or SLTGT, these can't be represented with
7237 ;;; simple ARM instructions.
7239 ; (define_expand "suneq"
7240 ; [(set (match_operand:SI 0 "s_register_operand" "")
7241 ; (uneq:SI (match_dup 1) (const_int 0)))]
7242 ; "TARGET_ARM && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7243 ; "gcc_unreachable ();"
7246 ; (define_expand "sltgt"
7247 ; [(set (match_operand:SI 0 "s_register_operand" "")
7248 ; (ltgt:SI (match_dup 1) (const_int 0)))]
7249 ; "TARGET_ARM && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7250 ; "gcc_unreachable ();"
7253 (define_insn "*mov_scc"
7254 [(set (match_operand:SI 0 "s_register_operand" "=r")
7255 (match_operator:SI 1 "arm_comparison_operator"
7256 [(match_operand 2 "cc_register" "") (const_int 0)]))]
7258 "mov%D1\\t%0, #0\;mov%d1\\t%0, #1"
7259 [(set_attr "conds" "use")
7260 (set_attr "length" "8")]
7263 (define_insn "*mov_negscc"
7264 [(set (match_operand:SI 0 "s_register_operand" "=r")
7265 (neg:SI (match_operator:SI 1 "arm_comparison_operator"
7266 [(match_operand 2 "cc_register" "") (const_int 0)])))]
7268 "mov%D1\\t%0, #0\;mvn%d1\\t%0, #0"
7269 [(set_attr "conds" "use")
7270 (set_attr "length" "8")]
7273 (define_insn "*mov_notscc"
7274 [(set (match_operand:SI 0 "s_register_operand" "=r")
7275 (not:SI (match_operator:SI 1 "arm_comparison_operator"
7276 [(match_operand 2 "cc_register" "") (const_int 0)])))]
7278 "mov%D1\\t%0, #0\;mvn%d1\\t%0, #1"
7279 [(set_attr "conds" "use")
7280 (set_attr "length" "8")]
7284 ;; Conditional move insns
7286 (define_expand "movsicc"
7287 [(set (match_operand:SI 0 "s_register_operand" "")
7288 (if_then_else:SI (match_operand 1 "arm_comparison_operator" "")
7289 (match_operand:SI 2 "arm_not_operand" "")
7290 (match_operand:SI 3 "arm_not_operand" "")))]
7294 enum rtx_code code = GET_CODE (operands[1]);
7297 if (code == UNEQ || code == LTGT)
7300 ccreg = arm_gen_compare_reg (code, arm_compare_op0, arm_compare_op1);
7301 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
7305 (define_expand "movsfcc"
7306 [(set (match_operand:SF 0 "s_register_operand" "")
7307 (if_then_else:SF (match_operand 1 "arm_comparison_operator" "")
7308 (match_operand:SF 2 "s_register_operand" "")
7309 (match_operand:SF 3 "nonmemory_operand" "")))]
7313 enum rtx_code code = GET_CODE (operands[1]);
7316 if (code == UNEQ || code == LTGT)
7319 /* When compiling for SOFT_FLOAT, ensure both arms are in registers.
7320 Otherwise, ensure it is a valid FP add operand */
7321 if ((!(TARGET_HARD_FLOAT && TARGET_FPA))
7322 || (!arm_float_add_operand (operands[3], SFmode)))
7323 operands[3] = force_reg (SFmode, operands[3]);
7325 ccreg = arm_gen_compare_reg (code, arm_compare_op0, arm_compare_op1);
7326 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
7330 (define_expand "movdfcc"
7331 [(set (match_operand:DF 0 "s_register_operand" "")
7332 (if_then_else:DF (match_operand 1 "arm_comparison_operator" "")
7333 (match_operand:DF 2 "s_register_operand" "")
7334 (match_operand:DF 3 "arm_float_add_operand" "")))]
7335 "TARGET_ARM && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7338 enum rtx_code code = GET_CODE (operands[1]);
7341 if (code == UNEQ || code == LTGT)
7344 ccreg = arm_gen_compare_reg (code, arm_compare_op0, arm_compare_op1);
7345 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
7349 (define_insn "*movsicc_insn"
7350 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r,r,r,r,r")
7352 (match_operator 3 "arm_comparison_operator"
7353 [(match_operand 4 "cc_register" "") (const_int 0)])
7354 (match_operand:SI 1 "arm_not_operand" "0,0,rI,K,rI,rI,K,K")
7355 (match_operand:SI 2 "arm_not_operand" "rI,K,0,0,rI,K,rI,K")))]
7362 mov%d3\\t%0, %1\;mov%D3\\t%0, %2
7363 mov%d3\\t%0, %1\;mvn%D3\\t%0, #%B2
7364 mvn%d3\\t%0, #%B1\;mov%D3\\t%0, %2
7365 mvn%d3\\t%0, #%B1\;mvn%D3\\t%0, #%B2"
7366 [(set_attr "length" "4,4,4,4,8,8,8,8")
7367 (set_attr "conds" "use")]
7370 (define_insn "*movsfcc_soft_insn"
7371 [(set (match_operand:SF 0 "s_register_operand" "=r,r")
7372 (if_then_else:SF (match_operator 3 "arm_comparison_operator"
7373 [(match_operand 4 "cc_register" "") (const_int 0)])
7374 (match_operand:SF 1 "s_register_operand" "0,r")
7375 (match_operand:SF 2 "s_register_operand" "r,0")))]
7376 "TARGET_ARM && TARGET_SOFT_FLOAT"
7380 [(set_attr "conds" "use")]
7384 ;; Jump and linkage insns
7386 (define_expand "jump"
7388 (label_ref (match_operand 0 "" "")))]
7393 (define_insn "*arm_jump"
7395 (label_ref (match_operand 0 "" "")))]
7399 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7401 arm_ccfsm_state += 2;
7404 return \"b%?\\t%l0\";
7407 [(set_attr "predicable" "yes")]
7410 (define_insn "*thumb_jump"
7412 (label_ref (match_operand 0 "" "")))]
7415 if (get_attr_length (insn) == 2)
7417 return \"bl\\t%l0\\t%@ far jump\";
7419 [(set (attr "far_jump")
7421 (eq_attr "length" "4")
7422 (const_string "yes")
7423 (const_string "no")))
7424 (set (attr "length")
7426 (and (ge (minus (match_dup 0) (pc)) (const_int -2044))
7427 (le (minus (match_dup 0) (pc)) (const_int 2048)))
7432 (define_expand "call"
7433 [(parallel [(call (match_operand 0 "memory_operand" "")
7434 (match_operand 1 "general_operand" ""))
7435 (use (match_operand 2 "" ""))
7436 (clobber (reg:SI LR_REGNUM))])]
7442 /* In an untyped call, we can get NULL for operand 2. */
7443 if (operands[2] == NULL_RTX)
7444 operands[2] = const0_rtx;
7446 /* This is to decide if we should generate indirect calls by loading the
7447 32 bit address of the callee into a register before performing the
7448 branch and link. operand[2] encodes the long_call/short_call
7449 attribute of the function being called. This attribute is set whenever
7450 __attribute__((long_call/short_call)) or #pragma long_call/no_long_call
7451 is used, and the short_call attribute can also be set if function is
7452 declared as static or if it has already been defined in the current
7453 compilation unit. See arm.c and arm.h for info about this. The third
7454 parameter to arm_is_longcall_p is used to tell it which pattern
7456 callee = XEXP (operands[0], 0);
7458 if (GET_CODE (callee) != REG
7459 && arm_is_longcall_p (operands[0], INTVAL (operands[2]), 0))
7460 XEXP (operands[0], 0) = force_reg (Pmode, callee);
7464 (define_insn "*call_reg_armv5"
7465 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
7466 (match_operand 1 "" ""))
7467 (use (match_operand 2 "" ""))
7468 (clobber (reg:SI LR_REGNUM))]
7469 "TARGET_ARM && arm_arch5"
7471 [(set_attr "type" "call")]
7474 (define_insn "*call_reg_arm"
7475 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
7476 (match_operand 1 "" ""))
7477 (use (match_operand 2 "" ""))
7478 (clobber (reg:SI LR_REGNUM))]
7479 "TARGET_ARM && !arm_arch5"
7481 return output_call (operands);
7483 ;; length is worst case, normally it is only two
7484 [(set_attr "length" "12")
7485 (set_attr "type" "call")]
7488 (define_insn "*call_mem"
7489 [(call (mem:SI (match_operand:SI 0 "memory_operand" "m"))
7490 (match_operand 1 "" ""))
7491 (use (match_operand 2 "" ""))
7492 (clobber (reg:SI LR_REGNUM))]
7495 return output_call_mem (operands);
7497 [(set_attr "length" "12")
7498 (set_attr "type" "call")]
7501 (define_insn "*call_reg_thumb_v5"
7502 [(call (mem:SI (match_operand:SI 0 "register_operand" "l*r"))
7503 (match_operand 1 "" ""))
7504 (use (match_operand 2 "" ""))
7505 (clobber (reg:SI LR_REGNUM))]
7506 "TARGET_THUMB && arm_arch5"
7508 [(set_attr "length" "2")
7509 (set_attr "type" "call")]
7512 (define_insn "*call_reg_thumb"
7513 [(call (mem:SI (match_operand:SI 0 "register_operand" "l*r"))
7514 (match_operand 1 "" ""))
7515 (use (match_operand 2 "" ""))
7516 (clobber (reg:SI LR_REGNUM))]
7517 "TARGET_THUMB && !arm_arch5"
7520 if (!TARGET_CALLER_INTERWORKING)
7521 return thumb_call_via_reg (operands[0]);
7522 else if (operands[1] == const0_rtx)
7523 return \"bl\\t%__interwork_call_via_%0\";
7524 else if (frame_pointer_needed)
7525 return \"bl\\t%__interwork_r7_call_via_%0\";
7527 return \"bl\\t%__interwork_r11_call_via_%0\";
7529 [(set_attr "type" "call")]
7532 (define_expand "call_value"
7533 [(parallel [(set (match_operand 0 "" "")
7534 (call (match_operand 1 "memory_operand" "")
7535 (match_operand 2 "general_operand" "")))
7536 (use (match_operand 3 "" ""))
7537 (clobber (reg:SI LR_REGNUM))])]
7541 rtx callee = XEXP (operands[1], 0);
7543 /* In an untyped call, we can get NULL for operand 2. */
7544 if (operands[3] == 0)
7545 operands[3] = const0_rtx;
7547 /* See the comment in define_expand \"call\". */
7548 if (GET_CODE (callee) != REG
7549 && arm_is_longcall_p (operands[1], INTVAL (operands[3]), 0))
7550 XEXP (operands[1], 0) = force_reg (Pmode, callee);
7554 (define_insn "*call_value_reg_armv5"
7555 [(set (match_operand 0 "" "")
7556 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
7557 (match_operand 2 "" "")))
7558 (use (match_operand 3 "" ""))
7559 (clobber (reg:SI LR_REGNUM))]
7560 "TARGET_ARM && arm_arch5"
7562 [(set_attr "type" "call")]
7565 (define_insn "*call_value_reg_arm"
7566 [(set (match_operand 0 "" "")
7567 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
7568 (match_operand 2 "" "")))
7569 (use (match_operand 3 "" ""))
7570 (clobber (reg:SI LR_REGNUM))]
7571 "TARGET_ARM && !arm_arch5"
7573 return output_call (&operands[1]);
7575 [(set_attr "length" "12")
7576 (set_attr "type" "call")]
7579 (define_insn "*call_value_mem"
7580 [(set (match_operand 0 "" "")
7581 (call (mem:SI (match_operand:SI 1 "memory_operand" "m"))
7582 (match_operand 2 "" "")))
7583 (use (match_operand 3 "" ""))
7584 (clobber (reg:SI LR_REGNUM))]
7585 "TARGET_ARM && (!CONSTANT_ADDRESS_P (XEXP (operands[1], 0)))"
7587 return output_call_mem (&operands[1]);
7589 [(set_attr "length" "12")
7590 (set_attr "type" "call")]
7593 (define_insn "*call_value_reg_thumb_v5"
7594 [(set (match_operand 0 "" "")
7595 (call (mem:SI (match_operand:SI 1 "register_operand" "l*r"))
7596 (match_operand 2 "" "")))
7597 (use (match_operand 3 "" ""))
7598 (clobber (reg:SI LR_REGNUM))]
7599 "TARGET_THUMB && arm_arch5"
7601 [(set_attr "length" "2")
7602 (set_attr "type" "call")]
7605 (define_insn "*call_value_reg_thumb"
7606 [(set (match_operand 0 "" "")
7607 (call (mem:SI (match_operand:SI 1 "register_operand" "l*r"))
7608 (match_operand 2 "" "")))
7609 (use (match_operand 3 "" ""))
7610 (clobber (reg:SI LR_REGNUM))]
7611 "TARGET_THUMB && !arm_arch5"
7614 if (!TARGET_CALLER_INTERWORKING)
7615 return thumb_call_via_reg (operands[1]);
7616 else if (operands[2] == const0_rtx)
7617 return \"bl\\t%__interwork_call_via_%1\";
7618 else if (frame_pointer_needed)
7619 return \"bl\\t%__interwork_r7_call_via_%1\";
7621 return \"bl\\t%__interwork_r11_call_via_%1\";
7623 [(set_attr "type" "call")]
7626 ;; Allow calls to SYMBOL_REFs specially as they are not valid general addresses
7627 ;; The 'a' causes the operand to be treated as an address, i.e. no '#' output.
7629 (define_insn "*call_symbol"
7630 [(call (mem:SI (match_operand:SI 0 "" ""))
7631 (match_operand 1 "" ""))
7632 (use (match_operand 2 "" ""))
7633 (clobber (reg:SI LR_REGNUM))]
7635 && (GET_CODE (operands[0]) == SYMBOL_REF)
7636 && !arm_is_longcall_p (operands[0], INTVAL (operands[2]), 1)"
7639 return NEED_PLT_RELOC ? \"bl%?\\t%a0(PLT)\" : \"bl%?\\t%a0\";
7641 [(set_attr "type" "call")]
7644 (define_insn "*call_value_symbol"
7645 [(set (match_operand 0 "" "")
7646 (call (mem:SI (match_operand:SI 1 "" ""))
7647 (match_operand:SI 2 "" "")))
7648 (use (match_operand 3 "" ""))
7649 (clobber (reg:SI LR_REGNUM))]
7651 && (GET_CODE (operands[1]) == SYMBOL_REF)
7652 && !arm_is_longcall_p (operands[1], INTVAL (operands[3]), 1)"
7655 return NEED_PLT_RELOC ? \"bl%?\\t%a1(PLT)\" : \"bl%?\\t%a1\";
7657 [(set_attr "type" "call")]
7660 (define_insn "*call_insn"
7661 [(call (mem:SI (match_operand:SI 0 "" ""))
7662 (match_operand:SI 1 "" ""))
7663 (use (match_operand 2 "" ""))
7664 (clobber (reg:SI LR_REGNUM))]
7666 && GET_CODE (operands[0]) == SYMBOL_REF
7667 && !arm_is_longcall_p (operands[0], INTVAL (operands[2]), 1)"
7669 [(set_attr "length" "4")
7670 (set_attr "type" "call")]
7673 (define_insn "*call_value_insn"
7674 [(set (match_operand 0 "" "")
7675 (call (mem:SI (match_operand 1 "" ""))
7676 (match_operand 2 "" "")))
7677 (use (match_operand 3 "" ""))
7678 (clobber (reg:SI LR_REGNUM))]
7680 && GET_CODE (operands[1]) == SYMBOL_REF
7681 && !arm_is_longcall_p (operands[1], INTVAL (operands[3]), 1)"
7683 [(set_attr "length" "4")
7684 (set_attr "type" "call")]
7687 ;; We may also be able to do sibcalls for Thumb, but it's much harder...
7688 (define_expand "sibcall"
7689 [(parallel [(call (match_operand 0 "memory_operand" "")
7690 (match_operand 1 "general_operand" ""))
7692 (use (match_operand 2 "" ""))])]
7696 if (operands[2] == NULL_RTX)
7697 operands[2] = const0_rtx;
7701 (define_expand "sibcall_value"
7702 [(parallel [(set (match_operand 0 "" "")
7703 (call (match_operand 1 "memory_operand" "")
7704 (match_operand 2 "general_operand" "")))
7706 (use (match_operand 3 "" ""))])]
7710 if (operands[3] == NULL_RTX)
7711 operands[3] = const0_rtx;
7715 (define_insn "*sibcall_insn"
7716 [(call (mem:SI (match_operand:SI 0 "" "X"))
7717 (match_operand 1 "" ""))
7719 (use (match_operand 2 "" ""))]
7720 "TARGET_ARM && GET_CODE (operands[0]) == SYMBOL_REF"
7722 return NEED_PLT_RELOC ? \"b%?\\t%a0(PLT)\" : \"b%?\\t%a0\";
7724 [(set_attr "type" "call")]
7727 (define_insn "*sibcall_value_insn"
7728 [(set (match_operand 0 "" "")
7729 (call (mem:SI (match_operand:SI 1 "" "X"))
7730 (match_operand 2 "" "")))
7732 (use (match_operand 3 "" ""))]
7733 "TARGET_ARM && GET_CODE (operands[1]) == SYMBOL_REF"
7735 return NEED_PLT_RELOC ? \"b%?\\t%a1(PLT)\" : \"b%?\\t%a1\";
7737 [(set_attr "type" "call")]
7740 ;; Often the return insn will be the same as loading from memory, so set attr
7741 (define_insn "return"
7743 "TARGET_ARM && USE_RETURN_INSN (FALSE)"
7746 if (arm_ccfsm_state == 2)
7748 arm_ccfsm_state += 2;
7751 return output_return_instruction (const_true_rtx, TRUE, FALSE);
7753 [(set_attr "type" "load1")
7754 (set_attr "length" "12")
7755 (set_attr "predicable" "yes")]
7758 (define_insn "*cond_return"
7760 (if_then_else (match_operator 0 "arm_comparison_operator"
7761 [(match_operand 1 "cc_register" "") (const_int 0)])
7764 "TARGET_ARM && USE_RETURN_INSN (TRUE)"
7767 if (arm_ccfsm_state == 2)
7769 arm_ccfsm_state += 2;
7772 return output_return_instruction (operands[0], TRUE, FALSE);
7774 [(set_attr "conds" "use")
7775 (set_attr "length" "12")
7776 (set_attr "type" "load1")]
7779 (define_insn "*cond_return_inverted"
7781 (if_then_else (match_operator 0 "arm_comparison_operator"
7782 [(match_operand 1 "cc_register" "") (const_int 0)])
7785 "TARGET_ARM && USE_RETURN_INSN (TRUE)"
7788 if (arm_ccfsm_state == 2)
7790 arm_ccfsm_state += 2;
7793 return output_return_instruction (operands[0], TRUE, TRUE);
7795 [(set_attr "conds" "use")
7796 (set_attr "length" "12")
7797 (set_attr "type" "load1")]
7800 ;; Generate a sequence of instructions to determine if the processor is
7801 ;; in 26-bit or 32-bit mode, and return the appropriate return address
7804 (define_expand "return_addr_mask"
7806 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
7808 (set (match_operand:SI 0 "s_register_operand" "")
7809 (if_then_else:SI (eq (match_dup 1) (const_int 0))
7811 (const_int 67108860)))] ; 0x03fffffc
7814 operands[1] = gen_rtx_REG (CC_NOOVmode, CC_REGNUM);
7817 (define_insn "*check_arch2"
7818 [(set (match_operand:CC_NOOV 0 "cc_register" "")
7819 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
7822 "teq\\t%|r0, %|r0\;teq\\t%|pc, %|pc"
7823 [(set_attr "length" "8")
7824 (set_attr "conds" "set")]
7827 ;; Call subroutine returning any type.
7829 (define_expand "untyped_call"
7830 [(parallel [(call (match_operand 0 "" "")
7832 (match_operand 1 "" "")
7833 (match_operand 2 "" "")])]
7838 rtx par = gen_rtx_PARALLEL (VOIDmode,
7839 rtvec_alloc (XVECLEN (operands[2], 0)));
7840 rtx addr = gen_reg_rtx (Pmode);
7844 emit_move_insn (addr, XEXP (operands[1], 0));
7845 mem = change_address (operands[1], BLKmode, addr);
7847 for (i = 0; i < XVECLEN (operands[2], 0); i++)
7849 rtx src = SET_SRC (XVECEXP (operands[2], 0, i));
7851 /* Default code only uses r0 as a return value, but we could
7852 be using anything up to 4 registers. */
7853 if (REGNO (src) == R0_REGNUM)
7854 src = gen_rtx_REG (TImode, R0_REGNUM);
7856 XVECEXP (par, 0, i) = gen_rtx_EXPR_LIST (VOIDmode, src,
7858 size += GET_MODE_SIZE (GET_MODE (src));
7861 emit_call_insn (GEN_CALL_VALUE (par, operands[0], const0_rtx, NULL,
7866 for (i = 0; i < XVECLEN (par, 0); i++)
7868 HOST_WIDE_INT offset = 0;
7869 rtx reg = XEXP (XVECEXP (par, 0, i), 0);
7872 emit_move_insn (addr, plus_constant (addr, size));
7874 mem = change_address (mem, GET_MODE (reg), NULL);
7875 if (REGNO (reg) == R0_REGNUM)
7877 /* On thumb we have to use a write-back instruction. */
7878 emit_insn (arm_gen_store_multiple (R0_REGNUM, 4, addr, TRUE,
7879 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
7880 size = TARGET_ARM ? 16 : 0;
7884 emit_move_insn (mem, reg);
7885 size = GET_MODE_SIZE (GET_MODE (reg));
7889 /* The optimizer does not know that the call sets the function value
7890 registers we stored in the result block. We avoid problems by
7891 claiming that all hard registers are used and clobbered at this
7893 emit_insn (gen_blockage ());
7899 (define_expand "untyped_return"
7900 [(match_operand:BLK 0 "memory_operand" "")
7901 (match_operand 1 "" "")]
7906 rtx addr = gen_reg_rtx (Pmode);
7910 emit_move_insn (addr, XEXP (operands[0], 0));
7911 mem = change_address (operands[0], BLKmode, addr);
7913 for (i = 0; i < XVECLEN (operands[1], 0); i++)
7915 HOST_WIDE_INT offset = 0;
7916 rtx reg = SET_DEST (XVECEXP (operands[1], 0, i));
7919 emit_move_insn (addr, plus_constant (addr, size));
7921 mem = change_address (mem, GET_MODE (reg), NULL);
7922 if (REGNO (reg) == R0_REGNUM)
7924 /* On thumb we have to use a write-back instruction. */
7925 emit_insn (arm_gen_load_multiple (R0_REGNUM, 4, addr, TRUE,
7926 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
7927 size = TARGET_ARM ? 16 : 0;
7931 emit_move_insn (reg, mem);
7932 size = GET_MODE_SIZE (GET_MODE (reg));
7936 /* Emit USE insns before the return. */
7937 for (i = 0; i < XVECLEN (operands[1], 0); i++)
7938 emit_insn (gen_rtx_USE (VOIDmode,
7939 SET_DEST (XVECEXP (operands[1], 0, i))));
7941 /* Construct the return. */
7942 expand_naked_return ();
7948 ;; UNSPEC_VOLATILE is considered to use and clobber all hard registers and
7949 ;; all of memory. This blocks insns from being moved across this point.
7951 (define_insn "blockage"
7952 [(unspec_volatile [(const_int 0)] VUNSPEC_BLOCKAGE)]
7955 [(set_attr "length" "0")
7956 (set_attr "type" "block")]
7959 (define_expand "casesi"
7960 [(match_operand:SI 0 "s_register_operand" "") ; index to jump on
7961 (match_operand:SI 1 "const_int_operand" "") ; lower bound
7962 (match_operand:SI 2 "const_int_operand" "") ; total range
7963 (match_operand:SI 3 "" "") ; table label
7964 (match_operand:SI 4 "" "")] ; Out of range label
7969 if (operands[1] != const0_rtx)
7971 reg = gen_reg_rtx (SImode);
7973 emit_insn (gen_addsi3 (reg, operands[0],
7974 GEN_INT (-INTVAL (operands[1]))));
7978 if (!const_ok_for_arm (INTVAL (operands[2])))
7979 operands[2] = force_reg (SImode, operands[2]);
7981 emit_jump_insn (gen_casesi_internal (operands[0], operands[2], operands[3],
7987 ;; The USE in this pattern is needed to tell flow analysis that this is
7988 ;; a CASESI insn. It has no other purpose.
7989 (define_insn "casesi_internal"
7990 [(parallel [(set (pc)
7992 (leu (match_operand:SI 0 "s_register_operand" "r")
7993 (match_operand:SI 1 "arm_rhs_operand" "rI"))
7994 (mem:SI (plus:SI (mult:SI (match_dup 0) (const_int 4))
7995 (label_ref (match_operand 2 "" ""))))
7996 (label_ref (match_operand 3 "" ""))))
7997 (clobber (reg:CC CC_REGNUM))
7998 (use (label_ref (match_dup 2)))])]
8002 return \"cmp\\t%0, %1\;addls\\t%|pc, %|pc, %0, asl #2\;b\\t%l3\";
8003 return \"cmp\\t%0, %1\;ldrls\\t%|pc, [%|pc, %0, asl #2]\;b\\t%l3\";
8005 [(set_attr "conds" "clob")
8006 (set_attr "length" "12")]
8009 (define_expand "indirect_jump"
8011 (match_operand:SI 0 "s_register_operand" ""))]
8016 ;; NB Never uses BX.
8017 (define_insn "*arm_indirect_jump"
8019 (match_operand:SI 0 "s_register_operand" "r"))]
8021 "mov%?\\t%|pc, %0\\t%@ indirect register jump"
8022 [(set_attr "predicable" "yes")]
8025 (define_insn "*load_indirect_jump"
8027 (match_operand:SI 0 "memory_operand" "m"))]
8029 "ldr%?\\t%|pc, %0\\t%@ indirect memory jump"
8030 [(set_attr "type" "load1")
8031 (set_attr "pool_range" "4096")
8032 (set_attr "neg_pool_range" "4084")
8033 (set_attr "predicable" "yes")]
8036 ;; NB Never uses BX.
8037 (define_insn "*thumb_indirect_jump"
8039 (match_operand:SI 0 "register_operand" "l*r"))]
8042 [(set_attr "conds" "clob")
8043 (set_attr "length" "2")]
8054 return \"mov%?\\t%|r0, %|r0\\t%@ nop\";
8055 return \"mov\\tr8, r8\";
8057 [(set (attr "length")
8058 (if_then_else (eq_attr "is_thumb" "yes")
8064 ;; Patterns to allow combination of arithmetic, cond code and shifts
8066 (define_insn "*arith_shiftsi"
8067 [(set (match_operand:SI 0 "s_register_operand" "=r")
8068 (match_operator:SI 1 "shiftable_operator"
8069 [(match_operator:SI 3 "shift_operator"
8070 [(match_operand:SI 4 "s_register_operand" "r")
8071 (match_operand:SI 5 "reg_or_int_operand" "rI")])
8072 (match_operand:SI 2 "s_register_operand" "r")]))]
8074 "%i1%?\\t%0, %2, %4%S3"
8075 [(set_attr "predicable" "yes")
8076 (set_attr "shift" "4")
8077 (set (attr "type") (if_then_else (match_operand 5 "const_int_operand" "")
8078 (const_string "alu_shift")
8079 (const_string "alu_shift_reg")))]
8083 [(set (match_operand:SI 0 "s_register_operand" "")
8084 (match_operator:SI 1 "shiftable_operator"
8085 [(match_operator:SI 2 "shiftable_operator"
8086 [(match_operator:SI 3 "shift_operator"
8087 [(match_operand:SI 4 "s_register_operand" "")
8088 (match_operand:SI 5 "reg_or_int_operand" "")])
8089 (match_operand:SI 6 "s_register_operand" "")])
8090 (match_operand:SI 7 "arm_rhs_operand" "")]))
8091 (clobber (match_operand:SI 8 "s_register_operand" ""))]
8094 (match_op_dup 2 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
8097 (match_op_dup 1 [(match_dup 8) (match_dup 7)]))]
8100 (define_insn "*arith_shiftsi_compare0"
8101 [(set (reg:CC_NOOV CC_REGNUM)
8102 (compare:CC_NOOV (match_operator:SI 1 "shiftable_operator"
8103 [(match_operator:SI 3 "shift_operator"
8104 [(match_operand:SI 4 "s_register_operand" "r")
8105 (match_operand:SI 5 "reg_or_int_operand" "rI")])
8106 (match_operand:SI 2 "s_register_operand" "r")])
8108 (set (match_operand:SI 0 "s_register_operand" "=r")
8109 (match_op_dup 1 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
8112 "%i1%?s\\t%0, %2, %4%S3"
8113 [(set_attr "conds" "set")
8114 (set_attr "shift" "4")
8115 (set (attr "type") (if_then_else (match_operand 5 "const_int_operand" "")
8116 (const_string "alu_shift")
8117 (const_string "alu_shift_reg")))]
8120 (define_insn "*arith_shiftsi_compare0_scratch"
8121 [(set (reg:CC_NOOV CC_REGNUM)
8122 (compare:CC_NOOV (match_operator:SI 1 "shiftable_operator"
8123 [(match_operator:SI 3 "shift_operator"
8124 [(match_operand:SI 4 "s_register_operand" "r")
8125 (match_operand:SI 5 "reg_or_int_operand" "rI")])
8126 (match_operand:SI 2 "s_register_operand" "r")])
8128 (clobber (match_scratch:SI 0 "=r"))]
8130 "%i1%?s\\t%0, %2, %4%S3"
8131 [(set_attr "conds" "set")
8132 (set_attr "shift" "4")
8133 (set (attr "type") (if_then_else (match_operand 5 "const_int_operand" "")
8134 (const_string "alu_shift")
8135 (const_string "alu_shift_reg")))]
8138 (define_insn "*sub_shiftsi"
8139 [(set (match_operand:SI 0 "s_register_operand" "=r")
8140 (minus:SI (match_operand:SI 1 "s_register_operand" "r")
8141 (match_operator:SI 2 "shift_operator"
8142 [(match_operand:SI 3 "s_register_operand" "r")
8143 (match_operand:SI 4 "reg_or_int_operand" "rM")])))]
8145 "sub%?\\t%0, %1, %3%S2"
8146 [(set_attr "predicable" "yes")
8147 (set_attr "shift" "3")
8148 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
8149 (const_string "alu_shift")
8150 (const_string "alu_shift_reg")))]
8153 (define_insn "*sub_shiftsi_compare0"
8154 [(set (reg:CC_NOOV CC_REGNUM)
8156 (minus:SI (match_operand:SI 1 "s_register_operand" "r")
8157 (match_operator:SI 2 "shift_operator"
8158 [(match_operand:SI 3 "s_register_operand" "r")
8159 (match_operand:SI 4 "reg_or_int_operand" "rM")]))
8161 (set (match_operand:SI 0 "s_register_operand" "=r")
8162 (minus:SI (match_dup 1) (match_op_dup 2 [(match_dup 3)
8165 "sub%?s\\t%0, %1, %3%S2"
8166 [(set_attr "conds" "set")
8167 (set_attr "shift" "3")
8168 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
8169 (const_string "alu_shift")
8170 (const_string "alu_shift_reg")))]
8173 (define_insn "*sub_shiftsi_compare0_scratch"
8174 [(set (reg:CC_NOOV CC_REGNUM)
8176 (minus:SI (match_operand:SI 1 "s_register_operand" "r")
8177 (match_operator:SI 2 "shift_operator"
8178 [(match_operand:SI 3 "s_register_operand" "r")
8179 (match_operand:SI 4 "reg_or_int_operand" "rM")]))
8181 (clobber (match_scratch:SI 0 "=r"))]
8183 "sub%?s\\t%0, %1, %3%S2"
8184 [(set_attr "conds" "set")
8185 (set_attr "shift" "3")
8186 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
8187 (const_string "alu_shift")
8188 (const_string "alu_shift_reg")))]
8193 (define_insn "*and_scc"
8194 [(set (match_operand:SI 0 "s_register_operand" "=r")
8195 (and:SI (match_operator:SI 1 "arm_comparison_operator"
8196 [(match_operand 3 "cc_register" "") (const_int 0)])
8197 (match_operand:SI 2 "s_register_operand" "r")))]
8199 "mov%D1\\t%0, #0\;and%d1\\t%0, %2, #1"
8200 [(set_attr "conds" "use")
8201 (set_attr "length" "8")]
8204 (define_insn "*ior_scc"
8205 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8206 (ior:SI (match_operator:SI 2 "arm_comparison_operator"
8207 [(match_operand 3 "cc_register" "") (const_int 0)])
8208 (match_operand:SI 1 "s_register_operand" "0,?r")))]
8212 mov%D2\\t%0, %1\;orr%d2\\t%0, %1, #1"
8213 [(set_attr "conds" "use")
8214 (set_attr "length" "4,8")]
8217 (define_insn "*compare_scc"
8218 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8219 (match_operator:SI 1 "arm_comparison_operator"
8220 [(match_operand:SI 2 "s_register_operand" "r,r")
8221 (match_operand:SI 3 "arm_add_operand" "rI,L")]))
8222 (clobber (reg:CC CC_REGNUM))]
8225 if (operands[3] == const0_rtx)
8227 if (GET_CODE (operands[1]) == LT)
8228 return \"mov\\t%0, %2, lsr #31\";
8230 if (GET_CODE (operands[1]) == GE)
8231 return \"mvn\\t%0, %2\;mov\\t%0, %0, lsr #31\";
8233 if (GET_CODE (operands[1]) == EQ)
8234 return \"rsbs\\t%0, %2, #1\;movcc\\t%0, #0\";
8237 if (GET_CODE (operands[1]) == NE)
8239 if (which_alternative == 1)
8240 return \"adds\\t%0, %2, #%n3\;movne\\t%0, #1\";
8241 return \"subs\\t%0, %2, %3\;movne\\t%0, #1\";
8243 if (which_alternative == 1)
8244 output_asm_insn (\"cmn\\t%2, #%n3\", operands);
8246 output_asm_insn (\"cmp\\t%2, %3\", operands);
8247 return \"mov%D1\\t%0, #0\;mov%d1\\t%0, #1\";
8249 [(set_attr "conds" "clob")
8250 (set_attr "length" "12")]
8253 (define_insn "*cond_move"
8254 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
8255 (if_then_else:SI (match_operator 3 "equality_operator"
8256 [(match_operator 4 "arm_comparison_operator"
8257 [(match_operand 5 "cc_register" "") (const_int 0)])
8259 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
8260 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))]
8263 if (GET_CODE (operands[3]) == NE)
8265 if (which_alternative != 1)
8266 output_asm_insn (\"mov%D4\\t%0, %2\", operands);
8267 if (which_alternative != 0)
8268 output_asm_insn (\"mov%d4\\t%0, %1\", operands);
8271 if (which_alternative != 0)
8272 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
8273 if (which_alternative != 1)
8274 output_asm_insn (\"mov%d4\\t%0, %2\", operands);
8277 [(set_attr "conds" "use")
8278 (set_attr "length" "4,4,8")]
8281 (define_insn "*cond_arith"
8282 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8283 (match_operator:SI 5 "shiftable_operator"
8284 [(match_operator:SI 4 "arm_comparison_operator"
8285 [(match_operand:SI 2 "s_register_operand" "r,r")
8286 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
8287 (match_operand:SI 1 "s_register_operand" "0,?r")]))
8288 (clobber (reg:CC CC_REGNUM))]
8291 if (GET_CODE (operands[4]) == LT && operands[3] == const0_rtx)
8292 return \"%i5\\t%0, %1, %2, lsr #31\";
8294 output_asm_insn (\"cmp\\t%2, %3\", operands);
8295 if (GET_CODE (operands[5]) == AND)
8296 output_asm_insn (\"mov%D4\\t%0, #0\", operands);
8297 else if (GET_CODE (operands[5]) == MINUS)
8298 output_asm_insn (\"rsb%D4\\t%0, %1, #0\", operands);
8299 else if (which_alternative != 0)
8300 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
8301 return \"%i5%d4\\t%0, %1, #1\";
8303 [(set_attr "conds" "clob")
8304 (set_attr "length" "12")]
8307 (define_insn "*cond_sub"
8308 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8309 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
8310 (match_operator:SI 4 "arm_comparison_operator"
8311 [(match_operand:SI 2 "s_register_operand" "r,r")
8312 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
8313 (clobber (reg:CC CC_REGNUM))]
8316 output_asm_insn (\"cmp\\t%2, %3\", operands);
8317 if (which_alternative != 0)
8318 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
8319 return \"sub%d4\\t%0, %1, #1\";
8321 [(set_attr "conds" "clob")
8322 (set_attr "length" "8,12")]
8325 (define_insn "*cmp_ite0"
8326 [(set (match_operand 6 "dominant_cc_register" "")
8329 (match_operator 4 "arm_comparison_operator"
8330 [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
8331 (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
8332 (match_operator:SI 5 "arm_comparison_operator"
8333 [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
8334 (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")])
8340 static const char * const opcodes[4][2] =
8342 {\"cmp\\t%2, %3\;cmp%d5\\t%0, %1\",
8343 \"cmp\\t%0, %1\;cmp%d4\\t%2, %3\"},
8344 {\"cmp\\t%2, %3\;cmn%d5\\t%0, #%n1\",
8345 \"cmn\\t%0, #%n1\;cmp%d4\\t%2, %3\"},
8346 {\"cmn\\t%2, #%n3\;cmp%d5\\t%0, %1\",
8347 \"cmp\\t%0, %1\;cmn%d4\\t%2, #%n3\"},
8348 {\"cmn\\t%2, #%n3\;cmn%d5\\t%0, #%n1\",
8349 \"cmn\\t%0, #%n1\;cmn%d4\\t%2, #%n3\"}
8352 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
8354 return opcodes[which_alternative][swap];
8356 [(set_attr "conds" "set")
8357 (set_attr "length" "8")]
8360 (define_insn "*cmp_ite1"
8361 [(set (match_operand 6 "dominant_cc_register" "")
8364 (match_operator 4 "arm_comparison_operator"
8365 [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
8366 (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
8367 (match_operator:SI 5 "arm_comparison_operator"
8368 [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
8369 (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")])
8375 static const char * const opcodes[4][2] =
8377 {\"cmp\\t%0, %1\;cmp%d4\\t%2, %3\",
8378 \"cmp\\t%2, %3\;cmp%D5\\t%0, %1\"},
8379 {\"cmn\\t%0, #%n1\;cmp%d4\\t%2, %3\",
8380 \"cmp\\t%2, %3\;cmn%D5\\t%0, #%n1\"},
8381 {\"cmp\\t%0, %1\;cmn%d4\\t%2, #%n3\",
8382 \"cmn\\t%2, #%n3\;cmp%D5\\t%0, %1\"},
8383 {\"cmn\\t%0, #%n1\;cmn%d4\\t%2, #%n3\",
8384 \"cmn\\t%2, #%n3\;cmn%D5\\t%0, #%n1\"}
8387 comparison_dominates_p (GET_CODE (operands[5]),
8388 reverse_condition (GET_CODE (operands[4])));
8390 return opcodes[which_alternative][swap];
8392 [(set_attr "conds" "set")
8393 (set_attr "length" "8")]
8396 (define_insn "*cmp_and"
8397 [(set (match_operand 6 "dominant_cc_register" "")
8400 (match_operator 4 "arm_comparison_operator"
8401 [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
8402 (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
8403 (match_operator:SI 5 "arm_comparison_operator"
8404 [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
8405 (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")]))
8410 static const char *const opcodes[4][2] =
8412 {\"cmp\\t%2, %3\;cmp%d5\\t%0, %1\",
8413 \"cmp\\t%0, %1\;cmp%d4\\t%2, %3\"},
8414 {\"cmp\\t%2, %3\;cmn%d5\\t%0, #%n1\",
8415 \"cmn\\t%0, #%n1\;cmp%d4\\t%2, %3\"},
8416 {\"cmn\\t%2, #%n3\;cmp%d5\\t%0, %1\",
8417 \"cmp\\t%0, %1\;cmn%d4\\t%2, #%n3\"},
8418 {\"cmn\\t%2, #%n3\;cmn%d5\\t%0, #%n1\",
8419 \"cmn\\t%0, #%n1\;cmn%d4\\t%2, #%n3\"}
8422 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
8424 return opcodes[which_alternative][swap];
8426 [(set_attr "conds" "set")
8427 (set_attr "predicable" "no")
8428 (set_attr "length" "8")]
8431 (define_insn "*cmp_ior"
8432 [(set (match_operand 6 "dominant_cc_register" "")
8435 (match_operator 4 "arm_comparison_operator"
8436 [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
8437 (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
8438 (match_operator:SI 5 "arm_comparison_operator"
8439 [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
8440 (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")]))
8445 static const char *const opcodes[4][2] =
8447 {\"cmp\\t%0, %1\;cmp%D4\\t%2, %3\",
8448 \"cmp\\t%2, %3\;cmp%D5\\t%0, %1\"},
8449 {\"cmn\\t%0, #%n1\;cmp%D4\\t%2, %3\",
8450 \"cmp\\t%2, %3\;cmn%D5\\t%0, #%n1\"},
8451 {\"cmp\\t%0, %1\;cmn%D4\\t%2, #%n3\",
8452 \"cmn\\t%2, #%n3\;cmp%D5\\t%0, %1\"},
8453 {\"cmn\\t%0, #%n1\;cmn%D4\\t%2, #%n3\",
8454 \"cmn\\t%2, #%n3\;cmn%D5\\t%0, #%n1\"}
8457 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
8459 return opcodes[which_alternative][swap];
8462 [(set_attr "conds" "set")
8463 (set_attr "length" "8")]
8466 (define_insn_and_split "*ior_scc_scc"
8467 [(set (match_operand:SI 0 "s_register_operand" "=r")
8468 (ior:SI (match_operator:SI 3 "arm_comparison_operator"
8469 [(match_operand:SI 1 "s_register_operand" "r")
8470 (match_operand:SI 2 "arm_add_operand" "rIL")])
8471 (match_operator:SI 6 "arm_comparison_operator"
8472 [(match_operand:SI 4 "s_register_operand" "r")
8473 (match_operand:SI 5 "arm_add_operand" "rIL")])))
8474 (clobber (reg:CC CC_REGNUM))]
8476 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_OR_Y)
8479 "TARGET_ARM && reload_completed"
8483 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
8484 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
8486 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
8488 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
8491 [(set_attr "conds" "clob")
8492 (set_attr "length" "16")])
8494 ; If the above pattern is followed by a CMP insn, then the compare is
8495 ; redundant, since we can rework the conditional instruction that follows.
8496 (define_insn_and_split "*ior_scc_scc_cmp"
8497 [(set (match_operand 0 "dominant_cc_register" "")
8498 (compare (ior:SI (match_operator:SI 3 "arm_comparison_operator"
8499 [(match_operand:SI 1 "s_register_operand" "r")
8500 (match_operand:SI 2 "arm_add_operand" "rIL")])
8501 (match_operator:SI 6 "arm_comparison_operator"
8502 [(match_operand:SI 4 "s_register_operand" "r")
8503 (match_operand:SI 5 "arm_add_operand" "rIL")]))
8505 (set (match_operand:SI 7 "s_register_operand" "=r")
8506 (ior:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
8507 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
8510 "TARGET_ARM && reload_completed"
8514 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
8515 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
8517 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
8519 [(set_attr "conds" "set")
8520 (set_attr "length" "16")])
8522 (define_insn_and_split "*and_scc_scc"
8523 [(set (match_operand:SI 0 "s_register_operand" "=r")
8524 (and:SI (match_operator:SI 3 "arm_comparison_operator"
8525 [(match_operand:SI 1 "s_register_operand" "r")
8526 (match_operand:SI 2 "arm_add_operand" "rIL")])
8527 (match_operator:SI 6 "arm_comparison_operator"
8528 [(match_operand:SI 4 "s_register_operand" "r")
8529 (match_operand:SI 5 "arm_add_operand" "rIL")])))
8530 (clobber (reg:CC CC_REGNUM))]
8532 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
8535 "TARGET_ARM && reload_completed
8536 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
8541 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
8542 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
8544 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
8546 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
8549 [(set_attr "conds" "clob")
8550 (set_attr "length" "16")])
8552 ; If the above pattern is followed by a CMP insn, then the compare is
8553 ; redundant, since we can rework the conditional instruction that follows.
8554 (define_insn_and_split "*and_scc_scc_cmp"
8555 [(set (match_operand 0 "dominant_cc_register" "")
8556 (compare (and:SI (match_operator:SI 3 "arm_comparison_operator"
8557 [(match_operand:SI 1 "s_register_operand" "r")
8558 (match_operand:SI 2 "arm_add_operand" "rIL")])
8559 (match_operator:SI 6 "arm_comparison_operator"
8560 [(match_operand:SI 4 "s_register_operand" "r")
8561 (match_operand:SI 5 "arm_add_operand" "rIL")]))
8563 (set (match_operand:SI 7 "s_register_operand" "=r")
8564 (and:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
8565 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
8568 "TARGET_ARM && reload_completed"
8572 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
8573 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
8575 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
8577 [(set_attr "conds" "set")
8578 (set_attr "length" "16")])
8580 ;; If there is no dominance in the comparison, then we can still save an
8581 ;; instruction in the AND case, since we can know that the second compare
8582 ;; need only zero the value if false (if true, then the value is already
8584 (define_insn_and_split "*and_scc_scc_nodom"
8585 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
8586 (and:SI (match_operator:SI 3 "arm_comparison_operator"
8587 [(match_operand:SI 1 "s_register_operand" "r,r,0")
8588 (match_operand:SI 2 "arm_add_operand" "rIL,0,rIL")])
8589 (match_operator:SI 6 "arm_comparison_operator"
8590 [(match_operand:SI 4 "s_register_operand" "r,r,r")
8591 (match_operand:SI 5 "arm_add_operand" "rIL,rIL,rIL")])))
8592 (clobber (reg:CC CC_REGNUM))]
8594 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
8597 "TARGET_ARM && reload_completed"
8598 [(parallel [(set (match_dup 0)
8599 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))
8600 (clobber (reg:CC CC_REGNUM))])
8601 (set (match_dup 7) (match_op_dup 8 [(match_dup 4) (match_dup 5)]))
8603 (if_then_else:SI (match_op_dup 6 [(match_dup 7) (const_int 0)])
8606 "operands[7] = gen_rtx_REG (SELECT_CC_MODE (GET_CODE (operands[6]),
8607 operands[4], operands[5]),
8609 operands[8] = gen_rtx_COMPARE (GET_MODE (operands[7]), operands[4],
8611 [(set_attr "conds" "clob")
8612 (set_attr "length" "20")])
8615 [(set (reg:CC_NOOV CC_REGNUM)
8616 (compare:CC_NOOV (ior:SI
8617 (and:SI (match_operand:SI 0 "s_register_operand" "")
8619 (match_operator:SI 1 "comparison_operator"
8620 [(match_operand:SI 2 "s_register_operand" "")
8621 (match_operand:SI 3 "arm_add_operand" "")]))
8623 (clobber (match_operand:SI 4 "s_register_operand" ""))]
8626 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
8628 (set (reg:CC_NOOV CC_REGNUM)
8629 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
8634 [(set (reg:CC_NOOV CC_REGNUM)
8635 (compare:CC_NOOV (ior:SI
8636 (match_operator:SI 1 "comparison_operator"
8637 [(match_operand:SI 2 "s_register_operand" "")
8638 (match_operand:SI 3 "arm_add_operand" "")])
8639 (and:SI (match_operand:SI 0 "s_register_operand" "")
8642 (clobber (match_operand:SI 4 "s_register_operand" ""))]
8645 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
8647 (set (reg:CC_NOOV CC_REGNUM)
8648 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
8652 (define_insn "*negscc"
8653 [(set (match_operand:SI 0 "s_register_operand" "=r")
8654 (neg:SI (match_operator 3 "arm_comparison_operator"
8655 [(match_operand:SI 1 "s_register_operand" "r")
8656 (match_operand:SI 2 "arm_rhs_operand" "rI")])))
8657 (clobber (reg:CC CC_REGNUM))]
8660 if (GET_CODE (operands[3]) == LT && operands[3] == const0_rtx)
8661 return \"mov\\t%0, %1, asr #31\";
8663 if (GET_CODE (operands[3]) == NE)
8664 return \"subs\\t%0, %1, %2\;mvnne\\t%0, #0\";
8666 if (GET_CODE (operands[3]) == GT)
8667 return \"subs\\t%0, %1, %2\;mvnne\\t%0, %0, asr #31\";
8669 output_asm_insn (\"cmp\\t%1, %2\", operands);
8670 output_asm_insn (\"mov%D3\\t%0, #0\", operands);
8671 return \"mvn%d3\\t%0, #0\";
8673 [(set_attr "conds" "clob")
8674 (set_attr "length" "12")]
8677 (define_insn "movcond"
8678 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
8680 (match_operator 5 "arm_comparison_operator"
8681 [(match_operand:SI 3 "s_register_operand" "r,r,r")
8682 (match_operand:SI 4 "arm_add_operand" "rIL,rIL,rIL")])
8683 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
8684 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
8685 (clobber (reg:CC CC_REGNUM))]
8688 if (GET_CODE (operands[5]) == LT
8689 && (operands[4] == const0_rtx))
8691 if (which_alternative != 1 && GET_CODE (operands[1]) == REG)
8693 if (operands[2] == const0_rtx)
8694 return \"and\\t%0, %1, %3, asr #31\";
8695 return \"ands\\t%0, %1, %3, asr #32\;movcc\\t%0, %2\";
8697 else if (which_alternative != 0 && GET_CODE (operands[2]) == REG)
8699 if (operands[1] == const0_rtx)
8700 return \"bic\\t%0, %2, %3, asr #31\";
8701 return \"bics\\t%0, %2, %3, asr #32\;movcs\\t%0, %1\";
8703 /* The only case that falls through to here is when both ops 1 & 2
8707 if (GET_CODE (operands[5]) == GE
8708 && (operands[4] == const0_rtx))
8710 if (which_alternative != 1 && GET_CODE (operands[1]) == REG)
8712 if (operands[2] == const0_rtx)
8713 return \"bic\\t%0, %1, %3, asr #31\";
8714 return \"bics\\t%0, %1, %3, asr #32\;movcs\\t%0, %2\";
8716 else if (which_alternative != 0 && GET_CODE (operands[2]) == REG)
8718 if (operands[1] == const0_rtx)
8719 return \"and\\t%0, %2, %3, asr #31\";
8720 return \"ands\\t%0, %2, %3, asr #32\;movcc\\t%0, %1\";
8722 /* The only case that falls through to here is when both ops 1 & 2
8725 if (GET_CODE (operands[4]) == CONST_INT
8726 && !const_ok_for_arm (INTVAL (operands[4])))
8727 output_asm_insn (\"cmn\\t%3, #%n4\", operands);
8729 output_asm_insn (\"cmp\\t%3, %4\", operands);
8730 if (which_alternative != 0)
8731 output_asm_insn (\"mov%d5\\t%0, %1\", operands);
8732 if (which_alternative != 1)
8733 output_asm_insn (\"mov%D5\\t%0, %2\", operands);
8736 [(set_attr "conds" "clob")
8737 (set_attr "length" "8,8,12")]
8740 (define_insn "*ifcompare_plus_move"
8741 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8742 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
8743 [(match_operand:SI 4 "s_register_operand" "r,r")
8744 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
8746 (match_operand:SI 2 "s_register_operand" "r,r")
8747 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))
8748 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
8749 (clobber (reg:CC CC_REGNUM))]
8752 [(set_attr "conds" "clob")
8753 (set_attr "length" "8,12")]
8756 (define_insn "*if_plus_move"
8757 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
8759 (match_operator 4 "arm_comparison_operator"
8760 [(match_operand 5 "cc_register" "") (const_int 0)])
8762 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
8763 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))
8764 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")))]
8768 sub%d4\\t%0, %2, #%n3
8769 add%d4\\t%0, %2, %3\;mov%D4\\t%0, %1
8770 sub%d4\\t%0, %2, #%n3\;mov%D4\\t%0, %1"
8771 [(set_attr "conds" "use")
8772 (set_attr "length" "4,4,8,8")
8773 (set_attr "type" "*,*,*,*")]
8776 (define_insn "*ifcompare_move_plus"
8777 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8778 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
8779 [(match_operand:SI 4 "s_register_operand" "r,r")
8780 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
8781 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
8783 (match_operand:SI 2 "s_register_operand" "r,r")
8784 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))))
8785 (clobber (reg:CC CC_REGNUM))]
8788 [(set_attr "conds" "clob")
8789 (set_attr "length" "8,12")]
8792 (define_insn "*if_move_plus"
8793 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
8795 (match_operator 4 "arm_comparison_operator"
8796 [(match_operand 5 "cc_register" "") (const_int 0)])
8797 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")
8799 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
8800 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))))]
8804 sub%D4\\t%0, %2, #%n3
8805 add%D4\\t%0, %2, %3\;mov%d4\\t%0, %1
8806 sub%D4\\t%0, %2, #%n3\;mov%d4\\t%0, %1"
8807 [(set_attr "conds" "use")
8808 (set_attr "length" "4,4,8,8")
8809 (set_attr "type" "*,*,*,*")]
8812 (define_insn "*ifcompare_arith_arith"
8813 [(set (match_operand:SI 0 "s_register_operand" "=r")
8814 (if_then_else:SI (match_operator 9 "arm_comparison_operator"
8815 [(match_operand:SI 5 "s_register_operand" "r")
8816 (match_operand:SI 6 "arm_add_operand" "rIL")])
8817 (match_operator:SI 8 "shiftable_operator"
8818 [(match_operand:SI 1 "s_register_operand" "r")
8819 (match_operand:SI 2 "arm_rhs_operand" "rI")])
8820 (match_operator:SI 7 "shiftable_operator"
8821 [(match_operand:SI 3 "s_register_operand" "r")
8822 (match_operand:SI 4 "arm_rhs_operand" "rI")])))
8823 (clobber (reg:CC CC_REGNUM))]
8826 [(set_attr "conds" "clob")
8827 (set_attr "length" "12")]
8830 (define_insn "*if_arith_arith"
8831 [(set (match_operand:SI 0 "s_register_operand" "=r")
8832 (if_then_else:SI (match_operator 5 "arm_comparison_operator"
8833 [(match_operand 8 "cc_register" "") (const_int 0)])
8834 (match_operator:SI 6 "shiftable_operator"
8835 [(match_operand:SI 1 "s_register_operand" "r")
8836 (match_operand:SI 2 "arm_rhs_operand" "rI")])
8837 (match_operator:SI 7 "shiftable_operator"
8838 [(match_operand:SI 3 "s_register_operand" "r")
8839 (match_operand:SI 4 "arm_rhs_operand" "rI")])))]
8841 "%I6%d5\\t%0, %1, %2\;%I7%D5\\t%0, %3, %4"
8842 [(set_attr "conds" "use")
8843 (set_attr "length" "8")]
8846 (define_insn "*ifcompare_arith_move"
8847 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8848 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
8849 [(match_operand:SI 2 "s_register_operand" "r,r")
8850 (match_operand:SI 3 "arm_add_operand" "rIL,rIL")])
8851 (match_operator:SI 7 "shiftable_operator"
8852 [(match_operand:SI 4 "s_register_operand" "r,r")
8853 (match_operand:SI 5 "arm_rhs_operand" "rI,rI")])
8854 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
8855 (clobber (reg:CC CC_REGNUM))]
8858 /* If we have an operation where (op x 0) is the identity operation and
8859 the conditional operator is LT or GE and we are comparing against zero and
8860 everything is in registers then we can do this in two instructions. */
8861 if (operands[3] == const0_rtx
8862 && GET_CODE (operands[7]) != AND
8863 && GET_CODE (operands[5]) == REG
8864 && GET_CODE (operands[1]) == REG
8865 && REGNO (operands[1]) == REGNO (operands[4])
8866 && REGNO (operands[4]) != REGNO (operands[0]))
8868 if (GET_CODE (operands[6]) == LT)
8869 return \"and\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
8870 else if (GET_CODE (operands[6]) == GE)
8871 return \"bic\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
8873 if (GET_CODE (operands[3]) == CONST_INT
8874 && !const_ok_for_arm (INTVAL (operands[3])))
8875 output_asm_insn (\"cmn\\t%2, #%n3\", operands);
8877 output_asm_insn (\"cmp\\t%2, %3\", operands);
8878 output_asm_insn (\"%I7%d6\\t%0, %4, %5\", operands);
8879 if (which_alternative != 0)
8880 return \"mov%D6\\t%0, %1\";
8883 [(set_attr "conds" "clob")
8884 (set_attr "length" "8,12")]
8887 (define_insn "*if_arith_move"
8888 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8889 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
8890 [(match_operand 6 "cc_register" "") (const_int 0)])
8891 (match_operator:SI 5 "shiftable_operator"
8892 [(match_operand:SI 2 "s_register_operand" "r,r")
8893 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
8894 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))]
8898 %I5%d4\\t%0, %2, %3\;mov%D4\\t%0, %1"
8899 [(set_attr "conds" "use")
8900 (set_attr "length" "4,8")
8901 (set_attr "type" "*,*")]
8904 (define_insn "*ifcompare_move_arith"
8905 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8906 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
8907 [(match_operand:SI 4 "s_register_operand" "r,r")
8908 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
8909 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
8910 (match_operator:SI 7 "shiftable_operator"
8911 [(match_operand:SI 2 "s_register_operand" "r,r")
8912 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
8913 (clobber (reg:CC CC_REGNUM))]
8916 /* If we have an operation where (op x 0) is the identity operation and
8917 the conditional operator is LT or GE and we are comparing against zero and
8918 everything is in registers then we can do this in two instructions */
8919 if (operands[5] == const0_rtx
8920 && GET_CODE (operands[7]) != AND
8921 && GET_CODE (operands[3]) == REG
8922 && GET_CODE (operands[1]) == REG
8923 && REGNO (operands[1]) == REGNO (operands[2])
8924 && REGNO (operands[2]) != REGNO (operands[0]))
8926 if (GET_CODE (operands[6]) == GE)
8927 return \"and\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
8928 else if (GET_CODE (operands[6]) == LT)
8929 return \"bic\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
8932 if (GET_CODE (operands[5]) == CONST_INT
8933 && !const_ok_for_arm (INTVAL (operands[5])))
8934 output_asm_insn (\"cmn\\t%4, #%n5\", operands);
8936 output_asm_insn (\"cmp\\t%4, %5\", operands);
8938 if (which_alternative != 0)
8939 output_asm_insn (\"mov%d6\\t%0, %1\", operands);
8940 return \"%I7%D6\\t%0, %2, %3\";
8942 [(set_attr "conds" "clob")
8943 (set_attr "length" "8,12")]
8946 (define_insn "*if_move_arith"
8947 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8949 (match_operator 4 "arm_comparison_operator"
8950 [(match_operand 6 "cc_register" "") (const_int 0)])
8951 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
8952 (match_operator:SI 5 "shiftable_operator"
8953 [(match_operand:SI 2 "s_register_operand" "r,r")
8954 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))]
8958 %I5%D4\\t%0, %2, %3\;mov%d4\\t%0, %1"
8959 [(set_attr "conds" "use")
8960 (set_attr "length" "4,8")
8961 (set_attr "type" "*,*")]
8964 (define_insn "*ifcompare_move_not"
8965 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8967 (match_operator 5 "arm_comparison_operator"
8968 [(match_operand:SI 3 "s_register_operand" "r,r")
8969 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
8970 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
8972 (match_operand:SI 2 "s_register_operand" "r,r"))))
8973 (clobber (reg:CC CC_REGNUM))]
8976 [(set_attr "conds" "clob")
8977 (set_attr "length" "8,12")]
8980 (define_insn "*if_move_not"
8981 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
8983 (match_operator 4 "arm_comparison_operator"
8984 [(match_operand 3 "cc_register" "") (const_int 0)])
8985 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
8986 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
8990 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2
8991 mvn%d4\\t%0, #%B1\;mvn%D4\\t%0, %2"
8992 [(set_attr "conds" "use")
8993 (set_attr "length" "4,8,8")]
8996 (define_insn "*ifcompare_not_move"
8997 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8999 (match_operator 5 "arm_comparison_operator"
9000 [(match_operand:SI 3 "s_register_operand" "r,r")
9001 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9003 (match_operand:SI 2 "s_register_operand" "r,r"))
9004 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
9005 (clobber (reg:CC CC_REGNUM))]
9008 [(set_attr "conds" "clob")
9009 (set_attr "length" "8,12")]
9012 (define_insn "*if_not_move"
9013 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9015 (match_operator 4 "arm_comparison_operator"
9016 [(match_operand 3 "cc_register" "") (const_int 0)])
9017 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
9018 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
9022 mov%D4\\t%0, %1\;mvn%d4\\t%0, %2
9023 mvn%D4\\t%0, #%B1\;mvn%d4\\t%0, %2"
9024 [(set_attr "conds" "use")
9025 (set_attr "length" "4,8,8")]
9028 (define_insn "*ifcompare_shift_move"
9029 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9031 (match_operator 6 "arm_comparison_operator"
9032 [(match_operand:SI 4 "s_register_operand" "r,r")
9033 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9034 (match_operator:SI 7 "shift_operator"
9035 [(match_operand:SI 2 "s_register_operand" "r,r")
9036 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])
9037 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
9038 (clobber (reg:CC CC_REGNUM))]
9041 [(set_attr "conds" "clob")
9042 (set_attr "length" "8,12")]
9045 (define_insn "*if_shift_move"
9046 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9048 (match_operator 5 "arm_comparison_operator"
9049 [(match_operand 6 "cc_register" "") (const_int 0)])
9050 (match_operator:SI 4 "shift_operator"
9051 [(match_operand:SI 2 "s_register_operand" "r,r,r")
9052 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])
9053 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
9057 mov%D5\\t%0, %1\;mov%d5\\t%0, %2%S4
9058 mvn%D5\\t%0, #%B1\;mov%d5\\t%0, %2%S4"
9059 [(set_attr "conds" "use")
9060 (set_attr "shift" "2")
9061 (set_attr "length" "4,8,8")
9062 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
9063 (const_string "alu_shift")
9064 (const_string "alu_shift_reg")))]
9067 (define_insn "*ifcompare_move_shift"
9068 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9070 (match_operator 6 "arm_comparison_operator"
9071 [(match_operand:SI 4 "s_register_operand" "r,r")
9072 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9073 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
9074 (match_operator:SI 7 "shift_operator"
9075 [(match_operand:SI 2 "s_register_operand" "r,r")
9076 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])))
9077 (clobber (reg:CC CC_REGNUM))]
9080 [(set_attr "conds" "clob")
9081 (set_attr "length" "8,12")]
9084 (define_insn "*if_move_shift"
9085 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9087 (match_operator 5 "arm_comparison_operator"
9088 [(match_operand 6 "cc_register" "") (const_int 0)])
9089 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
9090 (match_operator:SI 4 "shift_operator"
9091 [(match_operand:SI 2 "s_register_operand" "r,r,r")
9092 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])))]
9096 mov%d5\\t%0, %1\;mov%D5\\t%0, %2%S4
9097 mvn%d5\\t%0, #%B1\;mov%D5\\t%0, %2%S4"
9098 [(set_attr "conds" "use")
9099 (set_attr "shift" "2")
9100 (set_attr "length" "4,8,8")
9101 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
9102 (const_string "alu_shift")
9103 (const_string "alu_shift_reg")))]
9106 (define_insn "*ifcompare_shift_shift"
9107 [(set (match_operand:SI 0 "s_register_operand" "=r")
9109 (match_operator 7 "arm_comparison_operator"
9110 [(match_operand:SI 5 "s_register_operand" "r")
9111 (match_operand:SI 6 "arm_add_operand" "rIL")])
9112 (match_operator:SI 8 "shift_operator"
9113 [(match_operand:SI 1 "s_register_operand" "r")
9114 (match_operand:SI 2 "arm_rhs_operand" "rM")])
9115 (match_operator:SI 9 "shift_operator"
9116 [(match_operand:SI 3 "s_register_operand" "r")
9117 (match_operand:SI 4 "arm_rhs_operand" "rM")])))
9118 (clobber (reg:CC CC_REGNUM))]
9121 [(set_attr "conds" "clob")
9122 (set_attr "length" "12")]
9125 (define_insn "*if_shift_shift"
9126 [(set (match_operand:SI 0 "s_register_operand" "=r")
9128 (match_operator 5 "arm_comparison_operator"
9129 [(match_operand 8 "cc_register" "") (const_int 0)])
9130 (match_operator:SI 6 "shift_operator"
9131 [(match_operand:SI 1 "s_register_operand" "r")
9132 (match_operand:SI 2 "arm_rhs_operand" "rM")])
9133 (match_operator:SI 7 "shift_operator"
9134 [(match_operand:SI 3 "s_register_operand" "r")
9135 (match_operand:SI 4 "arm_rhs_operand" "rM")])))]
9137 "mov%d5\\t%0, %1%S6\;mov%D5\\t%0, %3%S7"
9138 [(set_attr "conds" "use")
9139 (set_attr "shift" "1")
9140 (set_attr "length" "8")
9141 (set (attr "type") (if_then_else
9142 (and (match_operand 2 "const_int_operand" "")
9143 (match_operand 4 "const_int_operand" ""))
9144 (const_string "alu_shift")
9145 (const_string "alu_shift_reg")))]
9148 (define_insn "*ifcompare_not_arith"
9149 [(set (match_operand:SI 0 "s_register_operand" "=r")
9151 (match_operator 6 "arm_comparison_operator"
9152 [(match_operand:SI 4 "s_register_operand" "r")
9153 (match_operand:SI 5 "arm_add_operand" "rIL")])
9154 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
9155 (match_operator:SI 7 "shiftable_operator"
9156 [(match_operand:SI 2 "s_register_operand" "r")
9157 (match_operand:SI 3 "arm_rhs_operand" "rI")])))
9158 (clobber (reg:CC CC_REGNUM))]
9161 [(set_attr "conds" "clob")
9162 (set_attr "length" "12")]
9165 (define_insn "*if_not_arith"
9166 [(set (match_operand:SI 0 "s_register_operand" "=r")
9168 (match_operator 5 "arm_comparison_operator"
9169 [(match_operand 4 "cc_register" "") (const_int 0)])
9170 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
9171 (match_operator:SI 6 "shiftable_operator"
9172 [(match_operand:SI 2 "s_register_operand" "r")
9173 (match_operand:SI 3 "arm_rhs_operand" "rI")])))]
9175 "mvn%d5\\t%0, %1\;%I6%D5\\t%0, %2, %3"
9176 [(set_attr "conds" "use")
9177 (set_attr "length" "8")]
9180 (define_insn "*ifcompare_arith_not"
9181 [(set (match_operand:SI 0 "s_register_operand" "=r")
9183 (match_operator 6 "arm_comparison_operator"
9184 [(match_operand:SI 4 "s_register_operand" "r")
9185 (match_operand:SI 5 "arm_add_operand" "rIL")])
9186 (match_operator:SI 7 "shiftable_operator"
9187 [(match_operand:SI 2 "s_register_operand" "r")
9188 (match_operand:SI 3 "arm_rhs_operand" "rI")])
9189 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))
9190 (clobber (reg:CC CC_REGNUM))]
9193 [(set_attr "conds" "clob")
9194 (set_attr "length" "12")]
9197 (define_insn "*if_arith_not"
9198 [(set (match_operand:SI 0 "s_register_operand" "=r")
9200 (match_operator 5 "arm_comparison_operator"
9201 [(match_operand 4 "cc_register" "") (const_int 0)])
9202 (match_operator:SI 6 "shiftable_operator"
9203 [(match_operand:SI 2 "s_register_operand" "r")
9204 (match_operand:SI 3 "arm_rhs_operand" "rI")])
9205 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))]
9207 "mvn%D5\\t%0, %1\;%I6%d5\\t%0, %2, %3"
9208 [(set_attr "conds" "use")
9209 (set_attr "length" "8")]
9212 (define_insn "*ifcompare_neg_move"
9213 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9215 (match_operator 5 "arm_comparison_operator"
9216 [(match_operand:SI 3 "s_register_operand" "r,r")
9217 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9218 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))
9219 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
9220 (clobber (reg:CC CC_REGNUM))]
9223 [(set_attr "conds" "clob")
9224 (set_attr "length" "8,12")]
9227 (define_insn "*if_neg_move"
9228 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9230 (match_operator 4 "arm_comparison_operator"
9231 [(match_operand 3 "cc_register" "") (const_int 0)])
9232 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
9233 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
9237 mov%D4\\t%0, %1\;rsb%d4\\t%0, %2, #0
9238 mvn%D4\\t%0, #%B1\;rsb%d4\\t%0, %2, #0"
9239 [(set_attr "conds" "use")
9240 (set_attr "length" "4,8,8")]
9243 (define_insn "*ifcompare_move_neg"
9244 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9246 (match_operator 5 "arm_comparison_operator"
9247 [(match_operand:SI 3 "s_register_operand" "r,r")
9248 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9249 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
9250 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))))
9251 (clobber (reg:CC CC_REGNUM))]
9254 [(set_attr "conds" "clob")
9255 (set_attr "length" "8,12")]
9258 (define_insn "*if_move_neg"
9259 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9261 (match_operator 4 "arm_comparison_operator"
9262 [(match_operand 3 "cc_register" "") (const_int 0)])
9263 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
9264 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
9268 mov%d4\\t%0, %1\;rsb%D4\\t%0, %2, #0
9269 mvn%d4\\t%0, #%B1\;rsb%D4\\t%0, %2, #0"
9270 [(set_attr "conds" "use")
9271 (set_attr "length" "4,8,8")]
9274 (define_insn "*arith_adjacentmem"
9275 [(set (match_operand:SI 0 "s_register_operand" "=r")
9276 (match_operator:SI 1 "shiftable_operator"
9277 [(match_operand:SI 2 "memory_operand" "m")
9278 (match_operand:SI 3 "memory_operand" "m")]))
9279 (clobber (match_scratch:SI 4 "=r"))]
9280 "TARGET_ARM && adjacent_mem_locations (operands[2], operands[3])"
9286 HOST_WIDE_INT val1 = 0, val2 = 0;
9288 if (REGNO (operands[0]) > REGNO (operands[4]))
9290 ldm[1] = operands[4];
9291 ldm[2] = operands[0];
9295 ldm[1] = operands[0];
9296 ldm[2] = operands[4];
9299 base_reg = XEXP (operands[2], 0);
9301 if (!REG_P (base_reg))
9303 val1 = INTVAL (XEXP (base_reg, 1));
9304 base_reg = XEXP (base_reg, 0);
9307 if (!REG_P (XEXP (operands[3], 0)))
9308 val2 = INTVAL (XEXP (XEXP (operands[3], 0), 1));
9310 arith[0] = operands[0];
9311 arith[3] = operands[1];
9325 if (val1 !=0 && val2 != 0)
9327 if (val1 == 4 || val2 == 4)
9328 /* Other val must be 8, since we know they are adjacent and neither
9330 output_asm_insn (\"ldm%?ib\\t%0, {%1, %2}\", ldm);
9335 ldm[0] = ops[0] = operands[4];
9337 ops[2] = GEN_INT (val1);
9338 output_add_immediate (ops);
9340 output_asm_insn (\"ldm%?ia\\t%0, {%1, %2}\", ldm);
9342 output_asm_insn (\"ldm%?da\\t%0, {%1, %2}\", ldm);
9348 output_asm_insn (\"ldm%?da\\t%0, {%1, %2}\", ldm);
9350 output_asm_insn (\"ldm%?ia\\t%0, {%1, %2}\", ldm);
9355 output_asm_insn (\"ldm%?ia\\t%0, {%1, %2}\", ldm);
9357 output_asm_insn (\"ldm%?da\\t%0, {%1, %2}\", ldm);
9359 output_asm_insn (\"%I3%?\\t%0, %1, %2\", arith);
9362 [(set_attr "length" "12")
9363 (set_attr "predicable" "yes")
9364 (set_attr "type" "load1")]
9367 ; This pattern is never tried by combine, so do it as a peephole
9370 [(set (match_operand:SI 0 "arm_general_register_operand" "")
9371 (match_operand:SI 1 "arm_general_register_operand" ""))
9372 (set (reg:CC CC_REGNUM)
9373 (compare:CC (match_dup 1) (const_int 0)))]
9375 [(parallel [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 1) (const_int 0)))
9376 (set (match_dup 0) (match_dup 1))])]
9380 ; Peepholes to spot possible load- and store-multiples, if the ordering is
9381 ; reversed, check that the memory references aren't volatile.
9384 [(set (match_operand:SI 0 "s_register_operand" "=r")
9385 (match_operand:SI 4 "memory_operand" "m"))
9386 (set (match_operand:SI 1 "s_register_operand" "=r")
9387 (match_operand:SI 5 "memory_operand" "m"))
9388 (set (match_operand:SI 2 "s_register_operand" "=r")
9389 (match_operand:SI 6 "memory_operand" "m"))
9390 (set (match_operand:SI 3 "s_register_operand" "=r")
9391 (match_operand:SI 7 "memory_operand" "m"))]
9392 "TARGET_ARM && load_multiple_sequence (operands, 4, NULL, NULL, NULL)"
9394 return emit_ldm_seq (operands, 4);
9399 [(set (match_operand:SI 0 "s_register_operand" "=r")
9400 (match_operand:SI 3 "memory_operand" "m"))
9401 (set (match_operand:SI 1 "s_register_operand" "=r")
9402 (match_operand:SI 4 "memory_operand" "m"))
9403 (set (match_operand:SI 2 "s_register_operand" "=r")
9404 (match_operand:SI 5 "memory_operand" "m"))]
9405 "TARGET_ARM && load_multiple_sequence (operands, 3, NULL, NULL, NULL)"
9407 return emit_ldm_seq (operands, 3);
9412 [(set (match_operand:SI 0 "s_register_operand" "=r")
9413 (match_operand:SI 2 "memory_operand" "m"))
9414 (set (match_operand:SI 1 "s_register_operand" "=r")
9415 (match_operand:SI 3 "memory_operand" "m"))]
9416 "TARGET_ARM && load_multiple_sequence (operands, 2, NULL, NULL, NULL)"
9418 return emit_ldm_seq (operands, 2);
9423 [(set (match_operand:SI 4 "memory_operand" "=m")
9424 (match_operand:SI 0 "s_register_operand" "r"))
9425 (set (match_operand:SI 5 "memory_operand" "=m")
9426 (match_operand:SI 1 "s_register_operand" "r"))
9427 (set (match_operand:SI 6 "memory_operand" "=m")
9428 (match_operand:SI 2 "s_register_operand" "r"))
9429 (set (match_operand:SI 7 "memory_operand" "=m")
9430 (match_operand:SI 3 "s_register_operand" "r"))]
9431 "TARGET_ARM && store_multiple_sequence (operands, 4, NULL, NULL, NULL)"
9433 return emit_stm_seq (operands, 4);
9438 [(set (match_operand:SI 3 "memory_operand" "=m")
9439 (match_operand:SI 0 "s_register_operand" "r"))
9440 (set (match_operand:SI 4 "memory_operand" "=m")
9441 (match_operand:SI 1 "s_register_operand" "r"))
9442 (set (match_operand:SI 5 "memory_operand" "=m")
9443 (match_operand:SI 2 "s_register_operand" "r"))]
9444 "TARGET_ARM && store_multiple_sequence (operands, 3, NULL, NULL, NULL)"
9446 return emit_stm_seq (operands, 3);
9451 [(set (match_operand:SI 2 "memory_operand" "=m")
9452 (match_operand:SI 0 "s_register_operand" "r"))
9453 (set (match_operand:SI 3 "memory_operand" "=m")
9454 (match_operand:SI 1 "s_register_operand" "r"))]
9455 "TARGET_ARM && store_multiple_sequence (operands, 2, NULL, NULL, NULL)"
9457 return emit_stm_seq (operands, 2);
9462 [(set (match_operand:SI 0 "s_register_operand" "")
9463 (and:SI (ge:SI (match_operand:SI 1 "s_register_operand" "")
9465 (neg:SI (match_operator:SI 2 "arm_comparison_operator"
9466 [(match_operand:SI 3 "s_register_operand" "")
9467 (match_operand:SI 4 "arm_rhs_operand" "")]))))
9468 (clobber (match_operand:SI 5 "s_register_operand" ""))]
9470 [(set (match_dup 5) (not:SI (ashiftrt:SI (match_dup 1) (const_int 31))))
9471 (set (match_dup 0) (and:SI (match_op_dup 2 [(match_dup 3) (match_dup 4)])
9476 ;; This split can be used because CC_Z mode implies that the following
9477 ;; branch will be an equality, or an unsigned inequality, so the sign
9478 ;; extension is not needed.
9481 [(set (reg:CC_Z CC_REGNUM)
9483 (ashift:SI (subreg:SI (match_operand:QI 0 "memory_operand" "") 0)
9485 (match_operand 1 "const_int_operand" "")))
9486 (clobber (match_scratch:SI 2 ""))]
9488 && (((unsigned HOST_WIDE_INT) INTVAL (operands[1]))
9489 == (((unsigned HOST_WIDE_INT) INTVAL (operands[1])) >> 24) << 24)"
9490 [(set (match_dup 2) (zero_extend:SI (match_dup 0)))
9491 (set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 1)))]
9493 operands[1] = GEN_INT (((unsigned long) INTVAL (operands[1])) >> 24);
9497 (define_expand "prologue"
9498 [(clobber (const_int 0))]
9501 arm_expand_prologue ();
9503 thumb_expand_prologue ();
9508 (define_expand "epilogue"
9509 [(clobber (const_int 0))]
9512 if (current_function_calls_eh_return)
9513 emit_insn (gen_prologue_use (gen_rtx_REG (Pmode, 2)));
9515 thumb_expand_epilogue ();
9516 else if (USE_RETURN_INSN (FALSE))
9518 emit_jump_insn (gen_return ());
9521 emit_jump_insn (gen_rtx_UNSPEC_VOLATILE (VOIDmode,
9523 gen_rtx_RETURN (VOIDmode)),
9529 ;; Note - although unspec_volatile's USE all hard registers,
9530 ;; USEs are ignored after relaod has completed. Thus we need
9531 ;; to add an unspec of the link register to ensure that flow
9532 ;; does not think that it is unused by the sibcall branch that
9533 ;; will replace the standard function epilogue.
9534 (define_insn "sibcall_epilogue"
9535 [(parallel [(unspec:SI [(reg:SI LR_REGNUM)] UNSPEC_PROLOGUE_USE)
9536 (unspec_volatile [(return)] VUNSPEC_EPILOGUE)])]
9539 if (use_return_insn (FALSE, next_nonnote_insn (insn)))
9540 return output_return_instruction (const_true_rtx, FALSE, FALSE);
9541 return arm_output_epilogue (next_nonnote_insn (insn));
9543 ;; Length is absolute worst case
9544 [(set_attr "length" "44")
9545 (set_attr "type" "block")
9546 ;; We don't clobber the conditions, but the potential length of this
9547 ;; operation is sufficient to make conditionalizing the sequence
9548 ;; unlikely to be profitable.
9549 (set_attr "conds" "clob")]
9552 (define_insn "*epilogue_insns"
9553 [(unspec_volatile [(return)] VUNSPEC_EPILOGUE)]
9557 return arm_output_epilogue (NULL);
9558 else /* TARGET_THUMB */
9559 return thumb_unexpanded_epilogue ();
9561 ; Length is absolute worst case
9562 [(set_attr "length" "44")
9563 (set_attr "type" "block")
9564 ;; We don't clobber the conditions, but the potential length of this
9565 ;; operation is sufficient to make conditionalizing the sequence
9566 ;; unlikely to be profitable.
9567 (set_attr "conds" "clob")]
9570 (define_expand "eh_epilogue"
9571 [(use (match_operand:SI 0 "register_operand" ""))
9572 (use (match_operand:SI 1 "register_operand" ""))
9573 (use (match_operand:SI 2 "register_operand" ""))]
9577 cfun->machine->eh_epilogue_sp_ofs = operands[1];
9578 if (GET_CODE (operands[2]) != REG || REGNO (operands[2]) != 2)
9580 rtx ra = gen_rtx_REG (Pmode, 2);
9582 emit_move_insn (ra, operands[2]);
9585 /* This is a hack -- we may have crystalized the function type too
9587 cfun->machine->func_type = 0;
9591 ;; This split is only used during output to reduce the number of patterns
9592 ;; that need assembler instructions adding to them. We allowed the setting
9593 ;; of the conditions to be implicit during rtl generation so that
9594 ;; the conditional compare patterns would work. However this conflicts to
9595 ;; some extent with the conditional data operations, so we have to split them
9599 [(set (match_operand:SI 0 "s_register_operand" "")
9600 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
9601 [(match_operand 2 "" "") (match_operand 3 "" "")])
9603 (match_operand 4 "" "")))
9604 (clobber (reg:CC CC_REGNUM))]
9605 "TARGET_ARM && reload_completed"
9606 [(set (match_dup 5) (match_dup 6))
9607 (cond_exec (match_dup 7)
9608 (set (match_dup 0) (match_dup 4)))]
9611 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
9612 operands[2], operands[3]);
9613 enum rtx_code rc = GET_CODE (operands[1]);
9615 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
9616 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
9617 if (mode == CCFPmode || mode == CCFPEmode)
9618 rc = reverse_condition_maybe_unordered (rc);
9620 rc = reverse_condition (rc);
9622 operands[7] = gen_rtx_fmt_ee (rc, VOIDmode, operands[5], const0_rtx);
9627 [(set (match_operand:SI 0 "s_register_operand" "")
9628 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
9629 [(match_operand 2 "" "") (match_operand 3 "" "")])
9630 (match_operand 4 "" "")
9632 (clobber (reg:CC CC_REGNUM))]
9633 "TARGET_ARM && reload_completed"
9634 [(set (match_dup 5) (match_dup 6))
9635 (cond_exec (match_op_dup 1 [(match_dup 5) (const_int 0)])
9636 (set (match_dup 0) (match_dup 4)))]
9639 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
9640 operands[2], operands[3]);
9642 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
9643 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
9648 [(set (match_operand:SI 0 "s_register_operand" "")
9649 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
9650 [(match_operand 2 "" "") (match_operand 3 "" "")])
9651 (match_operand 4 "" "")
9652 (match_operand 5 "" "")))
9653 (clobber (reg:CC CC_REGNUM))]
9654 "TARGET_ARM && reload_completed"
9655 [(set (match_dup 6) (match_dup 7))
9656 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
9657 (set (match_dup 0) (match_dup 4)))
9658 (cond_exec (match_dup 8)
9659 (set (match_dup 0) (match_dup 5)))]
9662 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
9663 operands[2], operands[3]);
9664 enum rtx_code rc = GET_CODE (operands[1]);
9666 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
9667 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
9668 if (mode == CCFPmode || mode == CCFPEmode)
9669 rc = reverse_condition_maybe_unordered (rc);
9671 rc = reverse_condition (rc);
9673 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
9678 [(set (match_operand:SI 0 "s_register_operand" "")
9679 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
9680 [(match_operand:SI 2 "s_register_operand" "")
9681 (match_operand:SI 3 "arm_add_operand" "")])
9682 (match_operand:SI 4 "arm_rhs_operand" "")
9684 (match_operand:SI 5 "s_register_operand" ""))))
9685 (clobber (reg:CC CC_REGNUM))]
9686 "TARGET_ARM && reload_completed"
9687 [(set (match_dup 6) (match_dup 7))
9688 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
9689 (set (match_dup 0) (match_dup 4)))
9690 (cond_exec (match_dup 8)
9691 (set (match_dup 0) (not:SI (match_dup 5))))]
9694 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
9695 operands[2], operands[3]);
9696 enum rtx_code rc = GET_CODE (operands[1]);
9698 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
9699 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
9700 if (mode == CCFPmode || mode == CCFPEmode)
9701 rc = reverse_condition_maybe_unordered (rc);
9703 rc = reverse_condition (rc);
9705 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
9709 (define_insn "*cond_move_not"
9710 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9711 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
9712 [(match_operand 3 "cc_register" "") (const_int 0)])
9713 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9715 (match_operand:SI 2 "s_register_operand" "r,r"))))]
9719 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2"
9720 [(set_attr "conds" "use")
9721 (set_attr "length" "4,8")]
9724 ;; The next two patterns occur when an AND operation is followed by a
9725 ;; scc insn sequence
9727 (define_insn "*sign_extract_onebit"
9728 [(set (match_operand:SI 0 "s_register_operand" "=r")
9729 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
9731 (match_operand:SI 2 "const_int_operand" "n")))
9732 (clobber (reg:CC CC_REGNUM))]
9735 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
9736 output_asm_insn (\"ands\\t%0, %1, %2\", operands);
9737 return \"mvnne\\t%0, #0\";
9739 [(set_attr "conds" "clob")
9740 (set_attr "length" "8")]
9743 (define_insn "*not_signextract_onebit"
9744 [(set (match_operand:SI 0 "s_register_operand" "=r")
9746 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
9748 (match_operand:SI 2 "const_int_operand" "n"))))
9749 (clobber (reg:CC CC_REGNUM))]
9752 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
9753 output_asm_insn (\"tst\\t%1, %2\", operands);
9754 output_asm_insn (\"mvneq\\t%0, #0\", operands);
9755 return \"movne\\t%0, #0\";
9757 [(set_attr "conds" "clob")
9758 (set_attr "length" "12")]
9761 ;; Push multiple registers to the stack. Registers are in parallel (use ...)
9762 ;; expressions. For simplicity, the first register is also in the unspec
9764 (define_insn "*push_multi"
9765 [(match_parallel 2 "multi_register_push"
9766 [(set (match_operand:BLK 0 "memory_operand" "=m")
9767 (unspec:BLK [(match_operand:SI 1 "s_register_operand" "r")]
9768 UNSPEC_PUSH_MULT))])]
9772 int num_saves = XVECLEN (operands[2], 0);
9774 /* For the StrongARM at least it is faster to
9775 use STR to store only a single register. */
9777 output_asm_insn (\"str\\t%1, [%m0, #-4]!\", operands);
9783 strcpy (pattern, \"stmfd\\t%m0!, {%1\");
9785 for (i = 1; i < num_saves; i++)
9787 strcat (pattern, \", %|\");
9789 reg_names[REGNO (XEXP (XVECEXP (operands[2], 0, i), 0))]);
9792 strcat (pattern, \"}\");
9793 output_asm_insn (pattern, operands);
9798 [(set_attr "type" "store4")]
9801 (define_insn "stack_tie"
9802 [(set (mem:BLK (scratch))
9803 (unspec:BLK [(match_operand:SI 0 "s_register_operand" "r")
9804 (match_operand:SI 1 "s_register_operand" "r")]
9808 [(set_attr "length" "0")]
9811 ;; Similarly for the floating point registers
9812 (define_insn "*push_fp_multi"
9813 [(match_parallel 2 "multi_register_push"
9814 [(set (match_operand:BLK 0 "memory_operand" "=m")
9815 (unspec:BLK [(match_operand:XF 1 "f_register_operand" "f")]
9816 UNSPEC_PUSH_MULT))])]
9817 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_FPA"
9822 sprintf (pattern, \"sfmfd\\t%%1, %d, [%%m0]!\", XVECLEN (operands[2], 0));
9823 output_asm_insn (pattern, operands);
9826 [(set_attr "type" "f_store")]
9829 ;; Special patterns for dealing with the constant pool
9831 (define_insn "align_4"
9832 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN)]
9835 assemble_align (32);
9840 (define_insn "align_8"
9841 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN8)]
9844 assemble_align (64);
9849 (define_insn "consttable_end"
9850 [(unspec_volatile [(const_int 0)] VUNSPEC_POOL_END)]
9853 making_const_table = FALSE;
9858 (define_insn "consttable_1"
9859 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_1)]
9862 making_const_table = TRUE;
9863 assemble_integer (operands[0], 1, BITS_PER_WORD, 1);
9867 [(set_attr "length" "4")]
9870 (define_insn "consttable_2"
9871 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_2)]
9874 making_const_table = TRUE;
9875 assemble_integer (operands[0], 2, BITS_PER_WORD, 1);
9879 [(set_attr "length" "4")]
9882 (define_insn "consttable_4"
9883 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_4)]
9887 making_const_table = TRUE;
9888 switch (GET_MODE_CLASS (GET_MODE (operands[0])))
9893 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[0]);
9894 assemble_real (r, GET_MODE (operands[0]), BITS_PER_WORD);
9898 assemble_integer (operands[0], 4, BITS_PER_WORD, 1);
9903 [(set_attr "length" "4")]
9906 (define_insn "consttable_8"
9907 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_8)]
9911 making_const_table = TRUE;
9912 switch (GET_MODE_CLASS (GET_MODE (operands[0])))
9917 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[0]);
9918 assemble_real (r, GET_MODE (operands[0]), BITS_PER_WORD);
9922 assemble_integer (operands[0], 8, BITS_PER_WORD, 1);
9927 [(set_attr "length" "8")]
9930 ;; Miscellaneous Thumb patterns
9932 (define_expand "tablejump"
9933 [(parallel [(set (pc) (match_operand:SI 0 "register_operand" ""))
9934 (use (label_ref (match_operand 1 "" "")))])]
9939 /* Hopefully, CSE will eliminate this copy. */
9940 rtx reg1 = copy_addr_to_reg (gen_rtx_LABEL_REF (Pmode, operands[1]));
9941 rtx reg2 = gen_reg_rtx (SImode);
9943 emit_insn (gen_addsi3 (reg2, operands[0], reg1));
9949 ;; NB never uses BX.
9950 (define_insn "*thumb_tablejump"
9951 [(set (pc) (match_operand:SI 0 "register_operand" "l*r"))
9952 (use (label_ref (match_operand 1 "" "")))]
9955 [(set_attr "length" "2")]
9960 (define_insn "clzsi2"
9961 [(set (match_operand:SI 0 "s_register_operand" "=r")
9962 (clz:SI (match_operand:SI 1 "s_register_operand" "r")))]
9963 "TARGET_ARM && arm_arch5"
9965 [(set_attr "predicable" "yes")])
9967 (define_expand "ffssi2"
9968 [(set (match_operand:SI 0 "s_register_operand" "")
9969 (ffs:SI (match_operand:SI 1 "s_register_operand" "")))]
9970 "TARGET_ARM && arm_arch5"
9975 t1 = gen_reg_rtx (SImode);
9976 t2 = gen_reg_rtx (SImode);
9977 t3 = gen_reg_rtx (SImode);
9979 emit_insn (gen_negsi2 (t1, operands[1]));
9980 emit_insn (gen_andsi3 (t2, operands[1], t1));
9981 emit_insn (gen_clzsi2 (t3, t2));
9982 emit_insn (gen_subsi3 (operands[0], GEN_INT (32), t3));
9987 (define_expand "ctzsi2"
9988 [(set (match_operand:SI 0 "s_register_operand" "")
9989 (ctz:SI (match_operand:SI 1 "s_register_operand" "")))]
9990 "TARGET_ARM && arm_arch5"
9995 t1 = gen_reg_rtx (SImode);
9996 t2 = gen_reg_rtx (SImode);
9997 t3 = gen_reg_rtx (SImode);
9999 emit_insn (gen_negsi2 (t1, operands[1]));
10000 emit_insn (gen_andsi3 (t2, operands[1], t1));
10001 emit_insn (gen_clzsi2 (t3, t2));
10002 emit_insn (gen_subsi3 (operands[0], GEN_INT (31), t3));
10007 ;; V5E instructions.
10009 (define_insn "prefetch"
10010 [(prefetch (match_operand:SI 0 "address_operand" "p")
10011 (match_operand:SI 1 "" "")
10012 (match_operand:SI 2 "" ""))]
10013 "TARGET_ARM && arm_arch5e"
10016 ;; General predication pattern
10019 [(match_operator 0 "arm_comparison_operator"
10020 [(match_operand 1 "cc_register" "")
10026 (define_insn "prologue_use"
10027 [(unspec:SI [(match_operand:SI 0 "register_operand" "")] UNSPEC_PROLOGUE_USE)]
10029 "%@ %0 needed for prologue"
10033 ;; Patterns for exception handling
10035 (define_expand "eh_return"
10036 [(use (match_operand 0 "general_operand" ""))]
10041 emit_insn (gen_arm_eh_return (operands[0]));
10043 emit_insn (gen_thumb_eh_return (operands[0]));
10048 ;; We can't expand this before we know where the link register is stored.
10049 (define_insn_and_split "arm_eh_return"
10050 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "r")]
10052 (clobber (match_scratch:SI 1 "=&r"))]
10055 "&& reload_completed"
10059 arm_set_return_address (operands[0], operands[1]);
10064 (define_insn_and_split "thumb_eh_return"
10065 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "l")]
10067 (clobber (match_scratch:SI 1 "=&l"))]
10070 "&& reload_completed"
10074 thumb_set_return_address (operands[0], operands[1]);
10079 ;; Load the FPA co-processor patterns
10081 ;; Load the Maverick co-processor patterns
10082 (include "cirrus.md")
10083 ;; Load the Intel Wireless Multimedia Extension patterns
10084 (include "iwmmxt.md")
10085 ;; Load the VFP co-processor patterns