1 ;;- Machine description for ARM for GNU compiler
2 ;; Copyright 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000,
3 ;; 2001, 2002, 2003, 2004, 2005, 2006, 2007 Free Software Foundation, Inc.
4 ;; Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
5 ;; and Martin Simmons (@harleqn.co.uk).
6 ;; More major hacks by Richard Earnshaw (rearnsha@arm.com).
8 ;; This file is part of GCC.
10 ;; GCC is free software; you can redistribute it and/or modify it
11 ;; under the terms of the GNU General Public License as published
12 ;; by the Free Software Foundation; either version 2, or (at your
13 ;; option) any later version.
15 ;; GCC is distributed in the hope that it will be useful, but WITHOUT
16 ;; ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
17 ;; or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
18 ;; License for more details.
20 ;; You should have received a copy of the GNU General Public License
21 ;; along with GCC; see the file COPYING. If not, write to
22 ;; the Free Software Foundation, 51 Franklin Street, Fifth Floor,
23 ;; Boston, MA 02110-1301, USA.
25 ;;- See file "rtl.def" for documentation on define_insn, match_*, et. al.
28 ;;---------------------------------------------------------------------------
33 [(R0_REGNUM 0) ; First CORE register
34 (IP_REGNUM 12) ; Scratch register
35 (SP_REGNUM 13) ; Stack pointer
36 (LR_REGNUM 14) ; Return address register
37 (PC_REGNUM 15) ; Program counter
38 (CC_REGNUM 24) ; Condition code pseudo register
; NOTE(review): CC_REGNUM (24) is deliberately placed above the FPA bank
; (registers 16-23 below), so it never aliases a real register.
39 (LAST_ARM_REGNUM 15) ; Last core register (same number as PC_REGNUM)
40 (FPA_F0_REGNUM 16) ; FIRST_FPA_REGNUM
41 (FPA_F7_REGNUM 23) ; LAST_FPA_REGNUM
44 ;; 3rd operand to select_dominance_cc_mode
53 ;; Note: sin and cos are no-longer used.
56 [(UNSPEC_SIN 0) ; `sin' operation (MODE_FLOAT):
57 ; operand 0 is the result,
58 ; operand 1 the parameter.
59 (UNSPEC_COS 1) ; `cos' operation (MODE_FLOAT):
; NOTE(review): fixed typo -- this was spelled UNPSEC_COS, which would
; leave any pattern referring to UNSPEC_COS with an undefined constant.
60 ; operand 0 is the result,
61 ; operand 1 the parameter.
62 (UNSPEC_PUSH_MULT 2) ; `push multiple' operation:
63 ; operand 0 is the first register,
64 ; subsequent registers are in parallel (use ...)
66 (UNSPEC_PIC_SYM 3) ; A symbol that has been treated properly for pic
67 ; usage, that is, we will add the pic_register
68 ; value to it before trying to dereference it.
69 (UNSPEC_PIC_BASE 4) ; Adding the PC value to the offset to the
70 ; GLOBAL_OFFSET_TABLE. The operation is fully
71 ; described by the RTL but must be wrapped to
72 ; prevent combine from trying to rip it apart.
73 (UNSPEC_PRLG_STK 5) ; A special barrier that prevents frame accesses
74 ; being scheduled before the stack adjustment insn.
75 (UNSPEC_PROLOGUE_USE 6) ; As USE insns are not meaningful after reload,
76 ; this unspec is used to prevent the deletion of
77 ; instructions setting registers for EH handling
78 ; and stack frame generation. Operand 0 is the
80 (UNSPEC_CHECK_ARCH 7); Set CCs to indicate 26-bit or 32-bit mode.
81 (UNSPEC_WSHUFH 8) ; Used by the intrinsic form of the iWMMXt WSHUFH instruction.
82 (UNSPEC_WACC 9) ; Used by the intrinsic form of the iWMMXt WACC instruction.
83 (UNSPEC_TMOVMSK 10) ; Used by the intrinsic form of the iWMMXt TMOVMSK instruction.
84 (UNSPEC_WSAD 11) ; Used by the intrinsic form of the iWMMXt WSAD instruction.
85 (UNSPEC_WSADZ 12) ; Used by the intrinsic form of the iWMMXt WSADZ instruction.
86 (UNSPEC_WMACS 13) ; Used by the intrinsic form of the iWMMXt WMACS instruction.
87 (UNSPEC_WMACU 14) ; Used by the intrinsic form of the iWMMXt WMACU instruction.
88 (UNSPEC_WMACSZ 15) ; Used by the intrinsic form of the iWMMXt WMACSZ instruction.
89 (UNSPEC_WMACUZ 16) ; Used by the intrinsic form of the iWMMXt WMACUZ instruction.
90 (UNSPEC_CLRDI 17) ; Used by the intrinsic form of the iWMMXt CLRDI instruction.
91 (UNSPEC_WMADDS 18) ; Used by the intrinsic form of the iWMMXt WMADDS instruction.
92 (UNSPEC_WMADDU 19) ; Used by the intrinsic form of the iWMMXt WMADDU instruction.
93 (UNSPEC_TLS 20) ; A symbol that has been treated properly for TLS usage.
94 (UNSPEC_PIC_LABEL 21) ; A label used for PIC access that does not appear in the
; NOTE(review): UNSPEC_STACK_ALIGN was 20, which collided with UNSPEC_TLS
; above and made the two unspecs indistinguishable in RTL. Renumbered to
; 22 (21 is taken by UNSPEC_PIC_LABEL), matching upstream GCC.
96 (UNSPEC_STACK_ALIGN 22) ; Doubleword aligned stack pointer. Used to
97 ; generate correct unwind information.
101 ;; UNSPEC_VOLATILE Usage:
;; NOTE(review): several trailing comments in this table appear cut off in
;; this copy of the file; the constant values themselves are intact.
104 [(VUNSPEC_BLOCKAGE 0) ; `blockage' insn to prevent scheduling across an
106 (VUNSPEC_EPILOGUE 1) ; `epilogue' insn, used to represent any part of the
107 ; instruction epilogue sequence that isn't expanded
108 ; into normal RTL. Used for both normal and sibcall
110 (VUNSPEC_ALIGN 2) ; `align' insn. Used at the head of a minipool table
111 ; for inlined constants.
112 (VUNSPEC_POOL_END 3) ; `end-of-table'. Used to mark the end of a minipool
114 (VUNSPEC_POOL_1 4) ; `pool-entry(1)'. An entry in the constant pool for
116 (VUNSPEC_POOL_2 5) ; `pool-entry(2)'. An entry in the constant pool for
118 (VUNSPEC_POOL_4 6) ; `pool-entry(4)'. An entry in the constant pool for
120 (VUNSPEC_POOL_8 7) ; `pool-entry(8)'. An entry in the constant pool for
122 (VUNSPEC_TMRC 8) ; Used by the iWMMXt TMRC instruction.
123 (VUNSPEC_TMCR 9) ; Used by the iWMMXt TMCR instruction.
124 (VUNSPEC_ALIGN8 10) ; 8-byte alignment version of VUNSPEC_ALIGN
125 (VUNSPEC_WCMP_EQ 11) ; Used by the iWMMXt WCMPEQ instructions
126 (VUNSPEC_WCMP_GTU 12) ; Used by the iWMMXt WCMPGTU instructions
127 (VUNSPEC_WCMP_GT 13) ; Used by the iwMMXT WCMPGT instructions
; NOTE(review): values 14-19 are unused here and EH_RETURN is pinned at 20;
; confirm against the full file before assigning new VUNSPEC numbers.
128 (VUNSPEC_EH_RETURN 20); Use to override the return address for exception
133 ;;---------------------------------------------------------------------------
136 ; IS_THUMB is set to 'yes' when we are generating Thumb code, and 'no' when
137 ; generating ARM code. This is used to control the length of some insn
138 ; patterns that share the same RTL in both ARM and Thumb code.
139 (define_attr "is_thumb" "no,yes" (const (symbol_ref "thumb_code")))
141 ; IS_STRONGARM is set to 'yes' when compiling for StrongARM, it affects
142 ; scheduling decisions for the load unit and the multiplier.
143 (define_attr "is_strongarm" "no,yes" (const (symbol_ref "arm_tune_strongarm")))
145 ; IS_XSCALE is set to 'yes' when compiling for XScale.
146 (define_attr "is_xscale" "no,yes" (const (symbol_ref "arm_tune_xscale")))
148 ;; Operand number of an input operand that is shifted. Zero if the
149 ;; given instruction does not shift one of its input operands.
150 (define_attr "shift" "" (const_int 0))
152 ; Floating Point Unit. If we only have floating point emulation, then there
153 ; is no point in scheduling the floating point insns. (Well, for best
154 ; performance we should try and group them together).
155 (define_attr "fpu" "none,fpa,fpe2,fpe3,maverick,vfp"
156 (const (symbol_ref "arm_fpu_attr")))
158 ; LENGTH of an instruction (in bytes)
; Default is 4 (one ARM instruction); patterns override where needed.
159 (define_attr "length" "" (const_int 4))
161 ; POOL_RANGE is how far away from a constant pool entry that this insn
162 ; can be placed. If the distance is zero, then this insn will never
163 ; reference the pool.
164 ; NEG_POOL_RANGE is nonzero for insns that can reference a constant pool entry
165 ; before its address.
166 (define_attr "pool_range" "" (const_int 0))
167 (define_attr "neg_pool_range" "" (const_int 0))
169 ; An assembler sequence may clobber the condition codes without us knowing.
170 ; If such an insn references the pool, then we have no way of knowing how,
171 ; so use the most conservative value for pool_range.
172 (define_asm_attributes
173 [(set_attr "conds" "clob")
174 (set_attr "length" "4")
175 (set_attr "pool_range" "250")]) ; most conservative assumption (see above)
177 ;; The instruction used to implement a particular pattern. This
178 ;; information is used by pipeline descriptions to provide accurate
179 ;; scheduling information.
182 "smulxy,smlaxy,smlalxy,smulwy,smlawx,mul,muls,mla,mlas,umull,umulls,umlal,umlals,smull,smulls,smlal,smlals,smlawy,smuad,smuadx,smlad,smladx,smusd,smusdx,smlsd,smlsdx,smmul,smmulr,other"
183 (const_string "other"))
185 ; TYPE attribute is used to detect floating point instructions which, if
186 ; running on a co-processor can run in parallel with other, basic instructions
187 ; If write-buffer scheduling is enabled then it can also be used in the
188 ; scheduling of writes.
190 ; Classification of each insn
191 ; alu any alu instruction that doesn't hit memory or fp
192 ; regs or have a shifted source operand
193 ; alu_shift any data instruction that doesn't hit memory or fp
194 ; regs, but has a source operand shifted by a constant
195 ; alu_shift_reg any data instruction that doesn't hit memory or fp
196 ; regs, but has a source operand shifted by a register value
197 ; mult a multiply instruction
198 ; block blockage insn, this blocks all functional units
199 ; float a floating point arithmetic operation (subject to expansion)
200 ; fdivd DFmode floating point division
201 ; fdivs SFmode floating point division
202 ; fmul Floating point multiply
203 ; ffmul Fast floating point multiply
204 ; farith Floating point arithmetic (4 cycle)
205 ; ffarith Fast floating point arithmetic (2 cycle)
206 ; float_em a floating point arithmetic operation that is normally emulated
207 ; even on a machine with an fpa.
208 ; f_load a floating point load from memory
209 ; f_store a floating point store to memory
210 ; f_load[sd] single/double load from memory
211 ; f_store[sd] single/double store to memory
212 ; f_flag a transfer of co-processor flags to the CPSR
213 ; f_mem_r a transfer of a floating point register to a real reg via mem
214 ; r_mem_f the reverse of f_mem_r
215 ; f_2_r fast transfer float to arm (no memory needed)
216 ; r_2_f fast transfer arm to float
217 ; f_cvt convert floating<->integral
219 ; call a subroutine call
220 ; load_byte load byte(s) from memory to arm registers
221 ; load1 load 1 word from memory to arm registers
222 ; load2 load 2 words from memory to arm registers
223 ; load3 load 3 words from memory to arm registers
224 ; load4 load 4 words from memory to arm registers
225 ; store store 1 word to memory from arm registers
226 ; store2 store 2 words
227 ; store3 store 3 words
228 ; store4 store 4 (or more) words
229 ; Additions for Cirrus Maverick co-processor:
230 ; mav_farith Floating point arithmetic (4 cycle)
231 ; mav_dmult Double multiplies (7 cycle)
234 "alu,alu_shift,alu_shift_reg,mult,block,float,fdivx,fdivd,fdivs,fmul,ffmul,farith,ffarith,f_flag,float_em,f_load,f_store,f_loads,f_loadd,f_stores,f_stored,f_mem_r,r_mem_f,f_2_r,r_2_f,f_cvt,branch,call,load_byte,load1,load2,load3,load4,store1,store2,store3,store4,mav_farith,mav_dmult"
236 (eq_attr "insn" "smulxy,smlaxy,smlalxy,smulwy,smlawx,mul,muls,mla,mlas,umull,umulls,umlal,umlals,smull,smulls,smlal,smlals")
237 (const_string "mult")
238 (const_string "alu")))
240 ; Load scheduling, set from the arm_ld_sched variable
241 ; initialized by arm_override_options()
242 (define_attr "ldsched" "no,yes" (const (symbol_ref "arm_ld_sched")))
244 ; condition codes: this one is used by final_prescan_insn to speed up
245 ; conditionalizing instructions. It saves having to scan the rtl to see if
246 ; it uses or alters the condition codes.
248 ; USE means that the condition codes are used by the insn in the process of
249 ; outputting code, this means (at present) that we can't use the insn in
252 ; SET means that the purpose of the insn is to set the condition codes in a
253 ; well defined manner.
255 ; CLOB means that the condition codes are altered in an undefined manner, if
256 ; they are altered at all
258 ; JUMP_CLOB is used when the condition cannot be represented by a single
259 ; instruction (UNEQ and LTGT). These cannot be predicated.
261 ; NOCOND means that the condition codes are neither altered nor affect the
262 ; output of this insn
264 (define_attr "conds" "use,set,clob,jump_clob,nocond"
265 (if_then_else (eq_attr "type" "call")
266 (const_string "clob")
267 (const_string "nocond")))
; i.e. by default calls are assumed to clobber the flags; every other
; pattern defaults to neither reading nor writing them.
269 ; Predicable means that the insn can be conditionally executed based on
270 ; an automatically added predicate (additional patterns are generated by
271 ; gen...). We default to 'no' because no Thumb patterns match this rule
272 ; and not all ARM patterns do.
273 (define_attr "predicable" "no,yes" (const_string "no"))
275 ; Only model the write buffer for ARM6 and ARM7. Earlier processors don't
276 ; have one. Later ones, such as StrongARM, have write-back caches, so don't
277 ; suffer blockages enough to warrant modelling this (and it can adversely
278 ; affect the schedule).
279 (define_attr "model_wbuf" "no,yes" (const (symbol_ref "arm_tune_wbuf")))
281 ; WRITE_CONFLICT implies that a read following an unrelated write is likely
282 ; to stall the processor. Used with model_wbuf above.
283 (define_attr "write_conflict" "no,yes"
284 (if_then_else (eq_attr "type"
285 "block,float_em,f_load,f_store,f_mem_r,r_mem_f,call,load1")
; NOTE(review): the "yes" arm was missing in this copy, leaving the
; if_then_else malformed (a one-armed conditional); restored it so that
; memory-touching types report a conflict and everything else does not.
286 (const_string "yes")
287 (const_string "no")))
289 ; Classify the insns into those that take one cycle and those that take more
290 ; than one on the main cpu execution unit.
291 (define_attr "core_cycles" "single,multi"
292 (if_then_else (eq_attr "type"
293 "alu,alu_shift,float,fdivx,fdivd,fdivs,fmul,ffmul,farith,ffarith")
294 (const_string "single")
295 (const_string "multi")))
297 ;; FAR_JUMP is "yes" if a BL instruction is used to generate a branch to a
298 ;; distant label. Only applicable to Thumb code.
299 (define_attr "far_jump" "yes,no" (const_string "no"))
302 ;; The number of machine instructions this pattern expands to.
303 ;; Used for Thumb-2 conditional execution.
; Default is 1: most patterns emit exactly one machine instruction.
304 (define_attr "ce_count" "" (const_int 1))
306 ;;---------------------------------------------------------------------------
309 ; A list of modes that are exactly 64 bits in size. We use this to expand
310 ; some splits that are the same for all modes when operating on ARM
; (comment appears truncated in this copy -- presumably "...on ARM registers")
312 (define_mode_macro ANY64 [DI DF V8QI V4HI V2SI V2SF])
314 ;;---------------------------------------------------------------------------
; Operand predicates and register/immediate constraints live in their own
; files and are shared by every pattern below.
317 (include "predicates.md")
318 (include "constraints.md")
320 ;;---------------------------------------------------------------------------
321 ;; Pipeline descriptions
323 ;; Processor type. This is created automatically from arm-cores.def.
324 (include "arm-tune.md")
326 ;; True if the generic scheduling description should be used.
; NOTE(review): the (const (if_then_else ...)) wrapper and the "no" arm were
; missing in this copy, leaving define_attr with a stray extra argument;
; restored the canonical form: cores with their own pipeline description
; (listed below) answer "no", everything else uses the generic scheduler.
328 (define_attr "generic_sched" "yes,no"
329 (const (if_then_else
330 (eq_attr "tune" "arm926ejs,arm1020e,arm1026ejs,arm1136js,arm1136jfs")
331 (const_string "no")
332 (const_string "yes"))))
; NOTE(review): the (const (if_then_else ...)) wrapper and the "yes" arm were
; missing in this copy (same defect as generic_sched above in the original);
; restored: use the generic VFP timing unless the tuned core has its own.
334 (define_attr "generic_vfp" "yes,no"
335 (const (if_then_else
336 (and (eq_attr "fpu" "vfp")
337 (eq_attr "tune" "!arm1020e,arm1022e"))
338 (const_string "yes")
339 (const_string "no"))))
341 (include "arm-generic.md")
342 (include "arm926ejs.md")
343 (include "arm1020e.md")
344 (include "arm1026ejs.md")
345 (include "arm1136jfs.md")
348 ;;---------------------------------------------------------------------------
353 ;; Note: For DImode insns, there is normally no reason why operands should
354 ;; not be in the same register, what we don't want is for something being
355 ;; written to partially overlap something that is an input.
356 ;; Cirrus 64bit additions should not be split because we have a native
357 ;; 64bit addition instructions.
359 (define_expand "adddi3"
361 [(set (match_operand:DI 0 "s_register_operand" "")
362 (plus:DI (match_operand:DI 1 "s_register_operand" "")
363 (match_operand:DI 2 "s_register_operand" "")))
364 (clobber (reg:CC CC_REGNUM))])]
367 if (TARGET_HARD_FLOAT && TARGET_MAVERICK)
369 if (!cirrus_fp_register (operands[0], DImode))
370 operands[0] = force_reg (DImode, operands[0]);
371 if (!cirrus_fp_register (operands[1], DImode))
372 operands[1] = force_reg (DImode, operands[1]);
373 emit_insn (gen_cirrus_adddi3 (operands[0], operands[1], operands[2]));
379 if (GET_CODE (operands[1]) != REG)
380 operands[1] = force_reg (SImode, operands[1]);
381 if (GET_CODE (operands[2]) != REG)
382 operands[2] = force_reg (SImode, operands[2]);
387 (define_insn "*thumb1_adddi3"
388 [(set (match_operand:DI 0 "register_operand" "=l")
389 (plus:DI (match_operand:DI 1 "register_operand" "%0")
390 (match_operand:DI 2 "register_operand" "l")))
391 (clobber (reg:CC CC_REGNUM))
394 "add\\t%Q0, %Q0, %Q2\;adc\\t%R0, %R0, %R2"
395 [(set_attr "length" "4")]
398 (define_insn_and_split "*arm_adddi3"
399 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
400 (plus:DI (match_operand:DI 1 "s_register_operand" "%0, 0")
401 (match_operand:DI 2 "s_register_operand" "r, 0")))
402 (clobber (reg:CC CC_REGNUM))]
403 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)"
405 "TARGET_32BIT && reload_completed"
406 [(parallel [(set (reg:CC_C CC_REGNUM)
407 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
409 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
410 (set (match_dup 3) (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
411 (plus:SI (match_dup 4) (match_dup 5))))]
414 operands[3] = gen_highpart (SImode, operands[0]);
415 operands[0] = gen_lowpart (SImode, operands[0]);
416 operands[4] = gen_highpart (SImode, operands[1]);
417 operands[1] = gen_lowpart (SImode, operands[1]);
418 operands[5] = gen_highpart (SImode, operands[2]);
419 operands[2] = gen_lowpart (SImode, operands[2]);
421 [(set_attr "conds" "clob")
422 (set_attr "length" "8")]
425 (define_insn_and_split "*adddi_sesidi_di"
426 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
427 (plus:DI (sign_extend:DI
428 (match_operand:SI 2 "s_register_operand" "r,r"))
429 (match_operand:DI 1 "s_register_operand" "r,0")))
430 (clobber (reg:CC CC_REGNUM))]
431 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)"
433 "TARGET_32BIT && reload_completed"
434 [(parallel [(set (reg:CC_C CC_REGNUM)
435 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
437 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
438 (set (match_dup 3) (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
439 (plus:SI (ashiftrt:SI (match_dup 2)
444 operands[3] = gen_highpart (SImode, operands[0]);
445 operands[0] = gen_lowpart (SImode, operands[0]);
446 operands[4] = gen_highpart (SImode, operands[1]);
447 operands[1] = gen_lowpart (SImode, operands[1]);
448 operands[2] = gen_lowpart (SImode, operands[2]);
450 [(set_attr "conds" "clob")
451 (set_attr "length" "8")]
454 (define_insn_and_split "*adddi_zesidi_di"
455 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
456 (plus:DI (zero_extend:DI
457 (match_operand:SI 2 "s_register_operand" "r,r"))
458 (match_operand:DI 1 "s_register_operand" "r,0")))
459 (clobber (reg:CC CC_REGNUM))]
460 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)"
462 "TARGET_32BIT && reload_completed"
463 [(parallel [(set (reg:CC_C CC_REGNUM)
464 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
466 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
467 (set (match_dup 3) (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
468 (plus:SI (match_dup 4) (const_int 0))))]
471 operands[3] = gen_highpart (SImode, operands[0]);
472 operands[0] = gen_lowpart (SImode, operands[0]);
473 operands[4] = gen_highpart (SImode, operands[1]);
474 operands[1] = gen_lowpart (SImode, operands[1]);
475 operands[2] = gen_lowpart (SImode, operands[2]);
477 [(set_attr "conds" "clob")
478 (set_attr "length" "8")]
481 (define_expand "addsi3"
482 [(set (match_operand:SI 0 "s_register_operand" "")
483 (plus:SI (match_operand:SI 1 "s_register_operand" "")
484 (match_operand:SI 2 "reg_or_int_operand" "")))]
487 if (TARGET_32BIT && GET_CODE (operands[2]) == CONST_INT)
489 arm_split_constant (PLUS, SImode, NULL_RTX,
490 INTVAL (operands[2]), operands[0], operands[1],
491 optimize && !no_new_pseudos);
497 ; If there is a scratch available, this will be faster than synthesizing the
500 [(match_scratch:SI 3 "r")
501 (set (match_operand:SI 0 "arm_general_register_operand" "")
502 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
503 (match_operand:SI 2 "const_int_operand" "")))]
505 !(const_ok_for_arm (INTVAL (operands[2]))
506 || const_ok_for_arm (-INTVAL (operands[2])))
507 && const_ok_for_arm (~INTVAL (operands[2]))"
508 [(set (match_dup 3) (match_dup 2))
509 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))]
513 (define_insn_and_split "*arm_addsi3"
514 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
515 (plus:SI (match_operand:SI 1 "s_register_operand" "%r,r,r")
516 (match_operand:SI 2 "reg_or_int_operand" "rI,L,?n")))]
523 GET_CODE (operands[2]) == CONST_INT
524 && !(const_ok_for_arm (INTVAL (operands[2]))
525 || const_ok_for_arm (-INTVAL (operands[2])))"
526 [(clobber (const_int 0))]
528 arm_split_constant (PLUS, SImode, curr_insn,
529 INTVAL (operands[2]), operands[0],
533 [(set_attr "length" "4,4,16")
534 (set_attr "predicable" "yes")]
537 ;; Register group 'k' is a single register group containing only the stack
538 ;; register. Trying to reload it will always fail catastrophically,
539 ;; so never allow those alternatives to match if reloading is needed.
541 (define_insn "*thumb1_addsi3"
542 [(set (match_operand:SI 0 "register_operand" "=l,l,l,*r,*h,l,!k")
543 (plus:SI (match_operand:SI 1 "register_operand" "%0,0,l,*0,*0,!k,!k")
544 (match_operand:SI 2 "nonmemory_operand" "I,J,lL,*h,*r,!M,!O")))]
547 static const char * const asms[] =
549 \"add\\t%0, %0, %2\",
550 \"sub\\t%0, %0, #%n2\",
551 \"add\\t%0, %1, %2\",
552 \"add\\t%0, %0, %2\",
553 \"add\\t%0, %0, %2\",
554 \"add\\t%0, %1, %2\",
557 if ((which_alternative == 2 || which_alternative == 6)
558 && GET_CODE (operands[2]) == CONST_INT
559 && INTVAL (operands[2]) < 0)
560 return \"sub\\t%0, %1, #%n2\";
561 return asms[which_alternative];
563 [(set_attr "length" "2")]
566 ;; Reloading and elimination of the frame pointer can
567 ;; sometimes cause this optimization to be missed.
569 [(set (match_operand:SI 0 "arm_general_register_operand" "")
570 (match_operand:SI 1 "const_int_operand" ""))
572 (plus:SI (match_dup 0) (reg:SI SP_REGNUM)))]
574 && (unsigned HOST_WIDE_INT) (INTVAL (operands[1])) < 1024
575 && (INTVAL (operands[1]) & 3) == 0"
576 [(set (match_dup 0) (plus:SI (reg:SI SP_REGNUM) (match_dup 1)))]
580 ;; ??? Make Thumb-2 variants which prefer low regs
581 (define_insn "*addsi3_compare0"
582 [(set (reg:CC_NOOV CC_REGNUM)
584 (plus:SI (match_operand:SI 1 "s_register_operand" "r, r")
585 (match_operand:SI 2 "arm_add_operand" "rI,L"))
587 (set (match_operand:SI 0 "s_register_operand" "=r,r")
588 (plus:SI (match_dup 1) (match_dup 2)))]
592 sub%.\\t%0, %1, #%n2"
593 [(set_attr "conds" "set")]
596 (define_insn "*addsi3_compare0_scratch"
597 [(set (reg:CC_NOOV CC_REGNUM)
599 (plus:SI (match_operand:SI 0 "s_register_operand" "r, r")
600 (match_operand:SI 1 "arm_add_operand" "rI,L"))
606 [(set_attr "conds" "set")]
609 (define_insn "*compare_negsi_si"
610 [(set (reg:CC_Z CC_REGNUM)
612 (neg:SI (match_operand:SI 0 "s_register_operand" "r"))
613 (match_operand:SI 1 "s_register_operand" "r")))]
616 [(set_attr "conds" "set")]
619 ;; This is the canonicalization of addsi3_compare0_for_combiner when the
620 ;; addend is a constant.
621 (define_insn "*cmpsi2_addneg"
622 [(set (reg:CC CC_REGNUM)
624 (match_operand:SI 1 "s_register_operand" "r,r")
625 (match_operand:SI 2 "arm_addimm_operand" "I,L")))
626 (set (match_operand:SI 0 "s_register_operand" "=r,r")
627 (plus:SI (match_dup 1)
628 (match_operand:SI 3 "arm_addimm_operand" "L,I")))]
629 "TARGET_32BIT && INTVAL (operands[2]) == -INTVAL (operands[3])"
632 add%.\\t%0, %1, #%n2"
633 [(set_attr "conds" "set")]
636 ;; Convert the sequence
638 ;; cmn rd, #1 (equivalent to cmp rd, #-1)
642 ;; bcs dest ((unsigned)rn >= 1)
643 ;; similarly for the beq variant using bcc.
644 ;; This is a common looping idiom (while (n--))
646 [(set (match_operand:SI 0 "arm_general_register_operand" "")
647 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
649 (set (match_operand 2 "cc_register" "")
650 (compare (match_dup 0) (const_int -1)))
652 (if_then_else (match_operator 3 "equality_operator"
653 [(match_dup 2) (const_int 0)])
654 (match_operand 4 "" "")
655 (match_operand 5 "" "")))]
656 "TARGET_32BIT && peep2_reg_dead_p (3, operands[2])"
660 (match_dup 1) (const_int 1)))
661 (set (match_dup 0) (plus:SI (match_dup 1) (const_int -1)))])
663 (if_then_else (match_op_dup 3 [(match_dup 2) (const_int 0)])
666 "operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
667 operands[3] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
670 operands[2], const0_rtx);"
673 ;; The next four insns work because they compare the result with one of
674 ;; the operands, and we know that the use of the condition code is
675 ;; either GEU or LTU, so we can use the carry flag from the addition
676 ;; instead of doing the compare a second time.
677 (define_insn "*addsi3_compare_op1"
678 [(set (reg:CC_C CC_REGNUM)
680 (plus:SI (match_operand:SI 1 "s_register_operand" "r,r")
681 (match_operand:SI 2 "arm_add_operand" "rI,L"))
683 (set (match_operand:SI 0 "s_register_operand" "=r,r")
684 (plus:SI (match_dup 1) (match_dup 2)))]
688 sub%.\\t%0, %1, #%n2"
689 [(set_attr "conds" "set")]
692 (define_insn "*addsi3_compare_op2"
693 [(set (reg:CC_C CC_REGNUM)
695 (plus:SI (match_operand:SI 1 "s_register_operand" "r,r")
696 (match_operand:SI 2 "arm_add_operand" "rI,L"))
698 (set (match_operand:SI 0 "s_register_operand" "=r,r")
699 (plus:SI (match_dup 1) (match_dup 2)))]
703 sub%.\\t%0, %1, #%n2"
704 [(set_attr "conds" "set")]
707 (define_insn "*compare_addsi2_op0"
708 [(set (reg:CC_C CC_REGNUM)
710 (plus:SI (match_operand:SI 0 "s_register_operand" "r,r")
711 (match_operand:SI 1 "arm_add_operand" "rI,L"))
717 [(set_attr "conds" "set")]
720 (define_insn "*compare_addsi2_op1"
721 [(set (reg:CC_C CC_REGNUM)
723 (plus:SI (match_operand:SI 0 "s_register_operand" "r,r")
724 (match_operand:SI 1 "arm_add_operand" "rI,L"))
730 [(set_attr "conds" "set")]
733 (define_insn "*addsi3_carryin"
734 [(set (match_operand:SI 0 "s_register_operand" "=r")
735 (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
736 (plus:SI (match_operand:SI 1 "s_register_operand" "r")
737 (match_operand:SI 2 "arm_rhs_operand" "rI"))))]
740 [(set_attr "conds" "use")]
743 (define_insn "*addsi3_carryin_shift"
744 [(set (match_operand:SI 0 "s_register_operand" "=r")
745 (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
747 (match_operator:SI 2 "shift_operator"
748 [(match_operand:SI 3 "s_register_operand" "r")
749 (match_operand:SI 4 "reg_or_int_operand" "rM")])
750 (match_operand:SI 1 "s_register_operand" "r"))))]
752 "adc%?\\t%0, %1, %3%S2"
753 [(set_attr "conds" "use")
754 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
755 (const_string "alu_shift")
756 (const_string "alu_shift_reg")))]
759 (define_insn "*addsi3_carryin_alt1"
760 [(set (match_operand:SI 0 "s_register_operand" "=r")
761 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "r")
762 (match_operand:SI 2 "arm_rhs_operand" "rI"))
763 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
766 [(set_attr "conds" "use")]
769 (define_insn "*addsi3_carryin_alt2"
770 [(set (match_operand:SI 0 "s_register_operand" "=r")
771 (plus:SI (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
772 (match_operand:SI 1 "s_register_operand" "r"))
773 (match_operand:SI 2 "arm_rhs_operand" "rI")))]
776 [(set_attr "conds" "use")]
779 (define_insn "*addsi3_carryin_alt3"
780 [(set (match_operand:SI 0 "s_register_operand" "=r")
781 (plus:SI (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
782 (match_operand:SI 2 "arm_rhs_operand" "rI"))
783 (match_operand:SI 1 "s_register_operand" "r")))]
786 [(set_attr "conds" "use")]
789 (define_expand "incscc"
790 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
791 (plus:SI (match_operator:SI 2 "arm_comparison_operator"
792 [(match_operand:CC 3 "cc_register" "") (const_int 0)])
793 (match_operand:SI 1 "s_register_operand" "0,?r")))]
798 (define_insn "*arm_incscc"
799 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
800 (plus:SI (match_operator:SI 2 "arm_comparison_operator"
801 [(match_operand:CC 3 "cc_register" "") (const_int 0)])
802 (match_operand:SI 1 "s_register_operand" "0,?r")))]
806 mov%D2\\t%0, %1\;add%d2\\t%0, %1, #1"
807 [(set_attr "conds" "use")
808 (set_attr "length" "4,8")]
811 ; transform ((x << y) - 1) to ~(~(x-1) << y) Where X is a constant.
813 [(set (match_operand:SI 0 "s_register_operand" "")
814 (plus:SI (ashift:SI (match_operand:SI 1 "const_int_operand" "")
815 (match_operand:SI 2 "s_register_operand" ""))
817 (clobber (match_operand:SI 3 "s_register_operand" ""))]
819 [(set (match_dup 3) (match_dup 1))
820 (set (match_dup 0) (not:SI (ashift:SI (match_dup 3) (match_dup 2))))]
822 operands[1] = GEN_INT (~(INTVAL (operands[1]) - 1));
825 (define_expand "addsf3"
826 [(set (match_operand:SF 0 "s_register_operand" "")
827 (plus:SF (match_operand:SF 1 "s_register_operand" "")
828 (match_operand:SF 2 "arm_float_add_operand" "")))]
829 "TARGET_32BIT && TARGET_HARD_FLOAT"
832 && !cirrus_fp_register (operands[2], SFmode))
833 operands[2] = force_reg (SFmode, operands[2]);
836 (define_expand "adddf3"
837 [(set (match_operand:DF 0 "s_register_operand" "")
838 (plus:DF (match_operand:DF 1 "s_register_operand" "")
839 (match_operand:DF 2 "arm_float_add_operand" "")))]
840 "TARGET_32BIT && TARGET_HARD_FLOAT"
843 && !cirrus_fp_register (operands[2], DFmode))
844 operands[2] = force_reg (DFmode, operands[2]);
847 (define_expand "subdi3"
849 [(set (match_operand:DI 0 "s_register_operand" "")
850 (minus:DI (match_operand:DI 1 "s_register_operand" "")
851 (match_operand:DI 2 "s_register_operand" "")))
852 (clobber (reg:CC CC_REGNUM))])]
855 if (TARGET_HARD_FLOAT && TARGET_MAVERICK
857 && cirrus_fp_register (operands[0], DImode)
858 && cirrus_fp_register (operands[1], DImode))
860 emit_insn (gen_cirrus_subdi3 (operands[0], operands[1], operands[2]));
866 if (GET_CODE (operands[1]) != REG)
867 operands[1] = force_reg (SImode, operands[1]);
868 if (GET_CODE (operands[2]) != REG)
869 operands[2] = force_reg (SImode, operands[2]);
874 (define_insn "*arm_subdi3"
875 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r,&r")
876 (minus:DI (match_operand:DI 1 "s_register_operand" "0,r,0")
877 (match_operand:DI 2 "s_register_operand" "r,0,0")))
878 (clobber (reg:CC CC_REGNUM))]
880 "subs\\t%Q0, %Q1, %Q2\;sbc\\t%R0, %R1, %R2"
881 [(set_attr "conds" "clob")
882 (set_attr "length" "8")]
885 (define_insn "*thumb_subdi3"
886 [(set (match_operand:DI 0 "register_operand" "=l")
887 (minus:DI (match_operand:DI 1 "register_operand" "0")
888 (match_operand:DI 2 "register_operand" "l")))
889 (clobber (reg:CC CC_REGNUM))]
891 "sub\\t%Q0, %Q0, %Q2\;sbc\\t%R0, %R0, %R2"
892 [(set_attr "length" "4")]
895 (define_insn "*subdi_di_zesidi"
896 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
897 (minus:DI (match_operand:DI 1 "s_register_operand" "?r,0")
899 (match_operand:SI 2 "s_register_operand" "r,r"))))
900 (clobber (reg:CC CC_REGNUM))]
902 "subs\\t%Q0, %Q1, %2\;sbc\\t%R0, %R1, #0"
903 [(set_attr "conds" "clob")
904 (set_attr "length" "8")]
907 (define_insn "*subdi_di_sesidi"
908 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
909 (minus:DI (match_operand:DI 1 "s_register_operand" "r,0")
911 (match_operand:SI 2 "s_register_operand" "r,r"))))
912 (clobber (reg:CC CC_REGNUM))]
914 "subs\\t%Q0, %Q1, %2\;sbc\\t%R0, %R1, %2, asr #31"
915 [(set_attr "conds" "clob")
916 (set_attr "length" "8")]
919 (define_insn "*subdi_zesidi_di"
920 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
921 (minus:DI (zero_extend:DI
922 (match_operand:SI 2 "s_register_operand" "r,r"))
923 (match_operand:DI 1 "s_register_operand" "?r,0")))
924 (clobber (reg:CC CC_REGNUM))]
926 "rsbs\\t%Q0, %Q1, %2\;rsc\\t%R0, %R1, #0"
927 [(set_attr "conds" "clob")
928 (set_attr "length" "8")]
931 (define_insn "*subdi_sesidi_di"
932 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
933 (minus:DI (sign_extend:DI
934 (match_operand:SI 2 "s_register_operand" "r,r"))
935 (match_operand:DI 1 "s_register_operand" "?r,0")))
936 (clobber (reg:CC CC_REGNUM))]
938 "rsbs\\t%Q0, %Q1, %2\;rsc\\t%R0, %R1, %2, asr #31"
939 [(set_attr "conds" "clob")
940 (set_attr "length" "8")]
943 (define_insn "*subdi_zesidi_zesidi"
944 [(set (match_operand:DI 0 "s_register_operand" "=r")
945 (minus:DI (zero_extend:DI
946 (match_operand:SI 1 "s_register_operand" "r"))
948 (match_operand:SI 2 "s_register_operand" "r"))))
949 (clobber (reg:CC CC_REGNUM))]
951 "subs\\t%Q0, %1, %2\;sbc\\t%R0, %1, %1"
952 [(set_attr "conds" "clob")
953 (set_attr "length" "8")]
956 (define_expand "subsi3"
957 [(set (match_operand:SI 0 "s_register_operand" "")
958 (minus:SI (match_operand:SI 1 "reg_or_int_operand" "")
959 (match_operand:SI 2 "s_register_operand" "")))]
962 if (GET_CODE (operands[1]) == CONST_INT)
966 arm_split_constant (MINUS, SImode, NULL_RTX,
967 INTVAL (operands[1]), operands[0],
968 operands[2], optimize && !no_new_pseudos);
971 else /* TARGET_THUMB1 */
972 operands[1] = force_reg (SImode, operands[1]);
977 (define_insn "*thumb1_subsi3_insn"
978 [(set (match_operand:SI 0 "register_operand" "=l")
979 (minus:SI (match_operand:SI 1 "register_operand" "l")
980 (match_operand:SI 2 "register_operand" "l")))]
983 [(set_attr "length" "2")]
986 ; ??? Check Thumb-2 split length
987 (define_insn_and_split "*arm_subsi3_insn"
988 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
989 (minus:SI (match_operand:SI 1 "reg_or_int_operand" "rI,?n")
990 (match_operand:SI 2 "s_register_operand" "r,r")))]
996 && GET_CODE (operands[1]) == CONST_INT
997 && !const_ok_for_arm (INTVAL (operands[1]))"
998 [(clobber (const_int 0))]
1000 arm_split_constant (MINUS, SImode, curr_insn,
1001 INTVAL (operands[1]), operands[0], operands[2], 0);
1004 [(set_attr "length" "4,16")
1005 (set_attr "predicable" "yes")]
1009 [(match_scratch:SI 3 "r")
1010 (set (match_operand:SI 0 "arm_general_register_operand" "")
1011 (minus:SI (match_operand:SI 1 "const_int_operand" "")
1012 (match_operand:SI 2 "arm_general_register_operand" "")))]
1014 && !const_ok_for_arm (INTVAL (operands[1]))
1015 && const_ok_for_arm (~INTVAL (operands[1]))"
1016 [(set (match_dup 3) (match_dup 1))
1017 (set (match_dup 0) (minus:SI (match_dup 3) (match_dup 2)))]
1021 (define_insn "*subsi3_compare0"
1022 [(set (reg:CC_NOOV CC_REGNUM)
1024 (minus:SI (match_operand:SI 1 "arm_rhs_operand" "r,I")
1025 (match_operand:SI 2 "arm_rhs_operand" "rI,r"))
1027 (set (match_operand:SI 0 "s_register_operand" "=r,r")
1028 (minus:SI (match_dup 1) (match_dup 2)))]
1033 [(set_attr "conds" "set")]
1036 (define_expand "decscc"
1037 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1038 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
1039 (match_operator:SI 2 "arm_comparison_operator"
1040 [(match_operand 3 "cc_register" "") (const_int 0)])))]
1045 (define_insn "*arm_decscc"
1046 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1047 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
1048 (match_operator:SI 2 "arm_comparison_operator"
1049 [(match_operand 3 "cc_register" "") (const_int 0)])))]
1053 mov%D2\\t%0, %1\;sub%d2\\t%0, %1, #1"
1054 [(set_attr "conds" "use")
1055 (set_attr "length" "*,8")]
1058 (define_expand "subsf3"
1059 [(set (match_operand:SF 0 "s_register_operand" "")
1060 (minus:SF (match_operand:SF 1 "arm_float_rhs_operand" "")
1061 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1062 "TARGET_32BIT && TARGET_HARD_FLOAT"
1064 if (TARGET_MAVERICK)
1066 if (!cirrus_fp_register (operands[1], SFmode))
1067 operands[1] = force_reg (SFmode, operands[1]);
1068 if (!cirrus_fp_register (operands[2], SFmode))
1069 operands[2] = force_reg (SFmode, operands[2]);
1073 (define_expand "subdf3"
1074 [(set (match_operand:DF 0 "s_register_operand" "")
1075 (minus:DF (match_operand:DF 1 "arm_float_rhs_operand" "")
1076 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1077 "TARGET_32BIT && TARGET_HARD_FLOAT"
1079 if (TARGET_MAVERICK)
1081 if (!cirrus_fp_register (operands[1], DFmode))
1082 operands[1] = force_reg (DFmode, operands[1]);
1083 if (!cirrus_fp_register (operands[2], DFmode))
1084 operands[2] = force_reg (DFmode, operands[2]);
1089 ;; Multiplication insns
1091 (define_expand "mulsi3"
1092 [(set (match_operand:SI 0 "s_register_operand" "")
1093 (mult:SI (match_operand:SI 2 "s_register_operand" "")
1094 (match_operand:SI 1 "s_register_operand" "")))]
1099 ;; Use `&' and then `0' to prevent the operands 0 and 1 being the same
;; 32-bit multiply for 32-bit targets without the v6 MUL encoding
;; (!arm_arch6).  The '=&r' earlyclobber on operand 0 plus the '0'
;; second alternative for operand 1 keep the destination from matching
;; operand 1, as required on these cores (see the '&'/'0' comment above).
;; NOTE(review): the closing ')' of this define_insn is missing from
;; this extract -- confirm against the full file.
1100 (define_insn "*arm_mulsi3"
1101 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1102 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r")
1103 (match_operand:SI 1 "s_register_operand" "%?r,0")))]
1104 "TARGET_32BIT && !arm_arch6"
1105 "mul%?\\t%0, %2, %1"
1106 [(set_attr "insn" "mul")
1107 (set_attr "predicable" "yes")]
1110 (define_insn "*arm_mulsi3_v6"
1111 [(set (match_operand:SI 0 "s_register_operand" "=r")
1112 (mult:SI (match_operand:SI 1 "s_register_operand" "r")
1113 (match_operand:SI 2 "s_register_operand" "r")))]
1114 "TARGET_32BIT && arm_arch6"
1115 "mul%?\\t%0, %1, %2"
1116 [(set_attr "insn" "mul")
1117 (set_attr "predicable" "yes")]
1120 ; Unfortunately with the Thumb the '&'/'0' trick can fail when operands
1121 ; 1 and 2 are the same, because reload will make operand 0 match
1122 ; operand 1 without realizing that this conflicts with operand 2. We fix
1123 ; this by adding another alternative to match this case, and then `reload'
1124 ; it ourselves. This alternative must come first.
1125 (define_insn "*thumb_mulsi3"
1126 [(set (match_operand:SI 0 "register_operand" "=&l,&l,&l")
1127 (mult:SI (match_operand:SI 1 "register_operand" "%l,*h,0")
1128 (match_operand:SI 2 "register_operand" "l,l,l")))]
1129 "TARGET_THUMB1 && !arm_arch6"
1131 if (which_alternative < 2)
1132 return \"mov\\t%0, %1\;mul\\t%0, %2\";
1134 return \"mul\\t%0, %2\";
1136 [(set_attr "length" "4,4,2")
1137 (set_attr "insn" "mul")]
1140 (define_insn "*thumb_mulsi3_v6"
1141 [(set (match_operand:SI 0 "register_operand" "=l,l,l")
1142 (mult:SI (match_operand:SI 1 "register_operand" "0,l,0")
1143 (match_operand:SI 2 "register_operand" "l,0,0")))]
1144 "TARGET_THUMB1 && arm_arch6"
1149 [(set_attr "length" "2")
1150 (set_attr "insn" "mul")]
1153 (define_insn "*mulsi3_compare0"
1154 [(set (reg:CC_NOOV CC_REGNUM)
1155 (compare:CC_NOOV (mult:SI
1156 (match_operand:SI 2 "s_register_operand" "r,r")
1157 (match_operand:SI 1 "s_register_operand" "%?r,0"))
1159 (set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1160 (mult:SI (match_dup 2) (match_dup 1)))]
1161 "TARGET_ARM && !arm_arch6"
1162 "mul%.\\t%0, %2, %1"
1163 [(set_attr "conds" "set")
1164 (set_attr "insn" "muls")]
1167 (define_insn "*mulsi3_compare0_v6"
1168 [(set (reg:CC_NOOV CC_REGNUM)
1169 (compare:CC_NOOV (mult:SI
1170 (match_operand:SI 2 "s_register_operand" "r")
1171 (match_operand:SI 1 "s_register_operand" "r"))
1173 (set (match_operand:SI 0 "s_register_operand" "=r")
1174 (mult:SI (match_dup 2) (match_dup 1)))]
1175 "TARGET_ARM && arm_arch6 && optimize_size"
1176 "mul%.\\t%0, %2, %1"
1177 [(set_attr "conds" "set")
1178 (set_attr "insn" "muls")]
1181 (define_insn "*mulsi_compare0_scratch"
1182 [(set (reg:CC_NOOV CC_REGNUM)
1183 (compare:CC_NOOV (mult:SI
1184 (match_operand:SI 2 "s_register_operand" "r,r")
1185 (match_operand:SI 1 "s_register_operand" "%?r,0"))
1187 (clobber (match_scratch:SI 0 "=&r,&r"))]
1188 "TARGET_ARM && !arm_arch6"
1189 "mul%.\\t%0, %2, %1"
1190 [(set_attr "conds" "set")
1191 (set_attr "insn" "muls")]
1194 (define_insn "*mulsi_compare0_scratch_v6"
1195 [(set (reg:CC_NOOV CC_REGNUM)
1196 (compare:CC_NOOV (mult:SI
1197 (match_operand:SI 2 "s_register_operand" "r")
1198 (match_operand:SI 1 "s_register_operand" "r"))
1200 (clobber (match_scratch:SI 0 "=r"))]
1201 "TARGET_ARM && arm_arch6 && optimize_size"
1202 "mul%.\\t%0, %2, %1"
1203 [(set_attr "conds" "set")
1204 (set_attr "insn" "muls")]
1207 ;; Unnamed templates to match MLA instruction.
1209 (define_insn "*mulsi3addsi"
1210 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
1212 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1213 (match_operand:SI 1 "s_register_operand" "%r,0,r,0"))
1214 (match_operand:SI 3 "s_register_operand" "?r,r,0,0")))]
1215 "TARGET_32BIT && !arm_arch6"
1216 "mla%?\\t%0, %2, %1, %3"
1217 [(set_attr "insn" "mla")
1218 (set_attr "predicable" "yes")]
1221 (define_insn "*mulsi3addsi_v6"
1222 [(set (match_operand:SI 0 "s_register_operand" "=r")
1224 (mult:SI (match_operand:SI 2 "s_register_operand" "r")
1225 (match_operand:SI 1 "s_register_operand" "r"))
1226 (match_operand:SI 3 "s_register_operand" "r")))]
1227 "TARGET_32BIT && arm_arch6"
1228 "mla%?\\t%0, %2, %1, %3"
1229 [(set_attr "insn" "mla")
1230 (set_attr "predicable" "yes")]
1233 (define_insn "*mulsi3addsi_compare0"
1234 [(set (reg:CC_NOOV CC_REGNUM)
1237 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1238 (match_operand:SI 1 "s_register_operand" "%r,0,r,0"))
1239 (match_operand:SI 3 "s_register_operand" "?r,r,0,0"))
1241 (set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
1242 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
1244 "TARGET_ARM && arm_arch6"
1245 "mla%.\\t%0, %2, %1, %3"
1246 [(set_attr "conds" "set")
1247 (set_attr "insn" "mlas")]
1250 (define_insn "*mulsi3addsi_compare0_v6"
1251 [(set (reg:CC_NOOV CC_REGNUM)
1254 (match_operand:SI 2 "s_register_operand" "r")
1255 (match_operand:SI 1 "s_register_operand" "r"))
1256 (match_operand:SI 3 "s_register_operand" "r"))
1258 (set (match_operand:SI 0 "s_register_operand" "=r")
1259 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
1261 "TARGET_ARM && arm_arch6 && optimize_size"
1262 "mla%.\\t%0, %2, %1, %3"
1263 [(set_attr "conds" "set")
1264 (set_attr "insn" "mlas")]
1267 (define_insn "*mulsi3addsi_compare0_scratch"
1268 [(set (reg:CC_NOOV CC_REGNUM)
1271 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1272 (match_operand:SI 1 "s_register_operand" "%r,0,r,0"))
1273 (match_operand:SI 3 "s_register_operand" "?r,r,0,0"))
1275 (clobber (match_scratch:SI 0 "=&r,&r,&r,&r"))]
1276 "TARGET_ARM && !arm_arch6"
1277 "mla%.\\t%0, %2, %1, %3"
1278 [(set_attr "conds" "set")
1279 (set_attr "insn" "mlas")]
1282 (define_insn "*mulsi3addsi_compare0_scratch_v6"
1283 [(set (reg:CC_NOOV CC_REGNUM)
1286 (match_operand:SI 2 "s_register_operand" "r")
1287 (match_operand:SI 1 "s_register_operand" "r"))
1288 (match_operand:SI 3 "s_register_operand" "r"))
1290 (clobber (match_scratch:SI 0 "=r"))]
1291 "TARGET_ARM && arm_arch6 && optimize_size"
1292 "mla%.\\t%0, %2, %1, %3"
1293 [(set_attr "conds" "set")
1294 (set_attr "insn" "mlas")]
1297 ;; Unnamed template to match long long multiply-accumulate (smlal)
1299 (define_insn "*mulsidi3adddi"
1300 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1303 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "%r"))
1304 (sign_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1305 (match_operand:DI 1 "s_register_operand" "0")))]
1306 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1307 "smlal%?\\t%Q0, %R0, %3, %2"
1308 [(set_attr "insn" "smlal")
1309 (set_attr "predicable" "yes")]
1312 (define_insn "*mulsidi3adddi_v6"
1313 [(set (match_operand:DI 0 "s_register_operand" "=r")
1316 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))
1317 (sign_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1318 (match_operand:DI 1 "s_register_operand" "0")))]
1319 "TARGET_32BIT && arm_arch6"
1320 "smlal%?\\t%Q0, %R0, %3, %2"
1321 [(set_attr "insn" "smlal")
1322 (set_attr "predicable" "yes")]
;; Named pattern: signed 32x32->64 widening multiply (SMULL) for
;; 32-bit targets with long-multiply support (arm_arch3m) but not v6.
;; %Q0/%R0 select the low/high halves of the DI destination, which is
;; earlyclobbered ('=&r') on these pre-v6 cores.
;; NOTE(review): the '(mult:DI' wrapper line (original 1327) and the
;; closing ')' are missing from this extract -- the two sign_extend
;; operands below are orphaned here; confirm against the full file.
1325 (define_insn "mulsidi3"
1326 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1328 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "%r"))
1329 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1330 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1331 "smull%?\\t%Q0, %R0, %1, %2"
1332 [(set_attr "insn" "smull")
1333 (set_attr "predicable" "yes")]
1336 (define_insn "mulsidi3_v6"
1337 [(set (match_operand:DI 0 "s_register_operand" "=r")
1339 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1340 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1341 "TARGET_32BIT && arm_arch6"
1342 "smull%?\\t%Q0, %R0, %1, %2"
1343 [(set_attr "insn" "smull")
1344 (set_attr "predicable" "yes")]
;; Named pattern: unsigned 32x32->64 widening multiply (UMULL) for
;; 32-bit targets with long-multiply support (arm_arch3m) but not v6.
;; Mirrors mulsidi3 above with zero_extend / "umull".
;; NOTE(review): the '(mult:DI' wrapper line (original 1349) and the
;; closing ')' are missing from this extract -- confirm against the
;; full file.
1347 (define_insn "umulsidi3"
1348 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1350 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "%r"))
1351 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1352 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1353 "umull%?\\t%Q0, %R0, %1, %2"
1354 [(set_attr "insn" "umull")
1355 (set_attr "predicable" "yes")]
1358 (define_insn "umulsidi3_v6"
1359 [(set (match_operand:DI 0 "s_register_operand" "=r")
1361 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1362 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1363 "TARGET_32BIT && arm_arch6"
1364 "umull%?\\t%Q0, %R0, %1, %2"
1365 [(set_attr "insn" "umull")
1366 (set_attr "predicable" "yes")]
1369 ;; Unnamed template to match long long unsigned multiply-accumulate (umlal)
1371 (define_insn "*umulsidi3adddi"
1372 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1375 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "%r"))
1376 (zero_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1377 (match_operand:DI 1 "s_register_operand" "0")))]
1378 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1379 "umlal%?\\t%Q0, %R0, %3, %2"
1380 [(set_attr "insn" "umlal")
1381 (set_attr "predicable" "yes")]
1384 (define_insn "*umulsidi3adddi_v6"
1385 [(set (match_operand:DI 0 "s_register_operand" "=r")
1388 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))
1389 (zero_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1390 (match_operand:DI 1 "s_register_operand" "0")))]
1391 "TARGET_32BIT && arm_arch6"
1392 "umlal%?\\t%Q0, %R0, %3, %2"
1393 [(set_attr "insn" "umlal")
1394 (set_attr "predicable" "yes")]
1397 (define_insn "smulsi3_highpart"
1398 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1402 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "%r,0"))
1403 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r,r")))
1405 (clobber (match_scratch:SI 3 "=&r,&r"))]
1406 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1407 "smull%?\\t%3, %0, %2, %1"
1408 [(set_attr "insn" "smull")
1409 (set_attr "predicable" "yes")]
1412 (define_insn "smulsi3_highpart_v6"
1413 [(set (match_operand:SI 0 "s_register_operand" "=r")
1417 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1418 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r")))
1420 (clobber (match_scratch:SI 3 "=r"))]
1421 "TARGET_32BIT && arm_arch6"
1422 "smull%?\\t%3, %0, %2, %1"
1423 [(set_attr "insn" "smull")
1424 (set_attr "predicable" "yes")]
1427 (define_insn "umulsi3_highpart"
1428 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1432 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "%r,0"))
1433 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r,r")))
1435 (clobber (match_scratch:SI 3 "=&r,&r"))]
1436 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1437 "umull%?\\t%3, %0, %2, %1"
1438 [(set_attr "insn" "umull")
1439 (set_attr "predicable" "yes")]
1442 (define_insn "umulsi3_highpart_v6"
1443 [(set (match_operand:SI 0 "s_register_operand" "=r")
1447 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1448 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r")))
1450 (clobber (match_scratch:SI 3 "=r"))]
1451 "TARGET_32BIT && arm_arch6"
1452 "umull%?\\t%3, %0, %2, %1"
1453 [(set_attr "insn" "umull")
1454 (set_attr "predicable" "yes")]
;; Named pattern: signed 16x16->32 multiply of the bottom halfwords
;; (SMULBB), available when the target has the DSP multiply extension
;; (TARGET_DSP_MULTIPLY).
;; NOTE(review): the '(sign_extend:SI' line wrapping operand 2
;; (original 1461) and the closing ')' are missing from this extract --
;; confirm against the full file.
1457 (define_insn "mulhisi3"
1458 [(set (match_operand:SI 0 "s_register_operand" "=r")
1459 (mult:SI (sign_extend:SI
1460 (match_operand:HI 1 "s_register_operand" "%r"))
1462 (match_operand:HI 2 "s_register_operand" "r"))))]
1463 "TARGET_DSP_MULTIPLY"
1464 "smulbb%?\\t%0, %1, %2"
1465 [(set_attr "insn" "smulxy")
1466 (set_attr "predicable" "yes")]
1469 (define_insn "*mulhisi3tb"
1470 [(set (match_operand:SI 0 "s_register_operand" "=r")
1471 (mult:SI (ashiftrt:SI
1472 (match_operand:SI 1 "s_register_operand" "r")
1475 (match_operand:HI 2 "s_register_operand" "r"))))]
1476 "TARGET_DSP_MULTIPLY"
1477 "smultb%?\\t%0, %1, %2"
1478 [(set_attr "insn" "smulxy")
1479 (set_attr "predicable" "yes")]
1482 (define_insn "*mulhisi3bt"
1483 [(set (match_operand:SI 0 "s_register_operand" "=r")
1484 (mult:SI (sign_extend:SI
1485 (match_operand:HI 1 "s_register_operand" "r"))
1487 (match_operand:SI 2 "s_register_operand" "r")
1489 "TARGET_DSP_MULTIPLY"
1490 "smulbt%?\\t%0, %1, %2"
1491 [(set_attr "insn" "smulxy")
1492 (set_attr "predicable" "yes")]
1495 (define_insn "*mulhisi3tt"
1496 [(set (match_operand:SI 0 "s_register_operand" "=r")
1497 (mult:SI (ashiftrt:SI
1498 (match_operand:SI 1 "s_register_operand" "r")
1501 (match_operand:SI 2 "s_register_operand" "r")
1503 "TARGET_DSP_MULTIPLY"
1504 "smultt%?\\t%0, %1, %2"
1505 [(set_attr "insn" "smulxy")
1506 (set_attr "predicable" "yes")]
1509 (define_insn "*mulhisi3addsi"
1510 [(set (match_operand:SI 0 "s_register_operand" "=r")
1511 (plus:SI (match_operand:SI 1 "s_register_operand" "r")
1512 (mult:SI (sign_extend:SI
1513 (match_operand:HI 2 "s_register_operand" "%r"))
1515 (match_operand:HI 3 "s_register_operand" "r")))))]
1516 "TARGET_DSP_MULTIPLY"
1517 "smlabb%?\\t%0, %2, %3, %1"
1518 [(set_attr "insn" "smlaxy")
1519 (set_attr "predicable" "yes")]
1522 (define_insn "*mulhidi3adddi"
1523 [(set (match_operand:DI 0 "s_register_operand" "=r")
1525 (match_operand:DI 1 "s_register_operand" "0")
1526 (mult:DI (sign_extend:DI
1527 (match_operand:HI 2 "s_register_operand" "%r"))
1529 (match_operand:HI 3 "s_register_operand" "r")))))]
1530 "TARGET_DSP_MULTIPLY"
1531 "smlalbb%?\\t%Q0, %R0, %2, %3"
1532 [(set_attr "insn" "smlalxy")
1533 (set_attr "predicable" "yes")])
1535 (define_expand "mulsf3"
1536 [(set (match_operand:SF 0 "s_register_operand" "")
1537 (mult:SF (match_operand:SF 1 "s_register_operand" "")
1538 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1539 "TARGET_32BIT && TARGET_HARD_FLOAT"
1542 && !cirrus_fp_register (operands[2], SFmode))
1543 operands[2] = force_reg (SFmode, operands[2]);
1546 (define_expand "muldf3"
1547 [(set (match_operand:DF 0 "s_register_operand" "")
1548 (mult:DF (match_operand:DF 1 "s_register_operand" "")
1549 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1550 "TARGET_32BIT && TARGET_HARD_FLOAT"
1553 && !cirrus_fp_register (operands[2], DFmode))
1554 operands[2] = force_reg (DFmode, operands[2]);
1559 (define_expand "divsf3"
1560 [(set (match_operand:SF 0 "s_register_operand" "")
1561 (div:SF (match_operand:SF 1 "arm_float_rhs_operand" "")
1562 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1563 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
1566 (define_expand "divdf3"
1567 [(set (match_operand:DF 0 "s_register_operand" "")
1568 (div:DF (match_operand:DF 1 "arm_float_rhs_operand" "")
1569 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1570 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
1575 (define_expand "modsf3"
1576 [(set (match_operand:SF 0 "s_register_operand" "")
1577 (mod:SF (match_operand:SF 1 "s_register_operand" "")
1578 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1579 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
1582 (define_expand "moddf3"
1583 [(set (match_operand:DF 0 "s_register_operand" "")
1584 (mod:DF (match_operand:DF 1 "s_register_operand" "")
1585 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1586 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
1589 ;; Boolean and,ior,xor insns
1591 ;; Split up double word logical operations
1593 ;; Split up simple DImode logical operations. Simply perform the logical
1594 ;; operation on the upper and lower halves of the registers.
1596 [(set (match_operand:DI 0 "s_register_operand" "")
1597 (match_operator:DI 6 "logical_binary_operator"
1598 [(match_operand:DI 1 "s_register_operand" "")
1599 (match_operand:DI 2 "s_register_operand" "")]))]
1600 "TARGET_32BIT && reload_completed
1601 && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
1602 [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
1603 (set (match_dup 3) (match_op_dup:SI 6 [(match_dup 4) (match_dup 5)]))]
1606 operands[3] = gen_highpart (SImode, operands[0]);
1607 operands[0] = gen_lowpart (SImode, operands[0]);
1608 operands[4] = gen_highpart (SImode, operands[1]);
1609 operands[1] = gen_lowpart (SImode, operands[1]);
1610 operands[5] = gen_highpart (SImode, operands[2]);
1611 operands[2] = gen_lowpart (SImode, operands[2]);
1616 [(set (match_operand:DI 0 "s_register_operand" "")
1617 (match_operator:DI 6 "logical_binary_operator"
1618 [(sign_extend:DI (match_operand:SI 2 "s_register_operand" ""))
1619 (match_operand:DI 1 "s_register_operand" "")]))]
1620 "TARGET_32BIT && reload_completed"
1621 [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
1622 (set (match_dup 3) (match_op_dup:SI 6
1623 [(ashiftrt:SI (match_dup 2) (const_int 31))
1627 operands[3] = gen_highpart (SImode, operands[0]);
1628 operands[0] = gen_lowpart (SImode, operands[0]);
1629 operands[4] = gen_highpart (SImode, operands[1]);
1630 operands[1] = gen_lowpart (SImode, operands[1]);
1631 operands[5] = gen_highpart (SImode, operands[2]);
1632 operands[2] = gen_lowpart (SImode, operands[2]);
1636 ;; The zero extend of operand 2 means we can just copy the high part of
1637 ;; operand1 into operand0.
1639 [(set (match_operand:DI 0 "s_register_operand" "")
1641 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))
1642 (match_operand:DI 1 "s_register_operand" "")))]
1643 "TARGET_32BIT && operands[0] != operands[1] && reload_completed"
1644 [(set (match_dup 0) (ior:SI (match_dup 1) (match_dup 2)))
1645 (set (match_dup 3) (match_dup 4))]
1648 operands[4] = gen_highpart (SImode, operands[1]);
1649 operands[3] = gen_highpart (SImode, operands[0]);
1650 operands[0] = gen_lowpart (SImode, operands[0]);
1651 operands[1] = gen_lowpart (SImode, operands[1]);
1655 ;; The zero extend of operand 2 means we can just copy the high part of
1656 ;; operand1 into operand0.
1658 [(set (match_operand:DI 0 "s_register_operand" "")
1660 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))
1661 (match_operand:DI 1 "s_register_operand" "")))]
1662 "TARGET_32BIT && operands[0] != operands[1] && reload_completed"
1663 [(set (match_dup 0) (xor:SI (match_dup 1) (match_dup 2)))
1664 (set (match_dup 3) (match_dup 4))]
1667 operands[4] = gen_highpart (SImode, operands[1]);
1668 operands[3] = gen_highpart (SImode, operands[0]);
1669 operands[0] = gen_lowpart (SImode, operands[0]);
1670 operands[1] = gen_lowpart (SImode, operands[1]);
;; Named pattern: DImode AND, for 32-bit targets without iWMMXt
;; (which has its own DI patterns).  Length 8 implies this expands to
;; two 4-byte SImode operations; the splitter for DImode logical ops
;; appears earlier in the file.
;; NOTE(review): the output-template line (original 1679, presumably
;; "#") and the closing ')' are missing from this extract -- confirm
;; against the full file.
1674 (define_insn "anddi3"
1675 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1676 (and:DI (match_operand:DI 1 "s_register_operand" "%0,r")
1677 (match_operand:DI 2 "s_register_operand" "r,r")))]
1678 "TARGET_32BIT && ! TARGET_IWMMXT"
1680 [(set_attr "length" "8")]
1683 (define_insn_and_split "*anddi_zesidi_di"
1684 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1685 (and:DI (zero_extend:DI
1686 (match_operand:SI 2 "s_register_operand" "r,r"))
1687 (match_operand:DI 1 "s_register_operand" "?r,0")))]
1690 "TARGET_32BIT && reload_completed"
1691 ; The zero extend of operand 2 clears the high word of the output
1693 [(set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))
1694 (set (match_dup 3) (const_int 0))]
1697 operands[3] = gen_highpart (SImode, operands[0]);
1698 operands[0] = gen_lowpart (SImode, operands[0]);
1699 operands[1] = gen_lowpart (SImode, operands[1]);
1701 [(set_attr "length" "8")]
1704 (define_insn "*anddi_sesdi_di"
1705 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1706 (and:DI (sign_extend:DI
1707 (match_operand:SI 2 "s_register_operand" "r,r"))
1708 (match_operand:DI 1 "s_register_operand" "?r,0")))]
1711 [(set_attr "length" "8")]
1714 (define_expand "andsi3"
1715 [(set (match_operand:SI 0 "s_register_operand" "")
1716 (and:SI (match_operand:SI 1 "s_register_operand" "")
1717 (match_operand:SI 2 "reg_or_int_operand" "")))]
1722 if (GET_CODE (operands[2]) == CONST_INT)
1724 arm_split_constant (AND, SImode, NULL_RTX,
1725 INTVAL (operands[2]), operands[0],
1726 operands[1], optimize && !no_new_pseudos);
1731 else /* TARGET_THUMB1 */
1733 if (GET_CODE (operands[2]) != CONST_INT)
1734 operands[2] = force_reg (SImode, operands[2]);
1739 if (((unsigned HOST_WIDE_INT) ~INTVAL (operands[2])) < 256)
1741 operands[2] = force_reg (SImode,
1742 GEN_INT (~INTVAL (operands[2])));
1744 emit_insn (gen_bicsi3 (operands[0], operands[2], operands[1]));
1749 for (i = 9; i <= 31; i++)
1751 if ((((HOST_WIDE_INT) 1) << i) - 1 == INTVAL (operands[2]))
1753 emit_insn (gen_extzv (operands[0], operands[1], GEN_INT (i),
1757 else if ((((HOST_WIDE_INT) 1) << i) - 1
1758 == ~INTVAL (operands[2]))
1760 rtx shift = GEN_INT (i);
1761 rtx reg = gen_reg_rtx (SImode);
1763 emit_insn (gen_lshrsi3 (reg, operands[1], shift));
1764 emit_insn (gen_ashlsi3 (operands[0], reg, shift));
1770 operands[2] = force_reg (SImode, operands[2]);
1776 ; ??? Check split length for Thumb-2
1777 (define_insn_and_split "*arm_andsi3_insn"
1778 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1779 (and:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
1780 (match_operand:SI 2 "reg_or_int_operand" "rI,K,?n")))]
1784 bic%?\\t%0, %1, #%B2
1787 && GET_CODE (operands[2]) == CONST_INT
1788 && !(const_ok_for_arm (INTVAL (operands[2]))
1789 || const_ok_for_arm (~INTVAL (operands[2])))"
1790 [(clobber (const_int 0))]
1792 arm_split_constant (AND, SImode, curr_insn,
1793 INTVAL (operands[2]), operands[0], operands[1], 0);
1796 [(set_attr "length" "4,4,16")
1797 (set_attr "predicable" "yes")]
1800 (define_insn "*thumb1_andsi3_insn"
1801 [(set (match_operand:SI 0 "register_operand" "=l")
1802 (and:SI (match_operand:SI 1 "register_operand" "%0")
1803 (match_operand:SI 2 "register_operand" "l")))]
1806 [(set_attr "length" "2")]
1809 (define_insn "*andsi3_compare0"
1810 [(set (reg:CC_NOOV CC_REGNUM)
1812 (and:SI (match_operand:SI 1 "s_register_operand" "r,r")
1813 (match_operand:SI 2 "arm_not_operand" "rI,K"))
1815 (set (match_operand:SI 0 "s_register_operand" "=r,r")
1816 (and:SI (match_dup 1) (match_dup 2)))]
1820 bic%.\\t%0, %1, #%B2"
1821 [(set_attr "conds" "set")]
1824 (define_insn "*andsi3_compare0_scratch"
1825 [(set (reg:CC_NOOV CC_REGNUM)
1827 (and:SI (match_operand:SI 0 "s_register_operand" "r,r")
1828 (match_operand:SI 1 "arm_not_operand" "rI,K"))
1830 (clobber (match_scratch:SI 2 "=X,r"))]
1834 bic%.\\t%2, %0, #%B1"
1835 [(set_attr "conds" "set")]
1838 (define_insn "*zeroextractsi_compare0_scratch"
1839 [(set (reg:CC_NOOV CC_REGNUM)
1840 (compare:CC_NOOV (zero_extract:SI
1841 (match_operand:SI 0 "s_register_operand" "r")
1842 (match_operand 1 "const_int_operand" "n")
1843 (match_operand 2 "const_int_operand" "n"))
1846 && (INTVAL (operands[2]) >= 0 && INTVAL (operands[2]) < 32
1847 && INTVAL (operands[1]) > 0
1848 && INTVAL (operands[1]) + (INTVAL (operands[2]) & 1) <= 8
1849 && INTVAL (operands[1]) + INTVAL (operands[2]) <= 32)"
1851 operands[1] = GEN_INT (((1 << INTVAL (operands[1])) - 1)
1852 << INTVAL (operands[2]));
1853 output_asm_insn (\"tst%?\\t%0, %1\", operands);
1856 [(set_attr "conds" "set")]
1859 (define_insn_and_split "*ne_zeroextractsi"
1860 [(set (match_operand:SI 0 "s_register_operand" "=r")
1861 (ne:SI (zero_extract:SI
1862 (match_operand:SI 1 "s_register_operand" "r")
1863 (match_operand:SI 2 "const_int_operand" "n")
1864 (match_operand:SI 3 "const_int_operand" "n"))
1866 (clobber (reg:CC CC_REGNUM))]
1868 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
1869 && INTVAL (operands[2]) > 0
1870 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
1871 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
1874 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
1875 && INTVAL (operands[2]) > 0
1876 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
1877 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
1878 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
1879 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
1881 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
1883 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
1884 (match_dup 0) (const_int 1)))]
1886 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
1887 << INTVAL (operands[3]));
1889 [(set_attr "conds" "clob")
1890 (set (attr "length")
1891 (if_then_else (eq_attr "is_thumb" "yes")
1896 (define_insn_and_split "*ne_zeroextractsi_shifted"
1897 [(set (match_operand:SI 0 "s_register_operand" "=r")
1898 (ne:SI (zero_extract:SI
1899 (match_operand:SI 1 "s_register_operand" "r")
1900 (match_operand:SI 2 "const_int_operand" "n")
1903 (clobber (reg:CC CC_REGNUM))]
1907 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
1908 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
1910 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
1912 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
1913 (match_dup 0) (const_int 1)))]
1915 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
1917 [(set_attr "conds" "clob")
1918 (set_attr "length" "8")]
1921 (define_insn_and_split "*ite_ne_zeroextractsi"
1922 [(set (match_operand:SI 0 "s_register_operand" "=r")
1923 (if_then_else:SI (ne (zero_extract:SI
1924 (match_operand:SI 1 "s_register_operand" "r")
1925 (match_operand:SI 2 "const_int_operand" "n")
1926 (match_operand:SI 3 "const_int_operand" "n"))
1928 (match_operand:SI 4 "arm_not_operand" "rIK")
1930 (clobber (reg:CC CC_REGNUM))]
1932 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
1933 && INTVAL (operands[2]) > 0
1934 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
1935 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
1936 && !reg_overlap_mentioned_p (operands[0], operands[4])"
1939 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
1940 && INTVAL (operands[2]) > 0
1941 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
1942 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
1943 && !reg_overlap_mentioned_p (operands[0], operands[4])"
1944 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
1945 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
1947 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
1949 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
1950 (match_dup 0) (match_dup 4)))]
1952 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
1953 << INTVAL (operands[3]));
1955 [(set_attr "conds" "clob")
1956 (set_attr "length" "8")]
1959 (define_insn_and_split "*ite_ne_zeroextractsi_shifted"
1960 [(set (match_operand:SI 0 "s_register_operand" "=r")
1961 (if_then_else:SI (ne (zero_extract:SI
1962 (match_operand:SI 1 "s_register_operand" "r")
1963 (match_operand:SI 2 "const_int_operand" "n")
1966 (match_operand:SI 3 "arm_not_operand" "rIK")
1968 (clobber (reg:CC CC_REGNUM))]
1969 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
1971 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
1972 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
1973 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
1975 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
1977 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
1978 (match_dup 0) (match_dup 3)))]
1980 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
1982 [(set_attr "conds" "clob")
1983 (set_attr "length" "8")]
1987 [(set (match_operand:SI 0 "s_register_operand" "")
1988 (zero_extract:SI (match_operand:SI 1 "s_register_operand" "")
1989 (match_operand:SI 2 "const_int_operand" "")
1990 (match_operand:SI 3 "const_int_operand" "")))
1991 (clobber (match_operand:SI 4 "s_register_operand" ""))]
1993 [(set (match_dup 4) (ashift:SI (match_dup 1) (match_dup 2)))
1994 (set (match_dup 0) (lshiftrt:SI (match_dup 4) (match_dup 3)))]
1996 HOST_WIDE_INT temp = INTVAL (operands[2]);
1998 operands[2] = GEN_INT (32 - temp - INTVAL (operands[3]));
1999 operands[3] = GEN_INT (32 - temp);
2003 ;; ??? Thumb-2 has bitfield insert/extract instructions that could be used here.
;; Split a shiftable operation whose first input is a zero_extract:
;; shift the field left into scratch operand 6, then apply the
;; operator to the logically-right-shifted scratch.
2005 [(set (match_operand:SI 0 "s_register_operand" "")
2006 (match_operator:SI 1 "shiftable_operator"
2007 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2008 (match_operand:SI 3 "const_int_operand" "")
2009 (match_operand:SI 4 "const_int_operand" ""))
2010 (match_operand:SI 5 "s_register_operand" "")]))
2011 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2013 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2016 [(lshiftrt:SI (match_dup 6) (match_dup 4))
2019 HOST_WIDE_INT temp = INTVAL (operands[3]);
2021 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2022 operands[4] = GEN_INT (32 - temp);
;; Sign extract (extv-style): shift left, then arithmetic shift
;; right, re-using operand 0 as the intermediate.
2027 [(set (match_operand:SI 0 "s_register_operand" "")
2028 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "")
2029 (match_operand:SI 2 "const_int_operand" "")
2030 (match_operand:SI 3 "const_int_operand" "")))]
2032 [(set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))
2033 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (match_dup 3)))]
2035 HOST_WIDE_INT temp = INTVAL (operands[2]);
2037 operands[2] = GEN_INT (32 - temp - INTVAL (operands[3]));
2038 operands[3] = GEN_INT (32 - temp);
;; As the zero_extract split above, but for a sign_extract input:
;; the second shift is arithmetic so the sign of the field is kept.
2043 [(set (match_operand:SI 0 "s_register_operand" "")
2044 (match_operator:SI 1 "shiftable_operator"
2045 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2046 (match_operand:SI 3 "const_int_operand" "")
2047 (match_operand:SI 4 "const_int_operand" ""))
2048 (match_operand:SI 5 "s_register_operand" "")]))
2049 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2051 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2054 [(ashiftrt:SI (match_dup 6) (match_dup 4))
2057 HOST_WIDE_INT temp = INTVAL (operands[3]);
2059 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2060 operands[4] = GEN_INT (32 - temp);
2064 ;;; ??? This pattern is bogus. If operand3 has bits outside the range
2065 ;;; represented by the bitfield, then this will produce incorrect results.
2066 ;;; Somewhere, the value needs to be truncated. On targets like the m68k,
2067 ;;; which have a real bit-field insert instruction, the truncation happens
2068 ;;; in the bit-field insert instruction itself. Since arm does not have a
2069 ;;; bit-field insert instruction, we would have to emit code here to truncate
2070 ;;; the value before we insert. This loses some of the advantage of having
2071 ;;; this insv pattern, so this pattern needs to be reevaluated.
2073 ; ??? Use Thumb-2 bitfield insert/extract instructions
;; insv: insert operand 3 into the WIDTH-bit field of operand 0 that
;; starts at START_BIT.  ARM has no bitfield-insert instruction, so
;; this is open-coded with AND/OR/shift sequences, choosing among
;; several strategies: a mask-and-or when operand 3 is a known
;; constant, two 3-insn shift/rotate tricks when the field sits at
;; either end of the word and the mask is not ORR/BIC-encodable, and
;; a generic mask-both-operands fallback.  See the ??? note above:
;; operand 3 is not truncated to the field width in all paths.
2074 (define_expand "insv"
2075 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "")
2076 (match_operand:SI 1 "general_operand" "")
2077 (match_operand:SI 2 "general_operand" ""))
2078 (match_operand:SI 3 "reg_or_int_operand" ""))]
2082 int start_bit = INTVAL (operands[2]);
2083 int width = INTVAL (operands[1]);
2084 HOST_WIDE_INT mask = (((HOST_WIDE_INT)1) << width) - 1;
2085 rtx target, subtarget;
2087 target = operands[0];
2088 /* Avoid using a subreg as a subtarget, and avoid writing a paradoxical
2089 subreg as the final target. */
2090 if (GET_CODE (target) == SUBREG)
2092 subtarget = gen_reg_rtx (SImode);
2093 if (GET_MODE_SIZE (GET_MODE (SUBREG_REG (target)))
2094 < GET_MODE_SIZE (SImode))
2095 target = SUBREG_REG (target);
2100 if (GET_CODE (operands[3]) == CONST_INT)
2102 /* Since we are inserting a known constant, we may be able to
2103 reduce the number of bits that we have to clear so that
2104 the mask becomes simple. */
2105 /* ??? This code does not check to see if the new mask is actually
2106 simpler. It may not be. */
2107 rtx op1 = gen_reg_rtx (SImode);
2108 /* ??? Truncate operand3 to fit in the bitfield. See comment before
2109 start of this pattern. */
2110 HOST_WIDE_INT op3_value = mask & INTVAL (operands[3]);
2111 HOST_WIDE_INT mask2 = ((mask & ~op3_value) << start_bit);
2113 emit_insn (gen_andsi3 (op1, operands[0],
2114 gen_int_mode (~mask2, SImode)));
2115 emit_insn (gen_iorsi3 (subtarget, op1,
2116 gen_int_mode (op3_value << start_bit, SImode)));
2118 else if (start_bit == 0
2119 && !(const_ok_for_arm (mask)
2120 || const_ok_for_arm (~mask)))
2122 /* A Trick, since we are setting the bottom bits in the word,
2123 we can shift operand[3] up, operand[0] down, OR them together
2124 and rotate the result back again. This takes 3 insns, and
2125 the third might be mergeable into another op. */
2126 /* The shift up copes with the possibility that operand[3] is
2127 wider than the bitfield. */
2128 rtx op0 = gen_reg_rtx (SImode);
2129 rtx op1 = gen_reg_rtx (SImode);
2131 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
2132 emit_insn (gen_lshrsi3 (op1, operands[0], operands[1]));
2133 emit_insn (gen_iorsi3 (op1, op1, op0));
2134 emit_insn (gen_rotlsi3 (subtarget, op1, operands[1]));
2136 else if ((width + start_bit == 32)
2137 && !(const_ok_for_arm (mask)
2138 || const_ok_for_arm (~mask)))
2140 /* Similar trick, but slightly less efficient. */
2142 rtx op0 = gen_reg_rtx (SImode);
2143 rtx op1 = gen_reg_rtx (SImode);
2145 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
2146 emit_insn (gen_ashlsi3 (op1, operands[0], operands[1]));
2147 emit_insn (gen_lshrsi3 (op1, op1, operands[1]));
2148 emit_insn (gen_iorsi3 (subtarget, op1, op0));
2152 rtx op0 = gen_int_mode (mask, SImode);
2153 rtx op1 = gen_reg_rtx (SImode);
2154 rtx op2 = gen_reg_rtx (SImode);
2156 if (!(const_ok_for_arm (mask) || const_ok_for_arm (~mask)))
2158 rtx tmp = gen_reg_rtx (SImode);
2160 emit_insn (gen_movsi (tmp, op0));
2164 /* Mask out any bits in operand[3] that are not needed. */
2165 emit_insn (gen_andsi3 (op1, operands[3], op0));
2167 if (GET_CODE (op0) == CONST_INT
2168 && (const_ok_for_arm (mask << start_bit)
2169 || const_ok_for_arm (~(mask << start_bit))))
2171 op0 = gen_int_mode (~(mask << start_bit), SImode);
2172 emit_insn (gen_andsi3 (op2, operands[0], op0));
2176 if (GET_CODE (op0) == CONST_INT)
2178 rtx tmp = gen_reg_rtx (SImode);
2180 emit_insn (gen_movsi (tmp, op0));
2185 emit_insn (gen_ashlsi3 (op0, op0, operands[2]));
2187 emit_insn (gen_andsi_notsi_si (op2, operands[0], op0));
2191 emit_insn (gen_ashlsi3 (op1, op1, operands[2]));
2193 emit_insn (gen_iorsi3 (subtarget, op1, op2));
2196 if (subtarget != target)
2198 /* If TARGET is still a SUBREG, then it must be wider than a word,
2199 so we must be careful only to set the subword we were asked to. */
2200 if (GET_CODE (target) == SUBREG)
2201 emit_move_insn (target, subtarget);
2203 emit_move_insn (target, gen_lowpart (GET_MODE (target), subtarget));
2210 ; constants for op 2 will never be given to these patterns.
;; DImode AND-NOT: split after reload into two SImode BICs, one for
;; the low word and one for the high word.
2211 (define_insn_and_split "*anddi_notdi_di"
2212 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2213 (and:DI (not:DI (match_operand:DI 1 "s_register_operand" "r,0"))
2214 (match_operand:DI 2 "s_register_operand" "0,r")))]
2217 "TARGET_32BIT && reload_completed && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
2218 [(set (match_dup 0) (and:SI (not:SI (match_dup 1)) (match_dup 2)))
2219 (set (match_dup 3) (and:SI (not:SI (match_dup 4)) (match_dup 5)))]
2222 operands[3] = gen_highpart (SImode, operands[0]);
2223 operands[0] = gen_lowpart (SImode, operands[0]);
2224 operands[4] = gen_highpart (SImode, operands[1]);
2225 operands[1] = gen_lowpart (SImode, operands[1]);
2226 operands[5] = gen_highpart (SImode, operands[2]);
2227 operands[2] = gen_lowpart (SImode, operands[2]);
2229 [(set_attr "length" "8")
2230 (set_attr "predicable" "yes")]
;; AND-NOT of a zero_extend: the NOT of a zero-extended value has an
;; all-ones high word, so only a low-word BIC is needed; the high
;; word is at most a copy from operand 1 (length 4,8).
2233 (define_insn_and_split "*anddi_notzesidi_di"
2234 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2235 (and:DI (not:DI (zero_extend:DI
2236 (match_operand:SI 2 "s_register_operand" "r,r")))
2237 (match_operand:DI 1 "s_register_operand" "0,?r")))]
2240 bic%?\\t%Q0, %Q1, %2
2242 ; (not (zero_extend ...)) allows us to just copy the high word from
2243 ; operand1 to operand0.
2246 && operands[0] != operands[1]"
2247 [(set (match_dup 0) (and:SI (not:SI (match_dup 2)) (match_dup 1)))
2248 (set (match_dup 3) (match_dup 4))]
2251 operands[3] = gen_highpart (SImode, operands[0]);
2252 operands[0] = gen_lowpart (SImode, operands[0]);
2253 operands[4] = gen_highpart (SImode, operands[1]);
2254 operands[1] = gen_lowpart (SImode, operands[1]);
2256 [(set_attr "length" "4,8")
2257 (set_attr "predicable" "yes")]
;; AND-NOT of a sign_extend: the high word BICs against the sign bits
;; of operand 2, materialized as (ashiftrt ... 31).
2260 (define_insn_and_split "*anddi_notsesidi_di"
2261 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2262 (and:DI (not:DI (sign_extend:DI
2263 (match_operand:SI 2 "s_register_operand" "r,r")))
2264 (match_operand:DI 1 "s_register_operand" "0,r")))]
2267 "TARGET_32BIT && reload_completed"
2268 [(set (match_dup 0) (and:SI (not:SI (match_dup 2)) (match_dup 1)))
2269 (set (match_dup 3) (and:SI (not:SI
2270 (ashiftrt:SI (match_dup 2) (const_int 31)))
2274 operands[3] = gen_highpart (SImode, operands[0]);
2275 operands[0] = gen_lowpart (SImode, operands[0]);
2276 operands[4] = gen_highpart (SImode, operands[1]);
2277 operands[1] = gen_lowpart (SImode, operands[1]);
2279 [(set_attr "length" "8")
2280 (set_attr "predicable" "yes")]
;; SImode AND-NOT -> a single BIC instruction.
2283 (define_insn "andsi_notsi_si"
2284 [(set (match_operand:SI 0 "s_register_operand" "=r")
2285 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2286 (match_operand:SI 1 "s_register_operand" "r")))]
2288 "bic%?\\t%0, %1, %2"
2289 [(set_attr "predicable" "yes")]
;; Thumb-1 BIC: 2-byte encoding, destination tied to operand 2
;; (low registers only).
2292 (define_insn "bicsi3"
2293 [(set (match_operand:SI 0 "register_operand" "=l")
2294 (and:SI (not:SI (match_operand:SI 1 "register_operand" "l"))
2295 (match_operand:SI 2 "register_operand" "0")))]
2298 [(set_attr "length" "2")]
;; BIC where the complemented operand goes through the barrel
;; shifter (register or immediate shift count, via %S4).
2301 (define_insn "andsi_not_shiftsi_si"
2302 [(set (match_operand:SI 0 "s_register_operand" "=r")
2303 (and:SI (not:SI (match_operator:SI 4 "shift_operator"
2304 [(match_operand:SI 2 "s_register_operand" "r")
2305 (match_operand:SI 3 "arm_rhs_operand" "rM")]))
2306 (match_operand:SI 1 "s_register_operand" "r")))]
2308 "bic%?\\t%0, %1, %2%S4"
2309 [(set_attr "predicable" "yes")
2310 (set_attr "shift" "2")
2311 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
2312 (const_string "alu_shift")
2313 (const_string "alu_shift_reg")))]
;; BIC that also sets the condition codes (BICS), keeping the result.
2316 (define_insn "*andsi_notsi_si_compare0"
2317 [(set (reg:CC_NOOV CC_REGNUM)
2319 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2320 (match_operand:SI 1 "s_register_operand" "r"))
2322 (set (match_operand:SI 0 "s_register_operand" "=r")
2323 (and:SI (not:SI (match_dup 2)) (match_dup 1)))]
2325 "bic%.\\t%0, %1, %2"
2326 [(set_attr "conds" "set")]
;; As above but the result itself is dead (scratch destination).
2329 (define_insn "*andsi_notsi_si_compare0_scratch"
2330 [(set (reg:CC_NOOV CC_REGNUM)
2332 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2333 (match_operand:SI 1 "s_register_operand" "r"))
2335 (clobber (match_scratch:SI 0 "=r"))]
2337 "bic%.\\t%0, %1, %2"
2338 [(set_attr "conds" "set")]
;; DImode inclusive OR: two SImode ORRs (length 8).  Not used for
;; iWMMXt, which has its own DI logical ops.
2341 (define_insn "iordi3"
2342 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2343 (ior:DI (match_operand:DI 1 "s_register_operand" "%0,r")
2344 (match_operand:DI 2 "s_register_operand" "r,r")))]
2345 "TARGET_32BIT && ! TARGET_IWMMXT"
2347 [(set_attr "length" "8")
2348 (set_attr "predicable" "yes")]
;; IOR with a zero-extended SImode operand: only the low word needs
;; an ORR; when the destination is tied to operand 1 (alternative 0)
;; that single insn suffices (length 4,8).
2351 (define_insn "*iordi_zesidi_di"
2352 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2353 (ior:DI (zero_extend:DI
2354 (match_operand:SI 2 "s_register_operand" "r,r"))
2355 (match_operand:DI 1 "s_register_operand" "0,?r")))]
2358 orr%?\\t%Q0, %Q1, %2
2360 [(set_attr "length" "4,8")
2361 (set_attr "predicable" "yes")]
;; IOR with a sign-extended SImode operand (two insns).
2364 (define_insn "*iordi_sesidi_di"
2365 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2366 (ior:DI (sign_extend:DI
2367 (match_operand:SI 2 "s_register_operand" "r,r"))
2368 (match_operand:DI 1 "s_register_operand" "?r,0")))]
2371 [(set_attr "length" "8")
2372 (set_attr "predicable" "yes")]
;; SImode IOR expander: constants that ORR cannot encode are split
;; via arm_split_constant; Thumb-1 forces any constant into a reg.
2375 (define_expand "iorsi3"
2376 [(set (match_operand:SI 0 "s_register_operand" "")
2377 (ior:SI (match_operand:SI 1 "s_register_operand" "")
2378 (match_operand:SI 2 "reg_or_int_operand" "")))]
2381 if (GET_CODE (operands[2]) == CONST_INT)
2385 arm_split_constant (IOR, SImode, NULL_RTX,
2386 INTVAL (operands[2]), operands[0], operands[1],
2387 optimize && !no_new_pseudos);
2390 else /* TARGET_THUMB1 */
2391 operands [2] = force_reg (SImode, operands [2]);
;; ORR insn; the second alternative accepts an awkward constant and
;; is decomposed by arm_split_constant after matching (length 4,16).
2396 (define_insn_and_split "*arm_iorsi3"
2397 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
2398 (ior:SI (match_operand:SI 1 "s_register_operand" "r,r")
2399 (match_operand:SI 2 "reg_or_int_operand" "rI,?n")))]
2405 && GET_CODE (operands[2]) == CONST_INT
2406 && !const_ok_for_arm (INTVAL (operands[2]))"
2407 [(clobber (const_int 0))]
2409 arm_split_constant (IOR, SImode, curr_insn,
2410 INTVAL (operands[2]), operands[0], operands[1], 0);
2413 [(set_attr "length" "4,16")
2414 (set_attr "predicable" "yes")]
;; Thumb-1 ORR (2-byte, destination tied to operand 1).
2417 (define_insn "*thumb1_iorsi3"
2418 [(set (match_operand:SI 0 "register_operand" "=l")
2419 (ior:SI (match_operand:SI 1 "register_operand" "%0")
2420 (match_operand:SI 2 "register_operand" "l")))]
2423 [(set_attr "length" "2")]
;; Peephole: OR with a constant whose complement IS encodable (so it
;; can be materialized cheaply, e.g. with MVN) while the constant
;; itself is not; load it into scratch 3 and use a register ORR.
2427 [(match_scratch:SI 3 "r")
2428 (set (match_operand:SI 0 "arm_general_register_operand" "")
2429 (ior:SI (match_operand:SI 1 "arm_general_register_operand" "")
2430 (match_operand:SI 2 "const_int_operand" "")))]
2432 && !const_ok_for_arm (INTVAL (operands[2]))
2433 && const_ok_for_arm (~INTVAL (operands[2]))"
2434 [(set (match_dup 3) (match_dup 2))
2435 (set (match_dup 0) (ior:SI (match_dup 1) (match_dup 3)))]
;; ORRS: IOR that also sets the condition codes, keeping the result.
2439 (define_insn "*iorsi3_compare0"
2440 [(set (reg:CC_NOOV CC_REGNUM)
2441 (compare:CC_NOOV (ior:SI (match_operand:SI 1 "s_register_operand" "%r")
2442 (match_operand:SI 2 "arm_rhs_operand" "rI"))
2444 (set (match_operand:SI 0 "s_register_operand" "=r")
2445 (ior:SI (match_dup 1) (match_dup 2)))]
2447 "orr%.\\t%0, %1, %2"
2448 [(set_attr "conds" "set")]
;; As above but the result is dead (scratch destination).
2451 (define_insn "*iorsi3_compare0_scratch"
2452 [(set (reg:CC_NOOV CC_REGNUM)
2453 (compare:CC_NOOV (ior:SI (match_operand:SI 1 "s_register_operand" "%r")
2454 (match_operand:SI 2 "arm_rhs_operand" "rI"))
2456 (clobber (match_scratch:SI 0 "=r"))]
2458 "orr%.\\t%0, %1, %2"
2459 [(set_attr "conds" "set")]
;; DImode XOR: two SImode EORs (length 8); not for iWMMXt.
2462 (define_insn "xordi3"
2463 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2464 (xor:DI (match_operand:DI 1 "s_register_operand" "%0,r")
2465 (match_operand:DI 2 "s_register_operand" "r,r")))]
2466 "TARGET_32BIT && !TARGET_IWMMXT"
2468 [(set_attr "length" "8")
2469 (set_attr "predicable" "yes")]
;; XOR with a zero-extended SImode operand: XOR with zero leaves the
;; high word unchanged, so alternative 0 is a single low-word EOR.
2472 (define_insn "*xordi_zesidi_di"
2473 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2474 (xor:DI (zero_extend:DI
2475 (match_operand:SI 2 "s_register_operand" "r,r"))
2476 (match_operand:DI 1 "s_register_operand" "0,?r")))]
2479 eor%?\\t%Q0, %Q1, %2
2481 [(set_attr "length" "4,8")
2482 (set_attr "predicable" "yes")]
;; XOR with a sign-extended SImode operand (two insns).
2485 (define_insn "*xordi_sesidi_di"
2486 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2487 (xor:DI (sign_extend:DI
2488 (match_operand:SI 2 "s_register_operand" "r,r"))
2489 (match_operand:DI 1 "s_register_operand" "?r,0")))]
2492 [(set_attr "length" "8")
2493 (set_attr "predicable" "yes")]
;; SImode XOR expander: constants are forced into a register here
;; (in the path shown; the I constraint covers encodable immediates).
2496 (define_expand "xorsi3"
2497 [(set (match_operand:SI 0 "s_register_operand" "")
2498 (xor:SI (match_operand:SI 1 "s_register_operand" "")
2499 (match_operand:SI 2 "arm_rhs_operand" "")))]
2502 if (GET_CODE (operands[2]) == CONST_INT)
2503 operands[2] = force_reg (SImode, operands[2]);
;; Plain EOR.
2507 (define_insn "*arm_xorsi3"
2508 [(set (match_operand:SI 0 "s_register_operand" "=r")
2509 (xor:SI (match_operand:SI 1 "s_register_operand" "r")
2510 (match_operand:SI 2 "arm_rhs_operand" "rI")))]
2512 "eor%?\\t%0, %1, %2"
2513 [(set_attr "predicable" "yes")]
;; Thumb-1 EOR (2-byte, destination tied to operand 1).
2516 (define_insn "*thumb1_xorsi3"
2517 [(set (match_operand:SI 0 "register_operand" "=l")
2518 (xor:SI (match_operand:SI 1 "register_operand" "%0")
2519 (match_operand:SI 2 "register_operand" "l")))]
2522 [(set_attr "length" "2")]
;; EORS: XOR that also sets the condition codes, keeping the result.
2525 (define_insn "*xorsi3_compare0"
2526 [(set (reg:CC_NOOV CC_REGNUM)
2527 (compare:CC_NOOV (xor:SI (match_operand:SI 1 "s_register_operand" "r")
2528 (match_operand:SI 2 "arm_rhs_operand" "rI"))
2530 (set (match_operand:SI 0 "s_register_operand" "=r")
2531 (xor:SI (match_dup 1) (match_dup 2)))]
2533 "eor%.\\t%0, %1, %2"
2534 [(set_attr "conds" "set")]
;; Compare-only XOR (TEQ-style: result not stored).
2537 (define_insn "*xorsi3_compare0_scratch"
2538 [(set (reg:CC_NOOV CC_REGNUM)
2539 (compare:CC_NOOV (xor:SI (match_operand:SI 0 "s_register_operand" "r")
2540 (match_operand:SI 1 "arm_rhs_operand" "rI"))
2544 [(set_attr "conds" "set")]
2547 ; By splitting (IOR (AND (NOT A) (NOT B)) C) as D = AND (IOR A B) (NOT C),
2548 ; (NOT D) we can sometimes merge the final NOT into one of the following
;; De Morgan rewrite using scratch operand 4; the trailing NOT into
;; operand 0 may then combine with a following instruction.
2552 [(set (match_operand:SI 0 "s_register_operand" "")
2553 (ior:SI (and:SI (not:SI (match_operand:SI 1 "s_register_operand" ""))
2554 (not:SI (match_operand:SI 2 "arm_rhs_operand" "")))
2555 (match_operand:SI 3 "arm_rhs_operand" "")))
2556 (clobber (match_operand:SI 4 "s_register_operand" ""))]
2558 [(set (match_dup 4) (and:SI (ior:SI (match_dup 1) (match_dup 2))
2559 (not:SI (match_dup 3))))
2560 (set (match_dup 0) (not:SI (match_dup 4)))]
;; (A | B) & ~C emitted as ORR followed by BIC (length 8).
2564 (define_insn "*andsi_iorsi3_notsi"
2565 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
2566 (and:SI (ior:SI (match_operand:SI 1 "s_register_operand" "r,r,0")
2567 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI"))
2568 (not:SI (match_operand:SI 3 "arm_rhs_operand" "rI,rI,rI"))))]
2570 "orr%?\\t%0, %1, %2\;bic%?\\t%0, %0, %3"
2571 [(set_attr "length" "8")
2572 (set_attr "ce_count" "2")
2573 (set_attr "predicable" "yes")]
2576 ; ??? Are these four splitters still beneficial when the Thumb-2 bitfield
2577 ; insns are available?
;; Splitter 1 of 4: logical op pairing a zero_extract with a matching
;; lshiftrt term; applies when the field width equals 32 minus the
;; shift count, so both sides reduce to shifts through scratch 8.
2579 [(set (match_operand:SI 0 "s_register_operand" "")
2580 (match_operator:SI 1 "logical_binary_operator"
2581 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2582 (match_operand:SI 3 "const_int_operand" "")
2583 (match_operand:SI 4 "const_int_operand" ""))
2584 (match_operator:SI 9 "logical_binary_operator"
2585 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2586 (match_operand:SI 6 "const_int_operand" ""))
2587 (match_operand:SI 7 "s_register_operand" "")])]))
2588 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2590 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2591 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2594 [(ashift:SI (match_dup 2) (match_dup 4))
2598 [(lshiftrt:SI (match_dup 8) (match_dup 6))
2601 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
;; Splitter 2 of 4: as above with the operands of the outer operator
;; in the opposite order.
2605 [(set (match_operand:SI 0 "s_register_operand" "")
2606 (match_operator:SI 1 "logical_binary_operator"
2607 [(match_operator:SI 9 "logical_binary_operator"
2608 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2609 (match_operand:SI 6 "const_int_operand" ""))
2610 (match_operand:SI 7 "s_register_operand" "")])
2611 (zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2612 (match_operand:SI 3 "const_int_operand" "")
2613 (match_operand:SI 4 "const_int_operand" ""))]))
2614 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2616 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2617 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2620 [(ashift:SI (match_dup 2) (match_dup 4))
2624 [(lshiftrt:SI (match_dup 8) (match_dup 6))
2627 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
;; Splitter 3 of 4: the sign_extract / ashiftrt variant.
2631 [(set (match_operand:SI 0 "s_register_operand" "")
2632 (match_operator:SI 1 "logical_binary_operator"
2633 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2634 (match_operand:SI 3 "const_int_operand" "")
2635 (match_operand:SI 4 "const_int_operand" ""))
2636 (match_operator:SI 9 "logical_binary_operator"
2637 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2638 (match_operand:SI 6 "const_int_operand" ""))
2639 (match_operand:SI 7 "s_register_operand" "")])]))
2640 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2642 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2643 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2646 [(ashift:SI (match_dup 2) (match_dup 4))
2650 [(ashiftrt:SI (match_dup 8) (match_dup 6))
2653 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
;; Splitter 4 of 4: sign_extract variant with the operands of the
;; outer operator in the opposite order.
2657 [(set (match_operand:SI 0 "s_register_operand" "")
2658 (match_operator:SI 1 "logical_binary_operator"
2659 [(match_operator:SI 9 "logical_binary_operator"
2660 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2661 (match_operand:SI 6 "const_int_operand" ""))
2662 (match_operand:SI 7 "s_register_operand" "")])
2663 (sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2664 (match_operand:SI 3 "const_int_operand" "")
2665 (match_operand:SI 4 "const_int_operand" ""))]))
2666 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2668 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2669 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2672 [(ashift:SI (match_dup 2) (match_dup 4))
2676 [(ashiftrt:SI (match_dup 8) (match_dup 6))
2679 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2683 ;; Minimum and maximum insns
;; smax expander: comparisons against 0 and -1 have single-insn
;; forms (below) that need no condition-code clobber.
2685 (define_expand "smaxsi3"
2687 (set (match_operand:SI 0 "s_register_operand" "")
2688 (smax:SI (match_operand:SI 1 "s_register_operand" "")
2689 (match_operand:SI 2 "arm_rhs_operand" "")))
2690 (clobber (reg:CC CC_REGNUM))])]
2693 if (operands[2] == const0_rtx || operands[2] == constm1_rtx)
2695 /* No need for a clobber of the condition code register here. */
2696 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
2697 gen_rtx_SMAX (SImode, operands[1],
;; max(x, 0): clear x when negative — BIC with x asr #31.
2703 (define_insn "*smax_0"
2704 [(set (match_operand:SI 0 "s_register_operand" "=r")
2705 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
2708 "bic%?\\t%0, %1, %1, asr #31"
2709 [(set_attr "predicable" "yes")]
;; max(x, -1): set all bits when negative — ORR with x asr #31.
2712 (define_insn "*smax_m1"
2713 [(set (match_operand:SI 0 "s_register_operand" "=r")
2714 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
2717 "orr%?\\t%0, %1, %1, asr #31"
2718 [(set_attr "predicable" "yes")]
;; General signed max: cmp plus one or two conditional moves;
;; clobbers the condition codes.
2721 (define_insn "*arm_smax_insn"
2722 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
2723 (smax:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
2724 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
2725 (clobber (reg:CC CC_REGNUM))]
2728 cmp\\t%1, %2\;movlt\\t%0, %2
2729 cmp\\t%1, %2\;movge\\t%0, %1\;movlt\\t%0, %2"
2730 [(set_attr "conds" "clob")
2731 (set_attr "length" "8,12")]
;; smin expander: min(x, 0) has a single-insn form needing no CC.
2734 (define_expand "sminsi3"
2736 (set (match_operand:SI 0 "s_register_operand" "")
2737 (smin:SI (match_operand:SI 1 "s_register_operand" "")
2738 (match_operand:SI 2 "arm_rhs_operand" "")))
2739 (clobber (reg:CC CC_REGNUM))])]
2742 if (operands[2] == const0_rtx)
2744 /* No need for a clobber of the condition code register here. */
2745 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
2746 gen_rtx_SMIN (SImode, operands[1],
;; min(x, 0): keep x only when negative — AND with x asr #31.
2752 (define_insn "*smin_0"
2753 [(set (match_operand:SI 0 "s_register_operand" "=r")
2754 (smin:SI (match_operand:SI 1 "s_register_operand" "r")
2757 "and%?\\t%0, %1, %1, asr #31"
2758 [(set_attr "predicable" "yes")]
;; General signed min: cmp plus conditional moves; clobbers CC.
2761 (define_insn "*arm_smin_insn"
2762 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
2763 (smin:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
2764 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
2765 (clobber (reg:CC CC_REGNUM))]
2768 cmp\\t%1, %2\;movge\\t%0, %2
2769 cmp\\t%1, %2\;movlt\\t%0, %1\;movge\\t%0, %2"
2770 [(set_attr "conds" "clob")
2771 (set_attr "length" "8,12")]
;; Unsigned max expander (always needs the CC clobber).
2774 (define_expand "umaxsi3"
2776 (set (match_operand:SI 0 "s_register_operand" "")
2777 (umax:SI (match_operand:SI 1 "s_register_operand" "")
2778 (match_operand:SI 2 "arm_rhs_operand" "")))
2779 (clobber (reg:CC CC_REGNUM))])]
;; Unsigned max: cmp then movcs/movcc conditional moves.
2784 (define_insn "*arm_umaxsi3"
2785 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
2786 (umax:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
2787 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
2788 (clobber (reg:CC CC_REGNUM))]
2791 cmp\\t%1, %2\;movcc\\t%0, %2
2792 cmp\\t%1, %2\;movcs\\t%0, %1
2793 cmp\\t%1, %2\;movcs\\t%0, %1\;movcc\\t%0, %2"
2794 [(set_attr "conds" "clob")
2795 (set_attr "length" "8,8,12")]
;; Unsigned min expander.
2798 (define_expand "uminsi3"
2800 (set (match_operand:SI 0 "s_register_operand" "")
2801 (umin:SI (match_operand:SI 1 "s_register_operand" "")
2802 (match_operand:SI 2 "arm_rhs_operand" "")))
2803 (clobber (reg:CC CC_REGNUM))])]
;; Unsigned min: cmp then movcs/movcc with the senses swapped.
2808 (define_insn "*arm_uminsi3"
2809 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
2810 (umin:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
2811 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
2812 (clobber (reg:CC CC_REGNUM))]
2815 cmp\\t%1, %2\;movcs\\t%0, %2
2816 cmp\\t%1, %2\;movcc\\t%0, %1
2817 cmp\\t%1, %2\;movcc\\t%0, %1\;movcs\\t%0, %2"
2818 [(set_attr "conds" "clob")
2819 (set_attr "length" "8,8,12")]
;; Store a min/max directly to memory: cmp then two complementary
;; conditional stores (an IT block is emitted where required).
2822 (define_insn "*store_minmaxsi"
2823 [(set (match_operand:SI 0 "memory_operand" "=m")
2824 (match_operator:SI 3 "minmax_operator"
2825 [(match_operand:SI 1 "s_register_operand" "r")
2826 (match_operand:SI 2 "s_register_operand" "r")]))
2827 (clobber (reg:CC CC_REGNUM))]
2830 operands[3] = gen_rtx_fmt_ee (minmax_code (operands[3]), SImode,
2831 operands[1], operands[2]);
2832 output_asm_insn (\"cmp\\t%1, %2\", operands);
2834 output_asm_insn (\"ite\t%d3\", operands);
2835 output_asm_insn (\"str%d3\\t%1, %0\", operands);
2836 output_asm_insn (\"str%D3\\t%2, %0\", operands);
2839 [(set_attr "conds" "clob")
2840 (set (attr "length")
2841 (if_then_else (eq_attr "is_thumb" "yes")
2844 (set_attr "type" "store1")]
2847 ; Reject the frame pointer in operand[1], since reloading this after
2848 ; it has been eliminated can cause carnage.
;; Apply a shiftable operator to a min/max without materializing the
;; min/max: cmp then two oppositely-predicated ALU operations.
2849 (define_insn "*minmax_arithsi"
2850 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
2851 (match_operator:SI 4 "shiftable_operator"
2852 [(match_operator:SI 5 "minmax_operator"
2853 [(match_operand:SI 2 "s_register_operand" "r,r")
2854 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
2855 (match_operand:SI 1 "s_register_operand" "0,?r")]))
2856 (clobber (reg:CC CC_REGNUM))]
2857 "TARGET_32BIT && !arm_eliminable_register (operands[1])"
2860 enum rtx_code code = GET_CODE (operands[4]);
2863 if (which_alternative != 0 || operands[3] != const0_rtx
2864 || (code != PLUS && code != MINUS && code != IOR && code != XOR))
2869 operands[5] = gen_rtx_fmt_ee (minmax_code (operands[5]), SImode,
2870 operands[2], operands[3]);
2871 output_asm_insn (\"cmp\\t%2, %3\", operands);
2875 output_asm_insn (\"ite\\t%d5\", operands);
2877 output_asm_insn (\"it\\t%d5\", operands);
2879 output_asm_insn (\"%i4%d5\\t%0, %1, %2\", operands);
2881 output_asm_insn (\"%i4%D5\\t%0, %1, %3\", operands);
2884 [(set_attr "conds" "clob")
2885 (set (attr "length")
2886 (if_then_else (eq_attr "is_thumb" "yes")
2892 ;; Shift and rotation insns
2894 (define_expand "ashldi3"
2895 [(set (match_operand:DI 0 "s_register_operand" "")
2896 (ashift:DI (match_operand:DI 1 "s_register_operand" "")
2897 (match_operand:SI 2 "reg_or_int_operand" "")))]
2900 if (GET_CODE (operands[2]) == CONST_INT)
2902 if ((HOST_WIDE_INT) INTVAL (operands[2]) == 1)
2904 emit_insn (gen_arm_ashldi3_1bit (operands[0], operands[1]));
2907 /* Ideally we shouldn't fail here if we could know that operands[1]
2908 ends up already living in an iwmmxt register. Otherwise it's
2909 cheaper to have the alternate code being generated than moving
2910 values to iwmmxt regs and back. */
2913 else if (!TARGET_REALLY_IWMMXT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK))
2918 (define_insn "arm_ashldi3_1bit"
2919 [(set (match_operand:DI 0 "s_register_operand" "=&r,r")
2920 (ashift:DI (match_operand:DI 1 "s_register_operand" "?r,0")
2922 (clobber (reg:CC CC_REGNUM))]
2924 "movs\\t%Q0, %Q1, asl #1\;adc\\t%R0, %R1, %R1"
2925 [(set_attr "conds" "clob")
2926 (set_attr "length" "8")]
2929 (define_expand "ashlsi3"
2930 [(set (match_operand:SI 0 "s_register_operand" "")
2931 (ashift:SI (match_operand:SI 1 "s_register_operand" "")
2932 (match_operand:SI 2 "arm_rhs_operand" "")))]
2935 if (GET_CODE (operands[2]) == CONST_INT
2936 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
2938 emit_insn (gen_movsi (operands[0], const0_rtx));
2944 (define_insn "*thumb1_ashlsi3"
2945 [(set (match_operand:SI 0 "register_operand" "=l,l")
2946 (ashift:SI (match_operand:SI 1 "register_operand" "l,0")
2947 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
2950 [(set_attr "length" "2")]
2953 (define_expand "ashrdi3"
2954 [(set (match_operand:DI 0 "s_register_operand" "")
2955 (ashiftrt:DI (match_operand:DI 1 "s_register_operand" "")
2956 (match_operand:SI 2 "reg_or_int_operand" "")))]
2959 if (GET_CODE (operands[2]) == CONST_INT)
2961 if ((HOST_WIDE_INT) INTVAL (operands[2]) == 1)
2963 emit_insn (gen_arm_ashrdi3_1bit (operands[0], operands[1]));
2966 /* Ideally we shouldn't fail here if we could know that operands[1]
2967 ends up already living in an iwmmxt register. Otherwise it's
2968 cheaper to have the alternate code being generated than moving
2969 values to iwmmxt regs and back. */
2972 else if (!TARGET_REALLY_IWMMXT)
2977 (define_insn "arm_ashrdi3_1bit"
2978 [(set (match_operand:DI 0 "s_register_operand" "=&r,r")
2979 (ashiftrt:DI (match_operand:DI 1 "s_register_operand" "?r,0")
2981 (clobber (reg:CC CC_REGNUM))]
2983 "movs\\t%R0, %R1, asr #1\;mov\\t%Q0, %Q1, rrx"
2984 [(set_attr "conds" "clob")
2985 (set_attr "length" "8")]
2988 (define_expand "ashrsi3"
2989 [(set (match_operand:SI 0 "s_register_operand" "")
2990 (ashiftrt:SI (match_operand:SI 1 "s_register_operand" "")
2991 (match_operand:SI 2 "arm_rhs_operand" "")))]
2994 if (GET_CODE (operands[2]) == CONST_INT
2995 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
2996 operands[2] = GEN_INT (31);
3000 (define_insn "*thumb1_ashrsi3"
3001 [(set (match_operand:SI 0 "register_operand" "=l,l")
3002 (ashiftrt:SI (match_operand:SI 1 "register_operand" "l,0")
3003 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
3006 [(set_attr "length" "2")]
3009 (define_expand "lshrdi3"
3010 [(set (match_operand:DI 0 "s_register_operand" "")
3011 (lshiftrt:DI (match_operand:DI 1 "s_register_operand" "")
3012 (match_operand:SI 2 "reg_or_int_operand" "")))]
3015 if (GET_CODE (operands[2]) == CONST_INT)
3017 if ((HOST_WIDE_INT) INTVAL (operands[2]) == 1)
3019 emit_insn (gen_arm_lshrdi3_1bit (operands[0], operands[1]));
3022 /* Ideally we shouldn't fail here if we could know that operands[1]
3023 ends up already living in an iwmmxt register. Otherwise it's
3024 cheaper to have the alternate code being generated than moving
3025 values to iwmmxt regs and back. */
3028 else if (!TARGET_REALLY_IWMMXT)
3033 (define_insn "arm_lshrdi3_1bit"
3034 [(set (match_operand:DI 0 "s_register_operand" "=&r,r")
3035 (lshiftrt:DI (match_operand:DI 1 "s_register_operand" "?r,0")
3037 (clobber (reg:CC CC_REGNUM))]
3039 "movs\\t%R0, %R1, lsr #1\;mov\\t%Q0, %Q1, rrx"
3040 [(set_attr "conds" "clob")
3041 (set_attr "length" "8")]
3044 (define_expand "lshrsi3"
3045 [(set (match_operand:SI 0 "s_register_operand" "")
3046 (lshiftrt:SI (match_operand:SI 1 "s_register_operand" "")
3047 (match_operand:SI 2 "arm_rhs_operand" "")))]
3050 if (GET_CODE (operands[2]) == CONST_INT
3051 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3053 emit_insn (gen_movsi (operands[0], const0_rtx));
3059 (define_insn "*thumb1_lshrsi3"
3060 [(set (match_operand:SI 0 "register_operand" "=l,l")
3061 (lshiftrt:SI (match_operand:SI 1 "register_operand" "l,0")
3062 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
3065 [(set_attr "length" "2")]
;; NOTE(review): partial dump — interior lines missing; comments only added.
;;
;; Rotate left is implemented as rotate right by (32 - n) % 32; for a
;; register count, 32 - count is computed into a fresh register.
3068 (define_expand "rotlsi3"
3069 [(set (match_operand:SI 0 "s_register_operand" "")
3070 (rotatert:SI (match_operand:SI 1 "s_register_operand" "")
3071 (match_operand:SI 2 "reg_or_int_operand" "")))]
3074 if (GET_CODE (operands[2]) == CONST_INT)
3075 operands[2] = GEN_INT ((32 - INTVAL (operands[2])) % 32);
3078 rtx reg = gen_reg_rtx (SImode);
3079 emit_insn (gen_subsi3 (reg, GEN_INT (32), operands[2]));
;; Rotate right expander: constant counts are reduced mod 32; on
;; Thumb-1 a constant count is forced into a register.
3085 (define_expand "rotrsi3"
3086 [(set (match_operand:SI 0 "s_register_operand" "")
3087 (rotatert:SI (match_operand:SI 1 "s_register_operand" "")
3088 (match_operand:SI 2 "arm_rhs_operand" "")))]
3093 if (GET_CODE (operands[2]) == CONST_INT
3094 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3095 operands[2] = GEN_INT (INTVAL (operands[2]) % 32);
3097 else /* TARGET_THUMB1 */
3099 if (GET_CODE (operands [2]) == CONST_INT)
3100 operands [2] = force_reg (SImode, operands[2]);
;; Thumb-1 rotate right, register count only; operand 0 ties to operand 1.
3105 (define_insn "*thumb1_rotrsi3"
3106 [(set (match_operand:SI 0 "register_operand" "=l")
3107 (rotatert:SI (match_operand:SI 1 "register_operand" "0")
3108 (match_operand:SI 2 "register_operand" "l")))]
3111 [(set_attr "length" "2")]
;; NOTE(review): partial dump — interior lines missing (insn conditions,
;; some output templates); comments only added, text untouched.
;;
;; Generic SImode shift via a shift_operator match; assembly is produced
;; by arm_output_shift.  The "type" attribute distinguishes an immediate
;; shift (alu_shift) from a register-count shift (alu_shift_reg).
3114 (define_insn "*arm_shiftsi3"
3115 [(set (match_operand:SI 0 "s_register_operand" "=r")
3116 (match_operator:SI 3 "shift_operator"
3117 [(match_operand:SI 1 "s_register_operand" "r")
3118 (match_operand:SI 2 "reg_or_int_operand" "rM")]))]
3120 "* return arm_output_shift(operands, 0);"
3121 [(set_attr "predicable" "yes")
3122 (set_attr "shift" "1")
3123 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3124 (const_string "alu_shift")
3125 (const_string "alu_shift_reg")))]
;; Shift that also sets the condition codes (compare against the result).
3128 (define_insn "*shiftsi3_compare0"
3129 [(set (reg:CC_NOOV CC_REGNUM)
3130 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
3131 [(match_operand:SI 1 "s_register_operand" "r")
3132 (match_operand:SI 2 "arm_rhs_operand" "rM")])
3134 (set (match_operand:SI 0 "s_register_operand" "=r")
3135 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))]
3137 "* return arm_output_shift(operands, 1);"
3138 [(set_attr "conds" "set")
3139 (set_attr "shift" "1")
3140 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3141 (const_string "alu_shift")
3142 (const_string "alu_shift_reg")))]
;; As above but the shifted value itself is discarded (scratch dest).
3145 (define_insn "*shiftsi3_compare0_scratch"
3146 [(set (reg:CC_NOOV CC_REGNUM)
3147 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
3148 [(match_operand:SI 1 "s_register_operand" "r")
3149 (match_operand:SI 2 "arm_rhs_operand" "rM")])
3151 (clobber (match_scratch:SI 0 "=r"))]
3153 "* return arm_output_shift(operands, 1);"
3154 [(set_attr "conds" "set")
3155 (set_attr "shift" "1")]
;; NOT of a shifted value (ARM mvn with shifter operand).
3158 (define_insn "*arm_notsi_shiftsi"
3159 [(set (match_operand:SI 0 "s_register_operand" "=r")
3160 (not:SI (match_operator:SI 3 "shift_operator"
3161 [(match_operand:SI 1 "s_register_operand" "r")
3162 (match_operand:SI 2 "arm_rhs_operand" "rM")])))]
3165 [(set_attr "predicable" "yes")
3166 (set_attr "shift" "1")
3167 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3168 (const_string "alu_shift")
3169 (const_string "alu_shift_reg")))]
;; NOT-of-shift variant that also sets the condition codes.
3172 (define_insn "*arm_notsi_shiftsi_compare0"
3173 [(set (reg:CC_NOOV CC_REGNUM)
3174 (compare:CC_NOOV (not:SI (match_operator:SI 3 "shift_operator"
3175 [(match_operand:SI 1 "s_register_operand" "r")
3176 (match_operand:SI 2 "arm_rhs_operand" "rM")]))
3178 (set (match_operand:SI 0 "s_register_operand" "=r")
3179 (not:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])))]
3182 [(set_attr "conds" "set")
3183 (set_attr "shift" "1")
3184 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3185 (const_string "alu_shift")
3186 (const_string "alu_shift_reg")))]
;; Compare-only form of the above (result discarded into a scratch).
3189 (define_insn "*arm_not_shiftsi_compare0_scratch"
3190 [(set (reg:CC_NOOV CC_REGNUM)
3191 (compare:CC_NOOV (not:SI (match_operator:SI 3 "shift_operator"
3192 [(match_operand:SI 1 "s_register_operand" "r")
3193 (match_operand:SI 2 "arm_rhs_operand" "rM")]))
3195 (clobber (match_scratch:SI 0 "=r"))]
3198 [(set_attr "conds" "set")
3199 (set_attr "shift" "1")
3200 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3201 (const_string "alu_shift")
3202 (const_string "alu_shift_reg")))]
;; NOTE(review): partial dump — interior lines missing; comments only added.
3205 ;; We don't really have extzv, but defining this using shifts helps
3206 ;; to reduce register pressure later on.
;; Zero-extract emulated as ashift-then-lshiftrt through a temp (op 4);
;; the shift amounts are computed from the requested width/position.
3208 (define_expand "extzv"
3210 (ashift:SI (match_operand:SI 1 "register_operand" "")
3211 (match_operand:SI 2 "const_int_operand" "")))
3212 (set (match_operand:SI 0 "register_operand" "")
3213 (lshiftrt:SI (match_dup 4)
3214 (match_operand:SI 3 "const_int_operand" "")))]
3218 HOST_WIDE_INT lshift = 32 - INTVAL (operands[2]) - INTVAL (operands[3]);
3219 HOST_WIDE_INT rshift = 32 - INTVAL (operands[2]);
3221 operands[3] = GEN_INT (rshift);
3225 emit_insn (gen_lshrsi3 (operands[0], operands[1], operands[3]));
3229 operands[2] = GEN_INT (lshift);
3230 operands[4] = gen_reg_rtx (SImode);
;; NOTE(review): partial dump — interior lines missing; comments only added.
3235 ;; Unary arithmetic insns
;; DImode negation expander; clobbers CC (the insn below uses rsbs/rsc).
3237 (define_expand "negdi2"
3239 [(set (match_operand:DI 0 "s_register_operand" "")
3240 (neg:DI (match_operand:DI 1 "s_register_operand" "")))
3241 (clobber (reg:CC CC_REGNUM))])]
3246 if (GET_CODE (operands[1]) != REG)
3247 operands[1] = force_reg (SImode, operands[1]);
3252 ;; The constraints here are to prevent a *partial* overlap (where %Q0 == %R1).
3253 ;; The second alternative is to allow the common case of a *full* overlap.
3254 (define_insn "*arm_negdi2"
3255 [(set (match_operand:DI 0 "s_register_operand" "=&r,r")
3256 (neg:DI (match_operand:DI 1 "s_register_operand" "?r,0")))
3257 (clobber (reg:CC CC_REGNUM))]
3259 "rsbs\\t%Q0, %Q1, #0\;rsc\\t%R0, %R1, #0"
3260 [(set_attr "conds" "clob")
3261 (set_attr "length" "8")]
;; Thumb-1 DImode negate: zero high, negate low, subtract-with-carry high.
3264 (define_insn "*thumb1_negdi2"
3265 [(set (match_operand:DI 0 "register_operand" "=&l")
3266 (neg:DI (match_operand:DI 1 "register_operand" "l")))
3267 (clobber (reg:CC CC_REGNUM))]
3269 "mov\\t%R0, #0\;neg\\t%Q0, %Q1\;sbc\\t%R0, %R1"
3270 [(set_attr "length" "6")]
;; SImode negation expander.
3273 (define_expand "negsi2"
3274 [(set (match_operand:SI 0 "s_register_operand" "")
3275 (neg:SI (match_operand:SI 1 "s_register_operand" "")))]
;; ARM-state negate: reverse-subtract from zero.
3280 (define_insn "*arm_negsi2"
3281 [(set (match_operand:SI 0 "s_register_operand" "=r")
3282 (neg:SI (match_operand:SI 1 "s_register_operand" "r")))]
3284 "rsb%?\\t%0, %1, #0"
3285 [(set_attr "predicable" "yes")]
3288 (define_insn "*thumb1_negsi2"
3289 [(set (match_operand:SI 0 "register_operand" "=l")
3290 (neg:SI (match_operand:SI 1 "register_operand" "l")))]
3293 [(set_attr "length" "2")]
;; FP negation expanders: gated on hard float with FPA or VFP.
3296 (define_expand "negsf2"
3297 [(set (match_operand:SF 0 "s_register_operand" "")
3298 (neg:SF (match_operand:SF 1 "s_register_operand" "")))]
3299 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
3303 (define_expand "negdf2"
3304 [(set (match_operand:DF 0 "s_register_operand" "")
3305 (neg:DF (match_operand:DF 1 "s_register_operand" "")))]
3306 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
;; NOTE(review): partial dump — interior lines missing; comments only added.
3309 ;; abssi2 doesn't really clobber the condition codes if a different register
3310 ;; is being set. To keep things simple, assume during rtl manipulations that
3311 ;; it does, but tell the final scan operator the truth. Similarly for
;; abssi2 expander: operand 2 becomes either a pure scratch or the CC reg.
3314 (define_expand "abssi2"
3316 [(set (match_operand:SI 0 "s_register_operand" "")
3317 (abs:SI (match_operand:SI 1 "s_register_operand" "")))
3318 (clobber (match_dup 2))])]
3322 operands[2] = gen_rtx_SCRATCH (SImode);
3324 operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
;; ARM abs: either cmp/rsblt (tied regs) or the branchless eor/sub
;; sequence using the sign bit replicated via asr #31.
3327 (define_insn "*arm_abssi2"
3328 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
3329 (abs:SI (match_operand:SI 1 "s_register_operand" "0,r")))
3330 (clobber (reg:CC CC_REGNUM))]
3333 cmp\\t%0, #0\;rsblt\\t%0, %0, #0
3334 eor%?\\t%0, %1, %1, asr #31\;sub%?\\t%0, %0, %1, asr #31"
3335 [(set_attr "conds" "clob,*")
3336 (set_attr "shift" "1")
3337 ;; predicable can't be set based on the variant, so left as no
3338 (set_attr "length" "8")]
;; Thumb-1 abs, split after reload into asr/add/xor (branchless).
3341 (define_insn_and_split "*thumb1_abssi2"
3342 [(set (match_operand:SI 0 "s_register_operand" "=l")
3343 (abs:SI (match_operand:SI 1 "s_register_operand" "l")))
3344 (clobber (match_scratch:SI 2 "=&l"))]
3347 "TARGET_THUMB1 && reload_completed"
3348 [(set (match_dup 2) (ashiftrt:SI (match_dup 1) (const_int 31)))
3349 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))
3350 (set (match_dup 0) (xor:SI (match_dup 0) (match_dup 2)))]
3352 [(set_attr "length" "6")]
;; Negated absolute value, -(abs x); mirrors *arm_abssi2.
3355 (define_insn "*arm_neg_abssi2"
3356 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
3357 (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "0,r"))))
3358 (clobber (reg:CC CC_REGNUM))]
3361 cmp\\t%0, #0\;rsbgt\\t%0, %0, #0
3362 eor%?\\t%0, %1, %1, asr #31\;rsb%?\\t%0, %0, %1, asr #31"
3363 [(set_attr "conds" "clob,*")
3364 (set_attr "shift" "1")
3365 ;; predicable can't be set based on the variant, so left as no
3366 (set_attr "length" "8")]
;; Thumb-1 -(abs x), split after reload into asr/minus/xor.
3369 (define_insn_and_split "*thumb1_neg_abssi2"
3370 [(set (match_operand:SI 0 "s_register_operand" "=l")
3371 (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "l"))))
3372 (clobber (match_scratch:SI 2 "=&l"))]
3375 "TARGET_THUMB1 && reload_completed"
3376 [(set (match_dup 2) (ashiftrt:SI (match_dup 1) (const_int 31)))
3377 (set (match_dup 0) (minus:SI (match_dup 2) (match_dup 1)))
3378 (set (match_dup 0) (xor:SI (match_dup 0) (match_dup 2)))]
3380 [(set_attr "length" "6")]
;; FP abs and sqrt expanders (hard-float only; sqrt needs FPA or VFP).
3383 (define_expand "abssf2"
3384 [(set (match_operand:SF 0 "s_register_operand" "")
3385 (abs:SF (match_operand:SF 1 "s_register_operand" "")))]
3386 "TARGET_32BIT && TARGET_HARD_FLOAT"
3389 (define_expand "absdf2"
3390 [(set (match_operand:DF 0 "s_register_operand" "")
3391 (abs:DF (match_operand:DF 1 "s_register_operand" "")))]
3392 "TARGET_32BIT && TARGET_HARD_FLOAT"
3395 (define_expand "sqrtsf2"
3396 [(set (match_operand:SF 0 "s_register_operand" "")
3397 (sqrt:SF (match_operand:SF 1 "s_register_operand" "")))]
3398 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
3401 (define_expand "sqrtdf2"
3402 [(set (match_operand:DF 0 "s_register_operand" "")
3403 (sqrt:DF (match_operand:DF 1 "s_register_operand" "")))]
3404 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
;; NOTE(review): partial dump — interior lines missing; comments only added.
;;
;; DImode bitwise NOT, split after reload into two SImode mvn's on the
;; low and high halves.
3407 (define_insn_and_split "one_cmpldi2"
3408 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
3409 (not:DI (match_operand:DI 1 "s_register_operand" "?r,0")))]
3412 "TARGET_32BIT && reload_completed"
3413 [(set (match_dup 0) (not:SI (match_dup 1)))
3414 (set (match_dup 2) (not:SI (match_dup 3)))]
3417 operands[2] = gen_highpart (SImode, operands[0]);
3418 operands[0] = gen_lowpart (SImode, operands[0]);
3419 operands[3] = gen_highpart (SImode, operands[1]);
3420 operands[1] = gen_lowpart (SImode, operands[1]);
3422 [(set_attr "length" "8")
3423 (set_attr "predicable" "yes")]
;; SImode bitwise NOT expander and ARM/Thumb-1 implementations.
3426 (define_expand "one_cmplsi2"
3427 [(set (match_operand:SI 0 "s_register_operand" "")
3428 (not:SI (match_operand:SI 1 "s_register_operand" "")))]
3433 (define_insn "*arm_one_cmplsi2"
3434 [(set (match_operand:SI 0 "s_register_operand" "=r")
3435 (not:SI (match_operand:SI 1 "s_register_operand" "r")))]
3438 [(set_attr "predicable" "yes")]
3441 (define_insn "*thumb1_one_cmplsi2"
3442 [(set (match_operand:SI 0 "register_operand" "=l")
3443 (not:SI (match_operand:SI 1 "register_operand" "l")))]
3446 [(set_attr "length" "2")]
;; NOT that also sets the condition codes; scratch variant discards result.
3449 (define_insn "*notsi_compare0"
3450 [(set (reg:CC_NOOV CC_REGNUM)
3451 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
3453 (set (match_operand:SI 0 "s_register_operand" "=r")
3454 (not:SI (match_dup 1)))]
3457 [(set_attr "conds" "set")]
3460 (define_insn "*notsi_compare0_scratch"
3461 [(set (reg:CC_NOOV CC_REGNUM)
3462 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
3464 (clobber (match_scratch:SI 0 "=r"))]
3467 [(set_attr "conds" "set")]
;; NOTE(review): partial dump — interior lines missing; comments only added.
3470 ;; Fixed <--> Floating conversion insns
;; int -> float expanders; Maverick (Cirrus) targets divert to the
;; dedicated cirrus patterns.
3472 (define_expand "floatsisf2"
3473 [(set (match_operand:SF 0 "s_register_operand" "")
3474 (float:SF (match_operand:SI 1 "s_register_operand" "")))]
3475 "TARGET_32BIT && TARGET_HARD_FLOAT"
3477 if (TARGET_MAVERICK)
3479 emit_insn (gen_cirrus_floatsisf2 (operands[0], operands[1]));
3484 (define_expand "floatsidf2"
3485 [(set (match_operand:DF 0 "s_register_operand" "")
3486 (float:DF (match_operand:SI 1 "s_register_operand" "")))]
3487 "TARGET_32BIT && TARGET_HARD_FLOAT"
3489 if (TARGET_MAVERICK)
3491 emit_insn (gen_cirrus_floatsidf2 (operands[0], operands[1]));
;; float -> int truncation expander.  On Maverick (Cirrus) targets both
;; operands must live in Cirrus FP registers before emitting the
;; dedicated cirrus_truncsfsi2 pattern.
;; FIX(review): the original read "force_reg (SFmode, operands[0])" —
;; a copy-paste bug that forced the SImode *destination* rtx into an
;; SFmode register and assigned it to operand 1 (wrong operand, wrong
;; mode).  The source operand must be legitimized, i.e. operands[1].
;; NOTE(review): surrounding lines are a partial dump (fused line
;; numbers, missing interior lines); only the visible defect is fixed.
3496 (define_expand "fix_truncsfsi2"
3497 [(set (match_operand:SI 0 "s_register_operand" "")
3498 (fix:SI (fix:SF (match_operand:SF 1 "s_register_operand" ""))))]
3499 "TARGET_32BIT && TARGET_HARD_FLOAT"
3501 if (TARGET_MAVERICK)
3503 if (!cirrus_fp_register (operands[0], SImode))
3504 operands[0] = force_reg (SImode, operands[0]);
3505 if (!cirrus_fp_register (operands[1], SFmode))
3506 operands[1] = force_reg (SFmode, operands[1]);
3507 emit_insn (gen_cirrus_truncsfsi2 (operands[0], operands[1]));
;; double -> int truncation expander; Maverick path mirrors
;; fix_truncsfsi2 but in DFmode.
;; FIX(review): the original read "force_reg (DFmode, operands[0])" —
;; same copy-paste bug as in fix_truncsfsi2: it forced the SImode
;; destination into a DFmode register bound to operand 1.  The DFmode
;; *source*, operands[1], is what must be legitimized.
;; NOTE(review): surrounding lines are a partial dump (fused line
;; numbers, missing interior lines); only the visible defect is fixed.
3512 (define_expand "fix_truncdfsi2"
3513 [(set (match_operand:SI 0 "s_register_operand" "")
3514 (fix:SI (fix:DF (match_operand:DF 1 "s_register_operand" ""))))]
3515 "TARGET_32BIT && TARGET_HARD_FLOAT"
3517 if (TARGET_MAVERICK)
3519 if (!cirrus_fp_register (operands[1], DFmode))
3520 operands[1] = force_reg (DFmode, operands[1]);
3521 emit_insn (gen_cirrus_truncdfsi2 (operands[0], operands[1]));
;; NOTE(review): partial dump — interior lines missing; comments only added.
;; double -> float narrowing expander (hard-float only).
3528 (define_expand "truncdfsf2"
3529 [(set (match_operand:SF 0 "s_register_operand" "")
3531 (match_operand:DF 1 "s_register_operand" "")))]
3532 "TARGET_32BIT && TARGET_HARD_FLOAT"
;; NOTE(review): partial dump — interior lines missing; comments only added.
3536 ;; Zero and sign extension instructions.
;; SI -> DI zero-extend: mov the low word (skipped when already in
;; place) then clear the high word.
3538 (define_expand "zero_extendsidi2"
3539 [(set (match_operand:DI 0 "s_register_operand" "")
3540 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "")))]
3545 (define_insn "*arm_zero_extendsidi2"
3546 [(set (match_operand:DI 0 "s_register_operand" "=r")
3547 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r")))]
3550 if (REGNO (operands[1])
3551 != REGNO (operands[0]) + (WORDS_BIG_ENDIAN ? 1 : 0))
3552 output_asm_insn (\"mov%?\\t%Q0, %1\", operands);
3553 return \"mov%?\\t%R0, #0\";
3555 [(set_attr "length" "8")
3556 (set_attr "predicable" "yes")]
;; QI -> DI zero-extend: and #255 (register) or ldrb (memory), then
;; clear the high word.
3559 (define_expand "zero_extendqidi2"
3560 [(set (match_operand:DI 0 "s_register_operand" "")
3561 (zero_extend:DI (match_operand:QI 1 "nonimmediate_operand" "")))]
3566 (define_insn "*arm_zero_extendqidi2"
3567 [(set (match_operand:DI 0 "s_register_operand" "=r,r")
3568 (zero_extend:DI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
3571 and%?\\t%Q0, %1, #255\;mov%?\\t%R0, #0
3572 ldr%(b%)\\t%Q0, %1\;mov%?\\t%R0, #0"
3573 [(set_attr "length" "8")
3574 (set_attr "predicable" "yes")
3575 (set_attr "type" "*,load_byte")
3576 (set_attr "pool_range" "*,4092")
3577 (set_attr "neg_pool_range" "*,4084")]
;; SI -> DI sign-extend: mov low word, then asr #31 into the high word.
3580 (define_expand "extendsidi2"
3581 [(set (match_operand:DI 0 "s_register_operand" "")
3582 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "")))]
3587 (define_insn "*arm_extendsidi2"
3588 [(set (match_operand:DI 0 "s_register_operand" "=r")
3589 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r")))]
3592 if (REGNO (operands[1])
3593 != REGNO (operands[0]) + (WORDS_BIG_ENDIAN ? 1 : 0))
3594 output_asm_insn (\"mov%?\\t%Q0, %1\", operands);
3595 return \"mov%?\\t%R0, %Q0, asr #31\";
3597 [(set_attr "length" "8")
3598 (set_attr "shift" "1")
3599 (set_attr "predicable" "yes")]
;; NOTE(review): partial dump — interior lines missing; comments only added.
;;
;; HI -> SI zero-extend expander.  With ldrh available (Thumb-1 or
;; arm_arch4) a memory source uses a direct zero_extend; older ARM
;; loads via movhi_bytes; otherwise falls back to a shift pair.
3602 (define_expand "zero_extendhisi2"
3604 (ashift:SI (match_operand:HI 1 "nonimmediate_operand" "")
3606 (set (match_operand:SI 0 "s_register_operand" "")
3607 (lshiftrt:SI (match_dup 2) (const_int 16)))]
3611 if ((TARGET_THUMB1 || arm_arch4) && GET_CODE (operands[1]) == MEM)
3613 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
3614 gen_rtx_ZERO_EXTEND (SImode, operands[1])));
3618 if (TARGET_ARM && GET_CODE (operands[1]) == MEM)
3620 emit_insn (gen_movhi_bytes (operands[0], operands[1]));
3624 if (!s_register_operand (operands[1], HImode))
3625 operands[1] = copy_to_mode_reg (HImode, operands[1]);
3629 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
3630 gen_rtx_ZERO_EXTEND (SImode, operands[1])));
3634 operands[1] = gen_lowpart (SImode, operands[1]);
3635 operands[2] = gen_reg_rtx (SImode);
;; Thumb-1 ldrh from memory (pre-v6).  Special-cases literal-pool
;; (LABEL_REF) addresses and works around SP-relative addresses that
;; reload can produce (see comment in the body).
3639 (define_insn "*thumb1_zero_extendhisi2"
3640 [(set (match_operand:SI 0 "register_operand" "=l")
3641 (zero_extend:SI (match_operand:HI 1 "memory_operand" "m")))]
3642 "TARGET_THUMB1 && !arm_arch6"
3644 rtx mem = XEXP (operands[1], 0);
3646 if (GET_CODE (mem) == CONST)
3647 mem = XEXP (mem, 0);
3649 if (GET_CODE (mem) == LABEL_REF)
3650 return \"ldr\\t%0, %1\";
3652 if (GET_CODE (mem) == PLUS)
3654 rtx a = XEXP (mem, 0);
3655 rtx b = XEXP (mem, 1);
3657 /* This can happen due to bugs in reload. */
3658 if (GET_CODE (a) == REG && REGNO (a) == SP_REGNUM)
3661 ops[0] = operands[0];
3664 output_asm_insn (\"mov %0, %1\", ops);
3666 XEXP (mem, 0) = operands[0];
3669 else if ( GET_CODE (a) == LABEL_REF
3670 && GET_CODE (b) == CONST_INT)
3671 return \"ldr\\t%0, %1\";
3674 return \"ldrh\\t%0, %1\";
3676 [(set_attr "length" "4")
3677 (set_attr "type" "load_byte")
3678 (set_attr "pool_range" "60")]
;; Thumb-1 v6 variant: uxth for a register source, otherwise the same
;; memory handling as the pre-v6 pattern above.
3681 (define_insn "*thumb1_zero_extendhisi2_v6"
3682 [(set (match_operand:SI 0 "register_operand" "=l,l")
3683 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "l,m")))]
3684 "TARGET_THUMB1 && arm_arch6"
3688 if (which_alternative == 0)
3689 return \"uxth\\t%0, %1\";
3691 mem = XEXP (operands[1], 0);
3693 if (GET_CODE (mem) == CONST)
3694 mem = XEXP (mem, 0);
3696 if (GET_CODE (mem) == LABEL_REF)
3697 return \"ldr\\t%0, %1\";
3699 if (GET_CODE (mem) == PLUS)
3701 rtx a = XEXP (mem, 0);
3702 rtx b = XEXP (mem, 1);
3704 /* This can happen due to bugs in reload. */
3705 if (GET_CODE (a) == REG && REGNO (a) == SP_REGNUM)
3708 ops[0] = operands[0];
3711 output_asm_insn (\"mov %0, %1\", ops);
3713 XEXP (mem, 0) = operands[0];
3716 else if ( GET_CODE (a) == LABEL_REF
3717 && GET_CODE (b) == CONST_INT)
3718 return \"ldr\\t%0, %1\";
3721 return \"ldrh\\t%0, %1\";
3723 [(set_attr "length" "2,4")
3724 (set_attr "type" "alu_shift,load_byte")
3725 (set_attr "pool_range" "*,60")]
;; ARM-state HI zero-extend patterns (ldrh / uxth) and the
;; uxtah accumulate form.
3728 (define_insn "*arm_zero_extendhisi2"
3729 [(set (match_operand:SI 0 "s_register_operand" "=r")
3730 (zero_extend:SI (match_operand:HI 1 "memory_operand" "m")))]
3731 "TARGET_ARM && arm_arch4 && !arm_arch6"
3733 [(set_attr "type" "load_byte")
3734 (set_attr "predicable" "yes")
3735 (set_attr "pool_range" "256")
3736 (set_attr "neg_pool_range" "244")]
3739 (define_insn "*arm_zero_extendhisi2_v6"
3740 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3741 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
3742 "TARGET_ARM && arm_arch6"
3746 [(set_attr "type" "alu_shift,load_byte")
3747 (set_attr "predicable" "yes")
3748 (set_attr "pool_range" "*,256")
3749 (set_attr "neg_pool_range" "*,244")]
3752 (define_insn "*arm_zero_extendhisi2addsi"
3753 [(set (match_operand:SI 0 "s_register_operand" "=r")
3754 (plus:SI (zero_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
3755 (match_operand:SI 2 "s_register_operand" "r")))]
3757 "uxtah%?\\t%0, %2, %1"
3758 [(set_attr "type" "alu_shift")
3759 (set_attr "predicable" "yes")]
;; NOTE(review): partial dump — interior lines missing; comments only added.
;;
;; QI -> SI zero-extend expander: pre-v6 register sources use and #255
;; (ARM) or an lsl/lsr #24 pair through a temp (Thumb).
3762 (define_expand "zero_extendqisi2"
3763 [(set (match_operand:SI 0 "s_register_operand" "")
3764 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "")))]
3767 if (!arm_arch6 && GET_CODE (operands[1]) != MEM)
3771 emit_insn (gen_andsi3 (operands[0],
3772 gen_lowpart (SImode, operands[1]),
3775 else /* TARGET_THUMB */
3777 rtx temp = gen_reg_rtx (SImode);
3780 operands[1] = copy_to_mode_reg (QImode, operands[1]);
3781 operands[1] = gen_lowpart (SImode, operands[1]);
3784 ops[1] = operands[1];
3785 ops[2] = GEN_INT (24);
3787 emit_insn (gen_rtx_SET (VOIDmode, ops[0],
3788 gen_rtx_ASHIFT (SImode, ops[1], ops[2])));
3790 ops[0] = operands[0];
3792 ops[2] = GEN_INT (24);
3794 emit_insn (gen_rtx_SET (VOIDmode, ops[0],
3795 gen_rtx_LSHIFTRT (SImode, ops[1], ops[2])));
;; Thumb-1 and ARM byte-load / uxtb implementations.
3802 (define_insn "*thumb1_zero_extendqisi2"
3803 [(set (match_operand:SI 0 "register_operand" "=l")
3804 (zero_extend:SI (match_operand:QI 1 "memory_operand" "m")))]
3805 "TARGET_THUMB1 && !arm_arch6"
3807 [(set_attr "length" "2")
3808 (set_attr "type" "load_byte")
3809 (set_attr "pool_range" "32")]
3812 (define_insn "*thumb1_zero_extendqisi2_v6"
3813 [(set (match_operand:SI 0 "register_operand" "=l,l")
3814 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,m")))]
3815 "TARGET_THUMB1 && arm_arch6"
3819 [(set_attr "length" "2,2")
3820 (set_attr "type" "alu_shift,load_byte")
3821 (set_attr "pool_range" "*,32")]
3824 (define_insn "*arm_zero_extendqisi2"
3825 [(set (match_operand:SI 0 "s_register_operand" "=r")
3826 (zero_extend:SI (match_operand:QI 1 "memory_operand" "m")))]
3827 "TARGET_ARM && !arm_arch6"
3828 "ldr%(b%)\\t%0, %1\\t%@ zero_extendqisi2"
3829 [(set_attr "type" "load_byte")
3830 (set_attr "predicable" "yes")
3831 (set_attr "pool_range" "4096")
3832 (set_attr "neg_pool_range" "4084")]
3835 (define_insn "*arm_zero_extendqisi2_v6"
3836 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3837 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
3838 "TARGET_ARM && arm_arch6"
3841 ldr%(b%)\\t%0, %1\\t%@ zero_extendqisi2"
3842 [(set_attr "type" "alu_shift,load_byte")
3843 (set_attr "predicable" "yes")
3844 (set_attr "pool_range" "*,4096")
3845 (set_attr "neg_pool_range" "*,4084")]
;; uxtab accumulate form: zero-extend byte and add.
3848 (define_insn "*arm_zero_extendqisi2addsi"
3849 [(set (match_operand:SI 0 "s_register_operand" "=r")
3850 (plus:SI (zero_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
3851 (match_operand:SI 2 "s_register_operand" "r")))]
3853 "uxtab%?\\t%0, %2, %1"
3854 [(set_attr "predicable" "yes")
3855 (set_attr "type" "alu_shift")]
;; Anonymous splits: zero-extend of the low byte of a non-memory SImode
;; value becomes copy + and #255 (byte 0 little-endian, byte 3 big-endian).
3859 [(set (match_operand:SI 0 "s_register_operand" "")
3860 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 0)))
3861 (clobber (match_operand:SI 2 "s_register_operand" ""))]
3862 "TARGET_32BIT && (GET_CODE (operands[1]) != MEM) && ! BYTES_BIG_ENDIAN"
3863 [(set (match_dup 2) (match_dup 1))
3864 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
3869 [(set (match_operand:SI 0 "s_register_operand" "")
3870 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 3)))
3871 (clobber (match_operand:SI 2 "s_register_operand" ""))]
3872 "TARGET_32BIT && (GET_CODE (operands[1]) != MEM) && BYTES_BIG_ENDIAN"
3873 [(set (match_dup 2) (match_dup 1))
3874 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
;; Compare a QImode register against zero (CC_Z mode).
3878 (define_insn "*compareqi_eq0"
3879 [(set (reg:CC_Z CC_REGNUM)
3880 (compare:CC_Z (match_operand:QI 0 "s_register_operand" "r")
3884 [(set_attr "conds" "set")]
;; NOTE(review): partial dump — interior lines missing; comments only added.
;;
;; HI -> SI sign-extend expander.  Memory sources go through
;; thumb1_extendhisi2, a direct sign_extend, or extendhisi2_mem
;; depending on target/arch; otherwise the ashift/ashiftrt fallback.
3887 (define_expand "extendhisi2"
3889 (ashift:SI (match_operand:HI 1 "nonimmediate_operand" "")
3891 (set (match_operand:SI 0 "s_register_operand" "")
3892 (ashiftrt:SI (match_dup 2)
3897 if (GET_CODE (operands[1]) == MEM)
3901 emit_insn (gen_thumb1_extendhisi2 (operands[0], operands[1]));
3906 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
3907 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
3912 if (TARGET_ARM && GET_CODE (operands[1]) == MEM)
3914 emit_insn (gen_extendhisi2_mem (operands[0], operands[1]));
3918 if (!s_register_operand (operands[1], HImode))
3919 operands[1] = copy_to_mode_reg (HImode, operands[1]);
3924 emit_insn (gen_thumb1_extendhisi2 (operands[0], operands[1]));
3926 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
3927 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
3932 operands[1] = gen_lowpart (SImode, operands[1]);
3933 operands[2] = gen_reg_rtx (SImode);
;; Thumb-1 ldrsh (pre-v6); ldrsh only takes reg+reg addressing, so a
;; reg+const address is rebuilt via the scratch (mov then ldrsh).
3937 (define_insn "thumb1_extendhisi2"
3938 [(set (match_operand:SI 0 "register_operand" "=l")
3939 (sign_extend:SI (match_operand:HI 1 "memory_operand" "m")))
3940 (clobber (match_scratch:SI 2 "=&l"))]
3941 "TARGET_THUMB1 && !arm_arch6"
3945 rtx mem = XEXP (operands[1], 0);
3947 /* This code used to try to use 'V', and fix the address only if it was
3948 offsettable, but this fails for e.g. REG+48 because 48 is outside the
3949 range of QImode offsets, and offsettable_address_p does a QImode
3952 if (GET_CODE (mem) == CONST)
3953 mem = XEXP (mem, 0);
3955 if (GET_CODE (mem) == LABEL_REF)
3956 return \"ldr\\t%0, %1\";
3958 if (GET_CODE (mem) == PLUS)
3960 rtx a = XEXP (mem, 0);
3961 rtx b = XEXP (mem, 1);
3963 if (GET_CODE (a) == LABEL_REF
3964 && GET_CODE (b) == CONST_INT)
3965 return \"ldr\\t%0, %1\";
3967 if (GET_CODE (b) == REG)
3968 return \"ldrsh\\t%0, %1\";
3976 ops[2] = const0_rtx;
3979 gcc_assert (GET_CODE (ops[1]) == REG);
3981 ops[0] = operands[0];
3982 ops[3] = operands[2];
3983 output_asm_insn (\"mov\\t%3, %2\;ldrsh\\t%0, [%1, %3]\", ops);
3986 [(set_attr "length" "4")
3987 (set_attr "type" "load_byte")
3988 (set_attr "pool_range" "1020")]
;; NOTE(review): partial dump — interior lines missing; comments only added.
3991 ;; We used to have an early-clobber on the scratch register here.
3992 ;; However, there's a bug somewhere in reload which means that this
3993 ;; can be partially ignored during spill allocation if the memory
3994 ;; address also needs reloading; this causes us to die later on when
3995 ;; we try to verify the operands. Fortunately, we don't really need
3996 ;; the early-clobber: we can always use operand 0 if operand 2
3997 ;; overlaps the address.
;; Thumb-1 v6 HI sign-extend: sxth for a register source; memory
;; sources mirror thumb1_extendhisi2, falling back to operand 0 as the
;; index register when the scratch overlaps the address.
3998 (define_insn "*thumb1_extendhisi2_insn_v6"
3999 [(set (match_operand:SI 0 "register_operand" "=l,l")
4000 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "l,m")))
4001 (clobber (match_scratch:SI 2 "=X,l"))]
4002 "TARGET_THUMB1 && arm_arch6"
4008 if (which_alternative == 0)
4009 return \"sxth\\t%0, %1\";
4011 mem = XEXP (operands[1], 0);
4013 /* This code used to try to use 'V', and fix the address only if it was
4014 offsettable, but this fails for e.g. REG+48 because 48 is outside the
4015 range of QImode offsets, and offsettable_address_p does a QImode
4018 if (GET_CODE (mem) == CONST)
4019 mem = XEXP (mem, 0);
4021 if (GET_CODE (mem) == LABEL_REF)
4022 return \"ldr\\t%0, %1\";
4024 if (GET_CODE (mem) == PLUS)
4026 rtx a = XEXP (mem, 0);
4027 rtx b = XEXP (mem, 1);
4029 if (GET_CODE (a) == LABEL_REF
4030 && GET_CODE (b) == CONST_INT)
4031 return \"ldr\\t%0, %1\";
4033 if (GET_CODE (b) == REG)
4034 return \"ldrsh\\t%0, %1\";
4042 ops[2] = const0_rtx;
4045 gcc_assert (GET_CODE (ops[1]) == REG);
4047 ops[0] = operands[0];
4048 if (reg_mentioned_p (operands[2], ops[1]))
4051 ops[3] = operands[2];
4052 output_asm_insn (\"mov\\t%3, %2\;ldrsh\\t%0, [%1, %3]\", ops);
4055 [(set_attr "length" "2,4")
4056 (set_attr "type" "alu_shift,load_byte")
4057 (set_attr "pool_range" "*,1020")]
;; NOTE(review): partial dump — interior lines missing; comments only added.
4060 ;; This pattern will only be used when ldsh is not available
;; Synthesize a signed halfword load from two byte loads, shifting the
;; high byte through bit 31 to sign-extend; byte order is swapped via
;; operands 4/5 for big-endian.
4061 (define_expand "extendhisi2_mem"
4062 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
4064 (zero_extend:SI (match_dup 7)))
4065 (set (match_dup 6) (ashift:SI (match_dup 4) (const_int 24)))
4066 (set (match_operand:SI 0 "" "")
4067 (ior:SI (ashiftrt:SI (match_dup 6) (const_int 16)) (match_dup 5)))]
4072 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
4074 mem1 = change_address (operands[1], QImode, addr);
4075 mem2 = change_address (operands[1], QImode, plus_constant (addr, 1));
4076 operands[0] = gen_lowpart (SImode, operands[0]);
4078 operands[2] = gen_reg_rtx (SImode);
4079 operands[3] = gen_reg_rtx (SImode);
4080 operands[6] = gen_reg_rtx (SImode);
4083 if (BYTES_BIG_ENDIAN)
4085 operands[4] = operands[2];
4086 operands[5] = operands[3];
4090 operands[4] = operands[3];
4091 operands[5] = operands[2];
;; ARM-state ldrsh / sxth patterns and the sxtah accumulate form.
4096 (define_insn "*arm_extendhisi2"
4097 [(set (match_operand:SI 0 "s_register_operand" "=r")
4098 (sign_extend:SI (match_operand:HI 1 "memory_operand" "m")))]
4099 "TARGET_ARM && arm_arch4 && !arm_arch6"
4100 "ldr%(sh%)\\t%0, %1"
4101 [(set_attr "type" "load_byte")
4102 (set_attr "predicable" "yes")
4103 (set_attr "pool_range" "256")
4104 (set_attr "neg_pool_range" "244")]
4107 ;; ??? Check Thumb-2 pool range
4108 (define_insn "*arm_extendhisi2_v6"
4109 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4110 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
4111 "TARGET_32BIT && arm_arch6"
4115 [(set_attr "type" "alu_shift,load_byte")
4116 (set_attr "predicable" "yes")
4117 (set_attr "pool_range" "*,256")
4118 (set_attr "neg_pool_range" "*,244")]
4121 (define_insn "*arm_extendhisi2addsi"
4122 [(set (match_operand:SI 0 "s_register_operand" "=r")
4123 (plus:SI (sign_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
4124 (match_operand:SI 2 "s_register_operand" "r")))]
4126 "sxtah%?\\t%0, %2, %1"
;; NOTE(review): partial dump — interior lines missing; comments only added.
;;
;; QI -> HI sign-extend: ldrsb for arch4 memory sources, otherwise the
;; shift-pair fallback on SImode lowparts.
4129 (define_expand "extendqihi2"
4131 (ashift:SI (match_operand:QI 1 "general_operand" "")
4133 (set (match_operand:HI 0 "s_register_operand" "")
4134 (ashiftrt:SI (match_dup 2)
4139 if (arm_arch4 && GET_CODE (operands[1]) == MEM)
4141 emit_insn (gen_rtx_SET (VOIDmode,
4143 gen_rtx_SIGN_EXTEND (HImode, operands[1])));
4146 if (!s_register_operand (operands[1], QImode))
4147 operands[1] = copy_to_mode_reg (QImode, operands[1]);
4148 operands[0] = gen_lowpart (SImode, operands[0]);
4149 operands[1] = gen_lowpart (SImode, operands[1]);
4150 operands[2] = gen_reg_rtx (SImode);
;; ARM ldrsb for QI -> HI ("Uq" = address form valid for ldrsb).
4154 (define_insn "*arm_extendqihi_insn"
4155 [(set (match_operand:HI 0 "s_register_operand" "=r")
4156 (sign_extend:HI (match_operand:QI 1 "memory_operand" "Uq")))]
4157 "TARGET_ARM && arm_arch4"
4158 "ldr%(sb%)\\t%0, %1"
4159 [(set_attr "type" "load_byte")
4160 (set_attr "predicable" "yes")
4161 (set_attr "pool_range" "256")
4162 (set_attr "neg_pool_range" "244")]
;; QI -> SI sign-extend expander; same structure as extendqihi2.
4165 (define_expand "extendqisi2"
4167 (ashift:SI (match_operand:QI 1 "general_operand" "")
4169 (set (match_operand:SI 0 "s_register_operand" "")
4170 (ashiftrt:SI (match_dup 2)
4175 if ((TARGET_THUMB || arm_arch4) && GET_CODE (operands[1]) == MEM)
4177 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
4178 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
4182 if (!s_register_operand (operands[1], QImode))
4183 operands[1] = copy_to_mode_reg (QImode, operands[1]);
4187 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
4188 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
4192 operands[1] = gen_lowpart (SImode, operands[1]);
4193 operands[2] = gen_reg_rtx (SImode);
;; ARM ldrsb / sxtb patterns and the sxtab accumulate form.
4197 (define_insn "*arm_extendqisi"
4198 [(set (match_operand:SI 0 "s_register_operand" "=r")
4199 (sign_extend:SI (match_operand:QI 1 "memory_operand" "Uq")))]
4200 "TARGET_ARM && arm_arch4 && !arm_arch6"
4201 "ldr%(sb%)\\t%0, %1"
4202 [(set_attr "type" "load_byte")
4203 (set_attr "predicable" "yes")
4204 (set_attr "pool_range" "256")
4205 (set_attr "neg_pool_range" "244")]
4208 (define_insn "*arm_extendqisi_v6"
4209 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4210 (sign_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,Uq")))]
4211 "TARGET_ARM && arm_arch6"
4215 [(set_attr "type" "alu_shift,load_byte")
4216 (set_attr "predicable" "yes")
4217 (set_attr "pool_range" "*,256")
4218 (set_attr "neg_pool_range" "*,244")]
4221 (define_insn "*arm_extendqisi2addsi"
4222 [(set (match_operand:SI 0 "s_register_operand" "=r")
4223 (plus:SI (sign_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
4224 (match_operand:SI 2 "s_register_operand" "r")))]
4226 "sxtab%?\\t%0, %2, %1"
4227 [(set_attr "type" "alu_shift")
4228 (set_attr "predicable" "yes")]
;; NOTE(review): partial dump — interior lines missing; comments only added.
;;
;; Thumb-1 signed byte load (pre-v6).  ldrsb only supports reg+reg
;; addressing, so other address shapes are rewritten: when the
;; destination overlaps the base, ldrb plus lsl/asr #24 re-signs the
;; byte; otherwise the offset is moved into a register first.
4231 (define_insn "*thumb1_extendqisi2"
4232 [(set (match_operand:SI 0 "register_operand" "=l,l")
4233 (sign_extend:SI (match_operand:QI 1 "memory_operand" "V,m")))]
4234 "TARGET_THUMB1 && !arm_arch6"
4238 rtx mem = XEXP (operands[1], 0);
4240 if (GET_CODE (mem) == CONST)
4241 mem = XEXP (mem, 0);
4243 if (GET_CODE (mem) == LABEL_REF)
4244 return \"ldr\\t%0, %1\";
4246 if (GET_CODE (mem) == PLUS
4247 && GET_CODE (XEXP (mem, 0)) == LABEL_REF)
4248 return \"ldr\\t%0, %1\";
4250 if (which_alternative == 0)
4251 return \"ldrsb\\t%0, %1\";
4253 ops[0] = operands[0];
4255 if (GET_CODE (mem) == PLUS)
4257 rtx a = XEXP (mem, 0);
4258 rtx b = XEXP (mem, 1);
4263 if (GET_CODE (a) == REG)
4265 if (GET_CODE (b) == REG)
4266 output_asm_insn (\"ldrsb\\t%0, [%1, %2]\", ops);
4267 else if (REGNO (a) == REGNO (ops[0]))
4269 output_asm_insn (\"ldrb\\t%0, [%1, %2]\", ops);
4270 output_asm_insn (\"lsl\\t%0, %0, #24\", ops);
4271 output_asm_insn (\"asr\\t%0, %0, #24\", ops);
4274 output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
4278 gcc_assert (GET_CODE (b) == REG);
4279 if (REGNO (b) == REGNO (ops[0]))
4281 output_asm_insn (\"ldrb\\t%0, [%2, %1]\", ops);
4282 output_asm_insn (\"lsl\\t%0, %0, #24\", ops);
4283 output_asm_insn (\"asr\\t%0, %0, #24\", ops);
4286 output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
4289 else if (GET_CODE (mem) == REG && REGNO (ops[0]) == REGNO (mem))
4291 output_asm_insn (\"ldrb\\t%0, [%0, #0]\", ops);
4292 output_asm_insn (\"lsl\\t%0, %0, #24\", ops);
4293 output_asm_insn (\"asr\\t%0, %0, #24\", ops);
4298 ops[2] = const0_rtx;
4300 output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
4304 [(set_attr "length" "2,6")
4305 (set_attr "type" "load_byte,load_byte")
4306 (set_attr "pool_range" "32,32")]
;; NOTE(review): partial dump — interior lines missing; comments only added.
;;
;; Thumb-1 v6 signed byte load: sxtb for a register source; memory
;; handling mirrors the pre-v6 pattern but uses sxtb instead of the
;; lsl/asr #24 pair to re-sign the byte.
4309 (define_insn "*thumb1_extendqisi2_v6"
4310 [(set (match_operand:SI 0 "register_operand" "=l,l,l")
4311 (sign_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,V,m")))]
4312 "TARGET_THUMB1 && arm_arch6"
4318 if (which_alternative == 0)
4319 return \"sxtb\\t%0, %1\";
4321 mem = XEXP (operands[1], 0);
4323 if (GET_CODE (mem) == CONST)
4324 mem = XEXP (mem, 0);
4326 if (GET_CODE (mem) == LABEL_REF)
4327 return \"ldr\\t%0, %1\";
4329 if (GET_CODE (mem) == PLUS
4330 && GET_CODE (XEXP (mem, 0)) == LABEL_REF)
4331 return \"ldr\\t%0, %1\";
4333 if (which_alternative == 0)
4334 return \"ldrsb\\t%0, %1\";
4336 ops[0] = operands[0];
4338 if (GET_CODE (mem) == PLUS)
4340 rtx a = XEXP (mem, 0);
4341 rtx b = XEXP (mem, 1);
4346 if (GET_CODE (a) == REG)
4348 if (GET_CODE (b) == REG)
4349 output_asm_insn (\"ldrsb\\t%0, [%1, %2]\", ops);
4350 else if (REGNO (a) == REGNO (ops[0]))
4352 output_asm_insn (\"ldrb\\t%0, [%1, %2]\", ops);
4353 output_asm_insn (\"sxtb\\t%0, %0\", ops);
4356 output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
4360 gcc_assert (GET_CODE (b) == REG);
4361 if (REGNO (b) == REGNO (ops[0]))
4363 output_asm_insn (\"ldrb\\t%0, [%2, %1]\", ops);
4364 output_asm_insn (\"sxtb\\t%0, %0\", ops);
4367 output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
4370 else if (GET_CODE (mem) == REG && REGNO (ops[0]) == REGNO (mem))
4372 output_asm_insn (\"ldrb\\t%0, [%0, #0]\", ops);
4373 output_asm_insn (\"sxtb\\t%0, %0\", ops);
4378 ops[2] = const0_rtx;
4380 output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
4384 [(set_attr "length" "2,2,4")
4385 (set_attr "type" "alu_shift,load_byte,load_byte")
4386 (set_attr "pool_range" "*,32,32")]
;; extendsfdf2: standard-named expander for widening SFmode to DFmode,
;; enabled for 32-bit targets with hardware floating point.  (The body
;; and closing of this expander are elided in this copy of the file.)
4389 (define_expand "extendsfdf2"
4390 [(set (match_operand:DF 0 "s_register_operand" "")
4391 (float_extend:DF (match_operand:SF 1 "s_register_operand" "")))]
4392 "TARGET_32BIT && TARGET_HARD_FLOAT"
4396 ;; Move insns (including loads and stores)
4398 ;; XXX Just some ideas about movti.
4399 ;; I don't think these are a good idea on the arm, there just aren't enough
4401 ;;(define_expand "loadti"
4402 ;; [(set (match_operand:TI 0 "s_register_operand" "")
4403 ;; (mem:TI (match_operand:SI 1 "address_operand" "")))]
4406 ;;(define_expand "storeti"
4407 ;; [(set (mem:TI (match_operand:TI 0 "address_operand" ""))
4408 ;; (match_operand:TI 1 "s_register_operand" ""))]
4411 ;;(define_expand "movti"
4412 ;; [(set (match_operand:TI 0 "general_operand" "")
4413 ;; (match_operand:TI 1 "general_operand" ""))]
4419 ;; if (GET_CODE (operands[0]) == MEM && GET_CODE (operands[1]) == MEM)
4420 ;; operands[1] = copy_to_reg (operands[1]);
4421 ;; if (GET_CODE (operands[0]) == MEM)
4422 ;; insn = gen_storeti (XEXP (operands[0], 0), operands[1]);
4423 ;; else if (GET_CODE (operands[1]) == MEM)
4424 ;; insn = gen_loadti (operands[0], XEXP (operands[1], 0));
4428 ;; emit_insn (insn);
4432 ;; Recognize garbage generated above.
4435 ;; [(set (match_operand:TI 0 "general_operand" "=r,r,r,<,>,m")
4436 ;; (match_operand:TI 1 "general_operand" "<,>,m,r,r,r"))]
4440 ;; register mem = (which_alternative < 3);
4441 ;; register const char *template;
4443 ;; operands[mem] = XEXP (operands[mem], 0);
4444 ;; switch (which_alternative)
4446 ;; case 0: template = \"ldmdb\\t%1!, %M0\"; break;
4447 ;; case 1: template = \"ldmia\\t%1!, %M0\"; break;
4448 ;; case 2: template = \"ldmia\\t%1, %M0\"; break;
4449 ;; case 3: template = \"stmdb\\t%0!, %M1\"; break;
4450 ;; case 4: template = \"stmia\\t%0!, %M1\"; break;
4451 ;; case 5: template = \"stmia\\t%0, %M1\"; break;
4453 ;; output_asm_insn (template, operands);
;; movdi: standard DImode move expander.  When new pseudos are allowed
;; and the destination is not a register, the source is forced into a
;; register first.
;; *arm_movdi (below): ARM-mode DImode move for core registers only
;; (excluded when Maverick/VFP hard float would handle it); most
;; alternatives defer to output_move_double, with lengths 8-16 bytes
;; and literal-pool ranges for the load alternative.
4457 (define_expand "movdi"
4458 [(set (match_operand:DI 0 "general_operand" "")
4459 (match_operand:DI 1 "general_operand" ""))]
4462 if (!no_new_pseudos)
4464 if (GET_CODE (operands[0]) != REG)
4465 operands[1] = force_reg (DImode, operands[1]);
4470 (define_insn "*arm_movdi"
4471 [(set (match_operand:DI 0 "nonimmediate_di_operand" "=r, r, r, r, m")
4472 (match_operand:DI 1 "di_operand" "rDa,Db,Dc,mi,r"))]
4474 && !(TARGET_HARD_FLOAT && (TARGET_MAVERICK || TARGET_VFP))
4476 && ( register_operand (operands[0], DImode)
4477 || register_operand (operands[1], DImode))"
4479 switch (which_alternative)
4486 return output_move_double (operands);
4489 [(set_attr "length" "8,12,16,8,8")
4490 (set_attr "type" "*,*,*,load2,store2")
4491 (set_attr "pool_range" "*,*,*,1020,*")
4492 (set_attr "neg_pool_range" "*,*,*,1008,*")]
;; Three splitters for 64-bit (ANY64) moves.  Note: the `(define_split`
;; header lines themselves are elided in this copy of the file.
;; 1) Inline an ANY64 const_double whose inline cost is low enough,
;;    by splitting it into two SImode arm_split_constant calls (low
;;    and high halves).
;; 2) Split an ANY64 const_double into two SImode sets by parts
;;    (see the rationale comments embedded below).
;; 3) Split a reg-to-reg ANY64 move into two SImode moves, swapping
;;    the order when the low destination overlaps the high source.
4496 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
4497 (match_operand:ANY64 1 "const_double_operand" ""))]
4500 && (arm_const_double_inline_cost (operands[1])
4501 <= ((optimize_size || arm_ld_sched) ? 3 : 4))"
4504 arm_split_constant (SET, SImode, curr_insn,
4505 INTVAL (gen_lowpart (SImode, operands[1])),
4506 gen_lowpart (SImode, operands[0]), NULL_RTX, 0);
4507 arm_split_constant (SET, SImode, curr_insn,
4508 INTVAL (gen_highpart_mode (SImode,
4509 GET_MODE (operands[0]),
4511 gen_highpart (SImode, operands[0]), NULL_RTX, 0);
4516 ; If optimizing for size, or if we have load delay slots, then
4517 ; we want to split the constant into two separate operations.
4518 ; In both cases this may split a trivial part into a single data op
4519 ; leaving a single complex constant to load. We can also get longer
4520 ; offsets in a LDR which means we get better chances of sharing the pool
4521 ; entries. Finally, we can normally do a better job of scheduling
4522 ; LDR instructions than we can with LDM.
4523 ; This pattern will only match if the one above did not.
4525 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
4526 (match_operand:ANY64 1 "const_double_operand" ""))]
4527 "TARGET_ARM && reload_completed
4528 && arm_const_double_by_parts (operands[1])"
4529 [(set (match_dup 0) (match_dup 1))
4530 (set (match_dup 2) (match_dup 3))]
4532 operands[2] = gen_highpart (SImode, operands[0]);
4533 operands[3] = gen_highpart_mode (SImode, GET_MODE (operands[0]),
4535 operands[0] = gen_lowpart (SImode, operands[0]);
4536 operands[1] = gen_lowpart (SImode, operands[1]);
4541 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
4542 (match_operand:ANY64 1 "arm_general_register_operand" ""))]
4543 "TARGET_EITHER && reload_completed"
4544 [(set (match_dup 0) (match_dup 1))
4545 (set (match_dup 2) (match_dup 3))]
4547 operands[2] = gen_highpart (SImode, operands[0]);
4548 operands[3] = gen_highpart (SImode, operands[1]);
4549 operands[0] = gen_lowpart (SImode, operands[0]);
4550 operands[1] = gen_lowpart (SImode, operands[1]);
4552 /* Handle a partial overlap. */
4553 if (rtx_equal_p (operands[0], operands[3]))
4555 rtx tmp0 = operands[0];
4556 rtx tmp1 = operands[1];
4558 operands[0] = operands[2];
4559 operands[1] = operands[3];
;; Splitter (its `(define_split` header line is elided here) for a
;; DImode load from a base+index address where the destination register
;; pair overlaps BOTH address registers: the address is computed into
;; the low destination register first, then used for the load.
4566 ;; We can't actually do base+index doubleword loads if the index and
4567 ;; destination overlap. Split here so that we at least have chance to
4570 [(set (match_operand:DI 0 "s_register_operand" "")
4571 (mem:DI (plus:SI (match_operand:SI 1 "s_register_operand" "")
4572 (match_operand:SI 2 "s_register_operand" ""))))]
4574 && reg_overlap_mentioned_p (operands[0], operands[1])
4575 && reg_overlap_mentioned_p (operands[0], operands[2])"
4577 (plus:SI (match_dup 1)
4580 (mem:DI (match_dup 4)))]
4582 operands[4] = gen_rtx_REG (SImode, REGNO(operands[0]));
;; *thumb1_movdi_insn: Thumb-1 DImode move for core registers.  Emits
;; register-pair add/mov sequences (ordered to cope with overlapping
;; register pairs), small-immediate and negated-immediate expansions,
;; ldmia/stmia for register-indirect memory, and two-str sequences for
;; general stores; constant-pool loads go through
;; thumb_load_double_from_address.
4586 ;;; ??? This should have alternatives for constants.
4587 ;;; ??? This was originally identical to the movdf_insn pattern.
4588 ;;; ??? The 'i' constraint looks funny, but it should always be replaced by
4589 ;;; thumb_reorg with a memory reference.
4590 (define_insn "*thumb1_movdi_insn"
4591 [(set (match_operand:DI 0 "nonimmediate_operand" "=l,l,l,l,>,l, m,*r")
4592 (match_operand:DI 1 "general_operand" "l, I,J,>,l,mi,l,*r"))]
4594 && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)
4595 && ( register_operand (operands[0], DImode)
4596 || register_operand (operands[1], DImode))"
4599 switch (which_alternative)
4603 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
4604 return \"add\\t%0, %1, #0\;add\\t%H0, %H1, #0\";
4605 return \"add\\t%H0, %H1, #0\;add\\t%0, %1, #0\";
4607 return \"mov\\t%Q0, %1\;mov\\t%R0, #0\";
4609 operands[1] = GEN_INT (- INTVAL (operands[1]));
4610 return \"mov\\t%Q0, %1\;neg\\t%Q0, %Q0\;asr\\t%R0, %Q0, #31\";
4612 return \"ldmia\\t%1, {%0, %H0}\";
4614 return \"stmia\\t%0, {%1, %H1}\";
4616 return thumb_load_double_from_address (operands);
4618 operands[2] = gen_rtx_MEM (SImode,
4619 plus_constant (XEXP (operands[0], 0), 4));
4620 output_asm_insn (\"str\\t%1, %0\;str\\t%H1, %2\", operands);
4623 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
4624 return \"mov\\t%0, %1\;mov\\t%H0, %H1\";
4625 return \"mov\\t%H0, %H1\;mov\\t%0, %1\";
4628 [(set_attr "length" "4,4,6,2,2,6,4,4")
4629 (set_attr "type" "*,*,*,load2,store2,load2,store2,*")
4630 (set_attr "pool_range" "*,*,*,*,*,1020,*,*")]
;; movsi: standard SImode move expander.  Forces mem destinations to
;; take a register source, splits ARM-invalid immediates via
;; arm_split_constant, loads TLS symbol addresses through
;; legitimize_tls_address (peeling off a CONST/PLUS addend first), and
;; legitimizes symbolic/PIC addresses when generating PIC code.
4633 (define_expand "movsi"
4634 [(set (match_operand:SI 0 "general_operand" "")
4635 (match_operand:SI 1 "general_operand" ""))]
4640 /* Everything except mem = const or mem = mem can be done easily. */
4641 if (GET_CODE (operands[0]) == MEM)
4642 operands[1] = force_reg (SImode, operands[1]);
4643 if (arm_general_register_operand (operands[0], SImode)
4644 && GET_CODE (operands[1]) == CONST_INT
4645 && !(const_ok_for_arm (INTVAL (operands[1]))
4646 || const_ok_for_arm (~INTVAL (operands[1]))))
4648 arm_split_constant (SET, SImode, NULL_RTX,
4649 INTVAL (operands[1]), operands[0], NULL_RTX,
4650 optimize && !no_new_pseudos);
4654 else /* TARGET_THUMB1... */
4656 if (!no_new_pseudos)
4658 if (GET_CODE (operands[0]) != REG)
4659 operands[1] = force_reg (SImode, operands[1]);
4663 /* Recognize the case where operand[1] is a reference to thread-local
4664 data and load its address to a register. */
4665 if (arm_tls_referenced_p (operands[1]))
4667 rtx tmp = operands[1];
4670 if (GET_CODE (tmp) == CONST && GET_CODE (XEXP (tmp, 0)) == PLUS)
4672 addend = XEXP (XEXP (tmp, 0), 1);
4673 tmp = XEXP (XEXP (tmp, 0), 0);
4676 gcc_assert (GET_CODE (tmp) == SYMBOL_REF);
4677 gcc_assert (SYMBOL_REF_TLS_MODEL (tmp) != 0);
4679 tmp = legitimize_tls_address (tmp, no_new_pseudos ? operands[0] : 0);
4682 tmp = gen_rtx_PLUS (SImode, tmp, addend);
4683 tmp = force_operand (tmp, operands[0]);
4688 && (CONSTANT_P (operands[1])
4689 || symbol_mentioned_p (operands[1])
4690 || label_mentioned_p (operands[1])))
4691 operands[1] = legitimize_pic_address (operands[1], SImode,
4692 (no_new_pseudos ? operands[0] : 0));
;; *arm_movsi_insn: basic ARM-mode SImode move (excluded when iWMMXt or
;; VFP patterns apply); predicable, with 4K literal-pool range for the
;; load alternative.
;; Followed by an SImode constant splitter (its `(define_split` line is
;; elided here) that expands ARM-invalid immediates via
;; arm_split_constant, and by *thumb1_movsi_insn, the Thumb-1 SImode
;; move with lo/hi register and memory alternatives.
4696 (define_insn "*arm_movsi_insn"
4697 [(set (match_operand:SI 0 "nonimmediate_operand" "=r,r,r,r, m")
4698 (match_operand:SI 1 "general_operand" "rI,K,N,mi,r"))]
4699 "TARGET_ARM && ! TARGET_IWMMXT
4700 && !(TARGET_HARD_FLOAT && TARGET_VFP)
4701 && ( register_operand (operands[0], SImode)
4702 || register_operand (operands[1], SImode))"
4709 [(set_attr "type" "*,*,*,load1,store1")
4710 (set_attr "predicable" "yes")
4711 (set_attr "pool_range" "*,*,*,4096,*")
4712 (set_attr "neg_pool_range" "*,*,*,4084,*")]
4716 [(set (match_operand:SI 0 "arm_general_register_operand" "")
4717 (match_operand:SI 1 "const_int_operand" ""))]
4719 && (!(const_ok_for_arm (INTVAL (operands[1]))
4720 || const_ok_for_arm (~INTVAL (operands[1]))))"
4721 [(clobber (const_int 0))]
4723 arm_split_constant (SET, SImode, NULL_RTX,
4724 INTVAL (operands[1]), operands[0], NULL_RTX, 0);
4729 (define_insn "*thumb1_movsi_insn"
4730 [(set (match_operand:SI 0 "nonimmediate_operand" "=l,l,l,l,l,>,l, m,*lh")
4731 (match_operand:SI 1 "general_operand" "l, I,J,K,>,l,mi,l,*lh"))]
4733 && ( register_operand (operands[0], SImode)
4734 || register_operand (operands[1], SImode))"
4745 [(set_attr "length" "2,2,4,4,2,2,2,2,2")
4746 (set_attr "type" "*,*,*,*,load1,store1,load1,store1,*")
4747 (set_attr "pool_range" "*,*,*,*,*,*,1020,*,*")]
;; Two Thumb-1 constant splitters (their `(define_split` header lines
;; are elided here):
;; 1) J-constraint constants: load the negated value, then negate.
;; 2) K-constraint constants: load an 8-bit value, then shift it left
;;    by the position found by scanning the 0xff mask across the value.
4751 [(set (match_operand:SI 0 "register_operand" "")
4752 (match_operand:SI 1 "const_int_operand" ""))]
4753 "TARGET_THUMB1 && satisfies_constraint_J (operands[1])"
4754 [(set (match_dup 0) (match_dup 1))
4755 (set (match_dup 0) (neg:SI (match_dup 0)))]
4756 "operands[1] = GEN_INT (- INTVAL (operands[1]));"
4760 [(set (match_operand:SI 0 "register_operand" "")
4761 (match_operand:SI 1 "const_int_operand" ""))]
4762 "TARGET_THUMB1 && satisfies_constraint_K (operands[1])"
4763 [(set (match_dup 0) (match_dup 1))
4764 (set (match_dup 0) (ashift:SI (match_dup 0) (match_dup 2)))]
4767 unsigned HOST_WIDE_INT val = INTVAL (operands[1]);
4768 unsigned HOST_WIDE_INT mask = 0xff;
4771 for (i = 0; i < 25; i++)
4772 if ((val & (mask << i)) == val)
4775 /* Shouldn't happen, but we don't want to split if the shift is zero. */
4779 operands[1] = GEN_INT (val >> i);
4780 operands[2] = GEN_INT (i);
;; PIC/TLS address-formation group:
;; - pic_load_addr_arm / pic_load_addr_thumb1: load a PIC symbol offset
;;   from the literal pool via UNSPEC_PIC_SYM.
;; - pic_load_addr_based / *pic_load_addr_based_insn: AOF-assembler
;;   variant that mentions the PIC register explicitly in the RTL.
;; - pic_add_dot_plus_four / pic_add_dot_plus_eight: emit the LPICnn
;;   label and add pc (+4 Thumb / +8 ARM pipeline offset) to complete a
;;   PIC address.
;; - tls_load_dot_plus_eight: combined label + pc-relative ldr used for
;;   TLS, produced by the peephole2 below from a pic_add_dot_plus_eight
;;   followed by a dependent load when the intermediate register dies.
;; - builtin_setjmp_receiver: reloads the PIC register after a
;;   longjmp, using r3 as scratch.
4784 ;; When generating pic, we need to load the symbol offset into a register.
4785 ;; So that the optimizer does not confuse this with a normal symbol load
4786 ;; we use an unspec. The offset will be loaded from a constant pool entry,
4787 ;; since that is the only type of relocation we can use.
4789 ;; The rather odd constraints on the following are to force reload to leave
4790 ;; the insn alone, and to force the minipool generation pass to then move
4791 ;; the GOT symbol to memory.
4793 (define_insn "pic_load_addr_arm"
4794 [(set (match_operand:SI 0 "s_register_operand" "=r")
4795 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
4796 "TARGET_ARM && flag_pic"
4798 [(set_attr "type" "load1")
4799 (set (attr "pool_range") (const_int 4096))
4800 (set (attr "neg_pool_range") (const_int 4084))]
4803 (define_insn "pic_load_addr_thumb1"
4804 [(set (match_operand:SI 0 "s_register_operand" "=l")
4805 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
4806 "TARGET_THUMB1 && flag_pic"
4808 [(set_attr "type" "load1")
4809 (set (attr "pool_range") (const_int 1024))]
4812 ;; This variant is used for AOF assembly, since it needs to mention the
4813 ;; pic register in the rtl.
4814 (define_expand "pic_load_addr_based"
4815 [(set (match_operand:SI 0 "s_register_operand" "")
4816 (unspec:SI [(match_operand 1 "" "") (match_dup 2)] UNSPEC_PIC_SYM))]
4817 "TARGET_ARM && flag_pic"
4818 "operands[2] = cfun->machine->pic_reg;"
4821 (define_insn "*pic_load_addr_based_insn"
4822 [(set (match_operand:SI 0 "s_register_operand" "=r")
4823 (unspec:SI [(match_operand 1 "" "")
4824 (match_operand 2 "s_register_operand" "r")]
4826 "TARGET_EITHER && flag_pic && operands[2] == cfun->machine->pic_reg"
4828 #ifdef AOF_ASSEMBLER
4829 operands[1] = aof_pic_entry (operands[1]);
4831 output_asm_insn (\"ldr%?\\t%0, %a1\", operands);
4834 [(set_attr "type" "load1")
4835 (set (attr "pool_range")
4836 (if_then_else (eq_attr "is_thumb" "yes")
4839 (set (attr "neg_pool_range")
4840 (if_then_else (eq_attr "is_thumb" "yes")
4845 (define_insn "pic_add_dot_plus_four"
4846 [(set (match_operand:SI 0 "register_operand" "=r")
4847 (unspec:SI [(plus:SI (match_operand:SI 1 "register_operand" "0")
4848 (const (plus:SI (pc) (const_int 4))))]
4850 (use (match_operand 2 "" ""))]
4853 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
4854 INTVAL (operands[2]));
4855 return \"add\\t%0, %|pc\";
4857 [(set_attr "length" "2")]
4860 (define_insn "pic_add_dot_plus_eight"
4861 [(set (match_operand:SI 0 "register_operand" "=r")
4862 (unspec:SI [(plus:SI (match_operand:SI 1 "register_operand" "r")
4863 (const (plus:SI (pc) (const_int 8))))]
4865 (use (match_operand 2 "" ""))]
4868 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
4869 INTVAL (operands[2]));
4870 return \"add%?\\t%0, %|pc, %1\";
4872 [(set_attr "predicable" "yes")]
4875 (define_insn "tls_load_dot_plus_eight"
4876 [(set (match_operand:SI 0 "register_operand" "+r")
4877 (mem:SI (unspec:SI [(plus:SI (match_operand:SI 1 "register_operand" "r")
4878 (const (plus:SI (pc) (const_int 8))))]
4880 (use (match_operand 2 "" ""))]
4883 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
4884 INTVAL (operands[2]));
4885 return \"ldr%?\\t%0, [%|pc, %1]\t\t@ tls_load_dot_plus_eight\";
4887 [(set_attr "predicable" "yes")]
4890 ;; PIC references to local variables can generate pic_add_dot_plus_eight
4891 ;; followed by a load. These sequences can be crunched down to
4892 ;; tls_load_dot_plus_eight by a peephole.
4895 [(parallel [(set (match_operand:SI 0 "register_operand" "")
4896 (unspec:SI [(plus:SI (match_operand:SI 3 "register_operand" "")
4897 (const (plus:SI (pc) (const_int 8))))]
4899 (use (label_ref (match_operand 1 "" "")))])
4900 (set (match_operand:SI 2 "register_operand" "") (mem:SI (match_dup 0)))]
4901 "TARGET_ARM && peep2_reg_dead_p (2, operands[0])"
4902 [(parallel [(set (match_dup 2)
4903 (mem:SI (unspec:SI [(plus:SI (match_dup 3)
4904 (const (plus:SI (pc) (const_int 8))))]
4906 (use (label_ref (match_dup 1)))])]
4910 (define_expand "builtin_setjmp_receiver"
4911 [(label_ref (match_operand 0 "" ""))]
4915 /* r3 is clobbered by set/longjmp, so we can use it as a scratch
4917 if (arm_pic_register != INVALID_REGNUM)
4918 arm_load_pic_register (1UL << 3);
;; *movsi_compare0: combined SImode register move that also sets the
;; condition codes from the moved value (CC_REGNUM compare against
;; zero), so a following test against zero can be elided.
4922 ;; If copying one reg to another we can set the condition codes according to
4923 ;; its value. Such a move is common after a return from subroutine and the
4924 ;; result is being tested against zero.
4926 (define_insn "*movsi_compare0"
4927 [(set (reg:CC CC_REGNUM)
4928 (compare:CC (match_operand:SI 1 "s_register_operand" "0,r")
4930 (set (match_operand:SI 0 "s_register_operand" "=r,r")
4936 [(set_attr "conds" "set")]
;; Half-word store helpers for pre-ARMv4 targets (no strh): the value is
;; stored as two QImode byte stores.
;; - storehi: little-endian order (low byte first, high byte at +1).
;; - storehi_bigend: big-endian order.
;; Both force reg+non-constant-offset addresses into a register first so
;; adjust_address cannot create unrecognizable PLUS rtl (see comment
;; below).
;; - storeinthi: stores a CONST_INT half-word as two byte stores,
;;   reusing one register when both bytes are equal, honoring
;;   BYTES_BIG_ENDIAN.
;; - storehi_single_op: ARMv4+ single strh, forcing the source into a
;;   register when needed.
4939 ;; Subroutine to store a half word from a register into memory.
4940 ;; Operand 0 is the source register (HImode)
4941 ;; Operand 1 is the destination address in a register (SImode)
4943 ;; In both this routine and the next, we must be careful not to spill
4944 ;; a memory address of reg+large_const into a separate PLUS insn, since this
4945 ;; can generate unrecognizable rtl.
4947 (define_expand "storehi"
4948 [;; store the low byte
4949 (set (match_operand 1 "" "") (match_dup 3))
4950 ;; extract the high byte
4952 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
4953 ;; store the high byte
4954 (set (match_dup 4) (match_dup 5))]
4958 rtx op1 = operands[1];
4959 rtx addr = XEXP (op1, 0);
4960 enum rtx_code code = GET_CODE (addr);
4962 if ((code == PLUS && GET_CODE (XEXP (addr, 1)) != CONST_INT)
4964 op1 = replace_equiv_address (operands[1], force_reg (SImode, addr));
4966 operands[4] = adjust_address (op1, QImode, 1);
4967 operands[1] = adjust_address (operands[1], QImode, 0);
4968 operands[3] = gen_lowpart (QImode, operands[0]);
4969 operands[0] = gen_lowpart (SImode, operands[0]);
4970 operands[2] = gen_reg_rtx (SImode);
4971 operands[5] = gen_lowpart (QImode, operands[2]);
4975 (define_expand "storehi_bigend"
4976 [(set (match_dup 4) (match_dup 3))
4978 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
4979 (set (match_operand 1 "" "") (match_dup 5))]
4983 rtx op1 = operands[1];
4984 rtx addr = XEXP (op1, 0);
4985 enum rtx_code code = GET_CODE (addr);
4987 if ((code == PLUS && GET_CODE (XEXP (addr, 1)) != CONST_INT)
4989 op1 = replace_equiv_address (op1, force_reg (SImode, addr));
4991 operands[4] = adjust_address (op1, QImode, 1);
4992 operands[1] = adjust_address (operands[1], QImode, 0);
4993 operands[3] = gen_lowpart (QImode, operands[0]);
4994 operands[0] = gen_lowpart (SImode, operands[0]);
4995 operands[2] = gen_reg_rtx (SImode);
4996 operands[5] = gen_lowpart (QImode, operands[2]);
5000 ;; Subroutine to store a half word integer constant into memory.
5001 (define_expand "storeinthi"
5002 [(set (match_operand 0 "" "")
5003 (match_operand 1 "" ""))
5004 (set (match_dup 3) (match_dup 2))]
5008 HOST_WIDE_INT value = INTVAL (operands[1]);
5009 rtx addr = XEXP (operands[0], 0);
5010 rtx op0 = operands[0];
5011 enum rtx_code code = GET_CODE (addr);
5013 if ((code == PLUS && GET_CODE (XEXP (addr, 1)) != CONST_INT)
5015 op0 = replace_equiv_address (op0, force_reg (SImode, addr));
5017 operands[1] = gen_reg_rtx (SImode);
5018 if (BYTES_BIG_ENDIAN)
5020 emit_insn (gen_movsi (operands[1], GEN_INT ((value >> 8) & 255)));
5021 if ((value & 255) == ((value >> 8) & 255))
5022 operands[2] = operands[1];
5025 operands[2] = gen_reg_rtx (SImode);
5026 emit_insn (gen_movsi (operands[2], GEN_INT (value & 255)));
5031 emit_insn (gen_movsi (operands[1], GEN_INT (value & 255)));
5032 if ((value & 255) == ((value >> 8) & 255))
5033 operands[2] = operands[1];
5036 operands[2] = gen_reg_rtx (SImode);
5037 emit_insn (gen_movsi (operands[2], GEN_INT ((value >> 8) & 255)));
5041 operands[3] = adjust_address (op0, QImode, 1);
5042 operands[0] = adjust_address (operands[0], QImode, 0);
5043 operands[2] = gen_lowpart (QImode, operands[2]);
5044 operands[1] = gen_lowpart (QImode, operands[1]);
5048 (define_expand "storehi_single_op"
5049 [(set (match_operand:HI 0 "memory_operand" "")
5050 (match_operand:HI 1 "general_operand" ""))]
5051 "TARGET_32BIT && arm_arch4"
5053 if (!s_register_operand (operands[1], HImode))
5054 operands[1] = copy_to_mode_reg (HImode, operands[1]);
;; movhi: standard HImode move expander.  The visible branches handle:
;; stores (via storehi_single_op / storeinthi / storehi(_bigend)
;; depending on architecture, constness, and endianness); sign-extending
;; constants into an SImode register (widening the constant's top bits
;; when that makes it ARM-loadable); optimized mem loads via
;; zero_extendhisi2 on ARMv4+; pre-ARMv4 aligned-word loads through
;; widen_memory_access with a 16-bit shift when the halfword sits in
;; the upper half; movhi_bytes as the byte-at-a-time fallback; reload
;; of large integers through an SImode SUBREG; and the Thumb-2 and
;; Thumb-1 variants, including re-legitimizing invalid SP/virtual
;; register relative addresses (see the ??? comments below).
5058 (define_expand "movhi"
5059 [(set (match_operand:HI 0 "general_operand" "")
5060 (match_operand:HI 1 "general_operand" ""))]
5065 if (!no_new_pseudos)
5067 if (GET_CODE (operands[0]) == MEM)
5071 emit_insn (gen_storehi_single_op (operands[0], operands[1]));
5074 if (GET_CODE (operands[1]) == CONST_INT)
5075 emit_insn (gen_storeinthi (operands[0], operands[1]));
5078 if (GET_CODE (operands[1]) == MEM)
5079 operands[1] = force_reg (HImode, operands[1]);
5080 if (BYTES_BIG_ENDIAN)
5081 emit_insn (gen_storehi_bigend (operands[1], operands[0]));
5083 emit_insn (gen_storehi (operands[1], operands[0]));
5087 /* Sign extend a constant, and keep it in an SImode reg. */
5088 else if (GET_CODE (operands[1]) == CONST_INT)
5090 rtx reg = gen_reg_rtx (SImode);
5091 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
5093 /* If the constant is already valid, leave it alone. */
5094 if (!const_ok_for_arm (val))
5096 /* If setting all the top bits will make the constant
5097 loadable in a single instruction, then set them.
5098 Otherwise, sign extend the number. */
5100 if (const_ok_for_arm (~(val | ~0xffff)))
5102 else if (val & 0x8000)
5106 emit_insn (gen_movsi (reg, GEN_INT (val)));
5107 operands[1] = gen_lowpart (HImode, reg);
5109 else if (arm_arch4 && optimize && !no_new_pseudos
5110 && GET_CODE (operands[1]) == MEM)
5112 rtx reg = gen_reg_rtx (SImode);
5114 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
5115 operands[1] = gen_lowpart (HImode, reg);
5117 else if (!arm_arch4)
5119 if (GET_CODE (operands[1]) == MEM)
5122 rtx offset = const0_rtx;
5123 rtx reg = gen_reg_rtx (SImode);
5125 if ((GET_CODE (base = XEXP (operands[1], 0)) == REG
5126 || (GET_CODE (base) == PLUS
5127 && (GET_CODE (offset = XEXP (base, 1))
5129 && ((INTVAL(offset) & 1) != 1)
5130 && GET_CODE (base = XEXP (base, 0)) == REG))
5131 && REGNO_POINTER_ALIGN (REGNO (base)) >= 32)
5135 new = widen_memory_access (operands[1], SImode,
5136 ((INTVAL (offset) & ~3)
5137 - INTVAL (offset)));
5138 emit_insn (gen_movsi (reg, new));
5139 if (((INTVAL (offset) & 2) != 0)
5140 ^ (BYTES_BIG_ENDIAN ? 1 : 0))
5142 rtx reg2 = gen_reg_rtx (SImode);
5144 emit_insn (gen_lshrsi3 (reg2, reg, GEN_INT (16)));
5149 emit_insn (gen_movhi_bytes (reg, operands[1]));
5151 operands[1] = gen_lowpart (HImode, reg);
5155 /* Handle loading a large integer during reload. */
5156 else if (GET_CODE (operands[1]) == CONST_INT
5157 && !const_ok_for_arm (INTVAL (operands[1]))
5158 && !const_ok_for_arm (~INTVAL (operands[1])))
5160 /* Writing a constant to memory needs a scratch, which should
5161 be handled with SECONDARY_RELOADs. */
5162 gcc_assert (GET_CODE (operands[0]) == REG);
5164 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5165 emit_insn (gen_movsi (operands[0], operands[1]));
5169 else if (TARGET_THUMB2)
5171 /* Thumb-2 can do everything except mem=mem and mem=const easily. */
5172 if (!no_new_pseudos)
5174 if (GET_CODE (operands[0]) != REG)
5175 operands[1] = force_reg (HImode, operands[1]);
5176 /* Zero extend a constant, and keep it in an SImode reg. */
5177 else if (GET_CODE (operands[1]) == CONST_INT)
5179 rtx reg = gen_reg_rtx (SImode);
5180 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
5182 emit_insn (gen_movsi (reg, GEN_INT (val)));
5183 operands[1] = gen_lowpart (HImode, reg);
5187 else /* TARGET_THUMB1 */
5189 if (!no_new_pseudos)
5191 if (GET_CODE (operands[1]) == CONST_INT)
5193 rtx reg = gen_reg_rtx (SImode);
5195 emit_insn (gen_movsi (reg, operands[1]));
5196 operands[1] = gen_lowpart (HImode, reg);
5199 /* ??? We shouldn't really get invalid addresses here, but this can
5200 happen if we are passed a SP (never OK for HImode/QImode) or
5201 virtual register (rejected by GO_IF_LEGITIMATE_ADDRESS for
5202 HImode/QImode) relative address. */
5203 /* ??? This should perhaps be fixed elsewhere, for instance, in
5204 fixup_stack_1, by checking for other kinds of invalid addresses,
5205 e.g. a bare reference to a virtual register. This may confuse the
5206 alpha though, which must handle this case differently. */
5207 if (GET_CODE (operands[0]) == MEM
5208 && !memory_address_p (GET_MODE (operands[0]),
5209 XEXP (operands[0], 0)))
5211 = replace_equiv_address (operands[0],
5212 copy_to_reg (XEXP (operands[0], 0)));
5214 if (GET_CODE (operands[1]) == MEM
5215 && !memory_address_p (GET_MODE (operands[1]),
5216 XEXP (operands[1], 0)))
5218 = replace_equiv_address (operands[1],
5219 copy_to_reg (XEXP (operands[1], 0)));
5221 if (GET_CODE (operands[1]) == MEM && optimize > 0)
5223 rtx reg = gen_reg_rtx (SImode);
5225 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
5226 operands[1] = gen_lowpart (HImode, reg);
5229 if (GET_CODE (operands[0]) == MEM)
5230 operands[1] = force_reg (HImode, operands[1]);
5232 else if (GET_CODE (operands[1]) == CONST_INT
5233 && !satisfies_constraint_I (operands[1]))
5235 /* Handle loading a large integer during reload. */
5237 /* Writing a constant to memory needs a scratch, which should
5238 be handled with SECONDARY_RELOADs. */
5239 gcc_assert (GET_CODE (operands[0]) == REG);
5241 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5242 emit_insn (gen_movsi (operands[0], operands[1]));
;; *thumb1_movhi_insn: Thumb-1 HImode move.  Register/immediate
;; alternatives emit add/mov; stores emit strh; loads emit ldrh, with a
;; special case that rewrites an SP-based index address (SP cannot be
;; an ldrh base) by first copying SP into the destination register and
;; substituting it into the address.
5249 (define_insn "*thumb1_movhi_insn"
5250 [(set (match_operand:HI 0 "nonimmediate_operand" "=l,l,m,*r,*h,l")
5251 (match_operand:HI 1 "general_operand" "l,m,l,*h,*r,I"))]
5253 && ( register_operand (operands[0], HImode)
5254 || register_operand (operands[1], HImode))"
5256 switch (which_alternative)
5258 case 0: return \"add %0, %1, #0\";
5259 case 2: return \"strh %1, %0\";
5260 case 3: return \"mov %0, %1\";
5261 case 4: return \"mov %0, %1\";
5262 case 5: return \"mov %0, %1\";
5263 default: gcc_unreachable ();
5265 /* The stack pointer can end up being taken as an index register.
5266 Catch this case here and deal with it. */
5267 if (GET_CODE (XEXP (operands[1], 0)) == PLUS
5268 && GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == REG
5269 && REGNO (XEXP (XEXP (operands[1], 0), 0)) == SP_REGNUM)
5272 ops[0] = operands[0];
5273 ops[1] = XEXP (XEXP (operands[1], 0), 0);
5275 output_asm_insn (\"mov %0, %1\", ops);
5277 XEXP (XEXP (operands[1], 0), 0) = operands[0];
5280 return \"ldrh %0, %1\";
5282 [(set_attr "length" "2,4,2,2,2,2")
5283 (set_attr "type" "*,load1,store1,*,*,*")]
;; movhi_bytes: load a halfword as two zero-extended QImode loads from
;; addr and addr+1, then combine them with (hi << 8) | lo; operands 4/5
;; select which byte is the high part according to BYTES_BIG_ENDIAN.
;; movhi_bigend (below): load a halfword on big-endian by rotating the
;; containing word and arithmetic-shifting right by 16.
5287 (define_expand "movhi_bytes"
5288 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
5290 (zero_extend:SI (match_dup 6)))
5291 (set (match_operand:SI 0 "" "")
5292 (ior:SI (ashift:SI (match_dup 4) (const_int 8)) (match_dup 5)))]
5297 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
5299 mem1 = change_address (operands[1], QImode, addr);
5300 mem2 = change_address (operands[1], QImode, plus_constant (addr, 1));
5301 operands[0] = gen_lowpart (SImode, operands[0]);
5303 operands[2] = gen_reg_rtx (SImode);
5304 operands[3] = gen_reg_rtx (SImode);
5307 if (BYTES_BIG_ENDIAN)
5309 operands[4] = operands[2];
5310 operands[5] = operands[3];
5314 operands[4] = operands[3];
5315 operands[5] = operands[2];
5320 (define_expand "movhi_bigend"
5322 (rotate:SI (subreg:SI (match_operand:HI 1 "memory_operand" "") 0)
5325 (ashiftrt:SI (match_dup 2) (const_int 16)))
5326 (set (match_operand:HI 0 "s_register_operand" "")
5330 operands[2] = gen_reg_rtx (SImode);
5331 operands[3] = gen_reg_rtx (SImode);
5332 operands[4] = gen_lowpart (HImode, operands[3]);
;; *movhi_insn_arch4: ARMv4+ HImode move using mov/mvn for immediates
;; and strh/ldrh for memory; predicable, 256-byte literal-pool range.
;; *movhi_bytes (below): register/immediate-only HImode move (mov/mvn)
;; for targets where halfword memory accesses go through movhi_bytes.
5336 ;; Pattern to recognize insn generated default case above
5337 (define_insn "*movhi_insn_arch4"
5338 [(set (match_operand:HI 0 "nonimmediate_operand" "=r,r,m,r")
5339 (match_operand:HI 1 "general_operand" "rI,K,r,m"))]
5342 && (GET_CODE (operands[1]) != CONST_INT
5343 || const_ok_for_arm (INTVAL (operands[1]))
5344 || const_ok_for_arm (~INTVAL (operands[1])))"
5346 mov%?\\t%0, %1\\t%@ movhi
5347 mvn%?\\t%0, #%B1\\t%@ movhi
5348 str%(h%)\\t%1, %0\\t%@ movhi
5349 ldr%(h%)\\t%0, %1\\t%@ movhi"
5350 [(set_attr "type" "*,*,store1,load1")
5351 (set_attr "predicable" "yes")
5352 (set_attr "pool_range" "*,*,*,256")
5353 (set_attr "neg_pool_range" "*,*,*,244")]
5356 (define_insn "*movhi_bytes"
5357 [(set (match_operand:HI 0 "s_register_operand" "=r,r")
5358 (match_operand:HI 1 "arm_rhs_operand" "rI,K"))]
5361 mov%?\\t%0, %1\\t%@ movhi
5362 mvn%?\\t%0, #%B1\\t%@ movhi"
5363 [(set_attr "predicable" "yes")]
;; thumb_movhi_clobber: HImode store with a DImode scratch clobber;
;; when the address is already strict and the source is a lo register
;; it degenerates to a plain movhi (other cases flagged XXX below).
;; reload_outhi / reload_inhi: secondary-reload expanders for HImode
;; stores/loads, dispatching to arm_reload_out_hi / arm_reload_in_hi
;; (ARM) or the Thumb helper.
;; NOTE(review): reload_inhi calling thumb_reload_out_hi (not an "in"
;; variant) for the Thumb path looks asymmetric — presumably the Thumb
;; helper handles both directions; confirm against arm.c.
5366 (define_expand "thumb_movhi_clobber"
5367 [(set (match_operand:HI 0 "memory_operand" "")
5368 (match_operand:HI 1 "register_operand" ""))
5369 (clobber (match_operand:DI 2 "register_operand" ""))]
5372 if (strict_memory_address_p (HImode, XEXP (operands[0], 0))
5373 && REGNO (operands[1]) <= LAST_LO_REGNUM)
5375 emit_insn (gen_movhi (operands[0], operands[1]));
5378 /* XXX Fixme, need to handle other cases here as well. */
5383 ;; We use a DImode scratch because we may occasionally need an additional
5384 ;; temporary if the address isn't offsettable -- push_reload doesn't seem
5385 ;; to take any notice of the "o" constraints on reload_memory_operand operand.
5386 (define_expand "reload_outhi"
5387 [(parallel [(match_operand:HI 0 "arm_reload_memory_operand" "=o")
5388 (match_operand:HI 1 "s_register_operand" "r")
5389 (match_operand:DI 2 "s_register_operand" "=&l")])]
5392 arm_reload_out_hi (operands);
5394 thumb_reload_out_hi (operands);
5399 (define_expand "reload_inhi"
5400 [(parallel [(match_operand:HI 0 "s_register_operand" "=r")
5401 (match_operand:HI 1 "arm_reload_memory_operand" "o")
5402 (match_operand:DI 2 "s_register_operand" "=&r")])]
5406 arm_reload_in_hi (operands);
5408 thumb_reload_out_hi (operands);
;; movqi: standard QImode move expander.  Constants are loaded through
;; an SImode pseudo and narrowed with gen_lowpart; invalid SP/virtual
;; register relative addresses are re-legitimized via
;; replace_equiv_address (see the ??? comments below); optimized loads
;; go through zero_extendqisi2; mem destinations take a register
;; source; and Thumb reload of large integers goes through an SImode
;; SUBREG of the destination register.
5412 (define_expand "movqi"
5413 [(set (match_operand:QI 0 "general_operand" "")
5414 (match_operand:QI 1 "general_operand" ""))]
5417 /* Everything except mem = const or mem = mem can be done easily */
5419 if (!no_new_pseudos)
5421 if (GET_CODE (operands[1]) == CONST_INT)
5423 rtx reg = gen_reg_rtx (SImode);
5425 emit_insn (gen_movsi (reg, operands[1]));
5426 operands[1] = gen_lowpart (QImode, reg);
5431 /* ??? We shouldn't really get invalid addresses here, but this can
5432 happen if we are passed a SP (never OK for HImode/QImode) or
5433 virtual register (rejected by GO_IF_LEGITIMATE_ADDRESS for
5434 HImode/QImode) relative address. */
5435 /* ??? This should perhaps be fixed elsewhere, for instance, in
5436 fixup_stack_1, by checking for other kinds of invalid addresses,
5437 e.g. a bare reference to a virtual register. This may confuse the
5438 alpha though, which must handle this case differently. */
5439 if (GET_CODE (operands[0]) == MEM
5440 && !memory_address_p (GET_MODE (operands[0]),
5441 XEXP (operands[0], 0)))
5443 = replace_equiv_address (operands[0],
5444 copy_to_reg (XEXP (operands[0], 0)));
5445 if (GET_CODE (operands[1]) == MEM
5446 && !memory_address_p (GET_MODE (operands[1]),
5447 XEXP (operands[1], 0)))
5449 = replace_equiv_address (operands[1],
5450 copy_to_reg (XEXP (operands[1], 0)));
5453 if (GET_CODE (operands[1]) == MEM && optimize > 0)
5455 rtx reg = gen_reg_rtx (SImode);
5457 emit_insn (gen_zero_extendqisi2 (reg, operands[1]));
5458 operands[1] = gen_lowpart (QImode, reg);
5461 if (GET_CODE (operands[0]) == MEM)
5462 operands[1] = force_reg (QImode, operands[1]);
5464 else if (TARGET_THUMB
5465 && GET_CODE (operands[1]) == CONST_INT
5466 && !satisfies_constraint_I (operands[1]))
5468 /* Handle loading a large integer during reload. */
5470 /* Writing a constant to memory needs a scratch, which should
5471 be handled with SECONDARY_RELOADs. */
5472 gcc_assert (GET_CODE (operands[0]) == REG);
5474 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5475 emit_insn (gen_movsi (operands[0], operands[1]));
;; ARM-state QImode move: reg<-reg/imm, reg<-~imm (K), load and store
;; alternatives.  At least one operand must be a register.
;; NOTE(review): the condition string and assembly template lines are
;; missing from this excerpt.
5482 (define_insn "*arm_movqi_insn"
5483 [(set (match_operand:QI 0 "nonimmediate_operand" "=r,r,r,m")
5484 (match_operand:QI 1 "general_operand" "rI,K,m,r"))]
5486 && ( register_operand (operands[0], QImode)
5487 || register_operand (operands[1], QImode))"
5493 [(set_attr "type" "*,*,load1,store1")
5494 (set_attr "predicable" "yes")]
;; Thumb-1 QImode move; lo-reg, memory, hi-reg and immediate alternatives.
;; All alternatives are 2 bytes long; only the load alternative may use
;; the constant pool (range 32).
5497 (define_insn "*thumb1_movqi_insn"
5498 [(set (match_operand:QI 0 "nonimmediate_operand" "=l,l,m,*r,*h,l")
5499 (match_operand:QI 1 "general_operand" "l, m,l,*h,*r,I"))]
5501 && ( register_operand (operands[0], QImode)
5502 || register_operand (operands[1], QImode))"
5510 [(set_attr "length" "2")
5511 (set_attr "type" "*,load1,store1,*,*,*")
5512 (set_attr "pool_range" "*,32,*,*,*,*")]
;; movsf: single-precision float move expander.  Stores to memory go
;; through a register; on Thumb-1 any non-REG destination forces the
;; source into a register (while new pseudos are allowed).
5515 (define_expand "movsf"
5516 [(set (match_operand:SF 0 "general_operand" "")
5517 (match_operand:SF 1 "general_operand" ""))]
5522 if (GET_CODE (operands[0]) == MEM)
5523 operands[1] = force_reg (SFmode, operands[1]);
5525 else /* TARGET_THUMB1 */
5527 if (!no_new_pseudos)
5529 if (GET_CODE (operands[0]) != REG)
5530 operands[1] = force_reg (SFmode, operands[1]);
;; Split an SF-constant load into a core register into a plain SImode
;; set of the same bits.  FAIL if either lowpart cannot be taken.
;; NOTE(review): the (define_split line itself and its full condition
;; are missing from this excerpt.
5536 ;; Transform a floating-point move of a constant into a core register into
5537 ;; an SImode operation.
5539 [(set (match_operand:SF 0 "arm_general_register_operand" "")
5540 (match_operand:SF 1 "immediate_operand" ""))]
5543 && GET_CODE (operands[1]) == CONST_DOUBLE"
5544 [(set (match_dup 2) (match_dup 3))]
5546 operands[2] = gen_lowpart (SImode, operands[0]);
5547 operands[3] = gen_lowpart (SImode, operands[1]);
5548 if (operands[2] == 0 || operands[3] == 0)
;; Soft-float SF move in ARM state: core-reg move, pool/memory load and
;; store.  mem = mem is disallowed by the condition.  Pool range 4096
;; forward / 4084 backward for the load alternative.
5553 (define_insn "*arm_movsf_soft_insn"
5554 [(set (match_operand:SF 0 "nonimmediate_operand" "=r,r,m")
5555 (match_operand:SF 1 "general_operand" "r,mE,r"))]
5557 && TARGET_SOFT_FLOAT
5558 && (GET_CODE (operands[0]) != MEM
5559 || register_operand (operands[1], SFmode))"
5562 ldr%?\\t%0, %1\\t%@ float
5563 str%?\\t%1, %0\\t%@ float"
5564 [(set_attr "length" "4,4,4")
5565 (set_attr "predicable" "yes")
5566 (set_attr "type" "*,load1,store1")
5567 (set_attr "pool_range" "*,4096,*")
5568 (set_attr "neg_pool_range" "*,4084,*")]
;; Thumb-1 SF move; register, post-inc ('>'), memory and hi-reg
;; alternatives.  Constants ('F') are expected to come from the pool
;; (1020-byte range on the mF alternative).
5571 ;;; ??? This should have alternatives for constants.
5572 (define_insn "*thumb1_movsf_insn"
5573 [(set (match_operand:SF 0 "nonimmediate_operand" "=l,l,>,l, m,*r,*h")
5574 (match_operand:SF 1 "general_operand" "l, >,l,mF,l,*h,*r"))]
5576 && ( register_operand (operands[0], SFmode)
5577 || register_operand (operands[1], SFmode))"
5586 [(set_attr "length" "2")
5587 (set_attr "type" "*,load1,store1,load1,store1,*,*")
5588 (set_attr "pool_range" "*,*,*,1020,*,*,*")]
;; movdf: double-precision float move expander; same shape as movsf
;; (memory destinations force the source into a register).
5591 (define_expand "movdf"
5592 [(set (match_operand:DF 0 "general_operand" "")
5593 (match_operand:DF 1 "general_operand" ""))]
5598 if (GET_CODE (operands[0]) == MEM)
5599 operands[1] = force_reg (DFmode, operands[1]);
5601 else /* TARGET_THUMB */
5603 if (!no_new_pseudos)
5605 if (GET_CODE (operands[0]) != REG)
5606 operands[1] = force_reg (DFmode, operands[1]);
;; reload_outdf: secondary reload for storing a DF value held in core
;; registers to memory, using operand 2 as an address scratch.
;; Dispatches on the address code: REG is used directly; POST_INC /
;; PRE_DEC become a DImode move; PRE_INC pre-adjusts the base by 8;
;; POST_DEC stores then subtracts 8; other (reg+offset) forms compute
;; the address into the scratch first.
5612 ;; Reloading a df mode value stored in integer regs to memory can require a
5614 (define_expand "reload_outdf"
5615 [(match_operand:DF 0 "arm_reload_memory_operand" "=o")
5616 (match_operand:DF 1 "s_register_operand" "r")
5617 (match_operand:SI 2 "s_register_operand" "=&r")]
5621 enum rtx_code code = GET_CODE (XEXP (operands[0], 0));
5624 operands[2] = XEXP (operands[0], 0);
5625 else if (code == POST_INC || code == PRE_DEC)
5627 operands[0] = gen_rtx_SUBREG (DImode, operands[0], 0);
5628 operands[1] = gen_rtx_SUBREG (DImode, operands[1], 0);
5629 emit_insn (gen_movdi (operands[0], operands[1]));
5632 else if (code == PRE_INC)
5634 rtx reg = XEXP (XEXP (operands[0], 0), 0);
5636 emit_insn (gen_addsi3 (reg, reg, GEN_INT (8)));
5639 else if (code == POST_DEC)
5640 operands[2] = XEXP (XEXP (operands[0], 0), 0);
5642 emit_insn (gen_addsi3 (operands[2], XEXP (XEXP (operands[0], 0), 0),
5643 XEXP (XEXP (operands[0], 0), 1)));
5645 emit_insn (gen_rtx_SET (VOIDmode,
5646 replace_equiv_address (operands[0], operands[2]),
;; POST_DEC: undo the implicit increment after the store.
5649 if (code == POST_DEC)
5650 emit_insn (gen_addsi3 (operands[2], operands[2], GEN_INT (-8)));
;; Soft-float DF move in ARM state.  Register and constant alternatives
;; are emitted by output_move_double; load/store use pool ranges
;; 1020/1008.  Lengths vary per constant class (Da/Db/Dc).
5656 (define_insn "*movdf_soft_insn"
5657 [(set (match_operand:DF 0 "nonimmediate_soft_df_operand" "=r,r,r,r,m")
5658 (match_operand:DF 1 "soft_df_operand" "rDa,Db,Dc,mF,r"))]
5659 "TARGET_ARM && TARGET_SOFT_FLOAT
5660 && ( register_operand (operands[0], DFmode)
5661 || register_operand (operands[1], DFmode))"
5663 switch (which_alternative)
5670 return output_move_double (operands);
5673 [(set_attr "length" "8,12,16,8,8")
5674 (set_attr "type" "*,*,*,load2,store2")
5675 (set_attr "pool_range" "1020")
5676 (set_attr "neg_pool_range" "1008")]
;; Thumb-1 DF move.  Register-pair copies pick the add/mov order based
;; on register-number overlap so the halves never clobber each other;
;; ldmia/stmia handle the post-inc alternatives; stores to general
;; memory are split into two str instructions via a synthesized
;; operands[2] address (base + 4).
5679 ;;; ??? This should have alternatives for constants.
5680 ;;; ??? This was originally identical to the movdi_insn pattern.
5681 ;;; ??? The 'F' constraint looks funny, but it should always be replaced by
5682 ;;; thumb_reorg with a memory reference.
5683 (define_insn "*thumb_movdf_insn"
5684 [(set (match_operand:DF 0 "nonimmediate_operand" "=l,l,>,l, m,*r")
5685 (match_operand:DF 1 "general_operand" "l, >,l,mF,l,*r"))]
5687 && ( register_operand (operands[0], DFmode)
5688 || register_operand (operands[1], DFmode))"
5690 switch (which_alternative)
;; Overlap-safe ordering: copy high half first when dst overlaps src+1.
5694 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
5695 return \"add\\t%0, %1, #0\;add\\t%H0, %H1, #0\";
5696 return \"add\\t%H0, %H1, #0\;add\\t%0, %1, #0\";
5698 return \"ldmia\\t%1, {%0, %H0}\";
5700 return \"stmia\\t%0, {%1, %H1}\";
5702 return thumb_load_double_from_address (operands);
5704 operands[2] = gen_rtx_MEM (SImode,
5705 plus_constant (XEXP (operands[0], 0), 4));
5706 output_asm_insn (\"str\\t%1, %0\;str\\t%H1, %2\", operands);
5709 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
5710 return \"mov\\t%0, %1\;mov\\t%H0, %H1\";
5711 return \"mov\\t%H0, %H1\;mov\\t%0, %1\";
5714 [(set_attr "length" "4,2,2,6,4,4")
5715 (set_attr "type" "*,load2,store2,load2,store2,*")
5716 (set_attr "pool_range" "*,*,*,1020,*,*")]
;; movxf: XFmode move for FPA hardware float; memory destinations force
;; the source into a register.
5719 (define_expand "movxf"
5720 [(set (match_operand:XF 0 "general_operand" "")
5721 (match_operand:XF 1 "general_operand" ""))]
5722 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
5724 if (GET_CODE (operands[0]) == MEM)
5725 operands[1] = force_reg (XFmode, operands[1]);
;; iWMMXt 64-bit vector move expanders (V2SI / V4HI / V8QI); bodies are
;; not visible in this excerpt.
5730 (define_expand "movv2si"
5731 [(set (match_operand:V2SI 0 "nonimmediate_operand" "")
5732 (match_operand:V2SI 1 "general_operand" ""))]
5733 "TARGET_REALLY_IWMMXT"
5737 (define_expand "movv4hi"
5738 [(set (match_operand:V4HI 0 "nonimmediate_operand" "")
5739 (match_operand:V4HI 1 "general_operand" ""))]
5740 "TARGET_REALLY_IWMMXT"
5744 (define_expand "movv8qi"
5745 [(set (match_operand:V8QI 0 "nonimmediate_operand" "")
5746 (match_operand:V8QI 1 "general_operand" ""))]
5747 "TARGET_REALLY_IWMMXT"
;; load_multiple: expand a multi-register load.  Only consecutive core
;; registers (2..14 of them, not running past LAST_ARM_REGNUM) from a
;; MEM source are supported; otherwise the expander gives up.
5752 ;; load- and store-multiple insns
5753 ;; The arm can load/store any set of registers, provided that they are in
5754 ;; ascending order; but that is beyond GCC so stick with what it knows.
5756 (define_expand "load_multiple"
5757 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
5758 (match_operand:SI 1 "" ""))
5759 (use (match_operand:SI 2 "" ""))])]
5762 HOST_WIDE_INT offset = 0;
5764 /* Support only fixed point registers. */
5765 if (GET_CODE (operands[2]) != CONST_INT
5766 || INTVAL (operands[2]) > 14
5767 || INTVAL (operands[2]) < 2
5768 || GET_CODE (operands[1]) != MEM
5769 || GET_CODE (operands[0]) != REG
5770 || REGNO (operands[0]) > (LAST_ARM_REGNUM - 1)
5771 || REGNO (operands[0]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
5775 = arm_gen_load_multiple (REGNO (operands[0]), INTVAL (operands[2]),
5776 force_reg (SImode, XEXP (operands[1], 0)),
5777 TRUE, FALSE, operands[1], &offset);
;; Write-back load-multiple patterns: ldmia rN!, {...} loading 4, 3 or
;; 2 registers.  The parallel's first element updates the base register;
;; XVECLEN checks pin the exact element count (regs + base update).
5780 ;; Load multiple with write-back
5782 (define_insn "*ldmsi_postinc4"
5783 [(match_parallel 0 "load_multiple_operation"
5784 [(set (match_operand:SI 1 "s_register_operand" "=r")
5785 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
5787 (set (match_operand:SI 3 "arm_hard_register_operand" "")
5788 (mem:SI (match_dup 2)))
5789 (set (match_operand:SI 4 "arm_hard_register_operand" "")
5790 (mem:SI (plus:SI (match_dup 2) (const_int 4))))
5791 (set (match_operand:SI 5 "arm_hard_register_operand" "")
5792 (mem:SI (plus:SI (match_dup 2) (const_int 8))))
5793 (set (match_operand:SI 6 "arm_hard_register_operand" "")
5794 (mem:SI (plus:SI (match_dup 2) (const_int 12))))])]
5795 "TARGET_32BIT && XVECLEN (operands[0], 0) == 5"
5796 "ldm%(ia%)\\t%1!, {%3, %4, %5, %6}"
5797 [(set_attr "type" "load4")
5798 (set_attr "predicable" "yes")]
;; Thumb-1 variant: lo registers only, not predicable.
5801 (define_insn "*ldmsi_postinc4_thumb1"
5802 [(match_parallel 0 "load_multiple_operation"
5803 [(set (match_operand:SI 1 "s_register_operand" "=l")
5804 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
5806 (set (match_operand:SI 3 "arm_hard_register_operand" "")
5807 (mem:SI (match_dup 2)))
5808 (set (match_operand:SI 4 "arm_hard_register_operand" "")
5809 (mem:SI (plus:SI (match_dup 2) (const_int 4))))
5810 (set (match_operand:SI 5 "arm_hard_register_operand" "")
5811 (mem:SI (plus:SI (match_dup 2) (const_int 8))))
5812 (set (match_operand:SI 6 "arm_hard_register_operand" "")
5813 (mem:SI (plus:SI (match_dup 2) (const_int 12))))])]
5814 "TARGET_THUMB1 && XVECLEN (operands[0], 0) == 5"
5815 "ldmia\\t%1!, {%3, %4, %5, %6}"
5816 [(set_attr "type" "load4")]
5819 (define_insn "*ldmsi_postinc3"
5820 [(match_parallel 0 "load_multiple_operation"
5821 [(set (match_operand:SI 1 "s_register_operand" "=r")
5822 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
5824 (set (match_operand:SI 3 "arm_hard_register_operand" "")
5825 (mem:SI (match_dup 2)))
5826 (set (match_operand:SI 4 "arm_hard_register_operand" "")
5827 (mem:SI (plus:SI (match_dup 2) (const_int 4))))
5828 (set (match_operand:SI 5 "arm_hard_register_operand" "")
5829 (mem:SI (plus:SI (match_dup 2) (const_int 8))))])]
5830 "TARGET_32BIT && XVECLEN (operands[0], 0) == 4"
5831 "ldm%(ia%)\\t%1!, {%3, %4, %5}"
5832 [(set_attr "type" "load3")
5833 (set_attr "predicable" "yes")]
5836 (define_insn "*ldmsi_postinc2"
5837 [(match_parallel 0 "load_multiple_operation"
5838 [(set (match_operand:SI 1 "s_register_operand" "=r")
5839 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
5841 (set (match_operand:SI 3 "arm_hard_register_operand" "")
5842 (mem:SI (match_dup 2)))
5843 (set (match_operand:SI 4 "arm_hard_register_operand" "")
5844 (mem:SI (plus:SI (match_dup 2) (const_int 4))))])]
5845 "TARGET_32BIT && XVECLEN (operands[0], 0) == 3"
5846 "ldm%(ia%)\\t%1!, {%3, %4}"
5847 [(set_attr "type" "load2")
5848 (set_attr "predicable" "yes")]
;; Plain (no write-back) load-multiple patterns for 4, 3 and 2
;; consecutive-offset loads from a common base register.
5851 ;; Ordinary load multiple
5853 (define_insn "*ldmsi4"
5854 [(match_parallel 0 "load_multiple_operation"
5855 [(set (match_operand:SI 2 "arm_hard_register_operand" "")
5856 (mem:SI (match_operand:SI 1 "s_register_operand" "r")))
5857 (set (match_operand:SI 3 "arm_hard_register_operand" "")
5858 (mem:SI (plus:SI (match_dup 1) (const_int 4))))
5859 (set (match_operand:SI 4 "arm_hard_register_operand" "")
5860 (mem:SI (plus:SI (match_dup 1) (const_int 8))))
5861 (set (match_operand:SI 5 "arm_hard_register_operand" "")
5862 (mem:SI (plus:SI (match_dup 1) (const_int 12))))])]
5863 "TARGET_32BIT && XVECLEN (operands[0], 0) == 4"
5864 "ldm%(ia%)\\t%1, {%2, %3, %4, %5}"
5865 [(set_attr "type" "load4")
5866 (set_attr "predicable" "yes")]
5869 (define_insn "*ldmsi3"
5870 [(match_parallel 0 "load_multiple_operation"
5871 [(set (match_operand:SI 2 "arm_hard_register_operand" "")
5872 (mem:SI (match_operand:SI 1 "s_register_operand" "r")))
5873 (set (match_operand:SI 3 "arm_hard_register_operand" "")
5874 (mem:SI (plus:SI (match_dup 1) (const_int 4))))
5875 (set (match_operand:SI 4 "arm_hard_register_operand" "")
5876 (mem:SI (plus:SI (match_dup 1) (const_int 8))))])]
5877 "TARGET_32BIT && XVECLEN (operands[0], 0) == 3"
5878 "ldm%(ia%)\\t%1, {%2, %3, %4}"
5879 [(set_attr "type" "load3")
5880 (set_attr "predicable" "yes")]
5883 (define_insn "*ldmsi2"
5884 [(match_parallel 0 "load_multiple_operation"
5885 [(set (match_operand:SI 2 "arm_hard_register_operand" "")
5886 (mem:SI (match_operand:SI 1 "s_register_operand" "r")))
5887 (set (match_operand:SI 3 "arm_hard_register_operand" "")
5888 (mem:SI (plus:SI (match_dup 1) (const_int 4))))])]
5889 "TARGET_32BIT && XVECLEN (operands[0], 0) == 2"
5890 "ldm%(ia%)\\t%1, {%2, %3}"
5891 [(set_attr "type" "load2")
5892 (set_attr "predicable" "yes")]
;; store_multiple: mirror of load_multiple (operand 0 is the MEM,
;; operand 1 the first register); same 2..14 consecutive-register
;; restriction.
5895 (define_expand "store_multiple"
5896 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
5897 (match_operand:SI 1 "" ""))
5898 (use (match_operand:SI 2 "" ""))])]
5901 HOST_WIDE_INT offset = 0;
5903 /* Support only fixed point registers. */
5904 if (GET_CODE (operands[2]) != CONST_INT
5905 || INTVAL (operands[2]) > 14
5906 || INTVAL (operands[2]) < 2
5907 || GET_CODE (operands[1]) != REG
5908 || GET_CODE (operands[0]) != MEM
5909 || REGNO (operands[1]) > (LAST_ARM_REGNUM - 1)
5910 || REGNO (operands[1]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
5914 = arm_gen_store_multiple (REGNO (operands[1]), INTVAL (operands[2]),
5915 force_reg (SImode, XEXP (operands[0], 0)),
5916 TRUE, FALSE, operands[0], &offset);
;; Write-back store-multiple patterns: stmia rN!, {...} storing 4, 3 or
;; 2 registers; structural mirror of the ldm write-back family above.
5919 ;; Store multiple with write-back
5921 (define_insn "*stmsi_postinc4"
5922 [(match_parallel 0 "store_multiple_operation"
5923 [(set (match_operand:SI 1 "s_register_operand" "=r")
5924 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
5926 (set (mem:SI (match_dup 2))
5927 (match_operand:SI 3 "arm_hard_register_operand" ""))
5928 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
5929 (match_operand:SI 4 "arm_hard_register_operand" ""))
5930 (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
5931 (match_operand:SI 5 "arm_hard_register_operand" ""))
5932 (set (mem:SI (plus:SI (match_dup 2) (const_int 12)))
5933 (match_operand:SI 6 "arm_hard_register_operand" ""))])]
5934 "TARGET_32BIT && XVECLEN (operands[0], 0) == 5"
5935 "stm%(ia%)\\t%1!, {%3, %4, %5, %6}"
5936 [(set_attr "predicable" "yes")
5937 (set_attr "type" "store4")]
;; Thumb-1 variant: lo registers only, not predicable.
5940 (define_insn "*stmsi_postinc4_thumb1"
5941 [(match_parallel 0 "store_multiple_operation"
5942 [(set (match_operand:SI 1 "s_register_operand" "=l")
5943 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
5945 (set (mem:SI (match_dup 2))
5946 (match_operand:SI 3 "arm_hard_register_operand" ""))
5947 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
5948 (match_operand:SI 4 "arm_hard_register_operand" ""))
5949 (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
5950 (match_operand:SI 5 "arm_hard_register_operand" ""))
5951 (set (mem:SI (plus:SI (match_dup 2) (const_int 12)))
5952 (match_operand:SI 6 "arm_hard_register_operand" ""))])]
5953 "TARGET_THUMB1 && XVECLEN (operands[0], 0) == 5"
5954 "stmia\\t%1!, {%3, %4, %5, %6}"
5955 [(set_attr "type" "store4")]
5958 (define_insn "*stmsi_postinc3"
5959 [(match_parallel 0 "store_multiple_operation"
5960 [(set (match_operand:SI 1 "s_register_operand" "=r")
5961 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
5963 (set (mem:SI (match_dup 2))
5964 (match_operand:SI 3 "arm_hard_register_operand" ""))
5965 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
5966 (match_operand:SI 4 "arm_hard_register_operand" ""))
5967 (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
5968 (match_operand:SI 5 "arm_hard_register_operand" ""))])]
5969 "TARGET_32BIT && XVECLEN (operands[0], 0) == 4"
5970 "stm%(ia%)\\t%1!, {%3, %4, %5}"
5971 [(set_attr "predicable" "yes")
5972 (set_attr "type" "store3")]
5975 (define_insn "*stmsi_postinc2"
5976 [(match_parallel 0 "store_multiple_operation"
5977 [(set (match_operand:SI 1 "s_register_operand" "=r")
5978 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
5980 (set (mem:SI (match_dup 2))
5981 (match_operand:SI 3 "arm_hard_register_operand" ""))
5982 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
5983 (match_operand:SI 4 "arm_hard_register_operand" ""))])]
5984 "TARGET_32BIT && XVECLEN (operands[0], 0) == 3"
5985 "stm%(ia%)\\t%1!, {%3, %4}"
5986 [(set_attr "predicable" "yes")
5987 (set_attr "type" "store2")]
;; Plain (no write-back) store-multiple patterns for 4, 3 and 2
;; registers; structural mirror of the ordinary ldm family.
5990 ;; Ordinary store multiple
5992 (define_insn "*stmsi4"
5993 [(match_parallel 0 "store_multiple_operation"
5994 [(set (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
5995 (match_operand:SI 2 "arm_hard_register_operand" ""))
5996 (set (mem:SI (plus:SI (match_dup 1) (const_int 4)))
5997 (match_operand:SI 3 "arm_hard_register_operand" ""))
5998 (set (mem:SI (plus:SI (match_dup 1) (const_int 8)))
5999 (match_operand:SI 4 "arm_hard_register_operand" ""))
6000 (set (mem:SI (plus:SI (match_dup 1) (const_int 12)))
6001 (match_operand:SI 5 "arm_hard_register_operand" ""))])]
6002 "TARGET_32BIT && XVECLEN (operands[0], 0) == 4"
6003 "stm%(ia%)\\t%1, {%2, %3, %4, %5}"
6004 [(set_attr "predicable" "yes")
6005 (set_attr "type" "store4")]
6008 (define_insn "*stmsi3"
6009 [(match_parallel 0 "store_multiple_operation"
6010 [(set (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
6011 (match_operand:SI 2 "arm_hard_register_operand" ""))
6012 (set (mem:SI (plus:SI (match_dup 1) (const_int 4)))
6013 (match_operand:SI 3 "arm_hard_register_operand" ""))
6014 (set (mem:SI (plus:SI (match_dup 1) (const_int 8)))
6015 (match_operand:SI 4 "arm_hard_register_operand" ""))])]
6016 "TARGET_32BIT && XVECLEN (operands[0], 0) == 3"
6017 "stm%(ia%)\\t%1, {%2, %3, %4}"
6018 [(set_attr "predicable" "yes")
6019 (set_attr "type" "store3")]
6022 (define_insn "*stmsi2"
6023 [(match_parallel 0 "store_multiple_operation"
6024 [(set (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
6025 (match_operand:SI 2 "arm_hard_register_operand" ""))
6026 (set (mem:SI (plus:SI (match_dup 1) (const_int 4)))
6027 (match_operand:SI 3 "arm_hard_register_operand" ""))])]
6028 "TARGET_32BIT && XVECLEN (operands[0], 0) == 2"
6029 "stm%(ia%)\\t%1, {%2, %3}"
6030 [(set_attr "predicable" "yes")
6031 (set_attr "type" "store2")]
;; movmemqi: block-move expander.  Operand 2 is the byte count, operand
;; 3 the alignment.  ARM path delegates to arm_gen_movmemqi; Thumb-1
;; only handles word-aligned blocks of at most 48 bytes.
6034 ;; Move a block of memory if it is word aligned and MORE than 2 words long.
6035 ;; We could let this apply for blocks of less than this, but it clobbers so
6036 ;; many registers that there is then probably a better way.
6038 (define_expand "movmemqi"
6039 [(match_operand:BLK 0 "general_operand" "")
6040 (match_operand:BLK 1 "general_operand" "")
6041 (match_operand:SI 2 "const_int_operand" "")
6042 (match_operand:SI 3 "const_int_operand" "")]
6047 if (arm_gen_movmemqi (operands))
6051 else /* TARGET_THUMB1 */
6053 if ( INTVAL (operands[3]) != 4
6054 || INTVAL (operands[2]) > 48)
6057 thumb_expand_movmemqi (operands);
;; Thumb block-move helpers: copy 12 (movmem12b) or 8 (movmem8b) bytes
;; and advance both pointers; output via thumb_output_move_mem_multiple.
;; Scratch lo registers hold the words in flight.
6063 ;; Thumb block-move insns
6065 (define_insn "movmem12b"
6066 [(set (mem:SI (match_operand:SI 2 "register_operand" "0"))
6067 (mem:SI (match_operand:SI 3 "register_operand" "1")))
6068 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6069 (mem:SI (plus:SI (match_dup 3) (const_int 4))))
6070 (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
6071 (mem:SI (plus:SI (match_dup 3) (const_int 8))))
6072 (set (match_operand:SI 0 "register_operand" "=l")
6073 (plus:SI (match_dup 2) (const_int 12)))
6074 (set (match_operand:SI 1 "register_operand" "=l")
6075 (plus:SI (match_dup 3) (const_int 12)))
6076 (clobber (match_scratch:SI 4 "=&l"))
6077 (clobber (match_scratch:SI 5 "=&l"))
6078 (clobber (match_scratch:SI 6 "=&l"))]
6080 "* return thumb_output_move_mem_multiple (3, operands);"
6081 [(set_attr "length" "4")
6082 ; This isn't entirely accurate... It loads as well, but in terms of
6083 ; scheduling the following insn it is better to consider it as a store
6084 (set_attr "type" "store3")]
6087 (define_insn "movmem8b"
6088 [(set (mem:SI (match_operand:SI 2 "register_operand" "0"))
6089 (mem:SI (match_operand:SI 3 "register_operand" "1")))
6090 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6091 (mem:SI (plus:SI (match_dup 3) (const_int 4))))
6092 (set (match_operand:SI 0 "register_operand" "=l")
6093 (plus:SI (match_dup 2) (const_int 8)))
6094 (set (match_operand:SI 1 "register_operand" "=l")
6095 (plus:SI (match_dup 3) (const_int 8)))
6096 (clobber (match_scratch:SI 4 "=&l"))
6097 (clobber (match_scratch:SI 5 "=&l"))]
6099 "* return thumb_output_move_mem_multiple (2, operands);"
6100 [(set_attr "length" "4")
6101 ; This isn't entirely accurate... It loads as well, but in terms of
6102 ; scheduling the following insn it is better to consider it as a store
6103 (set_attr "type" "store2")]
;; cbranchsi4: compare-and-branch expander (Thumb-1 oriented).  Negative
;; immediates usable via add are routed to cbranchsi4_scratch; other
;; non-cmp-able operands are forced into a register.
6108 ;; Compare & branch insns
6109 ;; The range calculations are based as follows:
6110 ;; For forward branches, the address calculation returns the address of
6111 ;; the next instruction. This is 2 beyond the branch instruction.
6112 ;; For backward branches, the address calculation returns the address of
6113 ;; the first instruction in this pattern (cmp). This is 2 before the branch
6114 ;; instruction for the shortest sequence, and 4 before the branch instruction
6115 ;; if we have to jump around an unconditional branch.
6116 ;; To the basic branch range the PC offset must be added (this is +4).
6117 ;; So for forward branches we have
6118 ;; (pos_range - pos_base_offs + pc_offs) = (pos_range - 2 + 4).
6119 ;; And for backward branches we have
6120 ;; (neg_range - neg_base_offs + pc_offs) = (neg_range - (-2 or -4) + 4).
6122 ;; For a 'b' pos_range = 2046, neg_range = -2048 giving (-2040->2048).
6123 ;; For a 'b<cond>' pos_range = 254, neg_range = -256 giving (-250 ->256).
6125 (define_expand "cbranchsi4"
6126 [(set (pc) (if_then_else
6127 (match_operator 0 "arm_comparison_operator"
6128 [(match_operand:SI 1 "s_register_operand" "")
6129 (match_operand:SI 2 "nonmemory_operand" "")])
6130 (label_ref (match_operand 3 "" ""))
6134 if (thumb1_cmpneg_operand (operands[2], SImode))
6136 emit_jump_insn (gen_cbranchsi4_scratch (NULL, operands[1], operands[2],
6137 operands[3], operands[0]));
6140 if (!thumb1_cmp_operand (operands[2], SImode))
6141 operands[2] = force_reg (SImode, operands[2]);
;; Thumb-1 cmp + conditional branch.  Length 4 uses a short b<cond>;
;; length 6 inverts the condition and jumps over an unconditional b;
;; length 8 (far_jump) jumps over a bl.  Ranges follow the comment
;; block above this pattern.
6144 (define_insn "*cbranchsi4_insn"
6145 [(set (pc) (if_then_else
6146 (match_operator 0 "arm_comparison_operator"
6147 [(match_operand:SI 1 "s_register_operand" "l,*h")
6148 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r")])
6149 (label_ref (match_operand 3 "" ""))
6153 output_asm_insn (\"cmp\\t%1, %2\", operands);
6155 switch (get_attr_length (insn))
6157 case 4: return \"b%d0\\t%l3\";
6158 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6159 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6162 [(set (attr "far_jump")
6164 (eq_attr "length" "8")
6165 (const_string "yes")
6166 (const_string "no")))
6167 (set (attr "length")
6169 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6170 (le (minus (match_dup 3) (pc)) (const_int 256)))
6173 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6174 (le (minus (match_dup 3) (pc)) (const_int 2048)))
;; Compare-with-negative-immediate branch: implemented as
;; add scratch, %1, #-imm (sets flags on Thumb-1), then the usual
;; short/long/far branch selection on operator 4.
6179 (define_insn "cbranchsi4_scratch"
6180 [(set (pc) (if_then_else
6181 (match_operator 4 "arm_comparison_operator"
6182 [(match_operand:SI 1 "s_register_operand" "l,0")
6183 (match_operand:SI 2 "thumb1_cmpneg_operand" "L,J")])
6184 (label_ref (match_operand 3 "" ""))
6186 (clobber (match_scratch:SI 0 "=l,l"))]
6189 output_asm_insn (\"add\\t%0, %1, #%n2\", operands);
6191 switch (get_attr_length (insn))
6193 case 4: return \"b%d4\\t%l3\";
6194 case 6: return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6195 default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6198 [(set (attr "far_jump")
6200 (eq_attr "length" "8")
6201 (const_string "yes")
6202 (const_string "no")))
6203 (set (attr "length")
6205 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6206 (le (minus (match_dup 3) (pc)) (const_int 256)))
6209 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6210 (le (minus (match_dup 3) (pc)) (const_int 2048)))
;; Combined move + compare-against-zero + branch.  Alternatives:
;; 0: dst == src, just cmp; 1: sub dst, src, #0 (moves and sets flags);
;; 2: cmp then mov to hi reg; 3: cmp then str.  Alternatives 2-3 add
;; 2 bytes, which the length/far_jump attributes account for.
6214 (define_insn "*movsi_cbranchsi4"
6217 (match_operator 3 "arm_comparison_operator"
6218 [(match_operand:SI 1 "s_register_operand" "0,l,l,l")
6220 (label_ref (match_operand 2 "" ""))
6222 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,l,*h,*m")
6226 if (which_alternative == 0)
6227 output_asm_insn (\"cmp\t%0, #0\", operands);
6228 else if (which_alternative == 1)
6229 output_asm_insn (\"sub\t%0, %1, #0\", operands);
6232 output_asm_insn (\"cmp\t%1, #0\", operands);
6233 if (which_alternative == 2)
6234 output_asm_insn (\"mov\t%0, %1\", operands);
6236 output_asm_insn (\"str\t%1, %0\", operands);
6238 switch (get_attr_length (insn) - ((which_alternative > 1) ? 2 : 0))
6240 case 4: return \"b%d3\\t%l2\";
6241 case 6: return \"b%D3\\t.LCB%=\;b\\t%l2\\t%@long jump\\n.LCB%=:\";
6242 default: return \"b%D3\\t.LCB%=\;bl\\t%l2\\t%@far jump\\n.LCB%=:\";
6245 [(set (attr "far_jump")
6247 (ior (and (gt (symbol_ref ("which_alternative"))
6249 (eq_attr "length" "8"))
6250 (eq_attr "length" "10"))
6251 (const_string "yes")
6252 (const_string "no")))
6253 (set (attr "length")
6255 (le (symbol_ref ("which_alternative"))
;; Ranges shrink by 2 for the longer alternatives (extra mov/str insn).
6258 (and (ge (minus (match_dup 2) (pc)) (const_int -250))
6259 (le (minus (match_dup 2) (pc)) (const_int 256)))
6262 (and (ge (minus (match_dup 2) (pc)) (const_int -2040))
6263 (le (minus (match_dup 2) (pc)) (const_int 2048)))
6267 (and (ge (minus (match_dup 2) (pc)) (const_int -248))
6268 (le (minus (match_dup 2) (pc)) (const_int 256)))
6271 (and (ge (minus (match_dup 2) (pc)) (const_int -2038))
6272 (le (minus (match_dup 2) (pc)) (const_int 2048)))
;; Equality branch against a negated register: uses cmn (compare
;; negative) so no scratch is needed, then the standard branch-length
;; selection.
6277 (define_insn "*negated_cbranchsi4"
6280 (match_operator 0 "equality_operator"
6281 [(match_operand:SI 1 "s_register_operand" "l")
6282 (neg:SI (match_operand:SI 2 "s_register_operand" "l"))])
6283 (label_ref (match_operand 3 "" ""))
6287 output_asm_insn (\"cmn\\t%1, %2\", operands);
6288 switch (get_attr_length (insn))
6290 case 4: return \"b%d0\\t%l3\";
6291 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6292 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6295 [(set (attr "far_jump")
6297 (eq_attr "length" "8")
6298 (const_string "yes")
6299 (const_string "no")))
6300 (set (attr "length")
6302 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6303 (le (minus (match_dup 3) (pc)) (const_int 256)))
6306 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6307 (le (minus (match_dup 3) (pc)) (const_int 2048)))
;; Bit-test branches.  *tbit_cbranch tests a single bit by shifting it
;; into the sign position (lsl by 31 - bitpos into a scratch, which
;; sets N/Z); *tlobits_cbranch tests the low N bits by shifting them to
;; the top (lsl by 32 - width).  Branch emission is the standard
;; short/long/far selection.
6312 (define_insn "*tbit_cbranch"
6315 (match_operator 0 "equality_operator"
6316 [(zero_extract:SI (match_operand:SI 1 "s_register_operand" "l")
6318 (match_operand:SI 2 "const_int_operand" "i"))
6320 (label_ref (match_operand 3 "" ""))
6322 (clobber (match_scratch:SI 4 "=l"))]
6327 op[0] = operands[4];
6328 op[1] = operands[1];
6329 op[2] = GEN_INT (32 - 1 - INTVAL (operands[2]));
6331 output_asm_insn (\"lsl\\t%0, %1, %2\", op);
6332 switch (get_attr_length (insn))
6334 case 4: return \"b%d0\\t%l3\";
6335 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6336 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6339 [(set (attr "far_jump")
6341 (eq_attr "length" "8")
6342 (const_string "yes")
6343 (const_string "no")))
6344 (set (attr "length")
6346 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6347 (le (minus (match_dup 3) (pc)) (const_int 256)))
6350 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6351 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6356 (define_insn "*tlobits_cbranch"
6359 (match_operator 0 "equality_operator"
6360 [(zero_extract:SI (match_operand:SI 1 "s_register_operand" "l")
6361 (match_operand:SI 2 "const_int_operand" "i")
6364 (label_ref (match_operand 3 "" ""))
6366 (clobber (match_scratch:SI 4 "=l"))]
6371 op[0] = operands[4];
6372 op[1] = operands[1];
6373 op[2] = GEN_INT (32 - INTVAL (operands[2]));
6375 output_asm_insn (\"lsl\\t%0, %1, %2\", op);
6376 switch (get_attr_length (insn))
6378 case 4: return \"b%d0\\t%l3\";
6379 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6380 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6383 [(set (attr "far_jump")
6385 (eq_attr "length" "8")
6386 (const_string "yes")
6387 (const_string "no")))
6388 (set (attr "length")
6390 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6391 (le (minus (match_dup 3) (pc)) (const_int 256)))
6394 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6395 (le (minus (match_dup 3) (pc)) (const_int 2048)))
;; AND-test branch without a destination: tst %0, %1 then the standard
;; short/long/far branch selection on equality operator 3.
6400 (define_insn "*tstsi3_cbranch"
6403 (match_operator 3 "equality_operator"
6404 [(and:SI (match_operand:SI 0 "s_register_operand" "%l")
6405 (match_operand:SI 1 "s_register_operand" "l"))
6407 (label_ref (match_operand 2 "" ""))
6412 output_asm_insn (\"tst\\t%0, %1\", operands);
6413 switch (get_attr_length (insn))
6415 case 4: return \"b%d3\\t%l2\";
6416 case 6: return \"b%D3\\t.LCB%=\;b\\t%l2\\t%@long jump\\n.LCB%=:\";
6417 default: return \"b%D3\\t.LCB%=\;bl\\t%l2\\t%@far jump\\n.LCB%=:\";
6420 [(set (attr "far_jump")
6422 (eq_attr "length" "8")
6423 (const_string "yes")
6424 (const_string "no")))
6425 (set (attr "length")
6427 (and (ge (minus (match_dup 2) (pc)) (const_int -250))
6428 (le (minus (match_dup 2) (pc)) (const_int 256)))
6431 (and (ge (minus (match_dup 2) (pc)) (const_int -2040))
6432 (le (minus (match_dup 2) (pc)) (const_int 2048)))
;; AND + branch + keep the result.  Alternative 0 ANDs into the
;; destination directly; alternatives 1-3 AND into scratch operand 1
;; then mov/str the result out, costing 2 extra bytes (reflected in
;; the length/far_jump ranges).
6437 (define_insn "*andsi3_cbranch"
6440 (match_operator 5 "equality_operator"
6441 [(and:SI (match_operand:SI 2 "s_register_operand" "%0,1,1,1")
6442 (match_operand:SI 3 "s_register_operand" "l,l,l,l"))
6444 (label_ref (match_operand 4 "" ""))
6446 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
6447 (and:SI (match_dup 2) (match_dup 3)))
6448 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
6452 if (which_alternative == 0)
6453 output_asm_insn (\"and\\t%0, %3\", operands);
6454 else if (which_alternative == 1)
6456 output_asm_insn (\"and\\t%1, %3\", operands);
6457 output_asm_insn (\"mov\\t%0, %1\", operands);
6461 output_asm_insn (\"and\\t%1, %3\", operands);
6462 output_asm_insn (\"str\\t%1, %0\", operands);
6465 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
6467 case 4: return \"b%d5\\t%l4\";
6468 case 6: return \"b%D5\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
6469 default: return \"b%D5\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
6472 [(set (attr "far_jump")
6474 (ior (and (eq (symbol_ref ("which_alternative"))
6476 (eq_attr "length" "8"))
6477 (eq_attr "length" "10"))
6478 (const_string "yes")
6479 (const_string "no")))
6480 (set (attr "length")
6482 (eq (symbol_ref ("which_alternative"))
6485 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
6486 (le (minus (match_dup 4) (pc)) (const_int 256)))
6489 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
6490 (le (minus (match_dup 4) (pc)) (const_int 2048)))
6494 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
6495 (le (minus (match_dup 4) (pc)) (const_int 256)))
6498 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
6499 (le (minus (match_dup 4) (pc)) (const_int 2048)))
;; OR-test branch discarding the result into a scratch: orr into the
;; scratch (sets flags on Thumb-1) then the standard branch selection.
6504 (define_insn "*orrsi3_cbranch_scratch"
6507 (match_operator 4 "equality_operator"
6508 [(ior:SI (match_operand:SI 1 "s_register_operand" "%0")
6509 (match_operand:SI 2 "s_register_operand" "l"))
6511 (label_ref (match_operand 3 "" ""))
6513 (clobber (match_scratch:SI 0 "=l"))]
6517 output_asm_insn (\"orr\\t%0, %2\", operands);
6518 switch (get_attr_length (insn))
6520 case 4: return \"b%d4\\t%l3\";
6521 case 6: return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6522 default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6525 [(set (attr "far_jump")
6527 (eq_attr "length" "8")
6528 (const_string "yes")
6529 (const_string "no")))
6530 (set (attr "length")
6532 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6533 (le (minus (match_dup 3) (pc)) (const_int 256)))
6536 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6537 (le (minus (match_dup 3) (pc)) (const_int 2048)))
;; OR + branch + keep the result; same alternative/length structure as
;; *andsi3_cbranch but with orr.
6542 (define_insn "*orrsi3_cbranch"
6545 (match_operator 5 "equality_operator"
6546 [(ior:SI (match_operand:SI 2 "s_register_operand" "%0,1,1,1")
6547 (match_operand:SI 3 "s_register_operand" "l,l,l,l"))
6549 (label_ref (match_operand 4 "" ""))
6551 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
6552 (ior:SI (match_dup 2) (match_dup 3)))
6553 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
6557 if (which_alternative == 0)
6558 output_asm_insn (\"orr\\t%0, %3\", operands);
6559 else if (which_alternative == 1)
6561 output_asm_insn (\"orr\\t%1, %3\", operands);
6562 output_asm_insn (\"mov\\t%0, %1\", operands);
6566 output_asm_insn (\"orr\\t%1, %3\", operands);
6567 output_asm_insn (\"str\\t%1, %0\", operands);
6570 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
6572 case 4: return \"b%d5\\t%l4\";
6573 case 6: return \"b%D5\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
6574 default: return \"b%D5\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
6577 [(set (attr "far_jump")
6579 (ior (and (eq (symbol_ref ("which_alternative"))
6581 (eq_attr "length" "8"))
6582 (eq_attr "length" "10"))
6583 (const_string "yes")
6584 (const_string "no")))
6585 (set (attr "length")
6587 (eq (symbol_ref ("which_alternative"))
6590 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
6591 (le (minus (match_dup 4) (pc)) (const_int 256)))
6594 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
6595 (le (minus (match_dup 4) (pc)) (const_int 2048)))
6599 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
6600 (le (minus (match_dup 4) (pc)) (const_int 256)))
6603 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
6604 (le (minus (match_dup 4) (pc)) (const_int 2048)))
;; Thumb-1: XOR (eor) two low registers into a scratch (result then
;; discarded) and branch on eq/ne with zero; branch form chosen by the
;; length attribute exactly as in *orrsi3_cbranch_scratch.
6609 (define_insn "*xorsi3_cbranch_scratch"
6612 (match_operator 4 "equality_operator"
6613 [(xor:SI (match_operand:SI 1 "s_register_operand" "%0")
6614 (match_operand:SI 2 "s_register_operand" "l"))
6616 (label_ref (match_operand 3 "" ""))
6618 (clobber (match_scratch:SI 0 "=l"))]
6622 output_asm_insn (\"eor\\t%0, %2\", operands);
6623 switch (get_attr_length (insn))
6625 case 4: return \"b%d4\\t%l3\";
6626 case 6: return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6627 default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6630 [(set (attr "far_jump")
6632 (eq_attr "length" "8")
6633 (const_string "yes")
6634 (const_string "no")))
6635 (set (attr "length")
6637 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6638 (le (minus (match_dup 3) (pc)) (const_int 256)))
6641 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6642 (le (minus (match_dup 3) (pc)) (const_int 2048)))
;; XOR with the result kept (low reg directly, hi reg via "mov", or
;; memory via "str" through the scratch) followed by a conditional
;; branch; structure mirrors *orrsi3_cbranch.
6647 (define_insn "*xorsi3_cbranch"
6650 (match_operator 5 "equality_operator"
6651 [(xor:SI (match_operand:SI 2 "s_register_operand" "%0,1,1,1")
6652 (match_operand:SI 3 "s_register_operand" "l,l,l,l"))
6654 (label_ref (match_operand 4 "" ""))
6656 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
6657 (xor:SI (match_dup 2) (match_dup 3)))
6658 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
6662 if (which_alternative == 0)
6663 output_asm_insn (\"eor\\t%0, %3\", operands);
6664 else if (which_alternative == 1)
6666 output_asm_insn (\"eor\\t%1, %3\", operands);
6667 output_asm_insn (\"mov\\t%0, %1\", operands);
6671 output_asm_insn (\"eor\\t%1, %3\", operands);
6672 output_asm_insn (\"str\\t%1, %0\", operands);
6675 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
6677 case 4: return \"b%d5\\t%l4\";
6678 case 6: return \"b%D5\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
6679 default: return \"b%D5\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
6682 [(set (attr "far_jump")
6684 (ior (and (eq (symbol_ref ("which_alternative"))
6686 (eq_attr "length" "8"))
6687 (eq_attr "length" "10"))
6688 (const_string "yes")
6689 (const_string "no")))
6690 (set (attr "length")
6692 (eq (symbol_ref ("which_alternative"))
6695 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
6696 (le (minus (match_dup 4) (pc)) (const_int 256)))
6699 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
6700 (le (minus (match_dup 4) (pc)) (const_int 2048)))
6704 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
6705 (le (minus (match_dup 4) (pc)) (const_int 256)))
6708 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
6709 (le (minus (match_dup 4) (pc)) (const_int 2048)))
;; Thumb-1: BIC (and with complement) into a scratch (result discarded)
;; and branch on eq/ne with zero; same length-driven branch selection as
;; the other *_cbranch_scratch patterns.
6714 (define_insn "*bicsi3_cbranch_scratch"
6717 (match_operator 4 "equality_operator"
6718 [(and:SI (not:SI (match_operand:SI 2 "s_register_operand" "l"))
6719 (match_operand:SI 1 "s_register_operand" "0"))
6721 (label_ref (match_operand 3 "" ""))
6723 (clobber (match_scratch:SI 0 "=l"))]
6727 output_asm_insn (\"bic\\t%0, %2\", operands);
6728 switch (get_attr_length (insn))
6730 case 4: return \"b%d4\\t%l3\";
6731 case 6: return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6732 default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6735 [(set (attr "far_jump")
6737 (eq_attr "length" "8")
6738 (const_string "yes")
6739 (const_string "no")))
6740 (set (attr "length")
6742 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6743 (le (minus (match_dup 3) (pc)) (const_int 256)))
6746 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6747 (le (minus (match_dup 3) (pc)) (const_int 2048)))
;; BIC with the result kept.  Five alternatives: low reg direct (0),
;; low reg via scratch (1), hi reg via scratch + "mov" (2), and memory
;; via scratch + "str" (3-4).  The inline comment notes why the "mov"
;; to a lo reg is safe even though it sets the condition flags.
6752 (define_insn "*bicsi3_cbranch"
6755 (match_operator 5 "equality_operator"
6756 [(and:SI (not:SI (match_operand:SI 3 "s_register_operand" "l,l,l,l,l"))
6757 (match_operand:SI 2 "s_register_operand" "0,1,1,1,1"))
6759 (label_ref (match_operand 4 "" ""))
6761 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=!l,l,*?h,*?m,*?m")
6762 (and:SI (not:SI (match_dup 3)) (match_dup 2)))
6763 (clobber (match_scratch:SI 1 "=X,l,l,&l,&l"))]
6767 if (which_alternative == 0)
6768 output_asm_insn (\"bic\\t%0, %3\", operands);
6769 else if (which_alternative <= 2)
6771 output_asm_insn (\"bic\\t%1, %3\", operands);
6772 /* It's ok if OP0 is a lo-reg, even though the mov will set the
6773 conditions again, since we're only testing for equality. */
6774 output_asm_insn (\"mov\\t%0, %1\", operands);
6778 output_asm_insn (\"bic\\t%1, %3\", operands);
6779 output_asm_insn (\"str\\t%1, %0\", operands);
6782 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
6784 case 4: return \"b%d5\\t%l4\";
6785 case 6: return \"b%D5\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
6786 default: return \"b%D5\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
6789 [(set (attr "far_jump")
6791 (ior (and (eq (symbol_ref ("which_alternative"))
6793 (eq_attr "length" "8"))
6794 (eq_attr "length" "10"))
6795 (const_string "yes")
6796 (const_string "no")))
6797 (set (attr "length")
6799 (eq (symbol_ref ("which_alternative"))
6802 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
6803 (le (minus (match_dup 4) (pc)) (const_int 256)))
6806 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
6807 (le (minus (match_dup 4) (pc)) (const_int 2048)))
6811 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
6812 (le (minus (match_dup 4) (pc)) (const_int 256)))
6815 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
6816 (le (minus (match_dup 4) (pc)) (const_int 2048)))
;; Thumb-1: decrement operand 2 by 1 (into reg, hi reg, or memory per
;; alternative) and branch on an equality test of the pre-decrement
;; value; cond[] recasts that test against const 1 so the branch can be
;; emitted from the flags of the "sub".
;; NOTE(review): the comparison-code selection inside gen_rtx_fmt_ee is
;; only partially visible in this extract -- confirm against the full
;; source before relying on the exact condition used.
6821 (define_insn "*cbranchne_decr1"
6823 (if_then_else (match_operator 3 "equality_operator"
6824 [(match_operand:SI 2 "s_register_operand" "l,l,1,l")
6826 (label_ref (match_operand 4 "" ""))
6828 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
6829 (plus:SI (match_dup 2) (const_int -1)))
6830 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
6835 cond[0] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
6837 VOIDmode, operands[2], const1_rtx);
6838 cond[1] = operands[4];
6840 if (which_alternative == 0)
6841 output_asm_insn (\"sub\\t%0, %2, #1\", operands);
6842 else if (which_alternative == 1)
6844 /* We must provide an alternative for a hi reg because reload
6845 cannot handle output reloads on a jump instruction, but we
6846 can't subtract into that. Fortunately a mov from lo to hi
6847 does not clobber the condition codes. */
6848 output_asm_insn (\"sub\\t%1, %2, #1\", operands);
6849 output_asm_insn (\"mov\\t%0, %1\", operands);
6853 /* Similarly, but the target is memory. */
6854 output_asm_insn (\"sub\\t%1, %2, #1\", operands);
6855 output_asm_insn (\"str\\t%1, %0\", operands);
6858 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
6861 output_asm_insn (\"b%d0\\t%l1\", cond);
6864 output_asm_insn (\"b%D0\\t.LCB%=\", cond);
6865 return \"b\\t%l4\\t%@long jump\\n.LCB%=:\";
6867 output_asm_insn (\"b%D0\\t.LCB%=\", cond);
6868 return \"bl\\t%l4\\t%@far jump\\n.LCB%=:\";
6872 [(set (attr "far_jump")
6874 (ior (and (eq (symbol_ref ("which_alternative"))
6876 (eq_attr "length" "8"))
6877 (eq_attr "length" "10"))
6878 (const_string "yes")
6879 (const_string "no")))
6880 (set_attr_alternative "length"
6884 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
6885 (le (minus (match_dup 4) (pc)) (const_int 256)))
6888 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
6889 (le (minus (match_dup 4) (pc)) (const_int 2048)))
6894 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
6895 (le (minus (match_dup 4) (pc)) (const_int 256)))
6898 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
6899 (le (minus (match_dup 4) (pc)) (const_int 2048)))
6904 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
6905 (le (minus (match_dup 4) (pc)) (const_int 256)))
6908 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
6909 (le (minus (match_dup 4) (pc)) (const_int 2048)))
6914 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
6915 (le (minus (match_dup 4) (pc)) (const_int 256)))
6918 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
6919 (le (minus (match_dup 4) (pc)) (const_int 2048)))
;; Thumb-1: add (or "sub" with the negated immediate, see #%n2) and
;; branch on EQ/NE/GE/LT of the result.  Alternatives 0-2 write the
;; result directly; 3 moves it to a hi reg, 4-5 store it to memory, both
;; via the scratch, costing 2 extra bytes reflected in the length math.
6924 (define_insn "*addsi3_cbranch"
6927 (match_operator 4 "comparison_operator"
6929 (match_operand:SI 2 "s_register_operand" "%l,0,*0,1,1,1")
6930 (match_operand:SI 3 "reg_or_int_operand" "lL,IJ,*r,lIJ,lIJ,lIJ"))
6932 (label_ref (match_operand 5 "" ""))
6935 (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,l,*!h,*?h,*?m,*?m")
6936 (plus:SI (match_dup 2) (match_dup 3)))
6937 (clobber (match_scratch:SI 1 "=X,X,X,l,&l,&l"))]
6939 && (GET_CODE (operands[4]) == EQ
6940 || GET_CODE (operands[4]) == NE
6941 || GET_CODE (operands[4]) == GE
6942 || GET_CODE (operands[4]) == LT)"
6948 cond[0] = (which_alternative < 3) ? operands[0] : operands[1];
6949 cond[1] = operands[2];
6950 cond[2] = operands[3];
6952 if (GET_CODE (cond[2]) == CONST_INT && INTVAL (cond[2]) < 0)
6953 output_asm_insn (\"sub\\t%0, %1, #%n2\", cond);
6955 output_asm_insn (\"add\\t%0, %1, %2\", cond);
6957 if (which_alternative >= 3
6958 && which_alternative < 4)
6959 output_asm_insn (\"mov\\t%0, %1\", operands);
6960 else if (which_alternative >= 4)
6961 output_asm_insn (\"str\\t%1, %0\", operands);
6963 switch (get_attr_length (insn) - ((which_alternative >= 3) ? 2 : 0))
6966 return \"b%d4\\t%l5\";
6968 return \"b%D4\\t.LCB%=\;b\\t%l5\\t%@long jump\\n.LCB%=:\";
6970 return \"b%D4\\t.LCB%=\;bl\\t%l5\\t%@far jump\\n.LCB%=:\";
6974 [(set (attr "far_jump")
6976 (ior (and (lt (symbol_ref ("which_alternative"))
6978 (eq_attr "length" "8"))
6979 (eq_attr "length" "10"))
6980 (const_string "yes")
6981 (const_string "no")))
6982 (set (attr "length")
6984 (lt (symbol_ref ("which_alternative"))
6987 (and (ge (minus (match_dup 5) (pc)) (const_int -250))
6988 (le (minus (match_dup 5) (pc)) (const_int 256)))
6991 (and (ge (minus (match_dup 5) (pc)) (const_int -2040))
6992 (le (minus (match_dup 5) (pc)) (const_int 2048)))
6996 (and (ge (minus (match_dup 5) (pc)) (const_int -248))
6997 (le (minus (match_dup 5) (pc)) (const_int 256)))
7000 (and (ge (minus (match_dup 5) (pc)) (const_int -2038))
7001 (le (minus (match_dup 5) (pc)) (const_int 2048)))
;; As *addsi3_cbranch but the sum itself is dead: alternatives 0-1 use
;; "cmp"/"cmn" (no result written at all), alternatives 2-3 compute the
;; sum into the scratch with add/sub (choosing "sub" for a negative
;; immediate), then branch on EQ/NE/GE/LT.
7006 (define_insn "*addsi3_cbranch_scratch"
7009 (match_operator 3 "comparison_operator"
7011 (match_operand:SI 1 "s_register_operand" "%l,l,l,0")
7012 (match_operand:SI 2 "reg_or_int_operand" "J,l,L,IJ"))
7014 (label_ref (match_operand 4 "" ""))
7016 (clobber (match_scratch:SI 0 "=X,X,l,l"))]
7018 && (GET_CODE (operands[3]) == EQ
7019 || GET_CODE (operands[3]) == NE
7020 || GET_CODE (operands[3]) == GE
7021 || GET_CODE (operands[3]) == LT)"
7024 switch (which_alternative)
7027 output_asm_insn (\"cmp\t%1, #%n2\", operands);
7030 output_asm_insn (\"cmn\t%1, %2\", operands);
7033 if (INTVAL (operands[2]) < 0)
7034 output_asm_insn (\"sub\t%0, %1, %2\", operands);
7036 output_asm_insn (\"add\t%0, %1, %2\", operands);
7039 if (INTVAL (operands[2]) < 0)
7040 output_asm_insn (\"sub\t%0, %0, %2\", operands);
7042 output_asm_insn (\"add\t%0, %0, %2\", operands);
7046 switch (get_attr_length (insn))
7049 return \"b%d3\\t%l4\";
7051 return \"b%D3\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
7053 return \"b%D3\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
7057 [(set (attr "far_jump")
7059 (eq_attr "length" "8")
7060 (const_string "yes")
7061 (const_string "no")))
7062 (set (attr "length")
7064 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
7065 (le (minus (match_dup 4) (pc)) (const_int 256)))
7068 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
7069 (le (minus (match_dup 4) (pc)) (const_int 2048)))
;; Thumb-1: subtract two registers and branch on EQ/NE/GE/LT of the
;; difference.  As the inline comments explain, hi-reg and memory
;; destinations must go through the scratch ("sub" cannot target them),
;; relying on lo->hi "mov" not clobbering the condition codes.
7074 (define_insn "*subsi3_cbranch"
7077 (match_operator 4 "comparison_operator"
7079 (match_operand:SI 2 "s_register_operand" "l,l,1,l")
7080 (match_operand:SI 3 "s_register_operand" "l,l,l,l"))
7082 (label_ref (match_operand 5 "" ""))
7084 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
7085 (minus:SI (match_dup 2) (match_dup 3)))
7086 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
7088 && (GET_CODE (operands[4]) == EQ
7089 || GET_CODE (operands[4]) == NE
7090 || GET_CODE (operands[4]) == GE
7091 || GET_CODE (operands[4]) == LT)"
7094 if (which_alternative == 0)
7095 output_asm_insn (\"sub\\t%0, %2, %3\", operands);
7096 else if (which_alternative == 1)
7098 /* We must provide an alternative for a hi reg because reload
7099 cannot handle output reloads on a jump instruction, but we
7100 can't subtract into that. Fortunately a mov from lo to hi
7101 does not clobber the condition codes. */
7102 output_asm_insn (\"sub\\t%1, %2, %3\", operands);
7103 output_asm_insn (\"mov\\t%0, %1\", operands);
7107 /* Similarly, but the target is memory. */
7108 output_asm_insn (\"sub\\t%1, %2, %3\", operands);
7109 output_asm_insn (\"str\\t%1, %0\", operands);
7112 switch (get_attr_length (insn) - ((which_alternative != 0) ? 2 : 0))
7115 return \"b%d4\\t%l5\";
7117 return \"b%D4\\t.LCB%=\;b\\t%l5\\t%@long jump\\n.LCB%=:\";
7119 return \"b%D4\\t.LCB%=\;bl\\t%l5\\t%@far jump\\n.LCB%=:\";
7123 [(set (attr "far_jump")
7125 (ior (and (eq (symbol_ref ("which_alternative"))
7127 (eq_attr "length" "8"))
7128 (eq_attr "length" "10"))
7129 (const_string "yes")
7130 (const_string "no")))
7131 (set (attr "length")
7133 (eq (symbol_ref ("which_alternative"))
7136 (and (ge (minus (match_dup 5) (pc)) (const_int -250))
7137 (le (minus (match_dup 5) (pc)) (const_int 256)))
7140 (and (ge (minus (match_dup 5) (pc)) (const_int -2040))
7141 (le (minus (match_dup 5) (pc)) (const_int 2048)))
7145 (and (ge (minus (match_dup 5) (pc)) (const_int -248))
7146 (le (minus (match_dup 5) (pc)) (const_int 256)))
7149 (and (ge (minus (match_dup 5) (pc)) (const_int -2038))
7150 (le (minus (match_dup 5) (pc)) (const_int 2048)))
;; Thumb-1 plain compare-and-branch: the difference is dead, so just
;; emit "cmp" and the length-selected short/long/far branch on
;; EQ/NE/GE/LT.
7155 (define_insn "*subsi3_cbranch_scratch"
7158 (match_operator 0 "arm_comparison_operator"
7159 [(minus:SI (match_operand:SI 1 "register_operand" "l")
7160 (match_operand:SI 2 "nonmemory_operand" "l"))
7162 (label_ref (match_operand 3 "" ""))
7165 && (GET_CODE (operands[0]) == EQ
7166 || GET_CODE (operands[0]) == NE
7167 || GET_CODE (operands[0]) == GE
7168 || GET_CODE (operands[0]) == LT)"
7170 output_asm_insn (\"cmp\\t%1, %2\", operands);
7171 switch (get_attr_length (insn))
7173 case 4: return \"b%d0\\t%l3\";
7174 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
7175 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
7178 [(set (attr "far_jump")
7180 (eq_attr "length" "8")
7181 (const_string "yes")
7182 (const_string "no")))
7183 (set (attr "length")
7185 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
7186 (le (minus (match_dup 3) (pc)) (const_int 256)))
7189 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
7190 (le (minus (match_dup 3) (pc)) (const_int 2048)))
7195 ;; Comparison and test insns
;; cmpMM expanders emit no RTL themselves: they only stash the operands
;; in the globals arm_compare_op0/arm_compare_op1, which the subsequent
;; bCC / sCC expanders consume via arm_gen_compare_reg.  The FP forms
;; require hard float.
7197 (define_expand "cmpsi"
7198 [(match_operand:SI 0 "s_register_operand" "")
7199 (match_operand:SI 1 "arm_add_operand" "")]
7202 arm_compare_op0 = operands[0];
7203 arm_compare_op1 = operands[1];
7208 (define_expand "cmpsf"
7209 [(match_operand:SF 0 "s_register_operand" "")
7210 (match_operand:SF 1 "arm_float_compare_operand" "")]
7211 "TARGET_32BIT && TARGET_HARD_FLOAT"
7213 arm_compare_op0 = operands[0];
7214 arm_compare_op1 = operands[1];
7219 (define_expand "cmpdf"
7220 [(match_operand:DF 0 "s_register_operand" "")
7221 (match_operand:DF 1 "arm_float_compare_operand" "")]
7222 "TARGET_32BIT && TARGET_HARD_FLOAT"
7224 arm_compare_op0 = operands[0];
7225 arm_compare_op1 = operands[1];
;; ARM-state SImode compare setting the condition codes; the second
;; alternative (constraint "L") handles constants usable only in negated
;; form.  NOTE(review): the output template line is not visible in this
;; extract.
7230 (define_insn "*arm_cmpsi_insn"
7231 [(set (reg:CC CC_REGNUM)
7232 (compare:CC (match_operand:SI 0 "s_register_operand" "r,r")
7233 (match_operand:SI 1 "arm_add_operand" "rI,L")))]
7238 [(set_attr "conds" "set")]
;; Compare a register against a shifted register.  The "type" attribute
;; distinguishes shift-by-constant from shift-by-register for scheduling.
7241 (define_insn "*arm_cmpsi_shiftsi"
7242 [(set (reg:CC CC_REGNUM)
7243 (compare:CC (match_operand:SI 0 "s_register_operand" "r")
7244 (match_operator:SI 3 "shift_operator"
7245 [(match_operand:SI 1 "s_register_operand" "r")
7246 (match_operand:SI 2 "arm_rhs_operand" "rM")])))]
7249 [(set_attr "conds" "set")
7250 (set_attr "shift" "1")
7251 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
7252 (const_string "alu_shift")
7253 (const_string "alu_shift_reg")))]
;; As *arm_cmpsi_shiftsi but with the operands swapped; the CC_SWP mode
;; on the CC register records that the comparison sense is reversed.
7256 (define_insn "*arm_cmpsi_shiftsi_swp"
7257 [(set (reg:CC_SWP CC_REGNUM)
7258 (compare:CC_SWP (match_operator:SI 3 "shift_operator"
7259 [(match_operand:SI 1 "s_register_operand" "r")
7260 (match_operand:SI 2 "reg_or_int_operand" "rM")])
7261 (match_operand:SI 0 "s_register_operand" "r")))]
7264 [(set_attr "conds" "set")
7265 (set_attr "shift" "1")
7266 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
7267 (const_string "alu_shift")
7268 (const_string "alu_shift_reg")))]
;; Compare a register against the negation of a shifted register; CC_Z
;; mode records that only the zero flag result is valid afterwards.
7271 (define_insn "*arm_cmpsi_negshiftsi_si"
7272 [(set (reg:CC_Z CC_REGNUM)
7274 (neg:SI (match_operator:SI 1 "shift_operator"
7275 [(match_operand:SI 2 "s_register_operand" "r")
7276 (match_operand:SI 3 "reg_or_int_operand" "rM")]))
7277 (match_operand:SI 0 "s_register_operand" "r")))]
7280 [(set_attr "conds" "set")
7281 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
7282 (const_string "alu_shift")
7283 (const_string "alu_shift_reg")))]
;; Cirrus SF compare instruction
;; Maverick (Cirrus) coprocessor single-float compare; "cfcmps" writes
;; the result into the ARM flags (r15 as destination).
7287 (define_insn "*cirrus_cmpsf"
7288 [(set (reg:CCFP CC_REGNUM)
7289 (compare:CCFP (match_operand:SF 0 "cirrus_fp_register" "v")
7290 (match_operand:SF 1 "cirrus_fp_register" "v")))]
7291 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
7292 "cfcmps%?\\tr15, %V0, %V1"
7293 [(set_attr "type" "mav_farith")
7294 (set_attr "cirrus" "compare")]
;; Cirrus DF compare instruction
;; Maverick double-float compare, analogous to *cirrus_cmpsf.
7298 (define_insn "*cirrus_cmpdf"
7299 [(set (reg:CCFP CC_REGNUM)
7300 (compare:CCFP (match_operand:DF 0 "cirrus_fp_register" "v")
7301 (match_operand:DF 1 "cirrus_fp_register" "v")))]
7302 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
7303 "cfcmpd%?\\tr15, %V0, %V1"
7304 [(set_attr "type" "mav_farith")
7305 (set_attr "cirrus" "compare")]
;; Cirrus DI compare instruction
;; DImode compare expander (Maverick only): like the other cmpMM
;; expanders it just records the operands for the later bCC/sCC.
7309 (define_expand "cmpdi"
7310 [(match_operand:DI 0 "cirrus_fp_register" "")
7311 (match_operand:DI 1 "cirrus_fp_register" "")]
7312 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
7314 arm_compare_op0 = operands[0];
7315 arm_compare_op1 = operands[1];
;; Maverick 64-bit compare; "cfcmp64" deposits the result in the ARM
;; flags.
7319 (define_insn "*cirrus_cmpdi"
7320 [(set (reg:CC CC_REGNUM)
7321 (compare:CC (match_operand:DI 0 "cirrus_fp_register" "v")
7322 (match_operand:DI 1 "cirrus_fp_register" "v")))]
7323 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
7324 "cfcmp64%?\\tr15, %V0, %V1"
7325 [(set_attr "type" "mav_farith")
7326 (set_attr "cirrus" "compare")]
7329 ; This insn allows redundant compares to be removed by cse, nothing should
7330 ; ever appear in the output file since (set (reg x) (reg x)) is a no-op that
7331 ; is deleted later on. The match_dup will match the mode here, so that
7332 ; mode changes of the condition codes aren't lost by this even though we don't
7333 ; specify what they are.
;; Zero-length no-op (see block comment above): emits only an assembler
;; comment if it survives to output.
7335 (define_insn "*deleted_compare"
7336 [(set (match_operand 0 "cc_register" "") (match_dup 0))]
7338 "\\t%@ deleted compare"
7339 [(set_attr "conds" "set")
7340 (set_attr "length" "0")]
;; Conditional branch insns
;; Each "bCC" expander materializes the comparison recorded earlier (in
;; arm_compare_op0/arm_compare_op1 by the cmpMM expanders) through
;; arm_gen_compare_reg, then branches on the resulting CC register.
;; The unordered/ordered FP variants require hard float (FPA or VFP).
;; BUNEQ and BLTGT are listed last because, as noted below, they need
;; two branch instructions each.
7346 (define_expand "beq"
7348 (if_then_else (eq (match_dup 1) (const_int 0))
7349 (label_ref (match_operand 0 "" ""))
7352 "operands[1] = arm_gen_compare_reg (EQ, arm_compare_op0, arm_compare_op1);"
7355 (define_expand "bne"
7357 (if_then_else (ne (match_dup 1) (const_int 0))
7358 (label_ref (match_operand 0 "" ""))
7361 "operands[1] = arm_gen_compare_reg (NE, arm_compare_op0, arm_compare_op1);"
7364 (define_expand "bgt"
7366 (if_then_else (gt (match_dup 1) (const_int 0))
7367 (label_ref (match_operand 0 "" ""))
7370 "operands[1] = arm_gen_compare_reg (GT, arm_compare_op0, arm_compare_op1);"
7373 (define_expand "ble"
7375 (if_then_else (le (match_dup 1) (const_int 0))
7376 (label_ref (match_operand 0 "" ""))
7379 "operands[1] = arm_gen_compare_reg (LE, arm_compare_op0, arm_compare_op1);"
7382 (define_expand "bge"
7384 (if_then_else (ge (match_dup 1) (const_int 0))
7385 (label_ref (match_operand 0 "" ""))
7388 "operands[1] = arm_gen_compare_reg (GE, arm_compare_op0, arm_compare_op1);"
7391 (define_expand "blt"
7393 (if_then_else (lt (match_dup 1) (const_int 0))
7394 (label_ref (match_operand 0 "" ""))
7397 "operands[1] = arm_gen_compare_reg (LT, arm_compare_op0, arm_compare_op1);"
7400 (define_expand "bgtu"
7402 (if_then_else (gtu (match_dup 1) (const_int 0))
7403 (label_ref (match_operand 0 "" ""))
7406 "operands[1] = arm_gen_compare_reg (GTU, arm_compare_op0, arm_compare_op1);"
7409 (define_expand "bleu"
7411 (if_then_else (leu (match_dup 1) (const_int 0))
7412 (label_ref (match_operand 0 "" ""))
7415 "operands[1] = arm_gen_compare_reg (LEU, arm_compare_op0, arm_compare_op1);"
7418 (define_expand "bgeu"
7420 (if_then_else (geu (match_dup 1) (const_int 0))
7421 (label_ref (match_operand 0 "" ""))
7424 "operands[1] = arm_gen_compare_reg (GEU, arm_compare_op0, arm_compare_op1);"
7427 (define_expand "bltu"
7429 (if_then_else (ltu (match_dup 1) (const_int 0))
7430 (label_ref (match_operand 0 "" ""))
7433 "operands[1] = arm_gen_compare_reg (LTU, arm_compare_op0, arm_compare_op1);"
7436 (define_expand "bunordered"
7438 (if_then_else (unordered (match_dup 1) (const_int 0))
7439 (label_ref (match_operand 0 "" ""))
7441 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7442 "operands[1] = arm_gen_compare_reg (UNORDERED, arm_compare_op0,
7446 (define_expand "bordered"
7448 (if_then_else (ordered (match_dup 1) (const_int 0))
7449 (label_ref (match_operand 0 "" ""))
7451 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7452 "operands[1] = arm_gen_compare_reg (ORDERED, arm_compare_op0,
7456 (define_expand "bungt"
7458 (if_then_else (ungt (match_dup 1) (const_int 0))
7459 (label_ref (match_operand 0 "" ""))
7461 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7462 "operands[1] = arm_gen_compare_reg (UNGT, arm_compare_op0, arm_compare_op1);"
7465 (define_expand "bunlt"
7467 (if_then_else (unlt (match_dup 1) (const_int 0))
7468 (label_ref (match_operand 0 "" ""))
7470 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7471 "operands[1] = arm_gen_compare_reg (UNLT, arm_compare_op0, arm_compare_op1);"
7474 (define_expand "bunge"
7476 (if_then_else (unge (match_dup 1) (const_int 0))
7477 (label_ref (match_operand 0 "" ""))
7479 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7480 "operands[1] = arm_gen_compare_reg (UNGE, arm_compare_op0, arm_compare_op1);"
7483 (define_expand "bunle"
7485 (if_then_else (unle (match_dup 1) (const_int 0))
7486 (label_ref (match_operand 0 "" ""))
7488 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7489 "operands[1] = arm_gen_compare_reg (UNLE, arm_compare_op0, arm_compare_op1);"
7492 ;; The following two patterns need two branch instructions, since there is
7493 ;; no single instruction that will handle all cases.
7494 (define_expand "buneq"
7496 (if_then_else (uneq (match_dup 1) (const_int 0))
7497 (label_ref (match_operand 0 "" ""))
7499 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7500 "operands[1] = arm_gen_compare_reg (UNEQ, arm_compare_op0, arm_compare_op1);"
7503 (define_expand "bltgt"
7505 (if_then_else (ltgt (match_dup 1) (const_int 0))
7506 (label_ref (match_operand 0 "" ""))
7508 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7509 "operands[1] = arm_gen_compare_reg (LTGT, arm_compare_op0, arm_compare_op1);"
7513 ;; Patterns to match conditional branch insns.
; Special pattern to match UNEQ.
; UNEQ (unordered-or-equal) has no single ARM condition, so emit two
; branches: V set (unordered) or Z set (equal).  Incompatible with the
; conditional-execution state machine, hence the gcc_assert and the
; "jump_clob" conds.
7517 (define_insn "*arm_buneq"
7519 (if_then_else (uneq (match_operand 1 "cc_register" "") (const_int 0))
7520 (label_ref (match_operand 0 "" ""))
7522 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7524 gcc_assert (!arm_ccfsm_state);
7526 return \"bvs\\t%l0\;beq\\t%l0\";
7528 [(set_attr "conds" "jump_clob")
7529 (set_attr "length" "8")]
; Special pattern to match LTGT.
; LTGT (ordered and not equal) likewise needs two branches: N set
; (less) or signed greater.
7533 (define_insn "*arm_bltgt"
7535 (if_then_else (ltgt (match_operand 1 "cc_register" "") (const_int 0))
7536 (label_ref (match_operand 0 "" ""))
7538 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7540 gcc_assert (!arm_ccfsm_state);
7542 return \"bmi\\t%l0\;bgt\\t%l0\";
7544 [(set_attr "conds" "jump_clob")
7545 (set_attr "length" "8")]
;; Generic conditional branch on any ARM condition.  When the
;; conditional-execution state machine (arm_ccfsm_state) has decided to
;; predicate the following instructions instead, the branch itself is
;; suppressed (state advanced by 2, no code emitted).
7548 (define_insn "*arm_cond_branch"
7550 (if_then_else (match_operator 1 "arm_comparison_operator"
7551 [(match_operand 2 "cc_register" "") (const_int 0)])
7552 (label_ref (match_operand 0 "" ""))
7556 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7558 arm_ccfsm_state += 2;
7561 return \"b%d1\\t%l0\";
7563 [(set_attr "conds" "use")
7564 (set_attr "type" "branch")]
; Special pattern to match reversed UNEQ.
; Branch when UNEQ is false, i.e. on LTGT -- hence the bmi/bgt pair
; (compare *arm_bltgt).
7568 (define_insn "*arm_buneq_reversed"
7570 (if_then_else (uneq (match_operand 1 "cc_register" "") (const_int 0))
7572 (label_ref (match_operand 0 "" ""))))]
7573 "TARGET_ARM && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7575 gcc_assert (!arm_ccfsm_state);
7577 return \"bmi\\t%l0\;bgt\\t%l0\";
7579 [(set_attr "conds" "jump_clob")
7580 (set_attr "length" "8")]
; Special pattern to match reversed LTGT.
; Branch when LTGT is false, i.e. on UNEQ -- hence the bvs/beq pair
; (compare *arm_buneq).
7584 (define_insn "*arm_bltgt_reversed"
7586 (if_then_else (ltgt (match_operand 1 "cc_register" "") (const_int 0))
7588 (label_ref (match_operand 0 "" ""))))]
7589 "TARGET_ARM && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7591 gcc_assert (!arm_ccfsm_state);
7593 return \"bvs\\t%l0\;beq\\t%l0\";
7595 [(set_attr "conds" "jump_clob")
7596 (set_attr "length" "8")]
;; As *arm_cond_branch but with label and fall-through swapped, so the
;; branch is emitted on the inverse condition (%D1 instead of %d1).
7599 (define_insn "*arm_cond_branch_reversed"
7601 (if_then_else (match_operator 1 "arm_comparison_operator"
7602 [(match_operand 2 "cc_register" "") (const_int 0)])
7604 (label_ref (match_operand 0 "" ""))))]
7607 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7609 arm_ccfsm_state += 2;
7612 return \"b%D1\\t%l0\";
7614 [(set_attr "conds" "use")
7615 (set_attr "type" "branch")]
;; "sCC" store-condition expanders: set operand 0 to the truth value of
;; the pending comparison, materialized (like the bCC expanders above)
;; via arm_gen_compare_reg from arm_compare_op0/arm_compare_op1.
;; SUNEQ and SLTGT are deliberately absent -- see the comment block at
;; the end.
7622 (define_expand "seq"
7623 [(set (match_operand:SI 0 "s_register_operand" "")
7624 (eq:SI (match_dup 1) (const_int 0)))]
7626 "operands[1] = arm_gen_compare_reg (EQ, arm_compare_op0, arm_compare_op1);"
7629 (define_expand "sne"
7630 [(set (match_operand:SI 0 "s_register_operand" "")
7631 (ne:SI (match_dup 1) (const_int 0)))]
7633 "operands[1] = arm_gen_compare_reg (NE, arm_compare_op0, arm_compare_op1);"
7636 (define_expand "sgt"
7637 [(set (match_operand:SI 0 "s_register_operand" "")
7638 (gt:SI (match_dup 1) (const_int 0)))]
7640 "operands[1] = arm_gen_compare_reg (GT, arm_compare_op0, arm_compare_op1);"
7643 (define_expand "sle"
7644 [(set (match_operand:SI 0 "s_register_operand" "")
7645 (le:SI (match_dup 1) (const_int 0)))]
7647 "operands[1] = arm_gen_compare_reg (LE, arm_compare_op0, arm_compare_op1);"
7650 (define_expand "sge"
7651 [(set (match_operand:SI 0 "s_register_operand" "")
7652 (ge:SI (match_dup 1) (const_int 0)))]
7654 "operands[1] = arm_gen_compare_reg (GE, arm_compare_op0, arm_compare_op1);"
7657 (define_expand "slt"
7658 [(set (match_operand:SI 0 "s_register_operand" "")
7659 (lt:SI (match_dup 1) (const_int 0)))]
7661 "operands[1] = arm_gen_compare_reg (LT, arm_compare_op0, arm_compare_op1);"
7664 (define_expand "sgtu"
7665 [(set (match_operand:SI 0 "s_register_operand" "")
7666 (gtu:SI (match_dup 1) (const_int 0)))]
7668 "operands[1] = arm_gen_compare_reg (GTU, arm_compare_op0, arm_compare_op1);"
7671 (define_expand "sleu"
7672 [(set (match_operand:SI 0 "s_register_operand" "")
7673 (leu:SI (match_dup 1) (const_int 0)))]
7675 "operands[1] = arm_gen_compare_reg (LEU, arm_compare_op0, arm_compare_op1);"
7678 (define_expand "sgeu"
7679 [(set (match_operand:SI 0 "s_register_operand" "")
7680 (geu:SI (match_dup 1) (const_int 0)))]
7682 "operands[1] = arm_gen_compare_reg (GEU, arm_compare_op0, arm_compare_op1);"
7685 (define_expand "sltu"
7686 [(set (match_operand:SI 0 "s_register_operand" "")
7687 (ltu:SI (match_dup 1) (const_int 0)))]
7689 "operands[1] = arm_gen_compare_reg (LTU, arm_compare_op0, arm_compare_op1);"
7692 (define_expand "sunordered"
7693 [(set (match_operand:SI 0 "s_register_operand" "")
7694 (unordered:SI (match_dup 1) (const_int 0)))]
7695 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7696 "operands[1] = arm_gen_compare_reg (UNORDERED, arm_compare_op0,
7700 (define_expand "sordered"
7701 [(set (match_operand:SI 0 "s_register_operand" "")
7702 (ordered:SI (match_dup 1) (const_int 0)))]
7703 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7704 "operands[1] = arm_gen_compare_reg (ORDERED, arm_compare_op0,
7708 (define_expand "sungt"
7709 [(set (match_operand:SI 0 "s_register_operand" "")
7710 (ungt:SI (match_dup 1) (const_int 0)))]
7711 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7712 "operands[1] = arm_gen_compare_reg (UNGT, arm_compare_op0,
7716 (define_expand "sunge"
7717 [(set (match_operand:SI 0 "s_register_operand" "")
7718 (unge:SI (match_dup 1) (const_int 0)))]
7719 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7720 "operands[1] = arm_gen_compare_reg (UNGE, arm_compare_op0,
7724 (define_expand "sunlt"
7725 [(set (match_operand:SI 0 "s_register_operand" "")
7726 (unlt:SI (match_dup 1) (const_int 0)))]
7727 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7728 "operands[1] = arm_gen_compare_reg (UNLT, arm_compare_op0,
7732 (define_expand "sunle"
7733 [(set (match_operand:SI 0 "s_register_operand" "")
7734 (unle:SI (match_dup 1) (const_int 0)))]
7735 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7736 "operands[1] = arm_gen_compare_reg (UNLE, arm_compare_op0,
7740 ;;; DO NOT add patterns for SUNEQ or SLTGT, these can't be represented with
7741 ;;; simple ARM instructions.
7743 ; (define_expand "suneq"
7744 ; [(set (match_operand:SI 0 "s_register_operand" "")
7745 ; (uneq:SI (match_dup 1) (const_int 0)))]
7746 ; "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7747 ; "gcc_unreachable ();"
7750 ; (define_expand "sltgt"
7751 ; [(set (match_operand:SI 0 "s_register_operand" "")
7752 ; (ltgt:SI (match_dup 1) (const_int 0)))]
7753 ; "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7754 ; "gcc_unreachable ();"
;; Store a condition as 0/1 using a pair of conditional movs
;; (%D1 = inverse condition arm, %d1 = condition arm); 8 bytes.
7757 (define_insn "*mov_scc"
7758 [(set (match_operand:SI 0 "s_register_operand" "=r")
7759 (match_operator:SI 1 "arm_comparison_operator"
7760 [(match_operand 2 "cc_register" "") (const_int 0)]))]
7762 "mov%D1\\t%0, #0\;mov%d1\\t%0, #1"
7763 [(set_attr "conds" "use")
7764 (set_attr "length" "8")]
;; Store the negated condition result: 0 when false, -1 (mvn #0) when
;; true.
7767 (define_insn "*mov_negscc"
7768 [(set (match_operand:SI 0 "s_register_operand" "=r")
7769 (neg:SI (match_operator:SI 1 "arm_comparison_operator"
7770 [(match_operand 2 "cc_register" "") (const_int 0)])))]
7772 "mov%D1\\t%0, #0\;mvn%d1\\t%0, #0"
7773 [(set_attr "conds" "use")
7774 (set_attr "length" "8")]
7777 (define_insn "*mov_notscc"
7778 [(set (match_operand:SI 0 "s_register_operand" "=r")
7779 (not:SI (match_operator:SI 1 "arm_comparison_operator"
7780 [(match_operand 2 "cc_register" "") (const_int 0)])))]
7782 "mov%D1\\t%0, #0\;mvn%d1\\t%0, #1"
7783 [(set_attr "conds" "use")
7784 (set_attr "length" "8")]
;; Thumb-1 store-flag expander.  Since Thumb-1 has no conditional
;; execution, each comparison is open-coded from add/ior/shift/adc/sbc
;; idioms: EQ/NE against zero go through the cstoresi_eq0/ne0 insns,
;; GE/LE/GEU/LEU are built on thumb1_addsi3_addgeu, and GTU/LTU use
;; cstoresi_nltu_thumb1 plus a negate.  (Several case labels, braces
;; and the FAIL/DONE statements are elided in this excerpt.)
7787 (define_expand "cstoresi4"
7788 [(set (match_operand:SI 0 "s_register_operand" "")
7789 (match_operator:SI 1 "arm_comparison_operator"
7790 [(match_operand:SI 2 "s_register_operand" "")
7791 (match_operand:SI 3 "reg_or_int_operand" "")]))]
7794 rtx op3, scratch, scratch2;
7796 if (operands[3] == const0_rtx)
7798 switch (GET_CODE (operands[1]))
7801 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], operands[2]));
7805 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], operands[2]));
7809 scratch = expand_binop (SImode, add_optab, operands[2], constm1_rtx,
7810 NULL_RTX, 0, OPTAB_WIDEN);
7811 scratch = expand_binop (SImode, ior_optab, operands[2], scratch,
7812 NULL_RTX, 0, OPTAB_WIDEN);
7813 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
7814 operands[0], 1, OPTAB_WIDEN);
7818 scratch = expand_unop (SImode, one_cmpl_optab, operands[2],
7820 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
7821 NULL_RTX, 1, OPTAB_WIDEN);
7825 scratch = expand_binop (SImode, ashr_optab, operands[2],
7826 GEN_INT (31), NULL_RTX, 0, OPTAB_WIDEN);
7827 scratch = expand_binop (SImode, sub_optab, scratch, operands[2],
7828 NULL_RTX, 0, OPTAB_WIDEN);
7829 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31), operands[0],
7833 /* LT is handled by generic code. No need for unsigned with 0. */
7840 switch (GET_CODE (operands[1]))
7843 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
7844 NULL_RTX, 0, OPTAB_WIDEN);
7845 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], scratch));
7849 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
7850 NULL_RTX, 0, OPTAB_WIDEN);
7851 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], scratch));
7855 op3 = force_reg (SImode, operands[3]);
7857 scratch = expand_binop (SImode, lshr_optab, operands[2], GEN_INT (31),
7858 NULL_RTX, 1, OPTAB_WIDEN);
7859 scratch2 = expand_binop (SImode, ashr_optab, op3, GEN_INT (31),
7860 NULL_RTX, 0, OPTAB_WIDEN);
7861 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
7867 if (!thumb1_cmp_operand (op3, SImode))
7868 op3 = force_reg (SImode, op3);
7869 scratch = expand_binop (SImode, ashr_optab, operands[2], GEN_INT (31),
7870 NULL_RTX, 0, OPTAB_WIDEN);
7871 scratch2 = expand_binop (SImode, lshr_optab, op3, GEN_INT (31),
7872 NULL_RTX, 1, OPTAB_WIDEN);
7873 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
7878 op3 = force_reg (SImode, operands[3]);
7879 scratch = force_reg (SImode, const0_rtx);
7880 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
7886 if (!thumb1_cmp_operand (op3, SImode))
7887 op3 = force_reg (SImode, op3);
7888 scratch = force_reg (SImode, const0_rtx);
7889 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
7895 if (!thumb1_cmp_operand (op3, SImode))
7896 op3 = force_reg (SImode, op3);
7897 scratch = gen_reg_rtx (SImode);
7898 emit_insn (gen_cstoresi_nltu_thumb1 (scratch, operands[2], op3));
7899 emit_insn (gen_negsi2 (operands[0], scratch));
7903 op3 = force_reg (SImode, operands[3]);
7904 scratch = gen_reg_rtx (SImode);
7905 emit_insn (gen_cstoresi_nltu_thumb1 (scratch, op3, operands[2]));
7906 emit_insn (gen_negsi2 (operands[0], scratch));
7909 /* No good sequences for GT, LT. */
;; Expanders wrapping the Thumb-1 eq0/ne0 insns below.  Operand 2 is a
;; fresh SImode scratch register clobbered by the insn pattern.
7916 (define_expand "cstoresi_eq0_thumb1"
7918 [(set (match_operand:SI 0 "s_register_operand" "")
7919 (eq:SI (match_operand:SI 1 "s_register_operand" "")
7921 (clobber (match_dup:SI 2))])]
7923 "operands[2] = gen_reg_rtx (SImode);"
7926 (define_expand "cstoresi_ne0_thumb1"
7928 [(set (match_operand:SI 0 "s_register_operand" "")
7929 (ne:SI (match_operand:SI 1 "s_register_operand" "")
7931 (clobber (match_dup:SI 2))])]
7933 "operands[2] = gen_reg_rtx (SImode);"
;; %0 = (%1 == 0) for Thumb-1: "neg" (rsbs 0-%1) sets C exactly when
;; %1 is zero, and the adc then delivers that carry as 0/1.  Two 2-byte
;; Thumb insns, hence length 4.
7936 (define_insn "*cstoresi_eq0_thumb1_insn"
7937 [(set (match_operand:SI 0 "s_register_operand" "=&l,l")
7938 (eq:SI (match_operand:SI 1 "s_register_operand" "l,0")
7940 (clobber (match_operand:SI 2 "s_register_operand" "=X,l"))]
7943 neg\\t%0, %1\;adc\\t%0, %0, %1
7944 neg\\t%2, %1\;adc\\t%0, %1, %2"
7945 [(set_attr "length" "4")]
;; %0 = (%1 != 0): "sub %2, %1, #1" borrows only when %1 == 0, so the
;; sbc computes %1 - (%1 - 1) - !C = C = (%1 != 0).
7948 (define_insn "*cstoresi_ne0_thumb1_insn"
7949 [(set (match_operand:SI 0 "s_register_operand" "=l")
7950 (ne:SI (match_operand:SI 1 "s_register_operand" "0")
7952 (clobber (match_operand:SI 2 "s_register_operand" "=l"))]
7954 "sub\\t%2, %1, #1\;sbc\\t%0, %1, %2"
7955 [(set_attr "length" "4")]
;; NOTE(review): the asm computes -(%1 <u %2): cmp sets C = "no borrow"
;; (%1 >=u %2) and "sbc %0, %0, %0" yields C-1, i.e. -1 exactly when
;; %1 <u %2.  The RTL, however, reads (neg (gtu %1 %2)).  The name
;; "nltu" matches the asm, not the RTL -- confirm against a current
;; arm.md (this was later reworked) before depending on either reading.
7958 (define_insn "cstoresi_nltu_thumb1"
7959 [(set (match_operand:SI 0 "s_register_operand" "=l,l")
7960 (neg:SI (gtu:SI (match_operand:SI 1 "s_register_operand" "l,*h")
7961 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r"))))]
7963 "cmp\\t%1, %2\;sbc\\t%0, %0, %0"
7964 [(set_attr "length" "4")]
7967 ;; Used as part of the expansion of thumb les sequence.
;; %0 = %1 + %2 + (%3 >=u %4): the cmp leaves the geu result in the
;; carry flag and adc folds it into the sum.
7968 (define_insn "thumb1_addsi3_addgeu"
7969 [(set (match_operand:SI 0 "s_register_operand" "=l")
7970 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%0")
7971 (match_operand:SI 2 "s_register_operand" "l"))
7972 (geu:SI (match_operand:SI 3 "s_register_operand" "l")
7973 (match_operand:SI 4 "thumb1_cmp_operand" "lI"))))]
7975 "cmp\\t%3, %4\;adc\\t%0, %1, %2"
7976 [(set_attr "length" "4")]
7980 ;; Conditional move insns
;; SImode conditional move: emits the comparison from the global
;; arm_compare_op0/op1 operands and rewrites operand 1 to test the
;; resulting CC register.  UNEQ/LTGT are rejected (the FAIL statement
;; falls in an elided line of this excerpt).
7982 (define_expand "movsicc"
7983 [(set (match_operand:SI 0 "s_register_operand" "")
7984 (if_then_else:SI (match_operand 1 "arm_comparison_operator" "")
7985 (match_operand:SI 2 "arm_not_operand" "")
7986 (match_operand:SI 3 "arm_not_operand" "")))]
7990 enum rtx_code code = GET_CODE (operands[1]);
7993 if (code == UNEQ || code == LTGT)
7996 ccreg = arm_gen_compare_reg (code, arm_compare_op0, arm_compare_op1);
7997 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
;; SFmode conditional move; forces operand 3 into a register unless the
;; FPA can use it directly as an FP add operand.
8001 (define_expand "movsfcc"
8002 [(set (match_operand:SF 0 "s_register_operand" "")
8003 (if_then_else:SF (match_operand 1 "arm_comparison_operator" "")
8004 (match_operand:SF 2 "s_register_operand" "")
8005 (match_operand:SF 3 "nonmemory_operand" "")))]
8009 enum rtx_code code = GET_CODE (operands[1]);
8012 if (code == UNEQ || code == LTGT)
8015 /* When compiling for SOFT_FLOAT, ensure both arms are in registers.
8016 Otherwise, ensure it is a valid FP add operand */
8017 if ((!(TARGET_HARD_FLOAT && TARGET_FPA))
8018 || (!arm_float_add_operand (operands[3], SFmode)))
8019 operands[3] = force_reg (SFmode, operands[3]);
8021 ccreg = arm_gen_compare_reg (code, arm_compare_op0, arm_compare_op1);
8022 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
;; DFmode conditional move; hard-float only (FPA or VFP).
8026 (define_expand "movdfcc"
8027 [(set (match_operand:DF 0 "s_register_operand" "")
8028 (if_then_else:DF (match_operand 1 "arm_comparison_operator" "")
8029 (match_operand:DF 2 "s_register_operand" "")
8030 (match_operand:DF 3 "arm_float_add_operand" "")))]
8031 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
8034 enum rtx_code code = GET_CODE (operands[1]);
8037 if (code == UNEQ || code == LTGT)
8040 ccreg = arm_gen_compare_reg (code, arm_compare_op0, arm_compare_op1);
8041 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
;; SImode conditional move built from predicated mov/mvn.  The first
;; four alternatives reuse one input in place (single insn, length 4);
;; the last four need both arms moved (length 8).  %B prints the
;; bitwise complement of an immediate, pairing with mvn for "K"
;; (not-ok-for-arm) constants.
8045 (define_insn "*movsicc_insn"
8046 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r,r,r,r,r")
8048 (match_operator 3 "arm_comparison_operator"
8049 [(match_operand 4 "cc_register" "") (const_int 0)])
8050 (match_operand:SI 1 "arm_not_operand" "0,0,rI,K,rI,rI,K,K")
8051 (match_operand:SI 2 "arm_not_operand" "rI,K,0,0,rI,K,rI,K")))]
8058 mov%d3\\t%0, %1\;mov%D3\\t%0, %2
8059 mov%d3\\t%0, %1\;mvn%D3\\t%0, #%B2
8060 mvn%d3\\t%0, #%B1\;mov%D3\\t%0, %2
8061 mvn%d3\\t%0, #%B1\;mvn%D3\\t%0, #%B2"
8062 [(set_attr "length" "4,4,4,4,8,8,8,8")
8063 (set_attr "conds" "use")]
;; SFmode conditional move under soft-float: the SF values live in core
;; registers, so a predicated integer mov suffices.
8066 (define_insn "*movsfcc_soft_insn"
8067 [(set (match_operand:SF 0 "s_register_operand" "=r,r")
8068 (if_then_else:SF (match_operator 3 "arm_comparison_operator"
8069 [(match_operand 4 "cc_register" "") (const_int 0)])
8070 (match_operand:SF 1 "s_register_operand" "0,r")
8071 (match_operand:SF 2 "s_register_operand" "r,0")))]
8072 "TARGET_ARM && TARGET_SOFT_FLOAT"
8076 [(set_attr "conds" "use")]
8080 ;; Jump and linkage insns
8082 (define_expand "jump"
8084 (label_ref (match_operand 0 "" "")))]
;; ARM-state unconditional branch.  The arm_ccfsm_state check
;; cooperates with the final-pass conditional-execution state machine:
;; when the branch is being absorbed into conditionalized code it is
;; not emitted as a separate insn.
8089 (define_insn "*arm_jump"
8091 (label_ref (match_operand 0 "" "")))]
8095 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
8097 arm_ccfsm_state += 2;
8100 return \"b%?\\t%l0\";
8103 [(set_attr "predicable" "yes")]
;; Thumb unconditional branch.  A short "b" reaches -2044..+2048 bytes
;; (the length attribute below); beyond that a "bl" is used as a far
;; jump, tracked by the far_jump attribute.
8106 (define_insn "*thumb_jump"
8108 (label_ref (match_operand 0 "" "")))]
8111 if (get_attr_length (insn) == 2)
8113 return \"bl\\t%l0\\t%@ far jump\";
8115 [(set (attr "far_jump")
8117 (eq_attr "length" "4")
8118 (const_string "yes")
8119 (const_string "no")))
8120 (set (attr "length")
8122 (and (ge (minus (match_dup 0) (pc)) (const_int -2044))
8123 (le (minus (match_dup 0) (pc)) (const_int 2048)))
;; Call expander: decides between a direct BL and an indirect call
;; through a register, based on the long_call/short_call attribute
;; encoded in operand 2 (see the long comment in the body).
8128 (define_expand "call"
8129 [(parallel [(call (match_operand 0 "memory_operand" "")
8130 (match_operand 1 "general_operand" ""))
8131 (use (match_operand 2 "" ""))
8132 (clobber (reg:SI LR_REGNUM))])]
8138 /* In an untyped call, we can get NULL for operand 2. */
8139 if (operands[2] == NULL_RTX)
8140 operands[2] = const0_rtx;
8142 /* This is to decide if we should generate indirect calls by loading the
8143 32 bit address of the callee into a register before performing the
8144 branch and link. operand[2] encodes the long_call/short_call
8145 attribute of the function being called. This attribute is set whenever
8146 __attribute__((long_call/short_call)) or #pragma long_call/no_long_call
8147 is used, and the short_call attribute can also be set if function is
8148 declared as static or if it has already been defined in the current
8149 compilation unit. See arm.c and arm.h for info about this. The third
8150 parameter to arm_is_longcall_p is used to tell it which pattern
8152 callee = XEXP (operands[0], 0);
8154 if ((GET_CODE (callee) == SYMBOL_REF
8155 && arm_is_longcall_p (operands[0], INTVAL (operands[2]), 0))
8156 || (GET_CODE (callee) != SYMBOL_REF
8157 && GET_CODE (callee) != REG))
8158 XEXP (operands[0], 0) = force_reg (Pmode, callee);
;; Indirect call via register, ARMv5+: BLX is available.
8162 (define_insn "*call_reg_armv5"
8163 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
8164 (match_operand 1 "" ""))
8165 (use (match_operand 2 "" ""))
8166 (clobber (reg:SI LR_REGNUM))]
8167 "TARGET_ARM && arm_arch5"
8169 [(set_attr "type" "call")]
;; Pre-v5 indirect call; output_call emits the mov lr,pc sequence.
;; Length 12 is the worst case (see comment below).
8172 (define_insn "*call_reg_arm"
8173 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
8174 (match_operand 1 "" ""))
8175 (use (match_operand 2 "" ""))
8176 (clobber (reg:SI LR_REGNUM))]
8177 "TARGET_ARM && !arm_arch5"
8179 return output_call (operands);
8181 ;; length is worst case, normally it is only two
8182 [(set_attr "length" "12")
8183 (set_attr "type" "call")]
;; Call whose target address is itself loaded from memory.
8186 (define_insn "*call_mem"
8187 [(call (mem:SI (match_operand:SI 0 "call_memory_operand" "m"))
8188 (match_operand 1 "" ""))
8189 (use (match_operand 2 "" ""))
8190 (clobber (reg:SI LR_REGNUM))]
8193 return output_call_mem (operands);
8195 [(set_attr "length" "12")
8196 (set_attr "type" "call")]
;; Thumb-1 indirect call, v5+ (BLX reg; a single 2-byte insn).
8199 (define_insn "*call_reg_thumb1_v5"
8200 [(call (mem:SI (match_operand:SI 0 "register_operand" "l*r"))
8201 (match_operand 1 "" ""))
8202 (use (match_operand 2 "" ""))
8203 (clobber (reg:SI LR_REGNUM))]
8204 "TARGET_THUMB1 && arm_arch5"
8206 [(set_attr "length" "2")
8207 (set_attr "type" "call")]
;; Pre-v5 Thumb-1 indirect call; when caller interworking is on it
;; goes through the per-register __interwork_*_call_via_rN helpers
;; (r7 / r11 variants depend on which register is the frame pointer).
8210 (define_insn "*call_reg_thumb1"
8211 [(call (mem:SI (match_operand:SI 0 "register_operand" "l*r"))
8212 (match_operand 1 "" ""))
8213 (use (match_operand 2 "" ""))
8214 (clobber (reg:SI LR_REGNUM))]
8215 "TARGET_THUMB1 && !arm_arch5"
8218 if (!TARGET_CALLER_INTERWORKING)
8219 return thumb_call_via_reg (operands[0]);
8220 else if (operands[1] == const0_rtx)
8221 return \"bl\\t%__interwork_call_via_%0\";
8222 else if (frame_pointer_needed)
8223 return \"bl\\t%__interwork_r7_call_via_%0\";
8225 return \"bl\\t%__interwork_r11_call_via_%0\";
8227 [(set_attr "type" "call")]
;; Value-returning counterpart of the "call" expander; operand 0 is
;; the result, operands shifted by one relative to "call".
8230 (define_expand "call_value"
8231 [(parallel [(set (match_operand 0 "" "")
8232 (call (match_operand 1 "memory_operand" "")
8233 (match_operand 2 "general_operand" "")))
8234 (use (match_operand 3 "" ""))
8235 (clobber (reg:SI LR_REGNUM))])]
8239 rtx callee = XEXP (operands[1], 0);
8241 /* In an untyped call, we can get NULL for operand 3. */
8242 if (operands[3] == 0)
8243 operands[3] = const0_rtx;
8245 /* See the comment in define_expand \"call\". */
8246 if ((GET_CODE (callee) == SYMBOL_REF
8247 && arm_is_longcall_p (operands[1], INTVAL (operands[3]), 0))
8248 || (GET_CODE (callee) != SYMBOL_REF
8249 && GET_CODE (callee) != REG))
8250 XEXP (operands[1], 0) = force_reg (Pmode, callee);
;; Value-returning indirect call, ARMv5+ (BLX reg).
8254 (define_insn "*call_value_reg_armv5"
8255 [(set (match_operand 0 "" "")
8256 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
8257 (match_operand 2 "" "")))
8258 (use (match_operand 3 "" ""))
8259 (clobber (reg:SI LR_REGNUM))]
8260 "TARGET_ARM && arm_arch5"
8262 [(set_attr "type" "call")]
;; Pre-v5 value-returning indirect call; note output_call is handed
;; &operands[1] so it sees the callee as its operand 0.
8265 (define_insn "*call_value_reg_arm"
8266 [(set (match_operand 0 "" "")
8267 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
8268 (match_operand 2 "" "")))
8269 (use (match_operand 3 "" ""))
8270 (clobber (reg:SI LR_REGNUM))]
8271 "TARGET_ARM && !arm_arch5"
8273 return output_call (&operands[1]);
8275 [(set_attr "length" "12")
8276 (set_attr "type" "call")]
;; Value-returning call through a memory operand (non-constant address).
8279 (define_insn "*call_value_mem"
8280 [(set (match_operand 0 "" "")
8281 (call (mem:SI (match_operand:SI 1 "call_memory_operand" "m"))
8282 (match_operand 2 "" "")))
8283 (use (match_operand 3 "" ""))
8284 (clobber (reg:SI LR_REGNUM))]
8285 "TARGET_ARM && (!CONSTANT_ADDRESS_P (XEXP (operands[1], 0)))"
8287 return output_call_mem (&operands[1]);
8289 [(set_attr "length" "12")
8290 (set_attr "type" "call")]
;; Thumb-1 v5+ value-returning indirect call (BLX reg).
8293 (define_insn "*call_value_reg_thumb1_v5"
8294 [(set (match_operand 0 "" "")
8295 (call (mem:SI (match_operand:SI 1 "register_operand" "l*r"))
8296 (match_operand 2 "" "")))
8297 (use (match_operand 3 "" ""))
8298 (clobber (reg:SI LR_REGNUM))]
8299 "TARGET_THUMB1 && arm_arch5"
8301 [(set_attr "length" "2")
8302 (set_attr "type" "call")]
;; Pre-v5 Thumb-1 value-returning indirect call; mirrors
;; *call_reg_thumb1 including the interworking helper selection.
8305 (define_insn "*call_value_reg_thumb1"
8306 [(set (match_operand 0 "" "")
8307 (call (mem:SI (match_operand:SI 1 "register_operand" "l*r"))
8308 (match_operand 2 "" "")))
8309 (use (match_operand 3 "" ""))
8310 (clobber (reg:SI LR_REGNUM))]
8311 "TARGET_THUMB1 && !arm_arch5"
8314 if (!TARGET_CALLER_INTERWORKING)
8315 return thumb_call_via_reg (operands[1]);
8316 else if (operands[2] == const0_rtx)
8317 return \"bl\\t%__interwork_call_via_%1\";
8318 else if (frame_pointer_needed)
8319 return \"bl\\t%__interwork_r7_call_via_%1\";
8321 return \"bl\\t%__interwork_r11_call_via_%1\";
8323 [(set_attr "type" "call")]
8326 ;; Allow calls to SYMBOL_REFs specially as they are not valid general addresses
8327 ;; The 'a' causes the operand to be treated as an address, i.e. no '#' output.
;; Direct BL to a symbol (ARM state); uses a PLT reloc when needed.
;; Only for short calls -- long calls were routed through a register
;; by the expander above.
8329 (define_insn "*call_symbol"
8330 [(call (mem:SI (match_operand:SI 0 "" ""))
8331 (match_operand 1 "" ""))
8332 (use (match_operand 2 "" ""))
8333 (clobber (reg:SI LR_REGNUM))]
8335 && (GET_CODE (operands[0]) == SYMBOL_REF)
8336 && !arm_is_longcall_p (operands[0], INTVAL (operands[2]), 1)"
8339 return NEED_PLT_RELOC ? \"bl%?\\t%a0(PLT)\" : \"bl%?\\t%a0\";
8341 [(set_attr "type" "call")]
;; Value-returning direct BL to a symbol (ARM state).
8344 (define_insn "*call_value_symbol"
8345 [(set (match_operand 0 "" "")
8346 (call (mem:SI (match_operand:SI 1 "" ""))
8347 (match_operand:SI 2 "" "")))
8348 (use (match_operand 3 "" ""))
8349 (clobber (reg:SI LR_REGNUM))]
8351 && (GET_CODE (operands[1]) == SYMBOL_REF)
8352 && !arm_is_longcall_p (operands[1], INTVAL (operands[3]), 1)"
8355 return NEED_PLT_RELOC ? \"bl%?\\t%a1(PLT)\" : \"bl%?\\t%a1\";
8357 [(set_attr "type" "call")]
;; Direct call to a symbol, Thumb variant (condition elided here).
8360 (define_insn "*call_insn"
8361 [(call (mem:SI (match_operand:SI 0 "" ""))
8362 (match_operand:SI 1 "" ""))
8363 (use (match_operand 2 "" ""))
8364 (clobber (reg:SI LR_REGNUM))]
8366 && GET_CODE (operands[0]) == SYMBOL_REF
8367 && !arm_is_longcall_p (operands[0], INTVAL (operands[2]), 1)"
8369 [(set_attr "length" "4")
8370 (set_attr "type" "call")]
;; Value-returning direct call to a symbol, Thumb variant.
8373 (define_insn "*call_value_insn"
8374 [(set (match_operand 0 "" "")
8375 (call (mem:SI (match_operand 1 "" ""))
8376 (match_operand 2 "" "")))
8377 (use (match_operand 3 "" ""))
8378 (clobber (reg:SI LR_REGNUM))]
8380 && GET_CODE (operands[1]) == SYMBOL_REF
8381 && !arm_is_longcall_p (operands[1], INTVAL (operands[3]), 1)"
8383 [(set_attr "length" "4")
8384 (set_attr "type" "call")]
8387 ;; We may also be able to do sibcalls for Thumb, but it's much harder...
;; Tail-call expanders: like call/call_value but the callee returns
;; directly to our caller, so LR is not clobbered here.
8388 (define_expand "sibcall"
8389 [(parallel [(call (match_operand 0 "memory_operand" "")
8390 (match_operand 1 "general_operand" ""))
8392 (use (match_operand 2 "" ""))])]
8396 if (operands[2] == NULL_RTX)
8397 operands[2] = const0_rtx;
8401 (define_expand "sibcall_value"
8402 [(parallel [(set (match_operand 0 "" "")
8403 (call (match_operand 1 "memory_operand" "")
8404 (match_operand 2 "general_operand" "")))
8406 (use (match_operand 3 "" ""))])]
8410 if (operands[3] == NULL_RTX)
8411 operands[3] = const0_rtx;
;; Tail call emitted as a plain branch to the symbol (PLT if needed).
8415 (define_insn "*sibcall_insn"
8416 [(call (mem:SI (match_operand:SI 0 "" "X"))
8417 (match_operand 1 "" ""))
8419 (use (match_operand 2 "" ""))]
8420 "TARGET_ARM && GET_CODE (operands[0]) == SYMBOL_REF"
8422 return NEED_PLT_RELOC ? \"b%?\\t%a0(PLT)\" : \"b%?\\t%a0\";
8424 [(set_attr "type" "call")]
8427 (define_insn "*sibcall_value_insn"
8428 [(set (match_operand 0 "" "")
8429 (call (mem:SI (match_operand:SI 1 "" "X"))
8430 (match_operand 2 "" "")))
8432 (use (match_operand 3 "" ""))]
8433 "TARGET_ARM && GET_CODE (operands[1]) == SYMBOL_REF"
8435 return NEED_PLT_RELOC ? \"b%?\\t%a1(PLT)\" : \"b%?\\t%a1\";
8437 [(set_attr "type" "call")]
8440 ;; Often the return insn will be the same as loading from memory, so set attr
;; Simple function return; output_return_instruction produces the
;; epilogue sequence.  The arm_ccfsm_state check lets final fold the
;; return into a preceding conditional sequence instead of emitting it.
8441 (define_insn "return"
8443 "TARGET_ARM && USE_RETURN_INSN (FALSE)"
8446 if (arm_ccfsm_state == 2)
8448 arm_ccfsm_state += 2;
8451 return output_return_instruction (const_true_rtx, TRUE, FALSE);
8453 [(set_attr "type" "load1")
8454 (set_attr "length" "12")
8455 (set_attr "predicable" "yes")]
;; Conditional return: return taken when operand 0's condition holds.
8458 (define_insn "*cond_return"
8460 (if_then_else (match_operator 0 "arm_comparison_operator"
8461 [(match_operand 1 "cc_register" "") (const_int 0)])
8464 "TARGET_ARM && USE_RETURN_INSN (TRUE)"
8467 if (arm_ccfsm_state == 2)
8469 arm_ccfsm_state += 2;
8472 return output_return_instruction (operands[0], TRUE, FALSE);
8474 [(set_attr "conds" "use")
8475 (set_attr "length" "12")
8476 (set_attr "type" "load1")]
;; As above with the branch arms swapped, so the condition is inverted
;; (third argument TRUE to output_return_instruction).
8479 (define_insn "*cond_return_inverted"
8481 (if_then_else (match_operator 0 "arm_comparison_operator"
8482 [(match_operand 1 "cc_register" "") (const_int 0)])
8485 "TARGET_ARM && USE_RETURN_INSN (TRUE)"
8488 if (arm_ccfsm_state == 2)
8490 arm_ccfsm_state += 2;
8493 return output_return_instruction (operands[0], TRUE, TRUE);
8495 [(set_attr "conds" "use")
8496 (set_attr "length" "12")
8497 (set_attr "type" "load1")]
8500 ;; Generate a sequence of instructions to determine if the processor is
8501 ;; in 26-bit or 32-bit mode, and return the appropriate return address
;; mask (0x03fffffc for 26-bit, selected via the CC result of the
;; UNSPEC_CHECK_ARCH comparison emitted by *check_arch2 below).
8504 (define_expand "return_addr_mask"
8506 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
8508 (set (match_operand:SI 0 "s_register_operand" "")
8509 (if_then_else:SI (eq (match_dup 1) (const_int 0))
8511 (const_int 67108860)))] ; 0x03fffffc
8514 operands[1] = gen_rtx_REG (CC_NOOVmode, CC_REGNUM);
;; The teq pc, pc trick: in 32-bit mode pc reads equal both times;
;; the flag result distinguishes the architecture mode.
8517 (define_insn "*check_arch2"
8518 [(set (match_operand:CC_NOOV 0 "cc_register" "")
8519 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
8522 "teq\\t%|r0, %|r0\;teq\\t%|pc, %|pc"
8523 [(set_attr "length" "8")
8524 (set_attr "conds" "set")]
8527 ;; Call subroutine returning any type.
;; Builds a PARALLEL describing every possible return register (r0 is
;; widened to TImode to cover up to four registers), performs the call,
;; then stores each returned register into the result block at
;; operand 1.  A blockage insn at the end stops the optimizers moving
;; code across the stores.
8529 (define_expand "untyped_call"
8530 [(parallel [(call (match_operand 0 "" "")
8532 (match_operand 1 "" "")
8533 (match_operand 2 "" "")])]
8538 rtx par = gen_rtx_PARALLEL (VOIDmode,
8539 rtvec_alloc (XVECLEN (operands[2], 0)));
8540 rtx addr = gen_reg_rtx (Pmode);
8544 emit_move_insn (addr, XEXP (operands[1], 0));
8545 mem = change_address (operands[1], BLKmode, addr);
8547 for (i = 0; i < XVECLEN (operands[2], 0); i++)
8549 rtx src = SET_SRC (XVECEXP (operands[2], 0, i));
8551 /* Default code only uses r0 as a return value, but we could
8552 be using anything up to 4 registers. */
8553 if (REGNO (src) == R0_REGNUM)
8554 src = gen_rtx_REG (TImode, R0_REGNUM);
8556 XVECEXP (par, 0, i) = gen_rtx_EXPR_LIST (VOIDmode, src,
8558 size += GET_MODE_SIZE (GET_MODE (src));
8561 emit_call_insn (GEN_CALL_VALUE (par, operands[0], const0_rtx, NULL,
8566 for (i = 0; i < XVECLEN (par, 0); i++)
8568 HOST_WIDE_INT offset = 0;
8569 rtx reg = XEXP (XVECEXP (par, 0, i), 0);
8572 emit_move_insn (addr, plus_constant (addr, size));
8574 mem = change_address (mem, GET_MODE (reg), NULL);
8575 if (REGNO (reg) == R0_REGNUM)
8577 /* On thumb we have to use a write-back instruction. */
8578 emit_insn (arm_gen_store_multiple (R0_REGNUM, 4, addr, TRUE,
8579 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
8580 size = TARGET_ARM ? 16 : 0;
8584 emit_move_insn (mem, reg);
8585 size = GET_MODE_SIZE (GET_MODE (reg));
8589 /* The optimizer does not know that the call sets the function value
8590 registers we stored in the result block. We avoid problems by
8591 claiming that all hard registers are used and clobbered at this
8593 emit_insn (gen_blockage ());
;; Counterpart of untyped_call: reload every possible return register
;; from the result block (operand 0), emit USEs so the registers stay
;; live across the epilogue, then perform a naked return.
8599 (define_expand "untyped_return"
8600 [(match_operand:BLK 0 "memory_operand" "")
8601 (match_operand 1 "" "")]
8606 rtx addr = gen_reg_rtx (Pmode);
8610 emit_move_insn (addr, XEXP (operands[0], 0));
8611 mem = change_address (operands[0], BLKmode, addr);
8613 for (i = 0; i < XVECLEN (operands[1], 0); i++)
8615 HOST_WIDE_INT offset = 0;
8616 rtx reg = SET_DEST (XVECEXP (operands[1], 0, i));
8619 emit_move_insn (addr, plus_constant (addr, size));
8621 mem = change_address (mem, GET_MODE (reg), NULL);
8622 if (REGNO (reg) == R0_REGNUM)
8624 /* On thumb we have to use a write-back instruction. */
8625 emit_insn (arm_gen_load_multiple (R0_REGNUM, 4, addr, TRUE,
8626 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
8627 size = TARGET_ARM ? 16 : 0;
8631 emit_move_insn (reg, mem);
8632 size = GET_MODE_SIZE (GET_MODE (reg));
8636 /* Emit USE insns before the return. */
8637 for (i = 0; i < XVECLEN (operands[1], 0); i++)
8638 emit_insn (gen_rtx_USE (VOIDmode,
8639 SET_DEST (XVECEXP (operands[1], 0, i))));
8641 /* Construct the return. */
8642 expand_naked_return ();
8648 ;; UNSPEC_VOLATILE is considered to use and clobber all hard registers and
8649 ;; all of memory. This blocks insns from being moved across this point.
;; Zero-length scheduling barrier; emits no machine code.
8651 (define_insn "blockage"
8652 [(unspec_volatile [(const_int 0)] VUNSPEC_BLOCKAGE)]
8655 [(set_attr "length" "0")
8656 (set_attr "type" "block")]
;; Dispatch-table jump.  Biases the index down by the lower bound when
;; it is non-zero, then emits the ARM or Thumb-2 internal casesi
;; pattern (the branch structure between the emit calls is elided in
;; this excerpt).
8659 (define_expand "casesi"
8660 [(match_operand:SI 0 "s_register_operand" "") ; index to jump on
8661 (match_operand:SI 1 "const_int_operand" "") ; lower bound
8662 (match_operand:SI 2 "const_int_operand" "") ; total range
8663 (match_operand:SI 3 "" "") ; table label
8664 (match_operand:SI 4 "" "")] ; Out of range label
8669 if (operands[1] != const0_rtx)
8671 reg = gen_reg_rtx (SImode);
8673 emit_insn (gen_addsi3 (reg, operands[0],
8674 GEN_INT (-INTVAL (operands[1]))));
8678 if (!const_ok_for_arm (INTVAL (operands[2])))
8679 operands[2] = force_reg (SImode, operands[2]);
8683 emit_jump_insn (gen_arm_casesi_internal (operands[0], operands[2],
8684 operands[3], operands[4]));
8688 emit_jump_insn (gen_thumb2_casesi_internal_pic (operands[0],
8689 operands[2], operands[3], operands[4]));
8693 emit_jump_insn (gen_thumb2_casesi_internal (operands[0], operands[2],
8694 operands[3], operands[4]));
8700 ;; The USE in this pattern is needed to tell flow analysis that this is
8701 ;; a CASESI insn. It has no other purpose.
;; ARM-state table jump: compare the index to the range, then either
;; "addls pc, pc, idx, asl #2" (branch table) or "ldrls pc, [pc, idx,
;; asl #2]" (address table); out of range falls through to "b %l3".
8702 (define_insn "arm_casesi_internal"
8703 [(parallel [(set (pc)
8705 (leu (match_operand:SI 0 "s_register_operand" "r")
8706 (match_operand:SI 1 "arm_rhs_operand" "rI"))
8707 (mem:SI (plus:SI (mult:SI (match_dup 0) (const_int 4))
8708 (label_ref (match_operand 2 "" ""))))
8709 (label_ref (match_operand 3 "" ""))))
8710 (clobber (reg:CC CC_REGNUM))
8711 (use (label_ref (match_dup 2)))])]
8715 return \"cmp\\t%0, %1\;addls\\t%|pc, %|pc, %0, asl #2\;b\\t%l3\";
8716 return \"cmp\\t%0, %1\;ldrls\\t%|pc, [%|pc, %0, asl #2]\;b\\t%l3\";
8718 [(set_attr "conds" "clob")
8719 (set_attr "length" "12")]
;; Register-indirect jump.  For Thumb-2 the target address gets its low
;; bit forced to 1 and the jump uses BX (see the inline comment).
8722 (define_expand "indirect_jump"
8724 (match_operand:SI 0 "s_register_operand" ""))]
8727 /* Thumb-2 doesn't have mov pc, reg. Explicitly set the low bit of the
8728 address and use bx. */
8732 tmp = gen_reg_rtx (SImode);
8733 emit_insn (gen_iorsi3 (tmp, operands[0], GEN_INT(1)));
8739 ;; NB Never uses BX.
8740 (define_insn "*arm_indirect_jump"
8742 (match_operand:SI 0 "s_register_operand" "r"))]
8744 "mov%?\\t%|pc, %0\\t%@ indirect register jump"
8745 [(set_attr "predicable" "yes")]
;; Indirect jump loading the target straight into pc from memory;
;; pool_range/neg_pool_range bound the literal-pool displacement.
8748 (define_insn "*load_indirect_jump"
8750 (match_operand:SI 0 "memory_operand" "m"))]
8752 "ldr%?\\t%|pc, %0\\t%@ indirect memory jump"
8753 [(set_attr "type" "load1")
8754 (set_attr "pool_range" "4096")
8755 (set_attr "neg_pool_range" "4084")
8756 (set_attr "predicable" "yes")]
8759 ;; NB Never uses BX.
8760 (define_insn "*thumb1_indirect_jump"
8762 (match_operand:SI 0 "register_operand" "l*r"))]
8765 [(set_attr "conds" "clob")
8766 (set_attr "length" "2")]
8776 if (TARGET_UNIFIED_ASM)
8779 return \"mov%?\\t%|r0, %|r0\\t%@ nop\";
8780 return \"mov\\tr8, r8\";
8782 [(set (attr "length")
8783 (if_then_else (eq_attr "is_thumb" "yes")
8789 ;; Patterns to allow combination of arithmetic, cond code and shifts
;; ALU op with the first operand shifted "for free" by the barrel
;; shifter: %i1 prints the operator mnemonic and %S3 the shift.  The
;; type attribute distinguishes immediate shifts (alu_shift) from
;; register-specified shifts (alu_shift_reg), which cost more on some
;; cores.
8791 (define_insn "*arith_shiftsi"
8792 [(set (match_operand:SI 0 "s_register_operand" "=r")
8793 (match_operator:SI 1 "shiftable_operator"
8794 [(match_operator:SI 3 "shift_operator"
8795 [(match_operand:SI 4 "s_register_operand" "r")
8796 (match_operand:SI 5 "reg_or_int_operand" "rI")])
8797 (match_operand:SI 2 "s_register_operand" "r")]))]
8799 "%i1%?\\t%0, %2, %4%S3"
8800 [(set_attr "predicable" "yes")
8801 (set_attr "shift" "4")
8802 (set (attr "type") (if_then_else (match_operand 5 "const_int_operand" "")
8803 (const_string "alu_shift")
8804 (const_string "alu_shift_reg")))]
8808 [(set (match_operand:SI 0 "s_register_operand" "")
8809 (match_operator:SI 1 "shiftable_operator"
8810 [(match_operator:SI 2 "shiftable_operator"
8811 [(match_operator:SI 3 "shift_operator"
8812 [(match_operand:SI 4 "s_register_operand" "")
8813 (match_operand:SI 5 "reg_or_int_operand" "")])
8814 (match_operand:SI 6 "s_register_operand" "")])
8815 (match_operand:SI 7 "arm_rhs_operand" "")]))
8816 (clobber (match_operand:SI 8 "s_register_operand" ""))]
8819 (match_op_dup 2 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
8822 (match_op_dup 1 [(match_dup 8) (match_dup 7)]))]
;; Flag-setting variant of *arith_shiftsi: performs the shifted ALU op
;; and compares the result against zero (%. prints the "s" suffix).
8825 (define_insn "*arith_shiftsi_compare0"
8826 [(set (reg:CC_NOOV CC_REGNUM)
8827 (compare:CC_NOOV (match_operator:SI 1 "shiftable_operator"
8828 [(match_operator:SI 3 "shift_operator"
8829 [(match_operand:SI 4 "s_register_operand" "r")
8830 (match_operand:SI 5 "reg_or_int_operand" "rI")])
8831 (match_operand:SI 2 "s_register_operand" "r")])
8833 (set (match_operand:SI 0 "s_register_operand" "=r")
8834 (match_op_dup 1 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
8837 "%i1%.\\t%0, %2, %4%S3"
8838 [(set_attr "conds" "set")
8839 (set_attr "shift" "4")
8840 (set (attr "type") (if_then_else (match_operand 5 "const_int_operand" "")
8841 (const_string "alu_shift")
8842 (const_string "alu_shift_reg")))]
;; As above when only the flags are needed; the arithmetic result goes
;; to a scratch register.
8845 (define_insn "*arith_shiftsi_compare0_scratch"
8846 [(set (reg:CC_NOOV CC_REGNUM)
8847 (compare:CC_NOOV (match_operator:SI 1 "shiftable_operator"
8848 [(match_operator:SI 3 "shift_operator"
8849 [(match_operand:SI 4 "s_register_operand" "r")
8850 (match_operand:SI 5 "reg_or_int_operand" "rI")])
8851 (match_operand:SI 2 "s_register_operand" "r")])
8853 (clobber (match_scratch:SI 0 "=r"))]
8855 "%i1%.\\t%0, %2, %4%S3"
8856 [(set_attr "conds" "set")
8857 (set_attr "shift" "4")
8858 (set (attr "type") (if_then_else (match_operand 5 "const_int_operand" "")
8859 (const_string "alu_shift")
8860 (const_string "alu_shift_reg")))]
;; Subtract with a shifted second operand: %0 = %1 - (%3 shifted by
;; %4).  Needs its own pattern (unlike *arith_shiftsi) because minus
;; is not commutative.
8863 (define_insn "*sub_shiftsi"
8864 [(set (match_operand:SI 0 "s_register_operand" "=r")
8865 (minus:SI (match_operand:SI 1 "s_register_operand" "r")
8866 (match_operator:SI 2 "shift_operator"
8867 [(match_operand:SI 3 "s_register_operand" "r")
8868 (match_operand:SI 4 "reg_or_int_operand" "rM")])))]
8870 "sub%?\\t%0, %1, %3%S2"
8871 [(set_attr "predicable" "yes")
8872 (set_attr "shift" "3")
8873 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
8874 (const_string "alu_shift")
8875 (const_string "alu_shift_reg")))]
;; Flag-setting variant (subs) keeping the result.
8878 (define_insn "*sub_shiftsi_compare0"
8879 [(set (reg:CC_NOOV CC_REGNUM)
8881 (minus:SI (match_operand:SI 1 "s_register_operand" "r")
8882 (match_operator:SI 2 "shift_operator"
8883 [(match_operand:SI 3 "s_register_operand" "r")
8884 (match_operand:SI 4 "reg_or_int_operand" "rM")]))
8886 (set (match_operand:SI 0 "s_register_operand" "=r")
8887 (minus:SI (match_dup 1) (match_op_dup 2 [(match_dup 3)
8890 "sub%.\\t%0, %1, %3%S2"
8891 [(set_attr "conds" "set")
8892 (set_attr "shift" "3")
8893 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
8894 (const_string "alu_shift")
8895 (const_string "alu_shift_reg")))]
;; Flag-setting variant discarding the result into a scratch.
8898 (define_insn "*sub_shiftsi_compare0_scratch"
8899 [(set (reg:CC_NOOV CC_REGNUM)
8901 (minus:SI (match_operand:SI 1 "s_register_operand" "r")
8902 (match_operator:SI 2 "shift_operator"
8903 [(match_operand:SI 3 "s_register_operand" "r")
8904 (match_operand:SI 4 "reg_or_int_operand" "rM")]))
8906 (clobber (match_scratch:SI 0 "=r"))]
8908 "sub%.\\t%0, %1, %3%S2"
8909 [(set_attr "conds" "set")
8910 (set_attr "shift" "3")
8911 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
8912 (const_string "alu_shift")
8913 (const_string "alu_shift_reg")))]
;; %0 = %2 & (condition ? 1 : 0), using a CC result already available:
;; clear on the false condition, AND with 1 on the true one.
8918 (define_insn "*and_scc"
8919 [(set (match_operand:SI 0 "s_register_operand" "=r")
8920 (and:SI (match_operator:SI 1 "arm_comparison_operator"
8921 [(match_operand 3 "cc_register" "") (const_int 0)])
8922 (match_operand:SI 2 "s_register_operand" "r")))]
8924 "mov%D1\\t%0, #0\;and%d1\\t%0, %2, #1"
8925 [(set_attr "conds" "use")
8926 (set_attr "length" "8")]
;; %0 = %1 | (condition ? 1 : 0).  When %0 and %1 are tied (first
;; alternative) a single conditional orr suffices (length 4).
8929 (define_insn "*ior_scc"
8930 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8931 (ior:SI (match_operator:SI 2 "arm_comparison_operator"
8932 [(match_operand 3 "cc_register" "") (const_int 0)])
8933 (match_operand:SI 1 "s_register_operand" "0,?r")))]
8937 mov%D2\\t%0, %1\;orr%d2\\t%0, %1, #1"
8938 [(set_attr "conds" "use")
8939 (set_attr "length" "4,8")]
;; Full scc with the comparison done inline (clobbers CC).  Special
;; cases against zero avoid the compare entirely: LT is a logical
;; shift of the sign bit, GE its complement, EQ/NE use the carry from
;; rsbs/subs.  Otherwise it falls back to cmp/cmn plus two conditional
;; moves.  Alternative 1 ("L" constraint) negates the constant and
;; uses cmn/adds.
8942 (define_insn "*compare_scc"
8943 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8944 (match_operator:SI 1 "arm_comparison_operator"
8945 [(match_operand:SI 2 "s_register_operand" "r,r")
8946 (match_operand:SI 3 "arm_add_operand" "rI,L")]))
8947 (clobber (reg:CC CC_REGNUM))]
8950 if (operands[3] == const0_rtx)
8952 if (GET_CODE (operands[1]) == LT)
8953 return \"mov\\t%0, %2, lsr #31\";
8955 if (GET_CODE (operands[1]) == GE)
8956 return \"mvn\\t%0, %2\;mov\\t%0, %0, lsr #31\";
8958 if (GET_CODE (operands[1]) == EQ)
8959 return \"rsbs\\t%0, %2, #1\;movcc\\t%0, #0\";
8962 if (GET_CODE (operands[1]) == NE)
8964 if (which_alternative == 1)
8965 return \"adds\\t%0, %2, #%n3\;movne\\t%0, #1\";
8966 return \"subs\\t%0, %2, %3\;movne\\t%0, #1\";
8968 if (which_alternative == 1)
8969 output_asm_insn (\"cmn\\t%2, #%n3\", operands);
8971 output_asm_insn (\"cmp\\t%2, %3\", operands);
8972 return \"mov%D1\\t%0, #0\;mov%d1\\t%0, #1\";
8974 [(set_attr "conds" "clob")
8975 (set_attr "length" "12")]
;; Conditional move keyed on an equality test of an scc value that is
;; itself in the CC register.  Operand 3 (EQ or NE) decides whether
;; %d4/%D4 select operand 1 or operand 2; tied alternatives need only
;; one conditional mov (length 4), the general case needs two.
8978 (define_insn "*cond_move"
8979 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
8980 (if_then_else:SI (match_operator 3 "equality_operator"
8981 [(match_operator 4 "arm_comparison_operator"
8982 [(match_operand 5 "cc_register" "") (const_int 0)])
8984 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
8985 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))]
8988 if (GET_CODE (operands[3]) == NE)
8990 if (which_alternative != 1)
8991 output_asm_insn (\"mov%D4\\t%0, %2\", operands);
8992 if (which_alternative != 0)
8993 output_asm_insn (\"mov%d4\\t%0, %1\", operands);
8996 if (which_alternative != 0)
8997 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
8998 if (which_alternative != 1)
8999 output_asm_insn (\"mov%d4\\t%0, %2\", operands);
9002 [(set_attr "conds" "use")
9003 (set_attr "length" "4,4,8")]
9006 (define_insn "*cond_arith"
9007 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9008 (match_operator:SI 5 "shiftable_operator"
9009 [(match_operator:SI 4 "arm_comparison_operator"
9010 [(match_operand:SI 2 "s_register_operand" "r,r")
9011 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
9012 (match_operand:SI 1 "s_register_operand" "0,?r")]))
9013 (clobber (reg:CC CC_REGNUM))]
9016 if (GET_CODE (operands[4]) == LT && operands[3] == const0_rtx)
9017 return \"%i5\\t%0, %1, %2, lsr #31\";
9019 output_asm_insn (\"cmp\\t%2, %3\", operands);
9020 if (GET_CODE (operands[5]) == AND)
9021 output_asm_insn (\"mov%D4\\t%0, #0\", operands);
9022 else if (GET_CODE (operands[5]) == MINUS)
9023 output_asm_insn (\"rsb%D4\\t%0, %1, #0\", operands);
9024 else if (which_alternative != 0)
9025 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9026 return \"%i5%d4\\t%0, %1, #1\";
9028 [(set_attr "conds" "clob")
9029 (set_attr "length" "12")]
9032 (define_insn "*cond_sub"
9033 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9034 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
9035 (match_operator:SI 4 "arm_comparison_operator"
9036 [(match_operand:SI 2 "s_register_operand" "r,r")
9037 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
9038 (clobber (reg:CC CC_REGNUM))]
9041 output_asm_insn (\"cmp\\t%2, %3\", operands);
9042 if (which_alternative != 0)
9043 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9044 return \"sub%d4\\t%0, %1, #1\";
9046 [(set_attr "conds" "clob")
9047 (set_attr "length" "8,12")]
9050 ;; ??? Is it worth using these conditional patterns in Thumb-2 mode?

;; *cmp_ite0: combine two comparisons into one CC result using a
;; conditional (predicated) second compare.  The 4x2 opcode table is
;; indexed by alternative (which of the two constants needs cmn's
;; negated-immediate form) and by `swap', which is derived from
;; comparison_dominates_p on the two comparison codes.  Several lines
;; (insn condition, if_then_else wrapper, `swap' declaration) are
;; missing from this view.
9051 (define_insn "*cmp_ite0"
9052 [(set (match_operand 6 "dominant_cc_register" "")
9055 (match_operator 4 "arm_comparison_operator"
9056 [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
9057 (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
9058 (match_operator:SI 5 "arm_comparison_operator"
9059 [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
9060 (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")])
9066 static const char * const opcodes[4][2] =
9068 {\"cmp\\t%2, %3\;cmp%d5\\t%0, %1\",
9069 \"cmp\\t%0, %1\;cmp%d4\\t%2, %3\"},
9070 {\"cmp\\t%2, %3\;cmn%d5\\t%0, #%n1\",
9071 \"cmn\\t%0, #%n1\;cmp%d4\\t%2, %3\"},
9072 {\"cmn\\t%2, #%n3\;cmp%d5\\t%0, %1\",
9073 \"cmp\\t%0, %1\;cmn%d4\\t%2, #%n3\"},
9074 {\"cmn\\t%2, #%n3\;cmn%d5\\t%0, #%n1\",
9075 \"cmn\\t%0, #%n1\;cmn%d4\\t%2, #%n3\"}
9078 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9080 return opcodes[which_alternative][swap];
9082 [(set_attr "conds" "set")
9083 (set_attr "length" "8")]

;; *cmp_ite1: as *cmp_ite0 but the dominance test reverses the first
;; comparison's code, and the second compare in each pair uses the
;; inverted condition (%D5) in the swapped forms.
9086 (define_insn "*cmp_ite1"
9087 [(set (match_operand 6 "dominant_cc_register" "")
9090 (match_operator 4 "arm_comparison_operator"
9091 [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
9092 (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
9093 (match_operator:SI 5 "arm_comparison_operator"
9094 [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
9095 (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")])
9101 static const char * const opcodes[4][2] =
9103 {\"cmp\\t%0, %1\;cmp%d4\\t%2, %3\",
9104 \"cmp\\t%2, %3\;cmp%D5\\t%0, %1\"},
9105 {\"cmn\\t%0, #%n1\;cmp%d4\\t%2, %3\",
9106 \"cmp\\t%2, %3\;cmn%D5\\t%0, #%n1\"},
9107 {\"cmp\\t%0, %1\;cmn%d4\\t%2, #%n3\",
9108 \"cmn\\t%2, #%n3\;cmp%D5\\t%0, %1\"},
9109 {\"cmn\\t%0, #%n1\;cmn%d4\\t%2, #%n3\",
9110 \"cmn\\t%2, #%n3\;cmn%D5\\t%0, #%n1\"}
9113 comparison_dominates_p (GET_CODE (operands[5]),
9114 reverse_condition (GET_CODE (operands[4])));
9116 return opcodes[which_alternative][swap];
9118 [(set_attr "conds" "set")
9119 (set_attr "length" "8")]

;; *cmp_and: set a dominance CC register from the AND of two comparisons
;; (cmp then predicated cmp/cmn); same opcode-table scheme as *cmp_ite0.
9122 (define_insn "*cmp_and"
9123 [(set (match_operand 6 "dominant_cc_register" "")
9126 (match_operator 4 "arm_comparison_operator"
9127 [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
9128 (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
9129 (match_operator:SI 5 "arm_comparison_operator"
9130 [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
9131 (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")]))
9136 static const char *const opcodes[4][2] =
9138 {\"cmp\\t%2, %3\;cmp%d5\\t%0, %1\",
9139 \"cmp\\t%0, %1\;cmp%d4\\t%2, %3\"},
9140 {\"cmp\\t%2, %3\;cmn%d5\\t%0, #%n1\",
9141 \"cmn\\t%0, #%n1\;cmp%d4\\t%2, %3\"},
9142 {\"cmn\\t%2, #%n3\;cmp%d5\\t%0, %1\",
9143 \"cmp\\t%0, %1\;cmn%d4\\t%2, #%n3\"},
9144 {\"cmn\\t%2, #%n3\;cmn%d5\\t%0, #%n1\",
9145 \"cmn\\t%0, #%n1\;cmn%d4\\t%2, #%n3\"}
9148 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9150 return opcodes[which_alternative][swap];
9152 [(set_attr "conds" "set")
9153 (set_attr "predicable" "no")
9154 (set_attr "length" "8")]

;; *cmp_ior: as *cmp_and but for the OR of two comparisons — the second
;; compare executes only when the first FAILS (%D4/%D5 inverted
;; conditions).
9157 (define_insn "*cmp_ior"
9158 [(set (match_operand 6 "dominant_cc_register" "")
9161 (match_operator 4 "arm_comparison_operator"
9162 [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
9163 (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
9164 (match_operator:SI 5 "arm_comparison_operator"
9165 [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
9166 (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")]))
9171 static const char *const opcodes[4][2] =
9173 {\"cmp\\t%0, %1\;cmp%D4\\t%2, %3\",
9174 \"cmp\\t%2, %3\;cmp%D5\\t%0, %1\"},
9175 {\"cmn\\t%0, #%n1\;cmp%D4\\t%2, %3\",
9176 \"cmp\\t%2, %3\;cmn%D5\\t%0, #%n1\"},
9177 {\"cmp\\t%0, %1\;cmn%D4\\t%2, #%n3\",
9178 \"cmn\\t%2, #%n3\;cmp%D5\\t%0, %1\"},
9179 {\"cmn\\t%0, #%n1\;cmn%D4\\t%2, #%n3\",
9180 \"cmn\\t%2, #%n3\;cmn%D5\\t%0, #%n1\"}
9183 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9185 return opcodes[which_alternative][swap];
9188 [(set_attr "conds" "set")
9189 (set_attr "length" "8")]
;; *ior_scc_scc: OR of two store-flag values.  After reload this splits
;; into a dominance-mode compare of the two comparisons followed by a
;; store of (ne CC 0); operand 7 is the dominance CC register created in
;; the split preparation code.
9192 (define_insn_and_split "*ior_scc_scc"
9193 [(set (match_operand:SI 0 "s_register_operand" "=r")
9194 (ior:SI (match_operator:SI 3 "arm_comparison_operator"
9195 [(match_operand:SI 1 "s_register_operand" "r")
9196 (match_operand:SI 2 "arm_add_operand" "rIL")])
9197 (match_operator:SI 6 "arm_comparison_operator"
9198 [(match_operand:SI 4 "s_register_operand" "r")
9199 (match_operand:SI 5 "arm_add_operand" "rIL")])))
9200 (clobber (reg:CC CC_REGNUM))]
9202 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_OR_Y)
9205 "TARGET_ARM && reload_completed"
9209 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9210 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9212 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
9214 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
9217 [(set_attr "conds" "clob")
9218 (set_attr "length" "16")])

9220 ; If the above pattern is followed by a CMP insn, then the compare is
9221 ; redundant, since we can rework the conditional instruction that follows.
9222 (define_insn_and_split "*ior_scc_scc_cmp"
9223 [(set (match_operand 0 "dominant_cc_register" "")
9224 (compare (ior:SI (match_operator:SI 3 "arm_comparison_operator"
9225 [(match_operand:SI 1 "s_register_operand" "r")
9226 (match_operand:SI 2 "arm_add_operand" "rIL")])
9227 (match_operator:SI 6 "arm_comparison_operator"
9228 [(match_operand:SI 4 "s_register_operand" "r")
9229 (match_operand:SI 5 "arm_add_operand" "rIL")]))
9231 (set (match_operand:SI 7 "s_register_operand" "=r")
9232 (ior:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9233 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
9236 "TARGET_ARM && reload_completed"
9240 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9241 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9243 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
9245 [(set_attr "conds" "set")
9246 (set_attr "length" "16")])

;; *and_scc_scc: AND of two store-flag values; same splitting scheme as
;; *ior_scc_scc but guarded by the DOM_CC_X_AND_Y dominance mode (both
;; the insn condition and the split condition check it — partially
;; visible here due to dropped lines).
9248 (define_insn_and_split "*and_scc_scc"
9249 [(set (match_operand:SI 0 "s_register_operand" "=r")
9250 (and:SI (match_operator:SI 3 "arm_comparison_operator"
9251 [(match_operand:SI 1 "s_register_operand" "r")
9252 (match_operand:SI 2 "arm_add_operand" "rIL")])
9253 (match_operator:SI 6 "arm_comparison_operator"
9254 [(match_operand:SI 4 "s_register_operand" "r")
9255 (match_operand:SI 5 "arm_add_operand" "rIL")])))
9256 (clobber (reg:CC CC_REGNUM))]
9258 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9261 "TARGET_ARM && reload_completed
9262 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9267 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9268 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9270 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
9272 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
9275 [(set_attr "conds" "clob")
9276 (set_attr "length" "16")])

9278 ; If the above pattern is followed by a CMP insn, then the compare is
9279 ; redundant, since we can rework the conditional instruction that follows.
9280 (define_insn_and_split "*and_scc_scc_cmp"
9281 [(set (match_operand 0 "dominant_cc_register" "")
9282 (compare (and:SI (match_operator:SI 3 "arm_comparison_operator"
9283 [(match_operand:SI 1 "s_register_operand" "r")
9284 (match_operand:SI 2 "arm_add_operand" "rIL")])
9285 (match_operator:SI 6 "arm_comparison_operator"
9286 [(match_operand:SI 4 "s_register_operand" "r")
9287 (match_operand:SI 5 "arm_add_operand" "rIL")]))
9289 (set (match_operand:SI 7 "s_register_operand" "=r")
9290 (and:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9291 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
9294 "TARGET_ARM && reload_completed"
9298 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9299 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9301 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
9303 [(set_attr "conds" "set")
9304 (set_attr "length" "16")])

9306 ;; If there is no dominance in the comparison, then we can still save an
9307 ;; instruction in the AND case, since we can know that the second compare
9308 ;; need only zero the value if false (if true, then the value is already
;; (continuation of the above sentence appears to be on a dropped line)
9310 (define_insn_and_split "*and_scc_scc_nodom"
9311 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
9312 (and:SI (match_operator:SI 3 "arm_comparison_operator"
9313 [(match_operand:SI 1 "s_register_operand" "r,r,0")
9314 (match_operand:SI 2 "arm_add_operand" "rIL,0,rIL")])
9315 (match_operator:SI 6 "arm_comparison_operator"
9316 [(match_operand:SI 4 "s_register_operand" "r,r,r")
9317 (match_operand:SI 5 "arm_add_operand" "rIL,rIL,rIL")])))
9318 (clobber (reg:CC CC_REGNUM))]
9320 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9323 "TARGET_ARM && reload_completed"
9324 [(parallel [(set (match_dup 0)
9325 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))
9326 (clobber (reg:CC CC_REGNUM))])
9327 (set (match_dup 7) (match_op_dup 8 [(match_dup 4) (match_dup 5)]))
9329 (if_then_else:SI (match_op_dup 6 [(match_dup 7) (const_int 0)])
9332 "operands[7] = gen_rtx_REG (SELECT_CC_MODE (GET_CODE (operands[6]),
9333 operands[4], operands[5]),
9335 operands[8] = gen_rtx_COMPARE (GET_MODE (operands[7]), operands[4],
9337 [(set_attr "conds" "clob")
9338 (set_attr "length" "20")])
;; NOTE(review): the two forms below are the bodies of two define_split
;; patterns whose opening "(define_split" lines are not present in this
;; view (the fused numbering skips 9340 and 9358/9359).  Both rewrite a
;; CC_NOOV compare of (ior (and x ...) (comparison ...)) — differing
;; only in operand order of the IOR — into: store the OR of the scc
;; values into scratch reg 4, then compare (and reg4 1) so only the low
;; bit is tested.
9341 [(set (reg:CC_NOOV CC_REGNUM)
9342 (compare:CC_NOOV (ior:SI
9343 (and:SI (match_operand:SI 0 "s_register_operand" "")
9345 (match_operator:SI 1 "comparison_operator"
9346 [(match_operand:SI 2 "s_register_operand" "")
9347 (match_operand:SI 3 "arm_add_operand" "")]))
9349 (clobber (match_operand:SI 4 "s_register_operand" ""))]
9352 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
9354 (set (reg:CC_NOOV CC_REGNUM)
9355 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))

;; Second split: same transformation with the comparison as the first
;; arm of the IOR and the (and x ...) as the second.
9360 [(set (reg:CC_NOOV CC_REGNUM)
9361 (compare:CC_NOOV (ior:SI
9362 (match_operator:SI 1 "comparison_operator"
9363 [(match_operand:SI 2 "s_register_operand" "")
9364 (match_operand:SI 3 "arm_add_operand" "")])
9365 (and:SI (match_operand:SI 0 "s_register_operand" "")
9368 (clobber (match_operand:SI 4 "s_register_operand" ""))]
9371 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
9373 (set (reg:CC_NOOV CC_REGNUM)
9374 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
9377 ;; ??? The conditional patterns above need checking for Thumb-2 usefulness

;; *negscc: materialise the negated comparison result (0 or -1) into a
;; register, clobbering CC.  Fast paths: LT-against-zero is just an
;; arithmetic shift of the sign bit; NE and GT use a subs-based two-insn
;; sequence; otherwise cmp followed by a conditional clear/set pair.
9379 (define_insn "*negscc"
9380 [(set (match_operand:SI 0 "s_register_operand" "=r")
9381 (neg:SI (match_operator 3 "arm_comparison_operator"
9382 [(match_operand:SI 1 "s_register_operand" "r")
9383 (match_operand:SI 2 "arm_rhs_operand" "rI")])))
9384 (clobber (reg:CC CC_REGNUM))]
;; FIX(review): the zero test must inspect the comparison's second
;; operand, operands[2] (cf. *cond_arith testing operands[3] and movcond
;; testing operands[4]).  The previous code compared operands[3] — the
;; match_operator rtx, never a const_int — making this fast path dead.
9387 if (GET_CODE (operands[3]) == LT && operands[2] == const0_rtx)
9388 return \"mov\\t%0, %1, asr #31\";
9390 if (GET_CODE (operands[3]) == NE)
9391 return \"subs\\t%0, %1, %2\;mvnne\\t%0, #0\";
9393 if (GET_CODE (operands[3]) == GT)
9394 return \"subs\\t%0, %1, %2\;mvnne\\t%0, %0, asr #31\";
9396 output_asm_insn (\"cmp\\t%1, %2\", operands);
9397 output_asm_insn (\"mov%D3\\t%0, #0\", operands);
9398 return \"mvn%d3\\t%0, #0\";
9400 [(set_attr "conds" "clob")
9401 (set_attr "length" "12")]
;; movcond: general conditional move selecting operand 1 or 2 on a full
;; comparison of operands 3/4.  LT/GE against zero with a register input
;; are special-cased to sign-mask sequences (and/bic with asr #31, or
;; ands/bics with asr #32 plus a fix-up conditional mov); otherwise emit
;; cmp/cmn followed by up to two conditional movs.  Clobbers CC.
9404 (define_insn "movcond"
9405 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9407 (match_operator 5 "arm_comparison_operator"
9408 [(match_operand:SI 3 "s_register_operand" "r,r,r")
9409 (match_operand:SI 4 "arm_add_operand" "rIL,rIL,rIL")])
9410 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
9411 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
9412 (clobber (reg:CC CC_REGNUM))]
9415 if (GET_CODE (operands[5]) == LT
9416 && (operands[4] == const0_rtx))
9418 if (which_alternative != 1 && GET_CODE (operands[1]) == REG)
9420 if (operands[2] == const0_rtx)
9421 return \"and\\t%0, %1, %3, asr #31\";
9422 return \"ands\\t%0, %1, %3, asr #32\;movcc\\t%0, %2\";
9424 else if (which_alternative != 0 && GET_CODE (operands[2]) == REG)
9426 if (operands[1] == const0_rtx)
9427 return \"bic\\t%0, %2, %3, asr #31\";
9428 return \"bics\\t%0, %2, %3, asr #32\;movcs\\t%0, %1\";
9430 /* The only case that falls through to here is when both ops 1 & 2
9434 if (GET_CODE (operands[5]) == GE
9435 && (operands[4] == const0_rtx))
9437 if (which_alternative != 1 && GET_CODE (operands[1]) == REG)
9439 if (operands[2] == const0_rtx)
9440 return \"bic\\t%0, %1, %3, asr #31\";
9441 return \"bics\\t%0, %1, %3, asr #32\;movcs\\t%0, %2\";
9443 else if (which_alternative != 0 && GET_CODE (operands[2]) == REG)
9445 if (operands[1] == const0_rtx)
9446 return \"and\\t%0, %2, %3, asr #31\";
9447 return \"ands\\t%0, %2, %3, asr #32\;movcc\\t%0, %1\";
9449 /* The only case that falls through to here is when both ops 1 & 2
9452 if (GET_CODE (operands[4]) == CONST_INT
9453 && !const_ok_for_arm (INTVAL (operands[4])))
9454 output_asm_insn (\"cmn\\t%3, #%n4\", operands);
9456 output_asm_insn (\"cmp\\t%3, %4\", operands);
9457 if (which_alternative != 0)
9458 output_asm_insn (\"mov%d5\\t%0, %1\", operands);
9459 if (which_alternative != 1)
9460 output_asm_insn (\"mov%D5\\t%0, %2\", operands);
9463 [(set_attr "conds" "clob")
9464 (set_attr "length" "8,8,12")]
9467 ;; ??? The patterns below need checking for Thumb-2 usefulness.

;; *ifcompare_plus_move: (cond ? a+b : c) with an explicit comparison;
;; clobbers CC.  The output template is not visible here (dropped
;; lines) — presumably it compares then defers to the add/move forms
;; below, matching *if_plus_move.
9469 (define_insn "*ifcompare_plus_move"
9470 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9471 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9472 [(match_operand:SI 4 "s_register_operand" "r,r")
9473 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9475 (match_operand:SI 2 "s_register_operand" "r,r")
9476 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))
9477 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
9478 (clobber (reg:CC CC_REGNUM))]
9481 [(set_attr "conds" "clob")
9482 (set_attr "length" "8,12")]

;; *if_plus_move: same selection but the flags are already set (operand
;; 5 is the CC register): conditional add/sub (negated constant via the
;; "L" alternatives), with a conditional mov of the else-value when it
;; is not already in %0.
9485 (define_insn "*if_plus_move"
9486 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
9488 (match_operator 4 "arm_comparison_operator"
9489 [(match_operand 5 "cc_register" "") (const_int 0)])
9491 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
9492 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))
9493 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")))]
9497 sub%d4\\t%0, %2, #%n3
9498 add%d4\\t%0, %2, %3\;mov%D4\\t%0, %1
9499 sub%d4\\t%0, %2, #%n3\;mov%D4\\t%0, %1"
9500 [(set_attr "conds" "use")
9501 (set_attr "length" "4,4,8,8")
9502 (set_attr "type" "*,*,*,*")]

;; *ifcompare_move_plus: mirror of *ifcompare_plus_move with the
;; addition in the else-arm; clobbers CC.  Template lines not visible.
9505 (define_insn "*ifcompare_move_plus"
9506 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9507 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9508 [(match_operand:SI 4 "s_register_operand" "r,r")
9509 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9510 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9512 (match_operand:SI 2 "s_register_operand" "r,r")
9513 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))))
9514 (clobber (reg:CC CC_REGNUM))]
9517 [(set_attr "conds" "clob")
9518 (set_attr "length" "8,12")]

;; *if_move_plus: CC-register form with the addition in the else-arm —
;; the add/sub use the inverted condition (%D4), the mov the direct one.
9521 (define_insn "*if_move_plus"
9522 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
9524 (match_operator 4 "arm_comparison_operator"
9525 [(match_operand 5 "cc_register" "") (const_int 0)])
9526 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")
9528 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
9529 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))))]
9533 sub%D4\\t%0, %2, #%n3
9534 add%D4\\t%0, %2, %3\;mov%d4\\t%0, %1
9535 sub%D4\\t%0, %2, #%n3\;mov%d4\\t%0, %1"
9536 [(set_attr "conds" "use")
9537 (set_attr "length" "4,4,8,8")
9538 (set_attr "type" "*,*,*,*")]
;; *ifcompare_arith_arith: choose between two shiftable-operator results
;; based on a full comparison; clobbers CC.  Output template not visible
;; (dropped lines); cf. *if_arith_arith below for the CC-register form.
9541 (define_insn "*ifcompare_arith_arith"
9542 [(set (match_operand:SI 0 "s_register_operand" "=r")
9543 (if_then_else:SI (match_operator 9 "arm_comparison_operator"
9544 [(match_operand:SI 5 "s_register_operand" "r")
9545 (match_operand:SI 6 "arm_add_operand" "rIL")])
9546 (match_operator:SI 8 "shiftable_operator"
9547 [(match_operand:SI 1 "s_register_operand" "r")
9548 (match_operand:SI 2 "arm_rhs_operand" "rI")])
9549 (match_operator:SI 7 "shiftable_operator"
9550 [(match_operand:SI 3 "s_register_operand" "r")
9551 (match_operand:SI 4 "arm_rhs_operand" "rI")])))
9552 (clobber (reg:CC CC_REGNUM))]
9555 [(set_attr "conds" "clob")
9556 (set_attr "length" "12")]

;; *if_arith_arith: flags already set — emit both ops predicated on
;; opposite conditions (%d5 / %D5); %I6/%I7 expand the operator
;; mnemonics.
9559 (define_insn "*if_arith_arith"
9560 [(set (match_operand:SI 0 "s_register_operand" "=r")
9561 (if_then_else:SI (match_operator 5 "arm_comparison_operator"
9562 [(match_operand 8 "cc_register" "") (const_int 0)])
9563 (match_operator:SI 6 "shiftable_operator"
9564 [(match_operand:SI 1 "s_register_operand" "r")
9565 (match_operand:SI 2 "arm_rhs_operand" "rI")])
9566 (match_operator:SI 7 "shiftable_operator"
9567 [(match_operand:SI 3 "s_register_operand" "r")
9568 (match_operand:SI 4 "arm_rhs_operand" "rI")])))]
9570 "%I6%d5\\t%0, %1, %2\;%I7%D5\\t%0, %3, %4"
9571 [(set_attr "conds" "use")
9572 (set_attr "length" "8")]

;; *ifcompare_arith_move: (cond ? op(a,b) : c) with explicit compare.
;; LT/GE-against-zero with the else-value tied to operand 4 is optimised
;; to a two-insn and/bic sign-mask sequence; otherwise cmp/cmn then
;; conditional op plus optional conditional mov.  Clobbers CC.
9575 (define_insn "*ifcompare_arith_move"
9576 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9577 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9578 [(match_operand:SI 2 "s_register_operand" "r,r")
9579 (match_operand:SI 3 "arm_add_operand" "rIL,rIL")])
9580 (match_operator:SI 7 "shiftable_operator"
9581 [(match_operand:SI 4 "s_register_operand" "r,r")
9582 (match_operand:SI 5 "arm_rhs_operand" "rI,rI")])
9583 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
9584 (clobber (reg:CC CC_REGNUM))]
9587 /* If we have an operation where (op x 0) is the identity operation and
9588 the conditional operator is LT or GE and we are comparing against zero and
9589 everything is in registers then we can do this in two instructions. */
9590 if (operands[3] == const0_rtx
9591 && GET_CODE (operands[7]) != AND
9592 && GET_CODE (operands[5]) == REG
9593 && GET_CODE (operands[1]) == REG
9594 && REGNO (operands[1]) == REGNO (operands[4])
9595 && REGNO (operands[4]) != REGNO (operands[0]))
9597 if (GET_CODE (operands[6]) == LT)
9598 return \"and\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
9599 else if (GET_CODE (operands[6]) == GE)
9600 return \"bic\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
9602 if (GET_CODE (operands[3]) == CONST_INT
9603 && !const_ok_for_arm (INTVAL (operands[3])))
9604 output_asm_insn (\"cmn\\t%2, #%n3\", operands);
9606 output_asm_insn (\"cmp\\t%2, %3\", operands);
9607 output_asm_insn (\"%I7%d6\\t%0, %4, %5\", operands);
9608 if (which_alternative != 0)
9609 return \"mov%D6\\t%0, %1\";
9612 [(set_attr "conds" "clob")
9613 (set_attr "length" "8,12")]

;; *if_arith_move: CC-register form — conditional op, plus a conditional
;; mov of the else-value in alternative 1.
9616 (define_insn "*if_arith_move"
9617 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9618 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
9619 [(match_operand 6 "cc_register" "") (const_int 0)])
9620 (match_operator:SI 5 "shiftable_operator"
9621 [(match_operand:SI 2 "s_register_operand" "r,r")
9622 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
9623 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))]
9627 %I5%d4\\t%0, %2, %3\;mov%D4\\t%0, %1"
9628 [(set_attr "conds" "use")
9629 (set_attr "length" "4,8")
9630 (set_attr "type" "*,*")]
;; *ifcompare_move_arith: (cond ? a : op(b,c)) with explicit compare —
;; mirror of *ifcompare_arith_move, so the GE/LT sign-mask fast paths
;; are swapped (GE -> and, LT -> bic) because the arith result is taken
;; when the condition is FALSE.  Clobbers CC.
9633 (define_insn "*ifcompare_move_arith"
9634 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9635 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9636 [(match_operand:SI 4 "s_register_operand" "r,r")
9637 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9638 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9639 (match_operator:SI 7 "shiftable_operator"
9640 [(match_operand:SI 2 "s_register_operand" "r,r")
9641 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
9642 (clobber (reg:CC CC_REGNUM))]
9645 /* If we have an operation where (op x 0) is the identity operation and
9646 the conditional operator is LT or GE and we are comparing against zero and
9647 everything is in registers then we can do this in two instructions */
9648 if (operands[5] == const0_rtx
9649 && GET_CODE (operands[7]) != AND
9650 && GET_CODE (operands[3]) == REG
9651 && GET_CODE (operands[1]) == REG
9652 && REGNO (operands[1]) == REGNO (operands[2])
9653 && REGNO (operands[2]) != REGNO (operands[0]))
9655 if (GET_CODE (operands[6]) == GE)
9656 return \"and\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
9657 else if (GET_CODE (operands[6]) == LT)
9658 return \"bic\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
9661 if (GET_CODE (operands[5]) == CONST_INT
9662 && !const_ok_for_arm (INTVAL (operands[5])))
9663 output_asm_insn (\"cmn\\t%4, #%n5\", operands);
9665 output_asm_insn (\"cmp\\t%4, %5\", operands);
9667 if (which_alternative != 0)
9668 output_asm_insn (\"mov%d6\\t%0, %1\", operands);
9669 return \"%I7%D6\\t%0, %2, %3\";
9671 [(set_attr "conds" "clob")
9672 (set_attr "length" "8,12")]

;; *if_move_arith: CC-register form — arith on the inverted condition,
;; optional conditional mov of the then-value.
9675 (define_insn "*if_move_arith"
9676 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9678 (match_operator 4 "arm_comparison_operator"
9679 [(match_operand 6 "cc_register" "") (const_int 0)])
9680 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9681 (match_operator:SI 5 "shiftable_operator"
9682 [(match_operand:SI 2 "s_register_operand" "r,r")
9683 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))]
9687 %I5%D4\\t%0, %2, %3\;mov%d4\\t%0, %1"
9688 [(set_attr "conds" "use")
9689 (set_attr "length" "4,8")
9690 (set_attr "type" "*,*")]
;; *ifcompare_move_not: (cond ? a : ~b) with an explicit comparison;
;; clobbers CC.  Output template not visible here (dropped lines); cf.
;; *if_move_not for the CC-register form.
9693 (define_insn "*ifcompare_move_not"
9694 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9696 (match_operator 5 "arm_comparison_operator"
9697 [(match_operand:SI 3 "s_register_operand" "r,r")
9698 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9699 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
9701 (match_operand:SI 2 "s_register_operand" "r,r"))))
9702 (clobber (reg:CC CC_REGNUM))]
9705 [(set_attr "conds" "clob")
9706 (set_attr "length" "8,12")]

;; *if_move_not: flags already set — conditional mvn of %2 on the
;; inverted condition, with the then-value loaded by mov or mvn (the
;; "K" alternative holds the value as a negated constant, %B1).
9709 (define_insn "*if_move_not"
9710 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9712 (match_operator 4 "arm_comparison_operator"
9713 [(match_operand 3 "cc_register" "") (const_int 0)])
9714 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
9715 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
9719 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2
9720 mvn%d4\\t%0, #%B1\;mvn%D4\\t%0, %2"
9721 [(set_attr "conds" "use")
9722 (set_attr "length" "4,8,8")]

;; *ifcompare_not_move: (cond ? ~a : b) with explicit comparison;
;; clobbers CC.  Template lines not visible.
9725 (define_insn "*ifcompare_not_move"
9726 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9728 (match_operator 5 "arm_comparison_operator"
9729 [(match_operand:SI 3 "s_register_operand" "r,r")
9730 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9732 (match_operand:SI 2 "s_register_operand" "r,r"))
9733 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
9734 (clobber (reg:CC CC_REGNUM))]
9737 [(set_attr "conds" "clob")
9738 (set_attr "length" "8,12")]

;; *if_not_move: CC-register form with the mvn on the direct condition.
9741 (define_insn "*if_not_move"
9742 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9744 (match_operator 4 "arm_comparison_operator"
9745 [(match_operand 3 "cc_register" "") (const_int 0)])
9746 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
9747 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
9751 mov%D4\\t%0, %1\;mvn%d4\\t%0, %2
9752 mvn%D4\\t%0, #%B1\;mvn%d4\\t%0, %2"
9753 [(set_attr "conds" "use")
9754 (set_attr "length" "4,8,8")]
;; *ifcompare_shift_move: (cond ? shift(a,b) : c) with explicit compare;
;; clobbers CC.  Template lines not visible; cf. *if_shift_move.
9757 (define_insn "*ifcompare_shift_move"
9758 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9760 (match_operator 6 "arm_comparison_operator"
9761 [(match_operand:SI 4 "s_register_operand" "r,r")
9762 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9763 (match_operator:SI 7 "shift_operator"
9764 [(match_operand:SI 2 "s_register_operand" "r,r")
9765 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])
9766 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
9767 (clobber (reg:CC CC_REGNUM))]
9770 [(set_attr "conds" "clob")
9771 (set_attr "length" "8,12")]

;; *if_shift_move: flags already set — conditional mov-with-shift (%S4
;; prints the shift), plus mov/mvn of the else-value when needed.  The
;; `type' attr distinguishes immediate vs register shift counts.
9774 (define_insn "*if_shift_move"
9775 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9777 (match_operator 5 "arm_comparison_operator"
9778 [(match_operand 6 "cc_register" "") (const_int 0)])
9779 (match_operator:SI 4 "shift_operator"
9780 [(match_operand:SI 2 "s_register_operand" "r,r,r")
9781 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])
9782 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
9786 mov%D5\\t%0, %1\;mov%d5\\t%0, %2%S4
9787 mvn%D5\\t%0, #%B1\;mov%d5\\t%0, %2%S4"
9788 [(set_attr "conds" "use")
9789 (set_attr "shift" "2")
9790 (set_attr "length" "4,8,8")
9791 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
9792 (const_string "alu_shift")
9793 (const_string "alu_shift_reg")))]

;; *ifcompare_move_shift: mirror with the shift in the else-arm;
;; clobbers CC.  Template lines not visible.
9796 (define_insn "*ifcompare_move_shift"
9797 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9799 (match_operator 6 "arm_comparison_operator"
9800 [(match_operand:SI 4 "s_register_operand" "r,r")
9801 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9802 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
9803 (match_operator:SI 7 "shift_operator"
9804 [(match_operand:SI 2 "s_register_operand" "r,r")
9805 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])))
9806 (clobber (reg:CC CC_REGNUM))]
9809 [(set_attr "conds" "clob")
9810 (set_attr "length" "8,12")]

;; *if_move_shift: CC-register form, shift predicated on the inverted
;; condition (%D5).
9813 (define_insn "*if_move_shift"
9814 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9816 (match_operator 5 "arm_comparison_operator"
9817 [(match_operand 6 "cc_register" "") (const_int 0)])
9818 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
9819 (match_operator:SI 4 "shift_operator"
9820 [(match_operand:SI 2 "s_register_operand" "r,r,r")
9821 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])))]
9825 mov%d5\\t%0, %1\;mov%D5\\t%0, %2%S4
9826 mvn%d5\\t%0, #%B1\;mov%D5\\t%0, %2%S4"
9827 [(set_attr "conds" "use")
9828 (set_attr "shift" "2")
9829 (set_attr "length" "4,8,8")
9830 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
9831 (const_string "alu_shift")
9832 (const_string "alu_shift_reg")))]
;; *ifcompare_shift_shift: select between two shifted values with an
;; explicit comparison; clobbers CC.  Template lines not visible.
9835 (define_insn "*ifcompare_shift_shift"
9836 [(set (match_operand:SI 0 "s_register_operand" "=r")
9838 (match_operator 7 "arm_comparison_operator"
9839 [(match_operand:SI 5 "s_register_operand" "r")
9840 (match_operand:SI 6 "arm_add_operand" "rIL")])
9841 (match_operator:SI 8 "shift_operator"
9842 [(match_operand:SI 1 "s_register_operand" "r")
9843 (match_operand:SI 2 "arm_rhs_operand" "rM")])
9844 (match_operator:SI 9 "shift_operator"
9845 [(match_operand:SI 3 "s_register_operand" "r")
9846 (match_operand:SI 4 "arm_rhs_operand" "rM")])))
9847 (clobber (reg:CC CC_REGNUM))]
9850 [(set_attr "conds" "clob")
9851 (set_attr "length" "12")]

;; *if_shift_shift: flags already set — two predicated mov-with-shift
;; insns on opposite conditions; `type' is alu_shift only when BOTH
;; shift counts are immediates.
9854 (define_insn "*if_shift_shift"
9855 [(set (match_operand:SI 0 "s_register_operand" "=r")
9857 (match_operator 5 "arm_comparison_operator"
9858 [(match_operand 8 "cc_register" "") (const_int 0)])
9859 (match_operator:SI 6 "shift_operator"
9860 [(match_operand:SI 1 "s_register_operand" "r")
9861 (match_operand:SI 2 "arm_rhs_operand" "rM")])
9862 (match_operator:SI 7 "shift_operator"
9863 [(match_operand:SI 3 "s_register_operand" "r")
9864 (match_operand:SI 4 "arm_rhs_operand" "rM")])))]
9866 "mov%d5\\t%0, %1%S6\;mov%D5\\t%0, %3%S7"
9867 [(set_attr "conds" "use")
9868 (set_attr "shift" "1")
9869 (set_attr "length" "8")
9870 (set (attr "type") (if_then_else
9871 (and (match_operand 2 "const_int_operand" "")
9872 (match_operand 4 "const_int_operand" ""))
9873 (const_string "alu_shift")
9874 (const_string "alu_shift_reg")))]
;; *ifcompare_not_arith: (cond ? ~a : op(b,c)) with explicit compare;
;; clobbers CC.  Template lines not visible; cf. *if_not_arith.
9877 (define_insn "*ifcompare_not_arith"
9878 [(set (match_operand:SI 0 "s_register_operand" "=r")
9880 (match_operator 6 "arm_comparison_operator"
9881 [(match_operand:SI 4 "s_register_operand" "r")
9882 (match_operand:SI 5 "arm_add_operand" "rIL")])
9883 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
9884 (match_operator:SI 7 "shiftable_operator"
9885 [(match_operand:SI 2 "s_register_operand" "r")
9886 (match_operand:SI 3 "arm_rhs_operand" "rI")])))
9887 (clobber (reg:CC CC_REGNUM))]
9890 [(set_attr "conds" "clob")
9891 (set_attr "length" "12")]

;; *if_not_arith: flags already set — predicated mvn then the arith op
;; on the inverted condition.
9894 (define_insn "*if_not_arith"
9895 [(set (match_operand:SI 0 "s_register_operand" "=r")
9897 (match_operator 5 "arm_comparison_operator"
9898 [(match_operand 4 "cc_register" "") (const_int 0)])
9899 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
9900 (match_operator:SI 6 "shiftable_operator"
9901 [(match_operand:SI 2 "s_register_operand" "r")
9902 (match_operand:SI 3 "arm_rhs_operand" "rI")])))]
9904 "mvn%d5\\t%0, %1\;%I6%D5\\t%0, %2, %3"
9905 [(set_attr "conds" "use")
9906 (set_attr "length" "8")]

;; *ifcompare_arith_not: mirror with the NOT in the else-arm; clobbers
;; CC.  Template lines not visible.
9909 (define_insn "*ifcompare_arith_not"
9910 [(set (match_operand:SI 0 "s_register_operand" "=r")
9912 (match_operator 6 "arm_comparison_operator"
9913 [(match_operand:SI 4 "s_register_operand" "r")
9914 (match_operand:SI 5 "arm_add_operand" "rIL")])
9915 (match_operator:SI 7 "shiftable_operator"
9916 [(match_operand:SI 2 "s_register_operand" "r")
9917 (match_operand:SI 3 "arm_rhs_operand" "rI")])
9918 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))
9919 (clobber (reg:CC CC_REGNUM))]
9922 [(set_attr "conds" "clob")
9923 (set_attr "length" "12")]

;; *if_arith_not: CC-register form of the above — mvn on the inverted
;; condition, arith op on the direct one.
9926 (define_insn "*if_arith_not"
9927 [(set (match_operand:SI 0 "s_register_operand" "=r")
9929 (match_operator 5 "arm_comparison_operator"
9930 [(match_operand 4 "cc_register" "") (const_int 0)])
9931 (match_operator:SI 6 "shiftable_operator"
9932 [(match_operand:SI 2 "s_register_operand" "r")
9933 (match_operand:SI 3 "arm_rhs_operand" "rI")])
9934 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))]
9936 "mvn%D5\\t%0, %1\;%I6%d5\\t%0, %2, %3"
9937 [(set_attr "conds" "use")
9938 (set_attr "length" "8")]
;; *ifcompare_neg_move: (cond ? -a : b) with explicit comparison;
;; clobbers CC.  Template lines not visible; cf. *if_neg_move.
9941 (define_insn "*ifcompare_neg_move"
9942 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9944 (match_operator 5 "arm_comparison_operator"
9945 [(match_operand:SI 3 "s_register_operand" "r,r")
9946 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9947 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))
9948 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
9949 (clobber (reg:CC CC_REGNUM))]
9952 [(set_attr "conds" "clob")
9953 (set_attr "length" "8,12")]

;; *if_neg_move: flags already set — negate via rsb #0 on the direct
;; condition, else-value via mov or mvn (negated "K" constant, %B1).
9956 (define_insn "*if_neg_move"
9957 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9959 (match_operator 4 "arm_comparison_operator"
9960 [(match_operand 3 "cc_register" "") (const_int 0)])
9961 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
9962 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
9966 mov%D4\\t%0, %1\;rsb%d4\\t%0, %2, #0
9967 mvn%D4\\t%0, #%B1\;rsb%d4\\t%0, %2, #0"
9968 [(set_attr "conds" "use")
9969 (set_attr "length" "4,8,8")]

;; *ifcompare_move_neg: mirror with the negation in the else-arm;
;; clobbers CC.  Template lines not visible.
9972 (define_insn "*ifcompare_move_neg"
9973 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9975 (match_operator 5 "arm_comparison_operator"
9976 [(match_operand:SI 3 "s_register_operand" "r,r")
9977 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9978 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
9979 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))))
9980 (clobber (reg:CC CC_REGNUM))]
9983 [(set_attr "conds" "clob")
9984 (set_attr "length" "8,12")]

;; *if_move_neg: CC-register form — rsb #0 on the inverted condition.
9987 (define_insn "*if_move_neg"
9988 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9990 (match_operator 4 "arm_comparison_operator"
9991 [(match_operand 3 "cc_register" "") (const_int 0)])
9992 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
9993 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
9997 mov%d4\\t%0, %1\;rsb%D4\\t%0, %2, #0
9998 mvn%d4\\t%0, #%B1\;rsb%D4\\t%0, %2, #0"
9999 [(set_attr "conds" "use")
10000 (set_attr "length" "4,8,8")]
10003 (define_insn "*arith_adjacentmem"
10004 [(set (match_operand:SI 0 "s_register_operand" "=r")
10005 (match_operator:SI 1 "shiftable_operator"
10006 [(match_operand:SI 2 "memory_operand" "m")
10007 (match_operand:SI 3 "memory_operand" "m")]))
10008 (clobber (match_scratch:SI 4 "=r"))]
10009 "TARGET_ARM && adjacent_mem_locations (operands[2], operands[3])"
10015 HOST_WIDE_INT val1 = 0, val2 = 0;
10017 if (REGNO (operands[0]) > REGNO (operands[4]))
10019 ldm[1] = operands[4];
10020 ldm[2] = operands[0];
10024 ldm[1] = operands[0];
10025 ldm[2] = operands[4];
10028 base_reg = XEXP (operands[2], 0);
10030 if (!REG_P (base_reg))
10032 val1 = INTVAL (XEXP (base_reg, 1));
10033 base_reg = XEXP (base_reg, 0);
10036 if (!REG_P (XEXP (operands[3], 0)))
10037 val2 = INTVAL (XEXP (XEXP (operands[3], 0), 1));
10039 arith[0] = operands[0];
10040 arith[3] = operands[1];
10054 if (val1 !=0 && val2 != 0)
10058 if (val1 == 4 || val2 == 4)
10059 /* Other val must be 8, since we know they are adjacent and neither
10061 output_asm_insn (\"ldm%(ib%)\\t%0, {%1, %2}\", ldm);
10062 else if (const_ok_for_arm (val1) || const_ok_for_arm (-val1))
10064 ldm[0] = ops[0] = operands[4];
10066 ops[2] = GEN_INT (val1);
10067 output_add_immediate (ops);
10069 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
10071 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
10075 /* Offset is out of range for a single add, so use two ldr. */
10078 ops[2] = GEN_INT (val1);
10079 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
10081 ops[2] = GEN_INT (val2);
10082 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
10085 else if (val1 != 0)
10088 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
10090 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
10095 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
10097 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
10099 output_asm_insn (\"%I3%?\\t%0, %1, %2\", arith);
10102 [(set_attr "length" "12")
10103 (set_attr "predicable" "yes")
10104 (set_attr "type" "load1")]
10107 ; This pattern is never tried by combine, so do it as a peephole
10110 [(set (match_operand:SI 0 "arm_general_register_operand" "")
10111 (match_operand:SI 1 "arm_general_register_operand" ""))
10112 (set (reg:CC CC_REGNUM)
10113 (compare:CC (match_dup 1) (const_int 0)))]
10115 [(parallel [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 1) (const_int 0)))
10116 (set (match_dup 0) (match_dup 1))])]
10120 ; Peepholes to spot possible load- and store-multiples, if the ordering is
10121 ; reversed, check that the memory references aren't volatile.
10124 [(set (match_operand:SI 0 "s_register_operand" "=r")
10125 (match_operand:SI 4 "memory_operand" "m"))
10126 (set (match_operand:SI 1 "s_register_operand" "=r")
10127 (match_operand:SI 5 "memory_operand" "m"))
10128 (set (match_operand:SI 2 "s_register_operand" "=r")
10129 (match_operand:SI 6 "memory_operand" "m"))
10130 (set (match_operand:SI 3 "s_register_operand" "=r")
10131 (match_operand:SI 7 "memory_operand" "m"))]
10132 "TARGET_ARM && load_multiple_sequence (operands, 4, NULL, NULL, NULL)"
10134 return emit_ldm_seq (operands, 4);
10139 [(set (match_operand:SI 0 "s_register_operand" "=r")
10140 (match_operand:SI 3 "memory_operand" "m"))
10141 (set (match_operand:SI 1 "s_register_operand" "=r")
10142 (match_operand:SI 4 "memory_operand" "m"))
10143 (set (match_operand:SI 2 "s_register_operand" "=r")
10144 (match_operand:SI 5 "memory_operand" "m"))]
10145 "TARGET_ARM && load_multiple_sequence (operands, 3, NULL, NULL, NULL)"
10147 return emit_ldm_seq (operands, 3);
10152 [(set (match_operand:SI 0 "s_register_operand" "=r")
10153 (match_operand:SI 2 "memory_operand" "m"))
10154 (set (match_operand:SI 1 "s_register_operand" "=r")
10155 (match_operand:SI 3 "memory_operand" "m"))]
10156 "TARGET_ARM && load_multiple_sequence (operands, 2, NULL, NULL, NULL)"
10158 return emit_ldm_seq (operands, 2);
10163 [(set (match_operand:SI 4 "memory_operand" "=m")
10164 (match_operand:SI 0 "s_register_operand" "r"))
10165 (set (match_operand:SI 5 "memory_operand" "=m")
10166 (match_operand:SI 1 "s_register_operand" "r"))
10167 (set (match_operand:SI 6 "memory_operand" "=m")
10168 (match_operand:SI 2 "s_register_operand" "r"))
10169 (set (match_operand:SI 7 "memory_operand" "=m")
10170 (match_operand:SI 3 "s_register_operand" "r"))]
10171 "TARGET_ARM && store_multiple_sequence (operands, 4, NULL, NULL, NULL)"
10173 return emit_stm_seq (operands, 4);
10178 [(set (match_operand:SI 3 "memory_operand" "=m")
10179 (match_operand:SI 0 "s_register_operand" "r"))
10180 (set (match_operand:SI 4 "memory_operand" "=m")
10181 (match_operand:SI 1 "s_register_operand" "r"))
10182 (set (match_operand:SI 5 "memory_operand" "=m")
10183 (match_operand:SI 2 "s_register_operand" "r"))]
10184 "TARGET_ARM && store_multiple_sequence (operands, 3, NULL, NULL, NULL)"
10186 return emit_stm_seq (operands, 3);
10191 [(set (match_operand:SI 2 "memory_operand" "=m")
10192 (match_operand:SI 0 "s_register_operand" "r"))
10193 (set (match_operand:SI 3 "memory_operand" "=m")
10194 (match_operand:SI 1 "s_register_operand" "r"))]
10195 "TARGET_ARM && store_multiple_sequence (operands, 2, NULL, NULL, NULL)"
10197 return emit_stm_seq (operands, 2);
10202 [(set (match_operand:SI 0 "s_register_operand" "")
10203 (and:SI (ge:SI (match_operand:SI 1 "s_register_operand" "")
10205 (neg:SI (match_operator:SI 2 "arm_comparison_operator"
10206 [(match_operand:SI 3 "s_register_operand" "")
10207 (match_operand:SI 4 "arm_rhs_operand" "")]))))
10208 (clobber (match_operand:SI 5 "s_register_operand" ""))]
10210 [(set (match_dup 5) (not:SI (ashiftrt:SI (match_dup 1) (const_int 31))))
10211 (set (match_dup 0) (and:SI (match_op_dup 2 [(match_dup 3) (match_dup 4)])
10216 ;; This split can be used because CC_Z mode implies that the following
10217 ;; branch will be an equality, or an unsigned inequality, so the sign
10218 ;; extension is not needed.
10221 [(set (reg:CC_Z CC_REGNUM)
10223 (ashift:SI (subreg:SI (match_operand:QI 0 "memory_operand" "") 0)
10225 (match_operand 1 "const_int_operand" "")))
10226 (clobber (match_scratch:SI 2 ""))]
10228 && (((unsigned HOST_WIDE_INT) INTVAL (operands[1]))
10229 == (((unsigned HOST_WIDE_INT) INTVAL (operands[1])) >> 24) << 24)"
10230 [(set (match_dup 2) (zero_extend:SI (match_dup 0)))
10231 (set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 1)))]
10233 operands[1] = GEN_INT (((unsigned long) INTVAL (operands[1])) >> 24);
10236 ;; ??? Check the patterns above for Thumb-2 usefulness
10238 (define_expand "prologue"
10239 [(clobber (const_int 0))]
10242 arm_expand_prologue ();
10244 thumb1_expand_prologue ();
10249 (define_expand "epilogue"
10250 [(clobber (const_int 0))]
10253 if (current_function_calls_eh_return)
10254 emit_insn (gen_prologue_use (gen_rtx_REG (Pmode, 2)));
10256 thumb1_expand_epilogue ();
10257 else if (USE_RETURN_INSN (FALSE))
10259 emit_jump_insn (gen_return ());
10262 emit_jump_insn (gen_rtx_UNSPEC_VOLATILE (VOIDmode,
10264 gen_rtx_RETURN (VOIDmode)),
10265 VUNSPEC_EPILOGUE));
10270 ;; Note - although unspec_volatile's USE all hard registers,
10271 ;; USEs are ignored after reload has completed. Thus we need
10272 ;; to add an unspec of the link register to ensure that flow
10273 ;; does not think that it is unused by the sibcall branch that
10274 ;; will replace the standard function epilogue.
10275 (define_insn "sibcall_epilogue"
10276 [(parallel [(unspec:SI [(reg:SI LR_REGNUM)] UNSPEC_PROLOGUE_USE)
10277 (unspec_volatile [(return)] VUNSPEC_EPILOGUE)])]
10280 if (use_return_insn (FALSE, next_nonnote_insn (insn)))
10281 return output_return_instruction (const_true_rtx, FALSE, FALSE);
10282 return arm_output_epilogue (next_nonnote_insn (insn));
10284 ;; Length is absolute worst case
10285 [(set_attr "length" "44")
10286 (set_attr "type" "block")
10287 ;; We don't clobber the conditions, but the potential length of this
10288 ;; operation is sufficient to make conditionalizing the sequence
10289 ;; unlikely to be profitable.
10290 (set_attr "conds" "clob")]
10293 (define_insn "*epilogue_insns"
10294 [(unspec_volatile [(return)] VUNSPEC_EPILOGUE)]
10298 return arm_output_epilogue (NULL);
10299 else /* TARGET_THUMB1 */
10300 return thumb_unexpanded_epilogue ();
10302 ; Length is absolute worst case
10303 [(set_attr "length" "44")
10304 (set_attr "type" "block")
10305 ;; We don't clobber the conditions, but the potential length of this
10306 ;; operation is sufficient to make conditionalizing the sequence
10307 ;; unlikely to be profitable.
10308 (set_attr "conds" "clob")]
10311 (define_expand "eh_epilogue"
10312 [(use (match_operand:SI 0 "register_operand" ""))
10313 (use (match_operand:SI 1 "register_operand" ""))
10314 (use (match_operand:SI 2 "register_operand" ""))]
10318 cfun->machine->eh_epilogue_sp_ofs = operands[1];
10319 if (GET_CODE (operands[2]) != REG || REGNO (operands[2]) != 2)
10321 rtx ra = gen_rtx_REG (Pmode, 2);
10323 emit_move_insn (ra, operands[2]);
10326 /* This is a hack -- we may have crystallized the function type too
10328 cfun->machine->func_type = 0;
10332 ;; This split is only used during output to reduce the number of patterns
10333 ;; that need assembler instructions adding to them. We allowed the setting
10334 ;; of the conditions to be implicit during rtl generation so that
10335 ;; the conditional compare patterns would work. However this conflicts to
10336 ;; some extent with the conditional data operations, so we have to split them
10339 ;; ??? Need to audit these splitters for Thumb-2. Why isn't normal
10340 ;; conditional execution sufficient?
10343 [(set (match_operand:SI 0 "s_register_operand" "")
10344 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10345 [(match_operand 2 "" "") (match_operand 3 "" "")])
10347 (match_operand 4 "" "")))
10348 (clobber (reg:CC CC_REGNUM))]
10349 "TARGET_ARM && reload_completed"
10350 [(set (match_dup 5) (match_dup 6))
10351 (cond_exec (match_dup 7)
10352 (set (match_dup 0) (match_dup 4)))]
10355 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10356 operands[2], operands[3]);
10357 enum rtx_code rc = GET_CODE (operands[1]);
10359 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
10360 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10361 if (mode == CCFPmode || mode == CCFPEmode)
10362 rc = reverse_condition_maybe_unordered (rc);
10364 rc = reverse_condition (rc);
10366 operands[7] = gen_rtx_fmt_ee (rc, VOIDmode, operands[5], const0_rtx);
10371 [(set (match_operand:SI 0 "s_register_operand" "")
10372 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10373 [(match_operand 2 "" "") (match_operand 3 "" "")])
10374 (match_operand 4 "" "")
10376 (clobber (reg:CC CC_REGNUM))]
10377 "TARGET_ARM && reload_completed"
10378 [(set (match_dup 5) (match_dup 6))
10379 (cond_exec (match_op_dup 1 [(match_dup 5) (const_int 0)])
10380 (set (match_dup 0) (match_dup 4)))]
10383 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10384 operands[2], operands[3]);
10386 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
10387 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10392 [(set (match_operand:SI 0 "s_register_operand" "")
10393 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10394 [(match_operand 2 "" "") (match_operand 3 "" "")])
10395 (match_operand 4 "" "")
10396 (match_operand 5 "" "")))
10397 (clobber (reg:CC CC_REGNUM))]
10398 "TARGET_ARM && reload_completed"
10399 [(set (match_dup 6) (match_dup 7))
10400 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
10401 (set (match_dup 0) (match_dup 4)))
10402 (cond_exec (match_dup 8)
10403 (set (match_dup 0) (match_dup 5)))]
10406 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10407 operands[2], operands[3]);
10408 enum rtx_code rc = GET_CODE (operands[1]);
10410 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10411 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10412 if (mode == CCFPmode || mode == CCFPEmode)
10413 rc = reverse_condition_maybe_unordered (rc);
10415 rc = reverse_condition (rc);
10417 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
10422 [(set (match_operand:SI 0 "s_register_operand" "")
10423 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10424 [(match_operand:SI 2 "s_register_operand" "")
10425 (match_operand:SI 3 "arm_add_operand" "")])
10426 (match_operand:SI 4 "arm_rhs_operand" "")
10428 (match_operand:SI 5 "s_register_operand" ""))))
10429 (clobber (reg:CC CC_REGNUM))]
10430 "TARGET_ARM && reload_completed"
10431 [(set (match_dup 6) (match_dup 7))
10432 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
10433 (set (match_dup 0) (match_dup 4)))
10434 (cond_exec (match_dup 8)
10435 (set (match_dup 0) (not:SI (match_dup 5))))]
10438 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10439 operands[2], operands[3]);
10440 enum rtx_code rc = GET_CODE (operands[1]);
10442 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10443 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10444 if (mode == CCFPmode || mode == CCFPEmode)
10445 rc = reverse_condition_maybe_unordered (rc);
10447 rc = reverse_condition (rc);
10449 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
10453 (define_insn "*cond_move_not"
10454 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10455 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
10456 [(match_operand 3 "cc_register" "") (const_int 0)])
10457 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10459 (match_operand:SI 2 "s_register_operand" "r,r"))))]
10463 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2"
10464 [(set_attr "conds" "use")
10465 (set_attr "length" "4,8")]
10468 ;; The next two patterns occur when an AND operation is followed by a
10469 ;; scc insn sequence
10471 (define_insn "*sign_extract_onebit"
10472 [(set (match_operand:SI 0 "s_register_operand" "=r")
10473 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
10475 (match_operand:SI 2 "const_int_operand" "n")))
10476 (clobber (reg:CC CC_REGNUM))]
10479 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
10480 output_asm_insn (\"ands\\t%0, %1, %2\", operands);
10481 return \"mvnne\\t%0, #0\";
10483 [(set_attr "conds" "clob")
10484 (set_attr "length" "8")]
10487 (define_insn "*not_signextract_onebit"
10488 [(set (match_operand:SI 0 "s_register_operand" "=r")
10490 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
10492 (match_operand:SI 2 "const_int_operand" "n"))))
10493 (clobber (reg:CC CC_REGNUM))]
10496 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
10497 output_asm_insn (\"tst\\t%1, %2\", operands);
10498 output_asm_insn (\"mvneq\\t%0, #0\", operands);
10499 return \"movne\\t%0, #0\";
10501 [(set_attr "conds" "clob")
10502 (set_attr "length" "12")]
10504 ;; ??? The above patterns need auditing for Thumb-2
10506 ;; Push multiple registers to the stack. Registers are in parallel (use ...)
10507 ;; expressions. For simplicity, the first register is also in the unspec
10509 (define_insn "*push_multi"
10510 [(match_parallel 2 "multi_register_push"
10511 [(set (match_operand:BLK 0 "memory_operand" "=m")
10512 (unspec:BLK [(match_operand:SI 1 "s_register_operand" "r")]
10513 UNSPEC_PUSH_MULT))])]
10517 int num_saves = XVECLEN (operands[2], 0);
10519 /* For the StrongARM at least it is faster to
10520 use STR to store only a single register.
10521 In Thumb mode always use push, and the assembler will pick
10522 something appropriate. */
10523 if (num_saves == 1 && TARGET_ARM)
10524 output_asm_insn (\"str\\t%1, [%m0, #-4]!\", operands);
10531 strcpy (pattern, \"stmfd\\t%m0!, {%1\");
10533 strcpy (pattern, \"push\\t{%1\");
10535 for (i = 1; i < num_saves; i++)
10537 strcat (pattern, \", %|\");
10539 reg_names[REGNO (XEXP (XVECEXP (operands[2], 0, i), 0))]);
10542 strcat (pattern, \"}\");
10543 output_asm_insn (pattern, operands);
10548 [(set_attr "type" "store4")]
;; stack_tie: ties a blockage (mem:BLK (scratch)) to an unspec of two
;; registers and emits no code at all (length 0).  Presumably acts as a
;; dependency barrier so stack accesses are not moved across stack-pointer
;; adjustments -- NOTE(review): the condition/output lines of this pattern
;; appear missing from this extraction; confirm against upstream arm.md.
10551 (define_insn "stack_tie"
10552 [(set (mem:BLK (scratch))
10553 (unspec:BLK [(match_operand:SI 0 "s_register_operand" "r")
10554 (match_operand:SI 1 "s_register_operand" "r")]
10558 [(set_attr "length" "0")]
10561 ;; Similarly for the floating point registers
10562 (define_insn "*push_fp_multi"
10563 [(match_parallel 2 "multi_register_push"
10564 [(set (match_operand:BLK 0 "memory_operand" "=m")
10565 (unspec:BLK [(match_operand:XF 1 "f_register_operand" "f")]
10566 UNSPEC_PUSH_MULT))])]
10567 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
10572 sprintf (pattern, \"sfmfd\\t%%1, %d, [%%m0]!\", XVECLEN (operands[2], 0));
10573 output_asm_insn (pattern, operands);
10576 [(set_attr "type" "f_store")]
10579 ;; Special patterns for dealing with the constant pool
10581 (define_insn "align_4"
10582 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN)]
10585 assemble_align (32);
10590 (define_insn "align_8"
10591 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN8)]
10594 assemble_align (64);
10599 (define_insn "consttable_end"
10600 [(unspec_volatile [(const_int 0)] VUNSPEC_POOL_END)]
10603 making_const_table = FALSE;
10608 (define_insn "consttable_1"
10609 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_1)]
10612 making_const_table = TRUE;
10613 assemble_integer (operands[0], 1, BITS_PER_WORD, 1);
10614 assemble_zeros (3);
10617 [(set_attr "length" "4")]
10620 (define_insn "consttable_2"
10621 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_2)]
10624 making_const_table = TRUE;
10625 assemble_integer (operands[0], 2, BITS_PER_WORD, 1);
10626 assemble_zeros (2);
10629 [(set_attr "length" "4")]
10632 (define_insn "consttable_4"
10633 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_4)]
10637 making_const_table = TRUE;
10638 switch (GET_MODE_CLASS (GET_MODE (operands[0])))
10643 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[0]);
10644 assemble_real (r, GET_MODE (operands[0]), BITS_PER_WORD);
10648 assemble_integer (operands[0], 4, BITS_PER_WORD, 1);
10653 [(set_attr "length" "4")]
10656 (define_insn "consttable_8"
10657 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_8)]
10661 making_const_table = TRUE;
10662 switch (GET_MODE_CLASS (GET_MODE (operands[0])))
10667 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[0]);
10668 assemble_real (r, GET_MODE (operands[0]), BITS_PER_WORD);
10672 assemble_integer (operands[0], 8, BITS_PER_WORD, 1);
10677 [(set_attr "length" "8")]
10680 ;; Miscellaneous Thumb patterns
10682 (define_expand "tablejump"
10683 [(parallel [(set (pc) (match_operand:SI 0 "register_operand" ""))
10684 (use (label_ref (match_operand 1 "" "")))])]
10689 /* Hopefully, CSE will eliminate this copy. */
10690 rtx reg1 = copy_addr_to_reg (gen_rtx_LABEL_REF (Pmode, operands[1]));
10691 rtx reg2 = gen_reg_rtx (SImode);
10693 emit_insn (gen_addsi3 (reg2, operands[0], reg1));
10694 operands[0] = reg2;
10699 ;; NB never uses BX.
10700 (define_insn "*thumb1_tablejump"
10701 [(set (pc) (match_operand:SI 0 "register_operand" "l*r"))
10702 (use (label_ref (match_operand 1 "" "")))]
10705 [(set_attr "length" "2")]
10708 ;; V5 Instructions,
;; clzsi2: count-leading-zeros of an SImode register, available when the
;; target is 32-bit and has ARMv5 (arm_arch5); marked predicable.
;; NOTE(review): the assembler output template line (original 10714) is
;; missing from this extraction -- verify against upstream arm.md.
10710 (define_insn "clzsi2"
10711 [(set (match_operand:SI 0 "s_register_operand" "=r")
10712 (clz:SI (match_operand:SI 1 "s_register_operand" "r")))]
10713 "TARGET_32BIT && arm_arch5"
10715 [(set_attr "predicable" "yes")])
;; ffssi2: expand find-first-set using clz.  The emitted sequence is
;;   t1 = -x;  t2 = x & t1   (isolates the lowest set bit)
;;   t3 = clz (t2);  result = 32 - t3
;; NOTE(review): some interior lines of this expander (e.g. the brace/
;; declaration lines around original 10721-10728) are missing from this
;; extraction -- confirm against upstream arm.md.
10717 (define_expand "ffssi2"
10718 [(set (match_operand:SI 0 "s_register_operand" "")
10719 (ffs:SI (match_operand:SI 1 "s_register_operand" "")))]
10720 "TARGET_32BIT && arm_arch5"
10725 t1 = gen_reg_rtx (SImode);
10726 t2 = gen_reg_rtx (SImode);
10727 t3 = gen_reg_rtx (SImode);
10729 emit_insn (gen_negsi2 (t1, operands[1]));
10730 emit_insn (gen_andsi3 (t2, operands[1], t1));
10731 emit_insn (gen_clzsi2 (t3, t2));
10732 emit_insn (gen_subsi3 (operands[0], GEN_INT (32), t3));
;; ctzsi2: expand count-trailing-zeros using clz.  The emitted sequence is
;;   t1 = -x;  t2 = x & t1   (isolates the lowest set bit)
;;   t3 = clz (t2);  result = 31 - t3
;; NOTE(review): some interior lines of this expander are missing from this
;; extraction -- confirm against upstream arm.md.
10737 (define_expand "ctzsi2"
10738 [(set (match_operand:SI 0 "s_register_operand" "")
10739 (ctz:SI (match_operand:SI 1 "s_register_operand" "")))]
10740 "TARGET_32BIT && arm_arch5"
10745 t1 = gen_reg_rtx (SImode);
10746 t2 = gen_reg_rtx (SImode);
10747 t3 = gen_reg_rtx (SImode);
10749 emit_insn (gen_negsi2 (t1, operands[1]));
10750 emit_insn (gen_andsi3 (t2, operands[1], t1));
10751 emit_insn (gen_clzsi2 (t3, t2));
10752 emit_insn (gen_subsi3 (operands[0], GEN_INT (31), t3));
10757 ;; V5E instructions.
;; prefetch: standard-named data-prefetch pattern, enabled on 32-bit
;; targets with the ARMv5E extensions (arm_arch5e).  Operand 0 is the
;; address; operands 1 and 2 are the read/write and locality hints.
;; NOTE(review): the assembler output line (presumably a PLD) is missing
;; from this extraction -- verify against upstream arm.md.
10759 (define_insn "prefetch"
10760 [(prefetch (match_operand:SI 0 "address_operand" "p")
10761 (match_operand:SI 1 "" "")
10762 (match_operand:SI 2 "" ""))]
10763 "TARGET_32BIT && arm_arch5e"
10766 ;; General predication pattern
10769 [(match_operator 0 "arm_comparison_operator"
10770 [(match_operand 1 "cc_register" "")
10776 (define_insn "prologue_use"
10777 [(unspec:SI [(match_operand:SI 0 "register_operand" "")] UNSPEC_PROLOGUE_USE)]
10779 "%@ %0 needed for prologue"
10783 ;; Patterns for exception handling
10785 (define_expand "eh_return"
10786 [(use (match_operand 0 "general_operand" ""))]
10791 emit_insn (gen_arm_eh_return (operands[0]));
10793 emit_insn (gen_thumb_eh_return (operands[0]));
10798 ;; We can't expand this before we know where the link register is stored.
10799 (define_insn_and_split "arm_eh_return"
10800 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "r")]
10802 (clobber (match_scratch:SI 1 "=&r"))]
10805 "&& reload_completed"
10809 arm_set_return_address (operands[0], operands[1]);
10814 (define_insn_and_split "thumb_eh_return"
10815 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "l")]
10817 (clobber (match_scratch:SI 1 "=&l"))]
10820 "&& reload_completed"
10824 thumb_set_return_address (operands[0], operands[1]);
;; load_tp_hard: read the thread pointer directly from the CP15 thread-ID
;; register (MRC p15, 0, rd, c13, c0, 3); predicable.
;; NOTE(review): the insn condition line (original 10835) is missing from
;; this extraction -- verify against upstream arm.md.
10832 (define_insn "load_tp_hard"
10833 [(set (match_operand:SI 0 "register_operand" "=r")
10834 (unspec:SI [(const_int 0)] UNSPEC_TLS))]
10836 "mrc%?\\tp15, 0, %0, c13, c0, 3\\t@ load_tp_hard"
10837 [(set_attr "predicable" "yes")]
10840 ;; Doesn't clobber R1-R3. Must use r0 for the first operand.
;; load_tp_soft: obtain the thread pointer by calling the __aeabi_read_tp
;; helper.  The result is pinned to r0 (hard reg 0 in the set); the call
;; clobbers LR, IP and the condition codes, per the clobber list below.
;; NOTE(review): the insn condition line (original 10846) is missing from
;; this extraction -- verify against upstream arm.md.
10841 (define_insn "load_tp_soft"
10842 [(set (reg:SI 0) (unspec:SI [(const_int 0)] UNSPEC_TLS))
10843 (clobber (reg:SI LR_REGNUM))
10844 (clobber (reg:SI IP_REGNUM))
10845 (clobber (reg:CC CC_REGNUM))]
10847 "bl\\t__aeabi_read_tp\\t@ load_tp_soft"
10848 [(set_attr "conds" "clob")]
10851 ;; Load the FPA co-processor patterns
10853 ;; Load the Maverick co-processor patterns
10854 (include "cirrus.md")
10855 ;; Load the Intel Wireless Multimedia Extension patterns
10856 (include "iwmmxt.md")
10857 ;; Load the VFP co-processor patterns
10859 ;; Thumb-2 patterns
10860 (include "thumb2.md")