1 ;; thumb.md Machine description for ARM/Thumb processors
2 ;; Copyright (C) 1996, 1997, 1998 Free Software Foundation, Inc.
3 ;; The basis of this contribution was generated by
4 ;; Richard Earnshaw, Advanced RISC Machines Ltd
6 ;; This file is part of GNU CC.
8 ;; GNU CC is free software; you can redistribute it and/or modify
9 ;; it under the terms of the GNU General Public License as published by
10 ;; the Free Software Foundation; either version 2, or (at your option)
13 ;; GNU CC is distributed in the hope that it will be useful,
14 ;; but WITHOUT ANY WARRANTY; without even the implied warranty of
15 ;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 ;; GNU General Public License for more details.
18 ;; You should have received a copy of the GNU General Public License
19 ;; along with GNU CC; see the file COPYING. If not, write to
20 ;; the Free Software Foundation, 59 Temple Place - Suite 330,
21 ;; Boston, MA 02111-1307, USA.
;; Instruction attributes shared by all patterns in this file.
;; "length" defaults to 2 because most Thumb instructions are 16 bits wide;
;; individual patterns override it (see the set_attr clauses below).
23 ;; LENGTH of an instruction is 2 bytes
24 (define_attr "length" "" (const_int 2))
26 ;; CONDS is set to UNCHANGED when an insn does not affect the condition codes
27 ;; Most insns change the condition codes
28 (define_attr "conds" "changed,unchanged" (const_string "changed"))
30 ;; FAR_JUMP is "yes" if a BL instruction is used to generate a branch to a
;; (sentence truncated in this extract — presumably "...label out of branch
;; range"; the branch patterns below set far_jump when length exceeds the
;; reach of a conditional/unconditional B.  TODO confirm against full file.)
32 (define_attr "far_jump" "yes,no" (const_string "no"))
34 ;; Start with move insns
;; SImode moves.  The expander forces the source into a register whenever the
;; destination is not a REG (Thumb has no mem-to-mem or imm-to-mem moves),
;; but only before/outside reload, when new pseudos may still be created.
;; NOTE(review): several original lines are missing from this extract (the
;; expander's closing braces and the insn's output templates), so the
;; alternative->template mapping of *movsi_insn cannot be verified here.
36 (define_expand "movsi"
37 [(set (match_operand:SI 0 "general_operand" "")
38 (match_operand:SI 1 "general_operand" ""))]
41 if (! (reload_in_progress || reload_completed))
43 if (GET_CODE (operands[0]) != REG)
44 operands[1] = force_reg (SImode, operands[1]);
48 (define_insn "*movsi_insn"
49 [(set (match_operand:SI 0 "nonimmediate_operand" "=l,l,l,l,l,>,l,m,*r,*h")
50 (match_operand:SI 1 "general_operand" "l,I,J,K,>,l,mi,l,*h,*r"))]
51 "register_operand (operands[0], SImode)
52 || register_operand (operands[1], SImode)"
64 [(set_attr "length" "2,2,4,4,2,2,2,2,2,2")])
;; Two define_splits that synthesize SImode constants the mov immediate
;; cannot encode (NOTE(review): the "(define_split" opener lines themselves
;; are missing from this extract).
;; Split 1: a constant expressible as an 8-bit value (mask 0xff) shifted
;; left by i (i < 25, per the loop below) becomes "mov #small" + "lsl #i".
;; Split 2: a constant in (-256, 0) becomes "mov #(-value)" + "neg".
67 [(set (match_operand:SI 0 "register_operand" "")
68 (match_operand:SI 1 "const_int_operand" ""))]
69 "thumb_shiftable_const (INTVAL (operands[1]))"
70 [(set (match_dup 0) (match_dup 1))
71 (set (match_dup 0) (ashift:SI (match_dup 0) (match_dup 2)))]
74 unsigned HOST_WIDE_INT val = INTVAL (operands[1]);
75 unsigned HOST_WIDE_INT mask = 0xff;
77 for (i = 0; i < 25; i++)
78 if ((val & (mask << i)) == val)
84 operands[1] = GEN_INT (val >> i);
85 operands[2] = GEN_INT (i);
89 [(set (match_operand:SI 0 "register_operand" "")
90 (match_operand:SI 1 "const_int_operand" ""))]
91 "INTVAL (operands[1]) < 0 && INTVAL (operands[1]) > -256"
92 [(set (match_dup 0) (match_dup 1))
93 (set (match_dup 0) (neg:SI (match_dup 0)))]
95 operands[1] = GEN_INT (- INTVAL (operands[1]));
98 ;;(define_expand "reload_outsi"
99 ;; [(set (match_operand:SI 2 "register_operand" "=&l")
100 ;; (match_operand:SI 1 "register_operand" "h"))
101 ;; (set (match_operand:SI 0 "reload_memory_operand" "=o")
105 ;;/* thumb_reload_out_si (operands);
;; HImode moves.  Beyond the usual "force source into a register when the
;; destination is memory" rule, the expander legitimizes invalid MEM
;; addresses on both operands (copying the address into a register), and
;; during reload it widens a large-constant load to SImode via gen_movsi,
;; wrapping a non-REG destination in a paradoxical SUBREG first.
59 (define_expand "movhi"
110 [(set (match_operand:HI 0 "general_operand" "")
111 (match_operand:HI 1 "general_operand" ""))]
115 if (! (reload_in_progress || reload_completed))
117 if (GET_CODE (operands[0]) != REG)
118 operands[1] = force_reg (HImode, operands[1]);
120 /* ??? We shouldn't really get invalid addresses here, but this can
121 happen if we are passed a SP (never OK for HImode/QImode) or virtual
122 register (rejected by GO_IF_LEGITIMATE_ADDRESS for HImode/QImode)
124 /* ??? This should perhaps be fixed elsewhere, for instance, in
125 fixup_stack_1, by checking for other kinds of invalid addresses,
126 e.g. a bare reference to a virtual register. This may confuse the
127 alpha though, which must handle this case differently. */
128 if (GET_CODE (operands[0]) == MEM
129 && ! memory_address_p (GET_MODE (operands[0]),
130 XEXP (operands[0], 0)))
132 rtx temp = copy_to_reg (XEXP (operands[0], 0));
133 operands[0] = change_address (operands[0], VOIDmode, temp);
135 if (GET_CODE (operands[1]) == MEM
136 && ! memory_address_p (GET_MODE (operands[1]),
137 XEXP (operands[1], 0)))
139 rtx temp = copy_to_reg (XEXP (operands[1], 0));
140 operands[1] = change_address (operands[1], VOIDmode, temp);
143 /* Handle loading a large integer during reload */
144 else if (GET_CODE (operands[1]) == CONST_INT
145 && ! CONST_OK_FOR_LETTER_P (INTVAL (operands[1]), 'I'))
147 /* Writing a constant to memory needs a scratch, which should
148 be handled with SECONDARY_RELOADs. */
149 if (GET_CODE (operands[0]) != REG)
152 operands[0] = gen_rtx (SUBREG, SImode, operands[0], 0);
153 emit_insn (gen_movsi (operands[0], operands[1]));
158 (define_insn "*movhi_insn"
159 [(set (match_operand:HI 0 "nonimmediate_operand" "=l,l,m,*r,*h,l")
160 (match_operand:HI 1 "general_operand" "l,m,l,*h,*r,I"))]
161 "register_operand (operands[0], HImode)
162 || register_operand (operands[1], HImode)"
;; QImode moves.  Structurally identical to the movhi expander above:
;; force non-REG destinations' sources into registers, legitimize invalid
;; MEM addresses on either operand, and widen large-constant loads to
;; SImode during reload.
95 (define_expand "movqi"
172 [(set (match_operand:QI 0 "general_operand" "")
173 (match_operand:QI 1 "general_operand" ""))]
177 if (! (reload_in_progress || reload_completed))
179 if (GET_CODE (operands[0]) != REG)
180 operands[1] = force_reg (QImode, operands[1]);
182 /* ??? We shouldn't really get invalid addresses here, but this can
183 happen if we are passed a SP (never OK for HImode/QImode) or virtual
184 register (rejected by GO_IF_LEGITIMATE_ADDRESS for HImode/QImode)
186 /* ??? This should perhaps be fixed elsewhere, for instance, in
187 fixup_stack_1, by checking for other kinds of invalid addresses,
188 e.g. a bare reference to a virtual register. This may confuse the
189 alpha though, which must handle this case differently. */
190 if (GET_CODE (operands[0]) == MEM
191 && ! memory_address_p (GET_MODE (operands[0]),
192 XEXP (operands[0], 0)))
194 rtx temp = copy_to_reg (XEXP (operands[0], 0));
195 operands[0] = change_address (operands[0], VOIDmode, temp);
197 if (GET_CODE (operands[1]) == MEM
198 && ! memory_address_p (GET_MODE (operands[1]),
199 XEXP (operands[1], 0)))
201 rtx temp = copy_to_reg (XEXP (operands[1], 0));
202 operands[1] = change_address (operands[1], VOIDmode, temp);
205 /* Handle loading a large integer during reload */
206 else if (GET_CODE (operands[1]) == CONST_INT
207 && ! CONST_OK_FOR_LETTER_P (INTVAL (operands[1]), 'I'))
209 /* Writing a constant to memory needs a scratch, which should
210 be handled with SECONDARY_RELOADs. */
211 if (GET_CODE (operands[0]) != REG)
214 operands[0] = gen_rtx (SUBREG, SImode, operands[0], 0);
215 emit_insn (gen_movsi (operands[0], operands[1]));
220 (define_insn "*movqi_insn"
221 [(set (match_operand:QI 0 "nonimmediate_operand" "=l,l,m,*r,*h,l")
222 (match_operand:QI 1 "general_operand" "l,m,l,*h,*r,I"))]
223 "register_operand (operands[0], QImode)
224 || register_operand (operands[1], QImode)"
;; DImode moves.  A 64-bit value lives in a register pair %0/%H0 (low/high);
;; the output code orders the two transfer instructions by comparing REGNOs
;; so an overlapping source/destination pair is never clobbered mid-move.
;; ldmia/stmia handle the auto-increment ('>') and plain memory alternatives.
;; NOTE(review): the case labels of the switch are missing from this
;; extract, so the alternative numbering cannot be verified here.
233 (define_expand "movdi"
234 [(set (match_operand:DI 0 "general_operand" "")
235 (match_operand:DI 1 "general_operand" ""))]
238 if (! (reload_in_progress || reload_completed))
240 if (GET_CODE (operands[0]) != REG)
241 operands[1] = force_reg (DImode, operands[1]);
245 ;;; ??? This should have alternatives for constants.
246 ;;; ??? This was originally identical to the movdf_insn pattern.
247 ;;; ??? The 'i' constraint looks funny, but it should always be replaced by
248 ;;; thumb_reorg with a memory reference.
249 (define_insn "*movdi_insn"
250 [(set (match_operand:DI 0 "general_operand" "=l,l,l,l,>,l,m,*r")
251 (match_operand:DI 1 "general_operand" "l,I,J,>,l,mi,l,*r"))]
252 "register_operand (operands[0], DImode)
253 || register_operand (operands[1], DImode)"
256 switch (which_alternative)
259 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
260 return \"add\\t%0, %1, #0\;add\\t%H0, %H1, #0\";
261 return \"add\\t%H0, %H1, #0\;add\\t%0, %1, #0\";
263 return \"mov\\t%Q0, %1\;mov\\t%R0, #0\";
265 operands[1] = GEN_INT (- INTVAL (operands[1]));
266 return \"mov\\t%Q0, %1\;neg\\t%Q0, %Q0\;asr\\t%R0, %Q0, #31\";
268 return \"ldmia\\t%1, {%0, %H0}\";
270 return \"stmia\\t%0, {%1, %H1}\";
272 return thumb_load_double_from_address (operands);
274 operands[2] = gen_rtx (MEM, SImode, plus_constant (XEXP (operands[0], 0), 4));
275 output_asm_insn (\"str\\t%1, %0\;str\\t%H1, %2\", operands);
278 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
279 return \"mov\\t%0, %1\;mov\\t%H0, %H1\";
280 return \"mov\\t%H0, %H1\;mov\\t%0, %1\";
282 }"[(set_attr "length" "4,4,6,2,2,6,4,4")])
;; DFmode moves.  Same register-pair discipline as *movdi_insn (ordered by
;; REGNO to survive overlap), minus the constant alternatives — per the ???
;; notes below this pattern and movdi_insn were originally identical.
284 (define_expand "movdf"
285 [(set (match_operand:DF 0 "general_operand" "")
286 (match_operand:DF 1 "general_operand" ""))]
289 if (! (reload_in_progress || reload_completed))
291 if (GET_CODE (operands[0]) != REG)
292 operands[1] = force_reg (DFmode, operands[1]);
296 ;;; ??? This should have alternatives for constants.
297 ;;; ??? This was originally identical to the movdi_insn pattern.
298 ;;; ??? The 'F' constraint looks funny, but it should always be replaced by
299 ;;; thumb_reorg with a memory reference.
300 (define_insn "*movdf_insn"
301 [(set (match_operand:DF 0 "general_operand" "=l,l,>,l,m,*r")
302 (match_operand:DF 1 "general_operand" "l,>,l,mF,l,*r"))]
303 "register_operand (operands[0], DFmode)
304 || register_operand (operands[1], DFmode)"
306 switch (which_alternative)
309 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
310 return \"add\\t%0, %1, #0\;add\\t%H0, %H1, #0\";
311 return \"add\\t%H0, %H1, #0\;add\\t%0, %1, #0\";
313 return \"ldmia\\t%1, {%0, %H0}\";
315 return \"stmia\\t%0, {%1, %H1}\";
317 return thumb_load_double_from_address (operands);
319 operands[2] = gen_rtx (MEM, SImode, plus_constant (XEXP (operands[0], 0), 4));
320 output_asm_insn (\"str\\t%1, %0\;str\\t%H1, %2\", operands);
323 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
324 return \"mov\\t%0, %1\;mov\\t%H0, %H1\";
325 return \"mov\\t%H0, %H1\;mov\\t%0, %1\";
327 "[(set_attr "length" "4,2,2,6,4,4")])
;; SFmode moves.  Single-register FP moves; same force_reg rule as movsi.
;; NOTE(review): the output templates of *movsf_insn are missing from this
;; extract.
329 (define_expand "movsf"
330 [(set (match_operand:SF 0 "general_operand" "")
331 (match_operand:SF 1 "general_operand" ""))]
334 if (! (reload_in_progress || reload_completed))
336 if (GET_CODE (operands[0]) != REG)
337 operands[1] = force_reg (SFmode, operands[1]);
341 ;;; ??? This should have alternatives for constants.
342 (define_insn "*movsf_insn"
343 [(set (match_operand:SF 0 "nonimmediate_operand" "=l,l,>,l,m,*r,*h")
344 (match_operand:SF 1 "general_operand" "l,>,l,mF,l,*h,*r"))]
345 "register_operand (operands[0], SFmode)
346 || register_operand (operands[1], SFmode)"
356 ;; Widening move insns
;; Zero extension.  For register sources (this Thumb subset has ldrh/ldrb
;; only for memory) the expanders synthesize the extension as a left shift
;; that pushes the value to the top of the word followed by a logical right
;; shift back down: 16/16 for HImode, 24/24 for QImode.  Memory sources
;; fall through to the *_insn patterns.
358 (define_expand "zero_extendhisi2"
359 [(set (match_operand:SI 0 "register_operand" "")
360 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "")))]
363 if (GET_CODE (operands[1]) != MEM)
365 rtx temp = gen_reg_rtx (SImode);
367 operands[1] = force_reg (HImode, operands[1]);
368 operands[1] = gen_lowpart (SImode, operands[1]);
369 emit_insn (gen_ashlsi3 (temp, operands[1], GEN_INT (16)));
370 emit_insn (gen_lshrsi3 (operands[0], temp, GEN_INT (16)));
375 (define_insn "*zero_extendhisi2_insn"
376 [(set (match_operand:SI 0 "register_operand" "=l")
377 (zero_extend:SI (match_operand:HI 1 "memory_operand" "m")))]
381 (define_expand "zero_extendqisi2"
382 [(set (match_operand:SI 0 "register_operand" "")
383 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "")))]
386 if (GET_CODE (operands[1]) != MEM)
388 rtx temp = gen_reg_rtx (SImode);
390 operands[1] = force_reg (QImode, operands[1]);
391 operands[1] = gen_lowpart (SImode, operands[1]);
392 emit_insn (gen_ashlsi3 (temp, operands[1], GEN_INT (24)));
393 emit_insn (gen_lshrsi3 (operands[0], temp, GEN_INT (24)));
398 (define_insn "*zero_extendqisi2_insn"
399 [(set (match_operand:SI 0 "register_operand" "=l")
400 (zero_extend:SI (match_operand:QI 1 "memory_operand" "m")))]
;; Sign extension.  Register sources use the shift-up/arithmetic-shift-down
;; trick (lsl then asr).  Memory sources are harder: Thumb ldrsh/ldrsb only
;; accept a [reg, reg] address form, so *extendhisi2_insn carries a scratch
;; register to materialize a constant offset, and *extendqisi2_insn emits
;; various fallback sequences (ldrb + lsl #24 + asr #24) when the address
;; registers collide with the destination.
404 (define_expand "extendhisi2"
405 [(parallel [(set (match_operand:SI 0 "register_operand" "")
406 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "")))
407 (clobber (match_scratch:SI 2 ""))])]
410 if (GET_CODE (operands[1]) != MEM)
412 rtx temp = gen_reg_rtx (SImode);
414 operands[1] = force_reg (HImode, operands[1]);
415 operands[1] = gen_lowpart (SImode, operands[1]);
416 emit_insn (gen_ashlsi3 (temp, operands[1], GEN_INT (16)));
417 emit_insn (gen_ashrsi3 (operands[0], temp, GEN_INT (16)));
422 (define_insn "*extendhisi2_insn"
423 [(set (match_operand:SI 0 "register_operand" "=l")
424 (sign_extend:SI (match_operand:HI 1 "memory_operand" "m")))
425 (clobber (match_scratch:SI 2 "=&l"))]
430 /* This code used to try to use 'V', and fix the address only if it was
431 offsettable, but this fails for e.g. REG+48 because 48 is outside the
432 range of QImode offsets, and offsettable_address_p does a QImode
435 if (GET_CODE (XEXP (operands[1], 0)) == PLUS)
437 ops[1] = XEXP (XEXP (operands[1], 0), 0);
438 ops[2] = XEXP (XEXP (operands[1], 0), 1);
442 ops[1] = XEXP (operands[1], 0);
445 if (GET_CODE (ops[2]) == REG)
446 return \"ldrsh\\t%0, %1\";
448 ops[0] = operands[0];
449 ops[3] = operands[2];
450 output_asm_insn (\"mov\\t%3, %2\;ldrsh\\t%0, [%1, %3]\", ops);
453 [(set_attr "length" "4")])
455 (define_expand "extendqisi2"
456 [(set (match_operand:SI 0 "register_operand" "")
457 (sign_extend:SI (match_operand:QI 1 "nonimmediate_operand" "")))]
460 if (GET_CODE (operands[1]) != MEM)
462 rtx temp = gen_reg_rtx (SImode);
464 operands[1] = force_reg (QImode, operands[1]);
465 operands[1] = gen_lowpart (SImode, operands[1]);
466 emit_insn (gen_ashlsi3 (temp, operands[1], GEN_INT (24)));
467 emit_insn (gen_ashrsi3 (operands[0], temp, GEN_INT (24)));
472 (define_insn "*extendqisi2_insn"
473 [(set (match_operand:SI 0 "register_operand" "=l,l")
474 (sign_extend:SI (match_operand:QI 1 "memory_operand" "V,m")))]
480 if (which_alternative == 0)
481 return \"ldrsb\\t%0, %1\";
482 ops[0] = operands[0];
483 if (GET_CODE (XEXP (operands[1], 0)) == PLUS)
485 ops[1] = XEXP (XEXP (operands[1], 0), 0);
486 ops[2] = XEXP (XEXP (operands[1], 0), 1);
488 if (GET_CODE (ops[1]) == REG && GET_CODE (ops[2]) == REG)
489 output_asm_insn (\"ldrsb\\t%0, [%1, %2]\", ops);
490 else if (GET_CODE (ops[1]) == REG)
492 if (REGNO (ops[1]) == REGNO (operands[0]))
493 output_asm_insn (\"ldrb\\t%0, [%1, %2]\;lsl\\t%0, %0, #24\;asr\\t%0, %0, #24\", ops);
495 output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
499 if (REGNO (ops[2]) == REGNO (operands[0]))
500 output_asm_insn (\"ldrb\\t%0, [%2, %1]\;lsl\\t%0, %0, #24\;asr\\t%0, %0, #24\", ops);
502 output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
505 else if (REGNO (operands[0]) == REGNO (XEXP (operands[1], 0)))
507 output_asm_insn (\"ldrb\\t%0, [%0, #0]\;lsl\\t%0, %0, #24\;asr\\t%0, %0, #24\", ops);
511 ops[1] = XEXP (operands[1], 0);
513 output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
517 [(set_attr "length" "2,6")])
519 ;; We don't really have extzv, but defining this using shifts helps
520 ;; to reduce register pressure later on.
;; extzv(dst, src, width=op2, pos=op3): extract an unsigned bit-field as
;; lsl #(32-width-pos) into a fresh temp (op4) then lsr #(32-width).
;; When the left-shift count is zero a single lshrsi3 suffices and the
;; expander emits it directly.
522 (define_expand "extzv"
524 (ashift:SI (match_operand:SI 1 "register_operand" "")
525 (match_operand:SI 2 "const_int_operand" "")))
526 (set (match_operand:SI 0 "register_operand" "")
527 (lshiftrt:SI (match_dup 4)
528 (match_operand:SI 3 "const_int_operand" "")))]
532 HOST_WIDE_INT lshift = 32 - INTVAL (operands[2]) - INTVAL (operands[3]);
533 HOST_WIDE_INT rshift = 32 - INTVAL (operands[2]);
534 operands[3] = GEN_INT (rshift);
537 emit_insn (gen_lshrsi3 (operands[0], operands[1], operands[3]));
540 operands[2] = GEN_INT (lshift);
541 operands[4] = gen_reg_rtx (SImode);
;; Block moves.  movstrqi only handles word-aligned (op3 == 4) copies of a
;; known constant size up to 48 bytes; everything else FAILs so the caller
;; falls back to a library copy.  The actual emission is done in
;; thumb_expand_movstrqi, which presumably chains the movmem12b/movmem8b
;; insns below (3- and 2-word ldmia/stmia bursts with post-incremented
;; pointers) — TODO confirm against thumb.c.
547 (define_expand "movstrqi"
548 [(match_operand:BLK 0 "general_operand" "")
549 (match_operand:BLK 1 "general_operand" "")
550 (match_operand:SI 2 "" "")
551 (match_operand:SI 3 "const_int_operand" "")]
554 if (INTVAL (operands[3]) != 4
555 || GET_CODE (operands[2]) != CONST_INT
556 || INTVAL (operands[2]) > 48)
559 thumb_expand_movstrqi (operands);
;; Copy 12 bytes: three SImode loads/stores through scratch regs 2-4, then
;; advance both pointers by 12.
563 (define_insn "movmem12b"
564 [(set (mem:SI (match_operand:SI 0 "register_operand" "+&l"))
565 (mem:SI (match_operand:SI 1 "register_operand" "+&l")))
566 (set (mem:SI (plus:SI (match_dup 0) (const_int 4)))
567 (mem:SI (plus:SI (match_dup 1) (const_int 4))))
568 (set (mem:SI (plus:SI (match_dup 0) (const_int 8)))
569 (mem:SI (plus:SI (match_dup 1) (const_int 8))))
570 (set (match_dup 0) (plus:SI (match_dup 0) (const_int 12)))
571 (set (match_dup 1) (plus:SI (match_dup 1) (const_int 12)))
572 (clobber (match_scratch:SI 2 "=&l"))
573 (clobber (match_scratch:SI 3 "=&l"))
574 (clobber (match_scratch:SI 4 "=&l"))]
576 "* return output_move_mem_multiple (3, operands);"
577 [(set_attr "length" "4")])
;; Copy 8 bytes: two SImode loads/stores, pointers advanced by 8.
579 (define_insn "movmem8b"
580 [(set (mem:SI (match_operand:SI 0 "register_operand" "+&l"))
581 (mem:SI (match_operand:SI 1 "register_operand" "+&l")))
582 (set (mem:SI (plus:SI (match_dup 0) (const_int 4)))
583 (mem:SI (plus:SI (match_dup 1) (const_int 4))))
584 (set (match_dup 0) (plus:SI (match_dup 0) (const_int 8)))
585 (set (match_dup 1) (plus:SI (match_dup 1) (const_int 8)))
586 (clobber (match_scratch:SI 2 "=&l"))
587 (clobber (match_scratch:SI 3 "=&l"))]
589 "* return output_move_mem_multiple (2, operands);"
590 [(set_attr "length" "4")])
;; 64-bit add: low words with add (sets carry), high words with adc.
;; Operand 1 is commutative ('%') and tied to the output ('0').
594 (define_insn "adddi3"
595 [(set (match_operand:DI 0 "register_operand" "=l")
596 (plus:DI (match_operand:DI 1 "register_operand" "%0")
597 (match_operand:DI 2 "register_operand" "l")))]
599 "add\\t%Q0, %Q0, %Q2\;adc\\t%R0, %R0, %R2"
600 [(set_attr "conds" "changed")
601 (set_attr "length" "8")])
603 ;; register group 'k' is a single register group containing only the stack
604 ;; register. Trying to reload it will always fail catastrophically,
605 ;; so never allow those alternatives to match if reloading is needed.
;; addsi3: the asms[] table is indexed by which_alternative; alternative 2
;; (three-register "add %0, %1, %2" with an I/L constant) is rewritten as a
;; sub with the negated constant (#%n2) when the constant is negative.
606 (define_insn "addsi3"
607 [(set (match_operand:SI 0 "register_operand" "=l,l,l,*r,*h,l,!k")
608 (plus:SI (match_operand:SI 1 "register_operand" "%0,0,l,*0,*0,!k,!k")
609 (match_operand:SI 2 "nonmemory_operand" "I,J,lL,*h,*r,!M,!O")))]
612 static char *asms[] =
614 \"add\\t%0, %0, %2\",
615 \"sub\\t%0, %0, #%n2\",
616 \"add\\t%0, %1, %2\",
617 \"add\\t%0, %0, %2\",
618 \"add\\t%0, %0, %2\",
619 \"add\\t%0, %1, %2\",
622 if (which_alternative == 2 && GET_CODE (operands[2]) == CONST_INT
623 && INTVAL (operands[2]) < 0)
624 return \"sub\\t%0, %1, #%n2\";
625 return asms[which_alternative];
628 ; reloading and elimination of the frame pointer can sometimes cause this
629 ; optimization to be missed.
631 [(set (match_operand:SI 0 "register_operand" "=l")
632 (match_operand:SI 1 "const_int_operand" "M"))
634 (plus:SI (match_dup 0) (match_operand:SI 2 "register_operand" "k")))]
635 "REGNO (operands[2]) == STACK_POINTER_REGNUM
636 && (unsigned HOST_WIDE_INT) (INTVAL (operands[1])) < 1024
637 && (INTVAL (operands[1]) & 3) == 0"
;; 64-bit subtract: sub (sets borrow) on low words, sbc on high words.
;; Operand 1 is tied to the output but NOT commutative (minus is ordered).
640 (define_insn "subdi3"
641 [(set (match_operand:DI 0 "register_operand" "=l")
642 (minus:DI (match_operand:DI 1 "register_operand" "0")
643 (match_operand:DI 2 "register_operand" "l")))]
645 "sub\\t%Q0, %Q0, %Q2\;sbc\\t%R0, %R0, %R2"
646 [(set_attr "conds" "changed")
647 (set_attr "length" "8")])
649 (define_insn "subsi3"
650 [(set (match_operand:SI 0 "register_operand" "=l")
651 (minus:SI (match_operand:SI 1 "register_operand" "l")
652 (match_operand:SI 2 "register_operand" "l")))]
656 ;; We must ensure that one input matches the output, and that the other input
657 ;; does not match the output. Using 0 satisfies the first, and using &
658 ;; satisfies the second. Unfortunately, this fails when operands 1 and 2
659 ;; are the same, because reload will make operand 0 match operand 1 without
660 ;; realizing that this conflicts with operand 2. We fix this by adding another
661 ;; alternative to match this case, and then `reload' it ourselves. This
662 ;; alternative must come first.
;; Alternatives 0/1 copy operand 1 into the (earlyclobber) destination and
;; then multiply in place; alternative 2 (operand 1 already tied to 0) is a
;; single mul.
663 (define_insn "mulsi3"
664 [(set (match_operand:SI 0 "register_operand" "=&l,&l,&l")
665 (mult:SI (match_operand:SI 1 "register_operand" "%l,*h,0")
666 (match_operand:SI 2 "register_operand" "l,l,l")))]
670 if (which_alternative < 2)
671 return \"mov\\t%0, %1\;mul\\t%0, %0, %2\";
673 return \"mul\\t%0, %0, %2\";
675 [(set_attr "length" "4,4,2")])
677 (define_insn "negsi2"
678 [(set (match_operand:SI 0 "register_operand" "=l")
679 (neg:SI (match_operand:SI 1 "register_operand" "l")))]
;; AND.  Thumb has no and-immediate, so the expander rewrites constant
;; masks into cheaper forms: ~mask < 256 becomes a bic with the inverted
;; constant in a register; a low mask of i ones becomes an extzv
;; (shift pair); an inverted low mask (~((1<<i)-1)) becomes lsr-then-lsl.
;; Anything else just forces the constant into a register.
685 (define_expand "andsi3"
686 [(set (match_operand:SI 0 "register_operand" "")
687 (and:SI (match_operand:SI 1 "register_operand" "")
688 (match_operand:SI 2 "nonmemory_operand" "")))]
691 if (GET_CODE (operands[2]) != CONST_INT)
692 operands[2] = force_reg (SImode, operands[2]);
696 if (((unsigned HOST_WIDE_INT) ~ INTVAL (operands[2])) < 256)
698 operands[2] = force_reg (SImode, GEN_INT (~INTVAL (operands[2])));
699 emit_insn (gen_bicsi3 (operands[0], operands[2], operands[1]));
703 for (i = 9; i <= 31; i++)
704 if ((((HOST_WIDE_INT) 1) << i) - 1 == INTVAL (operands[2]))
706 emit_insn (gen_extzv (operands[0], operands[1], GEN_INT (i),
710 else if ((((HOST_WIDE_INT) 1) << i) - 1 == ~ INTVAL (operands[2]))
712 rtx shift = GEN_INT (i);
713 rtx reg = gen_reg_rtx (SImode);
714 emit_insn (gen_lshrsi3 (reg, operands[1], shift));
715 emit_insn (gen_ashlsi3 (operands[0], reg, shift));
719 operands[2] = force_reg (SImode, operands[2]);
723 (define_insn "*andsi3_insn"
724 [(set (match_operand:SI 0 "register_operand" "=l")
725 (and:SI (match_operand:SI 1 "register_operand" "%0")
726 (match_operand:SI 2 "register_operand" "l")))]
730 (define_insn "bicsi3"
731 [(set (match_operand:SI 0 "register_operand" "=l")
732 (and:SI (not:SI (match_operand:SI 1 "register_operand" "l"))
733 (match_operand:SI 2 "register_operand" "0")))]
737 (define_insn "iorsi3"
738 [(set (match_operand:SI 0 "register_operand" "=l")
739 (ior:SI (match_operand:SI 1 "register_operand" "%0")
740 (match_operand:SI 2 "register_operand" "l")))]
744 (define_insn "xorsi3"
745 [(set (match_operand:SI 0 "register_operand" "=l")
746 (xor:SI (match_operand:SI 1 "register_operand" "%0")
747 (match_operand:SI 2 "register_operand" "l")))]
751 (define_insn "one_cmplsi2"
752 [(set (match_operand:SI 0 "register_operand" "=l")
753 (not:SI (match_operand:SI 1 "register_operand" "l")))]
757 ;; Shift and rotation insns
;; ashl/ashr/lshr each have two alternatives: shift-by-immediate ('N',
;; three-register form) and shift-by-register (count in a low reg, value
;; tied to the destination).  rotrsi3 only has the register-count form.
;; NOTE(review): output templates are missing from this extract.
759 (define_insn "ashlsi3"
760 [(set (match_operand:SI 0 "register_operand" "=l,l")
761 (ashift:SI (match_operand:SI 1 "register_operand" "l,0")
762 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
768 (define_insn "ashrsi3"
769 [(set (match_operand:SI 0 "register_operand" "=l,l")
770 (ashiftrt:SI (match_operand:SI 1 "register_operand" "l,0")
771 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
777 (define_insn "lshrsi3"
778 [(set (match_operand:SI 0 "register_operand" "=l,l")
779 (lshiftrt:SI (match_operand:SI 1 "register_operand" "l,0")
780 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
786 (define_insn "rotrsi3"
787 [(set (match_operand:SI 0 "register_operand" "=l")
788 (rotatert:SI (match_operand:SI 1 "register_operand" "0")
789 (match_operand:SI 2 "register_operand" "l")))]
;; Comparison.  Thumb cmp-immediate only takes 0..255, so the expander
;; forces out-of-range constants into a register — except small negative
;; constants (-255..-1), which are negated and emitted as a cmn
;; (compare-negated) instruction instead.
795 (define_expand "cmpsi"
796 [(set (cc0) (compare (match_operand:SI 0 "register_operand" "")
797 (match_operand:SI 1 "nonmemory_operand" "")))]
800 if (GET_CODE (operands[1]) != REG && GET_CODE (operands[1]) != SUBREG)
802 if (GET_CODE (operands[1]) != CONST_INT
803 || (unsigned HOST_WIDE_INT) (INTVAL (operands[1])) >= 256)
805 if (GET_CODE (operands[1]) != CONST_INT
806 || INTVAL (operands[1]) < -255
807 || INTVAL (operands[1]) > 0)
808 operands[1] = force_reg (SImode, operands[1]);
811 operands[1] = force_reg (SImode,
812 GEN_INT (- INTVAL (operands[1])));
813 emit_insn (gen_cmnsi (operands[0], operands[1]));
820 (define_insn "*cmpsi_insn"
821 [(set (cc0) (compare (match_operand:SI 0 "register_operand" "l,*r,*h")
822 (match_operand:SI 1 "thumb_cmp_operand" "lI,*h,*r")))]
830 [(set (cc0) (match_operand:SI 0 "register_operand" "l"))]
835 [(set (cc0) (compare (match_operand:SI 0 "register_operand" "l")
836 (neg:SI (match_operand:SI 1 "register_operand" "l"))))]
;; Unconditional jump.  Length-driven template selection: a 2-byte B when
;; the target is within [-2048, 2044] of pc, otherwise a BL "far jump"
;; (and the far_jump attribute is derived from the chosen length so the
;; prologue knows LR is clobbered).  NOTE(review): the define_insn opener
;; and part of the length attribute are missing from this extract.
843 [(set (pc) (label_ref (match_operand 0 "" "")))]
846 if (get_attr_length (insn) == 2)
848 return \"bl\\t%l0\\t%@ far jump\";
849 "[(set (attr "far_jump")
850 (if_then_else (eq_attr "length" "4")
852 (const_string "no")))
854 (if_then_else (and (ge (minus (match_dup 0) (pc)) (const_int -2048))
855 (le (minus (match_dup 0) (pc)) (const_int 2044)))
;; Conditional-branch expanders (beq/bne/bge/ble/bgt/blt/bgeu/bleu/bgtu/
;; bltu): each emits an if_then_else on cc0 against zero; cc0 must have
;; been set by a preceding cmpsi/tstsi/cmnsi pattern above.
861 [(set (pc) (if_then_else (eq (cc0) (const_int 0))
862 (label_ref (match_operand 0 "" ""))
868 [(set (pc) (if_then_else (ne (cc0) (const_int 0))
869 (label_ref (match_operand 0 "" ""))
875 [(set (pc) (if_then_else (ge (cc0) (const_int 0))
876 (label_ref (match_operand 0 "" ""))
882 [(set (pc) (if_then_else (le (cc0) (const_int 0))
883 (label_ref (match_operand 0 "" ""))
889 [(set (pc) (if_then_else (gt (cc0) (const_int 0))
890 (label_ref (match_operand 0 "" ""))
896 [(set (pc) (if_then_else (lt (cc0) (const_int 0))
897 (label_ref (match_operand 0 "" ""))
902 (define_expand "bgeu"
903 [(set (pc) (if_then_else (geu (cc0) (const_int 0))
904 (label_ref (match_operand 0 "" ""))
909 (define_expand "bleu"
910 [(set (pc) (if_then_else (leu (cc0) (const_int 0))
911 (label_ref (match_operand 0 "" ""))
916 (define_expand "bgtu"
917 [(set (pc) (if_then_else (gtu (cc0) (const_int 0))
918 (label_ref (match_operand 0 "" ""))
923 (define_expand "bltu"
924 [(set (pc) (if_then_else (ltu (cc0) (const_int 0))
925 (label_ref (match_operand 0 "" ""))
;; Conditional branches, three size classes chosen by target distance:
;;   2 bytes: direct conditional branch (range [-252, 254]);
;;   4 bytes: inverted condition hops over an unconditional B
;;            (range [-2044, 2044]);
;;   else:    inverted condition hops over a BL far jump.
;; %= generates a unique .LCB local label per insn.  The *_reversed variant
;; is the same scheme with the branch arms swapped, so %d1/%D1 (condition /
;; inverted condition) trade places.
930 (define_insn "*cond_branch"
931 [(set (pc) (if_then_else (match_operator 1 "comparison_operator"
932 [(cc0) (const_int 0)])
933 (label_ref (match_operand 0 "" ""))
937 switch (get_attr_length (insn))
939 case 2: return \"b%d1\\t%l0\\t%@cond_branch\";
940 case 4: return \"b%D1\\t.LCB%=\;b\\t%l0\\t%@long jump\\n.LCB%=:\";
941 default: return \"b%D1\\t.LCB%=\;bl\\t%l0\\t%@far jump\\n.LCB%=:\";
943 "[(set (attr "far_jump")
944 (if_then_else (eq_attr "length" "6")
946 (const_string "no")))
949 (and (ge (minus (match_dup 0) (pc)) (const_int -252))
950 (le (minus (match_dup 0) (pc)) (const_int 254)))
952 (if_then_else (and (ge (minus (match_dup 0) (pc)) (const_int -2044))
953 (le (minus (match_dup 0) (pc)) (const_int 2044)))
957 (define_insn "*cond_branch_reversed"
958 [(set (pc) (if_then_else (match_operator 1 "comparison_operator"
959 [(cc0) (const_int 0)])
961 (label_ref (match_operand 0 "" ""))))]
964 switch (get_attr_length (insn))
966 case 2: return \"b%D1\\t%l0\\t%@cond_branch_reversed\";
967 case 4: return \"b%d1\\t.LCBR%=\;b\\t%l0\\t%@long jump\\n.LCBR%=:\";
968 default: return \"b%d1\\t.LCBR%=\;bl\\t%l0\\t%@far jump\\n.LCBR%=:\";
971 "[(set (attr "far_jump")
972 (if_then_else (eq_attr "length" "6")
974 (const_string "no")))
977 (and (ge (minus (match_dup 0) (pc)) (const_int -252))
978 (le (minus (match_dup 0) (pc)) (const_int 254)))
980 (if_then_else (and (ge (minus (match_dup 0) (pc)) (const_int -2044))
981 (le (minus (match_dup 0) (pc)) (const_int 2044)))
;; Computed jumps and function return.  tablejump is an indirect jump with
;; a (use label_ref) marking the dispatch table it indexes.  The return
;; pattern defers to output_return (); length 18 presumably covers the
;; worst-case epilogue sequence — TODO confirm against thumb.c.
985 (define_insn "indirect_jump"
986 [(set (pc) (match_operand:SI 0 "register_operand" "l*r"))]
990 (define_insn "tablejump"
991 [(set (pc) (match_operand:SI 0 "register_operand" "l*r"))
992 (use (label_ref (match_operand 1 "" "")))]
996 (define_insn "return"
999 "* return output_return ();"
1000 [(set_attr "length" "18")])
;; Call patterns.  Indirect calls go through per-register helper veneers
;; (_call_via_rN, or _interwork_call_via_rN when the caller must support
;; ARM<->Thumb interworking) because, per the comment below, a plain
;; "mov lr, pc; bx" would leave the bottom bit of LR clear and a bx-based
;; return would drop back into ARM state.  Direct calls require a
;; SYMBOL_REF operand.  call_value variants are identical but capture the
;; return value in operand 0 (a low register).
1004 (define_expand "call"
1005 [(call (match_operand:SI 0 "memory_operand" "")
1006 (match_operand 1 "" ""))]
1010 (define_insn "*call_indirect"
1011 [(call (mem:SI (match_operand:SI 0 "register_operand" "l*r"))
1012 (match_operand 1 "" ""))]
1013 "! TARGET_CALLER_INTERWORKING"
1014 "bl\\t%__call_via_%0"
1015 [(set_attr "length" "4")])
1016 ;; The non THUMB_INTERWORK, non TARGET_CALLER_INTERWORKING version
1017 ;; used to be: "mov\\tlr,pc\;bx\\t%0", but the mov does not set
1018 ;; the bottom bit of lr so that a function return (using bx)
1019 ;; would switch back into ARM mode...
1021 (define_insn "*call_indirect_interwork"
1022 [(call (mem:SI (match_operand:SI 0 "register_operand" "l*r"))
1023 (match_operand 1 "" ""))]
1024 "TARGET_CALLER_INTERWORKING"
1025 "bl\\t%__interwork_call_via_%0"
1026 [(set_attr "length" "4")])
1028 (define_expand "call_value"
1029 [(set (match_operand 0 "" "")
1030 (call (match_operand 1 "memory_operand" "")
1031 (match_operand 2 "" "")))]
1035 (define_insn "*call_value_indirect"
1036 [(set (match_operand 0 "" "=l")
1037 (call (mem:SI (match_operand:SI 1 "register_operand" "l*r"))
1038 (match_operand 2 "" "")))]
1039 "! TARGET_CALLER_INTERWORKING"
1040 "bl\\t%__call_via_%1"
1041 [(set_attr "length" "4")])
1042 ;; See comment for call_indirect pattern
1044 (define_insn "*call_value_indirect_interwork"
1045 [(set (match_operand 0 "" "=l")
1046 (call (mem:SI (match_operand:SI 1 "register_operand" "l*r"))
1047 (match_operand 2 "" "")))]
1048 "TARGET_CALLER_INTERWORKING"
1049 "bl\\t%__interwork_call_via_%1"
1050 [(set_attr "length" "4")])
;; Direct calls: the 'i' constraint plus the SYMBOL_REF condition restrict
;; these to compile-time-known targets (output templates missing from this
;; extract — presumably a plain bl).
1053 (define_insn "*call_insn"
1054 [(call (mem:SI (match_operand:SI 0 "" "i"))
1055 (match_operand:SI 1 "" ""))]
1056 "GET_CODE (operands[0]) == SYMBOL_REF"
1058 [(set_attr "length" "4")])
1060 (define_insn "*call_value_insn"
1061 [(set (match_operand 0 "register_operand" "=l")
1062 (call (mem:SI (match_operand 1 "" "i"))
1063 (match_operand 2 "" "")))]
1064 "GET_CODE (operands[1]) == SYMBOL_REF"
1066 [(set_attr "length" "4")])
1068 ;; Untyped call not required, since all funcs return in r0
1070 ;; Miscellaneous patterns
;; (clobber (const_int 0)) is the body of a zero-length placeholder insn
;; whose define_insn opener is missing from this extract (presumably "nop"
;; — TODO confirm).
1073 [(clobber (const_int 0))]
;; blockage: an unspec_volatile scheduling barrier that emits no code.
1077 (define_insn "blockage"
1078 [(unspec_volatile [(const_int 0)] 0)]
1081 [(set_attr "length" "0")])
;; Prologue/epilogue are expanded in C (thumb_expand_prologue/_epilogue);
;; the epilogue expander is suppressed when thumb_trivial_epilogue () says
;; nothing needs restoring.  *epilogue_insns emits the unexpanded sequence
;; via thumb_unexpanded_epilogue; 42 is presumably its worst-case length
;; in bytes — TODO confirm against thumb.c.
1083 (define_expand "prologue"
1087 thumb_expand_prologue ();
1091 (define_expand "epilogue"
1092 [(unspec_volatile [(const_int 0)] 1)]
1093 "! thumb_trivial_epilogue ()"
1095 thumb_expand_epilogue ();
1098 (define_insn "*epilogue_insns"
1099 [(unspec_volatile [(const_int 0)] 1)]
1102 return thumb_unexpanded_epilogue ();
1104 [(set_attr "length" "42")])
1106 ;; Special patterns for dealing with the constant pool
;; consttable_4/_8 emit one 4- or 8-byte literal-pool entry: floats go
;; through assemble_real (copied out of the CONST_DOUBLE via bcopy),
;; everything else through assemble_integer.
1108 (define_insn "consttable_4"
1109 [(unspec_volatile [(match_operand 0 "" "")] 2)]
1113 switch (GET_MODE_CLASS (GET_MODE (operands[0])))
1117 union real_extract u;
1118 bcopy ((char *) &CONST_DOUBLE_LOW (operands[0]), (char *) &u, sizeof u);
1119 assemble_real (u.d, GET_MODE (operands[0]));
1123 assemble_integer (operands[0], 4, 1);
1128 [(set_attr "length" "4")])
1130 (define_insn "consttable_8"
1131 [(unspec_volatile [(match_operand 0 "" "")] 3)]
1135 switch (GET_MODE_CLASS (GET_MODE (operands[0])))
1139 union real_extract u;
1140 bcopy ((char *) &CONST_DOUBLE_LOW (operands[0]), (char *) &u, sizeof u);
1141 assemble_real (u.d, GET_MODE (operands[0]));
1145 assemble_integer (operands[0], 8, 1);
1150 [(set_attr "length" "8")])
;; consttable_end: marks the end of a constant pool; currently emits
;; nothing.
1152 (define_insn "consttable_end"
1153 [(unspec_volatile [(const_int 0)] 4)]
1156 /* Nothing to do (currently). */
1160 (define_insn "align_4"
1161 [(unspec_volatile [(const_int 0)] 5)]
1164 assemble_align (32);