1 ;; Machine description of the Mitsubishi M32R cpu for GNU C compiler
2 ;; Copyright (C) 1996, 1997 Free Software Foundation, Inc.
4 ;; This file is part of GNU CC.
6 ;; GNU CC is free software; you can redistribute it and/or modify
7 ;; it under the terms of the GNU General Public License as published by
8 ;; the Free Software Foundation; either version 2, or (at your option)
11 ;; GNU CC is distributed in the hope that it will be useful,
12 ;; but WITHOUT ANY WARRANTY; without even the implied warranty of
13 ;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 ;; GNU General Public License for more details.
16 ;; You should have received a copy of the GNU General Public License
17 ;; along with GNU CC; see the file COPYING. If not, write to
18 ;; the Free Software Foundation, 59 Temple Place - Suite 330,
19 ;; Boston, MA 02111-1307, USA.
21 ;; See file "rtl.def" for documentation on define_insn, match_*, et. al.
;; ---------------------------------------------------------------------
;; Instruction attributes.
;; NOTE(review): this listing is truncated -- the opening
;; (define_attr "type" ... line (original line 30) and the const_int
;; arms / default of the "length" cond (originals 37-38, 56-62) are
;; missing, so the forms below are fragments.
28 ;; Insn type. Used to default other attribute values.
29 ;; move4 = 4 byte move
31 "move,move4,load,store,unary,binary,compare,shift,mul,div,uncond_branch,branch,call,multi,misc"
32 (const_string "misc"))
;; Default instruction length in bytes, selected by "type".  The short
;; (2 byte) encodings apply when the relevant operand is a register;
;; otherwise the 4 byte encoding is assumed.
35 (define_attr "length" ""
36 (cond [(eq_attr "type" "move,unary,shift,mul,div")
39 (eq_attr "type" "binary")
40 (if_then_else (match_operand 2 "register_operand" "")
41 (const_int 2) (const_int 4))
43 (eq_attr "type" "compare")
44 (if_then_else (match_operand 1 "register_operand" "")
45 (const_int 2) (const_int 4))
47 (eq_attr "type" "load")
48 (if_then_else (match_operand 1 "memreg_operand" "")
49 (const_int 2) (const_int 4))
51 (eq_attr "type" "store")
52 (if_then_else (match_operand 0 "memreg_operand" "")
53 (const_int 2) (const_int 4))
55 (eq_attr "type" "multi")
58 (eq_attr "type" "uncond_branch,branch,call")
63 ;; The length here is the length of a single asm. Unfortunately it might be
64 ;; 2 or 4 so we must allow for 4. That's ok though.
;; Defaults for inline asm statements: assume the worst-case 4 byte
;; length and schedule them as "multi" (multiple machine insns).
65 (define_asm_attributes
66 [(set_attr "length" "4")
67 (set_attr "type" "multi")])
69 ;; Function units of the M32R
70 ;; Units that take one cycle do not need to be specified.
72 ;; (define_function_unit {name} {num-units} {n-users} {test}
73 ;; {ready-delay} {issue-delay} [{conflict-list}])
75 ;; References to loaded registers should wait a cycle.
76 ;; Memory with load-delay of 1 (i.e. 2 cycle load).
;; Loads have a result latency of 2 cycles: a consumer of the loaded
;; register should be scheduled at least one cycle later.
77 (define_function_unit "memory" 1 1 (eq_attr "type" "load") 2 0)
79 ;; Hack to get GCC to better pack the instructions.
80 ;; We pretend there is a separate long function unit that conflicts with
81 ;; both the left and right 16 bit insn slots.
;; NOTE(review): the {ready-delay} {issue-delay} lines of the three
;; units below (originals 85, 90, 95) are missing from this listing.
83 (define_function_unit "left" 1 1
84 (eq_attr "length" "2")
86 [(not (eq_attr "length" "2"))])
;; NOTE(review): "right" tests (eq_attr "length" "1"), but "length" is
;; never 1 in this file (it is 2/4/6/8), so the unit's test can never
;; match.  "left" and "long" both key off "2" -- this looks like a typo
;; for "2"; confirm against the original m32r.md before changing.
88 (define_function_unit "right" 1 1
89 (eq_attr "length" "1")
91 [(not (eq_attr "length" "2"))])
93 (define_function_unit "long" 1 1
94 (not (eq_attr "length" "2"))
96 [(eq_attr "length" "2")])
98 ;; Expand prologue as RTL
101 ;(define_expand "prologue"
108 ;; Move instructions.
110 ;; For QI and HI moves, the register must contain the full properly
111 ;; sign-extended value. nonzero_bits assumes this [otherwise
112 ;; SHORT_IMMEDIATES_SIGN_EXTEND must be used, but the comment for it
113 ;; says it's a kludge and the .md files should be fixed instead].
;; movqi expander: legitimize stores by forcing the source into a
;; register, so the matcher below only sees reg<-reg/imm/mem and
;; mem<-reg alternatives.
115 (define_expand "movqi"
116 [(set (match_operand:QI 0 "general_operand" "")
117 (match_operand:QI 1 "general_operand" ""))]
121 /* Everything except mem = const or mem = mem can be done easily.
122 Objects in the small data area are handled too. */
124 if (GET_CODE (operands[0]) == MEM)
125 operands[1] = force_reg (QImode, operands[1]);
;; QImode move matcher; at least one operand must be a register.
;; NOTE(review): the output template lines (originals 132-137) are
;; missing from this listing.
128 (define_insn "*movqi_insn"
129 [(set (match_operand:QI 0 "move_dest_operand" "=r,r,r,r,m")
130 (match_operand:QI 1 "move_src_operand" "r,I,JQR,m,r"))]
131 "register_operand (operands[0], QImode) || register_operand (operands[1], QImode)"
138 [(set_attr "type" "move,move,move4,load,store")])
;; movhi expander: same strategy as movqi -- force the source of a
;; store into a register so mem = const / mem = mem never reach the
;; insn pattern below.
140 (define_expand "movhi"
141 [(set (match_operand:HI 0 "general_operand" "")
142 (match_operand:HI 1 "general_operand" ""))]
146 /* Everything except mem = const or mem = mem can be done easily. */
148 if (GET_CODE (operands[0]) == MEM)
149 operands[1] = force_reg (HImode, operands[1]);
;; HImode move matcher (adds the K constraint alternative that movqi
;; lacks).  NOTE(review): the output template lines (originals 156-162)
;; are missing from this listing.
152 (define_insn "*movhi_insn"
153 [(set (match_operand:HI 0 "move_dest_operand" "=r,r,r,r,r,m")
154 (match_operand:HI 1 "move_src_operand" "r,I,JQR,K,m,r"))]
155 "register_operand (operands[0], HImode) || register_operand (operands[1], HImode)"
163 [(set_attr "type" "move,move,move4,move4,load,store")])
;; movsi expander: besides legitimizing stores, it routes small-data
;; references through movsi_sda and (for medium/large code models)
;; 32 bit symbol addresses through movsi_addr32.
165 (define_expand "movsi"
166 [(set (match_operand:SI 0 "general_operand" "")
167 (match_operand:SI 1 "general_operand" ""))]
171 /* Everything except mem = const or mem = mem can be done easily. */
173 if (GET_CODE (operands[0]) == MEM)
174 operands[1] = force_reg (SImode, operands[1]);
176 /* Small Data Area reference? */
177 if (small_data_operand (operands[1], SImode))
179 emit_insn (gen_movsi_sda (operands[0], operands[1]));
183 /* If medium or large code model, symbols have to be loaded with
185 if (addr32_operand (operands[1], SImode))
187 emit_insn (gen_movsi_addr32 (operands[0], operands[1]));
;; SImode move matcher.  The N alternative emits a two-insn
;; seth/or3 sequence, hence its "multi" type below.
;; NOTE(review): the other template alternatives (originals 197-202,
;; 204-205) are missing from this listing.
192 (define_insn "*movsi_insn"
193 [(set (match_operand:SI 0 "move_dest_operand" "=r,r,r,r,r,r,r,m")
194 ;; ??? Do we need a const_double constraint here for large unsigned values?
195 (match_operand:SI 1 "move_src_operand" "r,I,J,MQ,L,N,m,r"))]
196 "register_operand (operands[0], SImode) || register_operand (operands[1], SImode)"
203 seth %0,%#%T1\;or3 %0,%0,%#%B1
206 [(set_attr "type" "move,move,move4,move4,move4,multi,load,store")])
208 ; Try to use a four byte / two byte pair for constants not loadable with
;; Split a hard SImode constant (not loadable by ldi/ld24/seth alone)
;; into two insns, preferring sequences whose second insn is 2 bytes:
;;   1. ld24 of the complement, then not   (if ~val fits in 24 bits)
;;   2. ld24 of a shifted value, then shift left
;;   3. fall back to seth (high half) + or3 (low half)
;; NOTE(review): the split condition, shift-loop header and several
;; closing lines are missing from this listing.
212 [(set (match_operand:SI 0 "register_operand" "")
213 (match_operand:SI 1 "two_insn_const_operand" ""))]
215 [(set (match_dup 0) (match_dup 2))
216 (set (match_dup 0) (ior:SI (match_dup 0) (match_dup 3)))]
219 unsigned HOST_WIDE_INT val = INTVAL (operands[1]);
220 unsigned HOST_WIDE_INT tmp;
223 /* In all cases we will emit two instructions. However we try to
224 use 2 byte instructions wherever possible. We can assume the
225 constant isn't loadable with any of ldi, ld24, or seth. */
227 /* See if we can load a 24 bit unsigned value and invert it. */
228 if (UINT24_P (~ val))
230 emit_insn (gen_movsi (operands[0], GEN_INT (~ val)));
231 emit_insn (gen_one_cmplsi2 (operands[0], operands[0]));
235 /* See if we can load a 24 bit unsigned value and shift it into place.
236 0x01fffffe is just beyond ld24's range. */
237 for (shift = 1, tmp = 0x01fffffe;
241 if ((val & ~tmp) == 0)
243 emit_insn (gen_movsi (operands[0], GEN_INT (val >> shift)));
244 emit_insn (gen_ashlsi3 (operands[0], operands[0], GEN_INT (shift)));
249 /* Can't use any two byte insn, fall back to seth/or3. */
250 operands[2] = GEN_INT ((val) & 0xffff0000);
251 operands[3] = GEN_INT ((val) & 0xffff);
254 ;; Small data area support.
255 ;; The address of _SDA_BASE_ is loaded into a register and all objects in
256 ;; the small data area are indexed off that. This is done for each reference
257 ;; but cse will clean things up for us. We let the compiler choose the
258 ;; register to use so we needn't allocate (and maybe even fix) a special
259 ;; register to use. Since the load and store insns have a 16 bit offset the
260 ;; total size of the data area can be 64K. However, if the data area lives
261 ;; above 16M (24 bits), _SDA_BASE_ will have to be loaded with seth/add3 which
262 ;; would then yield 3 instructions to reference an object [though there would
263 ;; be no net loss if two or more objects were referenced]. The 3 insns can be
264 ;; reduced back to 2 if the size of the small data area were reduced to 32K
265 ;; [then seth + ld/st would work for any object in the area]. Doing this
266 ;; would require special handling of _SDA_BASE_ (its value would be
267 ;; (.sdata + 32K) & 0xffff0000) and reloc computations would be different
268 ;; [I think]. What to do about this is deferred until later and for now we
269 ;; require .sdata to be in the first 16M.
;; Expand an SDA reference: load _SDA_BASE_ (the unspec), then form the
;; address with lo_sum.  operands[2] is the base register: during
;; reload we must reuse operands[0] since no new pseudo can be made.
;; NOTE(review): the pattern's opening [(set (match_dup 2) line
;; (original 272) is missing from this listing.
271 (define_expand "movsi_sda"
273 (unspec [(const_int 0)] 2))
274 (set (match_operand:SI 0 "register_operand" "")
275 (lo_sum:SI (match_dup 2)
276 (match_operand:SI 1 "small_data_operand" "")))]
280 if (reload_in_progress || reload_completed)
281 operands[2] = operands[0];
283 operands[2] = gen_reg_rtx (SImode);
;; Materialize the SDA base pointer; _SDA_BASE_ is assumed to be in
;; the first 16M so a single ld24 suffices (see comment above).
286 (define_insn "*load_sda_base"
287 [(set (match_operand:SI 0 "register_operand" "=r")
288 (unspec [(const_int 0)] 2))]
290 "ld24 %0,#_SDA_BASE_"
291 [(set_attr "type" "move4")])
293 ;; 32 bit address support.
;; Expand a full 32 bit symbol address as high + lo_sum (seth/add3).
;; As with movsi_sda, operands[2] must reuse operands[0] once reload
;; has started.  NOTE(review): the pattern's opening [(set (match_dup 2)
;; line (original 296) is missing from this listing.
295 (define_expand "movsi_addr32"
297 ; addr32_operand isn't used because it's too restrictive,
298 ; seth_add3_operand is more general and thus safer.
299 (high:SI (match_operand:SI 1 "seth_add3_operand" "")))
300 (set (match_operand:SI 0 "register_operand" "")
301 (lo_sum:SI (match_dup 2) (match_dup 1)))]
305 if (reload_in_progress || reload_completed)
306 operands[2] = operands[0];
308 operands[2] = gen_reg_rtx (SImode);
;; Load the high 16 bits of a symbolic address.
311 (define_insn "set_hi_si"
312 [(set (match_operand:SI 0 "register_operand" "=r")
313 (high:SI (match_operand 1 "symbolic_operand" "")))]
315 "seth %0,%#shigh(%1)"
316 [(set_attr "type" "move4")])
;; Combine the high part with the low 16 bits.
;; NOTE(review): the output template line (original ~322-323) is
;; missing from this listing.
318 (define_insn "lo_sum_si"
319 [(set (match_operand:SI 0 "register_operand" "=r")
320 (lo_sum:SI (match_operand:SI 1 "register_operand" "r")
321 (match_operand:SI 2 "immediate_operand" "in")))]
324 [(set_attr "length" "4")])
;; movdi expander: stores get their source forced into registers, and
;; "hard" DImode constants are spilled to the constant pool (with the
;; pool address forced into a register; during reload the destination's
;; own hard register is reused for that address).
326 (define_expand "movdi"
327 [(set (match_operand:DI 0 "general_operand" "")
328 (match_operand:DI 1 "general_operand" ""))]
332 /* Everything except mem = const or mem = mem can be done easily. */
334 if (GET_CODE (operands[0]) == MEM)
335 operands[1] = force_reg (DImode, operands[1]);
337 if (CONSTANT_P (operands[1])
338 && ! easy_di_const (operands[1]))
340 rtx mem = force_const_mem (DImode, operands[1]);
341 rtx reg = ((reload_in_progress || reload_completed)
342 ? copy_to_suggested_reg (XEXP (mem, 0),
343 gen_rtx (REG, Pmode, REGNO (operands[0])),
345 : force_reg (Pmode, XEXP (mem, 0)));
346 operands[1] = change_address (mem, DImode, reg);
;; DImode move, emitted as a register-pair sequence.  Ordering care:
;; reg-reg copies pick the direction that avoids clobbering a source
;; half; loads whose address uses the destination's low register load
;; the high half first; otherwise post/pre-increment addressing is
;; used when the address register dies here.
350 (define_insn "*movdi_insn"
351 [(set (match_operand:DI 0 "move_dest_operand" "=r,r,r,m")
352 (match_operand:DI 1 "move_double_src_operand" "r,nG,m,r"))]
353 "register_operand (operands[0], DImode) || register_operand (operands[1], DImode)"
356 switch (which_alternative)
359 /* We normally copy the low-numbered register first. However, if
360 the first register operand 0 is the same as the second register of
361 operand 1, we must copy in the opposite order. */
362 if (REGNO (operands[0]) == REGNO (operands[1]) + 1)
363 return \"mv %R0,%R1\;mv %0,%1\";
365 return \"mv %0,%1\;mv %R0,%R1\";
369 /* If the low-address word is used in the address, we must load it
370 last. Otherwise, load it first. Note that we cannot have
371 auto-increment in that case since the address register is known to be
373 if (refers_to_regno_p (REGNO (operands[0]), REGNO (operands[0]) + 1,
376 return \"ld %R0,%R1\;ld %0,%1\";
380 /* Try to use auto-inc addressing if we can. */
381 if (GET_CODE (XEXP (operands[1], 0)) == REG
382 && dead_or_set_p (insn, XEXP (operands[1], 0)))
384 operands[1] = XEXP (operands[1], 0);
385 return \"ld %0,@%1+\;ld %R0,@%1\";
387 return \"ld %0,%1\;ld %R0,%R1\";
390 /* Try to use auto-inc addressing if we can. */
391 if (GET_CODE (XEXP (operands[0], 0)) == REG
392 && dead_or_set_p (insn, XEXP (operands[0], 0)))
394 operands[0] = XEXP (operands[0], 0);
395 return \"st %1,@%0\;st %R1,@+%0\";
397 return \"st %1,%0\;st %R1,%R0\";
400 [(set_attr "type" "multi,multi,multi,multi")
401 (set_attr "length" "4,4,6,6")])
;; Split a DImode constant load into two SImode subreg sets, with the
;; word order chosen by WORDS_BIG_ENDIAN.
;; NOTE(review): the split's opening (define_split line and condition
;; are missing from this listing.
404 [(set (match_operand:DI 0 "register_operand" "")
405 (match_operand:DI 1 "const_double_operand" ""))]
407 [(set (match_dup 2) (match_dup 4))
408 (set (match_dup 3) (match_dup 5))]
411 operands[2] = gen_rtx (SUBREG, SImode, operands[0], WORDS_BIG_ENDIAN == 0);
412 operands[3] = gen_rtx (SUBREG, SImode, operands[0], WORDS_BIG_ENDIAN != 0);
413 split_double (operands[1], operands + 4, operands + 5);
416 ;; Floating point move insns.
;; movsf expander: force the source of a store into a register, as for
;; the integer modes above.
418 (define_expand "movsf"
419 [(set (match_operand:SF 0 "general_operand" "")
420 (match_operand:SF 1 "general_operand" ""))]
424 /* Everything except mem = const or mem = mem can be done easily. */
426 if (GET_CODE (operands[0]) == MEM)
427 operands[1] = force_reg (SFmode, operands[1]);
;; SFmode move.  Constants (alternative F) are converted to their
;; target bit pattern and emitted as ldi / seth / seth+or3 depending
;; on which halves are zero.
430 (define_insn "*movsf_insn"
431 [(set (match_operand:SF 0 "move_dest_operand" "=r,r,r,m")
432 (match_operand:SF 1 "move_src_operand" "r,F,m,r"))]
433 "register_operand (operands[0], SFmode) || register_operand (operands[1], SFmode)"
436 switch (which_alternative)
444 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[1]);
445 REAL_VALUE_TO_TARGET_SINGLE (r, l);
446 operands[1] = GEN_INT (l);
;; NOTE(review): the test guarding this "ldi %0,%#0" return (original
;; 447, presumably l == 0) is missing from this listing.
448 return \"ldi %0,%#0\";
449 if ((l & 0xffff) == 0)
450 return \"seth %0,%#%T1\";
452 return \"seth %0,%#%T1\;or3 %0,%0,%#%B1\";
460 ;; ??? Length of alternative 1 is either 2, 4 or 8.
461 [(set_attr "type" "move,multi,load,store")])
;; movdf expander: mirrors movdi -- stores get a register source, and
;; non-easy DFmode constants are placed in the constant pool with the
;; pool address forced into a register (reusing the destination's hard
;; register during reload).
463 (define_expand "movdf"
464 [(set (match_operand:DF 0 "general_operand" "")
465 (match_operand:DF 1 "general_operand" ""))]
469 /* Everything except mem = const or mem = mem can be done easily. */
471 if (GET_CODE (operands[0]) == MEM)
472 operands[1] = force_reg (DFmode, operands[1]);
474 if (GET_CODE (operands[1]) == CONST_DOUBLE
475 && ! easy_df_const (operands[1]))
477 rtx mem = force_const_mem (DFmode, operands[1]);
478 rtx reg = ((reload_in_progress || reload_completed)
479 ? copy_to_suggested_reg (XEXP (mem, 0),
480 gen_rtx (REG, Pmode, REGNO (operands[0])),
482 : force_reg (Pmode, XEXP (mem, 0)));
483 operands[1] = change_address (mem, DFmode, reg);
;; DFmode move as a register-pair sequence; same ordering and
;; auto-increment logic as *movdi_insn, plus special-cased "easy"
;; constants (H) emitted via ldi/seth.
487 (define_insn "*movdf_insn"
488 [(set (match_operand:DF 0 "move_dest_operand" "=r,r,r,m")
489 (match_operand:DF 1 "move_double_src_operand" "r,H,m,r"))]
490 "register_operand (operands[0], DFmode) || register_operand (operands[1], DFmode)"
493 switch (which_alternative)
496 /* We normally copy the low-numbered register first. However, if
497 the first register operand 0 is the same as the second register of
498 operand 1, we must copy in the opposite order. */
499 if (REGNO (operands[0]) == REGNO (operands[1]) + 1)
500 return \"mv %R0,%R1\;mv %0,%1\";
502 return \"mv %0,%1\;mv %R0,%R1\";
507 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[1]);
508 REAL_VALUE_TO_TARGET_DOUBLE (r, l);
509 operands[1] = GEN_INT (l[0]);
510 if (l[0] == 0 && l[1] == 0)
511 return \"ldi %0,%#0\;ldi %R0,%#0\";
514 else if ((l[0] & 0xffff) == 0)
515 return \"seth %0,%#%T1\;ldi %R0,%#0\";
520 /* If the low-address word is used in the address, we must load it
521 last. Otherwise, load it first. Note that we cannot have
522 auto-increment in that case since the address register is known to be
524 if (refers_to_regno_p (REGNO (operands[0]), REGNO (operands[0]) + 1,
527 return \"ld %R0,%R1\;ld %0,%1\";
531 /* Try to use auto-inc addressing if we can. */
532 if (GET_CODE (XEXP (operands[1], 0)) == REG
533 && dead_or_set_p (insn, XEXP (operands[1], 0)))
535 operands[1] = XEXP (operands[1], 0);
536 return \"ld %0,@%1+\;ld %R0,@%1\";
538 return \"ld %0,%1\;ld %R0,%R1\";
541 /* Try to use auto-inc addressing if we can. */
542 if (GET_CODE (XEXP (operands[0], 0)) == REG
543 && dead_or_set_p (insn, XEXP (operands[0], 0)))
545 operands[0] = XEXP (operands[0], 0);
546 return \"st %1,@%0\;st %R1,@+%0\";
548 return \"st %1,%0\;st %R1,%R0\";
551 [(set_attr "type" "multi,multi,multi,multi")
552 (set_attr "length" "4,6,6,6")])
554 ;; Zero extension instructions.
;; Each pattern accepts a register (explicit mask/extend) or a memory
;; source (unsigned load).  NOTE(review): the output templates
;; (originals 559-562, 569-572, 579-582) are missing from this listing.
556 (define_insn "zero_extendqihi2"
557 [(set (match_operand:HI 0 "register_operand" "=r,r")
558 (zero_extend:HI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
563 [(set_attr "type" "unary,load")
564 (set_attr "length" "4,*")])
566 (define_insn "zero_extendqisi2"
567 [(set (match_operand:SI 0 "register_operand" "=r,r")
568 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
573 [(set_attr "type" "unary,load")
574 (set_attr "length" "4,*")])
576 (define_insn "zero_extendhisi2"
577 [(set (match_operand:SI 0 "register_operand" "=r,r")
578 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
583 [(set_attr "type" "unary,load")
584 (set_attr "length" "4,*")])
586 ;; Sign extension instructions.
589 ;; These patterns originally accepted general_operands, however, slightly
590 ;; better code is generated by only accepting register_operands, and then
591 ;; letting combine generate the lds[hb] insns.
592 ;; [This comment copied from sparc.md, I think.]
;; Register-to-register sign extension is synthesized as a left shift
;; followed by an arithmetic right shift (24 bits for QI, 16 for HI),
;; after unwrapping any SUBREGs on the operands; memory sources are
;; matched by the dedicated load insns below.
594 (define_expand "extendqihi2"
595 [(set (match_operand:HI 0 "register_operand" "")
596 (sign_extend:HI (match_operand:QI 1 "register_operand" "")))]
600 rtx temp = gen_reg_rtx (SImode);
601 rtx shift_24 = gen_rtx (CONST_INT, VOIDmode, 24);
605 if (GET_CODE (operand1) == SUBREG)
607 op1_subword = SUBREG_WORD (operand1);
608 operand1 = XEXP (operand1, 0);
610 if (GET_CODE (operand0) == SUBREG)
612 op0_subword = SUBREG_WORD (operand0);
613 operand0 = XEXP (operand0, 0);
615 emit_insn (gen_ashlsi3 (temp, gen_rtx (SUBREG, SImode, operand1,
618 if (GET_MODE (operand0) != SImode)
619 operand0 = gen_rtx (SUBREG, SImode, operand0, op0_subword);
620 emit_insn (gen_ashrsi3 (operand0, temp, shift_24));
;; Sign-extending byte load (ldb).  NOTE(review): output template
;; lines are missing from this listing for the three insns below.
624 (define_insn "*sign_extendqihi2_insn"
625 [(set (match_operand:HI 0 "register_operand" "=r")
626 (sign_extend:HI (match_operand:QI 1 "memory_operand" "m")))]
629 [(set_attr "type" "load")])
631 (define_expand "extendqisi2"
632 [(set (match_operand:SI 0 "register_operand" "")
633 (sign_extend:SI (match_operand:QI 1 "register_operand" "")))]
637 rtx temp = gen_reg_rtx (SImode);
638 rtx shift_24 = gen_rtx (CONST_INT, VOIDmode, 24);
641 if (GET_CODE (operand1) == SUBREG)
643 op1_subword = SUBREG_WORD (operand1);
644 operand1 = XEXP (operand1, 0);
647 emit_insn (gen_ashlsi3 (temp, gen_rtx (SUBREG, SImode, operand1,
650 emit_insn (gen_ashrsi3 (operand0, temp, shift_24));
654 (define_insn "*sign_extendqisi2_insn"
655 [(set (match_operand:SI 0 "register_operand" "=r")
656 (sign_extend:SI (match_operand:QI 1 "memory_operand" "m")))]
659 [(set_attr "type" "load")])
661 (define_expand "extendhisi2"
662 [(set (match_operand:SI 0 "register_operand" "")
663 (sign_extend:SI (match_operand:HI 1 "register_operand" "")))]
667 rtx temp = gen_reg_rtx (SImode);
668 rtx shift_16 = gen_rtx (CONST_INT, VOIDmode, 16);
671 if (GET_CODE (operand1) == SUBREG)
673 op1_subword = SUBREG_WORD (operand1);
674 operand1 = XEXP (operand1, 0);
677 emit_insn (gen_ashlsi3 (temp, gen_rtx (SUBREG, SImode, operand1,
680 emit_insn (gen_ashrsi3 (operand0, temp, shift_16));
684 (define_insn "*sign_extendhisi2_insn"
685 [(set (match_operand:SI 0 "register_operand" "=r")
686 (sign_extend:SI (match_operand:HI 1 "memory_operand" "m")))]
689 [(set_attr "type" "load")])
691 ;; Arithmetic instructions.
693 ; ??? Adding an alternative to split add3 of small constants into two
694 ; insns yields better instruction packing but slower code. Adds of small
695 ; values is done a lot.
;; SImode add: 2 byte add/addi for reg,reg and small-immediate forms,
;; 4 byte add3 for the 16 bit immediate (J) form.
;; NOTE(review): output template lines (originals 701-705) are missing
;; from this listing.
697 (define_insn "addsi3"
698 [(set (match_operand:SI 0 "register_operand" "=r,r,r")
699 (plus:SI (match_operand:SI 1 "register_operand" "%0,0,r")
700 (match_operand:SI 2 "nonmemory_operand" "r,I,J")))]
706 [(set_attr "type" "binary")
707 (set_attr "length" "2,2,4")])
;; Disabled experiment (see ??? comment above): split small-constant
;; adds into mv + addi for better packing.
710 ; [(set (match_operand:SI 0 "register_operand" "")
711 ; (plus:SI (match_operand:SI 1 "register_operand" "")
712 ; (match_operand:SI 2 "int8_operand" "")))]
714 ; && REGNO (operands[0]) != REGNO (operands[1])
715 ; && INT8_P (INTVAL (operands[2]))
716 ; && INTVAL (operands[2]) != 0"
717 ; [(set (match_dup 0) (match_dup 1))
718 ; (set (match_dup 0) (plus:SI (match_dup 0) (match_dup 2)))]
;; DImode add via addx carry chain; the initial cmp of a register with
;; itself clears the condition (carry) bit, and reg 17 (the condition
;; bit) is clobbered.
721 (define_insn "adddi3"
722 [(set (match_operand:DI 0 "register_operand" "=r")
723 (plus:DI (match_operand:DI 1 "register_operand" "%0")
724 (match_operand:DI 2 "register_operand" "r")))
725 (clobber (reg:CC 17))]
729 /* ??? The cmp clears the condition bit. Can we speed up somehow? */
730 return \"cmp %L0,%L0\;addx %L0,%L2\;addx %H0,%H2\";
732 [(set_attr "type" "binary")
733 (set_attr "length" "6")])
;; SImode subtract (register-only; immediates are handled by addsi3
;; with a negated constant).  NOTE(review): the output template line
;; is missing from this listing.
735 (define_insn "subsi3"
736 [(set (match_operand:SI 0 "register_operand" "=r")
737 (minus:SI (match_operand:SI 1 "register_operand" "0")
738 (match_operand:SI 2 "register_operand" "r")))]
741 [(set_attr "type" "binary")])
;; DImode subtract via subx borrow chain; same condition-bit-clearing
;; cmp trick as adddi3.
743 (define_insn "subdi3"
744 [(set (match_operand:DI 0 "register_operand" "=r")
745 (minus:DI (match_operand:DI 1 "register_operand" "0")
746 (match_operand:DI 2 "register_operand" "r")))
747 (clobber (reg:CC 17))]
751 /* ??? The cmp clears the condition bit. Can we speed up somehow? */
752 return \"cmp %L0,%L0\;subx %L0,%L2\;subx %H0,%H2\";
754 [(set_attr "type" "binary")
755 (set_attr "length" "6")])
757 ; Multiply/Divide instructions.
;; 16x16->32 multiply through the accumulator: mullo then read the
;; middle 32 bits back with mvfacmi.
759 (define_insn "mulhisi3"
760 [(set (match_operand:SI 0 "register_operand" "=r")
761 (mult:SI (sign_extend:SI (match_operand:HI 1 "register_operand" "r"))
762 (sign_extend:SI (match_operand:HI 2 "register_operand" "r"))))]
764 "mullo %1,%2\;mvfacmi %0"
765 [(set_attr "type" "mul")
766 (set_attr "length" "4")])
;; NOTE(review): the output template lines of the five insns below are
;; missing from this listing (presumably mul/div/divu/rem/remu).
768 (define_insn "mulsi3"
769 [(set (match_operand:SI 0 "register_operand" "=r")
770 (mult:SI (match_operand:SI 1 "register_operand" "%0")
771 (match_operand:SI 2 "register_operand" "r")))]
774 [(set_attr "type" "mul")])
776 (define_insn "divsi3"
777 [(set (match_operand:SI 0 "register_operand" "=r")
778 (div:SI (match_operand:SI 1 "register_operand" "0")
779 (match_operand:SI 2 "register_operand" "r")))]
782 [(set_attr "type" "div")])
784 (define_insn "udivsi3"
785 [(set (match_operand:SI 0 "register_operand" "=r")
786 (udiv:SI (match_operand:SI 1 "register_operand" "0")
787 (match_operand:SI 2 "register_operand" "r")))]
790 [(set_attr "type" "div")])
792 (define_insn "modsi3"
793 [(set (match_operand:SI 0 "register_operand" "=r")
794 (mod:SI (match_operand:SI 1 "register_operand" "0")
795 (match_operand:SI 2 "register_operand" "r")))]
798 [(set_attr "type" "div")])
800 (define_insn "umodsi3"
801 [(set (match_operand:SI 0 "register_operand" "=r")
802 (umod:SI (match_operand:SI 1 "register_operand" "0")
803 (match_operand:SI 2 "register_operand" "r")))]
806 [(set_attr "type" "div")])
808 ;; Boolean instructions.
810 ;; We don't define the DImode versions as expand_binop does a good enough job.
811 ;; And if it doesn't it should be fixed.
;; and/or/xor: 2 byte reg,reg form and a 4 byte 3-operand immediate
;; (K) form; %X2 in the template comment prints the constant in hex.
813 (define_insn "andsi3"
814 [(set (match_operand:SI 0 "register_operand" "=r,r")
815 (and:SI (match_operand:SI 1 "register_operand" "%0,r")
816 (match_operand:SI 2 "nonmemory_operand" "r,K")))]
820 and3 %0,%1,%#%2 ; %X2"
821 [(set_attr "type" "binary")])
823 (define_insn "iorsi3"
824 [(set (match_operand:SI 0 "register_operand" "=r,r")
825 (ior:SI (match_operand:SI 1 "register_operand" "%0,r")
826 (match_operand:SI 2 "nonmemory_operand" "r,K")))]
830 or3 %0,%1,%#%2 ; %X2"
831 [(set_attr "type" "binary")])
833 (define_insn "xorsi3"
834 [(set (match_operand:SI 0 "register_operand" "=r,r")
835 (xor:SI (match_operand:SI 1 "register_operand" "%0,r")
836 (match_operand:SI 2 "nonmemory_operand" "r,K")))]
840 xor3 %0,%1,%#%2 ; %X2"
841 [(set_attr "type" "binary")])
;; NOTE(review): output template lines for neg/not are missing from
;; this listing.
843 (define_insn "negsi2"
844 [(set (match_operand:SI 0 "register_operand" "=r")
845 (neg:SI (match_operand:SI 1 "register_operand" "r")))]
848 [(set_attr "type" "unary")])
850 (define_insn "one_cmplsi2"
851 [(set (match_operand:SI 0 "register_operand" "=r")
852 (not:SI (match_operand:SI 1 "register_operand" "r")))]
855 [(set_attr "type" "unary")])
857 ;; Shift instructions.
;; Each shift has three alternatives: 2 byte register shift, 2 byte
;; 5-bit immediate (O) shift, and 4 byte 3-operand 16 bit immediate
;; (K) shift.  NOTE(review): output template lines (e.g. originals
;; 863-867) are missing from this listing.
859 (define_insn "ashlsi3"
860 [(set (match_operand:SI 0 "register_operand" "=r,r,r")
861 (ashift:SI (match_operand:SI 1 "register_operand" "0,0,r")
862 (match_operand:SI 2 "reg_or_uint16_operand" "r,O,K")))]
868 [(set_attr "type" "shift")
869 (set_attr "length" "2,2,4")])
871 (define_insn "ashrsi3"
872 [(set (match_operand:SI 0 "register_operand" "=r,r,r")
873 (ashiftrt:SI (match_operand:SI 1 "register_operand" "0,0,r")
874 (match_operand:SI 2 "reg_or_uint16_operand" "r,O,K")))]
880 [(set_attr "type" "shift")
881 (set_attr "length" "2,2,4")])
883 (define_insn "lshrsi3"
884 [(set (match_operand:SI 0 "register_operand" "=r,r,r")
885 (lshiftrt:SI (match_operand:SI 1 "register_operand" "0,0,r")
886 (match_operand:SI 2 "reg_or_uint16_operand" "r,O,K")))]
892 [(set_attr "type" "shift")
893 (set_attr "length" "2,2,4")])
895 ;; Compare instructions.
896 ;; This controls RTL generation and register allocation.
898 ;; We generate RTL for comparisons and branches by having the cmpxx
899 ;; patterns store away the operands. Then the bcc patterns
900 ;; emit RTL for both the compare and the branch.
902 ;; On the m32r it is more efficient to use the bxxz instructions and
903 ;; thus merge the compare and branch into one instruction, so they are
;; cmpsi does not emit a compare; it only records the operands in the
;; m32r_compare_op0/1 globals for the following bCC expander.
906 (define_expand "cmpsi"
908 (compare:CC (match_operand:SI 0 "register_operand" "")
909 (match_operand:SI 1 "nonmemory_operand" "")))]
913 m32r_compare_op0 = operands[0];
914 m32r_compare_op1 = operands[1];
918 ;; The cmp_xxx_insn patterns set the condition bit to the result of the
919 ;; comparison. There isn't a "compare equal" instruction so cmp_eqsi_insn
920 ;; is quite inefficient. However, it is rarely used.
;; EQ compare: subtract into a scratch, then cmpui scratch,#1 so the
;; condition bit is set iff the difference was zero.
922 (define_insn "cmp_eqsi_insn"
924 (eq:CC (match_operand:SI 0 "register_operand" "r,r")
925 (match_operand:SI 1 "reg_or_cmp_int16_operand" "r,P")))
926 (clobber (match_scratch:SI 2 "=&r,&r"))]
929 mv %2,%0\;sub %2,%1\;cmpui %2,#1
930 add3 %2,%0,%#%N1\;cmpui %2,#1"
931 [(set_attr "type" "compare,compare")
932 (set_attr "length" "8,8")])
;; Signed and unsigned less-than set the condition bit directly.
;; NOTE(review): output templates (originals 938-941, 948-951) are
;; missing from this listing.
934 (define_insn "cmp_ltsi_insn"
936 (lt:CC (match_operand:SI 0 "register_operand" "r,r")
937 (match_operand:SI 1 "reg_or_int16_operand" "r,J")))]
942 [(set_attr "type" "compare")])
944 (define_insn "cmp_ltusi_insn"
946 (ltu:CC (match_operand:SI 0 "register_operand" "r,r")
947 (match_operand:SI 1 "reg_or_uint16_operand" "r,K")))]
952 [(set_attr "type" "compare")])
954 ;; reg == small constant comparisons are best handled by putting the result
955 ;; of the comparison in a tmp reg and then using beqz/bnez.
956 ;; ??? The result register doesn't contain 0/STORE_FLAG_VALUE,
957 ;; it contains 0/non-zero.
959 (define_insn "cmp_ne_small_const_insn"
960 [(set (match_operand:SI 0 "register_operand" "=r")
961 (ne:SI (match_operand:SI 1 "register_operand" "r")
962 (match_operand:SI 2 "cmp_int16_operand" "P")))]
965 [(set_attr "type" "compare")
966 (set_attr "length" "4")])
968 ;; These control RTL generation for conditional jump insns.
;; Each bCC expander pairs the condition built by gen_compare from the
;; operands saved by cmpsi with the branch target.
;; NOTE(review): the (define_expand "beq"/"bne"/"bgt" opening lines and
;; the else-arm/condition lines of every expander here are missing
;; from this listing.
972 (if_then_else (match_dup 1)
973 (label_ref (match_operand 0 "" ""))
978 operands[1] = gen_compare (EQ, m32r_compare_op0, m32r_compare_op1);
983 (if_then_else (match_dup 1)
984 (label_ref (match_operand 0 "" ""))
989 operands[1] = gen_compare (NE, m32r_compare_op0, m32r_compare_op1);
994 (if_then_else (match_dup 1)
995 (label_ref (match_operand 0 "" ""))
1000 operands[1] = gen_compare (GT, m32r_compare_op0, m32r_compare_op1);
1003 (define_expand "ble"
1005 (if_then_else (match_dup 1)
1006 (label_ref (match_operand 0 "" ""))
1011 operands[1] = gen_compare (LE, m32r_compare_op0, m32r_compare_op1);
1014 (define_expand "bge"
1016 (if_then_else (match_dup 1)
1017 (label_ref (match_operand 0 "" ""))
1022 operands[1] = gen_compare (GE, m32r_compare_op0, m32r_compare_op1);
1025 (define_expand "blt"
1027 (if_then_else (match_dup 1)
1028 (label_ref (match_operand 0 "" ""))
1033 operands[1] = gen_compare (LT, m32r_compare_op0, m32r_compare_op1);
1036 (define_expand "bgtu"
1038 (if_then_else (match_dup 1)
1039 (label_ref (match_operand 0 "" ""))
1044 operands[1] = gen_compare (GTU, m32r_compare_op0, m32r_compare_op1);
1047 (define_expand "bleu"
1049 (if_then_else (match_dup 1)
1050 (label_ref (match_operand 0 "" ""))
1055 operands[1] = gen_compare (LEU, m32r_compare_op0, m32r_compare_op1);
1058 (define_expand "bgeu"
1060 (if_then_else (match_dup 1)
1061 (label_ref (match_operand 0 "" ""))
1066 operands[1] = gen_compare (GEU, m32r_compare_op0, m32r_compare_op1);
1069 (define_expand "bltu"
1071 (if_then_else (match_dup 1)
1072 (label_ref (match_operand 0 "" ""))
1077 operands[1] = gen_compare (LTU, m32r_compare_op0, m32r_compare_op1);
1080 ;; Now match both normal and inverted jump.
;; Branch on the condition bit (reg 17).  The length attr picks the
;; short bc/bnc form when the target is near (conservative 400/800
;; thresholds, see comment below).
;; NOTE(review): several template and attribute lines of both insns
;; are missing from this listing.
1082 (define_insn "*branch_insn"
1084 (if_then_else (match_operator 1 "eqne_comparison_operator"
1085 [(reg 17) (const_int 0)])
1086 (label_ref (match_operand 0 "" ""))
1091 if (GET_CODE (operands[1]) == NE)
1096 [(set_attr "type" "branch")
1097 ; We use 400/800 instead of 512,1024 to account for inaccurate insn
1098 ; lengths and insn alignments that are complex to track.
1099 ; It's not important that we be hyper-precise here. It may be more
1100 ; important blah blah blah when the chip supports parallel execution
1101 ; blah blah blah but until then blah blah blah this is simple and
1103 (set (attr "length") (if_then_else (ltu (plus (minus (match_dup 0) (pc))
;; Same branch with arms swapped (fall through on true), so the
;; emitted opcode is the inverse of the rtx comparison.
1109 (define_insn "*rev_branch_insn"
1111 (if_then_else (match_operator 1 "eqne_comparison_operator"
1112 [(reg 17) (const_int 0)])
1114 (label_ref (match_operand 0 "" ""))))]
1115 ;"REVERSIBLE_CC_MODE (GET_MODE (XEXP (operands[1], 0)))"
1119 if (GET_CODE (operands[1]) == EQ)
1124 [(set_attr "type" "branch")
1125 ; We use 400/800 instead of 512,1024 to account for inaccurate insn
1126 ; lengths and insn alignments that are complex to track.
1127 ; It's not important that we be hyper-precise here. It may be more
1128 ; important blah blah blah when the chip supports parallel execution
1129 ; blah blah blah but until then blah blah blah this is simple and
1131 (set (attr "length") (if_then_else (ltu (plus (minus (match_dup 0) (pc))
1137 ; reg/reg compare and branch insns
;; Fused compare-and-branch on two registers: a single beq/bne when
;; the target is in range (length 4), otherwise the inverted short
;; branch over an unconditional bra.
1139 (define_insn "*reg_branch_insn"
1141 (if_then_else (match_operator 1 "eqne_comparison_operator"
1142 [(match_operand:SI 2 "register_operand" "r")
1143 (match_operand:SI 3 "register_operand" "r")])
1144 (label_ref (match_operand 0 "" ""))
1149 /* Is branch target reachable with beq/bne? */
1150 if (get_attr_length (insn) == 4)
1152 if (GET_CODE (operands[1]) == EQ)
1153 return \"beq %2,%3,%l0\";
1155 return \"bne %2,%3,%l0\";
1159 if (GET_CODE (operands[1]) == EQ)
1160 return \"bne %2,%3,1f\;bra %l0\;1:\";
1162 return \"beq %2,%3,1f\;bra %l0\;1:\";
1165 [(set_attr "type" "branch")
1166 ; We use 25000/50000 instead of 32768/65536 to account for slot filling
1167 ; which is complex to track and inaccurate length specs.
1168 (set (attr "length") (if_then_else (ltu (plus (minus (match_dup 0) (pc))
;; Arms-swapped variant: identical logic with the EQ/NE opcode choice
;; inverted.
1174 (define_insn "*rev_reg_branch_insn"
1176 (if_then_else (match_operator 1 "eqne_comparison_operator"
1177 [(match_operand:SI 2 "register_operand" "r")
1178 (match_operand:SI 3 "register_operand" "r")])
1180 (label_ref (match_operand 0 "" ""))))]
1184 /* Is branch target reachable with beq/bne? */
1185 if (get_attr_length (insn) == 4)
1187 if (GET_CODE (operands[1]) == NE)
1188 return \"beq %2,%3,%l0\";
1190 return \"bne %2,%3,%l0\";
1194 if (GET_CODE (operands[1]) == NE)
1195 return \"bne %2,%3,1f\;bra %l0\;1:\";
1197 return \"beq %2,%3,1f\;bra %l0\;1:\";
1200 [(set_attr "type" "branch")
1201 ; We use 25000/50000 instead of 32768/65536 to account for slot filling
1202 ; which is complex to track and inaccurate length specs.
1203 (set (attr "length") (if_then_else (ltu (plus (minus (match_dup 0) (pc))
1209 ; reg/zero compare and branch insns
;; Register-versus-zero compare and branch using the M32R bCCz family
;; (beqz/bnez/bltz/bgez/blez/bgtz).  Each signed comparison code is
;; mapped to its mnemonic suffix `br' and its inverse `invbr'; the
;; inverse is used in the long-range form, where a short inverted bCCz
;; jumps around an unconditional `bra' to the real target.
;; NOTE(review): the zero operand and the output-template preamble fall
;; outside this view -- confirm against the full pattern.
1211 (define_insn "*zero_branch_insn"
1213   (if_then_else (match_operator 1 "signed_comparison_operator"
1214 [(match_operand:SI 2 "register_operand" "r")
1216 (label_ref (match_operand 0 "" ""))
1224   switch (GET_CODE (operands[1]))
1226     case EQ : br = \"eq\"; invbr = \"ne\"; break;
1227     case NE : br = \"ne\"; invbr = \"eq\"; break;
1228     case LE : br = \"le\"; invbr = \"gt\"; break;
1229     case GT : br = \"gt\"; invbr = \"le\"; break;
1230     case LT : br = \"lt\"; invbr = \"ge\"; break;
1231     case GE : br = \"ge\"; invbr = \"lt\"; break;
1234   /* Is branch target reachable with bxxz? */
1235   if (get_attr_length (insn) == 4)
1237       sprintf (asmtext, \"b%sz %%2,%%l0\", br);
1238       output_asm_insn (asmtext, operands);
1242       sprintf (asmtext, \"b%sz %%2,1f\;bra %%l0\;1:\", invbr);
1243       output_asm_insn (asmtext, operands);
1247   [(set_attr "type" "branch")
1248 ; We use 25000/50000 instead of 32768/65536 to account for slot filling
1249 ; which is complex to track and inaccurate length specs.
1250    (set (attr "length") (if_then_else (ltu (plus (minus (match_dup 0) (pc))
;; Reversed-sense register-versus-zero compare and branch: taken when the
;; EQ/NE test is FALSE.  Note the roles of `br' and `invbr' are swapped
;; relative to *zero_branch_insn above: the short in-range form emits the
;; INVERTED suffix, while the long form uses the straight suffix to skip
;; over the `bra' to the real target.
1256 (define_insn "*rev_zero_branch_insn"
1258   (if_then_else (match_operator 1 "eqne_comparison_operator"
1259 [(match_operand:SI 2 "register_operand" "r")
1262 (label_ref (match_operand 0 "" ""))))]
1269   switch (GET_CODE (operands[1]))
1271     case EQ : br = \"eq\"; invbr = \"ne\"; break;
1272     case NE : br = \"ne\"; invbr = \"eq\"; break;
1273     case LE : br = \"le\"; invbr = \"gt\"; break;
1274     case GT : br = \"gt\"; invbr = \"le\"; break;
1275     case LT : br = \"lt\"; invbr = \"ge\"; break;
1276     case GE : br = \"ge\"; invbr = \"lt\"; break;
1279   /* Is branch target reachable with bxxz? */
1280   if (get_attr_length (insn) == 4)
1282       sprintf (asmtext, \"b%sz %%2,%%l0\", invbr);
1283       output_asm_insn (asmtext, operands);
1287       sprintf (asmtext, \"b%sz %%2,1f\;bra %%l0\;1:\", br);
1288       output_asm_insn (asmtext, operands);
1292   [(set_attr "type" "branch")
1293 ; We use 25000/50000 instead of 32768/65536 to account for slot filling
1294 ; which is complex to track and inaccurate length specs.
1295    (set (attr "length") (if_then_else (ltu (plus (minus (match_dup 0) (pc))
1301 ;; Unconditional and other jump instructions.
;; NOTE(review): unconditional jump to a label -- the `(define_insn' line
;; and the output template fall outside this view (presumably a `bra';
;; confirm against the full file).  Length is range-dependent, chosen by
;; the pc-relative distance test below.
1304   [(set (pc) (label_ref (match_operand 0 "" "")))]
1307   [(set_attr "type" "uncond_branch")
1308    (set (attr "length") (if_then_else (ltu (plus (minus (match_dup 0) (pc))
;; Jump through a computed address (computed goto and friends).  Always a
;; single 2-byte instruction.  NOTE(review): the condition and output
;; template lines are elided here -- presumably `jmp %a0'; confirm.
1314 (define_insn "indirect_jump"
1315   [(set (pc) (match_operand:SI 0 "address_operand" "p"))]
1318   [(set_attr "type" "uncond_branch")
1319    (set_attr "length" "2")])
;; Jump-table dispatch: same indirect jump as above plus a `use' of the
;; table label so the jump table is not deleted as dead.  Always 2 bytes.
;; NOTE(review): condition/output template elided -- confirm mnemonic.
1321 (define_insn "tablejump"
1322   [(set (pc) (match_operand:SI 0 "address_operand" "p"))
1323    (use (label_ref (match_operand 1 "" "")))]
1326   [(set_attr "type" "uncond_branch")
1327    (set_attr "length" "2")])
;; Call expander for calls with no return value.  The parallel clobbers
;; hard register 14 -- the register the *call_via_label patterns below
;; use with `jl'/`seth'/`add3', i.e. the M32R return-address register.
1329 (define_expand "call"
1330   ;; operands[1] is stack_size_rtx
1331   ;; operands[2] is next_arg_register
1332   [(parallel [(call (match_operand:SI 0 "call_operand" "")
1333 (match_operand 1 "" ""))
1334      (clobber (reg:SI 14))])]
;; Call through a function address already held in a register: a single
;; 2-byte instruction.  NOTE(review): condition and output template are
;; elided in this view -- presumably `jl %0'; confirm.
1338 (define_insn "*call_via_reg"
1339   [(call (mem:SI (match_operand:SI 0 "register_operand" "r"))
1340 (match_operand 1 "" ""))
1341    (clobber (reg:SI 14))]
;; Direct call to a symbolic address.  If the callee is reachable with
;; the 26-bit pc-relative call (call26_operand), the short form is used;
;; otherwise the full address is materialized into r14 with seth/add3
;; and the call goes through `jl r14'.  The long form is kept as one
;; insn (rather than split in the expander) so scheduling cannot
;; separate the address build from the call.  Length: 12 for the long
;; form (10 bytes + 2 of nop filler, since the return address must be
;; 4-byte aligned), otherwise the value given in the elided else-arm.
1347 (define_insn "*call_via_label"
1348   [(call (mem:SI (match_operand:SI 0 "call_address_operand" ""))
1349 (match_operand 1 "" ""))
1350    (clobber (reg:SI 14))]
1354   int call26_p = call26_operand (operands[0], FUNCTION_MODE);
1358       /* We may not be able to reach with a `bl' insn so punt and leave it to
1360 We do this here, rather than doing a force_reg in the define_expand
1361 so these insns won't be separated, say by scheduling, thus simplifying
1363       return \"seth r14,%T0\;add3 r14,r14,%B0\;jl r14\";
1368   [(set_attr "type" "call")
1369    (set (attr "length")
1370 (if_then_else (eq (symbol_ref "call26_operand (operands[0], FUNCTION_MODE)")
1372       (const_int 12) ; 10 + 2 for nop filler
1373       ; The return address must be on a 4 byte boundary so
1374       ; there's no point in using a value of 2 here.  A 2 byte
1375       ; insn may go in the left slot but we currently can't
1376       ; use such knowledge.
;; Call expander for calls whose return value lands in operand 0.
;; Mirrors the plain `call' expander above, with the same clobber of
;; hard register 14 (the return-address register used by `jl').
1379 (define_expand "call_value"
1380   ;; operand 2 is stack_size_rtx
1381   ;; operand 3 is next_arg_register
1382   [(parallel [(set (match_operand 0 "register_operand" "=r")
1383    (call (match_operand:SI 1 "call_operand" "")
1384  (match_operand 2 "" "")))
1385      (clobber (reg:SI 14))])]
;; Value-returning call through a register-held function address: a
;; single 2-byte instruction.  NOTE(review): condition and output
;; template are elided here -- presumably `jl %1'; confirm.
1389 (define_insn "*call_value_via_reg"
1390   [(set (match_operand 0 "register_operand" "=r")
1391 (call (mem:SI (match_operand:SI 1 "register_operand" "r"))
1392       (match_operand 2 "" "")))
1393    (clobber (reg:SI 14))]
;; Value-returning direct call to a symbolic address.  Identical logic to
;; *call_via_label above, with the callee in operand 1 and the result in
;; operand 0: short 26-bit pc-relative call when reachable, otherwise
;; build the address in r14 (seth/add3) and call via `jl r14', kept as
;; one insn so scheduling cannot separate the pieces.  Long-form length
;; is 12 (10 bytes + 2 of nop filler for 4-byte return-address
;; alignment).
1399 (define_insn "*call_value_via_label"
1400   [(set (match_operand 0 "register_operand" "=r")
1401 (call (mem:SI (match_operand:SI 1 "call_address_operand" ""))
1402       (match_operand 2 "" "")))
1403    (clobber (reg:SI 14))]
1407   int call26_p = call26_operand (operands[1], FUNCTION_MODE);
1411       /* We may not be able to reach with a `bl' insn so punt and leave it to
1413 We do this here, rather than doing a force_reg in the define_expand
1414 so these insns won't be separated, say by scheduling, thus simplifying
1416       return \"seth r14,%T1\;add3 r14,r14,%B1\;jl r14\";
1421   [(set_attr "type" "call")
1422    (set (attr "length")
1423 (if_then_else (eq (symbol_ref "call26_operand (operands[1], FUNCTION_MODE)")
1425       (const_int 12) ; 10 + 2 for nop filler
1426       ; The return address must be on a 4 byte boundary so
1427       ; there's no point in using a value of 2 here.  A 2 byte
1428       ; insn may go in the left slot but we currently can't
1429       ; use such knowledge.
;; NOTE(review): attribute tail of the preceding define_insn, whose
;; opening lines fall outside this view -- presumably `nop' (type misc,
;; 2 bytes); confirm against the full file.
1436   [(set_attr "type" "misc")
1437    (set_attr "length" "2")])
1439 ;; UNSPEC_VOLATILE is considered to use and clobber all hard registers and
1440 ;; all of memory. This blocks insns from being moved across this point.
;; Scheduling barrier: an UNSPEC_VOLATILE is treated as using and
;; clobbering all hard registers and all of memory (see comment above),
;; so no insn can be moved across it.  NOTE(review): condition/output
;; lines elided -- presumably emits nothing; confirm.
1442 (define_insn "blockage"
1443   [(unspec_volatile [(const_int 0)] 0)]
1447 ;; Special pattern to flush the icache.
;; Instruction-cache flush for the memory operand's region.  The insn
;; itself only emits a commented `nop' -- the actual flush is evidently
;; left to the surrounding environment; the pattern exists so the
;; compiler has a flush point referencing the memory.
1449 (define_insn "flush_icache"
1450   [(unspec_volatile [(match_operand 0 "memory_operand" "m")] 0)]
1452   "* return \"nop ; flush-icache\";"
1453   [(set_attr "type" "misc")])
1455 ;; Split up troublesome insns for better scheduling.
1457 ;; Peepholes go at the end.
1459 ;; ??? Setting the type attribute may not be useful, but for completeness
;; NOTE(review): the opening line (per the comments above, likely a
;; `(define_peephole') falls outside this view.  Matches a word store
;; through reg-plus-displacement where the address register (operand 0)
;; is dead after this insn, allowing a 2-byte store form -- confirm the
;; replacement template against the full pattern.
1463   [(set (mem:SI (plus:SI (match_operand:SI 0 "register_operand" "r")
1465 (match_operand:SI 1 "register_operand" "r"))]
1466   "dead_or_set_p (insn, operands[0])"
1468   [(set_attr "type" "store")
1469    (set_attr "length" "2")])