1 ;; Machine description of the M32R/D cpu for GNU C compiler
2 ;; Copyright (C) 1996, 1997 Free Software Foundation, Inc.
4 ;; This file is part of GNU CC.
6 ;; GNU CC is free software; you can redistribute it and/or modify
7 ;; it under the terms of the GNU General Public License as published by
8 ;; the Free Software Foundation; either version 2, or (at your option)
11 ;; GNU CC is distributed in the hope that it will be useful,
12 ;; but WITHOUT ANY WARRANTY; without even the implied warranty of
13 ;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 ;; GNU General Public License for more details.
16 ;; You should have received a copy of the GNU General Public License
17 ;; along with GNU CC; see the file COPYING. If not, write to
18 ;; the Free Software Foundation, 59 Temple Place - Suite 330,
19 ;; Boston, MA 02111-1307, USA.
21 ;; See file "rtl.def" for documentation on define_insn, match_*, et al.
28 ;; Insn type. Used to default other attribute values.
29 ;; move4 = 4 byte move
31 "move,move4,load,store,unary,binary,compare,shift,mul,div,uncond_branch,branch,call,multi,misc"
32 (const_string "misc"))
35 (define_attr "length" ""
36 (cond [(eq_attr "type" "move,unary,shift,mul,div")
39 (eq_attr "type" "binary")
40 (if_then_else (match_operand 2 "register_operand" "")
41 (const_int 2) (const_int 4))
43 (eq_attr "type" "compare")
44 (if_then_else (match_operand 1 "register_operand" "")
45 (const_int 2) (const_int 4))
47 (eq_attr "type" "load")
48 (if_then_else (match_operand 1 "memreg_operand" "")
49 (const_int 2) (const_int 4))
51 (eq_attr "type" "store")
52 (if_then_else (match_operand 0 "memreg_operand" "")
53 (const_int 2) (const_int 4))
55 (eq_attr "type" "multi")
58 (eq_attr "type" "uncond_branch,branch,call")
63 ;; The length here is the length of a single asm.  Unfortunately it might be
64 ;; 2 or 4 so we must allow for 4.  That's ok though.
;; Default attributes for user inline `asm' statements: each is treated as a
;; "multi" (multi-word/complex) insn with a worst-case length of 4 bytes,
;; since we cannot tell at compile time whether the asm assembles to a
;; 2-byte or a 4-byte M32R instruction.
65 (define_asm_attributes
66  [(set_attr "length" "4")
67   (set_attr "type" "multi")])
69 ;; Function units of the M32R
70 ;; Units that take one cycle do not need to be specified.
72 ;; (define_function_unit {name} {num-units} {n-users} {test}
73 ;; {ready-delay} {issue-delay} [{conflict-list}])
75 ;; References to loaded registers should wait a cycle.
76 ;; Memory with load-delay of 1 (i.e. 2 cycle load).
;; One fully-pipelined "memory" unit: any insn of type "load" occupies it
;; with a ready-delay of 2 cycles (result available one cycle late) and an
;; issue-delay of 0, so back-to-back loads may still issue every cycle.
77 (define_function_unit "memory" 1 1 (eq_attr "type" "load") 2 0)
79 ;; Hack to get GCC to better pack the instructions.
80 ;; We pretend there is a separate long function unit that conflicts with
81 ;; both the left and right 16 bit insn slots.
83 (define_function_unit "left" 1 1
84 (eq_attr "length" "2")
86 [(not (eq_attr "length" "2"))])
88 (define_function_unit "right" 1 1
89 (eq_attr "length" "1")
91 [(not (eq_attr "length" "2"))])
93 (define_function_unit "long" 1 1
94 (not (eq_attr "length" "2"))
96 [(eq_attr "length" "2")])
98 ;; Expand prologue as RTL
101 ;(define_expand "prologue"
108 ;; Move instructions.
110 ;; For QI and HI moves, the register must contain the full properly
111 ;; sign-extended value. nonzero_bits assumes this [otherwise
112 ;; SHORT_IMMEDIATES_SIGN_EXTEND must be used, but the comment for it
113 ;; says it's a kludge and the .md files should be fixed instead].
115 (define_expand "movqi"
116 [(set (match_operand:QI 0 "general_operand" "")
117 (match_operand:QI 1 "general_operand" ""))]
121 /* Everything except mem = const or mem = mem can be done easily.
122 Objects in the small data area are handled too. */
124 if (GET_CODE (operands[0]) == MEM)
125 operands[1] = force_reg (QImode, operands[1]);
128 (define_insn "*movqi_insn"
129 [(set (match_operand:QI 0 "move_dest_operand" "=r,r,r,r,m")
130 (match_operand:QI 1 "move_src_operand" "r,I,JQR,m,r"))]
131 "register_operand (operands[0], QImode) || register_operand (operands[1], QImode)"
138 [(set_attr "type" "move,move,move4,load,store")])
140 (define_expand "movhi"
141 [(set (match_operand:HI 0 "general_operand" "")
142 (match_operand:HI 1 "general_operand" ""))]
146 /* Everything except mem = const or mem = mem can be done easily. */
148 if (GET_CODE (operands[0]) == MEM)
149 operands[1] = force_reg (HImode, operands[1]);
152 (define_insn "*movhi_insn"
153 [(set (match_operand:HI 0 "move_dest_operand" "=r,r,r,r,r,m")
154 (match_operand:HI 1 "move_src_operand" "r,I,JQR,K,m,r"))]
155 "register_operand (operands[0], HImode) || register_operand (operands[1], HImode)"
163 [(set_attr "type" "move,move,move4,move4,load,store")])
165 (define_expand "movsi"
166 [(set (match_operand:SI 0 "general_operand" "")
167 (match_operand:SI 1 "general_operand" ""))]
171 /* Everything except mem = const or mem = mem can be done easily.
172 If medium or large code model, symbols have to be loaded with seth/add3.
173 Objects in the small data area are handled too. */
175 if (GET_CODE (operands[0]) == MEM)
176 operands[1] = force_reg (SImode, operands[1]);
178 if (small_data_operand (operands[1], SImode))
180 emit_insn (gen_movsi_sda (operands[0], operands[1]));
183 else if (addr32_operand (operands[1], SImode))
185 emit_insn (gen_movsi_addr32 (operands[0], operands[1]));
190 (define_insn "*movsi_insn"
191 [(set (match_operand:SI 0 "move_dest_operand" "=r,r,r,r,r,r,r,m")
192 ;; FIXME: Do we need a const_double constraint here for large unsigned values?
193 (match_operand:SI 1 "move_src_operand" "r,I,J,MQ,L,N,m,r"))]
194 "register_operand (operands[0], SImode) || register_operand (operands[1], SImode)"
201 seth %0,%#%T1\;or3 %0,%0,%#%B1
204 [(set_attr "type" "move,move,move4,move4,move4,multi,load,store")])
206 ;; Small data area support.
207 ;; The address of _SDA_BASE_ is loaded into a register and all objects in
208 ;; the small data area are indexed off that. This is done for each reference
209 ;; but cse will clean things up for us. We let the compiler choose the
210 ;; register to use so we needn't allocate (and maybe even fix) a special
211 ;; register to use. Since the load and store insns have a 16 bit offset the
212 ;; total size of the data area can be 64K. However, if the data area lives
213 ;; above 16M (24 bits), _SDA_BASE_ will have to be loaded with seth/add3 which
214 ;; would then yield 3 instructions to reference an object [though there would
215 ;; be no net loss if two or more objects were referenced]. The 3 insns can be
216 ;; reduced back to 2 if the size of the small data area were reduced to 32K
217 ;; [then seth + ld/st would work for any object in the area]. Doing this
218 ;; would require special handling of _SDA_BASE_ (its value would be
219 ;; (.sdata + 32K) & 0xffff0000) and reloc computations would be different
220 ;; [I think].  What to do about this is deferred until later and for now we
221 ;; require .sdata to be in the first 16M.
223 (define_expand "movsi_sda"
225 (unspec [(const_int 0)] 2))
226 (set (match_operand:SI 0 "register_operand" "")
227 (lo_sum:SI (match_dup 2)
228 (match_operand:SI 1 "small_data_operand" "")))]
232 if (reload_in_progress || reload_completed)
233 operands[2] = operands[0];
235 operands[2] = gen_reg_rtx (SImode);
238 (define_insn "*load_sda_base"
239 [(set (match_operand:SI 0 "register_operand" "=r")
240 (unspec [(const_int 0)] 2))]
242 "ld24 %0,#_SDA_BASE_"
243 [(set_attr "type" "move4")])
245 ;; 32 bit address support.
247 (define_expand "movsi_addr32"
249 ; addr32_operand isn't used because it's too restrictive,
250 ; seth_add3_operand is more general and thus safer.
251 (high:SI (match_operand:SI 1 "seth_add3_operand" "")))
252 (set (match_operand:SI 0 "register_operand" "")
253 (lo_sum:SI (match_dup 2) (match_dup 1)))]
257 if (reload_in_progress || reload_completed)
258 operands[2] = operands[0];
260 operands[2] = gen_reg_rtx (SImode);
263 (define_insn "set_hi_si"
264 [(set (match_operand:SI 0 "register_operand" "=r")
265 (high:SI (match_operand 1 "symbolic_operand" "")))]
267 "seth %0,%#shigh(%1)"
268 [(set_attr "type" "move4")])
270 (define_insn "lo_sum_si"
271 [(set (match_operand:SI 0 "register_operand" "=r")
272 (lo_sum:SI (match_operand:SI 1 "register_operand" "r")
273 (match_operand:SI 2 "immediate_operand" "in")))]
276 [(set_attr "length" "4")])
278 (define_expand "movdi"
279 [(set (match_operand:DI 0 "general_operand" "")
280 (match_operand:DI 1 "general_operand" ""))]
284 /* Everything except mem = const or mem = mem can be done easily. */
286 if (GET_CODE (operands[0]) == MEM)
287 operands[1] = force_reg (DImode, operands[1]);
289 if (CONSTANT_P (operands[1])
290 && ! easy_di_const (operands[1]))
292 rtx mem = force_const_mem (DImode, operands[1]);
293 rtx reg = ((reload_in_progress || reload_completed)
294 ? copy_to_suggested_reg (XEXP (mem, 0),
295 gen_rtx (REG, Pmode, REGNO (operands[0])),
297 : force_reg (Pmode, XEXP (mem, 0)));
298 operands[1] = change_address (mem, DImode, reg);
302 (define_insn "*movdi_insn"
303 [(set (match_operand:DI 0 "move_dest_operand" "=r,r,r,m")
304 (match_operand:DI 1 "move_double_src_operand" "r,nG,m,r"))]
305 "register_operand (operands[0], DImode) || register_operand (operands[1], DImode)"
308 switch (which_alternative)
311 /* We normally copy the low-numbered register first. However, if
312 the first register operand 0 is the same as the second register of
313 operand 1, we must copy in the opposite order. */
314 if (REGNO (operands[0]) == REGNO (operands[1]) + 1)
315 return \"mv %R0,%R1\;mv %0,%1\";
317 return \"mv %0,%1\;mv %R0,%R1\";
321 /* If the low-address word is used in the address, we must load it
322 last. Otherwise, load it first. Note that we cannot have
323 auto-increment in that case since the address register is known to be
325 if (refers_to_regno_p (REGNO (operands[0]), REGNO (operands[0]) + 1,
328 return \"ld %R0,%R1\;ld %0,%1\";
332 /* Try to use auto-inc addressing if we can. */
333 if (GET_CODE (XEXP (operands[1], 0)) == REG
334 && dead_or_set_p (insn, XEXP (operands[1], 0)))
336 operands[1] = XEXP (operands[1], 0);
337 return \"ld %0,@%1+\;ld %R0,@%1\";
339 return \"ld %0,%1\;ld %R0,%R1\";
342 /* Try to use auto-inc addressing if we can. */
343 if (GET_CODE (XEXP (operands[0], 0)) == REG
344 && dead_or_set_p (insn, XEXP (operands[0], 0)))
346 operands[0] = XEXP (operands[0], 0);
347 return \"st %1,@%0\;st %R1,@+%0\";
349 return \"st %1,%0\;st %R1,%R0\";
352 [(set_attr "type" "multi,multi,multi,multi")
353 (set_attr "length" "4,4,6,6")])
356 [(set (match_operand:DI 0 "register_operand" "")
357 (match_operand:DI 1 "const_double_operand" ""))]
359 [(set (match_dup 2) (match_dup 4))
360 (set (match_dup 3) (match_dup 5))]
363 operands[2] = gen_rtx (SUBREG, SImode, operands[0], WORDS_BIG_ENDIAN == 0);
364 operands[3] = gen_rtx (SUBREG, SImode, operands[0], WORDS_BIG_ENDIAN != 0);
365 split_double (operands[1], operands + 4, operands + 5);
368 ;; Floating point move insns.
370 (define_expand "movsf"
371 [(set (match_operand:SF 0 "general_operand" "")
372 (match_operand:SF 1 "general_operand" ""))]
376 /* Everything except mem = const or mem = mem can be done easily. */
378 if (GET_CODE (operands[0]) == MEM)
379 operands[1] = force_reg (SFmode, operands[1]);
382 (define_insn "*movsf_insn"
383 [(set (match_operand:SF 0 "move_dest_operand" "=r,r,r,m")
384 (match_operand:SF 1 "move_src_operand" "r,F,m,r"))]
385 "register_operand (operands[0], SFmode) || register_operand (operands[1], SFmode)"
388 switch (which_alternative)
396 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[1]);
397 REAL_VALUE_TO_TARGET_SINGLE (r, l);
398 operands[1] = GEN_INT (l);
400 return \"ldi %0,%#0\";
401 if ((l & 0xffff) == 0)
402 return \"seth %0,%#%T1\";
404 return \"seth %0,%#%T1\;or3 %0,%0,%#%B1\";
412 ;; ??? Length of alternative 1 is either 2, 4 or 8.
413 [(set_attr "type" "move,multi,load,store")])
415 (define_expand "movdf"
416 [(set (match_operand:DF 0 "general_operand" "")
417 (match_operand:DF 1 "general_operand" ""))]
421 /* Everything except mem = const or mem = mem can be done easily. */
423 if (GET_CODE (operands[0]) == MEM)
424 operands[1] = force_reg (DFmode, operands[1]);
426 if (GET_CODE (operands[1]) == CONST_DOUBLE
427 && ! easy_df_const (operands[1]))
429 rtx mem = force_const_mem (DFmode, operands[1]);
430 rtx reg = ((reload_in_progress || reload_completed)
431 ? copy_to_suggested_reg (XEXP (mem, 0),
432 gen_rtx (REG, Pmode, REGNO (operands[0])),
434 : force_reg (Pmode, XEXP (mem, 0)));
435 operands[1] = change_address (mem, DFmode, reg);
439 (define_insn "*movdf_insn"
440 [(set (match_operand:DF 0 "move_dest_operand" "=r,r,r,m")
441 (match_operand:DF 1 "move_double_src_operand" "r,H,m,r"))]
442 "register_operand (operands[0], DFmode) || register_operand (operands[1], DFmode)"
445 switch (which_alternative)
448 /* We normally copy the low-numbered register first. However, if
449 the first register operand 0 is the same as the second register of
450 operand 1, we must copy in the opposite order. */
451 if (REGNO (operands[0]) == REGNO (operands[1]) + 1)
452 return \"mv %R0,%R1\;mv %0,%1\";
454 return \"mv %0,%1\;mv %R0,%R1\";
459 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[1]);
460 REAL_VALUE_TO_TARGET_DOUBLE (r, l);
461 operands[1] = GEN_INT (l[0]);
462 if (l[0] == 0 && l[1] == 0)
463 return \"ldi %0,%#0\;ldi %R0,%#0\";
466 else if ((l[0] & 0xffff) == 0)
467 return \"seth %0,%#%T1\;ldi %R0,%#0\";
472 /* If the low-address word is used in the address, we must load it
473 last. Otherwise, load it first. Note that we cannot have
474 auto-increment in that case since the address register is known to be
476 if (refers_to_regno_p (REGNO (operands[0]), REGNO (operands[0]) + 1,
479 return \"ld %R0,%R1\;ld %0,%1\";
483 /* Try to use auto-inc addressing if we can. */
484 if (GET_CODE (XEXP (operands[1], 0)) == REG
485 && dead_or_set_p (insn, XEXP (operands[1], 0)))
487 operands[1] = XEXP (operands[1], 0);
488 return \"ld %0,@%1+\;ld %R0,@%1\";
490 return \"ld %0,%1\;ld %R0,%R1\";
493 /* Try to use auto-inc addressing if we can. */
494 if (GET_CODE (XEXP (operands[0], 0)) == REG
495 && dead_or_set_p (insn, XEXP (operands[0], 0)))
497 operands[0] = XEXP (operands[0], 0);
498 return \"st %1,@%0\;st %R1,@+%0\";
500 return \"st %1,%0\;st %R1,%R0\";
503 [(set_attr "type" "multi,multi,multi,multi")
504 (set_attr "length" "4,6,6,6")])
506 ;; Zero extension instructions.
508 (define_insn "zero_extendqihi2"
509 [(set (match_operand:HI 0 "register_operand" "=r,r")
510 (zero_extend:HI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
515 [(set_attr "type" "unary,load")
516 (set_attr "length" "4,*")])
518 (define_insn "zero_extendqisi2"
519 [(set (match_operand:SI 0 "register_operand" "=r,r")
520 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
525 [(set_attr "type" "unary,load")
526 (set_attr "length" "4,*")])
528 (define_insn "zero_extendhisi2"
529 [(set (match_operand:SI 0 "register_operand" "=r,r")
530 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
535 [(set_attr "type" "unary,load")
536 (set_attr "length" "4,*")])
538 ;; Sign extension instructions.
539 ;; FIXME: See v850.md.
541 ;; These patterns originally accepted general_operands; however, slightly
542 ;; better code is generated by only accepting register_operands, and then
543 ;; letting combine generate the lds[hb] insns.
544 ;; [This comment copied from sparc.md, I think.]
546 (define_expand "extendqihi2"
547 [(set (match_operand:HI 0 "register_operand" "")
548 (sign_extend:HI (match_operand:QI 1 "register_operand" "")))]
552 rtx temp = gen_reg_rtx (SImode);
553 rtx shift_24 = gen_rtx (CONST_INT, VOIDmode, 24);
557 if (GET_CODE (operand1) == SUBREG)
559 op1_subword = SUBREG_WORD (operand1);
560 operand1 = XEXP (operand1, 0);
562 if (GET_CODE (operand0) == SUBREG)
564 op0_subword = SUBREG_WORD (operand0);
565 operand0 = XEXP (operand0, 0);
567 emit_insn (gen_ashlsi3 (temp, gen_rtx (SUBREG, SImode, operand1,
570 if (GET_MODE (operand0) != SImode)
571 operand0 = gen_rtx (SUBREG, SImode, operand0, op0_subword);
572 emit_insn (gen_ashrsi3 (operand0, temp, shift_24));
576 (define_insn "*sign_extendqihi2_insn"
577 [(set (match_operand:HI 0 "register_operand" "=r")
578 (sign_extend:HI (match_operand:QI 1 "memory_operand" "m")))]
581 [(set_attr "type" "load")])
583 (define_expand "extendqisi2"
584 [(set (match_operand:SI 0 "register_operand" "")
585 (sign_extend:SI (match_operand:QI 1 "register_operand" "")))]
589 rtx temp = gen_reg_rtx (SImode);
590 rtx shift_24 = gen_rtx (CONST_INT, VOIDmode, 24);
593 if (GET_CODE (operand1) == SUBREG)
595 op1_subword = SUBREG_WORD (operand1);
596 operand1 = XEXP (operand1, 0);
599 emit_insn (gen_ashlsi3 (temp, gen_rtx (SUBREG, SImode, operand1,
602 emit_insn (gen_ashrsi3 (operand0, temp, shift_24));
606 (define_insn "*sign_extendqisi2_insn"
607 [(set (match_operand:SI 0 "register_operand" "=r")
608 (sign_extend:SI (match_operand:QI 1 "memory_operand" "m")))]
611 [(set_attr "type" "load")])
613 (define_expand "extendhisi2"
614 [(set (match_operand:SI 0 "register_operand" "")
615 (sign_extend:SI (match_operand:HI 1 "register_operand" "")))]
619 rtx temp = gen_reg_rtx (SImode);
620 rtx shift_16 = gen_rtx (CONST_INT, VOIDmode, 16);
623 if (GET_CODE (operand1) == SUBREG)
625 op1_subword = SUBREG_WORD (operand1);
626 operand1 = XEXP (operand1, 0);
629 emit_insn (gen_ashlsi3 (temp, gen_rtx (SUBREG, SImode, operand1,
632 emit_insn (gen_ashrsi3 (operand0, temp, shift_16));
636 (define_insn "*sign_extendhisi2_insn"
637 [(set (match_operand:SI 0 "register_operand" "=r")
638 (sign_extend:SI (match_operand:HI 1 "memory_operand" "m")))]
641 [(set_attr "type" "load")])
643 ;; Arithmetic instructions.
645 ; ??? Adding an alternative to split add3 of small constants into two
646 ; insns yields better instruction packing but slower code. Adds of small
647 ; values is done a lot.
649 (define_insn "addsi3"
650 [(set (match_operand:SI 0 "register_operand" "=r,r,r")
651 (plus:SI (match_operand:SI 1 "register_operand" "%0,0,r")
652 (match_operand:SI 2 "nonmemory_operand" "r,I,J")))]
658 [(set_attr "type" "binary")
659 (set_attr "length" "2,2,4")])
662 ; [(set (match_operand:SI 0 "register_operand" "")
663 ; (plus:SI (match_operand:SI 1 "register_operand" "")
664 ; (match_operand:SI 2 "int8_operand" "")))]
666 ; && REGNO (operands[0]) != REGNO (operands[1])
667 ; && INT8_P (INTVAL (operands[2]))
668 ; && INTVAL (operands[2]) != 0"
669 ; [(set (match_dup 0) (match_dup 1))
670 ; (set (match_dup 0) (plus:SI (match_dup 0) (match_dup 2)))]
673 (define_insn "adddi3"
674 [(set (match_operand:DI 0 "register_operand" "=r")
675 (plus:DI (match_operand:DI 1 "register_operand" "%0")
676 (match_operand:DI 2 "register_operand" "r")))
677 (clobber (reg:CC 17))]
681 /* ??? The cmp clears the condition bit. Can we speed up somehow? */
682 return \"cmp %L0,%L0\;addx %L0,%L2\;addx %H0,%H2\";
684 [(set_attr "type" "binary")
685 (set_attr "length" "6")])
687 (define_insn "subsi3"
688 [(set (match_operand:SI 0 "register_operand" "=r")
689 (minus:SI (match_operand:SI 1 "register_operand" "0")
690 (match_operand:SI 2 "register_operand" "r")))]
693 [(set_attr "type" "binary")])
695 (define_insn "subdi3"
696 [(set (match_operand:DI 0 "register_operand" "=r")
697 (minus:DI (match_operand:DI 1 "register_operand" "0")
698 (match_operand:DI 2 "register_operand" "r")))
699 (clobber (reg:CC 17))]
703 /* ??? The cmp clears the condition bit. Can we speed up somehow? */
704 return \"cmp %L0,%L0\;subx %L0,%L2\;subx %H0,%H2\";
706 [(set_attr "type" "binary")
707 (set_attr "length" "6")])
709 ; Multiply/Divide instructions.
711 (define_insn "mulhisi3"
712 [(set (match_operand:SI 0 "register_operand" "=r")
713 (mult:SI (sign_extend:SI (match_operand:HI 1 "register_operand" "r"))
714 (sign_extend:SI (match_operand:HI 2 "register_operand" "r"))))]
716 "mullo %1,%2\;mvfacmi %0"
717 [(set_attr "type" "mul")
718 (set_attr "length" "4")])
720 (define_insn "mulsi3"
721 [(set (match_operand:SI 0 "register_operand" "=r")
722 (mult:SI (match_operand:SI 1 "register_operand" "%0")
723 (match_operand:SI 2 "register_operand" "r")))]
726 [(set_attr "type" "mul")])
728 (define_insn "divsi3"
729 [(set (match_operand:SI 0 "register_operand" "=r")
730 (div:SI (match_operand:SI 1 "register_operand" "0")
731 (match_operand:SI 2 "register_operand" "r")))]
734 [(set_attr "type" "div")])
736 (define_insn "udivsi3"
737 [(set (match_operand:SI 0 "register_operand" "=r")
738 (udiv:SI (match_operand:SI 1 "register_operand" "0")
739 (match_operand:SI 2 "register_operand" "r")))]
742 [(set_attr "type" "div")])
744 (define_insn "modsi3"
745 [(set (match_operand:SI 0 "register_operand" "=r")
746 (mod:SI (match_operand:SI 1 "register_operand" "0")
747 (match_operand:SI 2 "register_operand" "r")))]
750 [(set_attr "type" "div")])
752 (define_insn "umodsi3"
753 [(set (match_operand:SI 0 "register_operand" "=r")
754 (umod:SI (match_operand:SI 1 "register_operand" "0")
755 (match_operand:SI 2 "register_operand" "r")))]
758 [(set_attr "type" "div")])
760 ;; Boolean instructions.
762 ;; We don't define the DImode versions as expand_binop does a good enough job.
763 ;; And if it doesn't it should be fixed.
765 (define_insn "andsi3"
766 [(set (match_operand:SI 0 "register_operand" "=r,r")
767 (and:SI (match_operand:SI 1 "register_operand" "%0,r")
768 (match_operand:SI 2 "nonmemory_operand" "r,K")))]
773 [(set_attr "type" "binary")])
775 (define_insn "iorsi3"
776 [(set (match_operand:SI 0 "register_operand" "=r,r")
777 (ior:SI (match_operand:SI 1 "register_operand" "%0,r")
778 (match_operand:SI 2 "nonmemory_operand" "r,K")))]
783 [(set_attr "type" "binary")])
785 (define_insn "xorsi3"
786 [(set (match_operand:SI 0 "register_operand" "=r,r")
787 (xor:SI (match_operand:SI 1 "register_operand" "%0,r")
788 (match_operand:SI 2 "nonmemory_operand" "r,K")))]
793 [(set_attr "type" "binary")])
795 (define_insn "negsi2"
796 [(set (match_operand:SI 0 "register_operand" "=r")
797 (neg:SI (match_operand:SI 1 "register_operand" "r")))]
800 [(set_attr "type" "unary")])
802 (define_insn "one_cmplsi2"
803 [(set (match_operand:SI 0 "register_operand" "=r")
804 (not:SI (match_operand:SI 1 "register_operand" "r")))]
807 [(set_attr "type" "unary")])
809 ;; Shift instructions.
811 (define_insn "ashlsi3"
812 [(set (match_operand:SI 0 "register_operand" "=r,r,r")
813 (ashift:SI (match_operand:SI 1 "register_operand" "0,0,r")
814 (match_operand:SI 2 "reg_or_uint16_operand" "r,O,K")))]
820 [(set_attr "type" "shift")
821 (set_attr "length" "2,2,4")])
823 (define_insn "ashrsi3"
824 [(set (match_operand:SI 0 "register_operand" "=r,r,r")
825 (ashiftrt:SI (match_operand:SI 1 "register_operand" "0,0,r")
826 (match_operand:SI 2 "reg_or_uint16_operand" "r,O,K")))]
832 [(set_attr "type" "shift")
833 (set_attr "length" "2,2,4")])
835 (define_insn "lshrsi3"
836 [(set (match_operand:SI 0 "register_operand" "=r,r,r")
837 (lshiftrt:SI (match_operand:SI 1 "register_operand" "0,0,r")
838 (match_operand:SI 2 "reg_or_uint16_operand" "r,O,K")))]
844 [(set_attr "type" "shift")
845 (set_attr "length" "2,2,4")])
847 ;; Compare instructions.
848 ;; This controls RTL generation and register allocation.
850 ;; We generate RTL for comparisons and branches by having the cmpxx
851 ;; patterns store away the operands. Then the bcc patterns
852 ;; emit RTL for both the compare and the branch.
854 ;; On the m32r it is more efficient to use the bxxz instructions and
855 ;; thus merge the compare and branch into one instruction, so they are
858 (define_expand "cmpsi"
860 (compare:CC (match_operand:SI 0 "register_operand" "")
861 (match_operand:SI 1 "nonmemory_operand" "")))]
865 m32r_compare_op0 = operands[0];
866 m32r_compare_op1 = operands[1];
870 ;; The cmp_xxx_insn patterns set the condition bit to the result of the
871 ;; comparison. There isn't a "compare equal" instruction so cmp_eqsi_insn
872 ;; is quite inefficient. However, it is rarely used.
874 (define_insn "cmp_eqsi_insn"
876 (eq:CC (match_operand:SI 0 "register_operand" "r,r")
877 (match_operand:SI 1 "reg_or_cmp_int16_operand" "r,P")))
878 (clobber (match_scratch:SI 2 "=&r,&r"))]
881 mv %2,%0\;sub %2,%1\;cmpui %2,#1
882 add3 %2,%0,%#%N1\;cmpui %2,#1"
883 [(set_attr "type" "compare,compare")
884 (set_attr "length" "8,8")])
886 (define_insn "cmp_ltsi_insn"
888 (lt:CC (match_operand:SI 0 "register_operand" "r,r")
889 (match_operand:SI 1 "reg_or_int16_operand" "r,J")))]
894 [(set_attr "type" "compare")])
896 (define_insn "cmp_ltusi_insn"
898 (ltu:CC (match_operand:SI 0 "register_operand" "r,r")
899 (match_operand:SI 1 "reg_or_uint16_operand" "r,K")))]
904 [(set_attr "type" "compare")])
906 ;; reg == small constant comparisons are best handled by putting the result
907 ;; of the comparison in a tmp reg and then using beqz/bnez.
908 ;; ??? The result register doesn't contain 0/STORE_FLAG_VALUE,
909 ;; it contains 0/non-zero.
911 (define_insn "cmp_ne_small_const_insn"
912 [(set (match_operand:SI 0 "register_operand" "=r")
913 (ne:SI (match_operand:SI 1 "register_operand" "r")
914 (match_operand:SI 2 "cmp_int16_operand" "P")))]
917 [(set_attr "type" "compare")
918 (set_attr "length" "4")])
920 ;; These control RTL generation for conditional jump insns.
924 (if_then_else (match_dup 1)
925 (label_ref (match_operand 0 "" ""))
930 operands[1] = gen_compare (EQ, m32r_compare_op0, m32r_compare_op1);
935 (if_then_else (match_dup 1)
936 (label_ref (match_operand 0 "" ""))
941 operands[1] = gen_compare (NE, m32r_compare_op0, m32r_compare_op1);
946 (if_then_else (match_dup 1)
947 (label_ref (match_operand 0 "" ""))
952 operands[1] = gen_compare (GT, m32r_compare_op0, m32r_compare_op1);
957 (if_then_else (match_dup 1)
958 (label_ref (match_operand 0 "" ""))
963 operands[1] = gen_compare (LE, m32r_compare_op0, m32r_compare_op1);
968 (if_then_else (match_dup 1)
969 (label_ref (match_operand 0 "" ""))
974 operands[1] = gen_compare (GE, m32r_compare_op0, m32r_compare_op1);
979 (if_then_else (match_dup 1)
980 (label_ref (match_operand 0 "" ""))
985 operands[1] = gen_compare (LT, m32r_compare_op0, m32r_compare_op1);
988 (define_expand "bgtu"
990 (if_then_else (match_dup 1)
991 (label_ref (match_operand 0 "" ""))
996 operands[1] = gen_compare (GTU, m32r_compare_op0, m32r_compare_op1);
999 (define_expand "bleu"
1001 (if_then_else (match_dup 1)
1002 (label_ref (match_operand 0 "" ""))
1007 operands[1] = gen_compare (LEU, m32r_compare_op0, m32r_compare_op1);
1010 (define_expand "bgeu"
1012 (if_then_else (match_dup 1)
1013 (label_ref (match_operand 0 "" ""))
1018 operands[1] = gen_compare (GEU, m32r_compare_op0, m32r_compare_op1);
1021 (define_expand "bltu"
1023 (if_then_else (match_dup 1)
1024 (label_ref (match_operand 0 "" ""))
1029 operands[1] = gen_compare (LTU, m32r_compare_op0, m32r_compare_op1);
1032 ;; Now match both normal and inverted jump.
1034 (define_insn "*branch_insn"
1036 (if_then_else (match_operator 1 "eqne_comparison_operator"
1037 [(reg 17) (const_int 0)])
1038 (label_ref (match_operand 0 "" ""))
1043 if (GET_CODE (operands[1]) == NE)
1048 [(set_attr "type" "branch")
1049 ; We use 400/800 instead of 512,1024 to account for inaccurate insn
1050 ; lengths and insn alignments that are complex to track.
1051 ; It's not important that we be hyper-precise here. It may be more
1052 ; important blah blah blah when the chip supports parallel execution
1053 ; blah blah blah but until then blah blah blah this is simple and
1055 (set (attr "length") (if_then_else (ltu (plus (minus (match_dup 0) (pc))
1061 (define_insn "*rev_branch_insn"
1063 (if_then_else (match_operator 1 "eqne_comparison_operator"
1064 [(reg 17) (const_int 0)])
1066 (label_ref (match_operand 0 "" ""))))]
1067 ;"REVERSIBLE_CC_MODE (GET_MODE (XEXP (operands[1], 0)))"
1071 if (GET_CODE (operands[1]) == EQ)
1076 [(set_attr "type" "branch")
1077 ; We use 400/800 instead of 512,1024 to account for inaccurate insn
1078 ; lengths and insn alignments that are complex to track.
1079 ; It's not important that we be hyper-precise here. It may be more
1080 ; important blah blah blah when the chip supports parallel execution
1081 ; blah blah blah but until then blah blah blah this is simple and
1083 (set (attr "length") (if_then_else (ltu (plus (minus (match_dup 0) (pc))
1089 ; reg/reg compare and branch insns
1091 (define_insn "*reg_branch_insn"
1093 (if_then_else (match_operator 1 "eqne_comparison_operator"
1094 [(match_operand:SI 2 "register_operand" "r")
1095 (match_operand:SI 3 "register_operand" "r")])
1096 (label_ref (match_operand 0 "" ""))
1101 /* Is branch target reachable with beq/bne? */
1102 if (get_attr_length (insn) == 4)
1104 if (GET_CODE (operands[1]) == EQ)
1105 return \"beq %2,%3,%l0\";
1107 return \"bne %2,%3,%l0\";
1111 if (GET_CODE (operands[1]) == EQ)
1112 return \"bne %2,%3,1f\;bra %l0\;1:\";
1114 return \"beq %2,%3,1f\;bra %l0\;1:\";
1117 [(set_attr "type" "branch")
1118 ; We use 25000/50000 instead of 32768/65536 to account for slot filling
1119 ; which is complex to track and inaccurate length specs.
1120 (set (attr "length") (if_then_else (ltu (plus (minus (match_dup 0) (pc))
1126 (define_insn "*rev_reg_branch_insn"
1128 (if_then_else (match_operator 1 "eqne_comparison_operator"
1129 [(match_operand:SI 2 "register_operand" "r")
1130 (match_operand:SI 3 "register_operand" "r")])
1132 (label_ref (match_operand 0 "" ""))))]
1136 /* Is branch target reachable with beq/bne? */
1137 if (get_attr_length (insn) == 4)
1139 if (GET_CODE (operands[1]) == NE)
1140 return \"beq %2,%3,%l0\";
1142 return \"bne %2,%3,%l0\";
1146 if (GET_CODE (operands[1]) == NE)
1147 return \"bne %2,%3,1f\;bra %l0\;1:\";
1149 return \"beq %2,%3,1f\;bra %l0\;1:\";
1152 [(set_attr "type" "branch")
1153 ; We use 25000/50000 instead of 32768/65536 to account for slot filling
1154 ; which is complex to track and inaccurate length specs.
1155 (set (attr "length") (if_then_else (ltu (plus (minus (match_dup 0) (pc))
1161 ; reg/zero compare and branch insns
1163 (define_insn "*zero_branch_insn"
1165 (if_then_else (match_operator 1 "signed_comparison_operator"
1166 [(match_operand:SI 2 "register_operand" "r")
1168 (label_ref (match_operand 0 "" ""))
1176 switch (GET_CODE (operands[1]))
1178 case EQ : br = \"eq\"; invbr = \"ne\"; break;
1179 case NE : br = \"ne\"; invbr = \"eq\"; break;
1180 case LE : br = \"le\"; invbr = \"gt\"; break;
1181 case GT : br = \"gt\"; invbr = \"le\"; break;
1182 case LT : br = \"lt\"; invbr = \"ge\"; break;
1183 case GE : br = \"ge\"; invbr = \"lt\"; break;
1186 /* Is branch target reachable with bxxz? */
1187 if (get_attr_length (insn) == 4)
1189 sprintf (asmtext, \"b%sz %%2,%%l0\", br);
1190 output_asm_insn (asmtext, operands);
1194 sprintf (asmtext, \"b%sz %%2,1f\;bra %%l0\;1:\", invbr);
1195 output_asm_insn (asmtext, operands);
1199 [(set_attr "type" "branch")
1200 ; We use 25000/50000 instead of 32768/65536 to account for slot filling
1201 ; which is complex to track and inaccurate length specs.
1202 (set (attr "length") (if_then_else (ltu (plus (minus (match_dup 0) (pc))
1208 (define_insn "*rev_zero_branch_insn"
1210 (if_then_else (match_operator 1 "eqne_comparison_operator"
1211 [(match_operand:SI 2 "register_operand" "r")
1214 (label_ref (match_operand 0 "" ""))))]
1221 switch (GET_CODE (operands[1]))
1223 case EQ : br = \"eq\"; invbr = \"ne\"; break;
1224 case NE : br = \"ne\"; invbr = \"eq\"; break;
1225 case LE : br = \"le\"; invbr = \"gt\"; break;
1226 case GT : br = \"gt\"; invbr = \"le\"; break;
1227 case LT : br = \"lt\"; invbr = \"ge\"; break;
1228 case GE : br = \"ge\"; invbr = \"lt\"; break;
1231 /* Is branch target reachable with bxxz? */
1232 if (get_attr_length (insn) == 4)
1234 sprintf (asmtext, \"b%sz %%2,%%l0\", invbr);
1235 output_asm_insn (asmtext, operands);
1239 sprintf (asmtext, \"b%sz %%2,1f\;bra %%l0\;1:\", br);
1240 output_asm_insn (asmtext, operands);
1244 [(set_attr "type" "branch")
1245 ; We use 25000/50000 instead of 32768/65536 to account for slot filling
1246 ; which is complex to track and inaccurate length specs.
1247 (set (attr "length") (if_then_else (ltu (plus (minus (match_dup 0) (pc))
1253 ;; Unconditional and other jump instructions.
1256 [(set (pc) (label_ref (match_operand 0 "" "")))]
1259 [(set_attr "type" "uncond_branch")
1260 (set (attr "length") (if_then_else (ltu (plus (minus (match_dup 0) (pc))
1266 (define_insn "indirect_jump"
1267 [(set (pc) (match_operand:SI 0 "address_operand" "p"))]
1270 [(set_attr "type" "uncond_branch")
1271 (set_attr "length" "2")])
1273 (define_insn "tablejump"
1274 [(set (pc) (match_operand:SI 0 "address_operand" "p"))
1275 (use (label_ref (match_operand 1 "" "")))]
1278 [(set_attr "type" "uncond_branch")
1279 (set_attr "length" "2")])
1281 (define_expand "call"
1282 ;; operands[1] is stack_size_rtx
1283 ;; operands[2] is next_arg_register
1284 [(parallel [(call (match_operand:SI 0 "call_operand" "")
1285 (match_operand 1 "" ""))
1286 (clobber (reg:SI 14))])]
1290 (define_insn "*call_via_reg"
1291 [(call (mem:SI (match_operand:SI 0 "register_operand" "r"))
1292 (match_operand 1 "" ""))
1293 (clobber (reg:SI 14))]
1296 [(set_attr "type" "call")
1297 (set_attr "length" "2")])
1299 (define_insn "*call_via_label"
1300 [(call (mem:SI (match_operand:SI 0 "call_address_operand" ""))
1301 (match_operand 1 "" ""))
1302 (clobber (reg:SI 14))]
1306 int call26_p = call26_operand (operands[0], FUNCTION_MODE);
1310 /* We may not be able to reach with a `bl' insn so punt and leave it to
1312 We do this here, rather than doing a force_reg in the define_expand
1313 so these insns won't be separated, say by scheduling, thus simplifying
1315 return \"seth r14,%T0\;add3 r14,r14,%B0\;jl r14\";
1320 [(set_attr "type" "call")
1321 (set (attr "length")
1322 (if_then_else (eq (symbol_ref "call26_operand (operands[0], FUNCTION_MODE)")
1324 (const_int 12) ; 10 + 2 for nop filler
1325 ; The return address must be on a 4 byte boundary so
1326 ; there's no point in using a value of 2 here. A 2 byte
1327 ; insn may go in the left slot but we currently can't
1328 ; use such knowledge.
1331 (define_expand "call_value"
1332 ;; operand 2 is stack_size_rtx
1333 ;; operand 3 is next_arg_register
1334 [(parallel [(set (match_operand 0 "register_operand" "=r")
1335 (call (match_operand:SI 1 "call_operand" "")
1336 (match_operand 2 "" "")))
1337 (clobber (reg:SI 14))])]
1341 (define_insn "*call_value_via_reg"
1342 [(set (match_operand 0 "register_operand" "=r")
1343 (call (mem:SI (match_operand:SI 1 "register_operand" "r"))
1344 (match_operand 2 "" "")))
1345 (clobber (reg:SI 14))]
1348 [(set_attr "type" "call")
1349 (set_attr "length" "2")])
1351 (define_insn "*call_value_via_label"
1352 [(set (match_operand 0 "register_operand" "=r")
1353 (call (mem:SI (match_operand:SI 1 "call_address_operand" ""))
1354 (match_operand 2 "" "")))
1355 (clobber (reg:SI 14))]
1359 int call26_p = call26_operand (operands[1], FUNCTION_MODE);
1363 /* We may not be able to reach with a `bl' insn so punt and leave it to
1365 We do this here, rather than doing a force_reg in the define_expand
1366 so these insns won't be separated, say by scheduling, thus simplifying
1368 return \"seth r14,%T1\;add3 r14,r14,%B1\;jl r14\";
1373 [(set_attr "type" "call")
1374 (set (attr "length")
1375 (if_then_else (eq (symbol_ref "call26_operand (operands[1], FUNCTION_MODE)")
1377 (const_int 12) ; 10 + 2 for nop filler
1378 ; The return address must be on a 4 byte boundary so
1379 ; there's no point in using a value of 2 here. A 2 byte
1380 ; insn may go in the left slot but we currently can't
1381 ; use such knowledge.
1388 [(set_attr "type" "misc")
1389 (set_attr "length" "2")])
1391 ;; UNSPEC_VOLATILE is considered to use and clobber all hard registers and
1392 ;; all of memory. This blocks insns from being moved across this point.
1394 (define_insn "blockage"
1395 [(unspec_volatile [(const_int 0)] 0)]
1399 ;; Special pattern to flush the icache.
1401 (define_insn "flush_icache"
1402 [(unspec_volatile [(match_operand 0 "memory_operand" "m")] 0)]
1404 "* return \"nop ; flush-icache\";"
1405 [(set_attr "type" "misc")])
1407 ;; Split up troublesome insns for better scheduling.
1409 ;; Peepholes go at the end.
1411 ;; ??? Setting the type attribute may not be useful, but for completeness
1415 [(set (mem:SI (plus:SI (match_operand:SI 0 "register_operand" "r")
1417 (match_operand:SI 1 "register_operand" "r"))]
1418 "dead_or_set_p (insn, operands[0])"
1420 [(set_attr "type" "store")
1421 (set_attr "length" "2")])