1 ;;- Machine description for HP PA-RISC architecture for GNU C compiler
2 ;; Copyright (C) 1992, 93-97, 1998 Free Software Foundation, Inc.
3 ;; Contributed by the Center for Software Science at the University
6 ;; This file is part of GNU CC.
8 ;; GNU CC is free software; you can redistribute it and/or modify
9 ;; it under the terms of the GNU General Public License as published by
10 ;; the Free Software Foundation; either version 2, or (at your option)
13 ;; GNU CC is distributed in the hope that it will be useful,
14 ;; but WITHOUT ANY WARRANTY; without even the implied warranty of
15 ;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 ;; GNU General Public License for more details.
18 ;; You should have received a copy of the GNU General Public License
19 ;; along with GNU CC; see the file COPYING. If not, write to
20 ;; the Free Software Foundation, 59 Temple Place - Suite 330,
21 ;; Boston, MA 02111-1307, USA.
23 ;; This gcc Version 2 machine description is inspired by sparc.md and
26 ;;- See file "rtl.def" for documentation on define_insn, match_*, et. al.
28 ;; Insn type. Used to default other attribute values.
30 ;; type "unary" insns have one input operand (1) and one output operand (0)
31 ;; type "binary" insns have two input operands (1,2) and one output (0)
34 "move,unary,binary,shift,nullshift,compare,load,store,uncond_branch,branch,cbranch,fbranch,call,dyncall,fpload,fpstore,fpalu,fpcc,fpmulsgl,fpmuldbl,fpdivsgl,fpdivdbl,fpsqrtsgl,fpsqrtdbl,multi,milli,parallel_branch"
35 (const_string "binary"))
;; Classification consumed by the pa_combine_instructions pass: which
;; kind of insn this is for pairing purposes ("none" = not a candidate).
37 (define_attr "pa_combine_type"
38 "fmpy,faddsub,uncond_branch,addmove,none"
39 (const_string "none"))
41 ;; Processor type (for scheduling, not code generation) -- this attribute
42 ;; must exactly match the processor_type enumeration in pa.h.
44 ;; FIXME: Add 800 scheduling for completeness?
;; Default comes from the pa_cpu_attr symbol at compile time (set by -mschedule);
;; the value list must stay in sync with enum processor_type in pa.h.
46 (define_attr "cpu" "700,7100,7100LC,7200" (const (symbol_ref "pa_cpu_attr")))
48 ;; Length (in # of insns).
49 (define_attr "length" ""
50 (cond [(eq_attr "type" "load,fpload")
51 (if_then_else (match_operand 1 "symbolic_memory_operand" "")
52 (const_int 8) (const_int 4))
54 (eq_attr "type" "store,fpstore")
55 (if_then_else (match_operand 0 "symbolic_memory_operand" "")
56 (const_int 8) (const_int 4))
58 (eq_attr "type" "binary,shift,nullshift")
59 (if_then_else (match_operand 2 "arith_operand" "")
60 (const_int 4) (const_int 12))
62 (eq_attr "type" "move,unary,shift,nullshift")
63 (if_then_else (match_operand 1 "arith_operand" "")
64 (const_int 4) (const_int 8))]
;; Attributes assumed for inline asm statements: a single 4-byte insn of
;; type "multi".  "multi" is excluded by the in_*_delay tests below, so
;; asm statements are conservatively kept out of delay slots.
;; NOTE(review): length 4 may understate multi-insn asms — the assumption
;; appears deliberate here; confirm against pa.c length handling.
68 (define_asm_attributes
69 [(set_attr "length" "4")
70 (set_attr "type" "multi")])
72 ;; Attributes for instruction and branch scheduling
74 ;; For conditional branches.
75 (define_attr "in_branch_delay" "false,true"
76 (if_then_else (and (eq_attr "type" "!uncond_branch,branch,cbranch,fbranch,call,dyncall,multi,milli,parallel_branch")
77 (eq_attr "length" "4"))
79 (const_string "false")))
81 ;; Disallow instructions which use the FPU since they will tie up the FPU
82 ;; even if the instruction is nullified.
83 (define_attr "in_nullified_branch_delay" "false,true"
84 (if_then_else (and (eq_attr "type" "!uncond_branch,branch,cbranch,fbranch,call,dyncall,multi,milli,fpcc,fpalu,fpmulsgl,fpmuldbl,fpdivsgl,fpdivdbl,fpsqrtsgl,fpsqrtdbl,parallel_branch")
85 (eq_attr "length" "4"))
87 (const_string "false")))
89 ;; For calls and millicode calls. Allow unconditional branches in the
91 (define_attr "in_call_delay" "false,true"
92 (cond [(and (eq_attr "type" "!uncond_branch,branch,cbranch,fbranch,call,dyncall,multi,milli,parallel_branch")
93 (eq_attr "length" "4"))
95 (eq_attr "type" "uncond_branch")
96 (if_then_else (ne (symbol_ref "TARGET_JUMP_IN_DELAY")
99 (const_string "false"))]
100 (const_string "false")))
103 ;; Call delay slot description.
;; Calls have one delay slot, filled by any insn satisfying in_call_delay.
;; The two (nil) entries mean the slot is never annulled.
104 (define_delay (eq_attr "type" "call")
105 [(eq_attr "in_call_delay" "true") (nil) (nil)])
107 ;; millicode call delay slot description. Note it disallows delay slot
108 ;; when TARGET_PORTABLE_RUNTIME is true.
109 (define_delay (eq_attr "type" "milli")
110 [(and (eq_attr "in_call_delay" "true")
111 (eq (symbol_ref "TARGET_PORTABLE_RUNTIME") (const_int 0)))
114 ;; Return and other similar instructions.
;; Returns and parallel branches: one non-annulled delay slot, filled by
;; anything allowed in a branch delay slot.
115 (define_delay (eq_attr "type" "branch,parallel_branch")
116 [(eq_attr "in_branch_delay" "true") (nil) (nil)])
118 ;; Floating point conditional branch delay slot description and
119 (define_delay (eq_attr "type" "fbranch")
120 [(eq_attr "in_branch_delay" "true")
121 (eq_attr "in_nullified_branch_delay" "true")
124 ;; Integer conditional branch delay slot description.
125 ;; Nullification of conditional branches on the PA is dependent on the
126 ;; direction of the branch. Forward branches nullify true and
127 ;; backward branches nullify false. If the direction is unknown
128 ;; then nullification is not allowed.
129 (define_delay (eq_attr "type" "cbranch")
130 [(eq_attr "in_branch_delay" "true")
;; Annul-if-true is legal only for forward branches (which nullify
;; when the branch is taken) ...
131 (and (eq_attr "in_nullified_branch_delay" "true")
132 (attr_flag "forward"))
;; ... and annul-if-false only for backward branches (which nullify
;; on fall-through).  Unknown direction gets no annulled slot.
133 (and (eq_attr "in_nullified_branch_delay" "true")
134 (attr_flag "backward"))])
136 (define_delay (and (eq_attr "type" "uncond_branch")
137 (eq (symbol_ref "following_call (insn)")
139 [(eq_attr "in_branch_delay" "true") (nil) (nil)])
141 ;; Function units of the HPPA. The following data is for the 700 CPUs
142 ;; (Mustang CPU + Timex FPU aka PA-89) because that's what I have the docs for.
143 ;; Scheduling instructions for PA-83 machines according to the Snake
144 ;; constraints shouldn't hurt.
146 ;; (define_function_unit {name} {num-units} {n-users} {test}
147 ;; {ready-delay} {issue-delay} [{conflict-list}])
150 ;; (Noted only for documentation; units that take one cycle do not need to
153 ;; (define_function_unit "alu" 1 0
154 ;; (and (eq_attr "type" "unary,shift,nullshift,binary,move,address")
155 ;; (eq_attr "cpu" "700"))
159 ;; Memory. Disregarding Cache misses, the Mustang memory times are:
160 ;; load: 2, fpload: 3
161 ;; store, fpstore: 3, no D-cache operations should be scheduled.
;; 700-series memory unit.  Loads are ready after 2 cycles and do not
;; block issue (issue-delay 0); stores occupy the D-cache for 3 cycles,
;; during which no other memory op may issue (issue-delay 3).
163 (define_function_unit "pa700memory" 1 0
164 (and (eq_attr "type" "load,fpload")
165 (eq_attr "cpu" "700")) 2 0)
166 (define_function_unit "pa700memory" 1 0
167 (and (eq_attr "type" "store,fpstore")
168 (eq_attr "cpu" "700")) 3 3)
170 ;; The Timex (aka 700) has two floating-point units: ALU, and MUL/DIV/SQRT.
172 ;; Instruction Time Unit Minimum Distance (unit contention)
179 ;; fmpyadd 3 ALU,MPY 2
180 ;; fmpysub 3 ALU,MPY 2
181 ;; fmpycfxt 3 ALU,MPY 2
184 ;; fdiv,sgl 10 MPY 10
185 ;; fdiv,dbl 12 MPY 12
186 ;; fsqrt,sgl 14 MPY 14
187 ;; fsqrt,dbl 18 MPY 18
;; 700-series (Timex) FP units; ready-delay/issue-delay pairs follow the
;; timing table above.  FP compares: 4 cycles, unit busy 2.
189 (define_function_unit "pa700fp_alu" 1 0
190 (and (eq_attr "type" "fpcc")
191 (eq_attr "cpu" "700")) 4 2)
;; FP add/sub and other ALU flops: 3 cycles, unit busy 2.
192 (define_function_unit "pa700fp_alu" 1 0
193 (and (eq_attr "type" "fpalu")
194 (eq_attr "cpu" "700")) 3 2)
;; Multiplies (single or double): 3 cycles, unit busy 2.
195 (define_function_unit "pa700fp_mpy" 1 0
196 (and (eq_attr "type" "fpmulsgl,fpmuldbl")
197 (eq_attr "cpu" "700")) 3 2)
;; Divides and square roots are unpipelined: the MPY unit is blocked for
;; the full latency (issue-delay == ready-delay).
198 (define_function_unit "pa700fp_mpy" 1 0
199 (and (eq_attr "type" "fpdivsgl")
200 (eq_attr "cpu" "700")) 10 10)
201 (define_function_unit "pa700fp_mpy" 1 0
202 (and (eq_attr "type" "fpdivdbl")
203 (eq_attr "cpu" "700")) 12 12)
204 (define_function_unit "pa700fp_mpy" 1 0
205 (and (eq_attr "type" "fpsqrtsgl")
206 (eq_attr "cpu" "700")) 14 14)
207 (define_function_unit "pa700fp_mpy" 1 0
208 (and (eq_attr "type" "fpsqrtdbl")
209 (eq_attr "cpu" "700")) 18 18)
211 ;; Function units for the 7100 and 7150. The 7100/7150 can dual-issue
212 ;; floating point computations with non-floating point computations (fp loads
213 ;; and stores are not fp computations).
216 ;; Memory. Disregarding Cache misses, memory loads take two cycles; stores also
217 ;; take two cycles, during which no Dcache operations should be scheduled.
218 ;; Any special cases are handled in pa_adjust_cost. The 7100, 7150 and 7100LC
219 ;; all have the same memory characteristics if one disregards cache misses.
;; 7100/7150/7100LC memory unit: loads ready in 2 cycles without blocking
;; issue; stores tie up the D-cache for 2 cycles (issue-delay 2).
220 (define_function_unit "pa7100memory" 1 0
221 (and (eq_attr "type" "load,fpload")
222 (eq_attr "cpu" "7100,7100LC")) 2 0)
223 (define_function_unit "pa7100memory" 1 0
224 (and (eq_attr "type" "store,fpstore")
225 (eq_attr "cpu" "7100,7100LC")) 2 2)
227 ;; The 7100/7150 has three floating-point units: ALU, MUL, and DIV.
229 ;; Instruction Time Unit Minimum Distance (unit contention)
236 ;; fmpyadd 2 ALU,MPY 1
237 ;; fmpysub 2 ALU,MPY 1
238 ;; fmpycfxt 2 ALU,MPY 1
242 ;; fdiv,dbl 15 DIV 15
244 ;; fsqrt,dbl 15 DIV 15
;; 7100/7150 FP units.  ALU ops (compares, add/sub): 2-cycle latency,
;; unit busy 1 cycle.
246 (define_function_unit "pa7100fp_alu" 1 0
247 (and (eq_attr "type" "fpcc,fpalu")
248 (eq_attr "cpu" "7100")) 2 1)
;; Multiplies: 2-cycle latency, unit busy 1 cycle.
249 (define_function_unit "pa7100fp_mpy" 1 0
250 (and (eq_attr "type" "fpmulsgl,fpmuldbl")
251 (eq_attr "cpu" "7100")) 2 1)
;; Divide/sqrt are unpipelined: DIV unit blocked for the full latency
;; (8 cycles single, 15 cycles double).
252 (define_function_unit "pa7100fp_div" 1 0
253 (and (eq_attr "type" "fpdivsgl,fpsqrtsgl")
254 (eq_attr "cpu" "7100")) 8 8)
255 (define_function_unit "pa7100fp_div" 1 0
256 (and (eq_attr "type" "fpdivdbl,fpsqrtdbl")
257 (eq_attr "cpu" "7100")) 15 15)
259 ;; To encourage dual issue we define function units corresponding to
260 ;; the instructions which can be dual issued. This is a rather crude
261 ;; approximation, the "pa7100nonflop" test in particular could be refined.
262 (define_function_unit "pa7100flop" 1 1
264 (eq_attr "type" "fpcc,fpalu,fpmulsgl,fpmuldbl,fpdivsgl,fpsqrtsgl,fpdivdbl,fpsqrtdbl")
265 (eq_attr "cpu" "7100")) 1 1)
267 (define_function_unit "pa7100nonflop" 1 1
269 (eq_attr "type" "!fpcc,fpalu,fpmulsgl,fpmuldbl,fpdivsgl,fpsqrtsgl,fpdivdbl,fpsqrtdbl")
270 (eq_attr "cpu" "7100")) 1 1)
273 ;; Memory subsystem works just like 7100/7150 (except for cache miss times which
274 ;; we don't model here).
276 ;; The 7100LC has three floating-point units: ALU, MUL, and DIV.
277 ;; Note divides and sqrt flops lock the cpu until the flop is
278 ;; finished. fmpy and xmpyu (fmpyi) lock the cpu for one cycle.
279 ;; There's no way to avoid the penalty.
281 ;; Instruction Time Unit Minimum Distance (unit contention)
288 ;; fmpyadd,sgl 2 ALU,MPY 1
289 ;; fmpyadd,dbl 3 ALU,MPY 2
290 ;; fmpysub,sgl 2 ALU,MPY 1
291 ;; fmpysub,dbl 3 ALU,MPY 2
292 ;; fmpycfxt,sgl 2 ALU,MPY 1
293 ;; fmpycfxt,dbl 3 ALU,MPY 2
298 ;; fdiv,dbl 15 DIV 15
300 ;; fsqrt,dbl 15 DIV 15
;; 7100LC (also used for the 7200) FP units.  ALU ops: 2-cycle latency,
;; busy 1 cycle.
302 (define_function_unit "pa7100LCfp_alu" 1 0
303 (and (eq_attr "type" "fpcc,fpalu")
304 (eq_attr "cpu" "7100LC,7200")) 2 1)
;; Single-precision multiply: 2 cycles; double-precision: 3 cycles and
;; the unit is busy one cycle longer.
305 (define_function_unit "pa7100LCfp_mpy" 1 0
306 (and (eq_attr "type" "fpmulsgl")
307 (eq_attr "cpu" "7100LC,7200")) 2 1)
308 (define_function_unit "pa7100LCfp_mpy" 1 0
309 (and (eq_attr "type" "fpmuldbl")
310 (eq_attr "cpu" "7100LC,7200")) 3 2)
;; Divide/sqrt lock the unit for the entire operation (see note above:
;; these also lock the cpu; there is no way to avoid the penalty).
311 (define_function_unit "pa7100LCfp_div" 1 0
312 (and (eq_attr "type" "fpdivsgl,fpsqrtsgl")
313 (eq_attr "cpu" "7100LC,7200")) 8 8)
314 (define_function_unit "pa7100LCfp_div" 1 0
315 (and (eq_attr "type" "fpdivdbl,fpsqrtdbl")
316 (eq_attr "cpu" "7100LC,7200")) 15 15)
318 ;; Define the various functional units for dual-issue.
320 ;; There's only one floating point unit.
321 (define_function_unit "pa7100LCflop" 1 1
323 (eq_attr "type" "fpcc,fpalu,fpmulsgl,fpmuldbl,fpdivsgl,fpsqrtsgl,fpdivdbl,fpsqrtdbl")
324 (eq_attr "cpu" "7100LC,7200")) 1 1)
326 ;; Shifts and memory ops actually execute in one of the integer
327 ;; ALUs, but we can't really model that.
328 (define_function_unit "pa7100LCshiftmem" 1 1
330 (eq_attr "type" "shift,nullshift,load,fpload,store,fpstore")
331 (eq_attr "cpu" "7100LC,7200")) 1 1)
333 ;; We have two basic ALUs.
334 (define_function_unit "pa7100LCalu" 2 2
336 (eq_attr "type" "!fpcc,fpalu,fpmulsgl,fpmuldbl,fpdivsgl,fpsqrtsgl,fpdivdbl,fpsqrtdbl,load,fpload,store,fpstore,shift,nullshift")
337 (eq_attr "cpu" "7100LC,7200")) 1 1)
339 ;; I don't have complete information on the PA7200; however, most of
340 ;; what I've heard makes it look like a 7100LC without the store-store
341 ;; penalty. So that's how we'll model it.
343 ;; Memory. Disregarding Cache misses, memory loads and stores take
344 ;; two cycles. Any special cases are handled in pa_adjust_cost.
;; 7200 memory unit: loads and stores both ready in 2 cycles with no
;; issue blocking — i.e. a 7100LC without the store-store penalty
;; (issue-delay 0 instead of 2), per the note above.
345 (define_function_unit "pa7200memory" 1 0
346 (and (eq_attr "type" "load,fpload,store,fpstore")
347 (eq_attr "cpu" "7200")) 2 0)
349 ;; I don't have detailed information on the PA7200 FP pipeline, so I
350 ;; treat it just like the 7100LC pipeline.
351 ;; Similarly for the multi-issue fake units.
354 ;; Compare instructions.
355 ;; This controls RTL generation and register allocation.
357 ;; We generate RTL for comparisons and branches by having the cmpxx
358 ;; patterns store away the operands. Then, the scc and bcc patterns
359 ;; emit RTL for both the compare and the branch.
362 (define_expand "cmpsi"
364 (compare:CC (match_operand:SI 0 "reg_or_0_operand" "")
365 (match_operand:SI 1 "arith5_operand" "")))]
369 hppa_compare_op0 = operands[0];
370 hppa_compare_op1 = operands[1];
371 hppa_branch_type = CMP_SI;
375 (define_expand "cmpsf"
377 (compare:CCFP (match_operand:SF 0 "reg_or_0_operand" "")
378 (match_operand:SF 1 "reg_or_0_operand" "")))]
379 "! TARGET_SOFT_FLOAT"
382 hppa_compare_op0 = operands[0];
383 hppa_compare_op1 = operands[1];
384 hppa_branch_type = CMP_SF;
388 (define_expand "cmpdf"
390 (compare:CCFP (match_operand:DF 0 "reg_or_0_operand" "")
391 (match_operand:DF 1 "reg_or_0_operand" "")))]
392 "! TARGET_SOFT_FLOAT"
395 hppa_compare_op0 = operands[0];
396 hppa_compare_op1 = operands[1];
397 hppa_branch_type = CMP_DF;
403 (match_operator:CCFP 2 "comparison_operator"
404 [(match_operand:SF 0 "reg_or_0_operand" "fG")
405 (match_operand:SF 1 "reg_or_0_operand" "fG")]))]
406 "! TARGET_SOFT_FLOAT"
407 "fcmp,sgl,%Y2 %r0,%r1"
408 [(set_attr "length" "4")
409 (set_attr "type" "fpcc")])
413 (match_operator:CCFP 2 "comparison_operator"
414 [(match_operand:DF 0 "reg_or_0_operand" "fG")
415 (match_operand:DF 1 "reg_or_0_operand" "fG")]))]
416 "! TARGET_SOFT_FLOAT"
417 "fcmp,dbl,%Y2 %r0,%r1"
418 [(set_attr "length" "4")
419 (set_attr "type" "fpcc")])
424 [(set (match_operand:SI 0 "register_operand" "")
430 /* fp scc patterns rarely match, and are not a win on the PA. */
431 if (hppa_branch_type != CMP_SI)
433 /* set up operands from compare. */
434 operands[1] = hppa_compare_op0;
435 operands[2] = hppa_compare_op1;
436 /* fall through and generate default code */
440 [(set (match_operand:SI 0 "register_operand" "")
446 /* fp scc patterns rarely match, and are not a win on the PA. */
447 if (hppa_branch_type != CMP_SI)
449 operands[1] = hppa_compare_op0;
450 operands[2] = hppa_compare_op1;
454 [(set (match_operand:SI 0 "register_operand" "")
460 /* fp scc patterns rarely match, and are not a win on the PA. */
461 if (hppa_branch_type != CMP_SI)
463 operands[1] = hppa_compare_op0;
464 operands[2] = hppa_compare_op1;
468 [(set (match_operand:SI 0 "register_operand" "")
474 /* fp scc patterns rarely match, and are not a win on the PA. */
475 if (hppa_branch_type != CMP_SI)
477 operands[1] = hppa_compare_op0;
478 operands[2] = hppa_compare_op1;
482 [(set (match_operand:SI 0 "register_operand" "")
488 /* fp scc patterns rarely match, and are not a win on the PA. */
489 if (hppa_branch_type != CMP_SI)
491 operands[1] = hppa_compare_op0;
492 operands[2] = hppa_compare_op1;
496 [(set (match_operand:SI 0 "register_operand" "")
502 /* fp scc patterns rarely match, and are not a win on the PA. */
503 if (hppa_branch_type != CMP_SI)
505 operands[1] = hppa_compare_op0;
506 operands[2] = hppa_compare_op1;
509 (define_expand "sltu"
510 [(set (match_operand:SI 0 "register_operand" "")
511 (ltu:SI (match_dup 1)
516 if (hppa_branch_type != CMP_SI)
518 operands[1] = hppa_compare_op0;
519 operands[2] = hppa_compare_op1;
522 (define_expand "sgtu"
523 [(set (match_operand:SI 0 "register_operand" "")
524 (gtu:SI (match_dup 1)
529 if (hppa_branch_type != CMP_SI)
531 operands[1] = hppa_compare_op0;
532 operands[2] = hppa_compare_op1;
535 (define_expand "sleu"
536 [(set (match_operand:SI 0 "register_operand" "")
537 (leu:SI (match_dup 1)
542 if (hppa_branch_type != CMP_SI)
544 operands[1] = hppa_compare_op0;
545 operands[2] = hppa_compare_op1;
548 (define_expand "sgeu"
549 [(set (match_operand:SI 0 "register_operand" "")
550 (geu:SI (match_dup 1)
555 if (hppa_branch_type != CMP_SI)
557 operands[1] = hppa_compare_op0;
558 operands[2] = hppa_compare_op1;
561 ;; Instruction canonicalization puts immediate operands second, which
562 ;; is the reverse of what we want.
565 [(set (match_operand:SI 0 "register_operand" "=r")
566 (match_operator:SI 3 "comparison_operator"
567 [(match_operand:SI 1 "register_operand" "r")
568 (match_operand:SI 2 "arith11_operand" "rI")]))]
570 "com%I2clr,%B3 %2,%1,%0\;ldi 1,%0"
571 [(set_attr "type" "binary")
572 (set_attr "length" "8")])
574 (define_insn "iorscc"
575 [(set (match_operand:SI 0 "register_operand" "=r")
576 (ior:SI (match_operator:SI 3 "comparison_operator"
577 [(match_operand:SI 1 "register_operand" "r")
578 (match_operand:SI 2 "arith11_operand" "rI")])
579 (match_operator:SI 6 "comparison_operator"
580 [(match_operand:SI 4 "register_operand" "r")
581 (match_operand:SI 5 "arith11_operand" "rI")])))]
583 "com%I2clr,%S3 %2,%1,0\;com%I5clr,%B6 %5,%4,%0\;ldi 1,%0"
584 [(set_attr "type" "binary")
585 (set_attr "length" "12")])
587 ;; Combiner patterns for common operations performed with the output
588 ;; from an scc insn (negscc and incscc).
589 (define_insn "negscc"
590 [(set (match_operand:SI 0 "register_operand" "=r")
591 (neg:SI (match_operator:SI 3 "comparison_operator"
592 [(match_operand:SI 1 "register_operand" "r")
593 (match_operand:SI 2 "arith11_operand" "rI")])))]
595 "com%I2clr,%B3 %2,%1,%0\;ldi -1,%0"
596 [(set_attr "type" "binary")
597 (set_attr "length" "8")])
599 ;; Patterns for adding/subtracting the result of a boolean expression from
600 ;; a register. First we have special patterns that make use of the carry
601 ;; bit, and output only two instructions. For the cases we can't in
602 ;; general do in two instructions, the incscc pattern at the end outputs
603 ;; two or three instructions.
606 [(set (match_operand:SI 0 "register_operand" "=r")
607 (plus:SI (leu:SI (match_operand:SI 2 "register_operand" "r")
608 (match_operand:SI 3 "arith11_operand" "rI"))
609 (match_operand:SI 1 "register_operand" "r")))]
611 "sub%I3 %3,%2,0\;addc 0,%1,%0"
612 [(set_attr "type" "binary")
613 (set_attr "length" "8")])
615 ; This need only accept registers for op3, since canonicalization
616 ; replaces geu with gtu when op3 is an integer.
618 [(set (match_operand:SI 0 "register_operand" "=r")
619 (plus:SI (geu:SI (match_operand:SI 2 "register_operand" "r")
620 (match_operand:SI 3 "register_operand" "r"))
621 (match_operand:SI 1 "register_operand" "r")))]
623 "sub %2,%3,0\;addc 0,%1,%0"
624 [(set_attr "type" "binary")
625 (set_attr "length" "8")])
627 ; Match only integers for op3 here. This is used as canonical form of the
628 ; geu pattern when op3 is an integer. Don't match registers since we can't
629 ; make better code than the general incscc pattern.
631 [(set (match_operand:SI 0 "register_operand" "=r")
632 (plus:SI (gtu:SI (match_operand:SI 2 "register_operand" "r")
633 (match_operand:SI 3 "int11_operand" "I"))
634 (match_operand:SI 1 "register_operand" "r")))]
636 "addi %k3,%2,0\;addc 0,%1,%0"
637 [(set_attr "type" "binary")
638 (set_attr "length" "8")])
640 (define_insn "incscc"
641 [(set (match_operand:SI 0 "register_operand" "=r,r")
642 (plus:SI (match_operator:SI 4 "comparison_operator"
643 [(match_operand:SI 2 "register_operand" "r,r")
644 (match_operand:SI 3 "arith11_operand" "rI,rI")])
645 (match_operand:SI 1 "register_operand" "0,?r")))]
648 com%I3clr,%B4 %3,%2,0\;addi 1,%0,%0
649 com%I3clr,%B4 %3,%2,0\;addi,tr 1,%1,%0\;copy %1,%0"
650 [(set_attr "type" "binary,binary")
651 (set_attr "length" "8,12")])
654 [(set (match_operand:SI 0 "register_operand" "=r")
655 (minus:SI (match_operand:SI 1 "register_operand" "r")
656 (gtu:SI (match_operand:SI 2 "register_operand" "r")
657 (match_operand:SI 3 "arith11_operand" "rI"))))]
659 "sub%I3 %3,%2,0\;subb %1,0,%0"
660 [(set_attr "type" "binary")
661 (set_attr "length" "8")])
664 [(set (match_operand:SI 0 "register_operand" "=r")
665 (minus:SI (minus:SI (match_operand:SI 1 "register_operand" "r")
666 (gtu:SI (match_operand:SI 2 "register_operand" "r")
667 (match_operand:SI 3 "arith11_operand" "rI")))
668 (match_operand:SI 4 "register_operand" "r")))]
670 "sub%I3 %3,%2,0\;subb %1,%4,%0"
671 [(set_attr "type" "binary")
672 (set_attr "length" "8")])
674 ; This need only accept registers for op3, since canonicalization
675 ; replaces ltu with leu when op3 is an integer.
677 [(set (match_operand:SI 0 "register_operand" "=r")
678 (minus:SI (match_operand:SI 1 "register_operand" "r")
679 (ltu:SI (match_operand:SI 2 "register_operand" "r")
680 (match_operand:SI 3 "register_operand" "r"))))]
682 "sub %2,%3,0\;subb %1,0,%0"
683 [(set_attr "type" "binary")
684 (set_attr "length" "8")])
687 [(set (match_operand:SI 0 "register_operand" "=r")
688 (minus:SI (minus:SI (match_operand:SI 1 "register_operand" "r")
689 (ltu:SI (match_operand:SI 2 "register_operand" "r")
690 (match_operand:SI 3 "register_operand" "r")))
691 (match_operand:SI 4 "register_operand" "r")))]
693 "sub %2,%3,0\;subb %1,%4,%0"
694 [(set_attr "type" "binary")
695 (set_attr "length" "8")])
697 ; Match only integers for op3 here. This is used as canonical form of the
698 ; ltu pattern when op3 is an integer. Don't match registers since we can't
699 ; make better code than the general incscc pattern.
701 [(set (match_operand:SI 0 "register_operand" "=r")
702 (minus:SI (match_operand:SI 1 "register_operand" "r")
703 (leu:SI (match_operand:SI 2 "register_operand" "r")
704 (match_operand:SI 3 "int11_operand" "I"))))]
706 "addi %k3,%2,0\;subb %1,0,%0"
707 [(set_attr "type" "binary")
708 (set_attr "length" "8")])
711 [(set (match_operand:SI 0 "register_operand" "=r")
712 (minus:SI (minus:SI (match_operand:SI 1 "register_operand" "r")
713 (leu:SI (match_operand:SI 2 "register_operand" "r")
714 (match_operand:SI 3 "int11_operand" "I")))
715 (match_operand:SI 4 "register_operand" "r")))]
717 "addi %k3,%2,0\;subb %1,%4,%0"
718 [(set_attr "type" "binary")
719 (set_attr "length" "8")])
721 (define_insn "decscc"
722 [(set (match_operand:SI 0 "register_operand" "=r,r")
723 (minus:SI (match_operand:SI 1 "register_operand" "0,?r")
724 (match_operator:SI 4 "comparison_operator"
725 [(match_operand:SI 2 "register_operand" "r,r")
726 (match_operand:SI 3 "arith11_operand" "rI,rI")])))]
729 com%I3clr,%B4 %3,%2,0\;addi -1,%0,%0
730 com%I3clr,%B4 %3,%2,0\;addi,tr -1,%1,%0\;copy %1,%0"
731 [(set_attr "type" "binary,binary")
732 (set_attr "length" "8,12")])
734 ; Patterns for max and min. (There is no need for an earlyclobber in the
735 ; last alternative since the middle alternative will match if op0 == op1.)
737 (define_insn "sminsi3"
738 [(set (match_operand:SI 0 "register_operand" "=r,r,r")
739 (smin:SI (match_operand:SI 1 "register_operand" "%0,0,r")
740 (match_operand:SI 2 "arith11_operand" "r,I,M")))]
743 comclr,> %2,%0,0\;copy %2,%0
744 comiclr,> %2,%0,0\;ldi %2,%0
745 comclr,> %1,%2,%0\;copy %1,%0"
746 [(set_attr "type" "multi,multi,multi")
747 (set_attr "length" "8,8,8")])
749 (define_insn "uminsi3"
750 [(set (match_operand:SI 0 "register_operand" "=r,r")
751 (umin:SI (match_operand:SI 1 "register_operand" "%0,0")
752 (match_operand:SI 2 "arith11_operand" "r,I")))]
755 comclr,>> %2,%0,0\;copy %2,%0
756 comiclr,>> %2,%0,0\;ldi %2,%0"
757 [(set_attr "type" "multi,multi")
758 (set_attr "length" "8,8")])
760 (define_insn "smaxsi3"
761 [(set (match_operand:SI 0 "register_operand" "=r,r,r")
762 (smax:SI (match_operand:SI 1 "register_operand" "%0,0,r")
763 (match_operand:SI 2 "arith11_operand" "r,I,M")))]
766 comclr,< %2,%0,0\;copy %2,%0
767 comiclr,< %2,%0,0\;ldi %2,%0
768 comclr,< %1,%2,%0\;copy %1,%0"
769 [(set_attr "type" "multi,multi,multi")
770 (set_attr "length" "8,8,8")])
772 (define_insn "umaxsi3"
773 [(set (match_operand:SI 0 "register_operand" "=r,r")
774 (umax:SI (match_operand:SI 1 "register_operand" "%0,0")
775 (match_operand:SI 2 "arith11_operand" "r,I")))]
778 comclr,<< %2,%0,0\;copy %2,%0
779 comiclr,<< %2,%0,0\;ldi %2,%0"
780 [(set_attr "type" "multi,multi")
781 (set_attr "length" "8,8")])
783 (define_insn "abssi2"
784 [(set (match_operand:SI 0 "register_operand" "=r")
785 (abs:SI (match_operand:SI 1 "register_operand" "r")))]
787 "or,>= %%r0,%1,%0\;subi 0,%0,%0"
788 [(set_attr "type" "multi")
789 (set_attr "length" "8")])
791 ;;; Experimental conditional move patterns
793 (define_expand "movsicc"
794 [(set (match_operand:SI 0 "register_operand" "")
796 (match_operator 1 "comparison_operator"
799 (match_operand:SI 2 "reg_or_cint_move_operand" "")
800 (match_operand:SI 3 "reg_or_cint_move_operand" "")))]
804 enum rtx_code code = GET_CODE (operands[1]);
806 if (hppa_branch_type != CMP_SI)
809 /* operands[1] is currently the result of compare_from_rtx. We want to
810 emit a compare of the original operands. */
811 operands[1] = gen_rtx_fmt_ee (code, SImode, hppa_compare_op0, hppa_compare_op1);
812 operands[4] = hppa_compare_op0;
813 operands[5] = hppa_compare_op1;
816 ; We need the first constraint alternative in order to avoid
817 ; earlyclobbers on all other alternatives.
819 [(set (match_operand:SI 0 "register_operand" "=r,r,r,r,r")
821 (match_operator 5 "comparison_operator"
822 [(match_operand:SI 3 "register_operand" "r,r,r,r,r")
823 (match_operand:SI 4 "arith11_operand" "rI,rI,rI,rI,rI")])
824 (match_operand:SI 1 "reg_or_cint_move_operand" "0,r,J,N,K")
828 com%I4clr,%S5 %4,%3,0\;ldi 0,%0
829 com%I4clr,%B5 %4,%3,%0\;copy %1,%0
830 com%I4clr,%B5 %4,%3,%0\;ldi %1,%0
831 com%I4clr,%B5 %4,%3,%0\;ldil L'%1,%0
832 com%I4clr,%B5 %4,%3,%0\;zdepi %Z1,%0"
833 [(set_attr "type" "multi,multi,multi,multi,nullshift")
834 (set_attr "length" "8,8,8,8,8")])
837 [(set (match_operand:SI 0 "register_operand" "=r,r,r,r,r,r,r,r")
839 (match_operator 5 "comparison_operator"
840 [(match_operand:SI 3 "register_operand" "r,r,r,r,r,r,r,r")
841 (match_operand:SI 4 "arith11_operand" "rI,rI,rI,rI,rI,rI,rI,rI")])
842 (match_operand:SI 1 "reg_or_cint_move_operand" "0,0,0,0,r,J,N,K")
843 (match_operand:SI 2 "reg_or_cint_move_operand" "r,J,N,K,0,0,0,0")))]
846 com%I4clr,%S5 %4,%3,0\;copy %2,%0
847 com%I4clr,%S5 %4,%3,0\;ldi %2,%0
848 com%I4clr,%S5 %4,%3,0\;ldil L'%2,%0
849 com%I4clr,%S5 %4,%3,0\;zdepi %Z2,%0
850 com%I4clr,%B5 %4,%3,0\;copy %1,%0
851 com%I4clr,%B5 %4,%3,0\;ldi %1,%0
852 com%I4clr,%B5 %4,%3,0\;ldil L'%1,%0
853 com%I4clr,%B5 %4,%3,0\;zdepi %Z1,%0"
854 [(set_attr "type" "multi,multi,multi,nullshift,multi,multi,multi,nullshift")
855 (set_attr "length" "8,8,8,8,8,8,8,8")])
857 ;; Conditional Branches
861 (if_then_else (eq (match_dup 1) (match_dup 2))
862 (label_ref (match_operand 0 "" ""))
867 if (hppa_branch_type != CMP_SI)
869 emit_insn (gen_cmp_fp (EQ, hppa_compare_op0, hppa_compare_op1));
870 emit_bcond_fp (NE, operands[0]);
873 /* set up operands from compare. */
874 operands[1] = hppa_compare_op0;
875 operands[2] = hppa_compare_op1;
876 /* fall through and generate default code */
881 (if_then_else (ne (match_dup 1) (match_dup 2))
882 (label_ref (match_operand 0 "" ""))
887 if (hppa_branch_type != CMP_SI)
889 emit_insn (gen_cmp_fp (NE, hppa_compare_op0, hppa_compare_op1));
890 emit_bcond_fp (NE, operands[0]);
893 operands[1] = hppa_compare_op0;
894 operands[2] = hppa_compare_op1;
899 (if_then_else (gt (match_dup 1) (match_dup 2))
900 (label_ref (match_operand 0 "" ""))
905 if (hppa_branch_type != CMP_SI)
907 emit_insn (gen_cmp_fp (GT, hppa_compare_op0, hppa_compare_op1));
908 emit_bcond_fp (NE, operands[0]);
911 operands[1] = hppa_compare_op0;
912 operands[2] = hppa_compare_op1;
917 (if_then_else (lt (match_dup 1) (match_dup 2))
918 (label_ref (match_operand 0 "" ""))
923 if (hppa_branch_type != CMP_SI)
925 emit_insn (gen_cmp_fp (LT, hppa_compare_op0, hppa_compare_op1));
926 emit_bcond_fp (NE, operands[0]);
929 operands[1] = hppa_compare_op0;
930 operands[2] = hppa_compare_op1;
935 (if_then_else (ge (match_dup 1) (match_dup 2))
936 (label_ref (match_operand 0 "" ""))
941 if (hppa_branch_type != CMP_SI)
943 emit_insn (gen_cmp_fp (GE, hppa_compare_op0, hppa_compare_op1));
944 emit_bcond_fp (NE, operands[0]);
947 operands[1] = hppa_compare_op0;
948 operands[2] = hppa_compare_op1;
953 (if_then_else (le (match_dup 1) (match_dup 2))
954 (label_ref (match_operand 0 "" ""))
959 if (hppa_branch_type != CMP_SI)
961 emit_insn (gen_cmp_fp (LE, hppa_compare_op0, hppa_compare_op1));
962 emit_bcond_fp (NE, operands[0]);
965 operands[1] = hppa_compare_op0;
966 operands[2] = hppa_compare_op1;
969 (define_expand "bgtu"
971 (if_then_else (gtu (match_dup 1) (match_dup 2))
972 (label_ref (match_operand 0 "" ""))
977 if (hppa_branch_type != CMP_SI)
979 operands[1] = hppa_compare_op0;
980 operands[2] = hppa_compare_op1;
983 (define_expand "bltu"
985 (if_then_else (ltu (match_dup 1) (match_dup 2))
986 (label_ref (match_operand 0 "" ""))
991 if (hppa_branch_type != CMP_SI)
993 operands[1] = hppa_compare_op0;
994 operands[2] = hppa_compare_op1;
997 (define_expand "bgeu"
999 (if_then_else (geu (match_dup 1) (match_dup 2))
1000 (label_ref (match_operand 0 "" ""))
1005 if (hppa_branch_type != CMP_SI)
1007 operands[1] = hppa_compare_op0;
1008 operands[2] = hppa_compare_op1;
1011 (define_expand "bleu"
1013 (if_then_else (leu (match_dup 1) (match_dup 2))
1014 (label_ref (match_operand 0 "" ""))
1019 if (hppa_branch_type != CMP_SI)
1021 operands[1] = hppa_compare_op0;
1022 operands[2] = hppa_compare_op1;
1025 ;; Match the branch patterns.
1028 ;; Note a long backward conditional branch with an annulled delay slot
1029 ;; has a length of 12.
1033 (match_operator 3 "comparison_operator"
1034 [(match_operand:SI 1 "reg_or_0_operand" "rM")
1035 (match_operand:SI 2 "arith5_operand" "rL")])
1036 (label_ref (match_operand 0 "" ""))
1041 return output_cbranch (operands, INSN_ANNULLED_BRANCH_P (insn),
1042 get_attr_length (insn), 0, insn);
1044 [(set_attr "type" "cbranch")
1045 (set (attr "length")
1046 (cond [(lt (abs (minus (match_dup 0) (plus (pc) (const_int 8))))
1049 (lt (abs (minus (match_dup 0) (plus (pc) (const_int 8))))
1052 (eq (symbol_ref "flag_pic") (const_int 0))
1056 ;; Match the negated branch.
1061 (match_operator 3 "comparison_operator"
1062 [(match_operand:SI 1 "reg_or_0_operand" "rM")
1063 (match_operand:SI 2 "arith5_operand" "rL")])
1065 (label_ref (match_operand 0 "" ""))))]
1069 return output_cbranch (operands, INSN_ANNULLED_BRANCH_P (insn),
1070 get_attr_length (insn), 1, insn);
1072 [(set_attr "type" "cbranch")
1073 (set (attr "length")
1074 (cond [(lt (abs (minus (match_dup 0) (plus (pc) (const_int 8))))
1077 (lt (abs (minus (match_dup 0) (plus (pc) (const_int 8))))
1080 (eq (symbol_ref "flag_pic") (const_int 0))
1084 ;; Branch on Bit patterns.
1088 (ne (zero_extract:SI (match_operand:SI 0 "register_operand" "r")
1090 (match_operand:SI 1 "uint5_operand" ""))
1092 (label_ref (match_operand 2 "" ""))
1097 return output_bb (operands, INSN_ANNULLED_BRANCH_P (insn),
1098 get_attr_length (insn), 0, insn, 0);
1100 [(set_attr "type" "cbranch")
1101 (set (attr "length")
1102 (if_then_else (lt (abs (minus (match_dup 2) (plus (pc) (const_int 8))))
1110 (ne (zero_extract:SI (match_operand:SI 0 "register_operand" "r")
1112 (match_operand:SI 1 "uint5_operand" ""))
1115 (label_ref (match_operand 2 "" ""))))]
1119 return output_bb (operands, INSN_ANNULLED_BRANCH_P (insn),
1120 get_attr_length (insn), 1, insn, 0);
1122 [(set_attr "type" "cbranch")
1123 (set (attr "length")
1124 (if_then_else (lt (abs (minus (match_dup 2) (plus (pc) (const_int 8))))
1132 (eq (zero_extract:SI (match_operand:SI 0 "register_operand" "r")
1134 (match_operand:SI 1 "uint5_operand" ""))
1136 (label_ref (match_operand 2 "" ""))
1141 return output_bb (operands, INSN_ANNULLED_BRANCH_P (insn),
1142 get_attr_length (insn), 0, insn, 1);
1144 [(set_attr "type" "cbranch")
1145 (set (attr "length")
1146 (if_then_else (lt (abs (minus (match_dup 2) (plus (pc) (const_int 8))))
1154 (eq (zero_extract:SI (match_operand:SI 0 "register_operand" "r")
1156 (match_operand:SI 1 "uint5_operand" ""))
1159 (label_ref (match_operand 2 "" ""))))]
1163 return output_bb (operands, INSN_ANNULLED_BRANCH_P (insn),
1164 get_attr_length (insn), 1, insn, 1);
1166 [(set_attr "type" "cbranch")
1167 (set (attr "length")
1168 (if_then_else (lt (abs (minus (match_dup 2) (plus (pc) (const_int 8))))
1173 ;; Branch on Variable Bit patterns.
1177 (ne (zero_extract:SI (match_operand:SI 0 "register_operand" "r")
1179 (match_operand:SI 1 "register_operand" "q"))
1181 (label_ref (match_operand 2 "" ""))
1186 return output_bvb (operands, INSN_ANNULLED_BRANCH_P (insn),
1187 get_attr_length (insn), 0, insn, 0);
1189 [(set_attr "type" "cbranch")
1190 (set (attr "length")
1191 (if_then_else (lt (abs (minus (match_dup 2) (plus (pc) (const_int 8))))
1199 (ne (zero_extract:SI (match_operand:SI 0 "register_operand" "r")
1201 (match_operand:SI 1 "register_operand" "q"))
1204 (label_ref (match_operand 2 "" ""))))]
1208 return output_bvb (operands, INSN_ANNULLED_BRANCH_P (insn),
1209 get_attr_length (insn), 1, insn, 0);
1211 [(set_attr "type" "cbranch")
1212 (set (attr "length")
1213 (if_then_else (lt (abs (minus (match_dup 2) (plus (pc) (const_int 8))))
1221 (eq (zero_extract:SI (match_operand:SI 0 "register_operand" "r")
1223 (match_operand:SI 1 "register_operand" "q"))
1225 (label_ref (match_operand 2 "" ""))
1230 return output_bvb (operands, INSN_ANNULLED_BRANCH_P (insn),
1231 get_attr_length (insn), 0, insn, 1);
1233 [(set_attr "type" "cbranch")
1234 (set (attr "length")
1235 (if_then_else (lt (abs (minus (match_dup 2) (plus (pc) (const_int 8))))
1243 (eq (zero_extract:SI (match_operand:SI 0 "register_operand" "r")
1245 (match_operand:SI 1 "register_operand" "q"))
1248 (label_ref (match_operand 2 "" ""))))]
1252 return output_bvb (operands, INSN_ANNULLED_BRANCH_P (insn),
1253 get_attr_length (insn), 1, insn, 1);
1255 [(set_attr "type" "cbranch")
1256 (set (attr "length")
1257 (if_then_else (lt (abs (minus (match_dup 2) (plus (pc) (const_int 8))))
1262 ;; Floating point branches
1264 [(set (pc) (if_then_else (ne (reg:CCFP 0) (const_int 0))
1265 (label_ref (match_operand 0 "" ""))
1267 "! TARGET_SOFT_FLOAT"
1270 if (INSN_ANNULLED_BRANCH_P (insn))
1271 return \"ftest\;bl,n %0,0\";
1273 return \"ftest\;bl%* %0,0\";
1275 [(set_attr "type" "fbranch")
1276 (set_attr "length" "8")])
1279 [(set (pc) (if_then_else (ne (reg:CCFP 0) (const_int 0))
1281 (label_ref (match_operand 0 "" ""))))]
1282 "! TARGET_SOFT_FLOAT"
1285 if (INSN_ANNULLED_BRANCH_P (insn))
1286 return \"ftest\;add,tr 0,0,0\;bl,n %0,0\";
1288 return \"ftest\;add,tr 0,0,0\;bl%* %0,0\";
1290 [(set_attr "type" "fbranch")
1291 (set_attr "length" "12")])
1293 ;; Move instructions
1295 (define_expand "movsi"
1296 [(set (match_operand:SI 0 "general_operand" "")
1297 (match_operand:SI 1 "general_operand" ""))]
1301 if (emit_move_sequence (operands, SImode, 0))
1305 ;; Reloading an SImode or DImode value requires a scratch register if
1306 ;; going into or out of floating point registers.
1308 (define_expand "reload_insi"
1309 [(set (match_operand:SI 0 "register_operand" "=Z")
1310 (match_operand:SI 1 "non_hard_reg_operand" ""))
1311 (clobber (match_operand:SI 2 "register_operand" "=&r"))]
1315 if (emit_move_sequence (operands, SImode, operands[2]))
1318 /* We don't want the clobber emitted, so handle this ourselves. */
1319 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
1323 (define_expand "reload_outsi"
1324 [(set (match_operand:SI 0 "non_hard_reg_operand" "")
1325 (match_operand:SI 1 "register_operand" "Z"))
1326 (clobber (match_operand:SI 2 "register_operand" "=&r"))]
1330 if (emit_move_sequence (operands, SImode, operands[2]))
1333 /* We don't want the clobber emitted, so handle this ourselves. */
1334 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
1338 ;;; pic symbol references
1341 [(set (match_operand:SI 0 "register_operand" "=r")
1342 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "r")
1343 (match_operand:SI 2 "symbolic_operand" ""))))]
1344 "flag_pic && operands[1] == pic_offset_table_rtx"
1346 [(set_attr "type" "load")
1347 (set_attr "length" "4")])
1350 [(set (match_operand:SI 0 "reg_or_nonsymb_mem_operand"
1351 "=r,r,r,r,r,Q,*q,!f,f,*TR")
1352 (match_operand:SI 1 "move_operand"
1353 "r,J,N,K,RQ,rM,rM,!fM,*RT,f"))]
1354 "(register_operand (operands[0], SImode)
1355 || reg_or_0_operand (operands[1], SImode))
1356 && ! TARGET_SOFT_FLOAT"
1368 [(set_attr "type" "move,move,move,shift,load,store,move,fpalu,fpload,fpstore")
1369 (set_attr "pa_combine_type" "addmove")
1370 (set_attr "length" "4,4,4,4,4,4,4,4,4,4")])
1373 [(set (match_operand:SI 0 "reg_or_nonsymb_mem_operand"
1375 (match_operand:SI 1 "move_operand"
1376 "r,J,N,K,RQ,rM,rM"))]
1377 "(register_operand (operands[0], SImode)
1378 || reg_or_0_operand (operands[1], SImode))
1379 && TARGET_SOFT_FLOAT"
1388 [(set_attr "type" "move,move,move,move,load,store,move")
1389 (set_attr "pa_combine_type" "addmove")
1390 (set_attr "length" "4,4,4,4,4,4,4")])
1393 [(set (match_operand:SI 0 "register_operand" "=r")
1394 (mem:SI (plus:SI (match_operand:SI 1 "basereg_operand" "r")
1395 (match_operand:SI 2 "register_operand" "r"))))]
1396 "! TARGET_DISABLE_INDEXING"
1399 /* Reload can create backwards (relative to cse) unscaled index
1400 address modes when eliminating registers and possibly for
1401 pseudos that don't get hard registers. Deal with it. */
1402 if (operands[2] == hard_frame_pointer_rtx
1403 || operands[2] == stack_pointer_rtx)
1404 return \"ldwx %1(0,%2),%0\";
1406 return \"ldwx %2(0,%1),%0\";
1408 [(set_attr "type" "load")
1409 (set_attr "length" "4")])
1412 [(set (match_operand:SI 0 "register_operand" "=r")
1413 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "r")
1414 (match_operand:SI 2 "basereg_operand" "r"))))]
1415 "! TARGET_DISABLE_INDEXING"
1418 /* Reload can create backwards (relative to cse) unscaled index
1419 address modes when eliminating registers and possibly for
1420 pseudos that don't get hard registers. Deal with it. */
1421 if (operands[1] == hard_frame_pointer_rtx
1422 || operands[1] == stack_pointer_rtx)
1423 return \"ldwx %2(0,%1),%0\";
1425 return \"ldwx %1(0,%2),%0\";
1427 [(set_attr "type" "load")
1428 (set_attr "length" "4")])
1430 ;; Load or store with base-register modification.
1432 (define_insn "pre_ldwm"
1433 [(set (match_operand:SI 0 "register_operand" "=r")
1434 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "+r")
1435 (match_operand:SI 2 "pre_cint_operand" ""))))
1437 (plus:SI (match_dup 1) (match_dup 2)))]
1441 if (INTVAL (operands[2]) < 0)
1442 return \"ldwm %2(0,%1),%0\";
1443 return \"ldws,mb %2(0,%1),%0\";
1445 [(set_attr "type" "load")
1446 (set_attr "length" "4")])
1448 (define_insn "pre_stwm"
1449 [(set (mem:SI (plus:SI (match_operand:SI 0 "register_operand" "+r")
1450 (match_operand:SI 1 "pre_cint_operand" "")))
1451 (match_operand:SI 2 "reg_or_0_operand" "rM"))
1453 (plus:SI (match_dup 0) (match_dup 1)))]
1457 if (INTVAL (operands[1]) < 0)
1458 return \"stwm %r2,%1(0,%0)\";
1459 return \"stws,mb %r2,%1(0,%0)\";
1461 [(set_attr "type" "store")
1462 (set_attr "length" "4")])
1464 (define_insn "post_ldwm"
1465 [(set (match_operand:SI 0 "register_operand" "=r")
1466 (mem:SI (match_operand:SI 1 "register_operand" "+r")))
1468 (plus:SI (match_dup 1)
1469 (match_operand:SI 2 "post_cint_operand" "")))]
1473 if (INTVAL (operands[2]) > 0)
1474 return \"ldwm %2(0,%1),%0\";
1475 return \"ldws,ma %2(0,%1),%0\";
1477 [(set_attr "type" "load")
1478 (set_attr "length" "4")])
1480 (define_insn "post_stwm"
1481 [(set (mem:SI (match_operand:SI 0 "register_operand" "+r"))
1482 (match_operand:SI 1 "reg_or_0_operand" "rM"))
1484 (plus:SI (match_dup 0)
1485 (match_operand:SI 2 "post_cint_operand" "")))]
1489 if (INTVAL (operands[2]) > 0)
1490 return \"stwm %r1,%2(0,%0)\";
1491 return \"stws,ma %r1,%2(0,%0)\";
1493 [(set_attr "type" "store")
1494 (set_attr "length" "4")])
1497 ;; Note since this pattern can be created at reload time (via movsi), all
1498 ;; the same rules for movsi apply here. (no new pseudos, no temporaries).
1499 (define_insn "pic_load_label"
1500 [(set (match_operand:SI 0 "register_operand" "=a")
1501 (match_operand:SI 1 "pic_label_operand" ""))]
1505 rtx label_rtx = gen_label_rtx ();
1507 extern FILE *asm_out_file;
1509 xoperands[0] = operands[0];
1510 xoperands[1] = operands[1];
1511 xoperands[2] = label_rtx;
1512 output_asm_insn (\"bl .+8,%0\", xoperands);
1513 output_asm_insn (\"depi 0,31,2,%0\", xoperands);
1514 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, \"L\",
1515 CODE_LABEL_NUMBER (label_rtx));
1517 /* If we're trying to load the address of a label that happens to be
1518 close, then we can use a shorter sequence. */
1519 if (GET_CODE (operands[1]) == LABEL_REF
1521 && abs (insn_addresses[INSN_UID (XEXP (operands[1], 0))]
1522 - insn_addresses[INSN_UID (insn)]) < 8100)
1524 /* Prefixing with R% here is wrong, it extracts just 11 bits and is
1525 always non-negative. */
1526 output_asm_insn (\"ldo %1-%2(%0),%0\", xoperands);
1530 output_asm_insn (\"addil L%%%1-%2,%0\", xoperands);
1531 output_asm_insn (\"ldo R%%%1-%2(%0),%0\", xoperands);
1535 [(set_attr "type" "multi")
1536 (set_attr "length" "16")]) ; 12 or 16
1538 (define_insn "pic2_highpart"
1539 [(set (match_operand:SI 0 "register_operand" "=a")
1540 (plus:SI (match_operand:SI 1 "register_operand" "r")
1541 (high:SI (match_operand 2 "" ""))))]
1542 "symbolic_operand (operands[2], Pmode)
1543 && ! function_label_operand (operands[2])
1546 [(set_attr "type" "binary")
1547 (set_attr "length" "4")])
1549 ; We need this to make sure CSE doesn't simplify a memory load with a
1550 ; symbolic address, whose content it thinks it knows. For PIC, what CSE
1551 ; thinks is the real value will be the address of that value.
1552 (define_insn "pic2_lo_sum"
1553 [(set (match_operand:SI 0 "register_operand" "=r")
1554 (mem:SI (lo_sum:SI (match_operand:SI 1 "register_operand" "r")
1555 (unspec:SI [(match_operand:SI 2 "symbolic_operand" "")] 0))))]
1561 return \"ldw RT'%G2(%1),%0\";
1563 [(set_attr "type" "load")
1564 (set_attr "length" "4")])
1567 ;; Always use addil rather than ldil;add sequences. This allows the
1568 ;; HP linker to eliminate the dp relocation if the symbolic operand
1569 ;; lives in the TEXT space.
1571 [(set (match_operand:SI 0 "register_operand" "=a")
1572 (high:SI (match_operand 1 "" "")))]
1573 "symbolic_operand (operands[1], Pmode)
1574 && ! function_label_operand (operands[1])
1575 && ! read_only_operand (operands[1])
1579 if (TARGET_LONG_LOAD_STORE)
1580 return \"addil NLR'%H1,%%r27\;ldo N'%H1(%%r1),%%r1\";
1582 return \"addil LR'%H1,%%r27\";
1584 [(set_attr "type" "binary")
1585 (set (attr "length")
1586 (if_then_else (eq (symbol_ref "TARGET_LONG_LOAD_STORE") (const_int 0))
1591 ;; This is for use in the prologue/epilogue code. We need it
1592 ;; to add large constants to a stack pointer or frame pointer.
1593 ;; Because of the additional %r1 pressure, we probably do not
1594 ;; want to use this in general code, so make it available
1595 ;; only after reload.
1596 (define_insn "add_high_const"
1597 [(set (match_operand:SI 0 "register_operand" "=!a,*r")
1598 (plus:SI (match_operand:SI 1 "register_operand" "r,r")
1599 (high:SI (match_operand 2 "const_int_operand" ""))))]
1603 ldil L'%G2,%0\;addl %0,%1,%0"
1604 [(set_attr "type" "binary,binary")
1605 (set_attr "length" "4,8")])
;; Load the high part of a constant or symbolic address with ldil.
;; NOTE(review): fixed the insn condition — Pmode sat outside
;; symbolic_operand's argument list, so the old string
;;   "(!flag_pic || !symbolic_operand (operands[1]), Pmode)"
;; was a C comma expression that discarded the symbolic_operand test
;; and always evaluated to Pmode (nonzero).  Pmode belongs inside the
;; call, matching the symbolic_operand (operands[1], Pmode) use below.
1608 [(set (match_operand:SI 0 "register_operand" "=r")
1609 (high:SI (match_operand 1 "" "")))]
1610 "(!flag_pic || !symbolic_operand (operands[1], Pmode))
1611 && !is_function_label_plus_const (operands[1])"
1614 if (symbolic_operand (operands[1], Pmode))
1615 return \"ldil LR'%H1,%0\";
1617 return \"ldil L'%G1,%0\";
1619 [(set_attr "type" "move")
1620 (set_attr "length" "4")])
1623 [(set (match_operand:SI 0 "register_operand" "=r")
1624 (lo_sum:SI (match_operand:SI 1 "register_operand" "r")
1625 (match_operand:SI 2 "immediate_operand" "i")))]
1626 "!is_function_label_plus_const (operands[2])"
1629 if (flag_pic && symbolic_operand (operands[2], Pmode))
1631 else if (symbolic_operand (operands[2], Pmode))
1632 return \"ldo RR'%G2(%1),%0\";
1634 return \"ldo R'%G2(%1),%0\";
1636 [(set_attr "type" "move")
1637 (set_attr "length" "4")])
1639 ;; Now that a symbolic_address plus a constant is broken up early
1640 ;; in the compilation phase (for better CSE) we need a special
1641 ;; combiner pattern to load the symbolic address plus the constant
1642 ;; in only 2 instructions. (For cases where the symbolic address
1643 ;; was not a common subexpression.)
1645 [(set (match_operand:SI 0 "register_operand" "")
1646 (match_operand:SI 1 "symbolic_operand" ""))
1647 (clobber (match_operand:SI 2 "register_operand" ""))]
1648 "! (flag_pic && pic_label_operand (operands[1], SImode))"
1649 [(set (match_dup 2) (high:SI (match_dup 1)))
1650 (set (match_dup 0) (lo_sum:SI (match_dup 2) (match_dup 1)))]
1653 ;; hppa_legitimize_address goes to a great deal of trouble to
1654 ;; create addresses which use indexing. In some cases, this
1655 ;; is a loss because there aren't any store instructions which
1656 ;; allow indexed addresses (with integer register source).
1658 ;; These define_splits try to turn a 3 insn store into
1659 ;; a 2 insn store with some creative RTL rewriting.
1661 [(set (mem:SI (plus:SI (mult:SI (match_operand:SI 0 "register_operand" "")
1662 (match_operand:SI 1 "shadd_operand" ""))
1663 (plus:SI (match_operand:SI 2 "register_operand" "")
1664 (match_operand:SI 3 "const_int_operand" ""))))
1665 (match_operand:SI 4 "register_operand" ""))
1666 (clobber (match_operand:SI 5 "register_operand" ""))]
1668 [(set (match_dup 5) (plus:SI (mult:SI (match_dup 0) (match_dup 1))
1670 (set (mem:SI (plus:SI (match_dup 5) (match_dup 3))) (match_dup 4))]
1674 [(set (mem:HI (plus:SI (mult:SI (match_operand:SI 0 "register_operand" "")
1675 (match_operand:SI 1 "shadd_operand" ""))
1676 (plus:SI (match_operand:SI 2 "register_operand" "")
1677 (match_operand:SI 3 "const_int_operand" ""))))
1678 (match_operand:HI 4 "register_operand" ""))
1679 (clobber (match_operand:SI 5 "register_operand" ""))]
1681 [(set (match_dup 5) (plus:SI (mult:SI (match_dup 0) (match_dup 1))
1683 (set (mem:HI (plus:SI (match_dup 5) (match_dup 3))) (match_dup 4))]
1687 [(set (mem:QI (plus:SI (mult:SI (match_operand:SI 0 "register_operand" "")
1688 (match_operand:SI 1 "shadd_operand" ""))
1689 (plus:SI (match_operand:SI 2 "register_operand" "")
1690 (match_operand:SI 3 "const_int_operand" ""))))
1691 (match_operand:QI 4 "register_operand" ""))
1692 (clobber (match_operand:SI 5 "register_operand" ""))]
1694 [(set (match_dup 5) (plus:SI (mult:SI (match_dup 0) (match_dup 1))
1696 (set (mem:QI (plus:SI (match_dup 5) (match_dup 3))) (match_dup 4))]
1699 (define_expand "movhi"
1700 [(set (match_operand:HI 0 "general_operand" "")
1701 (match_operand:HI 1 "general_operand" ""))]
1705 if (emit_move_sequence (operands, HImode, 0))
1710 [(set (match_operand:HI 0 "reg_or_nonsymb_mem_operand" "=r,r,r,r,r,Q,*q,!f")
1711 (match_operand:HI 1 "move_operand" "r,J,N,K,RQ,rM,rM,!fM"))]
1712 "register_operand (operands[0], HImode)
1713 || reg_or_0_operand (operands[1], HImode)"
1723 [(set_attr "type" "move,move,move,shift,load,store,move,fpalu")
1724 (set_attr "pa_combine_type" "addmove")
1725 (set_attr "length" "4,4,4,4,4,4,4,4")])
1728 [(set (match_operand:HI 0 "register_operand" "=r")
1729 (mem:HI (plus:SI (match_operand:SI 1 "basereg_operand" "r")
1730 (match_operand:SI 2 "register_operand" "r"))))]
1731 "! TARGET_DISABLE_INDEXING"
1734 /* Reload can create backwards (relative to cse) unscaled index
1735 address modes when eliminating registers and possibly for
1736 pseudos that don't get hard registers. Deal with it. */
1737 if (operands[2] == hard_frame_pointer_rtx
1738 || operands[2] == stack_pointer_rtx)
1739 return \"ldhx %1(0,%2),%0\";
1741 return \"ldhx %2(0,%1),%0\";
1743 [(set_attr "type" "load")
1744 (set_attr "length" "4")])
1747 [(set (match_operand:HI 0 "register_operand" "=r")
1748 (mem:HI (plus:SI (match_operand:SI 1 "register_operand" "r")
1749 (match_operand:SI 2 "basereg_operand" "r"))))]
1750 "! TARGET_DISABLE_INDEXING"
1753 /* Reload can create backwards (relative to cse) unscaled index
1754 address modes when eliminating registers and possibly for
1755 pseudos that don't get hard registers. Deal with it. */
1756 if (operands[1] == hard_frame_pointer_rtx
1757 || operands[1] == stack_pointer_rtx)
1758 return \"ldhx %2(0,%1),%0\";
1760 return \"ldhx %1(0,%2),%0\";
1762 [(set_attr "type" "load")
1763 (set_attr "length" "4")])
1765 ; Now zero extended variants.
1767 [(set (match_operand:SI 0 "register_operand" "=r")
1768 (zero_extend:SI (mem:HI
1770 (match_operand:SI 1 "basereg_operand" "r")
1771 (match_operand:SI 2 "register_operand" "r")))))]
1772 "! TARGET_DISABLE_INDEXING"
1775 /* Reload can create backwards (relative to cse) unscaled index
1776 address modes when eliminating registers and possibly for
1777 pseudos that don't get hard registers. Deal with it. */
1778 if (operands[2] == hard_frame_pointer_rtx
1779 || operands[2] == stack_pointer_rtx)
1780 return \"ldhx %1(0,%2),%0\";
1782 return \"ldhx %2(0,%1),%0\";
1784 [(set_attr "type" "load")
1785 (set_attr "length" "4")])
1788 [(set (match_operand:SI 0 "register_operand" "=r")
1789 (zero_extend:SI (mem:HI
1791 (match_operand:SI 1 "register_operand" "r")
1792 (match_operand:SI 2 "basereg_operand" "r")))))]
1793 "! TARGET_DISABLE_INDEXING"
1796 /* Reload can create backwards (relative to cse) unscaled index
1797 address modes when eliminating registers and possibly for
1798 pseudos that don't get hard registers. Deal with it. */
1799 if (operands[1] == hard_frame_pointer_rtx
1800 || operands[1] == stack_pointer_rtx)
1801 return \"ldhx %2(0,%1),%0\";
1803 return \"ldhx %1(0,%2),%0\";
1805 [(set_attr "type" "load")
1806 (set_attr "length" "4")])
1809 [(set (match_operand:HI 0 "register_operand" "=r")
1810 (mem:HI (plus:SI (match_operand:SI 1 "register_operand" "+r")
1811 (match_operand:SI 2 "int5_operand" "L"))))
1813 (plus:SI (match_dup 1) (match_dup 2)))]
1815 "ldhs,mb %2(0,%1),%0"
1816 [(set_attr "type" "load")
1817 (set_attr "length" "4")])
1819 ; And a zero extended variant.
1821 [(set (match_operand:SI 0 "register_operand" "=r")
1822 (zero_extend:SI (mem:HI
1824 (match_operand:SI 1 "register_operand" "+r")
1825 (match_operand:SI 2 "int5_operand" "L")))))
1827 (plus:SI (match_dup 1) (match_dup 2)))]
1829 "ldhs,mb %2(0,%1),%0"
1830 [(set_attr "type" "load")
1831 (set_attr "length" "4")])
1834 [(set (mem:HI (plus:SI (match_operand:SI 0 "register_operand" "+r")
1835 (match_operand:SI 1 "int5_operand" "L")))
1836 (match_operand:HI 2 "reg_or_0_operand" "rM"))
1838 (plus:SI (match_dup 0) (match_dup 1)))]
1840 "sths,mb %r2,%1(0,%0)"
1841 [(set_attr "type" "store")
1842 (set_attr "length" "4")])
1845 [(set (match_operand:HI 0 "register_operand" "=r")
1846 (high:HI (match_operand 1 "const_int_operand" "")))]
1849 [(set_attr "type" "move")
1850 (set_attr "length" "4")])
1853 [(set (match_operand:HI 0 "register_operand" "=r")
1854 (lo_sum:HI (match_operand:HI 1 "register_operand" "r")
1855 (match_operand 2 "const_int_operand" "")))]
1858 [(set_attr "type" "move")
1859 (set_attr "length" "4")])
1861 (define_expand "movqi"
1862 [(set (match_operand:QI 0 "general_operand" "")
1863 (match_operand:QI 1 "general_operand" ""))]
1867 if (emit_move_sequence (operands, QImode, 0))
1872 [(set (match_operand:QI 0 "reg_or_nonsymb_mem_operand" "=r,r,r,r,r,Q,*q,!f")
1873 (match_operand:QI 1 "move_operand" "r,J,N,K,RQ,rM,rM,!fM"))]
1874 "register_operand (operands[0], QImode)
1875 || reg_or_0_operand (operands[1], QImode)"
1885 [(set_attr "type" "move,move,move,shift,load,store,move,fpalu")
1886 (set_attr "pa_combine_type" "addmove")
1887 (set_attr "length" "4,4,4,4,4,4,4,4")])
1890 [(set (match_operand:QI 0 "register_operand" "=r")
1891 (mem:QI (plus:SI (match_operand:SI 1 "basereg_operand" "r")
1892 (match_operand:SI 2 "register_operand" "r"))))]
1893 "! TARGET_DISABLE_INDEXING"
1896 /* Reload can create backwards (relative to cse) unscaled index
1897 address modes when eliminating registers and possibly for
1898 pseudos that don't get hard registers. Deal with it. */
1899 if (operands[2] == hard_frame_pointer_rtx
1900 || operands[2] == stack_pointer_rtx)
1901 return \"ldbx %1(0,%2),%0\";
1903 return \"ldbx %2(0,%1),%0\";
1905 [(set_attr "type" "load")
1906 (set_attr "length" "4")])
1909 [(set (match_operand:QI 0 "register_operand" "=r")
1910 (mem:QI (plus:SI (match_operand:SI 1 "register_operand" "r")
1911 (match_operand:SI 2 "basereg_operand" "r"))))]
1912 "! TARGET_DISABLE_INDEXING"
1915 /* Reload can create backwards (relative to cse) unscaled index
1916 address modes when eliminating registers and possibly for
1917 pseudos that don't get hard registers. Deal with it. */
1918 if (operands[1] == hard_frame_pointer_rtx
1919 || operands[1] == stack_pointer_rtx)
1920 return \"ldbx %2(0,%1),%0\";
1922 return \"ldbx %1(0,%2),%0\";
1924 [(set_attr "type" "load")
1925 (set_attr "length" "4")])
1927 ; Indexed byte load with zero extension to SImode or HImode.
1929 [(set (match_operand:SI 0 "register_operand" "=r")
1930 (zero_extend:SI (mem:QI
1932 (match_operand:SI 1 "basereg_operand" "r")
1933 (match_operand:SI 2 "register_operand" "r")))))]
1934 "! TARGET_DISABLE_INDEXING"
1937 /* Reload can create backwards (relative to cse) unscaled index
1938 address modes when eliminating registers and possibly for
1939 pseudos that don't get hard registers. Deal with it. */
1940 if (operands[2] == hard_frame_pointer_rtx
1941 || operands[2] == stack_pointer_rtx)
1942 return \"ldbx %1(0,%2),%0\";
1944 return \"ldbx %2(0,%1),%0\";
1946 [(set_attr "type" "load")
1947 (set_attr "length" "4")])
1950 [(set (match_operand:SI 0 "register_operand" "=r")
1951 (zero_extend:SI (mem:QI
1953 (match_operand:SI 1 "register_operand" "r")
1954 (match_operand:SI 2 "basereg_operand" "r")))))]
1955 "! TARGET_DISABLE_INDEXING"
1958 /* Reload can create backwards (relative to cse) unscaled index
1959 address modes when eliminating registers and possibly for
1960 pseudos that don't get hard registers. Deal with it. */
1961 if (operands[1] == hard_frame_pointer_rtx
1962 || operands[1] == stack_pointer_rtx)
1963 return \"ldbx %2(0,%1),%0\";
1965 return \"ldbx %1(0,%2),%0\";
1967 [(set_attr "type" "load")
1968 (set_attr "length" "4")])
1971 [(set (match_operand:HI 0 "register_operand" "=r")
1972 (zero_extend:HI (mem:QI
1974 (match_operand:SI 1 "basereg_operand" "r")
1975 (match_operand:SI 2 "register_operand" "r")))))]
1976 "! TARGET_DISABLE_INDEXING"
1979 /* Reload can create backwards (relative to cse) unscaled index
1980 address modes when eliminating registers and possibly for
1981 pseudos that don't get hard registers. Deal with it. */
1982 if (operands[2] == hard_frame_pointer_rtx
1983 || operands[2] == stack_pointer_rtx)
1984 return \"ldbx %1(0,%2),%0\";
1986 return \"ldbx %2(0,%1),%0\";
1988 [(set_attr "type" "load")
1989 (set_attr "length" "4")])
1992 [(set (match_operand:HI 0 "register_operand" "=r")
1993 (zero_extend:HI (mem:QI
1995 (match_operand:SI 1 "register_operand" "r")
1996 (match_operand:SI 2 "basereg_operand" "r")))))]
1997 "! TARGET_DISABLE_INDEXING"
2000 /* Reload can create backwards (relative to cse) unscaled index
2001 address modes when eliminating registers and possibly for
2002 pseudos that don't get hard registers. Deal with it. */
2003 if (operands[1] == hard_frame_pointer_rtx
2004 || operands[1] == stack_pointer_rtx)
2005 return \"ldbx %2(0,%1),%0\";
2007 return \"ldbx %1(0,%2),%0\";
2009 [(set_attr "type" "load")
2010 (set_attr "length" "4")])
2013 [(set (match_operand:QI 0 "register_operand" "=r")
2014 (mem:QI (plus:SI (match_operand:SI 1 "register_operand" "+r")
2015 (match_operand:SI 2 "int5_operand" "L"))))
2016 (set (match_dup 1) (plus:SI (match_dup 1) (match_dup 2)))]
2018 "ldbs,mb %2(0,%1),%0"
2019 [(set_attr "type" "load")
2020 (set_attr "length" "4")])
2022 ; Now the same thing with zero extensions.
2024 [(set (match_operand:SI 0 "register_operand" "=r")
2025 (zero_extend:SI (mem:QI (plus:SI
2026 (match_operand:SI 1 "register_operand" "+r")
2027 (match_operand:SI 2 "int5_operand" "L")))))
2028 (set (match_dup 1) (plus:SI (match_dup 1) (match_dup 2)))]
2030 "ldbs,mb %2(0,%1),%0"
2031 [(set_attr "type" "load")
2032 (set_attr "length" "4")])
2035 [(set (match_operand:HI 0 "register_operand" "=r")
2036 (zero_extend:HI (mem:QI (plus:SI
2037 (match_operand:SI 1 "register_operand" "+r")
2038 (match_operand:SI 2 "int5_operand" "L")))))
2039 (set (match_dup 1) (plus:SI (match_dup 1) (match_dup 2)))]
2041 "ldbs,mb %2(0,%1),%0"
2042 [(set_attr "type" "load")
2043 (set_attr "length" "4")])
2046 [(set (mem:QI (plus:SI (match_operand:SI 0 "register_operand" "+r")
2047 (match_operand:SI 1 "int5_operand" "L")))
2048 (match_operand:QI 2 "reg_or_0_operand" "rM"))
2050 (plus:SI (match_dup 0) (match_dup 1)))]
2052 "stbs,mb %r2,%1(0,%0)"
2053 [(set_attr "type" "store")
2054 (set_attr "length" "4")])
2056 ;; The definition of this insn does not really explain what it does,
2057 ;; but it should suffice
2058 ;; that anything generated as this insn will be recognized as one
2059 ;; and that it will not successfully combine with anything.
2060 (define_expand "movstrsi"
2061 [(parallel [(set (match_operand:BLK 0 "" "")
2062 (match_operand:BLK 1 "" ""))
2063 (clobber (match_dup 7))
2064 (clobber (match_dup 8))
2065 (clobber (match_dup 4))
2066 (clobber (match_dup 5))
2067 (clobber (match_dup 6))
2068 (use (match_operand:SI 2 "arith_operand" ""))
2069 (use (match_operand:SI 3 "const_int_operand" ""))])]
2075 /* HP provides very fast block move library routine for the PA;
2076 this routine includes:
2078 4x4 byte at a time block moves,
2079 1x4 byte at a time with alignment checked at runtime with
2080 attempts to align the source and destination as needed
2083 With that in mind, here's the heuristics to try and guess when
2084 the inlined block move will be better than the library block
2087 If the size isn't constant, then always use the library routines.
2089 If the size is large in respect to the known alignment, then use
2090 the library routines.
2092 ;; If the size is small in respect to the known alignment, then open
2093 code the copy (since that will lead to better scheduling).
2095 Else use the block move pattern. */
2097 /* Undetermined size, use the library routine. */
2098 if (GET_CODE (operands[2]) != CONST_INT)
2101 size = INTVAL (operands[2]);
2102 align = INTVAL (operands[3]);
2103 align = align > 4 ? 4 : align;
2105 /* If size/alignment > 16 (i.e. size is large in respect to alignment),
2106 then use the library routines. */
2107 if (size / align > 16)
2110 /* This does happen, but not often enough to worry much about. */
2111 if (size / align < MOVE_RATIO)
2114 /* Fall through means we're going to use our block move pattern. */
2116 = change_address (operands[0], VOIDmode,
2117 copy_to_mode_reg (SImode, XEXP (operands[0], 0)));
2119 = change_address (operands[1], VOIDmode,
2120 copy_to_mode_reg (SImode, XEXP (operands[1], 0)));
2121 operands[4] = gen_reg_rtx (SImode);
2122 operands[5] = gen_reg_rtx (SImode);
2123 operands[6] = gen_reg_rtx (SImode);
2124 operands[7] = XEXP (operands[0], 0);
2125 operands[8] = XEXP (operands[1], 0);
2128 ;; The operand constraints are written like this to support both compile-time
2129 ;; and run-time determined byte count. If the count is run-time determined,
2130 ;; the register with the byte count is clobbered by the copying code, and
2131 ;; therefore it is forced to operand 2. If the count is compile-time
2132 ;; determined, we need two scratch registers for the unrolled code.
2133 (define_insn "movstrsi_internal"
2134 [(set (mem:BLK (match_operand:SI 0 "register_operand" "+r,r"))
2135 (mem:BLK (match_operand:SI 1 "register_operand" "+r,r")))
2136 (clobber (match_dup 0))
2137 (clobber (match_dup 1))
2138 (clobber (match_operand:SI 2 "register_operand" "=r,r")) ;loop cnt/tmp
2139 (clobber (match_operand:SI 3 "register_operand" "=&r,&r")) ;item tmp
2140 (clobber (match_operand:SI 6 "register_operand" "=&r,&r")) ;item tmp2
2141 (use (match_operand:SI 4 "arith_operand" "J,2")) ;byte count
2142 (use (match_operand:SI 5 "const_int_operand" "n,n"))] ;alignment
2144 "* return output_block_move (operands, !which_alternative);"
2145 [(set_attr "type" "multi,multi")])
2147 ;; Floating point move insns
2149 ;; This pattern forces (set (reg:DF ...) (const_double ...))
2150 ;; to be reloaded by putting the constant into memory when
2151 ;; reg is a floating point register.
2153 ;; For integer registers we use ldil;ldo to set the appropriate
2156 ;; This must come before the movdf pattern, and it must be present
2157 ;; to handle obscure reloading cases.
2159 [(set (match_operand:DF 0 "register_operand" "=?r,f")
2160 (match_operand:DF 1 "" "?F,m"))]
2161 "GET_CODE (operands[1]) == CONST_DOUBLE
2162 && operands[1] != CONST0_RTX (DFmode)
2163 && ! TARGET_SOFT_FLOAT"
2164 "* return (which_alternative == 0 ? output_move_double (operands)
2165 : \"fldd%F1 %1,%0\");"
2166 [(set_attr "type" "move,fpload")
2167 (set_attr "length" "16,4")])
2169 (define_expand "movdf"
2170 [(set (match_operand:DF 0 "general_operand" "")
2171 (match_operand:DF 1 "general_operand" ""))]
2175 if (emit_move_sequence (operands, DFmode, 0))
2179 ;; Reloading a DFmode value requires a scratch register if
2180 ;; going into or out of floating point registers.
2182 (define_expand "reload_indf"
2183 [(set (match_operand:DF 0 "register_operand" "=Z")
2184 (match_operand:DF 1 "non_hard_reg_operand" ""))
2185 (clobber (match_operand:DF 2 "register_operand" "=&r"))]
2189 if (emit_move_sequence (operands, DFmode, operands[2]))
2192 /* We don't want the clobber emitted, so handle this ourselves. */
2193 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2197 (define_expand "reload_outdf"
2198 [(set (match_operand:DF 0 "non_hard_reg_operand" "")
2199 (match_operand:DF 1 "register_operand" "Z"))
2200 (clobber (match_operand:DF 2 "register_operand" "=&r"))]
2204 if (emit_move_sequence (operands, DFmode, operands[2]))
2207 /* We don't want the clobber emitted, so handle this ourselves. */
2208 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2213 [(set (match_operand:DF 0 "reg_or_nonsymb_mem_operand"
2214 "=f,*r,RQ,?o,?Q,f,*r,*r")
2215 (match_operand:DF 1 "reg_or_0_or_nonsymb_mem_operand"
2216 "fG,*rG,f,*r,*r,RQ,o,RQ"))]
2217 "(register_operand (operands[0], DFmode)
2218 || reg_or_0_operand (operands[1], DFmode))
2219 && ! (GET_CODE (operands[1]) == CONST_DOUBLE
2220 && GET_CODE (operands[0]) == MEM)
2221 && ! TARGET_SOFT_FLOAT"
2224 if (FP_REG_P (operands[0]) || FP_REG_P (operands[1])
2225 || operands[1] == CONST0_RTX (DFmode))
2226 return output_fp_move_double (operands);
2227 return output_move_double (operands);
2229 [(set_attr "type" "fpalu,move,fpstore,store,store,fpload,load,load")
2230 (set_attr "length" "4,8,4,8,16,4,8,16")])
2233 [(set (match_operand:DF 0 "reg_or_nonsymb_mem_operand"
2235 (match_operand:DF 1 "reg_or_0_or_nonsymb_mem_operand"
2237 "(register_operand (operands[0], DFmode)
2238 || reg_or_0_operand (operands[1], DFmode))
2239 && TARGET_SOFT_FLOAT"
2242 return output_move_double (operands);
2244 [(set_attr "type" "move,store,store,load,load")
2245 (set_attr "length" "8,8,16,8,16")])
2248 [(set (match_operand:DF 0 "register_operand" "=fx")
2249 (mem:DF (plus:SI (match_operand:SI 1 "basereg_operand" "r")
2250 (match_operand:SI 2 "register_operand" "r"))))]
2251 "! TARGET_DISABLE_INDEXING && ! TARGET_SOFT_FLOAT"
2254 /* Reload can create backwards (relative to cse) unscaled index
2255 address modes when eliminating registers and possibly for
2256 pseudos that don't get hard registers. Deal with it. */
2257 if (operands[2] == hard_frame_pointer_rtx
2258 || operands[2] == stack_pointer_rtx)
2259 return \"flddx %1(0,%2),%0\";
2261 return \"flddx %2(0,%1),%0\";
2263 [(set_attr "type" "fpload")
2264 (set_attr "length" "4")])
2267 [(set (match_operand:DF 0 "register_operand" "=fx")
2268 (mem:DF (plus:SI (match_operand:SI 1 "register_operand" "r")
2269 (match_operand:SI 2 "basereg_operand" "r"))))]
2270 "! TARGET_DISABLE_INDEXING && ! TARGET_SOFT_FLOAT"
2273 /* Reload can create backwards (relative to cse) unscaled index
2274 address modes when eliminating registers and possibly for
2275 pseudos that don't get hard registers. Deal with it. */
2276 if (operands[1] == hard_frame_pointer_rtx
2277 || operands[1] == stack_pointer_rtx)
2278 return \"flddx %2(0,%1),%0\";
2280 return \"flddx %1(0,%2),%0\";
2282 [(set_attr "type" "fpload")
2283 (set_attr "length" "4")])
2286 [(set (mem:DF (plus:SI (match_operand:SI 1 "basereg_operand" "r")
2287 (match_operand:SI 2 "register_operand" "r")))
2288 (match_operand:DF 0 "register_operand" "fx"))]
2289 "! TARGET_DISABLE_INDEXING && ! TARGET_SOFT_FLOAT"
2292 /* Reload can create backwards (relative to cse) unscaled index
2293 address modes when eliminating registers and possibly for
2294 pseudos that don't get hard registers. Deal with it. */
2295 if (operands[2] == hard_frame_pointer_rtx
2296 || operands[2] == stack_pointer_rtx)
2297 return \"fstdx %0,%1(0,%2)\";
2299 return \"fstdx %0,%2(0,%1)\";
2301 [(set_attr "type" "fpstore")
2302 (set_attr "length" "4")])
2305 [(set (mem:DF (plus:SI (match_operand:SI 1 "register_operand" "r")
2306 (match_operand:SI 2 "basereg_operand" "r")))
2307 (match_operand:DF 0 "register_operand" "fx"))]
2308 "! TARGET_DISABLE_INDEXING && ! TARGET_SOFT_FLOAT"
2311 /* Reload can create backwards (relative to cse) unscaled index
2312 address modes when eliminating registers and possibly for
2313 pseudos that don't get hard registers. Deal with it. */
2314 if (operands[1] == hard_frame_pointer_rtx
2315 || operands[1] == stack_pointer_rtx)
2316 return \"fstdx %0,%2(0,%1)\";
2318 return \"fstdx %0,%1(0,%2)\";
2320 [(set_attr "type" "fpstore")
2321 (set_attr "length" "4")])
2323 (define_expand "movdi"
2324 [(set (match_operand:DI 0 "reg_or_nonsymb_mem_operand" "")
2325 (match_operand:DI 1 "general_operand" ""))]
2329 if (emit_move_sequence (operands, DImode, 0))
2333 (define_expand "reload_indi"
2334 [(set (match_operand:DI 0 "register_operand" "=f")
2335 (match_operand:DI 1 "non_hard_reg_operand" ""))
2336 (clobber (match_operand:SI 2 "register_operand" "=&r"))]
2340 if (emit_move_sequence (operands, DImode, operands[2]))
2343 /* We don't want the clobber emitted, so handle this ourselves. */
2344 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2348 (define_expand "reload_outdi"
2349 [(set (match_operand:DI 0 "general_operand" "")
2350 (match_operand:DI 1 "register_operand" "f"))
2351 (clobber (match_operand:SI 2 "register_operand" "=&r"))]
2355 if (emit_move_sequence (operands, DImode, operands[2]))
2358 /* We don't want the clobber emitted, so handle this ourselves. */
2359 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2364 [(set (match_operand:DI 0 "register_operand" "=r")
2365 (high:DI (match_operand 1 "" "")))]
2369 rtx op0 = operands[0];
2370 rtx op1 = operands[1];
2372 if (GET_CODE (op1) == CONST_INT)
2374 operands[0] = operand_subword (op0, 1, 0, DImode);
2375 output_asm_insn (\"ldil L'%1,%0\", operands);
2377 operands[0] = operand_subword (op0, 0, 0, DImode);
2378 if (INTVAL (op1) < 0)
2379 output_asm_insn (\"ldi -1,%0\", operands);
2381 output_asm_insn (\"ldi 0,%0\", operands);
2384 else if (GET_CODE (op1) == CONST_DOUBLE)
2386 operands[0] = operand_subword (op0, 1, 0, DImode);
2387 operands[1] = GEN_INT (CONST_DOUBLE_LOW (op1));
2388 output_asm_insn (\"ldil L'%1,%0\", operands);
2390 operands[0] = operand_subword (op0, 0, 0, DImode);
2391 operands[1] = GEN_INT (CONST_DOUBLE_HIGH (op1));
2392 output_asm_insn (singlemove_string (operands), operands);
2398 [(set_attr "type" "move")
2399 (set_attr "length" "8")])
2404 [(set (match_operand:DI 0 "reg_or_nonsymb_mem_operand"
2405 "=r,o,Q,r,r,r,f,f,*TR")
2406 (match_operand:DI 1 "general_operand"
2407 "rM,r,r,o*R,Q,i,fM,*TR,f"))]
2408 "(register_operand (operands[0], DImode)
2409 || reg_or_0_operand (operands[1], DImode))
2410 && ! TARGET_SOFT_FLOAT"
2413 if (FP_REG_P (operands[0]) || FP_REG_P (operands[1])
2414 || (operands[1] == CONST0_RTX (DImode)))
2415 return output_fp_move_double (operands);
2416 return output_move_double (operands);
2418 [(set_attr "type" "move,store,store,load,load,multi,fpalu,fpload,fpstore")
2419 (set_attr "length" "8,8,16,8,16,16,4,4,4")])
2422 [(set (match_operand:DI 0 "reg_or_nonsymb_mem_operand"
2424 (match_operand:DI 1 "general_operand"
2426 "(register_operand (operands[0], DImode)
2427 || reg_or_0_operand (operands[1], DImode))
2428 && TARGET_SOFT_FLOAT"
2431 return output_move_double (operands);
2433 [(set_attr "type" "move,store,store,load,load,multi")
2434 (set_attr "length" "8,8,16,8,16,16")])
2437 [(set (match_operand:DI 0 "register_operand" "=r,&r")
2438 (lo_sum:DI (match_operand:DI 1 "register_operand" "0,r")
2439 (match_operand:DI 2 "immediate_operand" "i,i")))]
2443 /* Don't output a 64 bit constant, since we can't trust the assembler to
2444 handle it correctly. */
2445 if (GET_CODE (operands[2]) == CONST_DOUBLE)
2446 operands[2] = GEN_INT (CONST_DOUBLE_LOW (operands[2]));
2447 if (which_alternative == 1)
2448 output_asm_insn (\"copy %1,%0\", operands);
2449 return \"ldo R'%G2(%R1),%R0\";
2451 [(set_attr "type" "move,move")
2452 (set_attr "length" "4,8")])
2454 ;; This pattern forces (set (reg:SF ...) (const_double ...))
2455 ;; to be reloaded by putting the constant into memory when
2456 ;; reg is a floating point register.
2458 ;; For integer registers we use ldil;ldo to set the appropriate
2461 ;; This must come before the movsf pattern, and it must be present
2462 ;; to handle obscure reloading cases.
2464 [(set (match_operand:SF 0 "register_operand" "=?r,f")
2465 (match_operand:SF 1 "" "?F,m"))]
2466 "GET_CODE (operands[1]) == CONST_DOUBLE
2467 && operands[1] != CONST0_RTX (SFmode)
2468 && ! TARGET_SOFT_FLOAT"
2469 "* return (which_alternative == 0 ? singlemove_string (operands)
2470 : \" fldw%F1 %1,%0\");"
2471 [(set_attr "type" "move,fpload")
2472 (set_attr "length" "8,4")])
2474 (define_expand "movsf"
2475 [(set (match_operand:SF 0 "general_operand" "")
2476 (match_operand:SF 1 "general_operand" ""))]
2480 if (emit_move_sequence (operands, SFmode, 0))
2484 ;; Reloading an SImode or DImode value requires a scratch register if
2485 ;; going into or out of floating point registers.
2487 (define_expand "reload_insf"
2488 [(set (match_operand:SF 0 "register_operand" "=Z")
2489 (match_operand:SF 1 "non_hard_reg_operand" ""))
2490 (clobber (match_operand:SF 2 "register_operand" "=&r"))]
2494 if (emit_move_sequence (operands, SFmode, operands[2]))
2497 /* We don't want the clobber emitted, so handle this ourselves. */
2498 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2502 (define_expand "reload_outsf"
2503 [(set (match_operand:SF 0 "non_hard_reg_operand" "")
2504 (match_operand:SF 1 "register_operand" "Z"))
2505 (clobber (match_operand:SF 2 "register_operand" "=&r"))]
2509 if (emit_move_sequence (operands, SFmode, operands[2]))
2512 /* We don't want the clobber emitted, so handle this ourselves. */
2513 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2518 [(set (match_operand:SF 0 "reg_or_nonsymb_mem_operand"
2520 (match_operand:SF 1 "reg_or_0_or_nonsymb_mem_operand"
2521 "fG,rG,RQ,RQ,f,rG"))]
2522 "(register_operand (operands[0], SFmode)
2523 || reg_or_0_operand (operands[1], SFmode))
2524 && ! TARGET_SOFT_FLOAT"
2532 [(set_attr "type" "fpalu,move,fpload,load,fpstore,store")
2533 (set_attr "pa_combine_type" "addmove")
2534 (set_attr "length" "4,4,4,4,4,4")])
2537 [(set (match_operand:SF 0 "reg_or_nonsymb_mem_operand"
2539 (match_operand:SF 1 "reg_or_0_or_nonsymb_mem_operand"
2541 "(register_operand (operands[0], SFmode)
2542 || reg_or_0_operand (operands[1], SFmode))
2543 && TARGET_SOFT_FLOAT"
2548 [(set_attr "type" "move,load,store")
2549 (set_attr "pa_combine_type" "addmove")
2550 (set_attr "length" "4,4,4")])
2553 [(set (match_operand:SF 0 "register_operand" "=fx")
2554 (mem:SF (plus:SI (match_operand:SI 1 "basereg_operand" "r")
2555 (match_operand:SI 2 "register_operand" "r"))))]
2556 "! TARGET_DISABLE_INDEXING && ! TARGET_SOFT_FLOAT"
2559 /* Reload can create backwards (relative to cse) unscaled index
2560 address modes when eliminating registers and possibly for
2561 pseudos that don't get hard registers. Deal with it. */
2562 if (operands[2] == hard_frame_pointer_rtx
2563 || operands[2] == stack_pointer_rtx)
2564 return \"fldwx %1(0,%2),%0\";
2566 return \"fldwx %2(0,%1),%0\";
2568 [(set_attr "type" "fpload")
2569 (set_attr "length" "4")])
2572 [(set (match_operand:SF 0 "register_operand" "=fx")
2573 (mem:SF (plus:SI (match_operand:SI 1 "register_operand" "r")
2574 (match_operand:SI 2 "basereg_operand" "r"))))]
2575 "! TARGET_DISABLE_INDEXING && ! TARGET_SOFT_FLOAT"
2578 /* Reload can create backwards (relative to cse) unscaled index
2579 address modes when eliminating registers and possibly for
2580 pseudos that don't get hard registers. Deal with it. */
2581 if (operands[1] == hard_frame_pointer_rtx
2582 || operands[1] == stack_pointer_rtx)
2583 return \"fldwx %2(0,%1),%0\";
2585 return \"fldwx %1(0,%2),%0\";
2587 [(set_attr "type" "fpload")
2588 (set_attr "length" "4")])
2591 [(set (mem:SF (plus:SI (match_operand:SI 1 "basereg_operand" "r")
2592 (match_operand:SI 2 "register_operand" "r")))
2593 (match_operand:SF 0 "register_operand" "fx"))]
2594 "! TARGET_DISABLE_INDEXING && ! TARGET_SOFT_FLOAT"
2597 /* Reload can create backwards (relative to cse) unscaled index
2598 address modes when eliminating registers and possibly for
2599 pseudos that don't get hard registers. Deal with it. */
2600 if (operands[2] == hard_frame_pointer_rtx
2601 || operands[2] == stack_pointer_rtx)
2602 return \"fstwx %0,%1(0,%2)\";
2604 return \"fstwx %0,%2(0,%1)\";
2606 [(set_attr "type" "fpstore")
2607 (set_attr "length" "4")])
2610 [(set (mem:SF (plus:SI (match_operand:SI 1 "register_operand" "r")
2611 (match_operand:SI 2 "basereg_operand" "r")))
2612 (match_operand:SF 0 "register_operand" "fx"))]
2613 "! TARGET_DISABLE_INDEXING && ! TARGET_SOFT_FLOAT"
2616 /* Reload can create backwards (relative to cse) unscaled index
2617 address modes when eliminating registers and possibly for
2618 pseudos that don't get hard registers. Deal with it. */
2619 if (operands[1] == hard_frame_pointer_rtx
2620 || operands[1] == stack_pointer_rtx)
2621 return \"fstwx %0,%2(0,%1)\";
2623 return \"fstwx %0,%1(0,%2)\";
2625 [(set_attr "type" "fpstore")
2626 (set_attr "length" "4")])
2629 ;;- zero extension instructions
2630 ;; We have define_expand for zero extension patterns to make sure the
2631 ;; operands get loaded into registers. The define_insns accept
2632 ;; memory operands. This gives us better overall code than just
2633 ;; having a pattern that does or does not accept memory operands.
2635 (define_expand "zero_extendhisi2"
2636 [(set (match_operand:SI 0 "register_operand" "")
2638 (match_operand:HI 1 "register_operand" "")))]
2643 [(set (match_operand:SI 0 "register_operand" "=r,r")
2645 (match_operand:HI 1 "move_operand" "r,RQ")))]
2646 "GET_CODE (operands[1]) != CONST_INT"
2650 [(set_attr "type" "shift,load")
2651 (set_attr "length" "4,4")])
2653 (define_expand "zero_extendqihi2"
2654 [(set (match_operand:HI 0 "register_operand" "")
2656 (match_operand:QI 1 "register_operand" "")))]
2661 [(set (match_operand:HI 0 "register_operand" "=r,r")
2663 (match_operand:QI 1 "move_operand" "r,RQ")))]
2664 "GET_CODE (operands[1]) != CONST_INT"
2668 [(set_attr "type" "shift,load")
2669 (set_attr "length" "4,4")])
2671 (define_expand "zero_extendqisi2"
2672 [(set (match_operand:SI 0 "register_operand" "")
2674 (match_operand:QI 1 "register_operand" "")))]
2679 [(set (match_operand:SI 0 "register_operand" "=r,r")
2681 (match_operand:QI 1 "move_operand" "r,RQ")))]
2682 "GET_CODE (operands[1]) != CONST_INT"
2686 [(set_attr "type" "shift,load")
2687 (set_attr "length" "4,4")])
2689 ;;- sign extension instructions
2691 (define_insn "extendhisi2"
2692 [(set (match_operand:SI 0 "register_operand" "=r")
2693 (sign_extend:SI (match_operand:HI 1 "register_operand" "r")))]
2696 [(set_attr "type" "shift")
2697 (set_attr "length" "4")])
2699 (define_insn "extendqihi2"
2700 [(set (match_operand:HI 0 "register_operand" "=r")
2701 (sign_extend:HI (match_operand:QI 1 "register_operand" "r")))]
2704 [(set_attr "type" "shift")
2705 (set_attr "length" "4")])
2707 (define_insn "extendqisi2"
2708 [(set (match_operand:SI 0 "register_operand" "=r")
2709 (sign_extend:SI (match_operand:QI 1 "register_operand" "r")))]
2712 [(set_attr "type" "shift")
2713 (set_attr "length" "4")])
2715 ;; Conversions between float and double.
2717 (define_insn "extendsfdf2"
2718 [(set (match_operand:DF 0 "register_operand" "=f")
2720 (match_operand:SF 1 "register_operand" "f")))]
2721 "! TARGET_SOFT_FLOAT"
2722 "fcnvff,sgl,dbl %1,%0"
2723 [(set_attr "type" "fpalu")
2724 (set_attr "length" "4")])
2726 (define_insn "truncdfsf2"
2727 [(set (match_operand:SF 0 "register_operand" "=f")
2729 (match_operand:DF 1 "register_operand" "f")))]
2730 "! TARGET_SOFT_FLOAT"
2731 "fcnvff,dbl,sgl %1,%0"
2732 [(set_attr "type" "fpalu")
2733 (set_attr "length" "4")])
2735 ;; Conversion between fixed point and floating point.
2736 ;; Note that among the fix-to-float insns
2737 ;; the ones that start with SImode come first.
2738 ;; That is so that an operand that is a CONST_INT
2739 ;; (and therefore lacks a specific machine mode)
2740 ;; will be recognized as SImode (which is always valid)
2741 ;; rather than as QImode or HImode.
2743 ;; This pattern forces (set (reg:SF ...) (float:SF (const_int ...)))
2744 ;; to be reloaded by putting the constant into memory.
2745 ;; It must come before the more general floatsisf2 pattern.
2747 [(set (match_operand:SF 0 "register_operand" "=f")
2748 (float:SF (match_operand:SI 1 "const_int_operand" "m")))]
2749 "! TARGET_SOFT_FLOAT"
2750 "fldw%F1 %1,%0\;fcnvxf,sgl,sgl %0,%0"
2751 [(set_attr "type" "fpalu")
2752 (set_attr "length" "8")])
;; floatsisf2: convert a 32-bit signed integer to single-precision float.
;; Both operands live in floating-point registers ("f" constraints), so the
;; integer must already have been moved into the FP register file.
;; Emits a single 4-byte fcnvxf (fixed-to-float) instruction; only valid
;; when hardware floating point is available.
2754 (define_insn "floatsisf2"
2755 [(set (match_operand:SF 0 "register_operand" "=f")
2756 (float:SF (match_operand:SI 1 "register_operand" "f")))]
2757 "! TARGET_SOFT_FLOAT"
2758 "fcnvxf,sgl,sgl %1,%0"
2759 [(set_attr "type" "fpalu")
2760 (set_attr "length" "4")])
2762 ;; This pattern forces (set (reg:DF ...) (float:DF (const_int ...)))
2763 ;; to be reloaded by putting the constant into memory.
2764 ;; It must come before the more general floatsidf2 pattern.
2766 [(set (match_operand:DF 0 "register_operand" "=f")
2767 (float:DF (match_operand:SI 1 "const_int_operand" "m")))]
2768 "! TARGET_SOFT_FLOAT"
2769 "fldw%F1 %1,%0\;fcnvxf,sgl,dbl %0,%0"
2770 [(set_attr "type" "fpalu")
2771 (set_attr "length" "8")])
;; floatsidf2: convert a 32-bit signed integer (in an FP register) to
;; double-precision float with a single fcnvxf,sgl,dbl instruction.
;; Requires hardware floating point (! TARGET_SOFT_FLOAT).
2773 (define_insn "floatsidf2"
2774 [(set (match_operand:DF 0 "register_operand" "=f")
2775 (float:DF (match_operand:SI 1 "register_operand" "f")))]
2776 "! TARGET_SOFT_FLOAT"
2777 "fcnvxf,sgl,dbl %1,%0"
2778 [(set_attr "type" "fpalu")
2779 (set_attr "length" "4")])
2781 (define_expand "floatunssisf2"
2782 [(set (subreg:SI (match_dup 2) 1)
2783 (match_operand:SI 1 "register_operand" ""))
2784 (set (subreg:SI (match_dup 2) 0)
2786 (set (match_operand:SF 0 "register_operand" "")
2787 (float:SF (match_dup 2)))]
2788 "TARGET_SNAKE && ! TARGET_SOFT_FLOAT"
2789 "operands[2] = gen_reg_rtx (DImode);")
2791 (define_expand "floatunssidf2"
2792 [(set (subreg:SI (match_dup 2) 1)
2793 (match_operand:SI 1 "register_operand" ""))
2794 (set (subreg:SI (match_dup 2) 0)
2796 (set (match_operand:DF 0 "register_operand" "")
2797 (float:DF (match_dup 2)))]
2798 "TARGET_SNAKE && ! TARGET_SOFT_FLOAT"
2799 "operands[2] = gen_reg_rtx (DImode);")
;; floatdisf2: convert a 64-bit signed integer (in an FP register) to
;; single-precision float via fcnvxf,dbl,sgl.  Conditional on TARGET_SNAKE
;; (PA-1.1-class hardware, matching the other DImode conversions here) in
;; addition to hardware floating point.
2801 (define_insn "floatdisf2"
2802 [(set (match_operand:SF 0 "register_operand" "=f")
2803 (float:SF (match_operand:DI 1 "register_operand" "f")))]
2804 "TARGET_SNAKE && ! TARGET_SOFT_FLOAT"
2805 "fcnvxf,dbl,sgl %1,%0"
2806 [(set_attr "type" "fpalu")
2807 (set_attr "length" "4")])
;; floatdidf2: convert a 64-bit signed integer (in an FP register) to
;; double-precision float via fcnvxf,dbl,dbl.  Same TARGET_SNAKE and
;; hardware-FP requirements as floatdisf2.
2809 (define_insn "floatdidf2"
2810 [(set (match_operand:DF 0 "register_operand" "=f")
2811 (float:DF (match_operand:DI 1 "register_operand" "f")))]
2812 "TARGET_SNAKE && ! TARGET_SOFT_FLOAT"
2813 "fcnvxf,dbl,dbl %1,%0"
2814 [(set_attr "type" "fpalu")
2815 (set_attr "length" "4")])
2817 ;; Convert a float to an actual integer.
2818 ;; Truncation is performed as part of the conversion.
;; fix_truncsfsi2: convert single-precision float to 32-bit signed integer.
;; The ,t completer on fcnvfxt requests truncation toward zero, which is why
;; the (fix:SI (fix:SF ...)) RTL needs no separate truncation step.  Result
;; lands in an FP register ("=f"); hardware FP required.
2820 (define_insn "fix_truncsfsi2"
2821 [(set (match_operand:SI 0 "register_operand" "=f")
2822 (fix:SI (fix:SF (match_operand:SF 1 "register_operand" "f"))))]
2823 "! TARGET_SOFT_FLOAT"
2824 "fcnvfxt,sgl,sgl %1,%0"
2825 [(set_attr "type" "fpalu")
2826 (set_attr "length" "4")])
;; fix_truncdfsi2: convert double-precision float to 32-bit signed integer
;; with a truncating fcnvfxt,dbl,sgl.  Mirrors fix_truncsfsi2 for DFmode
;; input; hardware FP required.
2828 (define_insn "fix_truncdfsi2"
2829 [(set (match_operand:SI 0 "register_operand" "=f")
2830 (fix:SI (fix:DF (match_operand:DF 1 "register_operand" "f"))))]
2831 "! TARGET_SOFT_FLOAT"
2832 "fcnvfxt,dbl,sgl %1,%0"
2833 [(set_attr "type" "fpalu")
2834 (set_attr "length" "4")])
;; fix_truncsfdi2: convert single-precision float to 64-bit signed integer
;; with a truncating fcnvfxt,sgl,dbl.  Like the other DImode conversions,
;; restricted to TARGET_SNAKE hardware with FP enabled.
2836 (define_insn "fix_truncsfdi2"
2837 [(set (match_operand:DI 0 "register_operand" "=f")
2838 (fix:DI (fix:SF (match_operand:SF 1 "register_operand" "f"))))]
2839 "TARGET_SNAKE && ! TARGET_SOFT_FLOAT"
2840 "fcnvfxt,sgl,dbl %1,%0"
2841 [(set_attr "type" "fpalu")
2842 (set_attr "length" "4")])
;; fix_truncdfdi2: convert double-precision float to 64-bit signed integer
;; with a truncating fcnvfxt,dbl,dbl.  TARGET_SNAKE and hardware FP required,
;; matching fix_truncsfdi2.
2844 (define_insn "fix_truncdfdi2"
2845 [(set (match_operand:DI 0 "register_operand" "=f")
2846 (fix:DI (fix:DF (match_operand:DF 1 "register_operand" "f"))))]
2847 "TARGET_SNAKE && ! TARGET_SOFT_FLOAT"
2848 "fcnvfxt,dbl,dbl %1,%0"
2849 [(set_attr "type" "fpalu")
2850 (set_attr "length" "4")])
2852 ;;- arithmetic instructions
2854 (define_insn "adddi3"
2855 [(set (match_operand:DI 0 "register_operand" "=r")
2856 (plus:DI (match_operand:DI 1 "register_operand" "%r")
2857 (match_operand:DI 2 "arith11_operand" "rI")))]
2861 if (GET_CODE (operands[2]) == CONST_INT)
2863 if (INTVAL (operands[2]) >= 0)
2864 return \"addi %2,%R1,%R0\;addc %1,0,%0\";
2866 return \"addi %2,%R1,%R0\;subb %1,0,%0\";
2869 return \"add %R2,%R1,%R0\;addc %2,%1,%0\";
2871 [(set_attr "type" "binary")
2872 (set_attr "length" "8")])
2875 [(set (match_operand:SI 0 "register_operand" "=r")
2876 (plus:SI (not:SI (match_operand:SI 1 "register_operand" "r"))
2877 (match_operand:SI 2 "register_operand" "r")))]
2880 [(set_attr "type" "binary")
2881 (set_attr "length" "4")])
2883 ;; define_splits to optimize cases of adding a constant integer
2884 ;; to a register when the constant does not fit in 14 bits.
2886 [(set (match_operand:SI 0 "register_operand" "")
2887 (plus:SI (match_operand:SI 1 "register_operand" "")
2888 (match_operand:SI 2 "const_int_operand" "")))
2889 (clobber (match_operand:SI 4 "register_operand" ""))]
2890 "! cint_ok_for_move (INTVAL (operands[2]))
2891 && VAL_14_BITS_P (INTVAL (operands[2]) >> 1)"
2892 [(set (match_dup 4) (plus:SI (match_dup 1) (match_dup 2)))
2893 (set (match_dup 0) (plus:SI (match_dup 4) (match_dup 3)))]
2896 int val = INTVAL (operands[2]);
2897 int low = (val < 0) ? -0x2000 : 0x1fff;
2898 int rest = val - low;
2900 operands[2] = GEN_INT (rest);
2901 operands[3] = GEN_INT (low);
2905 [(set (match_operand:SI 0 "register_operand" "")
2906 (plus:SI (match_operand:SI 1 "register_operand" "")
2907 (match_operand:SI 2 "const_int_operand" "")))
2908 (clobber (match_operand:SI 4 "register_operand" ""))]
2909 "! cint_ok_for_move (INTVAL (operands[2]))"
2910 [(set (match_dup 4) (match_dup 2))
2911 (set (match_dup 0) (plus:SI (mult:SI (match_dup 4) (match_dup 3))
2915 HOST_WIDE_INT intval = INTVAL (operands[2]);
2917 /* Try dividing the constant by 2, then 4, and finally 8 to see
2918 if we can get a constant which can be loaded into a register
2919 in a single instruction (cint_ok_for_move).
2921 If that fails, try to negate the constant and subtract it
2922 from our input operand. */
2923 if (intval % 2 == 0 && cint_ok_for_move (intval / 2))
2925 operands[2] = GEN_INT (intval / 2);
2926 operands[3] = GEN_INT (2);
2928 else if (intval % 4 == 0 && cint_ok_for_move (intval / 4))
2930 operands[2] = GEN_INT (intval / 4);
2931 operands[3] = GEN_INT (4);
2933 else if (intval % 8 == 0 && cint_ok_for_move (intval / 8))
2935 operands[2] = GEN_INT (intval / 8);
2936 operands[3] = GEN_INT (8);
2938 else if (cint_ok_for_move (-intval))
2940 emit_insn (gen_rtx_SET (VOIDmode, operands[4], GEN_INT (-intval)));
2941 emit_insn (gen_subsi3 (operands[0], operands[1], operands[4]));
2948 (define_insn "addsi3"
2949 [(set (match_operand:SI 0 "register_operand" "=r,r")
2950 (plus:SI (match_operand:SI 1 "register_operand" "%r,r")
2951 (match_operand:SI 2 "arith_operand" "r,J")))]
2956 [(set_attr "type" "binary,binary")
2957 (set_attr "pa_combine_type" "addmove")
2958 (set_attr "length" "4,4")])
2960 ;; Disgusting kludge to work around reload bugs with frame pointer
2961 ;; elimination. Similar to other magic reload patterns in the
2962 ;; indexed memory operations.
2964 [(set (match_operand:SI 0 "register_operand" "=&r")
2965 (plus:SI (plus:SI (match_operand:SI 1 "register_operand" "%r")
2966 (match_operand:SI 2 "register_operand" "r"))
2967 (match_operand:SI 3 "const_int_operand" "rL")))]
2968 "reload_in_progress"
2971 if (GET_CODE (operands[3]) == CONST_INT)
2972 return \"ldo %3(%2),%0\;addl %1,%0,%0\";
2974 return \"addl %3,%2,%0\;addl %1,%0,%0\";
2976 [(set_attr "type" "binary")
2977 (set_attr "length" "8")])
2979 (define_insn "subdi3"
2980 [(set (match_operand:DI 0 "register_operand" "=r")
2981 (minus:DI (match_operand:DI 1 "register_operand" "r")
2982 (match_operand:DI 2 "register_operand" "r")))]
2984 "sub %R1,%R2,%R0\;subb %1,%2,%0"
2985 [(set_attr "type" "binary")
2986 (set_attr "length" "8")])
2988 (define_insn "subsi3"
2989 [(set (match_operand:SI 0 "register_operand" "=r,r")
2990 (minus:SI (match_operand:SI 1 "arith11_operand" "r,I")
2991 (match_operand:SI 2 "register_operand" "r,r")))]
2996 [(set_attr "type" "binary,binary")
2997 (set_attr "length" "4,4")])
2999 ;; Clobbering a "register_operand" instead of a match_scratch
3000 ;; in operand3 of millicode calls avoids spilling %r1 and
3001 ;; produces better code.
3003 ;; The mulsi3 insns set up registers for the millicode call.
3004 (define_expand "mulsi3"
3005 [(set (reg:SI 26) (match_operand:SI 1 "move_operand" ""))
3006 (set (reg:SI 25) (match_operand:SI 2 "move_operand" ""))
3007 (parallel [(set (reg:SI 29) (mult:SI (reg:SI 26) (reg:SI 25)))
3008 (clobber (match_dup 3))
3009 (clobber (reg:SI 26))
3010 (clobber (reg:SI 25))
3011 (clobber (reg:SI 31))])
3012 (set (match_operand:SI 0 "general_operand" "") (reg:SI 29))]
3016 if (TARGET_SNAKE && ! TARGET_DISABLE_FPREGS && ! TARGET_SOFT_FLOAT)
3018 rtx scratch = gen_reg_rtx (DImode);
3019 operands[1] = force_reg (SImode, operands[1]);
3020 operands[2] = force_reg (SImode, operands[2]);
3021 emit_insn (gen_umulsidi3 (scratch, operands[1], operands[2]));
3022 emit_insn (gen_rtx_SET (VOIDmode,
3024 gen_rtx_SUBREG (SImode, scratch, 1)));
3027 operands[3] = gen_reg_rtx (SImode);
3030 (define_insn "umulsidi3"
3031 [(set (match_operand:DI 0 "nonimmediate_operand" "=f")
3032 (mult:DI (zero_extend:DI (match_operand:SI 1 "nonimmediate_operand" "f"))
3033 (zero_extend:DI (match_operand:SI 2 "nonimmediate_operand" "f"))))]
3034 "TARGET_SNAKE && ! TARGET_DISABLE_FPREGS && ! TARGET_SOFT_FLOAT"
3036 [(set_attr "type" "fpmuldbl")
3037 (set_attr "length" "4")])
3040 [(set (match_operand:DI 0 "nonimmediate_operand" "=f")
3041 (mult:DI (zero_extend:DI (match_operand:SI 1 "nonimmediate_operand" "f"))
3042 (match_operand:DI 2 "uint32_operand" "f")))]
3043 "TARGET_SNAKE && ! TARGET_DISABLE_FPREGS && ! TARGET_SOFT_FLOAT"
3045 [(set_attr "type" "fpmuldbl")
3046 (set_attr "length" "4")])
3049 [(set (reg:SI 29) (mult:SI (reg:SI 26) (reg:SI 25)))
3050 (clobber (match_operand:SI 0 "register_operand" "=a"))
3051 (clobber (reg:SI 26))
3052 (clobber (reg:SI 25))
3053 (clobber (reg:SI 31))]
3055 "* return output_mul_insn (0, insn);"
3056 [(set_attr "type" "milli")
3057 (set (attr "length")
3059 ;; Target (or stub) within reach
3060 (and (lt (plus (symbol_ref "total_code_bytes") (pc))
3062 (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
3067 (ne (symbol_ref "TARGET_NO_SPACE_REGS || TARGET_FAST_INDIRECT_CALLS")
3071 ;; Out of reach, but not PIC or PORTABLE_RUNTIME
3072 ;; same as NO_SPACE_REGS code
3073 (and (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
3075 (eq (symbol_ref "flag_pic")
3079 ;; Out of range and either PIC or PORTABLE_RUNTIME
3082 ;;; Division and mod.
3083 (define_expand "divsi3"
3084 [(set (reg:SI 26) (match_operand:SI 1 "move_operand" ""))
3085 (set (reg:SI 25) (match_operand:SI 2 "move_operand" ""))
3086 (parallel [(set (reg:SI 29) (div:SI (reg:SI 26) (reg:SI 25)))
3087 (clobber (match_dup 3))
3088 (clobber (reg:SI 26))
3089 (clobber (reg:SI 25))
3090 (clobber (reg:SI 31))])
3091 (set (match_operand:SI 0 "general_operand" "") (reg:SI 29))]
3095 operands[3] = gen_reg_rtx (SImode);
3096 if (GET_CODE (operands[2]) == CONST_INT && emit_hpdiv_const (operands, 0))
3102 (div:SI (reg:SI 26) (match_operand:SI 0 "div_operand" "")))
3103 (clobber (match_operand:SI 1 "register_operand" "=a"))
3104 (clobber (reg:SI 26))
3105 (clobber (reg:SI 25))
3106 (clobber (reg:SI 31))]
3109 return output_div_insn (operands, 0, insn);"
3110 [(set_attr "type" "milli")
3111 (set (attr "length")
3113 ;; Target (or stub) within reach
3114 (and (lt (plus (symbol_ref "total_code_bytes") (pc))
3116 (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
3121 (ne (symbol_ref "TARGET_NO_SPACE_REGS || TARGET_FAST_INDIRECT_CALLS")
3125 ;; Out of reach, but not PIC or PORTABLE_RUNTIME
3126 ;; same as NO_SPACE_REGS code
3127 (and (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
3129 (eq (symbol_ref "flag_pic")
3133 ;; Out of range and either PIC or PORTABLE_RUNTIME
3136 (define_expand "udivsi3"
3137 [(set (reg:SI 26) (match_operand:SI 1 "move_operand" ""))
3138 (set (reg:SI 25) (match_operand:SI 2 "move_operand" ""))
3139 (parallel [(set (reg:SI 29) (udiv:SI (reg:SI 26) (reg:SI 25)))
3140 (clobber (match_dup 3))
3141 (clobber (reg:SI 26))
3142 (clobber (reg:SI 25))
3143 (clobber (reg:SI 31))])
3144 (set (match_operand:SI 0 "general_operand" "") (reg:SI 29))]
3148 operands[3] = gen_reg_rtx (SImode);
3149 if (GET_CODE (operands[2]) == CONST_INT && emit_hpdiv_const (operands, 1))
3155 (udiv:SI (reg:SI 26) (match_operand:SI 0 "div_operand" "")))
3156 (clobber (match_operand:SI 1 "register_operand" "=a"))
3157 (clobber (reg:SI 26))
3158 (clobber (reg:SI 25))
3159 (clobber (reg:SI 31))]
3162 return output_div_insn (operands, 1, insn);"
3163 [(set_attr "type" "milli")
3164 (set (attr "length")
3166 ;; Target (or stub) within reach
3167 (and (lt (plus (symbol_ref "total_code_bytes") (pc))
3169 (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
3174 (ne (symbol_ref "TARGET_NO_SPACE_REGS || TARGET_FAST_INDIRECT_CALLS")
3178 ;; Out of reach, but not PIC or PORTABLE_RUNTIME
3179 ;; same as NO_SPACE_REGS code
3180 (and (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
3182 (eq (symbol_ref "flag_pic")
3186 ;; Out of range and either PIC or PORTABLE_RUNTIME
3189 (define_expand "modsi3"
3190 [(set (reg:SI 26) (match_operand:SI 1 "move_operand" ""))
3191 (set (reg:SI 25) (match_operand:SI 2 "move_operand" ""))
3192 (parallel [(set (reg:SI 29) (mod:SI (reg:SI 26) (reg:SI 25)))
3193 (clobber (match_dup 3))
3194 (clobber (reg:SI 26))
3195 (clobber (reg:SI 25))
3196 (clobber (reg:SI 31))])
3197 (set (match_operand:SI 0 "general_operand" "") (reg:SI 29))]
3201 operands[3] = gen_reg_rtx (SImode);
3205 [(set (reg:SI 29) (mod:SI (reg:SI 26) (reg:SI 25)))
3206 (clobber (match_operand:SI 0 "register_operand" "=a"))
3207 (clobber (reg:SI 26))
3208 (clobber (reg:SI 25))
3209 (clobber (reg:SI 31))]
3212 return output_mod_insn (0, insn);"
3213 [(set_attr "type" "milli")
3214 (set (attr "length")
3216 ;; Target (or stub) within reach
3217 (and (lt (plus (symbol_ref "total_code_bytes") (pc))
3219 (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
3224 (ne (symbol_ref "TARGET_NO_SPACE_REGS || TARGET_FAST_INDIRECT_CALLS")
3228 ;; Out of reach, but not PIC or PORTABLE_RUNTIME
3229 ;; same as NO_SPACE_REGS code
3230 (and (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
3232 (eq (symbol_ref "flag_pic")
3236 ;; Out of range and either PIC or PORTABLE_RUNTIME
3239 (define_expand "umodsi3"
3240 [(set (reg:SI 26) (match_operand:SI 1 "move_operand" ""))
3241 (set (reg:SI 25) (match_operand:SI 2 "move_operand" ""))
3242 (parallel [(set (reg:SI 29) (umod:SI (reg:SI 26) (reg:SI 25)))
3243 (clobber (match_dup 3))
3244 (clobber (reg:SI 26))
3245 (clobber (reg:SI 25))
3246 (clobber (reg:SI 31))])
3247 (set (match_operand:SI 0 "general_operand" "") (reg:SI 29))]
3251 operands[3] = gen_reg_rtx (SImode);
3255 [(set (reg:SI 29) (umod:SI (reg:SI 26) (reg:SI 25)))
3256 (clobber (match_operand:SI 0 "register_operand" "=a"))
3257 (clobber (reg:SI 26))
3258 (clobber (reg:SI 25))
3259 (clobber (reg:SI 31))]
3262 return output_mod_insn (1, insn);"
3263 [(set_attr "type" "milli")
3264 (set (attr "length")
3266 ;; Target (or stub) within reach
3267 (and (lt (plus (symbol_ref "total_code_bytes") (pc))
3269 (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
3274 (ne (symbol_ref "TARGET_NO_SPACE_REGS || TARGET_FAST_INDIRECT_CALLS")
3278 ;; Out of reach, but not PIC or PORTABLE_RUNTIME
3279 ;; same as NO_SPACE_REGS code
3280 (and (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
3282 (eq (symbol_ref "flag_pic")
3286 ;; Out of range and either PIC or PORTABLE_RUNTIME
3289 ;;- and instructions
3290 ;; We define DImode `and` so with DImode `not` we can get
3291 ;; DImode `andn`. Other combinations are possible.
3293 (define_expand "anddi3"
3294 [(set (match_operand:DI 0 "register_operand" "")
3295 (and:DI (match_operand:DI 1 "arith_double_operand" "")
3296 (match_operand:DI 2 "arith_double_operand" "")))]
3300 if (! register_operand (operands[1], DImode)
3301 || ! register_operand (operands[2], DImode))
3302 /* Let GCC break this into word-at-a-time operations. */
3307 [(set (match_operand:DI 0 "register_operand" "=r")
3308 (and:DI (match_operand:DI 1 "register_operand" "%r")
3309 (match_operand:DI 2 "register_operand" "r")))]
3311 "and %1,%2,%0\;and %R1,%R2,%R0"
3312 [(set_attr "type" "binary")
3313 (set_attr "length" "8")])
3315 ; The ? for op1 makes reload prefer zdepi instead of loading a huge
3316 ; constant with ldil;ldo.
3317 (define_insn "andsi3"
3318 [(set (match_operand:SI 0 "register_operand" "=r,r")
3319 (and:SI (match_operand:SI 1 "register_operand" "%?r,0")
3320 (match_operand:SI 2 "and_operand" "rO,P")))]
3322 "* return output_and (operands); "
3323 [(set_attr "type" "binary,shift")
3324 (set_attr "length" "4,4")])
3327 [(set (match_operand:DI 0 "register_operand" "=r")
3328 (and:DI (not:DI (match_operand:DI 1 "register_operand" "r"))
3329 (match_operand:DI 2 "register_operand" "r")))]
3331 "andcm %2,%1,%0\;andcm %R2,%R1,%R0"
3332 [(set_attr "type" "binary")
3333 (set_attr "length" "8")])
3336 [(set (match_operand:SI 0 "register_operand" "=r")
3337 (and:SI (not:SI (match_operand:SI 1 "register_operand" "r"))
3338 (match_operand:SI 2 "register_operand" "r")))]
3341 [(set_attr "type" "binary")
3342 (set_attr "length" "4")])
;; DImode inclusive OR.  The expander forces a FAIL path (via the C body)
;; when either input is not already a register, letting the middle end
;; split the operation into word-sized pieces.
3344 (define_expand "iordi3"
3345 [(set (match_operand:DI 0 "register_operand" "")
3346 (ior:DI (match_operand:DI 1 "arith_double_operand" "")
3347 (match_operand:DI 2 "arith_double_operand" "")))]
3351 if (! register_operand (operands[1], DImode)
3352 || ! register_operand (operands[2], DImode))
3353 /* Let GCC break this into word-at-a-time operations. */
;; Register-register DImode OR: two 32-bit "or" insns (high then %R low).
3358 [(set (match_operand:DI 0 "register_operand" "=r")
3359 (ior:DI (match_operand:DI 1 "register_operand" "%r")
3360 (match_operand:DI 2 "register_operand" "r")))]
3362 "or %1,%2,%0\;or %R1,%R2,%R0"
3363 [(set_attr "type" "binary")
3364 (set_attr "length" "8")])
3366 ;; Need a define_expand because we've run out of CONST_OK... characters.
;; SImode OR expander: constants that ior_operand cannot encode as a
;; single depi/or are forced into a register first.
3367 (define_expand "iorsi3"
3368 [(set (match_operand:SI 0 "register_operand" "")
3369 (ior:SI (match_operand:SI 1 "register_operand" "")
3370 (match_operand:SI 2 "arith32_operand" "")))]
3374 if (! (ior_operand (operands[2], SImode)
3375 || register_operand (operands[2], SImode)))
3376 operands[2] = force_reg (SImode, operands[2]);
;; OR with an encodable constant; text produced by output_ior() (pa.c).
;; Note op1 is tied to op0 ("0,0").
3380 [(set (match_operand:SI 0 "register_operand" "=r,r")
3381 (ior:SI (match_operand:SI 1 "register_operand" "0,0")
3382 (match_operand:SI 2 "ior_operand" "M,i")))]
3384 "* return output_ior (operands); "
3385 [(set_attr "type" "binary,shift")
3386 (set_attr "length" "4,4")])
;; Plain register-register SImode OR.
3389 [(set (match_operand:SI 0 "register_operand" "=r")
3390 (ior:SI (match_operand:SI 1 "register_operand" "%r")
3391 (match_operand:SI 2 "register_operand" "r")))]
3394 [(set_attr "type" "binary")
3395 (set_attr "length" "4")])
;; DImode exclusive OR.  Like iordi3 above, non-register inputs make the
;; expander punt so the middle end does the operation word-at-a-time.
3397 (define_expand "xordi3"
3398 [(set (match_operand:DI 0 "register_operand" "")
3399 (xor:DI (match_operand:DI 1 "arith_double_operand" "")
3400 (match_operand:DI 2 "arith_double_operand" "")))]
3404 if (! register_operand (operands[1], DImode)
3405 || ! register_operand (operands[2], DImode))
3406 /* Let GCC break this into word-at-a-time operations. */
;; Register-register DImode XOR: two 32-bit "xor" insns.
3411 [(set (match_operand:DI 0 "register_operand" "=r")
3412 (xor:DI (match_operand:DI 1 "register_operand" "%r")
3413 (match_operand:DI 2 "register_operand" "r")))]
3415 "xor %1,%2,%0\;xor %R1,%R2,%R0"
3416 [(set_attr "type" "binary")
3417 (set_attr "length" "8")])
;; Register-register SImode XOR (operands 1/2 commutative).
3419 (define_insn "xorsi3"
3420 [(set (match_operand:SI 0 "register_operand" "=r")
3421 (xor:SI (match_operand:SI 1 "register_operand" "%r")
3422 (match_operand:SI 2 "register_operand" "r")))]
3425 [(set_attr "type" "binary")
3426 (set_attr "length" "4")])
;; DImode negation: subtract the low word from 0, then the high word
;; with borrow ("subb"), so the borrow propagates across the two words.
3428 (define_insn "negdi2"
3429 [(set (match_operand:DI 0 "register_operand" "=r")
3430 (neg:DI (match_operand:DI 1 "register_operand" "r")))]
3432 "sub 0,%R1,%R0\;subb 0,%1,%0"
3433 [(set_attr "type" "unary")
3434 (set_attr "length" "8")])
;; SImode negation.
3436 (define_insn "negsi2"
3437 [(set (match_operand:SI 0 "register_operand" "=r")
3438 (neg:SI (match_operand:SI 1 "register_operand" "r")))]
3441 [(set_attr "type" "unary")
3442 (set_attr "length" "4")])
;; DImode one's complement.  The expander checks that the input is a
;; register before the insn below is used.
3444 (define_expand "one_cmpldi2"
3445 [(set (match_operand:DI 0 "register_operand" "")
3446 (not:DI (match_operand:DI 1 "arith_double_operand" "")))]
3450 if (! register_operand (operands[1], DImode))
;; op0 = ~op1, via two "uaddcm 0,x,y" (unit add complement) insns,
;; one per 32-bit word.
3455 [(set (match_operand:DI 0 "register_operand" "=r")
3456 (not:DI (match_operand:DI 1 "register_operand" "r")))]
3458 "uaddcm 0,%1,%0\;uaddcm 0,%R1,%R0"
3459 [(set_attr "type" "unary")
3460 (set_attr "length" "8")])
;; SImode one's complement.
3462 (define_insn "one_cmplsi2"
3463 [(set (match_operand:SI 0 "register_operand" "=r")
3464 (not:SI (match_operand:SI 1 "register_operand" "r")))]
3467 [(set_attr "type" "unary")
3468 (set_attr "length" "4")])
3470 ;; Floating point arithmetic instructions.
;; Floating-point add/subtract, double and single precision.  All are
;; gated on hardware FP ("! TARGET_SOFT_FLOAT") and are flagged with
;; pa_combine_type "faddsub" so pa_combine_instructions (pa.c) can fuse
;; them with a multiply into fmpyadd/fmpysub on capable processors.
3472 (define_insn "adddf3"
3473 [(set (match_operand:DF 0 "register_operand" "=f")
3474 (plus:DF (match_operand:DF 1 "register_operand" "f")
3475 (match_operand:DF 2 "register_operand" "f")))]
3476 "! TARGET_SOFT_FLOAT"
3478 [(set_attr "type" "fpalu")
3479 (set_attr "pa_combine_type" "faddsub")
3480 (set_attr "length" "4")])
3482 (define_insn "addsf3"
3483 [(set (match_operand:SF 0 "register_operand" "=f")
3484 (plus:SF (match_operand:SF 1 "register_operand" "f")
3485 (match_operand:SF 2 "register_operand" "f")))]
3486 "! TARGET_SOFT_FLOAT"
3488 [(set_attr "type" "fpalu")
3489 (set_attr "pa_combine_type" "faddsub")
3490 (set_attr "length" "4")])
3492 (define_insn "subdf3"
3493 [(set (match_operand:DF 0 "register_operand" "=f")
3494 (minus:DF (match_operand:DF 1 "register_operand" "f")
3495 (match_operand:DF 2 "register_operand" "f")))]
3496 "! TARGET_SOFT_FLOAT"
3498 [(set_attr "type" "fpalu")
3499 (set_attr "pa_combine_type" "faddsub")
3500 (set_attr "length" "4")])
3502 (define_insn "subsf3"
3503 [(set (match_operand:SF 0 "register_operand" "=f")
3504 (minus:SF (match_operand:SF 1 "register_operand" "f")
3505 (match_operand:SF 2 "register_operand" "f")))]
3506 "! TARGET_SOFT_FLOAT"
3508 [(set_attr "type" "fpalu")
3509 (set_attr "pa_combine_type" "faddsub")
3510 (set_attr "length" "4")])
;; Floating-point multiply and divide.  Multiplies carry pa_combine_type
;; "fmpy" (candidates for fmpyadd/fmpysub fusion); divides are not
;; combinable.  Distinct insn types (fpmuldbl/fpmulsgl/fpdivdbl/fpdivsgl)
;; let the scheduler model the different latencies.
3512 (define_insn "muldf3"
3513 [(set (match_operand:DF 0 "register_operand" "=f")
3514 (mult:DF (match_operand:DF 1 "register_operand" "f")
3515 (match_operand:DF 2 "register_operand" "f")))]
3516 "! TARGET_SOFT_FLOAT"
3518 [(set_attr "type" "fpmuldbl")
3519 (set_attr "pa_combine_type" "fmpy")
3520 (set_attr "length" "4")])
3522 (define_insn "mulsf3"
3523 [(set (match_operand:SF 0 "register_operand" "=f")
3524 (mult:SF (match_operand:SF 1 "register_operand" "f")
3525 (match_operand:SF 2 "register_operand" "f")))]
3526 "! TARGET_SOFT_FLOAT"
3528 [(set_attr "type" "fpmulsgl")
3529 (set_attr "pa_combine_type" "fmpy")
3530 (set_attr "length" "4")])
3532 (define_insn "divdf3"
3533 [(set (match_operand:DF 0 "register_operand" "=f")
3534 (div:DF (match_operand:DF 1 "register_operand" "f")
3535 (match_operand:DF 2 "register_operand" "f")))]
3536 "! TARGET_SOFT_FLOAT"
3538 [(set_attr "type" "fpdivdbl")
3539 (set_attr "length" "4")])
3541 (define_insn "divsf3"
3542 [(set (match_operand:SF 0 "register_operand" "=f")
3543 (div:SF (match_operand:SF 1 "register_operand" "f")
3544 (match_operand:SF 2 "register_operand" "f")))]
3545 "! TARGET_SOFT_FLOAT"
3547 [(set_attr "type" "fpdivsgl")
3548 (set_attr "length" "4")])
;; Floating-point negate, absolute value, and square root, double and
;; single precision; all require hardware FP.
3550 (define_insn "negdf2"
3551 [(set (match_operand:DF 0 "register_operand" "=f")
3552 (neg:DF (match_operand:DF 1 "register_operand" "f")))]
3553 "! TARGET_SOFT_FLOAT"
3555 [(set_attr "type" "fpalu")
3556 (set_attr "length" "4")])
3558 (define_insn "negsf2"
3559 [(set (match_operand:SF 0 "register_operand" "=f")
3560 (neg:SF (match_operand:SF 1 "register_operand" "f")))]
3561 "! TARGET_SOFT_FLOAT"
3563 [(set_attr "type" "fpalu")
3564 (set_attr "length" "4")])
3566 (define_insn "absdf2"
3567 [(set (match_operand:DF 0 "register_operand" "=f")
3568 (abs:DF (match_operand:DF 1 "register_operand" "f")))]
3569 "! TARGET_SOFT_FLOAT"
3571 [(set_attr "type" "fpalu")
3572 (set_attr "length" "4")])
3574 (define_insn "abssf2"
3575 [(set (match_operand:SF 0 "register_operand" "=f")
3576 (abs:SF (match_operand:SF 1 "register_operand" "f")))]
3577 "! TARGET_SOFT_FLOAT"
3579 [(set_attr "type" "fpalu")
3580 (set_attr "length" "4")])
3582 (define_insn "sqrtdf2"
3583 [(set (match_operand:DF 0 "register_operand" "=f")
3584 (sqrt:DF (match_operand:DF 1 "register_operand" "f")))]
3585 "! TARGET_SOFT_FLOAT"
3587 [(set_attr "type" "fpsqrtdbl")
3588 (set_attr "length" "4")])
3590 (define_insn "sqrtsf2"
3591 [(set (match_operand:SF 0 "register_operand" "=f")
3592 (sqrt:SF (match_operand:SF 1 "register_operand" "f")))]
3593 "! TARGET_SOFT_FLOAT"
3595 [(set_attr "type" "fpsqrtsgl")
3596 (set_attr "length" "4")])
3598 ;;- Shift instructions
3600 ;; Optimized special case of shifting.
;; Two combiner patterns matching a logical right shift applied directly
;; to a memory operand, so the shift can be folded into the load
;; (type "load", one 4-byte insn each).
3603 [(set (match_operand:SI 0 "register_operand" "=r")
3604 (lshiftrt:SI (match_operand:SI 1 "memory_operand" "m")
3608 [(set_attr "type" "load")
3609 (set_attr "length" "4")])
3612 [(set (match_operand:SI 0 "register_operand" "=r")
3613 (lshiftrt:SI (match_operand:SI 1 "memory_operand" "m")
3617 [(set_attr "type" "load")
3618 (set_attr "length" "4")])
;; Shift-and-add: op0 = op2 * {2,4,8} + op1 via sh1add/sh2add/sh3add.
;; %O3 prints log2 of the shadd_operand scale factor.
3621 [(set (match_operand:SI 0 "register_operand" "=r")
3622 (plus:SI (mult:SI (match_operand:SI 2 "register_operand" "r")
3623 (match_operand:SI 3 "shadd_operand" ""))
3624 (match_operand:SI 1 "register_operand" "r")))]
3626 "sh%O3addl %2,%1,%0"
3627 [(set_attr "type" "binary")
3628 (set_attr "length" "4")])
3630 ;; This variant of the above insn can occur if the first operand
3631 ;; is the frame pointer. This is a kludge, but there doesn't
3632 ;; seem to be a way around it. Only recognize it while reloading.
3633 ;; Note how operand 3 uses a predicate of "const_int_operand", but
3634 ;; has constraints allowing a register. I don't know how this works,
3635 ;; but it somehow makes sure that out-of-range constants are placed
3636 ;; in a register which somehow magically is a "const_int_operand".
3637 ;; (this was stolen from alpha.md, I'm not going to try and change it.)
;; shadd plus a displacement; reload-only.  Alternative 0 adds a register
;; displacement (addl), alternative 1 a 14-bit immediate (ldo).
3640 [(set (match_operand:SI 0 "register_operand" "=&r,r")
3641 (plus:SI (plus:SI (mult:SI (match_operand:SI 2 "register_operand" "r,r")
3642 (match_operand:SI 4 "shadd_operand" ""))
3643 (match_operand:SI 1 "register_operand" "r,r"))
3644 (match_operand:SI 3 "const_int_operand" "r,J")))]
3645 "reload_in_progress"
3647 sh%O4addl %2,%1,%0\;addl %3,%0,%0
3648 sh%O4addl %2,%1,%0\;ldo %3(%0),%0"
3649 [(set_attr "type" "multi")
3650 (set_attr "length" "8")])
;; SImode left shift.  PA has no variable left-shift instruction, so for
;; a non-constant count the expander computes 31-count into the SAR
;; ("q") register and emits a variable deposit (zvdep32 / zvdep_imm).
3652 (define_expand "ashlsi3"
3653 [(set (match_operand:SI 0 "register_operand" "")
3654 (ashift:SI (match_operand:SI 1 "lhs_lshift_operand" "")
3655 (match_operand:SI 2 "arith32_operand" "")))]
3659 if (GET_CODE (operands[2]) != CONST_INT)
3661 rtx temp = gen_reg_rtx (SImode);
3662 emit_insn (gen_subsi3 (temp, GEN_INT (31), operands[2]));
3663 if (GET_CODE (operands[1]) == CONST_INT)
3664 emit_insn (gen_zvdep_imm (operands[0], operands[1], temp));
3666 emit_insn (gen_zvdep32 (operands[0], operands[1], temp));
3669 /* Make sure both inputs are not constants,
3670 there are no patterns for that. */
3671 operands[1] = force_reg (SImode, operands[1]);
;; Constant-count left shift, done with zdep (zero-and-deposit).
;; %P2 prints 31-count, %L2 prints 32-count.
3675 [(set (match_operand:SI 0 "register_operand" "=r")
3676 (ashift:SI (match_operand:SI 1 "register_operand" "r")
3677 (match_operand:SI 2 "const_int_operand" "n")))]
3679 "zdep %1,%P2,%L2,%0"
3680 [(set_attr "type" "shift")
3681 (set_attr "length" "4")])
3683 ; Match cases of op1 a CONST_INT here that zvdep_imm doesn't handle.
3684 ; Doing it like this makes slightly better code since reload can
3685 ; replace a register with a known value in range -16..15 with a
3686 ; constant. Ideally, we would like to merge zvdep32 and zvdep_imm,
3687 ; but since we have no more CONST_OK... characters, that is not
;; Variable left shift; op2 holds 31-count in the SAR ("q") register.
3689 (define_insn "zvdep32"
3690 [(set (match_operand:SI 0 "register_operand" "=r,r")
3691 (ashift:SI (match_operand:SI 1 "arith5_operand" "r,L")
3692 (minus:SI (const_int 31)
3693 (match_operand:SI 2 "register_operand" "q,q"))))]
3698 [(set_attr "type" "shift,shift")
3699 (set_attr "length" "4,4")])
;; Variable left shift of a constant of the form 0...01...1xxxx; the C
;; body recodes the constant into the 5-bit immediate and field length
;; that a single zvdepi needs.
3701 (define_insn "zvdep_imm"
3702 [(set (match_operand:SI 0 "register_operand" "=r")
3703 (ashift:SI (match_operand:SI 1 "lhs_lshift_cint_operand" "")
3704 (minus:SI (const_int 31)
3705 (match_operand:SI 2 "register_operand" "q"))))]
3709 int x = INTVAL (operands[1]);
3710 operands[2] = GEN_INT (4 + exact_log2 ((x >> 4) + 1));
3711 operands[1] = GEN_INT ((x & 0xf) - 0x10);
3712 return \"zvdepi %1,%2,%0\";
3714 [(set_attr "type" "shift")
3715 (set_attr "length" "4")])
;; OR of a shifted all-ones constant (2^k - 1) into op3: a single
;; variable deposit of -1 with field length k ("vdepi -1").  The shift
;; count comes from the SAR via the 31-count form used by ashlsi3.
3717 (define_insn "vdepi_ior"
3718 [(set (match_operand:SI 0 "register_operand" "=r")
3719 (ior:SI (ashift:SI (match_operand:SI 1 "const_int_operand" "")
3720 (minus:SI (const_int 31)
3721 (match_operand:SI 2 "register_operand" "q")))
3722 (match_operand:SI 3 "register_operand" "0")))]
3723 ; accept ...0001...1, can this be generalized?
3724 "exact_log2 (INTVAL (operands[1]) + 1) >= 0"
3727 int x = INTVAL (operands[1]);
3728 operands[2] = GEN_INT (exact_log2 (x + 1));
3729 return \"vdepi -1,%2,%0\";
3731 [(set_attr "type" "shift")
3732 (set_attr "length" "4")])
;; AND with a rotated -2 (i.e. clearing one variable-position bit field):
;; a variable deposit of 0 ("vdepi 0").  Condition restricts op1 to -2.
3734 (define_insn "vdepi_and"
3735 [(set (match_operand:SI 0 "register_operand" "=r")
3736 (and:SI (rotate:SI (match_operand:SI 1 "const_int_operand" "")
3737 (minus:SI (const_int 31)
3738 (match_operand:SI 2 "register_operand" "q")))
3739 (match_operand:SI 3 "register_operand" "0")))]
3740 ; this can be generalized...!
3741 "INTVAL (operands[1]) == -2"
3744 int x = INTVAL (operands[1]);
3745 operands[2] = GEN_INT (exact_log2 ((~x) + 1));
3746 return \"vdepi 0,%2,%0\";
3748 [(set_attr "type" "shift")
3749 (set_attr "length" "4")])
;; SImode arithmetic right shift.  As with ashlsi3, a variable count is
;; converted to 31-count in the SAR and handled by vextrs32.
3751 (define_expand "ashrsi3"
3752 [(set (match_operand:SI 0 "register_operand" "")
3753 (ashiftrt:SI (match_operand:SI 1 "register_operand" "")
3754 (match_operand:SI 2 "arith32_operand" "")))]
3758 if (GET_CODE (operands[2]) != CONST_INT)
3760 rtx temp = gen_reg_rtx (SImode);
3761 emit_insn (gen_subsi3 (temp, GEN_INT (31), operands[2]));
3762 emit_insn (gen_vextrs32 (operands[0], operands[1], temp));
;; Constant-count arithmetic right shift via extrs (sign extract).
3768 [(set (match_operand:SI 0 "register_operand" "=r")
3769 (ashiftrt:SI (match_operand:SI 1 "register_operand" "r")
3770 (match_operand:SI 2 "const_int_operand" "n")))]
3772 "extrs %1,%P2,%L2,%0"
3773 [(set_attr "type" "shift")
3774 (set_attr "length" "4")])
;; Variable arithmetic right shift; op2 holds 31-count in the SAR.
3776 (define_insn "vextrs32"
3777 [(set (match_operand:SI 0 "register_operand" "=r")
3778 (ashiftrt:SI (match_operand:SI 1 "register_operand" "r")
3779 (minus:SI (const_int 31)
3780 (match_operand:SI 2 "register_operand" "q"))))]
3783 [(set_attr "type" "shift")
3784 (set_attr "length" "4")])
;; Logical right shift: SAR-based variable form, or extru (zero extract)
;; for a constant count.
3786 (define_insn "lshrsi3"
3787 [(set (match_operand:SI 0 "register_operand" "=r,r")
3788 (lshiftrt:SI (match_operand:SI 1 "register_operand" "r,r")
3789 (match_operand:SI 2 "arith32_operand" "q,n")))]
3793 extru %1,%P2,%L2,%0"
3794 [(set_attr "type" "shift")
3795 (set_attr "length" "4")])
;; Rotate right: shd (double shift of %1 with itself) for a constant
;; count (masked to 0..31), vshd for a count in the SAR.
3797 (define_insn "rotrsi3"
3798 [(set (match_operand:SI 0 "register_operand" "=r,r")
3799 (rotatert:SI (match_operand:SI 1 "register_operand" "r,r")
3800 (match_operand:SI 2 "arith32_operand" "q,n")))]
3804 if (GET_CODE (operands[2]) == CONST_INT)
3806 operands[2] = GEN_INT (INTVAL (operands[2]) & 31);
3807 return \"shd %1,%1,%2,%0\";
3810 return \"vshd %1,%1,%0\";
3812 [(set_attr "type" "shift")
3813 (set_attr "length" "4")])
;; Two combiner patterns recognizing (x << n) op (y >> 32-n), where op is
;; plus/xor/ior, in either operand order; both map onto a single "shd"
;; double-word shift.  The n3+n4==32 condition guarantees the two shifts
;; together select exactly 32 contiguous bits.
3816 [(set (match_operand:SI 0 "register_operand" "=r")
3817 (match_operator:SI 5 "plus_xor_ior_operator"
3818 [(ashift:SI (match_operand:SI 1 "register_operand" "r")
3819 (match_operand:SI 3 "const_int_operand" "n"))
3820 (lshiftrt:SI (match_operand:SI 2 "register_operand" "r")
3821 (match_operand:SI 4 "const_int_operand" "n"))]))]
3822 "INTVAL (operands[3]) + INTVAL (operands[4]) == 32"
3824 [(set_attr "type" "shift")
3825 (set_attr "length" "4")])
3828 [(set (match_operand:SI 0 "register_operand" "=r")
3829 (match_operator:SI 5 "plus_xor_ior_operator"
3830 [(lshiftrt:SI (match_operand:SI 2 "register_operand" "r")
3831 (match_operand:SI 4 "const_int_operand" "n"))
3832 (ashift:SI (match_operand:SI 1 "register_operand" "r")
3833 (match_operand:SI 3 "const_int_operand" "n"))]))]
3834 "INTVAL (operands[3]) + INTVAL (operands[4]) == 32"
3836 [(set_attr "type" "shift")
3837 (set_attr "length" "4")])
;; (x << n) & mask, where mask >> n is 2^k - 1: a single zdep depositing
;; the low k bits of x at position n.  The C body converts the shift
;; count and mask into zdep's position/length operands.
3840 [(set (match_operand:SI 0 "register_operand" "=r")
3841 (and:SI (ashift:SI (match_operand:SI 1 "register_operand" "r")
3842 (match_operand:SI 2 "const_int_operand" ""))
3843 (match_operand:SI 3 "const_int_operand" "")))]
3844 "exact_log2 (1 + (INTVAL (operands[3]) >> (INTVAL (operands[2]) & 31))) >= 0"
3847 int cnt = INTVAL (operands[2]) & 31;
3848 operands[3] = GEN_INT (exact_log2 (1 + (INTVAL (operands[3]) >> cnt)));
3849 operands[2] = GEN_INT (31 - cnt);
3850 return \"zdep %1,%2,%3,%0\";
3852 [(set_attr "type" "shift")
3853 (set_attr "length" "4")])
3855 ;; Unconditional and other jump instructions.
;; Trivial return, usable only when hppa_can_use_return_insn_p() says the
;; function needs no epilogue work.
3857 (define_insn "return"
3859 "hppa_can_use_return_insn_p ()"
3861 [(set_attr "type" "branch")
3862 (set_attr "length" "4")])
3864 ;; Use a different pattern for functions which have non-trivial
3865 ;; epilogues so as not to confuse jump and reorg.
3866 (define_insn "return_internal"
3871 [(set_attr "type" "branch")
3872 (set_attr "length" "4")])
;; Prologue/epilogue expanders just defer to the C helpers in pa.c.
3874 (define_expand "prologue"
3877 "hppa_expand_prologue ();DONE;")
3879 (define_expand "epilogue"
3884 /* Try to use the trivial return first. Else use the full
3886 if (hppa_can_use_return_insn_p ())
3887 emit_jump_insn (gen_return ());
3890 hppa_expand_epilogue ();
3891 emit_jump_insn (gen_return_internal ());
3896 ;; Special because we use the value placed in %r2 by the bl instruction
3897 ;; from within its delay slot to set the value for the 2nd parameter to
;; _mcount: the ldo in bl's delay slot computes %r25 from the return
;; address just written to %r2 by the bl itself.
3899 (define_insn "call_profiler"
3900 [(unspec_volatile [(const_int 0)] 0)
3901 (use (match_operand:SI 0 "const_int_operand" ""))]
3903 "bl _mcount,%%r2\;ldo %0(%%r2),%%r25"
3904 [(set_attr "type" "multi")
3905 (set_attr "length" "8")])
;; Zero-length scheduling barrier: emits nothing but blocks instruction
;; movement across it (used around call sequences below).
3907 (define_insn "blockage"
3908 [(unspec_volatile [(const_int 2)] 0)]
3911 [(set_attr "length" "0")])
;; Unconditional jump.  Short branches use "bl"; out-of-range branches
;; spill %r1 to the stack, load the target into %r1 (PIC or absolute
;; form), jump indirectly, and restore %r1 in the delay slot.
3914 [(set (pc) (label_ref (match_operand 0 "" "")))]
3918 extern int optimize;
3920 if (GET_MODE (insn) == SImode)
3921 return \"bl %l0,0%#\";
3923 /* An unconditional branch which can reach its target. */
3924 if (get_attr_length (insn) != 24
3925 && get_attr_length (insn) != 16)
3926 return \"bl%* %l0,0\";
3928 /* An unconditional branch which can not reach its target.
3930 We need to be able to use %r1 as a scratch register; however,
3931 we can never be sure whether or not it's got a live value in
3932 it. Therefore, we must restore its original value after the
3935 To make matters worse, we don't have a stack slot which we
3936 can always clobber. sp-12/sp-16 shouldn't ever have a live
3937 value during a non-optimizing compilation, so we use those
3938 slots for now. We don't support very long branches when
3939 optimizing -- they should be quite rare when optimizing.
3941 Really the way to go long term is a register scavenger; goto
3942 the target of the jump and find a register which we can use
3943 as a scratch to hold the value in %r1. */
3945 /* We don't know how to register scavenge yet. */
3949 /* First store %r1 into the stack. */
3950 output_asm_insn (\"stw %%r1,-16(%%r30)\", operands);
3952 /* Now load the target address into %r1 and do an indirect jump
3953 to the value specified in %r1. Be careful to generate PIC
3958 xoperands[0] = operands[0];
3959 xoperands[1] = gen_label_rtx ();
3961 output_asm_insn (\"bl .+8,%%r1\\n\\taddil L'%l0-%l1,%%r1\", xoperands);
3962 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, \"L\",
3963 CODE_LABEL_NUMBER (xoperands[1]));
3964 output_asm_insn (\"ldo R'%l0-%l1(%%r1),%%r1\\n\\tbv 0(%%r1)\",
3968 output_asm_insn (\"ldil L'%l0,%%r1\\n\\tbe R'%l0(%%sr4,%%r1)\", operands);;
3970 /* And restore the value of %r1 in the delay slot. We're not optimizing,
3971 so we know nothing else can be in the delay slot. */
3972 return \"ldw -16(%%r30),%%r1\";
;; Length depends on branch distance, whether the jump sits in a call
;; delay slot, and whether PIC code is being generated.
3974 [(set_attr "type" "uncond_branch")
3975 (set_attr "pa_combine_type" "uncond_branch")
3976 (set (attr "length")
3977 (cond [(eq (symbol_ref "jump_in_call_delay (insn)") (const_int 1))
3978 (if_then_else (lt (abs (minus (match_dup 0)
3979 (plus (pc) (const_int 8))))
3983 (ge (abs (minus (match_dup 0) (plus (pc) (const_int 8))))
3985 (if_then_else (eq (symbol_ref "flag_pic") (const_int 0))
3990 ;; Subroutines of "casesi".
3991 ;; operand 0 is index
3992 ;; operand 1 is the minimum bound
3993 ;; operand 2 is the maximum bound - minimum bound + 1
3994 ;; operand 3 is CODE_LABEL for the table;
3995 ;; operand 4 is the CODE_LABEL to go to if index out of range.
;; Dispatch-table switch.  Normalizes the index to zero-based (adding
;; the negated minimum bound), range-checks it against op2 with an
;; unsigned compare/branch to op4, then jumps through the table (op3).
3997 (define_expand "casesi"
3998 [(match_operand:SI 0 "general_operand" "")
3999 (match_operand:SI 1 "const_int_operand" "")
4000 (match_operand:SI 2 "const_int_operand" "")
4001 (match_operand 3 "" "")
4002 (match_operand 4 "" "")]
4006 if (GET_CODE (operands[0]) != REG)
4007 operands[0] = force_reg (SImode, operands[0]);
4009 if (operands[1] != const0_rtx)
4011 rtx reg = gen_reg_rtx (SImode);
4013 operands[1] = GEN_INT (-INTVAL (operands[1]));
4014 if (!INT_14_BITS (operands[1]))
4015 operands[1] = force_reg (SImode, operands[1]);
4016 emit_insn (gen_addsi3 (reg, operands[0], operands[1]));
4021 if (!INT_5_BITS (operands[2]))
4022 operands[2] = force_reg (SImode, operands[2]);
4024 emit_insn (gen_cmpsi (operands[0], operands[2]));
4025 emit_jump_insn (gen_bgtu (operands[4]));
4026 if (TARGET_BIG_SWITCH)
4028 rtx temp = gen_reg_rtx (SImode);
4029 emit_move_insn (temp, gen_rtx_PLUS (SImode, operands[0], operands[0]));
4032 emit_jump_insn (gen_casesi0 (operands[0], operands[3]));
;; PC-relative indexed jump through the branch table.
4036 (define_insn "casesi0"
4038 (mem:SI (plus:SI (pc)
4039 (match_operand:SI 0 "register_operand" "r")))
4040 (label_ref (match_operand 1 "" ""))))]
4043 [(set_attr "type" "multi")
4044 (set_attr "length" "8")])
4046 ;; Need nops for the calls because execution is supposed to continue
4047 ;; past; we don't want to nullify an instruction that we need.
4048 ;;- jump to subroutine
;; Call expander.  Named (SYMBOL_REF) calls and calls through a pointer
;; use separate insns because they follow different calling conventions;
;; indirect calls pass the target address in %r22 for $$dyncall.  For PIC
;; the PIC register is marked used and restored after the call, with a
;; blockage insn keeping the scheduler from moving the restore.
4050 (define_expand "call"
4051 [(parallel [(call (match_operand:SI 0 "" "")
4052 (match_operand 1 "" ""))
4053 (clobber (reg:SI 2))])]
4060 if (TARGET_PORTABLE_RUNTIME)
4061 op = force_reg (SImode, XEXP (operands[0], 0));
4063 op = XEXP (operands[0], 0);
4065 /* Use two different patterns for calls to explicitly named functions
4066 and calls through function pointers. This is necessary as these two
4067 types of calls use different calling conventions, and CSE might try
4068 to change the named call into an indirect call in some cases (using
4069 two patterns keeps CSE from performing this optimization). */
4070 if (GET_CODE (op) == SYMBOL_REF)
4071 call_insn = emit_call_insn (gen_call_internal_symref (op, operands[1]));
4074 rtx tmpreg = gen_rtx_REG (SImode, 22);
4075 emit_move_insn (tmpreg, force_reg (SImode, op));
4076 call_insn = emit_call_insn (gen_call_internal_reg (operands[1]));
4081 use_reg (&CALL_INSN_FUNCTION_USAGE (call_insn), pic_offset_table_rtx);
4083 /* After each call we must restore the PIC register, even if it
4084 doesn't appear to be used.
4086 This will set regs_ever_live for the callee saved register we
4087 stored the PIC register in. */
4088 emit_move_insn (pic_offset_table_rtx,
4089 gen_rtx_REG (SImode, PIC_OFFSET_TABLE_REGNUM_SAVED));
4090 emit_insn (gen_rtx_USE (VOIDmode, pic_offset_table_rtx));
4092 /* Gross. We have to keep the scheduler from moving the restore
4093 of the PIC register away from the call. SCHED_GROUP_P is
4094 supposed to do this, but for some reason the compiler will
4095 go into an infinite loop when we use that.
4097 This method (blockage insn) may make worse code (then again
4098 it may not since calls are nearly blockages anyway), but at
4099 least it should work. */
4100 emit_insn (gen_blockage ());
;; Direct call to a named function.  Argument-relocation descriptor is
;; emitted first; the call text itself comes from output_call() (pa.c),
;; with %r2 as the return pointer.
4105 (define_insn "call_internal_symref"
4106 [(call (mem:SI (match_operand:SI 0 "call_operand_address" ""))
4107 (match_operand 1 "" "i"))
4108 (clobber (reg:SI 2))
4109 (use (const_int 0))]
4110 "! TARGET_PORTABLE_RUNTIME"
4113 output_arg_descriptor (insn);
4114 return output_call (insn, operands[0], gen_rtx_REG (SImode, 2));
4116 [(set_attr "type" "call")
4117 (set (attr "length")
4118 ;; If we're sure that we can either reach the target or that the
4119 ;; linker can use a long-branch stub, then the length is 4 bytes.
4121 ;; For long-calls the length will be either 52 bytes (non-pic)
4122 ;; or 68 bytes (pic).
4123 ;; Else we have to use a long-call;
4124 (if_then_else (lt (plus (symbol_ref "total_code_bytes") (pc))
4127 (if_then_else (eq (symbol_ref "flag_pic")
;; Indirect call through %r22 via the $$dyncall millicode routine.  The
;; computed length attribute (below) uniquely selects one of four output
;; sequences: fast direct ble (no space regs / fast indirect), short
;; bl to $$dyncall, long absolute ldil/ble, or the PIC pc-relative form.
4132 (define_insn "call_internal_reg"
4133 [(call (mem:SI (reg:SI 22))
4134 (match_operand 0 "" "i"))
4135 (clobber (reg:SI 2))
4136 (use (const_int 1))]
4142 /* First the special case for kernels, level 0 systems, etc. */
4143 if (TARGET_NO_SPACE_REGS || TARGET_FAST_INDIRECT_CALLS)
4144 return \"ble 0(%%sr4,%%r22)\;copy %%r31,%%r2\";
4146 /* Now the normal case -- we can reach $$dyncall directly or
4147 we're sure that we can get there via a long-branch stub.
4149 No need to check target flags as the length uniquely identifies
4150 the remaining cases. */
4151 if (get_attr_length (insn) == 8)
4152 return \".CALL\\tARGW0=GR\;bl $$dyncall,%%r31\;copy %%r31,%%r2\";
4154 /* Long millicode call, but we are not generating PIC or portable runtime
4156 if (get_attr_length (insn) == 12)
4157 return \".CALL\\tARGW0=GR\;ldil L%%$$dyncall,%%r2\;ble R%%$$dyncall(%%sr4,%%r2)\;copy %%r31,%%r2\";
4159 /* Long millicode call for portable runtime. */
4160 if (get_attr_length (insn) == 20)
4161 return \"ldil L%%$$dyncall,%%r31\;ldo R%%$$dyncall(%%r31),%%r31\;blr 0,%%r2\;bv,n 0(%%r31)\;nop\";
4163 /* If we're generating PIC code. */
4164 xoperands[0] = operands[0];
4165 xoperands[1] = gen_label_rtx ();
4166 output_asm_insn (\"bl .+8,%%r1\", xoperands);
4167 output_asm_insn (\"addil L%%$$dyncall-%1,%%r1\", xoperands);
4168 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, \"L\",
4169 CODE_LABEL_NUMBER (xoperands[1]));
4170 output_asm_insn (\"ldo R%%$$dyncall-%1(%%r1),%%r1\", xoperands);
4171 output_asm_insn (\"blr 0,%%r2\", xoperands);
4172 output_asm_insn (\"bv,n 0(%%r1)\\n\\tnop\", xoperands);
4175 [(set_attr "type" "dyncall")
4176 (set (attr "length")
4178 ;; First NO_SPACE_REGS
4179 (ne (symbol_ref "TARGET_NO_SPACE_REGS || TARGET_FAST_INDIRECT_CALLS")
4183 ;; Target (or stub) within reach
4184 (and (lt (plus (symbol_ref "total_code_bytes") (pc))
4186 (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
4190 ;; Out of reach, but not PIC or PORTABLE_RUNTIME
4191 (and (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
4193 (eq (symbol_ref "flag_pic")
4197 (ne (symbol_ref "TARGET_PORTABLE_RUNTIME")
4201 ;; Out of range PIC case
;; call_value expander: identical strategy to "call" above (separate
;; symref/indirect patterns, %r22 for indirect targets, PIC-register
;; restore guarded by a blockage insn), plus a value destination in op0.
4204 (define_expand "call_value"
4205 [(parallel [(set (match_operand 0 "" "")
4206 (call (match_operand:SI 1 "" "")
4207 (match_operand 2 "" "")))
4208 (clobber (reg:SI 2))])]
4215 if (TARGET_PORTABLE_RUNTIME)
4216 op = force_reg (SImode, XEXP (operands[1], 0));
4218 op = XEXP (operands[1], 0);
4220 /* Use two different patterns for calls to explicitly named functions
4221 and calls through function pointers. This is necessary as these two
4222 types of calls use different calling conventions, and CSE might try
4223 to change the named call into an indirect call in some cases (using
4224 two patterns keeps CSE from performing this optimization). */
4225 if (GET_CODE (op) == SYMBOL_REF)
4226 call_insn = emit_call_insn (gen_call_value_internal_symref (operands[0],
4231 rtx tmpreg = gen_rtx_REG (SImode, 22);
4232 emit_move_insn (tmpreg, force_reg (SImode, op));
4233 call_insn = emit_call_insn (gen_call_value_internal_reg (operands[0],
4238 use_reg (&CALL_INSN_FUNCTION_USAGE (call_insn), pic_offset_table_rtx);
4240 /* After each call we must restore the PIC register, even if it
4241 doesn't appear to be used.
4243 This will set regs_ever_live for the callee saved register we
4244 stored the PIC register in. */
4245 emit_move_insn (pic_offset_table_rtx,
4246 gen_rtx_REG (SImode, PIC_OFFSET_TABLE_REGNUM_SAVED));
4247 emit_insn (gen_rtx_USE (VOIDmode, pic_offset_table_rtx));
4249 /* Gross. We have to keep the scheduler from moving the restore
4250 of the PIC register away from the call. SCHED_GROUP_P is
4251 supposed to do this, but for some reason the compiler will
4252 go into an infinite loop when we use that.
4254 This method (blockage insn) may make worse code (then again
4255 it may not since calls are nearly blockages anyway), but at
4256 least it should work. */
4257 emit_insn (gen_blockage ());
;; Direct value-returning call to a named function; same output helpers
;; and length computation as call_internal_symref.
4262 (define_insn "call_value_internal_symref"
4263 [(set (match_operand 0 "" "=rf")
4264 (call (mem:SI (match_operand:SI 1 "call_operand_address" ""))
4265 (match_operand 2 "" "i")))
4266 (clobber (reg:SI 2))
4267 (use (const_int 0))]
4268 ;;- Don't use operand 1 for most machines.
4269 "! TARGET_PORTABLE_RUNTIME"
4272 output_arg_descriptor (insn);
4273 return output_call (insn, operands[1], gen_rtx_REG (SImode, 2));
4275 [(set_attr "type" "call")
4276 (set (attr "length")
4277 ;; If we're sure that we can either reach the target or that the
4278 ;; linker can use a long-branch stub, then the length is 4 bytes.
4280 ;; For long-calls the length will be either 52 bytes (non-pic)
4281 ;; or 68 bytes (pic).
4282 ;; Else we have to use a long-call;
4283 (if_then_else (lt (plus (symbol_ref "total_code_bytes") (pc))
4286 (if_then_else (eq (symbol_ref "flag_pic")
;; Indirect value-returning call through %r22 / $$dyncall; mirrors
;; call_internal_reg — the length attribute selects the fast ble, short
;; bl, long absolute, or PIC pc-relative sequence.
4291 (define_insn "call_value_internal_reg"
4292 [(set (match_operand 0 "" "=rf")
4293 (call (mem:SI (reg:SI 22))
4294 (match_operand 1 "" "i")))
4295 (clobber (reg:SI 2))
4296 (use (const_int 1))]
4302 /* First the special case for kernels, level 0 systems, etc. */
4303 if (TARGET_NO_SPACE_REGS || TARGET_FAST_INDIRECT_CALLS)
4304 return \"ble 0(%%sr4,%%r22)\;copy %%r31,%%r2\";
4306 /* Now the normal case -- we can reach $$dyncall directly or
4307 we're sure that we can get there via a long-branch stub.
4309 No need to check target flags as the length uniquely identifies
4310 the remaining cases. */
4311 if (get_attr_length (insn) == 8)
4312 return \".CALL\\tARGW0=GR\;bl $$dyncall,%%r31\;copy %%r31,%%r2\";
4314 /* Long millicode call, but we are not generating PIC or portable runtime
4316 if (get_attr_length (insn) == 12)
4317 return \".CALL\\tARGW0=GR\;ldil L%%$$dyncall,%%r2\;ble R%%$$dyncall(%%sr4,%%r2)\;copy %%r31,%%r2\";
4319 /* Long millicode call for portable runtime. */
4320 if (get_attr_length (insn) == 20)
4321 return \"ldil L%%$$dyncall,%%r31\;ldo R%%$$dyncall(%%r31),%%r31\;blr 0,%%r2\;bv,n 0(%%r31)\;nop\";
4323 /* If we're generating PIC code. */
4324 xoperands[0] = operands[1];
4325 xoperands[1] = gen_label_rtx ();
4326 output_asm_insn (\"bl .+8,%%r1\", xoperands);
4327 output_asm_insn (\"addil L%%$$dyncall-%1,%%r1\", xoperands);
4328 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, \"L\",
4329 CODE_LABEL_NUMBER (xoperands[1]));
4330 output_asm_insn (\"ldo R%%$$dyncall-%1(%%r1),%%r1\", xoperands);
4331 output_asm_insn (\"blr 0,%%r2\", xoperands);
4332 output_asm_insn (\"bv,n 0(%%r1)\\n\\tnop\", xoperands);
4335 [(set_attr "type" "dyncall")
4336 (set (attr "length")
4338 ;; First NO_SPACE_REGS
4339 (ne (symbol_ref "TARGET_NO_SPACE_REGS || TARGET_FAST_INDIRECT_CALLS")
4343 ;; Target (or stub) within reach
4344 (and (lt (plus (symbol_ref "total_code_bytes") (pc))
4346 (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
4350 ;; Out of reach, but not PIC or PORTABLE_RUNTIME
4351 (and (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
4353 (eq (symbol_ref "flag_pic")
4357 (ne (symbol_ref "TARGET_PORTABLE_RUNTIME")
4361 ;; Out of range PIC case
4364 ;; Call subroutine returning any type.
;; Call returning any type (__builtin_apply): perform the call, copy the
;; result block (op2) out of the value registers, and emit a blockage so
;; the optimizer cannot move code across the register copies.
4366 (define_expand "untyped_call"
4367 [(parallel [(call (match_operand 0 "" "")
4369 (match_operand 1 "" "")
4370 (match_operand 2 "" "")])]
4376 emit_call_insn (gen_call (operands[0], const0_rtx))
4378 for (i = 0; i < XVECLEN (operands[2], 0); i++)
4380 rtx set = XVECEXP (operands[2], 0, i);
4381 emit_move_insn (SET_DEST (set), SET_SRC (set));
4384 /* The optimizer does not know that the call sets the function value
4385 registers we stored in the result block. We avoid problems by
4386 claiming that all hard registers are used and clobbered at this
4388 emit_insn (gen_blockage ());
4396 [(set_attr "type" "move")
4397 (set_attr "length" "4")])
4399 ;; These are just placeholders so we know where branch tables
;; Zero-length markers bracketing branch tables; they emit the GAS-only
;; .begin_brtab/.end_brtab pseudo-ops (nothing for other assemblers).
4401 (define_insn "begin_brtab"
4406 /* Only GAS actually supports this pseudo-op. */
4408 return \".begin_brtab\";
4412 [(set_attr "type" "move")
4413 (set_attr "length" "0")])
4415 (define_insn "end_brtab"
4420 /* Only GAS actually supports this pseudo-op. */
4422 return \".end_brtab\";
4426 [(set_attr "type" "move")
4427 (set_attr "length" "0")])
4429 ;;; Hope this is only within a function...
;; Jump through a register (intra-function only, per the comment above).
4430 (define_insn "indirect_jump"
4431 [(set (pc) (match_operand:SI 0 "register_operand" "r"))]
4434 [(set_attr "type" "branch")
4435 (set_attr "length" "4")])
;; Bit-field extract/insert.  extru/extrs take (reg, end-position,
;; length) — hence the "%3+%2-1" position arithmetic; the "q" variants
;; take a variable position from the SAR.
4437 (define_insn "extzv"
4438 [(set (match_operand:SI 0 "register_operand" "=r")
4439 (zero_extract:SI (match_operand:SI 1 "register_operand" "r")
4440 (match_operand:SI 2 "uint5_operand" "")
4441 (match_operand:SI 3 "uint5_operand" "")))]
4443 "extru %1,%3+%2-1,%2,%0"
4444 [(set_attr "type" "shift")
4445 (set_attr "length" "4")])
;; Variable-position unsigned extract (position in the SAR).
4448 [(set (match_operand:SI 0 "register_operand" "=r")
4449 (zero_extract:SI (match_operand:SI 1 "register_operand" "r")
4451 (match_operand:SI 3 "register_operand" "q")))]
4454 [(set_attr "type" "shift")
4455 (set_attr "length" "4")])
;; Signed bit-field extract (extv pattern).
4458 [(set (match_operand:SI 0 "register_operand" "=r")
4459 (sign_extract:SI (match_operand:SI 1 "register_operand" "r")
4460 (match_operand:SI 2 "uint5_operand" "")
4461 (match_operand:SI 3 "uint5_operand" "")))]
4463 "extrs %1,%3+%2-1,%2,%0"
4464 [(set_attr "type" "shift")
4465 (set_attr "length" "4")])
;; Variable-position signed extract (position in the SAR).
4468 [(set (match_operand:SI 0 "register_operand" "=r")
4469 (sign_extract:SI (match_operand:SI 1 "register_operand" "r")
4471 (match_operand:SI 3 "register_operand" "q")))]
4474 [(set_attr "type" "shift")
4475 (set_attr "length" "4")])
;; Bit-field insert (insv pattern): dep for a register source, depi for
;; a 5-bit immediate.
4478 [(set (zero_extract:SI (match_operand:SI 0 "register_operand" "+r,r")
4479 (match_operand:SI 1 "uint5_operand" "")
4480 (match_operand:SI 2 "uint5_operand" ""))
4481 (match_operand:SI 3 "arith5_operand" "r,L"))]
4484 dep %3,%2+%1-1,%1,%0
4485 depi %3,%2+%1-1,%1,%0"
4486 [(set_attr "type" "shift,shift")
4487 (set_attr "length" "4,4")])
4489 ;; Optimize insertion of const_int values of type 1...1xxxx.
;; Such constants are not arith5_operand, but sign-extend from 5 bits
;; once recoded; the C body rewrites op3 into depi's signed immediate.
4491 [(set (zero_extract:SI (match_operand:SI 0 "register_operand" "+r")
4492 (match_operand:SI 1 "uint5_operand" "")
4493 (match_operand:SI 2 "uint5_operand" ""))
4494 (match_operand:SI 3 "const_int_operand" ""))]
4495 "(INTVAL (operands[3]) & 0x10) != 0 &&
4496 (~INTVAL (operands[3]) & ((1L << INTVAL (operands[1])) - 1) & ~0xf) == 0"
4499 operands[3] = GEN_INT ((INTVAL (operands[3]) & 0xf) - 0x10);
4500 return \"depi %3,%2+%1-1,%1,%0\";
4502 [(set_attr "type" "shift")
4503 (set_attr "length" "4")])
4505 ;; This insn is used for some loop tests, typically loops reversed when
4506 ;; strength reduction is used. It is actually created when the instruction
4507 ;; combination phase combines the special loop test. Since this insn
4508 ;; is both a jump insn and has an output, it must deal with its own
4509 ;; reloads, hence the `m' constraints. The `!' constraints direct reload
4510 ;; to not choose the register alternatives in the event a reload is needed.
;; Alternative 0: loop counter in a general register (fast path).
;; Alternative 1: counter in an FP register; alternative 2: counter in
;; memory.  Both of the latter expand to multi-insn sequences and use
;; the scratch register (operand 4) for the hand-written reloads.
4511 (define_insn "decrement_and_branch_until_zero"
4514 (match_operator 2 "comparison_operator"
4515 [(plus:SI (match_operand:SI 0 "register_operand" "+!r,!*f,!*m")
4516 (match_operand:SI 1 "int5_operand" "L,L,L"))
4518 (label_ref (match_operand 3 "" ""))
4521 (plus:SI (match_dup 0) (match_dup 1)))
4522 (clobber (match_scratch:SI 4 "=X,r,r"))]
4524 "* return output_dbra (operands, insn, which_alternative); "
4525 ;; Do not expect to understand this the first time through.
;; The length attribute below chooses between short and long branch
;; sequences based on the distance from pc to the target label, with
;; extra bytes allowed for the reload insns in alternatives 1 and 2.
4526 [(set_attr "type" "cbranch,multi,multi")
4527 (set (attr "length")
4528 (if_then_else (eq_attr "alternative" "0")
4529 ;; Loop counter in register case
4530 ;; Short branch has length of 4
4531 ;; Long branch has length of 8
4532 (if_then_else (lt (abs (minus (match_dup 3) (plus (pc) (const_int 8))))
4537 ;; Loop counter in FP reg case.
4538 ;; Extra goo to deal with additional reload insns.
4539 (if_then_else (eq_attr "alternative" "1")
4540 (if_then_else (lt (match_dup 3) (pc))
4542 (lt (abs (minus (match_dup 3) (plus (pc) (const_int 24))))
4547 (lt (abs (minus (match_dup 3) (plus (pc) (const_int 8))))
4551 ;; Loop counter in memory case.
4552 ;; Extra goo to deal with additional reload insns.
4553 (if_then_else (lt (match_dup 3) (pc))
4555 (lt (abs (minus (match_dup 3) (plus (pc) (const_int 12))))
4560 (lt (abs (minus (match_dup 3) (plus (pc) (const_int 8))))
4563 (const_int 16))))))])
;; "movb"-style patterns: compare a register against zero, branch, and
;; copy the register to the destination in the same insn.  Alternatives
;; cover destination in a general register, FP register, memory, or SAR
;; (constraint "*q"); output_movb emits the right sequence.
4568 (match_operator 2 "movb_comparison_operator"
4569 [(match_operand:SI 1 "register_operand" "r,r,r,r") (const_int 0)])
4570 (label_ref (match_operand 3 "" ""))
4572 (set (match_operand:SI 0 "register_operand" "=!r,!*f,!*m,!*q")
4575 "* return output_movb (operands, insn, which_alternative, 0); "
4576 ;; Do not expect to understand this the first time through.
4577 [(set_attr "type" "cbranch,multi,multi,multi")
4578 (set (attr "length")
4579 (if_then_else (eq_attr "alternative" "0")
4580 ;; Loop counter in register case
4581 ;; Short branch has length of 4
4582 ;; Long branch has length of 8
4583 (if_then_else (lt (abs (minus (match_dup 3) (plus (pc) (const_int 8))))
4588 ;; Loop counter in FP reg case.
4589 ;; Extra goo to deal with additional reload insns.
4590 (if_then_else (eq_attr "alternative" "1")
4591 (if_then_else (lt (match_dup 3) (pc))
4593 (lt (abs (minus (match_dup 3) (plus (pc) (const_int 12))))
4598 (lt (abs (minus (match_dup 3) (plus (pc) (const_int 8))))
4602 ;; Loop counter in memory or sar case.
4603 ;; Extra goo to deal with additional reload insns.
4605 (lt (abs (minus (match_dup 3) (plus (pc) (const_int 8))))
4608 (const_int 12)))))])
4610 ;; Handle negated branch.
;; Same as above but the branch condition is inverted (final argument 1
;; to output_movb selects the negated form).
4614 (match_operator 2 "movb_comparison_operator"
4615 [(match_operand:SI 1 "register_operand" "r,r,r,r") (const_int 0)])
4617 (label_ref (match_operand 3 "" ""))))
4618 (set (match_operand:SI 0 "register_operand" "=!r,!*f,!*m,!*q")
4621 "* return output_movb (operands, insn, which_alternative, 1); "
4622 ;; Do not expect to understand this the first time through.
4623 [(set_attr "type" "cbranch,multi,multi,multi")
4624 (set (attr "length")
4625 (if_then_else (eq_attr "alternative" "0")
4626 ;; Loop counter in register case
4627 ;; Short branch has length of 4
4628 ;; Long branch has length of 8
4629 (if_then_else (lt (abs (minus (match_dup 3) (plus (pc) (const_int 8))))
4634 ;; Loop counter in FP reg case.
4635 ;; Extra goo to deal with additional reload insns.
4636 (if_then_else (eq_attr "alternative" "1")
4637 (if_then_else (lt (match_dup 3) (pc))
4639 (lt (abs (minus (match_dup 3) (plus (pc) (const_int 12))))
4644 (lt (abs (minus (match_dup 3) (plus (pc) (const_int 8))))
4648 ;; Loop counter in memory or SAR case.
4649 ;; Extra goo to deal with additional reload insns.
4651 (lt (abs (minus (match_dup 3) (plus (pc) (const_int 8))))
4654 (const_int 12)))))])
4656 ;; The next several patterns (parallel_addb, parallel_movb, fmpyadd and
4657 ;; fmpysub aren't currently used by the FSF sources, but will be soon.
4659 ;; They're in the FSF tree for documentation and to make Cygnus<->FSF
;; parallel_addb: an unconditional branch combined with an add into the
;; same instruction; valid only when the destination equals one of the
;; add's sources (an addb-style encoding).
4662 [(set (pc) (label_ref (match_operand 3 "" "" )))
4663 (set (match_operand:SI 0 "register_operand" "=r")
4664 (plus:SI (match_operand:SI 1 "register_operand" "r")
4665 (match_operand:SI 2 "ireg_or_int5_operand" "rL")))]
4666 "(reload_completed && operands[0] == operands[1]) || operands[0] == operands[2]"
4669 return output_parallel_addb (operands, get_attr_length (insn));
4671 [(set_attr "type" "parallel_branch")
4672 (set (attr "length")
4673 (if_then_else (lt (abs (minus (match_dup 3) (plus (pc) (const_int 8))))
;; parallel_movb variants: an unconditional branch combined with a
;; register/immediate move, one pattern per mode (SF, SI, HI, QI).
4679 [(set (pc) (label_ref (match_operand 2 "" "" )))
4680 (set (match_operand:SF 0 "register_operand" "=r")
4681 (match_operand:SF 1 "ireg_or_int5_operand" "rL"))]
4685 return output_parallel_movb (operands, get_attr_length (insn));
4687 [(set_attr "type" "parallel_branch")
4688 (set (attr "length")
4689 (if_then_else (lt (abs (minus (match_dup 2) (plus (pc) (const_int 8))))
4695 [(set (pc) (label_ref (match_operand 2 "" "" )))
4696 (set (match_operand:SI 0 "register_operand" "=r")
4697 (match_operand:SI 1 "ireg_or_int5_operand" "rL"))]
4701 return output_parallel_movb (operands, get_attr_length (insn));
4703 [(set_attr "type" "parallel_branch")
4704 (set (attr "length")
4705 (if_then_else (lt (abs (minus (match_dup 2) (plus (pc) (const_int 8))))
4711 [(set (pc) (label_ref (match_operand 2 "" "" )))
4712 (set (match_operand:HI 0 "register_operand" "=r")
4713 (match_operand:HI 1 "ireg_or_int5_operand" "rL"))]
4717 return output_parallel_movb (operands, get_attr_length (insn));
4719 [(set_attr "type" "parallel_branch")
4720 (set (attr "length")
4721 (if_then_else (lt (abs (minus (match_dup 2) (plus (pc) (const_int 8))))
4727 [(set (pc) (label_ref (match_operand 2 "" "" )))
4728 (set (match_operand:QI 0 "register_operand" "=r")
4729 (match_operand:QI 1 "ireg_or_int5_operand" "rL"))]
4733 return output_parallel_movb (operands, get_attr_length (insn));
4735 [(set_attr "type" "parallel_branch")
4736 (set (attr "length")
4737 (if_then_else (lt (abs (minus (match_dup 2) (plus (pc) (const_int 8))))
;; Combined FP multiply+add / multiply+subtract patterns (fmpyadd,
;; fmpysub).  Only enabled post-reload on TARGET_SNAKE with hard float,
;; and only when fmpyaddoperands/fmpysuboperands verify the register
;; pairing restrictions of the combined instruction.
;;
;; fmpyadd, multiply listed first:
4743 [(set (match_operand 0 "register_operand" "=f")
4744 (mult (match_operand 1 "register_operand" "f")
4745 (match_operand 2 "register_operand" "f")))
4746 (set (match_operand 3 "register_operand" "+f")
4747 (plus (match_operand 4 "register_operand" "f")
4748 (match_operand 5 "register_operand" "f")))]
4749 "TARGET_SNAKE && ! TARGET_SOFT_FLOAT
4750 && reload_completed && fmpyaddoperands (operands)"
4753 if (GET_MODE (operands[0]) == DFmode)
4755 if (rtx_equal_p (operands[3], operands[5]))
4756 return \"fmpyadd,dbl %1,%2,%0,%4,%3\";
4758 return \"fmpyadd,dbl %1,%2,%0,%5,%3\";
4762 if (rtx_equal_p (operands[3], operands[5]))
4763 return \"fmpyadd,sgl %1,%2,%0,%4,%3\";
4765 return \"fmpyadd,sgl %1,%2,%0,%5,%3\";
4768 [(set_attr "type" "fpalu")
4769 (set_attr "length" "4")])
;; fmpyadd, add listed first (same instruction, commuted RTL order):
4772 [(set (match_operand 3 "register_operand" "+f")
4773 (plus (match_operand 4 "register_operand" "f")
4774 (match_operand 5 "register_operand" "f")))
4775 (set (match_operand 0 "register_operand" "=f")
4776 (mult (match_operand 1 "register_operand" "f")
4777 (match_operand 2 "register_operand" "f")))]
4778 "TARGET_SNAKE && ! TARGET_SOFT_FLOAT
4779 && reload_completed && fmpyaddoperands (operands)"
4782 if (GET_MODE (operands[0]) == DFmode)
4784 if (rtx_equal_p (operands[3], operands[5]))
4785 return \"fmpyadd,dbl %1,%2,%0,%4,%3\";
4787 return \"fmpyadd,dbl %1,%2,%0,%5,%3\";
4791 if (rtx_equal_p (operands[3], operands[5]))
4792 return \"fmpyadd,sgl %1,%2,%0,%4,%3\";
4794 return \"fmpyadd,sgl %1,%2,%0,%5,%3\";
4797 [(set_attr "type" "fpalu")
4798 (set_attr "length" "4")])
;; fmpysub, multiply listed first:
4801 [(set (match_operand 0 "register_operand" "=f")
4802 (mult (match_operand 1 "register_operand" "f")
4803 (match_operand 2 "register_operand" "f")))
4804 (set (match_operand 3 "register_operand" "+f")
4805 (minus (match_operand 4 "register_operand" "f")
4806 (match_operand 5 "register_operand" "f")))]
4807 "TARGET_SNAKE && ! TARGET_SOFT_FLOAT
4808 && reload_completed && fmpysuboperands (operands)"
4811 if (GET_MODE (operands[0]) == DFmode)
4812 return \"fmpysub,dbl %1,%2,%0,%5,%3\";
4814 return \"fmpysub,sgl %1,%2,%0,%5,%3\";
4816 [(set_attr "type" "fpalu")
4817 (set_attr "length" "4")])
;; fmpysub, subtract listed first (commuted RTL order):
4820 [(set (match_operand 3 "register_operand" "+f")
4821 (minus (match_operand 4 "register_operand" "f")
4822 (match_operand 5 "register_operand" "f")))
4823 (set (match_operand 0 "register_operand" "=f")
4824 (mult (match_operand 1 "register_operand" "f")
4825 (match_operand 2 "register_operand" "f")))]
4826 "TARGET_SNAKE && ! TARGET_SOFT_FLOAT
4827 && reload_completed && fmpysuboperands (operands)"
4830 if (GET_MODE (operands[0]) == DFmode)
4831 return \"fmpysub,dbl %1,%2,%0,%5,%3\";
4833 return \"fmpysub,sgl %1,%2,%0,%5,%3\";
4835 [(set_attr "type" "fpalu")
4836 (set_attr "length" "4")])
4838 ;; Clean up turds left by reload.
;; First pattern: a DFmode store (reg -> mem) immediately followed by a
;; copy between two registers of the same class.  Emits the store, then
;; emits the register copy only when source and copy-source differ.
4840 [(set (match_operand 0 "reg_or_nonsymb_mem_operand" "")
4841 (match_operand 1 "register_operand" "fr"))
4842 (set (match_operand 2 "register_operand" "fr")
4844 "! TARGET_SOFT_FLOAT
4845 && GET_CODE (operands[0]) == MEM
4846 && ! MEM_VOLATILE_P (operands[0])
4847 && GET_MODE (operands[0]) == GET_MODE (operands[1])
4848 && GET_MODE (operands[0]) == GET_MODE (operands[2])
4849 && GET_MODE (operands[0]) == DFmode
4850 && GET_CODE (operands[1]) == REG
4851 && GET_CODE (operands[2]) == REG
4852 && ! side_effects_p (XEXP (operands[0], 0))
4853 && REGNO_REG_CLASS (REGNO (operands[1]))
4854 == REGNO_REG_CLASS (REGNO (operands[2]))"
4859 if (FP_REG_P (operands[1]))
4860 output_asm_insn (output_fp_move_double (operands), operands);
4862 output_asm_insn (output_move_double (operands), operands);
;; Skip the second move entirely when it would be a no-op copy.
4864 if (rtx_equal_p (operands[1], operands[2]))
4867 xoperands[0] = operands[2];
4868 xoperands[1] = operands[1];
4870 if (FP_REG_P (xoperands[1]))
4871 output_asm_insn (output_fp_move_double (xoperands), xoperands);
4873 output_asm_insn (output_move_double (xoperands), xoperands);
;; Second pattern: the mirror case — a DFmode load (mem -> reg)
;; followed by a same-class register copy from the loaded register.
4879 [(set (match_operand 0 "register_operand" "fr")
4880 (match_operand 1 "reg_or_nonsymb_mem_operand" ""))
4881 (set (match_operand 2 "register_operand" "fr")
4883 "! TARGET_SOFT_FLOAT
4884 && GET_CODE (operands[1]) == MEM
4885 && ! MEM_VOLATILE_P (operands[1])
4886 && GET_MODE (operands[0]) == GET_MODE (operands[1])
4887 && GET_MODE (operands[0]) == GET_MODE (operands[2])
4888 && GET_MODE (operands[0]) == DFmode
4889 && GET_CODE (operands[0]) == REG
4890 && GET_CODE (operands[2]) == REG
4891 && ! side_effects_p (XEXP (operands[1], 0))
4892 && REGNO_REG_CLASS (REGNO (operands[0]))
4893 == REGNO_REG_CLASS (REGNO (operands[2]))"
4898 if (FP_REG_P (operands[0]))
4899 output_asm_insn (output_fp_move_double (operands), operands);
4901 output_asm_insn (output_move_double (operands), operands);
4903 xoperands[0] = operands[2];
4904 xoperands[1] = operands[0];
4906 if (FP_REG_P (xoperands[1]))
4907 output_asm_insn (output_fp_move_double (xoperands), xoperands);
4909 output_asm_insn (output_move_double (xoperands), xoperands);
4914 ;; Flush the I and D cache line found at the address in operand 0.
4915 ;; This is used by the trampoline code for nested functions.
4916 ;; So long as the trampoline itself is less than 32 bytes this
;; Flush two data-cache lines (addresses in %0 and %1) with "fdc",
;; then "sync" to make the flushes globally visible.
4919 (define_insn "dcacheflush"
4920 [(unspec_volatile [(const_int 1)] 0)
4921 (use (mem:SI (match_operand:SI 0 "register_operand" "r")))
4922 (use (mem:SI (match_operand:SI 1 "register_operand" "r")))]
4924 "fdc 0(0,%0)\;fdc 0(0,%1)\;sync"
4925 [(set_attr "type" "multi")
4926 (set_attr "length" "12")])
;; Flush two instruction-cache lines with "fic".  Saves %sr0, loads the
;; space id for %2 via "ldsid"/"mtsp" so the fic targets the right space,
;; restores %sr0, then pads with nops (presumably to drain prefetched
;; instructions -- TODO confirm against the architecture manual).
4928 (define_insn "icacheflush"
4929 [(unspec_volatile [(const_int 2)] 0)
4930 (use (mem:SI (match_operand:SI 0 "register_operand" "r")))
4931 (use (mem:SI (match_operand:SI 1 "register_operand" "r")))
4932 (use (match_operand:SI 2 "register_operand" "r"))
4933 (clobber (match_operand:SI 3 "register_operand" "=&r"))
4934 (clobber (match_operand:SI 4 "register_operand" "=&r"))]
4936 "mfsp %%sr0,%4\;ldsid (0,%2),%3\;mtsp %3,%%sr0\;fic 0(%%sr0,%0)\;fic 0(%%sr0,%1)\;sync\;mtsp %4,%%sr0\;nop\;nop\;nop\;nop\;nop\;nop"
4937 [(set_attr "type" "multi")
4938 (set_attr "length" "52")])
4940 ;; An out-of-line prologue.
;; Calls the __outline_prologue (or __outline_prologue_fp, when a frame
;; pointer is needed) millicode routine.  The listed registers are
;; clobbered by the millicode call sequence.
4941 (define_insn "outline_prologue_call"
4942 [(unspec_volatile [(const_int 0)] 0)
4943 (clobber (reg:SI 31))
4944 (clobber (reg:SI 22))
4945 (clobber (reg:SI 21))
4946 (clobber (reg:SI 20))
4947 (clobber (reg:SI 19))
4948 (clobber (reg:SI 1))]
4952 extern int frame_pointer_needed;
4954 /* We need two different versions depending on whether or not we
4955 need a frame pointer. Also note that we return to the instruction
4956 immediately after the branch rather than two instructions after the
4957 break as normally is the case. */
4958 if (frame_pointer_needed)
4960 /* Must import the magic millicode routine(s). */
4961 output_asm_insn (\".IMPORT __outline_prologue_fp,MILLICODE\", NULL);
/* Portable runtime: long call via ldil/ble; otherwise a plain bl. */
4963 if (TARGET_PORTABLE_RUNTIME)
4965 output_asm_insn (\"ldil L'__outline_prologue_fp,%%r31\", NULL);
4966 output_asm_insn (\"ble,n R'__outline_prologue_fp(%%sr0,%%r31)\",
4970 output_asm_insn (\"bl,n __outline_prologue_fp,%%r31\", NULL);
4974 /* Must import the magic millicode routine(s). */
4975 output_asm_insn (\".IMPORT __outline_prologue,MILLICODE\", NULL);
4977 if (TARGET_PORTABLE_RUNTIME)
4979 output_asm_insn (\"ldil L'__outline_prologue,%%r31\", NULL);
4980 output_asm_insn (\"ble,n R'__outline_prologue(%%sr0,%%r31)\", NULL);
4983 output_asm_insn (\"bl,n __outline_prologue,%%r31\", NULL);
4987 [(set_attr "type" "multi")
4988 (set_attr "length" "8")])
4990 ;; An out-of-line epilogue.
;; Same structure as outline_prologue_call, but also clobbers %r2 (the
;; return pointer) since the epilogue millicode performs the return.
4991 (define_insn "outline_epilogue_call"
4992 [(unspec_volatile [(const_int 1)] 0)
4995 (clobber (reg:SI 31))
4996 (clobber (reg:SI 22))
4997 (clobber (reg:SI 21))
4998 (clobber (reg:SI 20))
4999 (clobber (reg:SI 19))
5000 (clobber (reg:SI 2))
5001 (clobber (reg:SI 1))]
5005 extern int frame_pointer_needed;
5007 /* We need two different versions depending on whether or not we
5008 need a frame pointer. Also note that we return to the instruction
5009 immediately after the branch rather than two instructions after the
5010 break as normally is the case. */
5011 if (frame_pointer_needed)
5013 /* Must import the magic millicode routine. */
5014 output_asm_insn (\".IMPORT __outline_epilogue_fp,MILLICODE\", NULL);
5016 /* The out-of-line prologue will make sure we return to the right
5018 if (TARGET_PORTABLE_RUNTIME)
5020 output_asm_insn (\"ldil L'__outline_epilogue_fp,%%r31\", NULL);
5021 output_asm_insn (\"ble,n R'__outline_epilogue_fp(%%sr0,%%r31)\",
5025 output_asm_insn (\"bl,n __outline_epilogue_fp,%%r31\", NULL);
5029 /* Must import the magic millicode routine. */
5030 output_asm_insn (\".IMPORT __outline_epilogue,MILLICODE\", NULL);
5032 /* The out-of-line prologue will make sure we return to the right
5034 if (TARGET_PORTABLE_RUNTIME)
5036 output_asm_insn (\"ldil L'__outline_epilogue,%%r31\", NULL);
5037 output_asm_insn (\"ble,n R'__outline_epilogue(%%sr0,%%r31)\", NULL);
5040 output_asm_insn (\"bl,n __outline_epilogue,%%r31\", NULL);
5044 [(set_attr "type" "multi")
5045 (set_attr "length" "8")])
5047 ;; Given a function pointer, canonicalize it so it can be
5048 ;; reliably compared to another function pointer.
;; Expands to a call-like parallel: the pointer goes in %r26, the
;; canonical result comes back in %r29; %r22/%r26/%r31 and a pseudo
;; (operand 2) are clobbered by the sequence.
5049 (define_expand "canonicalize_funcptr_for_compare"
5050 [(set (reg:SI 26) (match_operand:SI 1 "register_operand" ""))
5051 (parallel [(set (reg:SI 29) (unspec:SI [(reg:SI 26)] 0))
5052 (clobber (match_dup 2))
5053 (clobber (reg:SI 26))
5054 (clobber (reg:SI 22))
5055 (clobber (reg:SI 31))])
5056 (set (match_operand:SI 0 "register_operand" "")
5058 "! TARGET_PORTABLE_RUNTIME"
5061 operands[2] = gen_reg_rtx (SImode);
;; Force a non-register source into a fresh register first.
5062 if (GET_CODE (operands[1]) != REG)
5064 rtx tmp = gen_reg_rtx (Pmode);
5065 emit_move_insn (tmp, operands[1]);
;; The insn matching the parallel above.  Emits an inline fast path
;; that keeps %r26 unchanged when it is clearly not a shared-library
;; PLT-style descriptor, and otherwise calls the $$sh_func_adrs
;; millicode routine.  The hand-computed branch offsets (".+NN") depend
;; on this insn's own length attribute, hence the get_attr_length tests.
5071 [(set (reg:SI 29) (unspec:SI [(reg:SI 26)] 0))
5072 (clobber (match_operand:SI 0 "register_operand" "=a"))
5073 (clobber (reg:SI 26))
5074 (clobber (reg:SI 22))
5075 (clobber (reg:SI 31))]
5079 /* Must import the magic millicode routine. */
5080 output_asm_insn (\".IMPORT $$sh_func_adrs,MILLICODE\", NULL);
5082 /* This is absolutely amazing.
5084 First, copy our input parameter into %r29 just in case we don't
5085 need to call $$sh_func_adrs. */
5086 output_asm_insn (\"copy %%r26,%%r29\", NULL);
5088 /* Next, examine the low two bits in %r26, if they aren't 0x2, then
5089 we use %r26 unchanged. */
5090 if (get_attr_length (insn) == 32)
5091 output_asm_insn (\"extru %%r26,31,2,%%r31\;comib,<>,n 2,%%r31,.+24\", NULL);
5092 else if (get_attr_length (insn) == 40)
5093 output_asm_insn (\"extru %%r26,31,2,%%r31\;comib,<>,n 2,%%r31,.+32\", NULL);
5094 else if (get_attr_length (insn) == 44)
5095 output_asm_insn (\"extru %%r26,31,2,%%r31\;comib,<>,n 2,%%r31,.+36\", NULL);
5097 output_asm_insn (\"extru %%r26,31,2,%%r31\;comib,<>,n 2,%%r31,.+20\", NULL);
5099 /* Next, compare %r26 with 4096, if %r26 is less than or equal to
5100 4096, then we use %r26 unchanged. */
5101 if (get_attr_length (insn) == 32)
5102 output_asm_insn (\"ldi 4096,%%r31\;comb,<<,n %%r26,%%r31,.+16\", NULL);
5103 else if (get_attr_length (insn) == 40)
5104 output_asm_insn (\"ldi 4096,%%r31\;comb,<<,n %%r26,%%r31,.+24\", NULL);
5105 else if (get_attr_length (insn) == 44)
5106 output_asm_insn (\"ldi 4096,%%r31\;comb,<<,n %%r26,%%r31,.+28\", NULL);
5108 output_asm_insn (\"ldi 4096,%%r31\;comb,<<,n %%r26,%%r31,.+12\", NULL);
5110 /* Else call $$sh_func_adrs to extract the function's real address. */
5111 return output_millicode_call (insn,
5112 gen_rtx_SYMBOL_REF (SImode, \"$$sh_func_adrs\"));
5114 [(set_attr "type" "multi")
5115 (set (attr "length")
5117 ;; Target (or stub) within reach
5118 (and (lt (plus (symbol_ref "total_code_bytes") (pc))
5120 (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
5125 (ne (symbol_ref "TARGET_NO_SPACE_REGS || TARGET_FAST_INDIRECT_CALLS")
5129 ;; Out of reach, but not PIC or PORTABLE_RUNTIME
5130 ;; same as NO_SPACE_REGS code
5131 (and (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
5133 (eq (symbol_ref "flag_pic")
5138 (ne (symbol_ref "TARGET_PORTABLE_RUNTIME")
5142 ;; Out of range and PIC
5145 ;; On the PA, the PIC register is call clobbered, so it must
5146 ;; be saved & restored around calls by the caller. If the call
5147 ;; doesn't return normally (nonlocal goto, or an exception is
5148 ;; thrown), then the code at the exception handler label must
5149 ;; restore the PIC register.
5150 (define_expand "exception_receiver"
5152 "!TARGET_PORTABLE_RUNTIME && flag_pic"
5155 /* Load the PIC register from the stack slot (in our caller's
5157 emit_move_insn (pic_offset_table_rtx,
5158 gen_rtx_MEM (SImode, plus_constant (stack_pointer_rtx, -32)));
5159 emit_insn (gen_rtx_USE (VOIDmode, pic_offset_table_rtx));
5160 emit_insn (gen_blockage ());