1 ;;- Machine description for HP PA-RISC architecture for GNU C compiler
2 ;; Copyright (C) 1992, 93-98, 1999 Free Software Foundation, Inc.
3 ;; Contributed by the Center for Software Science at the University
6 ;; This file is part of GNU CC.
8 ;; GNU CC is free software; you can redistribute it and/or modify
9 ;; it under the terms of the GNU General Public License as published by
10 ;; the Free Software Foundation; either version 2, or (at your option)
13 ;; GNU CC is distributed in the hope that it will be useful,
14 ;; but WITHOUT ANY WARRANTY; without even the implied warranty of
15 ;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 ;; GNU General Public License for more details.
18 ;; You should have received a copy of the GNU General Public License
19 ;; along with GNU CC; see the file COPYING. If not, write to
20 ;; the Free Software Foundation, 59 Temple Place - Suite 330,
21 ;; Boston, MA 02111-1307, USA.
23 ;; This gcc Version 2 machine description is inspired by sparc.md and
26 ;;- See file "rtl.def" for documentation on define_insn, match_*, et al.
28 ;; Insn type. Used to default other attribute values.
30 ;; type "unary" insns have one input operand (1) and one output operand (0)
31 ;; type "binary" insns have two input operands (1,2) and one output (0)
34 "move,unary,binary,shift,nullshift,compare,load,store,uncond_branch,branch,cbranch,fbranch,call,dyncall,fpload,fpstore,fpalu,fpcc,fpmulsgl,fpmuldbl,fpdivsgl,fpdivdbl,fpsqrtsgl,fpsqrtdbl,multi,milli,parallel_branch"
35 (const_string "binary"))
;; Secondary classification of insns used when pairing instructions
;; (e.g. fused multiply/add-sub candidates and unconditional branches).
;; NOTE(review): presumably consumed by the machine-specific combine
;; pass in pa.c -- confirm against that file.  Defaults to "none".
37 (define_attr "pa_combine_type"
38 "fmpy,faddsub,uncond_branch,addmove,none"
39 (const_string "none"))
41 ;; Processor type (for scheduling, not code generation) -- this attribute
42 ;; must exactly match the processor_type enumeration in pa.h.
44 ;; FIXME: Add 800 scheduling for completeness?
46 (define_attr "cpu" "700,7100,7100LC,7200,8000" (const (symbol_ref "pa_cpu_attr")))
48 ;; Length (in # of insns).
49 (define_attr "length" ""
50 (cond [(eq_attr "type" "load,fpload")
51 (if_then_else (match_operand 1 "symbolic_memory_operand" "")
52 (const_int 8) (const_int 4))
54 (eq_attr "type" "store,fpstore")
55 (if_then_else (match_operand 0 "symbolic_memory_operand" "")
56 (const_int 8) (const_int 4))
58 (eq_attr "type" "binary,shift,nullshift")
59 (if_then_else (match_operand 2 "arith_operand" "")
60 (const_int 4) (const_int 12))
62 (eq_attr "type" "move,unary,shift,nullshift")
63 (if_then_else (match_operand 1 "arith_operand" "")
64 (const_int 4) (const_int 8))]
;; Conservative defaults for inline asm statements: assume a single
;; 4-byte instruction and classify as "multi", which keeps asm out of
;; branch/call delay slots (see the in_*_delay attribute tests below).
68 (define_asm_attributes
69 [(set_attr "length" "4")
70 (set_attr "type" "multi")])
72 ;; Attributes for instruction and branch scheduling
74 ;; For conditional branches.
75 (define_attr "in_branch_delay" "false,true"
76 (if_then_else (and (eq_attr "type" "!uncond_branch,branch,cbranch,fbranch,call,dyncall,multi,milli,parallel_branch")
77 (eq_attr "length" "4"))
79 (const_string "false")))
81 ;; Disallow instructions which use the FPU since they will tie up the FPU
82 ;; even if the instruction is nullified.
83 (define_attr "in_nullified_branch_delay" "false,true"
84 (if_then_else (and (eq_attr "type" "!uncond_branch,branch,cbranch,fbranch,call,dyncall,multi,milli,fpcc,fpalu,fpmulsgl,fpmuldbl,fpdivsgl,fpdivdbl,fpsqrtsgl,fpsqrtdbl,parallel_branch")
85 (eq_attr "length" "4"))
87 (const_string "false")))
89 ;; For calls and millicode calls. Allow unconditional branches in the
91 (define_attr "in_call_delay" "false,true"
92 (cond [(and (eq_attr "type" "!uncond_branch,branch,cbranch,fbranch,call,dyncall,multi,milli,parallel_branch")
93 (eq_attr "length" "4"))
95 (eq_attr "type" "uncond_branch")
96 (if_then_else (ne (symbol_ref "TARGET_JUMP_IN_DELAY")
99 (const_string "false"))]
100 (const_string "false")))
103 ;; Call delay slot description.
;; One delay slot; the two (nil)s mean the slot may not be annulled.
104 (define_delay (eq_attr "type" "call")
105 [(eq_attr "in_call_delay" "true") (nil) (nil)])
107 ;; millicode call delay slot description. Note it disallows delay slot
108 ;; when TARGET_PORTABLE_RUNTIME is true.
109 (define_delay (eq_attr "type" "milli")
110 [(and (eq_attr "in_call_delay" "true")
111 (eq (symbol_ref "TARGET_PORTABLE_RUNTIME") (const_int 0)))
114 ;; Return and other similar instructions.
;; One non-annullable delay slot for returns and parallel branches.
115 (define_delay (eq_attr "type" "branch,parallel_branch")
116 [(eq_attr "in_branch_delay" "true") (nil) (nil)])
118 ;; Floating point conditional branch delay slot description and
119 (define_delay (eq_attr "type" "fbranch")
120 [(eq_attr "in_branch_delay" "true")
121 (eq_attr "in_nullified_branch_delay" "true")
124 ;; Integer conditional branch delay slot description.
125 ;; Nullification of conditional branches on the PA is dependent on the
126 ;; direction of the branch. Forward branches nullify true and
127 ;; backward branches nullify false. If the direction is unknown
128 ;; then nullification is not allowed.
;; Slot description is [eligibility, annul-if-taken, annul-if-not-taken]:
;; forward branches may annul when taken, backward branches when not
;; taken; with unknown direction only a plain (non-annulled) slot fits.
129 (define_delay (eq_attr "type" "cbranch")
130 [(eq_attr "in_branch_delay" "true")
131 (and (eq_attr "in_nullified_branch_delay" "true")
132 (attr_flag "forward"))
133 (and (eq_attr "in_nullified_branch_delay" "true")
134 (attr_flag "backward"))])
136 (define_delay (and (eq_attr "type" "uncond_branch")
137 (eq (symbol_ref "following_call (insn)")
139 [(eq_attr "in_branch_delay" "true") (nil) (nil)])
141 ;; Function units of the HPPA. The following data is for the 700 CPUs
142 ;; (Mustang CPU + Timex FPU aka PA-89) because that's what I have the docs for.
143 ;; Scheduling instructions for PA-83 machines according to the Snake
144 ;; constraints shouldn't hurt.
146 ;; (define_function_unit {name} {num-units} {n-users} {test}
147 ;; {ready-delay} {issue-delay} [{conflict-list}])
150 ;; (Noted only for documentation; units that take one cycle do not need to
153 ;; (define_function_unit "alu" 1 0
154 ;; (and (eq_attr "type" "unary,shift,nullshift,binary,move,address")
155 ;; (eq_attr "cpu" "700"))
159 ;; Memory. Disregarding Cache misses, the Mustang memory times are:
160 ;; load: 2, fpload: 3
161 ;; store, fpstore: 3, no D-cache operations should be scheduled.
;; 700-series D-cache unit: loads have a 2-cycle result latency with no
;; issue restriction (issue-delay 0).
163 (define_function_unit "pa700memory" 1 0
164 (and (eq_attr "type" "load,fpload")
165 (eq_attr "cpu" "700")) 2 0)
;; Stores occupy the unit for 3 cycles and block further memory issue
;; for those 3 cycles, matching the "no D-cache operations should be
;; scheduled" note above.
166 (define_function_unit "pa700memory" 1 0
167 (and (eq_attr "type" "store,fpstore")
168 (eq_attr "cpu" "700")) 3 3)
170 ;; The Timex (aka 700) has two floating-point units: ALU, and MUL/DIV/SQRT.
172 ;; Instruction Time Unit Minimum Distance (unit contention)
179 ;; fmpyadd 3 ALU,MPY 2
180 ;; fmpysub 3 ALU,MPY 2
181 ;; fmpycfxt 3 ALU,MPY 2
184 ;; fdiv,sgl 10 MPY 10
185 ;; fdiv,dbl 12 MPY 12
186 ;; fsqrt,sgl 14 MPY 14
187 ;; fsqrt,dbl 18 MPY 18
;; Timex FP ALU: compares take 4 cycles, other ALU flops 3; in both
;; cases a new flop may enter the unit after 2 cycles (issue-delay 2).
189 (define_function_unit "pa700fp_alu" 1 0
190 (and (eq_attr "type" "fpcc")
191 (eq_attr "cpu" "700")) 4 2)
192 (define_function_unit "pa700fp_alu" 1 0
193 (and (eq_attr "type" "fpalu")
194 (eq_attr "cpu" "700")) 3 2)
;; MUL/DIV/SQRT unit: multiplies are 3 cycles, pipelined after 2.
195 (define_function_unit "pa700fp_mpy" 1 0
196 (and (eq_attr "type" "fpmulsgl,fpmuldbl")
197 (eq_attr "cpu" "700")) 3 2)
;; Divides and square roots are unpipelined: the issue-delay equals the
;; full latency (10/12/14/18 cycles per the table above).
198 (define_function_unit "pa700fp_mpy" 1 0
199 (and (eq_attr "type" "fpdivsgl")
200 (eq_attr "cpu" "700")) 10 10)
201 (define_function_unit "pa700fp_mpy" 1 0
202 (and (eq_attr "type" "fpdivdbl")
203 (eq_attr "cpu" "700")) 12 12)
204 (define_function_unit "pa700fp_mpy" 1 0
205 (and (eq_attr "type" "fpsqrtsgl")
206 (eq_attr "cpu" "700")) 14 14)
207 (define_function_unit "pa700fp_mpy" 1 0
208 (and (eq_attr "type" "fpsqrtdbl")
209 (eq_attr "cpu" "700")) 18 18)
211 ;; Function units for the 7100 and 7150. The 7100/7150 can dual-issue
212 ;; floating point computations with non-floating point computations (fp loads
213 ;; and stores are not fp computations).
216 ;; Memory. Disregarding Cache misses, memory loads take two cycles; stores also
217 ;; take two cycles, during which no Dcache operations should be scheduled.
218 ;; Any special cases are handled in pa_adjust_cost. The 7100, 7150 and 7100LC
219 ;; all have the same memory characteristics if one disregards cache misses.
;; 7100/7100LC D-cache unit: loads ready after 2 cycles, no issue
;; restriction.
220 (define_function_unit "pa7100memory" 1 0
221 (and (eq_attr "type" "load,fpload")
222 (eq_attr "cpu" "7100,7100LC")) 2 0)
;; Stores block further memory issue for their full 2-cycle latency.
223 (define_function_unit "pa7100memory" 1 0
224 (and (eq_attr "type" "store,fpstore")
225 (eq_attr "cpu" "7100,7100LC")) 2 2)
227 ;; The 7100/7150 has three floating-point units: ALU, MUL, and DIV.
229 ;; Instruction Time Unit Minimum Distance (unit contention)
236 ;; fmpyadd 2 ALU,MPY 1
237 ;; fmpysub 2 ALU,MPY 1
238 ;; fmpycfxt 2 ALU,MPY 1
242 ;; fdiv,dbl 15 DIV 15
244 ;; fsqrt,dbl 15 DIV 15
;; 7100 FP ALU: 2-cycle latency, pipelined (new op every cycle).
246 (define_function_unit "pa7100fp_alu" 1 0
247 (and (eq_attr "type" "fpcc,fpalu")
248 (eq_attr "cpu" "7100")) 2 1)
;; 7100 FP multiplier: 2-cycle latency, pipelined.
249 (define_function_unit "pa7100fp_mpy" 1 0
250 (and (eq_attr "type" "fpmulsgl,fpmuldbl")
251 (eq_attr "cpu" "7100")) 2 1)
;; 7100 FP divide/sqrt unit is unpipelined: issue-delay equals the
;; full latency (8 cycles single, 15 cycles double).
252 (define_function_unit "pa7100fp_div" 1 0
253 (and (eq_attr "type" "fpdivsgl,fpsqrtsgl")
254 (eq_attr "cpu" "7100")) 8 8)
255 (define_function_unit "pa7100fp_div" 1 0
256 (and (eq_attr "type" "fpdivdbl,fpsqrtdbl")
257 (eq_attr "cpu" "7100")) 15 15)
259 ;; To encourage dual issue we define function units corresponding to
260 ;; the instructions which can be dual issued. This is a rather crude
261 ;; approximation, the "pa7100nonflop" test in particular could be refined.
262 (define_function_unit "pa7100flop" 1 1
264 (eq_attr "type" "fpcc,fpalu,fpmulsgl,fpmuldbl,fpdivsgl,fpsqrtsgl,fpdivdbl,fpsqrtdbl")
265 (eq_attr "cpu" "7100")) 1 1)
267 (define_function_unit "pa7100nonflop" 1 1
269 (eq_attr "type" "!fpcc,fpalu,fpmulsgl,fpmuldbl,fpdivsgl,fpsqrtsgl,fpdivdbl,fpsqrtdbl")
270 (eq_attr "cpu" "7100")) 1 1)
273 ;; Memory subsystem works just like 7100/7150 (except for cache miss times which
274 ;; we don't model here).
276 ;; The 7100LC has three floating-point units: ALU, MUL, and DIV.
277 ;; Note divides and sqrt flops lock the cpu until the flop is
278 ;; finished. fmpy and xmpyu (fmpyi) lock the cpu for one cycle.
279 ;; There's no way to avoid the penalty.
281 ;; Instruction Time Unit Minimum Distance (unit contention)
288 ;; fmpyadd,sgl 2 ALU,MPY 1
289 ;; fmpyadd,dbl 3 ALU,MPY 2
290 ;; fmpysub,sgl 2 ALU,MPY 1
291 ;; fmpysub,dbl 3 ALU,MPY 2
292 ;; fmpycfxt,sgl 2 ALU,MPY 1
293 ;; fmpycfxt,dbl 3 ALU,MPY 2
298 ;; fdiv,dbl 15 DIV 15
300 ;; fsqrt,dbl 15 DIV 15
;; The 7200 shares the 7100LC FP timings, hence "7100LC,7200" tests.
;; FP ALU: 2-cycle latency, pipelined.
302 (define_function_unit "pa7100LCfp_alu" 1 0
303 (and (eq_attr "type" "fpcc,fpalu")
304 (eq_attr "cpu" "7100LC,7200")) 2 1)
;; Multiplier: single precision 2 cycles, double 3; both pipelined
;; (next multiply may start after 1 resp. 2 cycles).
305 (define_function_unit "pa7100LCfp_mpy" 1 0
306 (and (eq_attr "type" "fpmulsgl")
307 (eq_attr "cpu" "7100LC,7200")) 2 1)
308 (define_function_unit "pa7100LCfp_mpy" 1 0
309 (and (eq_attr "type" "fpmuldbl")
310 (eq_attr "cpu" "7100LC,7200")) 3 2)
;; Divide/sqrt is unpipelined (see note above: it locks the cpu), so
;; issue-delay equals the full 8/15-cycle latency.
311 (define_function_unit "pa7100LCfp_div" 1 0
312 (and (eq_attr "type" "fpdivsgl,fpsqrtsgl")
313 (eq_attr "cpu" "7100LC,7200")) 8 8)
314 (define_function_unit "pa7100LCfp_div" 1 0
315 (and (eq_attr "type" "fpdivdbl,fpsqrtdbl")
316 (eq_attr "cpu" "7100LC,7200")) 15 15)
318 ;; Define the various functional units for dual-issue.
320 ;; There's only one floating point unit.
321 (define_function_unit "pa7100LCflop" 1 1
323 (eq_attr "type" "fpcc,fpalu,fpmulsgl,fpmuldbl,fpdivsgl,fpsqrtsgl,fpdivdbl,fpsqrtdbl")
324 (eq_attr "cpu" "7100LC,7200")) 1 1)
326 ;; Shifts and memory ops execute in only one of the integer ALUs
327 (define_function_unit "pa7100LCshiftmem" 1 1
329 (eq_attr "type" "shift,nullshift,load,fpload,store,fpstore")
330 (eq_attr "cpu" "7100LC,7200")) 1 1)
332 ;; We have two basic ALUs.
333 (define_function_unit "pa7100LCalu" 2 1
335 (eq_attr "type" "!fpcc,fpalu,fpmulsgl,fpmuldbl,fpdivsgl,fpsqrtsgl,fpdivdbl,fpsqrtdbl")
336 (eq_attr "cpu" "7100LC,7200")) 1 1)
338 ;; I don't have complete information on the PA7200; however, most of
339 ;; what I've heard makes it look like a 7100LC without the store-store
340 ;; penalty. So that's how we'll model it.
342 ;; Memory. Disregarding Cache misses, memory loads and stores take
343 ;; two cycles. Any special cases are handled in pa_adjust_cost.
;; 7200 memory unit: loads and stores both take 2 cycles with no issue
;; restriction -- unlike the 7100/7100LC there is no store-store penalty.
344 (define_function_unit "pa7200memory" 1 0
345 (and (eq_attr "type" "load,fpload,store,fpstore")
346 (eq_attr "cpu" "7200")) 2 0)
348 ;; I don't have detailed information on the PA7200 FP pipeline, so I
349 ;; treat it just like the 7100LC pipeline.
350 ;; Similarly for the multi-issue fake units.
353 ;; Scheduling for the PA8000 is somewhat different than scheduling for a
354 ;; traditional architecture.
356 ;; The PA8000 has a large (56) entry reorder buffer that is split between
357 ;; memory and non-memory operations.
359 ;; The PA8000 can issue two memory and two non-memory operations per cycle to
360 ;; the function units. Similarly, the PA8000 can retire two memory and two
361 ;; non-memory operations per cycle.
363 ;; Given the large reorder buffer, the processor can hide most latencies.
364 ;; According to HP, they've got the best results by scheduling for retirement
365 ;; bandwidth with limited latency scheduling for floating point operations.
366 ;; Latency for integer operations and memory references is ignored.
368 ;; We claim floating point operations have a 2 cycle latency and are
369 ;; fully pipelined, except for div and sqrt which are not pipelined.
371 ;; It is not necessary to define the shifter and integer alu units.
373 ;; These first two define_function_unit descriptions model retirement from
374 ;; the reorder buffer.
375 (define_function_unit "pa8000lsu" 2 1
377 (eq_attr "type" "load,fpload,store,fpstore")
378 (eq_attr "cpu" "8000")) 1 1)
380 (define_function_unit "pa8000alu" 2 1
382 (eq_attr "type" "!load,fpload,store,fpstore")
383 (eq_attr "cpu" "8000")) 1 1)
385 ;; Claim floating point ops have a 2 cycle latency, excluding div and
386 ;; sqrt, which are not pipelined and issue to different units.
387 (define_function_unit "pa8000fmac" 2 0
389 (eq_attr "type" "fpcc,fpalu,fpmulsgl,fpmuldbl")
390 (eq_attr "cpu" "8000")) 2 1)
392 (define_function_unit "pa8000fdiv" 2 1
394 (eq_attr "type" "fpdivsgl,fpsqrtsgl")
395 (eq_attr "cpu" "8000")) 17 17)
397 (define_function_unit "pa8000fdiv" 2 1
399 (eq_attr "type" "fpdivdbl,fpsqrtdbl")
400 (eq_attr "cpu" "8000")) 31 31)
403 ;; Compare instructions.
404 ;; This controls RTL generation and register allocation.
406 ;; We generate RTL for comparisons and branches by having the cmpxx
407 ;; patterns store away the operands. Then, the scc and bcc patterns
408 ;; emit RTL for both the compare and the branch.
411 (define_expand "cmpsi"
413 (compare:CC (match_operand:SI 0 "reg_or_0_operand" "")
414 (match_operand:SI 1 "arith5_operand" "")))]
418 hppa_compare_op0 = operands[0];
419 hppa_compare_op1 = operands[1];
420 hppa_branch_type = CMP_SI;
424 (define_expand "cmpsf"
426 (compare:CCFP (match_operand:SF 0 "reg_or_0_operand" "")
427 (match_operand:SF 1 "reg_or_0_operand" "")))]
428 "! TARGET_SOFT_FLOAT"
431 hppa_compare_op0 = operands[0];
432 hppa_compare_op1 = operands[1];
433 hppa_branch_type = CMP_SF;
437 (define_expand "cmpdf"
439 (compare:CCFP (match_operand:DF 0 "reg_or_0_operand" "")
440 (match_operand:DF 1 "reg_or_0_operand" "")))]
441 "! TARGET_SOFT_FLOAT"
444 hppa_compare_op0 = operands[0];
445 hppa_compare_op1 = operands[1];
446 hppa_branch_type = CMP_DF;
452 (match_operator:CCFP 2 "comparison_operator"
453 [(match_operand:SF 0 "reg_or_0_operand" "fG")
454 (match_operand:SF 1 "reg_or_0_operand" "fG")]))]
455 "! TARGET_SOFT_FLOAT"
456 "fcmp,sgl,%Y2 %r0,%r1"
457 [(set_attr "length" "4")
458 (set_attr "type" "fpcc")])
462 (match_operator:CCFP 2 "comparison_operator"
463 [(match_operand:DF 0 "reg_or_0_operand" "fG")
464 (match_operand:DF 1 "reg_or_0_operand" "fG")]))]
465 "! TARGET_SOFT_FLOAT"
466 "fcmp,dbl,%Y2 %r0,%r1"
467 [(set_attr "length" "4")
468 (set_attr "type" "fpcc")])
473 [(set (match_operand:SI 0 "register_operand" "")
479 /* fp scc patterns rarely match, and are not a win on the PA. */
480 if (hppa_branch_type != CMP_SI)
482 /* set up operands from compare. */
483 operands[1] = hppa_compare_op0;
484 operands[2] = hppa_compare_op1;
485 /* fall through and generate default code */
489 [(set (match_operand:SI 0 "register_operand" "")
495 /* fp scc patterns rarely match, and are not a win on the PA. */
496 if (hppa_branch_type != CMP_SI)
498 operands[1] = hppa_compare_op0;
499 operands[2] = hppa_compare_op1;
503 [(set (match_operand:SI 0 "register_operand" "")
509 /* fp scc patterns rarely match, and are not a win on the PA. */
510 if (hppa_branch_type != CMP_SI)
512 operands[1] = hppa_compare_op0;
513 operands[2] = hppa_compare_op1;
517 [(set (match_operand:SI 0 "register_operand" "")
523 /* fp scc patterns rarely match, and are not a win on the PA. */
524 if (hppa_branch_type != CMP_SI)
526 operands[1] = hppa_compare_op0;
527 operands[2] = hppa_compare_op1;
531 [(set (match_operand:SI 0 "register_operand" "")
537 /* fp scc patterns rarely match, and are not a win on the PA. */
538 if (hppa_branch_type != CMP_SI)
540 operands[1] = hppa_compare_op0;
541 operands[2] = hppa_compare_op1;
545 [(set (match_operand:SI 0 "register_operand" "")
551 /* fp scc patterns rarely match, and are not a win on the PA. */
552 if (hppa_branch_type != CMP_SI)
554 operands[1] = hppa_compare_op0;
555 operands[2] = hppa_compare_op1;
558 (define_expand "sltu"
559 [(set (match_operand:SI 0 "register_operand" "")
560 (ltu:SI (match_dup 1)
565 if (hppa_branch_type != CMP_SI)
567 operands[1] = hppa_compare_op0;
568 operands[2] = hppa_compare_op1;
571 (define_expand "sgtu"
572 [(set (match_operand:SI 0 "register_operand" "")
573 (gtu:SI (match_dup 1)
578 if (hppa_branch_type != CMP_SI)
580 operands[1] = hppa_compare_op0;
581 operands[2] = hppa_compare_op1;
584 (define_expand "sleu"
585 [(set (match_operand:SI 0 "register_operand" "")
586 (leu:SI (match_dup 1)
591 if (hppa_branch_type != CMP_SI)
593 operands[1] = hppa_compare_op0;
594 operands[2] = hppa_compare_op1;
597 (define_expand "sgeu"
598 [(set (match_operand:SI 0 "register_operand" "")
599 (geu:SI (match_dup 1)
604 if (hppa_branch_type != CMP_SI)
606 operands[1] = hppa_compare_op0;
607 operands[2] = hppa_compare_op1;
610 ;; Instruction canonicalization puts immediate operands second, which
611 ;; is the reverse of what we want.
614 [(set (match_operand:SI 0 "register_operand" "=r")
615 (match_operator:SI 3 "comparison_operator"
616 [(match_operand:SI 1 "register_operand" "r")
617 (match_operand:SI 2 "arith11_operand" "rI")]))]
619 "com%I2clr,%B3 %2,%1,%0\;ldi 1,%0"
620 [(set_attr "type" "binary")
621 (set_attr "length" "8")])
623 (define_insn "iorscc"
624 [(set (match_operand:SI 0 "register_operand" "=r")
625 (ior:SI (match_operator:SI 3 "comparison_operator"
626 [(match_operand:SI 1 "register_operand" "r")
627 (match_operand:SI 2 "arith11_operand" "rI")])
628 (match_operator:SI 6 "comparison_operator"
629 [(match_operand:SI 4 "register_operand" "r")
630 (match_operand:SI 5 "arith11_operand" "rI")])))]
632 "com%I2clr,%S3 %2,%1,0\;com%I5clr,%B6 %5,%4,%0\;ldi 1,%0"
633 [(set_attr "type" "binary")
634 (set_attr "length" "12")])
636 ;; Combiner patterns for common operations performed with the output
637 ;; from an scc insn (negscc and incscc).
638 (define_insn "negscc"
639 [(set (match_operand:SI 0 "register_operand" "=r")
640 (neg:SI (match_operator:SI 3 "comparison_operator"
641 [(match_operand:SI 1 "register_operand" "r")
642 (match_operand:SI 2 "arith11_operand" "rI")])))]
644 "com%I2clr,%B3 %2,%1,%0\;ldi -1,%0"
645 [(set_attr "type" "binary")
646 (set_attr "length" "8")])
648 ;; Patterns for adding/subtracting the result of a boolean expression from
649 ;; a register. First we have special patterns that make use of the carry
650 ;; bit, and output only two instructions. For the cases we can't in
651 ;; general do in two instructions, the incscc pattern at the end outputs
652 ;; two or three instructions.
655 [(set (match_operand:SI 0 "register_operand" "=r")
656 (plus:SI (leu:SI (match_operand:SI 2 "register_operand" "r")
657 (match_operand:SI 3 "arith11_operand" "rI"))
658 (match_operand:SI 1 "register_operand" "r")))]
660 "sub%I3 %3,%2,0\;addc 0,%1,%0"
661 [(set_attr "type" "binary")
662 (set_attr "length" "8")])
664 ; This need only accept registers for op3, since canonicalization
665 ; replaces geu with gtu when op3 is an integer.
667 [(set (match_operand:SI 0 "register_operand" "=r")
668 (plus:SI (geu:SI (match_operand:SI 2 "register_operand" "r")
669 (match_operand:SI 3 "register_operand" "r"))
670 (match_operand:SI 1 "register_operand" "r")))]
672 "sub %2,%3,0\;addc 0,%1,%0"
673 [(set_attr "type" "binary")
674 (set_attr "length" "8")])
676 ; Match only integers for op3 here. This is used as canonical form of the
677 ; geu pattern when op3 is an integer. Don't match registers since we can't
678 ; make better code than the general incscc pattern.
680 [(set (match_operand:SI 0 "register_operand" "=r")
681 (plus:SI (gtu:SI (match_operand:SI 2 "register_operand" "r")
682 (match_operand:SI 3 "int11_operand" "I"))
683 (match_operand:SI 1 "register_operand" "r")))]
685 "addi %k3,%2,0\;addc 0,%1,%0"
686 [(set_attr "type" "binary")
687 (set_attr "length" "8")])
689 (define_insn "incscc"
690 [(set (match_operand:SI 0 "register_operand" "=r,r")
691 (plus:SI (match_operator:SI 4 "comparison_operator"
692 [(match_operand:SI 2 "register_operand" "r,r")
693 (match_operand:SI 3 "arith11_operand" "rI,rI")])
694 (match_operand:SI 1 "register_operand" "0,?r")))]
697 com%I3clr,%B4 %3,%2,0\;addi 1,%0,%0
698 com%I3clr,%B4 %3,%2,0\;addi,tr 1,%1,%0\;copy %1,%0"
699 [(set_attr "type" "binary,binary")
700 (set_attr "length" "8,12")])
703 [(set (match_operand:SI 0 "register_operand" "=r")
704 (minus:SI (match_operand:SI 1 "register_operand" "r")
705 (gtu:SI (match_operand:SI 2 "register_operand" "r")
706 (match_operand:SI 3 "arith11_operand" "rI"))))]
708 "sub%I3 %3,%2,0\;subb %1,0,%0"
709 [(set_attr "type" "binary")
710 (set_attr "length" "8")])
713 [(set (match_operand:SI 0 "register_operand" "=r")
714 (minus:SI (minus:SI (match_operand:SI 1 "register_operand" "r")
715 (gtu:SI (match_operand:SI 2 "register_operand" "r")
716 (match_operand:SI 3 "arith11_operand" "rI")))
717 (match_operand:SI 4 "register_operand" "r")))]
719 "sub%I3 %3,%2,0\;subb %1,%4,%0"
720 [(set_attr "type" "binary")
721 (set_attr "length" "8")])
723 ; This need only accept registers for op3, since canonicalization
724 ; replaces ltu with leu when op3 is an integer.
726 [(set (match_operand:SI 0 "register_operand" "=r")
727 (minus:SI (match_operand:SI 1 "register_operand" "r")
728 (ltu:SI (match_operand:SI 2 "register_operand" "r")
729 (match_operand:SI 3 "register_operand" "r"))))]
731 "sub %2,%3,0\;subb %1,0,%0"
732 [(set_attr "type" "binary")
733 (set_attr "length" "8")])
736 [(set (match_operand:SI 0 "register_operand" "=r")
737 (minus:SI (minus:SI (match_operand:SI 1 "register_operand" "r")
738 (ltu:SI (match_operand:SI 2 "register_operand" "r")
739 (match_operand:SI 3 "register_operand" "r")))
740 (match_operand:SI 4 "register_operand" "r")))]
742 "sub %2,%3,0\;subb %1,%4,%0"
743 [(set_attr "type" "binary")
744 (set_attr "length" "8")])
746 ; Match only integers for op3 here. This is used as canonical form of the
747 ; ltu pattern when op3 is an integer. Don't match registers since we can't
748 ; make better code than the general incscc pattern.
750 [(set (match_operand:SI 0 "register_operand" "=r")
751 (minus:SI (match_operand:SI 1 "register_operand" "r")
752 (leu:SI (match_operand:SI 2 "register_operand" "r")
753 (match_operand:SI 3 "int11_operand" "I"))))]
755 "addi %k3,%2,0\;subb %1,0,%0"
756 [(set_attr "type" "binary")
757 (set_attr "length" "8")])
760 [(set (match_operand:SI 0 "register_operand" "=r")
761 (minus:SI (minus:SI (match_operand:SI 1 "register_operand" "r")
762 (leu:SI (match_operand:SI 2 "register_operand" "r")
763 (match_operand:SI 3 "int11_operand" "I")))
764 (match_operand:SI 4 "register_operand" "r")))]
766 "addi %k3,%2,0\;subb %1,%4,%0"
767 [(set_attr "type" "binary")
768 (set_attr "length" "8")])
770 (define_insn "decscc"
771 [(set (match_operand:SI 0 "register_operand" "=r,r")
772 (minus:SI (match_operand:SI 1 "register_operand" "0,?r")
773 (match_operator:SI 4 "comparison_operator"
774 [(match_operand:SI 2 "register_operand" "r,r")
775 (match_operand:SI 3 "arith11_operand" "rI,rI")])))]
778 com%I3clr,%B4 %3,%2,0\;addi -1,%0,%0
779 com%I3clr,%B4 %3,%2,0\;addi,tr -1,%1,%0\;copy %1,%0"
780 [(set_attr "type" "binary,binary")
781 (set_attr "length" "8,12")])
783 ; Patterns for max and min. (There is no need for an earlyclobber in the
784 ; last alternative since the middle alternative will match if op0 == op1.)
786 (define_insn "sminsi3"
787 [(set (match_operand:SI 0 "register_operand" "=r,r,r")
788 (smin:SI (match_operand:SI 1 "register_operand" "%0,0,r")
789 (match_operand:SI 2 "arith11_operand" "r,I,M")))]
792 comclr,> %2,%0,0\;copy %2,%0
793 comiclr,> %2,%0,0\;ldi %2,%0
794 comclr,> %1,%2,%0\;copy %1,%0"
795 [(set_attr "type" "multi,multi,multi")
796 (set_attr "length" "8,8,8")])
798 (define_insn "uminsi3"
799 [(set (match_operand:SI 0 "register_operand" "=r,r")
800 (umin:SI (match_operand:SI 1 "register_operand" "%0,0")
801 (match_operand:SI 2 "arith11_operand" "r,I")))]
804 comclr,>> %2,%0,0\;copy %2,%0
805 comiclr,>> %2,%0,0\;ldi %2,%0"
806 [(set_attr "type" "multi,multi")
807 (set_attr "length" "8,8")])
809 (define_insn "smaxsi3"
810 [(set (match_operand:SI 0 "register_operand" "=r,r,r")
811 (smax:SI (match_operand:SI 1 "register_operand" "%0,0,r")
812 (match_operand:SI 2 "arith11_operand" "r,I,M")))]
815 comclr,< %2,%0,0\;copy %2,%0
816 comiclr,< %2,%0,0\;ldi %2,%0
817 comclr,< %1,%2,%0\;copy %1,%0"
818 [(set_attr "type" "multi,multi,multi")
819 (set_attr "length" "8,8,8")])
821 (define_insn "umaxsi3"
822 [(set (match_operand:SI 0 "register_operand" "=r,r")
823 (umax:SI (match_operand:SI 1 "register_operand" "%0,0")
824 (match_operand:SI 2 "arith11_operand" "r,I")))]
827 comclr,<< %2,%0,0\;copy %2,%0
828 comiclr,<< %2,%0,0\;ldi %2,%0"
829 [(set_attr "type" "multi,multi")
830 (set_attr "length" "8,8")])
832 (define_insn "abssi2"
833 [(set (match_operand:SI 0 "register_operand" "=r")
834 (abs:SI (match_operand:SI 1 "register_operand" "r")))]
836 "or,>= %%r0,%1,%0\;subi 0,%0,%0"
837 [(set_attr "type" "multi")
838 (set_attr "length" "8")])
840 ;;; Experimental conditional move patterns
842 (define_expand "movsicc"
843 [(set (match_operand:SI 0 "register_operand" "")
845 (match_operator 1 "comparison_operator"
848 (match_operand:SI 2 "reg_or_cint_move_operand" "")
849 (match_operand:SI 3 "reg_or_cint_move_operand" "")))]
853 enum rtx_code code = GET_CODE (operands[1]);
855 if (hppa_branch_type != CMP_SI)
858 /* operands[1] is currently the result of compare_from_rtx. We want to
859 emit a compare of the original operands. */
860 operands[1] = gen_rtx_fmt_ee (code, SImode, hppa_compare_op0, hppa_compare_op1);
861 operands[4] = hppa_compare_op0;
862 operands[5] = hppa_compare_op1;
865 ; We need the first constraint alternative in order to avoid
866 ; earlyclobbers on all other alternatives.
868 [(set (match_operand:SI 0 "register_operand" "=r,r,r,r,r")
870 (match_operator 5 "comparison_operator"
871 [(match_operand:SI 3 "register_operand" "r,r,r,r,r")
872 (match_operand:SI 4 "arith11_operand" "rI,rI,rI,rI,rI")])
873 (match_operand:SI 1 "reg_or_cint_move_operand" "0,r,J,N,K")
877 com%I4clr,%S5 %4,%3,0\;ldi 0,%0
878 com%I4clr,%B5 %4,%3,%0\;copy %1,%0
879 com%I4clr,%B5 %4,%3,%0\;ldi %1,%0
880 com%I4clr,%B5 %4,%3,%0\;ldil L'%1,%0
881 com%I4clr,%B5 %4,%3,%0\;zdepi %Z1,%0"
882 [(set_attr "type" "multi,multi,multi,multi,nullshift")
883 (set_attr "length" "8,8,8,8,8")])
886 [(set (match_operand:SI 0 "register_operand" "=r,r,r,r,r,r,r,r")
888 (match_operator 5 "comparison_operator"
889 [(match_operand:SI 3 "register_operand" "r,r,r,r,r,r,r,r")
890 (match_operand:SI 4 "arith11_operand" "rI,rI,rI,rI,rI,rI,rI,rI")])
891 (match_operand:SI 1 "reg_or_cint_move_operand" "0,0,0,0,r,J,N,K")
892 (match_operand:SI 2 "reg_or_cint_move_operand" "r,J,N,K,0,0,0,0")))]
895 com%I4clr,%S5 %4,%3,0\;copy %2,%0
896 com%I4clr,%S5 %4,%3,0\;ldi %2,%0
897 com%I4clr,%S5 %4,%3,0\;ldil L'%2,%0
898 com%I4clr,%S5 %4,%3,0\;zdepi %Z2,%0
899 com%I4clr,%B5 %4,%3,0\;copy %1,%0
900 com%I4clr,%B5 %4,%3,0\;ldi %1,%0
901 com%I4clr,%B5 %4,%3,0\;ldil L'%1,%0
902 com%I4clr,%B5 %4,%3,0\;zdepi %Z1,%0"
903 [(set_attr "type" "multi,multi,multi,nullshift,multi,multi,multi,nullshift")
904 (set_attr "length" "8,8,8,8,8,8,8,8")])
906 ;; Conditional Branches
910 (if_then_else (eq (match_dup 1) (match_dup 2))
911 (label_ref (match_operand 0 "" ""))
916 if (hppa_branch_type != CMP_SI)
918 emit_insn (gen_cmp_fp (EQ, hppa_compare_op0, hppa_compare_op1));
919 emit_bcond_fp (NE, operands[0]);
922 /* set up operands from compare. */
923 operands[1] = hppa_compare_op0;
924 operands[2] = hppa_compare_op1;
925 /* fall through and generate default code */
930 (if_then_else (ne (match_dup 1) (match_dup 2))
931 (label_ref (match_operand 0 "" ""))
936 if (hppa_branch_type != CMP_SI)
938 emit_insn (gen_cmp_fp (NE, hppa_compare_op0, hppa_compare_op1));
939 emit_bcond_fp (NE, operands[0]);
942 operands[1] = hppa_compare_op0;
943 operands[2] = hppa_compare_op1;
948 (if_then_else (gt (match_dup 1) (match_dup 2))
949 (label_ref (match_operand 0 "" ""))
954 if (hppa_branch_type != CMP_SI)
956 emit_insn (gen_cmp_fp (GT, hppa_compare_op0, hppa_compare_op1));
957 emit_bcond_fp (NE, operands[0]);
960 operands[1] = hppa_compare_op0;
961 operands[2] = hppa_compare_op1;
966 (if_then_else (lt (match_dup 1) (match_dup 2))
967 (label_ref (match_operand 0 "" ""))
972 if (hppa_branch_type != CMP_SI)
974 emit_insn (gen_cmp_fp (LT, hppa_compare_op0, hppa_compare_op1));
975 emit_bcond_fp (NE, operands[0]);
978 operands[1] = hppa_compare_op0;
979 operands[2] = hppa_compare_op1;
984 (if_then_else (ge (match_dup 1) (match_dup 2))
985 (label_ref (match_operand 0 "" ""))
990 if (hppa_branch_type != CMP_SI)
992 emit_insn (gen_cmp_fp (GE, hppa_compare_op0, hppa_compare_op1));
993 emit_bcond_fp (NE, operands[0]);
996 operands[1] = hppa_compare_op0;
997 operands[2] = hppa_compare_op1;
1000 (define_expand "ble"
1002 (if_then_else (le (match_dup 1) (match_dup 2))
1003 (label_ref (match_operand 0 "" ""))
1008 if (hppa_branch_type != CMP_SI)
1010 emit_insn (gen_cmp_fp (LE, hppa_compare_op0, hppa_compare_op1));
1011 emit_bcond_fp (NE, operands[0]);
1014 operands[1] = hppa_compare_op0;
1015 operands[2] = hppa_compare_op1;
1018 (define_expand "bgtu"
1020 (if_then_else (gtu (match_dup 1) (match_dup 2))
1021 (label_ref (match_operand 0 "" ""))
1026 if (hppa_branch_type != CMP_SI)
1028 operands[1] = hppa_compare_op0;
1029 operands[2] = hppa_compare_op1;
1032 (define_expand "bltu"
1034 (if_then_else (ltu (match_dup 1) (match_dup 2))
1035 (label_ref (match_operand 0 "" ""))
1040 if (hppa_branch_type != CMP_SI)
1042 operands[1] = hppa_compare_op0;
1043 operands[2] = hppa_compare_op1;
1046 (define_expand "bgeu"
1048 (if_then_else (geu (match_dup 1) (match_dup 2))
1049 (label_ref (match_operand 0 "" ""))
1054 if (hppa_branch_type != CMP_SI)
1056 operands[1] = hppa_compare_op0;
1057 operands[2] = hppa_compare_op1;
1060 (define_expand "bleu"
1062 (if_then_else (leu (match_dup 1) (match_dup 2))
1063 (label_ref (match_operand 0 "" ""))
1068 if (hppa_branch_type != CMP_SI)
1070 operands[1] = hppa_compare_op0;
1071 operands[2] = hppa_compare_op1;
1074 ;; Match the branch patterns.
1077 ;; Note a long backward conditional branch with an annulled delay slot
1078 ;; has a length of 12.
;; Compare-and-branch insn: compares a register (or 0) against a
;; register or 5-bit immediate; assembly is produced by output_cbranch
;; (fourth argument 0 = branch not negated).  The length attribute
;; chooses between short and long branch sequences based on the
;; pc-relative distance to the label, and on flag_pic.
1082 (match_operator 3 "comparison_operator"
1083 [(match_operand:SI 1 "reg_or_0_operand" "rM")
1084 (match_operand:SI 2 "arith5_operand" "rL")])
1085 (label_ref (match_operand 0 "" ""))
1090 return output_cbranch (operands, INSN_ANNULLED_BRANCH_P (insn),
1091 get_attr_length (insn), 0, insn);
1093 [(set_attr "type" "cbranch")
1094 (set (attr "length")
1095 (cond [(lt (abs (minus (match_dup 0) (plus (pc) (const_int 8))))
1098 (lt (abs (minus (match_dup 0) (plus (pc) (const_int 8))))
1101 (eq (symbol_ref "flag_pic") (const_int 0))
1105 ;; Match the negated branch.
;; Same as above with the branch sense inverted (fourth argument to
;; output_cbranch is 1).
1110 (match_operator 3 "comparison_operator"
1111 [(match_operand:SI 1 "reg_or_0_operand" "rM")
1112 (match_operand:SI 2 "arith5_operand" "rL")])
1114 (label_ref (match_operand 0 "" ""))))]
1118 return output_cbranch (operands, INSN_ANNULLED_BRANCH_P (insn),
1119 get_attr_length (insn), 1, insn);
1121 [(set_attr "type" "cbranch")
1122 (set (attr "length")
1123 (cond [(lt (abs (minus (match_dup 0) (plus (pc) (const_int 8))))
1126 (lt (abs (minus (match_dup 0) (plus (pc) (const_int 8))))
1129 (eq (symbol_ref "flag_pic") (const_int 0))
1133 ;; Branch on Bit patterns.
;; Test a fixed bit (5-bit immediate position) of a register and branch.
;; output_bb's last two int args are (negated, which-sense): the four
;; patterns below cover ne/eq, each in normal and negated form.
1137 (ne (zero_extract:SI (match_operand:SI 0 "register_operand" "r")
1139 (match_operand:SI 1 "uint5_operand" ""))
1141 (label_ref (match_operand 2 "" ""))
1146 return output_bb (operands, INSN_ANNULLED_BRANCH_P (insn),
1147 get_attr_length (insn), 0, insn, 0);
1149 [(set_attr "type" "cbranch")
1150 (set (attr "length")
1151 (if_then_else (lt (abs (minus (match_dup 2) (plus (pc) (const_int 8))))
;; Negated branch on bit != 0.
1159 (ne (zero_extract:SI (match_operand:SI 0 "register_operand" "r")
1161 (match_operand:SI 1 "uint5_operand" ""))
1164 (label_ref (match_operand 2 "" ""))))]
1168 return output_bb (operands, INSN_ANNULLED_BRANCH_P (insn),
1169 get_attr_length (insn), 1, insn, 0);
1171 [(set_attr "type" "cbranch")
1172 (set (attr "length")
1173 (if_then_else (lt (abs (minus (match_dup 2) (plus (pc) (const_int 8))))
;; Branch on bit == 0.
1181 (eq (zero_extract:SI (match_operand:SI 0 "register_operand" "r")
1183 (match_operand:SI 1 "uint5_operand" ""))
1185 (label_ref (match_operand 2 "" ""))
1190 return output_bb (operands, INSN_ANNULLED_BRANCH_P (insn),
1191 get_attr_length (insn), 0, insn, 1);
1193 [(set_attr "type" "cbranch")
1194 (set (attr "length")
1195 (if_then_else (lt (abs (minus (match_dup 2) (plus (pc) (const_int 8))))
;; Negated branch on bit == 0.
1203 (eq (zero_extract:SI (match_operand:SI 0 "register_operand" "r")
1205 (match_operand:SI 1 "uint5_operand" ""))
1208 (label_ref (match_operand 2 "" ""))))]
1212 return output_bb (operands, INSN_ANNULLED_BRANCH_P (insn),
1213 get_attr_length (insn), 1, insn, 1);
1215 [(set_attr "type" "cbranch")
1216 (set (attr "length")
1217 (if_then_else (lt (abs (minus (match_dup 2) (plus (pc) (const_int 8))))
1222 ;; Branch on Variable Bit patterns.
;; Same four shapes as above, but the bit position comes from a register
;; (constraint "q"); assembly is produced by output_bvb.
1226 (ne (zero_extract:SI (match_operand:SI 0 "register_operand" "r")
1228 (match_operand:SI 1 "register_operand" "q"))
1230 (label_ref (match_operand 2 "" ""))
1235 return output_bvb (operands, INSN_ANNULLED_BRANCH_P (insn),
1236 get_attr_length (insn), 0, insn, 0);
1238 [(set_attr "type" "cbranch")
1239 (set (attr "length")
1240 (if_then_else (lt (abs (minus (match_dup 2) (plus (pc) (const_int 8))))
;; Negated variable-bit branch, bit != 0.
1248 (ne (zero_extract:SI (match_operand:SI 0 "register_operand" "r")
1250 (match_operand:SI 1 "register_operand" "q"))
1253 (label_ref (match_operand 2 "" ""))))]
1257 return output_bvb (operands, INSN_ANNULLED_BRANCH_P (insn),
1258 get_attr_length (insn), 1, insn, 0);
1260 [(set_attr "type" "cbranch")
1261 (set (attr "length")
1262 (if_then_else (lt (abs (minus (match_dup 2) (plus (pc) (const_int 8))))
;; Variable-bit branch, bit == 0.
1270 (eq (zero_extract:SI (match_operand:SI 0 "register_operand" "r")
1272 (match_operand:SI 1 "register_operand" "q"))
1274 (label_ref (match_operand 2 "" ""))
1279 return output_bvb (operands, INSN_ANNULLED_BRANCH_P (insn),
1280 get_attr_length (insn), 0, insn, 1);
1282 [(set_attr "type" "cbranch")
1283 (set (attr "length")
1284 (if_then_else (lt (abs (minus (match_dup 2) (plus (pc) (const_int 8))))
;; Negated variable-bit branch, bit == 0.
1292 (eq (zero_extract:SI (match_operand:SI 0 "register_operand" "r")
1294 (match_operand:SI 1 "register_operand" "q"))
1297 (label_ref (match_operand 2 "" ""))))]
1301 return output_bvb (operands, INSN_ANNULLED_BRANCH_P (insn),
1302 get_attr_length (insn), 1, insn, 1);
1304 [(set_attr "type" "cbranch")
1305 (set (attr "length")
1306 (if_then_else (lt (abs (minus (match_dup 2) (plus (pc) (const_int 8))))
1311 ;; Floating point branches
;; Branch on the FP status register (reg:CCFP 0).  "ftest" nullifies the
;; following instruction when the FP condition is false; an annulled
;; delay slot uses "bl,n".  Length 8: ftest + branch.
1313 [(set (pc) (if_then_else (ne (reg:CCFP 0) (const_int 0))
1314 (label_ref (match_operand 0 "" ""))
1316 "! TARGET_SOFT_FLOAT"
1319 if (INSN_ANNULLED_BRANCH_P (insn))
1320 return \"ftest\;bl,n %0,0\";
1322 return \"ftest\;bl%* %0,0\";
1324 [(set_attr "type" "fbranch")
1325 (set_attr "length" "8")])
;; Inverted FP branch: an extra "add,tr 0,0,0" skips the branch when the
;; condition holds, giving the negated sense.  Length 12: three insns.
1328 [(set (pc) (if_then_else (ne (reg:CCFP 0) (const_int 0))
1330 (label_ref (match_operand 0 "" ""))))]
1331 "! TARGET_SOFT_FLOAT"
1334 if (INSN_ANNULLED_BRANCH_P (insn))
1335 return \"ftest\;add,tr 0,0,0\;bl,n %0,0\";
1337 return \"ftest\;add,tr 0,0,0\;bl%* %0,0\";
1339 [(set_attr "type" "fbranch")
1340 (set_attr "length" "12")])
1342 ;; Move instructions
;; movsi expander: emit_move_sequence does all the work (legitimizing
;; addresses, splitting constants); on success the expander is done.
1344 (define_expand "movsi"
1345 [(set (match_operand:SI 0 "general_operand" "")
1346 (match_operand:SI 1 "general_operand" ""))]
1350 if (emit_move_sequence (operands, SImode, 0))
1354 ;; Reloading an SImode or DImode value requires a scratch register if
1355 ;; going in to or out of floating point registers.
;; reload_insi/reload_outsi: reload helpers with an explicit GR scratch
;; (operand 2).  If emit_move_sequence handles the move, the clobber must
;; not be emitted, so the plain SET is emitted by hand.
1357 (define_expand "reload_insi"
1358 [(set (match_operand:SI 0 "register_operand" "=Z")
1359 (match_operand:SI 1 "non_hard_reg_operand" ""))
1360 (clobber (match_operand:SI 2 "register_operand" "=&r"))]
1364 if (emit_move_sequence (operands, SImode, operands[2]))
1367 /* We don't want the clobber emitted, so handle this ourselves. */
1368 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
1372 (define_expand "reload_outsi"
1373 [(set (match_operand:SI 0 "non_hard_reg_operand" "")
1374 (match_operand:SI 1 "register_operand" "Z"))
1375 (clobber (match_operand:SI 2 "register_operand" "=&r"))]
1379 if (emit_move_sequence (operands, SImode, operands[2]))
1382 /* We don't want the clobber emitted, so handle this ourselves. */
1383 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
1387 ;;; pic symbol references
;; Load through the PIC offset table: (mem (pic_register + symbol)).
1390 [(set (match_operand:SI 0 "register_operand" "=r")
1391 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "r")
1392 (match_operand:SI 2 "symbolic_operand" ""))))]
1393 "flag_pic && operands[1] == pic_offset_table_rtx"
1395 [(set_attr "type" "load")
1396 (set_attr "length" "4")])
;; Main movsi insn, hard-float variant: 10 alternatives covering
;; GR<->GR, immediates (J/N/K), memory, space regs (*q), and FP regs.
1399 [(set (match_operand:SI 0 "reg_or_nonsymb_mem_operand"
1400 "=r,r,r,r,r,Q,*q,!f,f,*TR")
1401 (match_operand:SI 1 "move_operand"
1402 "r,J,N,K,RQ,rM,rM,!fM,*RT,f"))]
1403 "(register_operand (operands[0], SImode)
1404 || reg_or_0_operand (operands[1], SImode))
1405 && ! TARGET_SOFT_FLOAT"
1417 [(set_attr "type" "move,move,move,shift,load,store,move,fpalu,fpload,fpstore")
1418 (set_attr "pa_combine_type" "addmove")
1419 (set_attr "length" "4,4,4,4,4,4,4,4,4,4")])
;; movsi insn, soft-float variant (no FP-register alternatives).
1422 [(set (match_operand:SI 0 "reg_or_nonsymb_mem_operand"
1424 (match_operand:SI 1 "move_operand"
1425 "r,J,N,K,RQ,rM,rM"))]
1426 "(register_operand (operands[0], SImode)
1427 || reg_or_0_operand (operands[1], SImode))
1428 && TARGET_SOFT_FLOAT"
1437 [(set_attr "type" "move,move,move,move,load,store,move")
1438 (set_attr "pa_combine_type" "addmove")
1439 (set_attr "length" "4,4,4,4,4,4,4")])
;; Indexed word load (ldwx).  Two patterns so either operand may be the
;; base; the base register goes inside the parentheses in the ldwx
;; syntax, hence the operand-order check against fp/sp.
1442 [(set (match_operand:SI 0 "register_operand" "=r")
1443 (mem:SI (plus:SI (match_operand:SI 1 "basereg_operand" "r")
1444 (match_operand:SI 2 "register_operand" "r"))))]
1445 "! TARGET_DISABLE_INDEXING"
1448 /* Reload can create backwards (relative to cse) unscaled index
1449 address modes when eliminating registers and possibly for
1450 pseudos that don't get hard registers. Deal with it. */
1451 if (operands[2] == hard_frame_pointer_rtx
1452 || operands[2] == stack_pointer_rtx)
1453 return \"ldwx %1(%2),%0\";
1455 return \"ldwx %2(%1),%0\";
1457 [(set_attr "type" "load")
1458 (set_attr "length" "4")])
;; Mirror pattern: base register is operand 2.
1461 [(set (match_operand:SI 0 "register_operand" "=r")
1462 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "r")
1463 (match_operand:SI 2 "basereg_operand" "r"))))]
1464 "! TARGET_DISABLE_INDEXING"
1467 /* Reload can create backwards (relative to cse) unscaled index
1468 address modes when eliminating registers and possibly for
1469 pseudos that don't get hard registers. Deal with it. */
1470 if (operands[1] == hard_frame_pointer_rtx
1471 || operands[1] == stack_pointer_rtx)
1472 return \"ldwx %2(%1),%0\";
1474 return \"ldwx %1(%2),%0\";
1476 [(set_attr "type" "load")
1477 (set_attr "length" "4")])
1479 ;; Load or store with base-register modification.
;; pre_ldwm: load word with pre-modification of the base by a 5-bit
;; displacement.  A negative displacement uses the ldwm mnemonic,
;; otherwise ldws,mb (modify-before).
1481 (define_insn "pre_ldwm"
1482 [(set (match_operand:SI 0 "register_operand" "=r")
1483 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "+r")
1484 (match_operand:SI 2 "pre_cint_operand" ""))))
1486 (plus:SI (match_dup 1) (match_dup 2)))]
1490 if (INTVAL (operands[2]) < 0)
1491 return \"ldwm %2(%1),%0\";
1492 return \"ldws,mb %2(%1),%0\";
1494 [(set_attr "type" "load")
1495 (set_attr "length" "4")])
;; pre_stwm: store word with pre-modification of the base.
1497 (define_insn "pre_stwm"
1498 [(set (mem:SI (plus:SI (match_operand:SI 0 "register_operand" "+r")
1499 (match_operand:SI 1 "pre_cint_operand" "")))
1500 (match_operand:SI 2 "reg_or_0_operand" "rM"))
1502 (plus:SI (match_dup 0) (match_dup 1)))]
1506 if (INTVAL (operands[1]) < 0)
1507 return \"stwm %r2,%1(%0)\";
1508 return \"stws,mb %r2,%1(%0)\";
1510 [(set_attr "type" "store")
1511 (set_attr "length" "4")])
;; post_ldwm: load word, then modify the base (modify-after).  Positive
;; displacement maps to ldwm, otherwise ldws,ma.
1513 (define_insn "post_ldwm"
1514 [(set (match_operand:SI 0 "register_operand" "=r")
1515 (mem:SI (match_operand:SI 1 "register_operand" "+r")))
1517 (plus:SI (match_dup 1)
1518 (match_operand:SI 2 "post_cint_operand" "")))]
1522 if (INTVAL (operands[2]) > 0)
1523 return \"ldwm %2(%1),%0\";
1524 return \"ldws,ma %2(%1),%0\";
1526 [(set_attr "type" "load")
1527 (set_attr "length" "4")])
;; post_stwm: store word, then modify the base.
1529 (define_insn "post_stwm"
1530 [(set (mem:SI (match_operand:SI 0 "register_operand" "+r"))
1531 (match_operand:SI 1 "reg_or_0_operand" "rM"))
1533 (plus:SI (match_dup 0)
1534 (match_operand:SI 2 "post_cint_operand" "")))]
1538 if (INTVAL (operands[2]) > 0)
1539 return \"stwm %r1,%2(%0)\";
1540 return \"stws,ma %r1,%2(%0)\";
1542 [(set_attr "type" "store")
1543 (set_attr "length" "4")])
1546 ;; Note since this pattern can be created at reload time (via movsi), all
1547 ;; the same rules for movsi apply here. (no new pseudos, no temporaries).
;; pic_load_label: materialize the address of a label for PIC code with
;; a "bl .+8" to capture the PC, then either a short ldo (label within
;; reach) or an addil/ldo pair.  Emits an internal label directly via
;; ASM_OUTPUT_INTERNAL_LABEL as the reference point.
1548 (define_insn "pic_load_label"
1549 [(set (match_operand:SI 0 "register_operand" "=a")
1550 (match_operand:SI 1 "pic_label_operand" ""))]
1554 rtx label_rtx = gen_label_rtx ();
1556 extern FILE *asm_out_file;
1558 xoperands[0] = operands[0];
1559 xoperands[1] = operands[1];
1560 xoperands[2] = label_rtx;
1561 output_asm_insn (\"bl .+8,%0\", xoperands);
1562 output_asm_insn (\"depi 0,31,2,%0\", xoperands);
1563 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, \"L\",
1564 CODE_LABEL_NUMBER (label_rtx));
1566 /* If we're trying to load the address of a label that happens to be
1567 close, then we can use a shorter sequence. */
1568 if (GET_CODE (operands[1]) == LABEL_REF
1570 && abs (insn_addresses[INSN_UID (XEXP (operands[1], 0))]
1571 - insn_addresses[INSN_UID (insn)]) < 8100)
1573 /* Prefixing with R% here is wrong, it extracts just 11 bits and is
1574 always non-negative. */
1575 output_asm_insn (\"ldo %1-%2(%0),%0\", xoperands);
1579 output_asm_insn (\"addil L%%%1-%2,%0\", xoperands);
1580 output_asm_insn (\"ldo R%%%1-%2(%0),%0\", xoperands);
1584 [(set_attr "type" "multi")
1585 (set_attr "length" "16")]) ; 12 or 16
;; pic2_highpart: add the high part of a symbol to a register (PIC,
;; non-function-label case).
1587 (define_insn "pic2_highpart"
1588 [(set (match_operand:SI 0 "register_operand" "=a")
1589 (plus:SI (match_operand:SI 1 "register_operand" "r")
1590 (high:SI (match_operand 2 "" ""))))]
1591 "symbolic_operand (operands[2], Pmode)
1592 && ! function_label_operand (operands[2])
1595 [(set_attr "type" "binary")
1596 (set_attr "length" "4")])
1598 ; We need this to make sure CSE doesn't simplify a memory load with a
1599 ; symbolic address, whose content it thinks it knows. For PIC, what CSE
1600 ; thinks is the real value will be the address of that value.
;; The unspec wrapper keeps the load opaque to CSE.
1601 (define_insn "pic2_lo_sum"
1602 [(set (match_operand:SI 0 "register_operand" "=r")
1603 (mem:SI (lo_sum:SI (match_operand:SI 1 "register_operand" "r")
1604 (unspec:SI [(match_operand:SI 2 "symbolic_operand" "")] 0))))]
1610 return \"ldw RT'%G2(%1),%0\";
1612 [(set_attr "type" "load")
1613 (set_attr "length" "4")])
1616 ;; Always use addil rather than ldil;add sequences. This allows the
1617 ;; HP linker to eliminate the dp relocation if the symbolic operand
1618 ;; lives in the TEXT space.
;; High-part of a writable (non-read-only, non-function-label) symbol:
;; addil against %r27 (the data pointer).  With long load/store enabled
;; an extra ldo is needed, doubling the length.
1620 [(set (match_operand:SI 0 "register_operand" "=a")
1621 (high:SI (match_operand 1 "" "")))]
1622 "symbolic_operand (operands[1], Pmode)
1623 && ! function_label_operand (operands[1])
1624 && ! read_only_operand (operands[1])
1628 if (TARGET_LONG_LOAD_STORE)
1629 return \"addil NLR'%H1,%%r27\;ldo N'%H1(%%r1),%%r1\";
1631 return \"addil LR'%H1,%%r27\";
1633 [(set_attr "type" "binary")
1634 (set (attr "length")
1635 (if_then_else (eq (symbol_ref "TARGET_LONG_LOAD_STORE") (const_int 0))
1640 ;; This is for use in the prologue/epilogue code. We need it
1641 ;; to add large constants to a stack pointer or frame pointer.
1642 ;; Because of the additional %r1 pressure, we probably do not
1643 ;; want to use this in general code, so make it available
1644 ;; only after reload.
1645 (define_insn "add_high_const"
1646 [(set (match_operand:SI 0 "register_operand" "=!a,*r")
1647 (plus:SI (match_operand:SI 1 "register_operand" "r,r")
1648 (high:SI (match_operand 2 "const_int_operand" ""))))]
1652 ldil L'%G2,%0\;addl %0,%1,%0"
1653 [(set_attr "type" "binary,binary")
1654 (set_attr "length" "4,8")])
;; Load the high part of a symbolic or constant-int address with ldil.
;; Not used for PIC symbolic references (those go through the PIC
;; patterns above), hence the !flag_pic || !symbolic_operand guard.
;; FIX(review): the original condition read
;;   "(!flag_pic || !symbolic_operand (operands[1]), Pmode)"
;; — the misplaced parenthesis turned the whole test into a C comma
;; expression whose value is Pmode (always nonzero), so the guard never
;; rejected anything and symbolic_operand was called with one argument.
;; Corrected to pass Pmode to symbolic_operand and close the condition
;; properly, restoring the intended guard.
1657 [(set (match_operand:SI 0 "register_operand" "=r")
1658 (high:SI (match_operand 1 "" "")))]
1659 "(!flag_pic || !symbolic_operand (operands[1], Pmode))
1660 && !is_function_label_plus_const (operands[1])"
1663 if (symbolic_operand (operands[1], Pmode))
1664 return \"ldil LR'%H1,%0\";
1666 return \"ldil L'%G1,%0\";
1668 [(set_attr "type" "move")
1669 (set_attr "length" "4")])
;; Low-part add (ldo) for a register plus immediate.  Symbolic operands
;; get the RR'/R' field selectors; the PIC-symbolic branch is handled
;; first (its return is on a line missing from this listing).
1672 [(set (match_operand:SI 0 "register_operand" "=r")
1673 (lo_sum:SI (match_operand:SI 1 "register_operand" "r")
1674 (match_operand:SI 2 "immediate_operand" "i")))]
1675 "!is_function_label_plus_const (operands[2])"
1678 if (flag_pic && symbolic_operand (operands[2], Pmode))
1680 else if (symbolic_operand (operands[2], Pmode))
1681 return \"ldo RR'%G2(%1),%0\";
1683 return \"ldo R'%G2(%1),%0\";
1685 [(set_attr "type" "move")
1686 (set_attr "length" "4")])
1688 ;; Now that a symbolic_address plus a constant is broken up early
1689 ;; in the compilation phase (for better CSE) we need a special
1690 ;; combiner pattern to load the symbolic address plus the constant
1691 ;; in only 2 instructions. (For cases where the symbolic address
1692 ;; was not a common subexpression.)
;; Split a symbolic move into high-part + lo_sum using the scratch
;; register (operand 2), except for PIC label operands.
1694 [(set (match_operand:SI 0 "register_operand" "")
1695 (match_operand:SI 1 "symbolic_operand" ""))
1696 (clobber (match_operand:SI 2 "register_operand" ""))]
1697 "! (flag_pic && pic_label_operand (operands[1], SImode))"
1698 [(set (match_dup 2) (high:SI (match_dup 1)))
1699 (set (match_dup 0) (lo_sum:SI (match_dup 2) (match_dup 1)))]
1702 ;; hppa_legitimize_address goes to a great deal of trouble to
1703 ;; create addresses which use indexing. In some cases, this
1704 ;; is a lose because there isn't any store instructions which
1705 ;; allow indexed addresses (with integer register source).
1707 ;; These define_splits try to turn a 3 insn store into
1708 ;; a 2 insn store with some creative RTL rewriting.
;; SI store: fold (reg*scale + reg) into the scratch (operand 5), then
;; store at scratch + constant displacement.
1710 [(set (mem:SI (plus:SI (mult:SI (match_operand:SI 0 "register_operand" "")
1711 (match_operand:SI 1 "shadd_operand" ""))
1712 (plus:SI (match_operand:SI 2 "register_operand" "")
1713 (match_operand:SI 3 "const_int_operand" ""))))
1714 (match_operand:SI 4 "register_operand" ""))
1715 (clobber (match_operand:SI 5 "register_operand" ""))]
1717 [(set (match_dup 5) (plus:SI (mult:SI (match_dup 0) (match_dup 1))
1719 (set (mem:SI (plus:SI (match_dup 5) (match_dup 3))) (match_dup 4))]
;; Same rewrite for HImode stores.
1723 [(set (mem:HI (plus:SI (mult:SI (match_operand:SI 0 "register_operand" "")
1724 (match_operand:SI 1 "shadd_operand" ""))
1725 (plus:SI (match_operand:SI 2 "register_operand" "")
1726 (match_operand:SI 3 "const_int_operand" ""))))
1727 (match_operand:HI 4 "register_operand" ""))
1728 (clobber (match_operand:SI 5 "register_operand" ""))]
1730 [(set (match_dup 5) (plus:SI (mult:SI (match_dup 0) (match_dup 1))
1732 (set (mem:HI (plus:SI (match_dup 5) (match_dup 3))) (match_dup 4))]
;; Same rewrite for QImode stores.
1736 [(set (mem:QI (plus:SI (mult:SI (match_operand:SI 0 "register_operand" "")
1737 (match_operand:SI 1 "shadd_operand" ""))
1738 (plus:SI (match_operand:SI 2 "register_operand" "")
1739 (match_operand:SI 3 "const_int_operand" ""))))
1740 (match_operand:QI 4 "register_operand" ""))
1741 (clobber (match_operand:SI 5 "register_operand" ""))]
1743 [(set (match_dup 5) (plus:SI (mult:SI (match_dup 0) (match_dup 1))
1745 (set (mem:QI (plus:SI (match_dup 5) (match_dup 3))) (match_dup 4))]
;; movhi expander: defer to emit_move_sequence, as with movsi.
1748 (define_expand "movhi"
1749 [(set (match_operand:HI 0 "general_operand" "")
1750 (match_operand:HI 1 "general_operand" ""))]
1754 if (emit_move_sequence (operands, HImode, 0))
;; Main movhi insn: GRs, immediates, memory, space regs, FP regs.
1759 [(set (match_operand:HI 0 "reg_or_nonsymb_mem_operand" "=r,r,r,r,r,Q,*q,!f")
1760 (match_operand:HI 1 "move_operand" "r,J,N,K,RQ,rM,rM,!fM"))]
1761 "register_operand (operands[0], HImode)
1762 || reg_or_0_operand (operands[1], HImode)"
1772 [(set_attr "type" "move,move,move,shift,load,store,move,fpalu")
1773 (set_attr "pa_combine_type" "addmove")
1774 (set_attr "length" "4,4,4,4,4,4,4,4")])
;; Indexed halfword load (ldhx); two patterns so either operand may be
;; the base register (see the ldwx patterns for the fp/sp rationale).
1777 [(set (match_operand:HI 0 "register_operand" "=r")
1778 (mem:HI (plus:SI (match_operand:SI 1 "basereg_operand" "r")
1779 (match_operand:SI 2 "register_operand" "r"))))]
1780 "! TARGET_DISABLE_INDEXING"
1783 /* Reload can create backwards (relative to cse) unscaled index
1784 address modes when eliminating registers and possibly for
1785 pseudos that don't get hard registers. Deal with it. */
1786 if (operands[2] == hard_frame_pointer_rtx
1787 || operands[2] == stack_pointer_rtx)
1788 return \"ldhx %1(%2),%0\";
1790 return \"ldhx %2(%1),%0\";
1792 [(set_attr "type" "load")
1793 (set_attr "length" "4")])
;; Mirror pattern: base register is operand 2.
1796 [(set (match_operand:HI 0 "register_operand" "=r")
1797 (mem:HI (plus:SI (match_operand:SI 1 "register_operand" "r")
1798 (match_operand:SI 2 "basereg_operand" "r"))))]
1799 "! TARGET_DISABLE_INDEXING"
1802 /* Reload can create backwards (relative to cse) unscaled index
1803 address modes when eliminating registers and possibly for
1804 pseudos that don't get hard registers. Deal with it. */
1805 if (operands[1] == hard_frame_pointer_rtx
1806 || operands[1] == stack_pointer_rtx)
1807 return \"ldhx %2(%1),%0\";
1809 return \"ldhx %1(%2),%0\";
1811 [(set_attr "type" "load")
1812 (set_attr "length" "4")])
1814 ; Now zero extended variants.
1816 [(set (match_operand:SI 0 "register_operand" "=r")
1817 (zero_extend:SI (mem:HI
1819 (match_operand:SI 1 "basereg_operand" "r")
1820 (match_operand:SI 2 "register_operand" "r")))))]
1821 "! TARGET_DISABLE_INDEXING"
1824 /* Reload can create backwards (relative to cse) unscaled index
1825 address modes when eliminating registers and possibly for
1826 pseudos that don't get hard registers. Deal with it. */
1827 if (operands[2] == hard_frame_pointer_rtx
1828 || operands[2] == stack_pointer_rtx)
1829 return \"ldhx %1(%2),%0\";
1831 return \"ldhx %2(%1),%0\";
1833 [(set_attr "type" "load")
1834 (set_attr "length" "4")])
;; Zero-extending mirror pattern.
1837 [(set (match_operand:SI 0 "register_operand" "=r")
1838 (zero_extend:SI (mem:HI
1840 (match_operand:SI 1 "register_operand" "r")
1841 (match_operand:SI 2 "basereg_operand" "r")))))]
1842 "! TARGET_DISABLE_INDEXING"
1845 /* Reload can create backwards (relative to cse) unscaled index
1846 address modes when eliminating registers and possibly for
1847 pseudos that don't get hard registers. Deal with it. */
1848 if (operands[1] == hard_frame_pointer_rtx
1849 || operands[1] == stack_pointer_rtx)
1850 return \"ldhx %2(%1),%0\";
1852 return \"ldhx %1(%2),%0\";
1854 [(set_attr "type" "load")
1855 (set_attr "length" "4")])
;; Halfword load with base modification (5-bit displacement).
1858 [(set (match_operand:HI 0 "register_operand" "=r")
1859 (mem:HI (plus:SI (match_operand:SI 1 "register_operand" "+r")
1860 (match_operand:SI 2 "int5_operand" "L"))))
1862 (plus:SI (match_dup 1) (match_dup 2)))]
1865 [(set_attr "type" "load")
1866 (set_attr "length" "4")])
1868 ; And a zero extended variant.
1870 [(set (match_operand:SI 0 "register_operand" "=r")
1871 (zero_extend:SI (mem:HI
1873 (match_operand:SI 1 "register_operand" "+r")
1874 (match_operand:SI 2 "int5_operand" "L")))))
1876 (plus:SI (match_dup 1) (match_dup 2)))]
1879 [(set_attr "type" "load")
1880 (set_attr "length" "4")])
;; Halfword store with base modification (sths,mb).
1883 [(set (mem:HI (plus:SI (match_operand:SI 0 "register_operand" "+r")
1884 (match_operand:SI 1 "int5_operand" "L")))
1885 (match_operand:HI 2 "reg_or_0_operand" "rM"))
1887 (plus:SI (match_dup 0) (match_dup 1)))]
1889 "sths,mb %r2,%1(%0)"
1890 [(set_attr "type" "store")
1891 (set_attr "length" "4")])
;; High/lo_sum of an HImode constant.
1894 [(set (match_operand:HI 0 "register_operand" "=r")
1895 (high:HI (match_operand 1 "const_int_operand" "")))]
1898 [(set_attr "type" "move")
1899 (set_attr "length" "4")])
1902 [(set (match_operand:HI 0 "register_operand" "=r")
1903 (lo_sum:HI (match_operand:HI 1 "register_operand" "r")
1904 (match_operand 2 "const_int_operand" "")))]
1907 [(set_attr "type" "move")
1908 (set_attr "length" "4")])
;; movqi expander: defer to emit_move_sequence, as with movsi/movhi.
1910 (define_expand "movqi"
1911 [(set (match_operand:QI 0 "general_operand" "")
1912 (match_operand:QI 1 "general_operand" ""))]
1916 if (emit_move_sequence (operands, QImode, 0))
;; Main movqi insn: same alternative layout as movhi.
1921 [(set (match_operand:QI 0 "reg_or_nonsymb_mem_operand" "=r,r,r,r,r,Q,*q,!f")
1922 (match_operand:QI 1 "move_operand" "r,J,N,K,RQ,rM,rM,!fM"))]
1923 "register_operand (operands[0], QImode)
1924 || reg_or_0_operand (operands[1], QImode)"
1934 [(set_attr "type" "move,move,move,shift,load,store,move,fpalu")
1935 (set_attr "pa_combine_type" "addmove")
1936 (set_attr "length" "4,4,4,4,4,4,4,4")])
;; Indexed byte load (ldbx); two patterns so either operand may be the
;; base register (see the ldwx patterns for the fp/sp rationale).
1939 [(set (match_operand:QI 0 "register_operand" "=r")
1940 (mem:QI (plus:SI (match_operand:SI 1 "basereg_operand" "r")
1941 (match_operand:SI 2 "register_operand" "r"))))]
1942 "! TARGET_DISABLE_INDEXING"
1945 /* Reload can create backwards (relative to cse) unscaled index
1946 address modes when eliminating registers and possibly for
1947 pseudos that don't get hard registers. Deal with it. */
1948 if (operands[2] == hard_frame_pointer_rtx
1949 || operands[2] == stack_pointer_rtx)
1950 return \"ldbx %1(%2),%0\";
1952 return \"ldbx %2(%1),%0\";
1954 [(set_attr "type" "load")
1955 (set_attr "length" "4")])
;; Mirror pattern: base register is operand 2.
1958 [(set (match_operand:QI 0 "register_operand" "=r")
1959 (mem:QI (plus:SI (match_operand:SI 1 "register_operand" "r")
1960 (match_operand:SI 2 "basereg_operand" "r"))))]
1961 "! TARGET_DISABLE_INDEXING"
1964 /* Reload can create backwards (relative to cse) unscaled index
1965 address modes when eliminating registers and possibly for
1966 pseudos that don't get hard registers. Deal with it. */
1967 if (operands[1] == hard_frame_pointer_rtx
1968 || operands[1] == stack_pointer_rtx)
1969 return \"ldbx %2(%1),%0\";
1971 return \"ldbx %1(%2),%0\";
1973 [(set_attr "type" "load")
1974 (set_attr "length" "4")])
1976 ; Indexed byte load with zero extension to SImode or HImode.
1978 [(set (match_operand:SI 0 "register_operand" "=r")
1979 (zero_extend:SI (mem:QI
1981 (match_operand:SI 1 "basereg_operand" "r")
1982 (match_operand:SI 2 "register_operand" "r")))))]
1983 "! TARGET_DISABLE_INDEXING"
1986 /* Reload can create backwards (relative to cse) unscaled index
1987 address modes when eliminating registers and possibly for
1988 pseudos that don't get hard registers. Deal with it. */
1989 if (operands[2] == hard_frame_pointer_rtx
1990 || operands[2] == stack_pointer_rtx)
1991 return \"ldbx %1(%2),%0\";
1993 return \"ldbx %2(%1),%0\";
1995 [(set_attr "type" "load")
1996 (set_attr "length" "4")])
;; Zero-extend to SI, mirror pattern.
1999 [(set (match_operand:SI 0 "register_operand" "=r")
2000 (zero_extend:SI (mem:QI
2002 (match_operand:SI 1 "register_operand" "r")
2003 (match_operand:SI 2 "basereg_operand" "r")))))]
2004 "! TARGET_DISABLE_INDEXING"
2007 /* Reload can create backwards (relative to cse) unscaled index
2008 address modes when eliminating registers and possibly for
2009 pseudos that don't get hard registers. Deal with it. */
2010 if (operands[1] == hard_frame_pointer_rtx
2011 || operands[1] == stack_pointer_rtx)
2012 return \"ldbx %2(%1),%0\";
2014 return \"ldbx %1(%2),%0\";
2016 [(set_attr "type" "load")
2017 (set_attr "length" "4")])
;; Zero-extend to HI, both operand orders.
2020 [(set (match_operand:HI 0 "register_operand" "=r")
2021 (zero_extend:HI (mem:QI
2023 (match_operand:SI 1 "basereg_operand" "r")
2024 (match_operand:SI 2 "register_operand" "r")))))]
2025 "! TARGET_DISABLE_INDEXING"
2028 /* Reload can create backwards (relative to cse) unscaled index
2029 address modes when eliminating registers and possibly for
2030 pseudos that don't get hard registers. Deal with it. */
2031 if (operands[2] == hard_frame_pointer_rtx
2032 || operands[2] == stack_pointer_rtx)
2033 return \"ldbx %1(%2),%0\";
2035 return \"ldbx %2(%1),%0\";
2037 [(set_attr "type" "load")
2038 (set_attr "length" "4")])
2041 [(set (match_operand:HI 0 "register_operand" "=r")
2042 (zero_extend:HI (mem:QI
2044 (match_operand:SI 1 "register_operand" "r")
2045 (match_operand:SI 2 "basereg_operand" "r")))))]
2046 "! TARGET_DISABLE_INDEXING"
2049 /* Reload can create backwards (relative to cse) unscaled index
2050 address modes when eliminating registers and possibly for
2051 pseudos that don't get hard registers. Deal with it. */
2052 if (operands[1] == hard_frame_pointer_rtx
2053 || operands[1] == stack_pointer_rtx)
2054 return \"ldbx %2(%1),%0\";
2056 return \"ldbx %1(%2),%0\";
2058 [(set_attr "type" "load")
2059 (set_attr "length" "4")])
;; Byte load with base modification (5-bit displacement).
2062 [(set (match_operand:QI 0 "register_operand" "=r")
2063 (mem:QI (plus:SI (match_operand:SI 1 "register_operand" "+r")
2064 (match_operand:SI 2 "int5_operand" "L"))))
2065 (set (match_dup 1) (plus:SI (match_dup 1) (match_dup 2)))]
2068 [(set_attr "type" "load")
2069 (set_attr "length" "4")])
2071 ; Now the same thing with zero extensions.
2073 [(set (match_operand:SI 0 "register_operand" "=r")
2074 (zero_extend:SI (mem:QI (plus:SI
2075 (match_operand:SI 1 "register_operand" "+r")
2076 (match_operand:SI 2 "int5_operand" "L")))))
2077 (set (match_dup 1) (plus:SI (match_dup 1) (match_dup 2)))]
2080 [(set_attr "type" "load")
2081 (set_attr "length" "4")])
2084 [(set (match_operand:HI 0 "register_operand" "=r")
2085 (zero_extend:HI (mem:QI (plus:SI
2086 (match_operand:SI 1 "register_operand" "+r")
2087 (match_operand:SI 2 "int5_operand" "L")))))
2088 (set (match_dup 1) (plus:SI (match_dup 1) (match_dup 2)))]
2091 [(set_attr "type" "load")
2092 (set_attr "length" "4")])
;; Byte store with base modification (stbs,mb).
2095 [(set (mem:QI (plus:SI (match_operand:SI 0 "register_operand" "+r")
2096 (match_operand:SI 1 "int5_operand" "L")))
2097 (match_operand:QI 2 "reg_or_0_operand" "rM"))
2099 (plus:SI (match_dup 0) (match_dup 1)))]
2101 "stbs,mb %r2,%1(%0)"
2102 [(set_attr "type" "store")
2103 (set_attr "length" "4")])
2105 ;; The definition of this insn does not really explain what it does,
2106 ;; but it should suffice
2107 ;; that anything generated as this insn will be recognized as one
2108 ;; and that it will not successfully combine with anything.
;; Block-move expander.  Decides between calling the library memcpy
;; (variable or very large/small sizes) and emitting the inline
;; movstrsi_internal pattern; the heuristics are spelled out in the
;; comment below.  Operands 4-8 are scratch/temporary registers and the
;; raw address registers for the block move pattern.
2109 (define_expand "movstrsi"
2110 [(parallel [(set (match_operand:BLK 0 "" "")
2111 (match_operand:BLK 1 "" ""))
2112 (clobber (match_dup 7))
2113 (clobber (match_dup 8))
2114 (clobber (match_dup 4))
2115 (clobber (match_dup 5))
2116 (clobber (match_dup 6))
2117 (use (match_operand:SI 2 "arith_operand" ""))
2118 (use (match_operand:SI 3 "const_int_operand" ""))])]
2124 /* HP provides very fast block move library routine for the PA;
2125 this routine includes:
2127 4x4 byte at a time block moves,
2128 1x4 byte at a time with alignment checked at runtime with
2129 attempts to align the source and destination as needed
2132 With that in mind, here's the heuristics to try and guess when
2133 the inlined block move will be better than the library block
2136 If the size isn't constant, then always use the library routines.
2138 If the size is large in respect to the known alignment, then use
2139 the library routines.
2141 If the size is small in respect to the known alignment, then open
2142 code the copy (since that will lead to better scheduling).
2144 Else use the block move pattern. */
2146 /* Undetermined size, use the library routine. */
2147 if (GET_CODE (operands[2]) != CONST_INT)
2150 size = INTVAL (operands[2]);
2151 align = INTVAL (operands[3]);
2152 align = align > 4 ? 4 : align;
2154 /* If size/alignment > 8 (eg size is large in respect to alignment),
2155 then use the library routines. */
2156 if (size / align > 16)
2159 /* This does happen, but not often enough to worry much about. */
2160 if (size / align < MOVE_RATIO)
2163 /* Fall through means we're going to use our block move pattern. */
2165 = change_address (operands[0], VOIDmode,
2166 copy_to_mode_reg (SImode, XEXP (operands[0], 0)))
2168 = change_address (operands[1], VOIDmode,
2169 copy_to_mode_reg (SImode, XEXP (operands[1], 0)));
2170 operands[4] = gen_reg_rtx (SImode);
2171 operands[5] = gen_reg_rtx (SImode);
2172 operands[6] = gen_reg_rtx (SImode);
2173 operands[7] = XEXP (operands[0], 0);
2174 operands[8] = XEXP (operands[1], 0);
2177 ;; The operand constraints are written like this to support both compile-time
2178 ;; and run-time determined byte count. If the count is run-time determined,
2179 ;; the register with the byte count is clobbered by the copying code, and
2180 ;; therefore it is forced to operand 2. If the count is compile-time
2181 ;; determined, we need two scratch registers for the unrolled code.
2182 (define_insn "movstrsi_internal"
2183 [(set (mem:BLK (match_operand:SI 0 "register_operand" "+r,r"))
2184 (mem:BLK (match_operand:SI 1 "register_operand" "+r,r")))
2185 (clobber (match_dup 0))
2186 (clobber (match_dup 1))
2187 (clobber (match_operand:SI 2 "register_operand" "=r,r")) ;loop cnt/tmp
2188 (clobber (match_operand:SI 3 "register_operand" "=&r,&r")) ;item tmp
2189 (clobber (match_operand:SI 6 "register_operand" "=&r,&r")) ;item tmp2
2190 (use (match_operand:SI 4 "arith_operand" "J,2")) ;byte count
2191 (use (match_operand:SI 5 "const_int_operand" "n,n"))] ;alignment
2193 "* return output_block_move (operands, !which_alternative);"
2194 [(set_attr "type" "multi,multi")])
2196 ;; Floating point move insns
2198 ;; This pattern forces (set (reg:DF ...) (const_double ...))
2199 ;; to be reloaded by putting the constant into memory when
2200 ;; reg is a floating point register.
2202 ;; For integer registers we use ldil;ldo to set the appropriate
2205 ;; This must come before the movdf pattern, and it must be present
2206 ;; to handle obscure reloading cases.
2208 [(set (match_operand:DF 0 "register_operand" "=?r,f")
2209 (match_operand:DF 1 "" "?F,m"))]
2210 "GET_CODE (operands[1]) == CONST_DOUBLE
2211 && operands[1] != CONST0_RTX (DFmode)
2212 && ! TARGET_SOFT_FLOAT"
2213 "* return (which_alternative == 0 ? output_move_double (operands)
2214 : \"fldd%F1 %1,%0\");"
2215 [(set_attr "type" "move,fpload")
2216 (set_attr "length" "16,4")])
;; movdf expander: defer to emit_move_sequence.
2218 (define_expand "movdf"
2219 [(set (match_operand:DF 0 "general_operand" "")
2220 (match_operand:DF 1 "general_operand" ""))]
2224 if (emit_move_sequence (operands, DFmode, 0))
2228 ;; Reloading an SImode or DImode value requires a scratch register if
2229 ;; going in to or out of floating point registers.
;; reload_indf/reload_outdf: DFmode analogues of reload_insi/reload_outsi
;; (GR scratch in operand 2; clobber suppressed when handled by hand).
2231 (define_expand "reload_indf"
2232 [(set (match_operand:DF 0 "register_operand" "=Z")
2233 (match_operand:DF 1 "non_hard_reg_operand" ""))
2234 (clobber (match_operand:DF 2 "register_operand" "=&r"))]
2238 if (emit_move_sequence (operands, DFmode, operands[2]))
2241 /* We don't want the clobber emitted, so handle this ourselves. */
2242 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2246 (define_expand "reload_outdf"
2247 [(set (match_operand:DF 0 "non_hard_reg_operand" "")
2248 (match_operand:DF 1 "register_operand" "Z"))
2249 (clobber (match_operand:DF 2 "register_operand" "=&r"))]
2253 if (emit_move_sequence (operands, DFmode, operands[2]))
2256 /* We don't want the clobber emitted, so handle this ourselves. */
2257 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
;; Main movdf insn, hard-float variant: FP moves go through
;; output_fp_move_double, GR/memory doubles through output_move_double.
2262 [(set (match_operand:DF 0 "reg_or_nonsymb_mem_operand"
2263 "=f,*r,RQ,?o,?Q,f,*r,*r")
2264 (match_operand:DF 1 "reg_or_0_or_nonsymb_mem_operand"
2265 "fG,*rG,f,*r,*r,RQ,o,RQ"))]
2266 "(register_operand (operands[0], DFmode)
2267 || reg_or_0_operand (operands[1], DFmode))
2268 && ! (GET_CODE (operands[1]) == CONST_DOUBLE
2269 && GET_CODE (operands[0]) == MEM)
2270 && ! TARGET_SOFT_FLOAT"
2273 if (FP_REG_P (operands[0]) || FP_REG_P (operands[1])
2274 || operands[1] == CONST0_RTX (DFmode))
2275 return output_fp_move_double (operands);
2276 return output_move_double (operands);
2278 [(set_attr "type" "fpalu,move,fpstore,store,store,fpload,load,load")
2279 (set_attr "length" "4,8,4,8,16,4,8,16")])
;; movdf insn, soft-float variant (GRs and memory only).
2282 [(set (match_operand:DF 0 "reg_or_nonsymb_mem_operand"
2284 (match_operand:DF 1 "reg_or_0_or_nonsymb_mem_operand"
2286 "(register_operand (operands[0], DFmode)
2287 || reg_or_0_operand (operands[1], DFmode))
2288 && TARGET_SOFT_FLOAT"
2291 return output_move_double (operands);
2293 [(set_attr "type" "move,store,store,load,load")
2294 (set_attr "length" "8,8,16,8,16")])
;; Indexed DF load (flddx); two patterns so either operand may be the
;; base register (see the ldwx patterns for the fp/sp rationale).
2297 [(set (match_operand:DF 0 "register_operand" "=fx")
2298 (mem:DF (plus:SI (match_operand:SI 1 "basereg_operand" "r")
2299 (match_operand:SI 2 "register_operand" "r"))))]
2300 "! TARGET_DISABLE_INDEXING && ! TARGET_SOFT_FLOAT"
2303 /* Reload can create backwards (relative to cse) unscaled index
2304 address modes when eliminating registers and possibly for
2305 pseudos that don't get hard registers. Deal with it. */
2306 if (operands[2] == hard_frame_pointer_rtx
2307 || operands[2] == stack_pointer_rtx)
2308 return \"flddx %1(%2),%0\";
2310 return \"flddx %2(%1),%0\";
2312 [(set_attr "type" "fpload")
2313 (set_attr "length" "4")])
;; Mirror pattern: base register is operand 2.
2316 [(set (match_operand:DF 0 "register_operand" "=fx")
2317 (mem:DF (plus:SI (match_operand:SI 1 "register_operand" "r")
2318 (match_operand:SI 2 "basereg_operand" "r"))))]
2319 "! TARGET_DISABLE_INDEXING && ! TARGET_SOFT_FLOAT"
2322 /* Reload can create backwards (relative to cse) unscaled index
2323 address modes when eliminating registers and possibly for
2324 pseudos that don't get hard registers. Deal with it. */
2325 if (operands[1] == hard_frame_pointer_rtx
2326 || operands[1] == stack_pointer_rtx)
2327 return \"flddx %2(%1),%0\";
2329 return \"flddx %1(%2),%0\";
2331 [(set_attr "type" "fpload")
2332 (set_attr "length" "4")])
2335 [(set (mem:DF (plus:SI (match_operand:SI 1 "basereg_operand" "r")
2336 (match_operand:SI 2 "register_operand" "r")))
2337 (match_operand:DF 0 "register_operand" "fx"))]
2338 "! TARGET_DISABLE_INDEXING && ! TARGET_SOFT_FLOAT"
2341 /* Reload can create backwards (relative to cse) unscaled index
2342 address modes when eliminating registers and possibly for
2343 pseudos that don't get hard registers. Deal with it. */
2344 if (operands[2] == hard_frame_pointer_rtx
2345 || operands[2] == stack_pointer_rtx)
2346 return \"fstdx %0,%1(%2)\";
2348 return \"fstdx %0,%2(%1)\";
2350 [(set_attr "type" "fpstore")
2351 (set_attr "length" "4")])
2354 [(set (mem:DF (plus:SI (match_operand:SI 1 "register_operand" "r")
2355 (match_operand:SI 2 "basereg_operand" "r")))
2356 (match_operand:DF 0 "register_operand" "fx"))]
2357 "! TARGET_DISABLE_INDEXING && ! TARGET_SOFT_FLOAT"
2360 /* Reload can create backwards (relative to cse) unscaled index
2361 address modes when eliminating registers and possibly for
2362 pseudos that don't get hard registers. Deal with it. */
2363 if (operands[1] == hard_frame_pointer_rtx
2364 || operands[1] == stack_pointer_rtx)
2365 return \"fstdx %0,%2(%1)\";
2367 return \"fstdx %0,%1(%2)\";
2369 [(set_attr "type" "fpstore")
2370 (set_attr "length" "4")])
2372 (define_expand "movdi"
2373 [(set (match_operand:DI 0 "reg_or_nonsymb_mem_operand" "")
2374 (match_operand:DI 1 "general_operand" ""))]
2378 if (emit_move_sequence (operands, DImode, 0))
2382 (define_expand "reload_indi"
2383 [(set (match_operand:DI 0 "register_operand" "=f")
2384 (match_operand:DI 1 "non_hard_reg_operand" ""))
2385 (clobber (match_operand:SI 2 "register_operand" "=&r"))]
2389 if (emit_move_sequence (operands, DImode, operands[2]))
2392 /* We don't want the clobber emitted, so handle this ourselves. */
2393 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2397 (define_expand "reload_outdi"
2398 [(set (match_operand:DI 0 "general_operand" "")
2399 (match_operand:DI 1 "register_operand" "f"))
2400 (clobber (match_operand:SI 2 "register_operand" "=&r"))]
2404 if (emit_move_sequence (operands, DImode, operands[2]))
2407 /* We don't want the clobber emitted, so handle this ourselves. */
2408 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2413 [(set (match_operand:DI 0 "register_operand" "=r")
2414 (high:DI (match_operand 1 "" "")))]
2418 rtx op0 = operands[0];
2419 rtx op1 = operands[1];
2421 if (GET_CODE (op1) == CONST_INT)
2423 operands[0] = operand_subword (op0, 1, 0, DImode);
2424 output_asm_insn (\"ldil L'%1,%0\", operands);
2426 operands[0] = operand_subword (op0, 0, 0, DImode);
2427 if (INTVAL (op1) < 0)
2428 output_asm_insn (\"ldi -1,%0\", operands);
2430 output_asm_insn (\"ldi 0,%0\", operands);
2433 else if (GET_CODE (op1) == CONST_DOUBLE)
2435 operands[0] = operand_subword (op0, 1, 0, DImode);
2436 operands[1] = GEN_INT (CONST_DOUBLE_LOW (op1));
2437 output_asm_insn (\"ldil L'%1,%0\", operands);
2439 operands[0] = operand_subword (op0, 0, 0, DImode);
2440 operands[1] = GEN_INT (CONST_DOUBLE_HIGH (op1));
2441 output_asm_insn (singlemove_string (operands), operands);
2447 [(set_attr "type" "move")
2448 (set_attr "length" "8")])
2453 [(set (match_operand:DI 0 "reg_or_nonsymb_mem_operand"
2454 "=r,o,Q,r,r,r,f,f,*TR")
2455 (match_operand:DI 1 "general_operand"
2456 "rM,r,r,o*R,Q,i,fM,*TR,f"))]
2457 "(register_operand (operands[0], DImode)
2458 || reg_or_0_operand (operands[1], DImode))
2459 && ! TARGET_SOFT_FLOAT"
2462 if (FP_REG_P (operands[0]) || FP_REG_P (operands[1])
2463 || (operands[1] == CONST0_RTX (DImode)))
2464 return output_fp_move_double (operands);
2465 return output_move_double (operands);
2467 [(set_attr "type" "move,store,store,load,load,multi,fpalu,fpload,fpstore")
2468 (set_attr "length" "8,8,16,8,16,16,4,4,4")])
2471 [(set (match_operand:DI 0 "reg_or_nonsymb_mem_operand"
2473 (match_operand:DI 1 "general_operand"
2475 "(register_operand (operands[0], DImode)
2476 || reg_or_0_operand (operands[1], DImode))
2477 && TARGET_SOFT_FLOAT"
2480 return output_move_double (operands);
2482 [(set_attr "type" "move,store,store,load,load,multi")
2483 (set_attr "length" "8,8,16,8,16,16")])
2486 [(set (match_operand:DI 0 "register_operand" "=r,&r")
2487 (lo_sum:DI (match_operand:DI 1 "register_operand" "0,r")
2488 (match_operand:DI 2 "immediate_operand" "i,i")))]
2492 /* Don't output a 64 bit constant, since we can't trust the assembler to
2493 handle it correctly. */
2494 if (GET_CODE (operands[2]) == CONST_DOUBLE)
2495 operands[2] = GEN_INT (CONST_DOUBLE_LOW (operands[2]));
2496 if (which_alternative == 1)
2497 output_asm_insn (\"copy %1,%0\", operands);
2498 return \"ldo R'%G2(%R1),%R0\";
2500 [(set_attr "type" "move,move")
2501 (set_attr "length" "4,8")])
2503 ;; This pattern forces (set (reg:SF ...) (const_double ...))
2504 ;; to be reloaded by putting the constant into memory when
2505 ;; reg is a floating point register.
2507 ;; For integer registers we use ldil;ldo to set the appropriate
2510 ;; This must come before the movsf pattern, and it must be present
2511 ;; to handle obscure reloading cases.
2513 [(set (match_operand:SF 0 "register_operand" "=?r,f")
2514 (match_operand:SF 1 "" "?F,m"))]
2515 "GET_CODE (operands[1]) == CONST_DOUBLE
2516 && operands[1] != CONST0_RTX (SFmode)
2517 && ! TARGET_SOFT_FLOAT"
2518 "* return (which_alternative == 0 ? singlemove_string (operands)
2519 : \" fldw%F1 %1,%0\");"
2520 [(set_attr "type" "move,fpload")
2521 (set_attr "length" "8,4")])
2523 (define_expand "movsf"
2524 [(set (match_operand:SF 0 "general_operand" "")
2525 (match_operand:SF 1 "general_operand" ""))]
2529 if (emit_move_sequence (operands, SFmode, 0))
2533 ;; Reloading an SImode or DImode value requires a scratch register if
2534 ;; going in to or out of float point registers.
2536 (define_expand "reload_insf"
2537 [(set (match_operand:SF 0 "register_operand" "=Z")
2538 (match_operand:SF 1 "non_hard_reg_operand" ""))
2539 (clobber (match_operand:SF 2 "register_operand" "=&r"))]
2543 if (emit_move_sequence (operands, SFmode, operands[2]))
2546 /* We don't want the clobber emitted, so handle this ourselves. */
2547 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2551 (define_expand "reload_outsf"
2552 [(set (match_operand:SF 0 "non_hard_reg_operand" "")
2553 (match_operand:SF 1 "register_operand" "Z"))
2554 (clobber (match_operand:SF 2 "register_operand" "=&r"))]
2558 if (emit_move_sequence (operands, SFmode, operands[2]))
2561 /* We don't want the clobber emitted, so handle this ourselves. */
2562 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2567 [(set (match_operand:SF 0 "reg_or_nonsymb_mem_operand"
2569 (match_operand:SF 1 "reg_or_0_or_nonsymb_mem_operand"
2570 "fG,rG,RQ,RQ,f,rG"))]
2571 "(register_operand (operands[0], SFmode)
2572 || reg_or_0_operand (operands[1], SFmode))
2573 && ! TARGET_SOFT_FLOAT"
2581 [(set_attr "type" "fpalu,move,fpload,load,fpstore,store")
2582 (set_attr "pa_combine_type" "addmove")
2583 (set_attr "length" "4,4,4,4,4,4")])
2586 [(set (match_operand:SF 0 "reg_or_nonsymb_mem_operand"
2588 (match_operand:SF 1 "reg_or_0_or_nonsymb_mem_operand"
2590 "(register_operand (operands[0], SFmode)
2591 || reg_or_0_operand (operands[1], SFmode))
2592 && TARGET_SOFT_FLOAT"
2597 [(set_attr "type" "move,load,store")
2598 (set_attr "pa_combine_type" "addmove")
2599 (set_attr "length" "4,4,4")])
2602 [(set (match_operand:SF 0 "register_operand" "=fx")
2603 (mem:SF (plus:SI (match_operand:SI 1 "basereg_operand" "r")
2604 (match_operand:SI 2 "register_operand" "r"))))]
2605 "! TARGET_DISABLE_INDEXING && ! TARGET_SOFT_FLOAT"
2608 /* Reload can create backwards (relative to cse) unscaled index
2609 address modes when eliminating registers and possibly for
2610 pseudos that don't get hard registers. Deal with it. */
2611 if (operands[2] == hard_frame_pointer_rtx
2612 || operands[2] == stack_pointer_rtx)
2613 return \"fldwx %1(%2),%0\";
2615 return \"fldwx %2(%1),%0\";
2617 [(set_attr "type" "fpload")
2618 (set_attr "length" "4")])
2621 [(set (match_operand:SF 0 "register_operand" "=fx")
2622 (mem:SF (plus:SI (match_operand:SI 1 "register_operand" "r")
2623 (match_operand:SI 2 "basereg_operand" "r"))))]
2624 "! TARGET_DISABLE_INDEXING && ! TARGET_SOFT_FLOAT"
2627 /* Reload can create backwards (relative to cse) unscaled index
2628 address modes when eliminating registers and possibly for
2629 pseudos that don't get hard registers. Deal with it. */
2630 if (operands[1] == hard_frame_pointer_rtx
2631 || operands[1] == stack_pointer_rtx)
2632 return \"fldwx %2(%1),%0\";
2634 return \"fldwx %1(%2),%0\";
2636 [(set_attr "type" "fpload")
2637 (set_attr "length" "4")])
2640 [(set (mem:SF (plus:SI (match_operand:SI 1 "basereg_operand" "r")
2641 (match_operand:SI 2 "register_operand" "r")))
2642 (match_operand:SF 0 "register_operand" "fx"))]
2643 "! TARGET_DISABLE_INDEXING && ! TARGET_SOFT_FLOAT"
2646 /* Reload can create backwards (relative to cse) unscaled index
2647 address modes when eliminating registers and possibly for
2648 pseudos that don't get hard registers. Deal with it. */
2649 if (operands[2] == hard_frame_pointer_rtx
2650 || operands[2] == stack_pointer_rtx)
2651 return \"fstwx %0,%1(%2)\";
2653 return \"fstwx %0,%2(%1)\";
2655 [(set_attr "type" "fpstore")
2656 (set_attr "length" "4")])
2659 [(set (mem:SF (plus:SI (match_operand:SI 1 "register_operand" "r")
2660 (match_operand:SI 2 "basereg_operand" "r")))
2661 (match_operand:SF 0 "register_operand" "fx"))]
2662 "! TARGET_DISABLE_INDEXING && ! TARGET_SOFT_FLOAT"
2665 /* Reload can create backwards (relative to cse) unscaled index
2666 address modes when eliminating registers and possibly for
2667 pseudos that don't get hard registers. Deal with it. */
2668 if (operands[1] == hard_frame_pointer_rtx
2669 || operands[1] == stack_pointer_rtx)
2670 return \"fstwx %0,%2(%1)\";
2672 return \"fstwx %0,%1(%2)\";
2674 [(set_attr "type" "fpstore")
2675 (set_attr "length" "4")])
2678 ;;- zero extension instructions
2679 ;; We have define_expand for zero extension patterns to make sure the
2680 ;; operands get loaded into registers. The define_insns accept
2681 ;; memory operands. This gives us better overall code than just
2682 ;; having a pattern that does or does not accept memory operands.
2684 (define_expand "zero_extendhisi2"
2685 [(set (match_operand:SI 0 "register_operand" "")
2687 (match_operand:HI 1 "register_operand" "")))]
2692 [(set (match_operand:SI 0 "register_operand" "=r,r")
2694 (match_operand:HI 1 "move_operand" "r,RQ")))]
2695 "GET_CODE (operands[1]) != CONST_INT"
2699 [(set_attr "type" "shift,load")
2700 (set_attr "length" "4,4")])
2702 (define_expand "zero_extendqihi2"
2703 [(set (match_operand:HI 0 "register_operand" "")
2705 (match_operand:QI 1 "register_operand" "")))]
2710 [(set (match_operand:HI 0 "register_operand" "=r,r")
2712 (match_operand:QI 1 "move_operand" "r,RQ")))]
2713 "GET_CODE (operands[1]) != CONST_INT"
2717 [(set_attr "type" "shift,load")
2718 (set_attr "length" "4,4")])
2720 (define_expand "zero_extendqisi2"
2721 [(set (match_operand:SI 0 "register_operand" "")
2723 (match_operand:QI 1 "register_operand" "")))]
2728 [(set (match_operand:SI 0 "register_operand" "=r,r")
2730 (match_operand:QI 1 "move_operand" "r,RQ")))]
2731 "GET_CODE (operands[1]) != CONST_INT"
2735 [(set_attr "type" "shift,load")
2736 (set_attr "length" "4,4")])
2738 ;;- sign extension instructions
2740 (define_insn "extendhisi2"
2741 [(set (match_operand:SI 0 "register_operand" "=r")
2742 (sign_extend:SI (match_operand:HI 1 "register_operand" "r")))]
2745 [(set_attr "type" "shift")
2746 (set_attr "length" "4")])
2748 (define_insn "extendqihi2"
2749 [(set (match_operand:HI 0 "register_operand" "=r")
2750 (sign_extend:HI (match_operand:QI 1 "register_operand" "r")))]
2753 [(set_attr "type" "shift")
2754 (set_attr "length" "4")])
2756 (define_insn "extendqisi2"
2757 [(set (match_operand:SI 0 "register_operand" "=r")
2758 (sign_extend:SI (match_operand:QI 1 "register_operand" "r")))]
2761 [(set_attr "type" "shift")
2762 (set_attr "length" "4")])
2764 ;; Conversions between float and double.
2766 (define_insn "extendsfdf2"
2767 [(set (match_operand:DF 0 "register_operand" "=f")
2769 (match_operand:SF 1 "register_operand" "f")))]
2770 "! TARGET_SOFT_FLOAT"
2771 "fcnvff,sgl,dbl %1,%0"
2772 [(set_attr "type" "fpalu")
2773 (set_attr "length" "4")])
2775 (define_insn "truncdfsf2"
2776 [(set (match_operand:SF 0 "register_operand" "=f")
2778 (match_operand:DF 1 "register_operand" "f")))]
2779 "! TARGET_SOFT_FLOAT"
2780 "fcnvff,dbl,sgl %1,%0"
2781 [(set_attr "type" "fpalu")
2782 (set_attr "length" "4")])
2784 ;; Conversion between fixed point and floating point.
2785 ;; Note that among the fix-to-float insns
2786 ;; the ones that start with SImode come first.
2787 ;; That is so that an operand that is a CONST_INT
2788 ;; (and therefore lacks a specific machine mode).
2789 ;; will be recognized as SImode (which is always valid)
2790 ;; rather than as QImode or HImode.
2792 ;; This pattern forces (set (reg:SF ...) (float:SF (const_int ...)))
2793 ;; to be reloaded by putting the constant into memory.
2794 ;; It must come before the more general floatsisf2 pattern.
2796 [(set (match_operand:SF 0 "register_operand" "=f")
2797 (float:SF (match_operand:SI 1 "const_int_operand" "m")))]
2798 "! TARGET_SOFT_FLOAT"
2799 "fldw%F1 %1,%0\;fcnvxf,sgl,sgl %0,%0"
2800 [(set_attr "type" "fpalu")
2801 (set_attr "length" "8")])
2803 (define_insn "floatsisf2"
2804 [(set (match_operand:SF 0 "register_operand" "=f")
2805 (float:SF (match_operand:SI 1 "register_operand" "f")))]
2806 "! TARGET_SOFT_FLOAT"
2807 "fcnvxf,sgl,sgl %1,%0"
2808 [(set_attr "type" "fpalu")
2809 (set_attr "length" "4")])
2811 ;; This pattern forces (set (reg:DF ...) (float:DF (const_int ...)))
2812 ;; to be reloaded by putting the constant into memory.
2813 ;; It must come before the more general floatsidf2 pattern.
2815 [(set (match_operand:DF 0 "register_operand" "=f")
2816 (float:DF (match_operand:SI 1 "const_int_operand" "m")))]
2817 "! TARGET_SOFT_FLOAT"
2818 "fldw%F1 %1,%0\;fcnvxf,sgl,dbl %0,%0"
2819 [(set_attr "type" "fpalu")
2820 (set_attr "length" "8")])
2822 (define_insn "floatsidf2"
2823 [(set (match_operand:DF 0 "register_operand" "=f")
2824 (float:DF (match_operand:SI 1 "register_operand" "f")))]
2825 "! TARGET_SOFT_FLOAT"
2826 "fcnvxf,sgl,dbl %1,%0"
2827 [(set_attr "type" "fpalu")
2828 (set_attr "length" "4")])
2830 (define_expand "floatunssisf2"
2831 [(set (subreg:SI (match_dup 2) 1)
2832 (match_operand:SI 1 "register_operand" ""))
2833 (set (subreg:SI (match_dup 2) 0)
2835 (set (match_operand:SF 0 "register_operand" "")
2836 (float:SF (match_dup 2)))]
2837 "TARGET_SNAKE && ! TARGET_SOFT_FLOAT"
2838 "operands[2] = gen_reg_rtx (DImode);")
2840 (define_expand "floatunssidf2"
2841 [(set (subreg:SI (match_dup 2) 1)
2842 (match_operand:SI 1 "register_operand" ""))
2843 (set (subreg:SI (match_dup 2) 0)
2845 (set (match_operand:DF 0 "register_operand" "")
2846 (float:DF (match_dup 2)))]
2847 "TARGET_SNAKE && ! TARGET_SOFT_FLOAT"
2848 "operands[2] = gen_reg_rtx (DImode);")
2850 (define_insn "floatdisf2"
2851 [(set (match_operand:SF 0 "register_operand" "=f")
2852 (float:SF (match_operand:DI 1 "register_operand" "f")))]
2853 "TARGET_SNAKE && ! TARGET_SOFT_FLOAT"
2854 "fcnvxf,dbl,sgl %1,%0"
2855 [(set_attr "type" "fpalu")
2856 (set_attr "length" "4")])
2858 (define_insn "floatdidf2"
2859 [(set (match_operand:DF 0 "register_operand" "=f")
2860 (float:DF (match_operand:DI 1 "register_operand" "f")))]
2861 "TARGET_SNAKE && ! TARGET_SOFT_FLOAT"
2862 "fcnvxf,dbl,dbl %1,%0"
2863 [(set_attr "type" "fpalu")
2864 (set_attr "length" "4")])
2866 ;; Convert a float to an actual integer.
2867 ;; Truncation is performed as part of the conversion.
2869 (define_insn "fix_truncsfsi2"
2870 [(set (match_operand:SI 0 "register_operand" "=f")
2871 (fix:SI (fix:SF (match_operand:SF 1 "register_operand" "f"))))]
2872 "! TARGET_SOFT_FLOAT"
2873 "fcnvfxt,sgl,sgl %1,%0"
2874 [(set_attr "type" "fpalu")
2875 (set_attr "length" "4")])
2877 (define_insn "fix_truncdfsi2"
2878 [(set (match_operand:SI 0 "register_operand" "=f")
2879 (fix:SI (fix:DF (match_operand:DF 1 "register_operand" "f"))))]
2880 "! TARGET_SOFT_FLOAT"
2881 "fcnvfxt,dbl,sgl %1,%0"
2882 [(set_attr "type" "fpalu")
2883 (set_attr "length" "4")])
2885 (define_insn "fix_truncsfdi2"
2886 [(set (match_operand:DI 0 "register_operand" "=f")
2887 (fix:DI (fix:SF (match_operand:SF 1 "register_operand" "f"))))]
2888 "TARGET_SNAKE && ! TARGET_SOFT_FLOAT"
2889 "fcnvfxt,sgl,dbl %1,%0"
2890 [(set_attr "type" "fpalu")
2891 (set_attr "length" "4")])
2893 (define_insn "fix_truncdfdi2"
2894 [(set (match_operand:DI 0 "register_operand" "=f")
2895 (fix:DI (fix:DF (match_operand:DF 1 "register_operand" "f"))))]
2896 "TARGET_SNAKE && ! TARGET_SOFT_FLOAT"
2897 "fcnvfxt,dbl,dbl %1,%0"
2898 [(set_attr "type" "fpalu")
2899 (set_attr "length" "4")])
2901 ;;- arithmetic instructions
2903 (define_insn "adddi3"
2904 [(set (match_operand:DI 0 "register_operand" "=r")
2905 (plus:DI (match_operand:DI 1 "register_operand" "%r")
2906 (match_operand:DI 2 "arith11_operand" "rI")))]
2910 if (GET_CODE (operands[2]) == CONST_INT)
2912 if (INTVAL (operands[2]) >= 0)
2913 return \"addi %2,%R1,%R0\;addc %1,0,%0\";
2915 return \"addi %2,%R1,%R0\;subb %1,0,%0\";
2918 return \"add %R2,%R1,%R0\;addc %2,%1,%0\";
2920 [(set_attr "type" "binary")
2921 (set_attr "length" "8")])
2924 [(set (match_operand:SI 0 "register_operand" "=r")
2925 (plus:SI (not:SI (match_operand:SI 1 "register_operand" "r"))
2926 (match_operand:SI 2 "register_operand" "r")))]
2929 [(set_attr "type" "binary")
2930 (set_attr "length" "4")])
2932 ;; define_splits to optimize cases of adding a constant integer
2933 ;; to a register when the constant does not fit in 14 bits. */
2935 [(set (match_operand:SI 0 "register_operand" "")
2936 (plus:SI (match_operand:SI 1 "register_operand" "")
2937 (match_operand:SI 2 "const_int_operand" "")))
2938 (clobber (match_operand:SI 4 "register_operand" ""))]
2939 "! cint_ok_for_move (INTVAL (operands[2]))
2940 && VAL_14_BITS_P (INTVAL (operands[2]) >> 1)"
2941 [(set (match_dup 4) (plus:SI (match_dup 1) (match_dup 2)))
2942 (set (match_dup 0) (plus:SI (match_dup 4) (match_dup 3)))]
2945 int val = INTVAL (operands[2]);
2946 int low = (val < 0) ? -0x2000 : 0x1fff;
2947 int rest = val - low;
2949 operands[2] = GEN_INT (rest);
2950 operands[3] = GEN_INT (low);
2954 [(set (match_operand:SI 0 "register_operand" "")
2955 (plus:SI (match_operand:SI 1 "register_operand" "")
2956 (match_operand:SI 2 "const_int_operand" "")))
2957 (clobber (match_operand:SI 4 "register_operand" ""))]
2958 "! cint_ok_for_move (INTVAL (operands[2]))"
2959 [(set (match_dup 4) (match_dup 2))
2960 (set (match_dup 0) (plus:SI (mult:SI (match_dup 4) (match_dup 3))
2964 HOST_WIDE_INT intval = INTVAL (operands[2]);
2966 /* Try dividing the constant by 2, then 4, and finally 8 to see
2967 if we can get a constant which can be loaded into a register
2968 in a single instruction (cint_ok_for_move).
2970 If that fails, try to negate the constant and subtract it
2971 from our input operand. */
2972 if (intval % 2 == 0 && cint_ok_for_move (intval / 2))
2974 operands[2] = GEN_INT (intval / 2);
2975 operands[3] = GEN_INT (2);
2977 else if (intval % 4 == 0 && cint_ok_for_move (intval / 4))
2979 operands[2] = GEN_INT (intval / 4);
2980 operands[3] = GEN_INT (4);
2982 else if (intval % 8 == 0 && cint_ok_for_move (intval / 8))
2984 operands[2] = GEN_INT (intval / 8);
2985 operands[3] = GEN_INT (8);
2987 else if (cint_ok_for_move (-intval))
2989 emit_insn (gen_rtx_SET (VOIDmode, operands[4], GEN_INT (-intval)));
2990 emit_insn (gen_subsi3 (operands[0], operands[1], operands[4]));
2997 (define_insn "addsi3"
2998 [(set (match_operand:SI 0 "register_operand" "=r,r")
2999 (plus:SI (match_operand:SI 1 "register_operand" "%r,r")
3000 (match_operand:SI 2 "arith_operand" "r,J")))]
3005 [(set_attr "type" "binary,binary")
3006 (set_attr "pa_combine_type" "addmove")
3007 (set_attr "length" "4,4")])
3009 ;; Disgusting kludge to work around reload bugs with frame pointer
3010 ;; elimination. Similar to other magic reload patterns in the
3011 ;; indexed memory operations.
3013 [(set (match_operand:SI 0 "register_operand" "=&r")
3014 (plus:SI (plus:SI (match_operand:SI 1 "register_operand" "%r")
3015 (match_operand:SI 2 "register_operand" "r"))
3016 (match_operand:SI 3 "const_int_operand" "rL")))]
3017 "reload_in_progress"
3020 if (GET_CODE (operands[3]) == CONST_INT)
3021 return \"ldo %3(%2),%0\;addl %1,%0,%0\";
3023 return \"addl %3,%2,%0\;addl %1,%0,%0\";
3025 [(set_attr "type" "binary")
3026 (set_attr "length" "8")])
3028 (define_insn "subdi3"
3029 [(set (match_operand:DI 0 "register_operand" "=r")
3030 (minus:DI (match_operand:DI 1 "register_operand" "r")
3031 (match_operand:DI 2 "register_operand" "r")))]
3033 "sub %R1,%R2,%R0\;subb %1,%2,%0"
3034 [(set_attr "type" "binary")
3035 (set_attr "length" "8")])
3037 (define_insn "subsi3"
3038 [(set (match_operand:SI 0 "register_operand" "=r,r")
3039 (minus:SI (match_operand:SI 1 "arith11_operand" "r,I")
3040 (match_operand:SI 2 "register_operand" "r,r")))]
3045 [(set_attr "type" "binary,binary")
3046 (set_attr "length" "4,4")])
3048 ;; Clobbering a "register_operand" instead of a match_scratch
3049 ;; in operand3 of millicode calls avoids spilling %r1 and
3050 ;; produces better code.
3052 ;; The mulsi3 insns set up registers for the millicode call.
3053 (define_expand "mulsi3"
3054 [(set (reg:SI 26) (match_operand:SI 1 "move_operand" ""))
3055 (set (reg:SI 25) (match_operand:SI 2 "move_operand" ""))
3056 (parallel [(set (reg:SI 29) (mult:SI (reg:SI 26) (reg:SI 25)))
3057 (clobber (match_dup 3))
3058 (clobber (reg:SI 26))
3059 (clobber (reg:SI 25))
3060 (clobber (reg:SI 31))])
3061 (set (match_operand:SI 0 "general_operand" "") (reg:SI 29))]
3065 if (TARGET_SNAKE && ! TARGET_DISABLE_FPREGS && ! TARGET_SOFT_FLOAT)
3067 rtx scratch = gen_reg_rtx (DImode);
3068 operands[1] = force_reg (SImode, operands[1]);
3069 operands[2] = force_reg (SImode, operands[2]);
3070 emit_insn (gen_umulsidi3 (scratch, operands[1], operands[2]));
3071 emit_insn (gen_rtx_SET (VOIDmode,
3073 gen_rtx_SUBREG (SImode, scratch, 1)));
3076 operands[3] = gen_reg_rtx (SImode);
3079 (define_insn "umulsidi3"
3080 [(set (match_operand:DI 0 "nonimmediate_operand" "=f")
3081 (mult:DI (zero_extend:DI (match_operand:SI 1 "nonimmediate_operand" "f"))
3082 (zero_extend:DI (match_operand:SI 2 "nonimmediate_operand" "f"))))]
3083 "TARGET_SNAKE && ! TARGET_DISABLE_FPREGS && ! TARGET_SOFT_FLOAT"
3085 [(set_attr "type" "fpmuldbl")
3086 (set_attr "length" "4")])
3089 [(set (match_operand:DI 0 "nonimmediate_operand" "=f")
3090 (mult:DI (zero_extend:DI (match_operand:SI 1 "nonimmediate_operand" "f"))
3091 (match_operand:DI 2 "uint32_operand" "f")))]
3092 "TARGET_SNAKE && ! TARGET_DISABLE_FPREGS && ! TARGET_SOFT_FLOAT"
3094 [(set_attr "type" "fpmuldbl")
3095 (set_attr "length" "4")])
3098 [(set (reg:SI 29) (mult:SI (reg:SI 26) (reg:SI 25)))
3099 (clobber (match_operand:SI 0 "register_operand" "=a"))
3100 (clobber (reg:SI 26))
3101 (clobber (reg:SI 25))
3102 (clobber (reg:SI 31))]
3104 "* return output_mul_insn (0, insn);"
3105 [(set_attr "type" "milli")
3106 (set (attr "length")
3108 ;; Target (or stub) within reach
3109 (and (lt (plus (symbol_ref "total_code_bytes") (pc))
3111 (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
3116 (ne (symbol_ref "TARGET_NO_SPACE_REGS || TARGET_FAST_INDIRECT_CALLS")
3120 ;; Out of reach, but not PIC or PORTABLE_RUNTIME
3121 ;; same as NO_SPACE_REGS code
3122 (and (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
3124 (eq (symbol_ref "flag_pic")
3128 ;; Out of range and either PIC or PORTABLE_RUNTIME
3131 ;;; Division and mod.
3132 (define_expand "divsi3"
3133 [(set (reg:SI 26) (match_operand:SI 1 "move_operand" ""))
3134 (set (reg:SI 25) (match_operand:SI 2 "move_operand" ""))
3135 (parallel [(set (reg:SI 29) (div:SI (reg:SI 26) (reg:SI 25)))
3136 (clobber (match_dup 3))
3137 (clobber (reg:SI 26))
3138 (clobber (reg:SI 25))
3139 (clobber (reg:SI 31))])
3140 (set (match_operand:SI 0 "general_operand" "") (reg:SI 29))]
3144 operands[3] = gen_reg_rtx (SImode);
3145 if (GET_CODE (operands[2]) == CONST_INT && emit_hpdiv_const (operands, 0))
3151 (div:SI (reg:SI 26) (match_operand:SI 0 "div_operand" "")))
3152 (clobber (match_operand:SI 1 "register_operand" "=a"))
3153 (clobber (reg:SI 26))
3154 (clobber (reg:SI 25))
3155 (clobber (reg:SI 31))]
3158 return output_div_insn (operands, 0, insn);"
3159 [(set_attr "type" "milli")
3160 (set (attr "length")
3162 ;; Target (or stub) within reach
3163 (and (lt (plus (symbol_ref "total_code_bytes") (pc))
3165 (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
3170 (ne (symbol_ref "TARGET_NO_SPACE_REGS || TARGET_FAST_INDIRECT_CALLS")
3174 ;; Out of reach, but not PIC or PORTABLE_RUNTIME
3175 ;; same as NO_SPACE_REGS code
3176 (and (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
3178 (eq (symbol_ref "flag_pic")
3182 ;; Out of range and either PIC or PORTABLE_RUNTIME
3185 (define_expand "udivsi3"
3186 [(set (reg:SI 26) (match_operand:SI 1 "move_operand" ""))
3187 (set (reg:SI 25) (match_operand:SI 2 "move_operand" ""))
3188 (parallel [(set (reg:SI 29) (udiv:SI (reg:SI 26) (reg:SI 25)))
3189 (clobber (match_dup 3))
3190 (clobber (reg:SI 26))
3191 (clobber (reg:SI 25))
3192 (clobber (reg:SI 31))])
3193 (set (match_operand:SI 0 "general_operand" "") (reg:SI 29))]
3197 operands[3] = gen_reg_rtx (SImode);
3198 if (GET_CODE (operands[2]) == CONST_INT && emit_hpdiv_const (operands, 1))
3204 (udiv:SI (reg:SI 26) (match_operand:SI 0 "div_operand" "")))
3205 (clobber (match_operand:SI 1 "register_operand" "=a"))
3206 (clobber (reg:SI 26))
3207 (clobber (reg:SI 25))
3208 (clobber (reg:SI 31))]
3211 return output_div_insn (operands, 1, insn);"
3212 [(set_attr "type" "milli")
3213 (set (attr "length")
3215 ;; Target (or stub) within reach
3216 (and (lt (plus (symbol_ref "total_code_bytes") (pc))
3218 (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
3223 (ne (symbol_ref "TARGET_NO_SPACE_REGS || TARGET_FAST_INDIRECT_CALLS")
3227 ;; Out of reach, but not PIC or PORTABLE_RUNTIME
3228 ;; same as NO_SPACE_REGS code
3229 (and (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
3231 (eq (symbol_ref "flag_pic")
3235 ;; Out of range and either PIC or PORTABLE_RUNTIME
3238 (define_expand "modsi3"
3239 [(set (reg:SI 26) (match_operand:SI 1 "move_operand" ""))
3240 (set (reg:SI 25) (match_operand:SI 2 "move_operand" ""))
3241 (parallel [(set (reg:SI 29) (mod:SI (reg:SI 26) (reg:SI 25)))
3242 (clobber (match_dup 3))
3243 (clobber (reg:SI 26))
3244 (clobber (reg:SI 25))
3245 (clobber (reg:SI 31))])
3246 (set (match_operand:SI 0 "general_operand" "") (reg:SI 29))]
3250 operands[3] = gen_reg_rtx (SImode);
3254 [(set (reg:SI 29) (mod:SI (reg:SI 26) (reg:SI 25)))
3255 (clobber (match_operand:SI 0 "register_operand" "=a"))
3256 (clobber (reg:SI 26))
3257 (clobber (reg:SI 25))
3258 (clobber (reg:SI 31))]
3261 return output_mod_insn (0, insn);"
3262 [(set_attr "type" "milli")
3263 (set (attr "length")
3265 ;; Target (or stub) within reach
3266 (and (lt (plus (symbol_ref "total_code_bytes") (pc))
3268 (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
3273 (ne (symbol_ref "TARGET_NO_SPACE_REGS || TARGET_FAST_INDIRECT_CALLS")
3277 ;; Out of reach, but not PIC or PORTABLE_RUNTIME
3278 ;; same as NO_SPACE_REGS code
3279 (and (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
3281 (eq (symbol_ref "flag_pic")
3285 ;; Out of range and either PIC or PORTABLE_RUNTIME
3288 (define_expand "umodsi3"
3289 [(set (reg:SI 26) (match_operand:SI 1 "move_operand" ""))
3290 (set (reg:SI 25) (match_operand:SI 2 "move_operand" ""))
3291 (parallel [(set (reg:SI 29) (umod:SI (reg:SI 26) (reg:SI 25)))
3292 (clobber (match_dup 3))
3293 (clobber (reg:SI 26))
3294 (clobber (reg:SI 25))
3295 (clobber (reg:SI 31))])
3296 (set (match_operand:SI 0 "general_operand" "") (reg:SI 29))]
3300 operands[3] = gen_reg_rtx (SImode);
3304 [(set (reg:SI 29) (umod:SI (reg:SI 26) (reg:SI 25)))
3305 (clobber (match_operand:SI 0 "register_operand" "=a"))
3306 (clobber (reg:SI 26))
3307 (clobber (reg:SI 25))
3308 (clobber (reg:SI 31))]
3311 return output_mod_insn (1, insn);"
3312 [(set_attr "type" "milli")
3313 (set (attr "length")
3315 ;; Target (or stub) within reach
3316 (and (lt (plus (symbol_ref "total_code_bytes") (pc))
3318 (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
3323 (ne (symbol_ref "TARGET_NO_SPACE_REGS || TARGET_FAST_INDIRECT_CALLS")
3327 ;; Out of reach, but not PIC or PORTABLE_RUNTIME
3328 ;; same as NO_SPACE_REGS code
3329 (and (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
3331 (eq (symbol_ref "flag_pic")
3335 ;; Out of range and either PIC or PORTABLE_RUNTIME
3338 ;;- and instructions
3339 ;; We define DImode `and` so with DImode `not` we can get
3340 ;; DImode `andn`. Other combinations are possible.
3342 (define_expand "anddi3"
3343 [(set (match_operand:DI 0 "register_operand" "")
3344 (and:DI (match_operand:DI 1 "arith_double_operand" "")
3345 (match_operand:DI 2 "arith_double_operand" "")))]
3349 if (! register_operand (operands[1], DImode)
3350 || ! register_operand (operands[2], DImode))
3351 /* Let GCC break this into word-at-a-time operations. */
3356 [(set (match_operand:DI 0 "register_operand" "=r")
3357 (and:DI (match_operand:DI 1 "register_operand" "%r")
3358 (match_operand:DI 2 "register_operand" "r")))]
3360 "and %1,%2,%0\;and %R1,%R2,%R0"
3361 [(set_attr "type" "binary")
3362 (set_attr "length" "8")])
3364 ; The ? for op1 makes reload prefer zdepi instead of loading a huge
3365 ; constant with ldil;ldo.
3366 (define_insn "andsi3"
3367 [(set (match_operand:SI 0 "register_operand" "=r,r")
3368 (and:SI (match_operand:SI 1 "register_operand" "%?r,0")
3369 (match_operand:SI 2 "and_operand" "rO,P")))]
3371 "* return output_and (operands); "
3372 [(set_attr "type" "binary,shift")
3373 (set_attr "length" "4,4")])
3376 [(set (match_operand:DI 0 "register_operand" "=r")
3377 (and:DI (not:DI (match_operand:DI 1 "register_operand" "r"))
3378 (match_operand:DI 2 "register_operand" "r")))]
3380 "andcm %2,%1,%0\;andcm %R2,%R1,%R0"
3381 [(set_attr "type" "binary")
3382 (set_attr "length" "8")])
3385 [(set (match_operand:SI 0 "register_operand" "=r")
3386 (and:SI (not:SI (match_operand:SI 1 "register_operand" "r"))
3387 (match_operand:SI 2 "register_operand" "r")))]
3390 [(set_attr "type" "binary")
3391 (set_attr "length" "4")])
3393 (define_expand "iordi3"
3394 [(set (match_operand:DI 0 "register_operand" "")
3395 (ior:DI (match_operand:DI 1 "arith_double_operand" "")
3396 (match_operand:DI 2 "arith_double_operand" "")))]
3400 if (! register_operand (operands[1], DImode)
3401 || ! register_operand (operands[2], DImode))
3402 /* Let GCC break this into word-at-a-time operations. */
3407 [(set (match_operand:DI 0 "register_operand" "=r")
3408 (ior:DI (match_operand:DI 1 "register_operand" "%r")
3409 (match_operand:DI 2 "register_operand" "r")))]
3411 "or %1,%2,%0\;or %R1,%R2,%R0"
3412 [(set_attr "type" "binary")
3413 (set_attr "length" "8")])
3415 ;; Need a define_expand because we've run out of CONST_OK... characters.
3416 (define_expand "iorsi3"
3417 [(set (match_operand:SI 0 "register_operand" "")
3418 (ior:SI (match_operand:SI 1 "register_operand" "")
3419 (match_operand:SI 2 "arith32_operand" "")))]
3423 if (! (ior_operand (operands[2], SImode)
3424 || register_operand (operands[2], SImode)))
3425 operands[2] = force_reg (SImode, operands[2]);
3429 [(set (match_operand:SI 0 "register_operand" "=r,r")
3430 (ior:SI (match_operand:SI 1 "register_operand" "0,0")
3431 (match_operand:SI 2 "ior_operand" "M,i")))]
3433 "* return output_ior (operands); "
3434 [(set_attr "type" "binary,shift")
3435 (set_attr "length" "4,4")])
3438 [(set (match_operand:SI 0 "register_operand" "=r")
3439 (ior:SI (match_operand:SI 1 "register_operand" "%r")
3440 (match_operand:SI 2 "register_operand" "r")))]
3443 [(set_attr "type" "binary")
3444 (set_attr "length" "4")])
3446 (define_expand "xordi3"
3447 [(set (match_operand:DI 0 "register_operand" "")
3448 (xor:DI (match_operand:DI 1 "arith_double_operand" "")
3449 (match_operand:DI 2 "arith_double_operand" "")))]
3453 if (! register_operand (operands[1], DImode)
3454 || ! register_operand (operands[2], DImode))
3455 /* Let GCC break this into word-at-a-time operations. */
3460 [(set (match_operand:DI 0 "register_operand" "=r")
3461 (xor:DI (match_operand:DI 1 "register_operand" "%r")
3462 (match_operand:DI 2 "register_operand" "r")))]
3464 "xor %1,%2,%0\;xor %R1,%R2,%R0"
3465 [(set_attr "type" "binary")
3466 (set_attr "length" "8")])
3468 (define_insn "xorsi3"
3469 [(set (match_operand:SI 0 "register_operand" "=r")
3470 (xor:SI (match_operand:SI 1 "register_operand" "%r")
3471 (match_operand:SI 2 "register_operand" "r")))]
3474 [(set_attr "type" "binary")
3475 (set_attr "length" "4")])
3477 (define_insn "negdi2"
3478 [(set (match_operand:DI 0 "register_operand" "=r")
3479 (neg:DI (match_operand:DI 1 "register_operand" "r")))]
3481 "sub 0,%R1,%R0\;subb 0,%1,%0"
3482 [(set_attr "type" "unary")
3483 (set_attr "length" "8")])
3485 (define_insn "negsi2"
3486 [(set (match_operand:SI 0 "register_operand" "=r")
3487 (neg:SI (match_operand:SI 1 "register_operand" "r")))]
3490 [(set_attr "type" "unary")
3491 (set_attr "length" "4")])
3493 (define_expand "one_cmpldi2"
3494 [(set (match_operand:DI 0 "register_operand" "")
3495 (not:DI (match_operand:DI 1 "arith_double_operand" "")))]
3499 if (! register_operand (operands[1], DImode))
3504 [(set (match_operand:DI 0 "register_operand" "=r")
3505 (not:DI (match_operand:DI 1 "register_operand" "r")))]
3507 "uaddcm 0,%1,%0\;uaddcm 0,%R1,%R0"
3508 [(set_attr "type" "unary")
3509 (set_attr "length" "8")])
3511 (define_insn "one_cmplsi2"
3512 [(set (match_operand:SI 0 "register_operand" "=r")
3513 (not:SI (match_operand:SI 1 "register_operand" "r")))]
3516 [(set_attr "type" "unary")
3517 (set_attr "length" "4")])
3519 ;; Floating point arithmetic instructions.
3521 (define_insn "adddf3"
3522 [(set (match_operand:DF 0 "register_operand" "=f")
3523 (plus:DF (match_operand:DF 1 "register_operand" "f")
3524 (match_operand:DF 2 "register_operand" "f")))]
3525 "! TARGET_SOFT_FLOAT"
3527 [(set_attr "type" "fpalu")
3528 (set_attr "pa_combine_type" "faddsub")
3529 (set_attr "length" "4")])
3531 (define_insn "addsf3"
3532 [(set (match_operand:SF 0 "register_operand" "=f")
3533 (plus:SF (match_operand:SF 1 "register_operand" "f")
3534 (match_operand:SF 2 "register_operand" "f")))]
3535 "! TARGET_SOFT_FLOAT"
3537 [(set_attr "type" "fpalu")
3538 (set_attr "pa_combine_type" "faddsub")
3539 (set_attr "length" "4")])
3541 (define_insn "subdf3"
3542 [(set (match_operand:DF 0 "register_operand" "=f")
3543 (minus:DF (match_operand:DF 1 "register_operand" "f")
3544 (match_operand:DF 2 "register_operand" "f")))]
3545 "! TARGET_SOFT_FLOAT"
3547 [(set_attr "type" "fpalu")
3548 (set_attr "pa_combine_type" "faddsub")
3549 (set_attr "length" "4")])
3551 (define_insn "subsf3"
3552 [(set (match_operand:SF 0 "register_operand" "=f")
3553 (minus:SF (match_operand:SF 1 "register_operand" "f")
3554 (match_operand:SF 2 "register_operand" "f")))]
3555 "! TARGET_SOFT_FLOAT"
3557 [(set_attr "type" "fpalu")
3558 (set_attr "pa_combine_type" "faddsub")
3559 (set_attr "length" "4")])
3561 (define_insn "muldf3"
3562 [(set (match_operand:DF 0 "register_operand" "=f")
3563 (mult:DF (match_operand:DF 1 "register_operand" "f")
3564 (match_operand:DF 2 "register_operand" "f")))]
3565 "! TARGET_SOFT_FLOAT"
3567 [(set_attr "type" "fpmuldbl")
3568 (set_attr "pa_combine_type" "fmpy")
3569 (set_attr "length" "4")])
3571 (define_insn "mulsf3"
3572 [(set (match_operand:SF 0 "register_operand" "=f")
3573 (mult:SF (match_operand:SF 1 "register_operand" "f")
3574 (match_operand:SF 2 "register_operand" "f")))]
3575 "! TARGET_SOFT_FLOAT"
3577 [(set_attr "type" "fpmulsgl")
3578 (set_attr "pa_combine_type" "fmpy")
3579 (set_attr "length" "4")])
3581 (define_insn "divdf3"
3582 [(set (match_operand:DF 0 "register_operand" "=f")
3583 (div:DF (match_operand:DF 1 "register_operand" "f")
3584 (match_operand:DF 2 "register_operand" "f")))]
3585 "! TARGET_SOFT_FLOAT"
3587 [(set_attr "type" "fpdivdbl")
3588 (set_attr "length" "4")])
3590 (define_insn "divsf3"
3591 [(set (match_operand:SF 0 "register_operand" "=f")
3592 (div:SF (match_operand:SF 1 "register_operand" "f")
3593 (match_operand:SF 2 "register_operand" "f")))]
3594 "! TARGET_SOFT_FLOAT"
3596 [(set_attr "type" "fpdivsgl")
3597 (set_attr "length" "4")])
3599 (define_insn "negdf2"
3600 [(set (match_operand:DF 0 "register_operand" "=f")
3601 (neg:DF (match_operand:DF 1 "register_operand" "f")))]
3602 "! TARGET_SOFT_FLOAT"
3604 [(set_attr "type" "fpalu")
3605 (set_attr "length" "4")])
3607 (define_insn "negsf2"
3608 [(set (match_operand:SF 0 "register_operand" "=f")
3609 (neg:SF (match_operand:SF 1 "register_operand" "f")))]
3610 "! TARGET_SOFT_FLOAT"
3612 [(set_attr "type" "fpalu")
3613 (set_attr "length" "4")])
3615 (define_insn "absdf2"
3616 [(set (match_operand:DF 0 "register_operand" "=f")
3617 (abs:DF (match_operand:DF 1 "register_operand" "f")))]
3618 "! TARGET_SOFT_FLOAT"
3620 [(set_attr "type" "fpalu")
3621 (set_attr "length" "4")])
3623 (define_insn "abssf2"
3624 [(set (match_operand:SF 0 "register_operand" "=f")
3625 (abs:SF (match_operand:SF 1 "register_operand" "f")))]
3626 "! TARGET_SOFT_FLOAT"
3628 [(set_attr "type" "fpalu")
3629 (set_attr "length" "4")])
3631 (define_insn "sqrtdf2"
3632 [(set (match_operand:DF 0 "register_operand" "=f")
3633 (sqrt:DF (match_operand:DF 1 "register_operand" "f")))]
3634 "! TARGET_SOFT_FLOAT"
3636 [(set_attr "type" "fpsqrtdbl")
3637 (set_attr "length" "4")])
3639 (define_insn "sqrtsf2"
3640 [(set (match_operand:SF 0 "register_operand" "=f")
3641 (sqrt:SF (match_operand:SF 1 "register_operand" "f")))]
3642 "! TARGET_SOFT_FLOAT"
3644 [(set_attr "type" "fpsqrtsgl")
3645 (set_attr "length" "4")])
3647 ;;- Shift instructions
3649 ;; Optimized special case of shifting.
3652 [(set (match_operand:SI 0 "register_operand" "=r")
3653 (lshiftrt:SI (match_operand:SI 1 "memory_operand" "m")
3657 [(set_attr "type" "load")
3658 (set_attr "length" "4")])
3661 [(set (match_operand:SI 0 "register_operand" "=r")
3662 (lshiftrt:SI (match_operand:SI 1 "memory_operand" "m")
3666 [(set_attr "type" "load")
3667 (set_attr "length" "4")])
3670 [(set (match_operand:SI 0 "register_operand" "=r")
3671 (plus:SI (mult:SI (match_operand:SI 2 "register_operand" "r")
3672 (match_operand:SI 3 "shadd_operand" ""))
3673 (match_operand:SI 1 "register_operand" "r")))]
3675 "sh%O3addl %2,%1,%0"
3676 [(set_attr "type" "binary")
3677 (set_attr "length" "4")])
3679 ;; This variant of the above insn can occur if the first operand
3680 ;; is the frame pointer. This is a kludge, but there doesn't
3681 ;; seem to be a way around it. Only recognize it while reloading.
3682 ;; Note how operand 3 uses a predicate of "const_int_operand", but
3683 ;; has constraints allowing a register. I don't know how this works,
3684 ;; but it somehow makes sure that out-of-range constants are placed
3685 ;; in a register which somehow magically is a "const_int_operand".
3686 ;; (this was stolen from alpha.md, I'm not going to try to change it.)
3689 [(set (match_operand:SI 0 "register_operand" "=&r,r")
3690 (plus:SI (plus:SI (mult:SI (match_operand:SI 2 "register_operand" "r,r")
3691 (match_operand:SI 4 "shadd_operand" ""))
3692 (match_operand:SI 1 "register_operand" "r,r"))
3693 (match_operand:SI 3 "const_int_operand" "r,J")))]
3694 "reload_in_progress"
3696 sh%O4addl %2,%1,%0\;addl %3,%0,%0
3697 sh%O4addl %2,%1,%0\;ldo %3(%0),%0"
3698 [(set_attr "type" "multi")
3699 (set_attr "length" "8")])
3701 ;; This anonymous pattern and splitter wins because it reduces the latency
3702 ;; of the shadd sequence without increasing the latency of the shift.
3704 ;; We want to make sure and split up the operations for the scheduler since
3705 ;; these instructions can (and should) schedule independently.
3707 ;; It would be clearer if combine used the same operator for both expressions,
3708 ;; it's somewhat confusing to have a mult in one operation and an ashift
3711 ;; If this pattern is not split before register allocation, then we must expose
3712 ;; the fact that operand 4 is set before operands 1, 2 and 3 have been read.
3714 [(set (match_operand:SI 0 "register_operand" "=r")
3715 (plus:SI (mult:SI (match_operand:SI 2 "register_operand" "r")
3716 (match_operand:SI 3 "shadd_operand" ""))
3717 (match_operand:SI 1 "register_operand" "r")))
3718 (set (match_operand:SI 4 "register_operand" "=&r")
3719 (ashift:SI (match_dup 2)
3720 (match_operand:SI 5 "const_int_operand" "i")))]
3721 "INTVAL (operands[5]) == exact_log2 (INTVAL (operands[3]))"
3723 [(set_attr "type" "binary")
3724 (set_attr "length" "8")])
3727 [(set (match_operand:SI 0 "register_operand" "=r")
3728 (plus:SI (mult:SI (match_operand:SI 2 "register_operand" "r")
3729 (match_operand:SI 3 "shadd_operand" ""))
3730 (match_operand:SI 1 "register_operand" "r")))
3731 (set (match_operand:SI 4 "register_operand" "=&r")
3732 (ashift:SI (match_dup 2)
3733 (match_operand:SI 5 "const_int_operand" "i")))]
3734 "INTVAL (operands[5]) == exact_log2 (INTVAL (operands[3]))"
3735 [(set (match_dup 4) (ashift:SI (match_dup 2) (match_dup 5)))
3736 (set (match_dup 0) (plus:SI (mult:SI (match_dup 2) (match_dup 3))
3740 (define_expand "ashlsi3"
3741 [(set (match_operand:SI 0 "register_operand" "")
3742 (ashift:SI (match_operand:SI 1 "lhs_lshift_operand" "")
3743 (match_operand:SI 2 "arith32_operand" "")))]
3747 if (GET_CODE (operands[2]) != CONST_INT)
3749 rtx temp = gen_reg_rtx (SImode);
3750 emit_insn (gen_subsi3 (temp, GEN_INT (31), operands[2]));
3751 if (GET_CODE (operands[1]) == CONST_INT)
3752 emit_insn (gen_zvdep_imm (operands[0], operands[1], temp));
3754 emit_insn (gen_zvdep32 (operands[0], operands[1], temp));
3757 /* Make sure both inputs are not constants,
3758 there are no patterns for that. */
3759 operands[1] = force_reg (SImode, operands[1]);
3763 [(set (match_operand:SI 0 "register_operand" "=r")
3764 (ashift:SI (match_operand:SI 1 "register_operand" "r")
3765 (match_operand:SI 2 "const_int_operand" "n")))]
3767 "zdep %1,%P2,%L2,%0"
3768 [(set_attr "type" "shift")
3769 (set_attr "length" "4")])
3771 ; Match cases of op1 a CONST_INT here that zvdep_imm doesn't handle.
3772 ; Doing it like this makes slightly better code since reload can
3773 ; replace a register with a known value in range -16..15 with a
3774 ; constant. Ideally, we would like to merge zvdep32 and zvdep_imm,
3775 ; but since we have no more CONST_OK... characters, that is not
3777 (define_insn "zvdep32"
3778 [(set (match_operand:SI 0 "register_operand" "=r,r")
3779 (ashift:SI (match_operand:SI 1 "arith5_operand" "r,L")
3780 (minus:SI (const_int 31)
3781 (match_operand:SI 2 "register_operand" "q,q"))))]
3786 [(set_attr "type" "shift,shift")
3787 (set_attr "length" "4,4")])
3789 (define_insn "zvdep_imm"
3790 [(set (match_operand:SI 0 "register_operand" "=r")
3791 (ashift:SI (match_operand:SI 1 "lhs_lshift_cint_operand" "")
3792 (minus:SI (const_int 31)
3793 (match_operand:SI 2 "register_operand" "q"))))]
3797 int x = INTVAL (operands[1]);
3798 operands[2] = GEN_INT (4 + exact_log2 ((x >> 4) + 1));
3799 operands[1] = GEN_INT ((x & 0xf) - 0x10);
3800 return \"zvdepi %1,%2,%0\";
3802 [(set_attr "type" "shift")
3803 (set_attr "length" "4")])
3805 (define_insn "vdepi_ior"
3806 [(set (match_operand:SI 0 "register_operand" "=r")
3807 (ior:SI (ashift:SI (match_operand:SI 1 "const_int_operand" "")
3808 (minus:SI (const_int 31)
3809 (match_operand:SI 2 "register_operand" "q")))
3810 (match_operand:SI 3 "register_operand" "0")))]
3811 ; accept ...0001...1, can this be generalized?
3812 "exact_log2 (INTVAL (operands[1]) + 1) >= 0"
3815 int x = INTVAL (operands[1]);
3816 operands[2] = GEN_INT (exact_log2 (x + 1));
3817 return \"vdepi -1,%2,%0\";
3819 [(set_attr "type" "shift")
3820 (set_attr "length" "4")])
3822 (define_insn "vdepi_and"
3823 [(set (match_operand:SI 0 "register_operand" "=r")
3824 (and:SI (rotate:SI (match_operand:SI 1 "const_int_operand" "")
3825 (minus:SI (const_int 31)
3826 (match_operand:SI 2 "register_operand" "q")))
3827 (match_operand:SI 3 "register_operand" "0")))]
3828 ; this can be generalized...!
3829 "INTVAL (operands[1]) == -2"
3832 int x = INTVAL (operands[1]);
3833 operands[2] = GEN_INT (exact_log2 ((~x) + 1));
3834 return \"vdepi 0,%2,%0\";
3836 [(set_attr "type" "shift")
3837 (set_attr "length" "4")])
3839 (define_expand "ashrsi3"
3840 [(set (match_operand:SI 0 "register_operand" "")
3841 (ashiftrt:SI (match_operand:SI 1 "register_operand" "")
3842 (match_operand:SI 2 "arith32_operand" "")))]
3846 if (GET_CODE (operands[2]) != CONST_INT)
3848 rtx temp = gen_reg_rtx (SImode);
3849 emit_insn (gen_subsi3 (temp, GEN_INT (31), operands[2]));
3850 emit_insn (gen_vextrs32 (operands[0], operands[1], temp));
3856 [(set (match_operand:SI 0 "register_operand" "=r")
3857 (ashiftrt:SI (match_operand:SI 1 "register_operand" "r")
3858 (match_operand:SI 2 "const_int_operand" "n")))]
3860 "extrs %1,%P2,%L2,%0"
3861 [(set_attr "type" "shift")
3862 (set_attr "length" "4")])
3864 (define_insn "vextrs32"
3865 [(set (match_operand:SI 0 "register_operand" "=r")
3866 (ashiftrt:SI (match_operand:SI 1 "register_operand" "r")
3867 (minus:SI (const_int 31)
3868 (match_operand:SI 2 "register_operand" "q"))))]
3871 [(set_attr "type" "shift")
3872 (set_attr "length" "4")])
3874 (define_insn "lshrsi3"
3875 [(set (match_operand:SI 0 "register_operand" "=r,r")
3876 (lshiftrt:SI (match_operand:SI 1 "register_operand" "r,r")
3877 (match_operand:SI 2 "arith32_operand" "q,n")))]
3881 extru %1,%P2,%L2,%0"
3882 [(set_attr "type" "shift")
3883 (set_attr "length" "4")])
3885 (define_insn "rotrsi3"
3886 [(set (match_operand:SI 0 "register_operand" "=r,r")
3887 (rotatert:SI (match_operand:SI 1 "register_operand" "r,r")
3888 (match_operand:SI 2 "arith32_operand" "q,n")))]
3892 if (GET_CODE (operands[2]) == CONST_INT)
3894 operands[2] = GEN_INT (INTVAL (operands[2]) & 31);
3895 return \"shd %1,%1,%2,%0\";
3898 return \"vshd %1,%1,%0\";
3900 [(set_attr "type" "shift")
3901 (set_attr "length" "4")])
3903 (define_expand "rotlsi3"
3904 [(set (match_operand:SI 0 "register_operand" "")
3905 (rotate:SI (match_operand:SI 1 "register_operand" "")
3906 (match_operand:SI 2 "arith32_operand" "")))]
3910 if (GET_CODE (operands[2]) != CONST_INT)
3912 rtx temp = gen_reg_rtx (SImode);
3913 emit_insn (gen_subsi3 (temp, GEN_INT (32), operands[2]));
3914 emit_insn (gen_rotrsi3 (operands[0], operands[1], temp));
3917 /* Else expand normally. */
3921 [(set (match_operand:SI 0 "register_operand" "=r")
3922 (rotate:SI (match_operand:SI 1 "register_operand" "r")
3923 (match_operand:SI 2 "const_int_operand" "n")))]
3927 operands[2] = GEN_INT ((32 - INTVAL (operands[2])) & 31);
3928 return \"shd %1,%1,%2,%0\";
3930 [(set_attr "type" "shift")
3931 (set_attr "length" "4")])
3934 [(set (match_operand:SI 0 "register_operand" "=r")
3935 (match_operator:SI 5 "plus_xor_ior_operator"
3936 [(ashift:SI (match_operand:SI 1 "register_operand" "r")
3937 (match_operand:SI 3 "const_int_operand" "n"))
3938 (lshiftrt:SI (match_operand:SI 2 "register_operand" "r")
3939 (match_operand:SI 4 "const_int_operand" "n"))]))]
3940 "INTVAL (operands[3]) + INTVAL (operands[4]) == 32"
3942 [(set_attr "type" "shift")
3943 (set_attr "length" "4")])
3946 [(set (match_operand:SI 0 "register_operand" "=r")
3947 (match_operator:SI 5 "plus_xor_ior_operator"
3948 [(lshiftrt:SI (match_operand:SI 2 "register_operand" "r")
3949 (match_operand:SI 4 "const_int_operand" "n"))
3950 (ashift:SI (match_operand:SI 1 "register_operand" "r")
3951 (match_operand:SI 3 "const_int_operand" "n"))]))]
3952 "INTVAL (operands[3]) + INTVAL (operands[4]) == 32"
3954 [(set_attr "type" "shift")
3955 (set_attr "length" "4")])
3958 [(set (match_operand:SI 0 "register_operand" "=r")
3959 (and:SI (ashift:SI (match_operand:SI 1 "register_operand" "r")
3960 (match_operand:SI 2 "const_int_operand" ""))
3961 (match_operand:SI 3 "const_int_operand" "")))]
3962 "exact_log2 (1 + (INTVAL (operands[3]) >> (INTVAL (operands[2]) & 31))) >= 0"
3965 int cnt = INTVAL (operands[2]) & 31;
3966 operands[3] = GEN_INT (exact_log2 (1 + (INTVAL (operands[3]) >> cnt)));
3967 operands[2] = GEN_INT (31 - cnt);
3968 return \"zdep %1,%2,%3,%0\";
3970 [(set_attr "type" "shift")
3971 (set_attr "length" "4")])
3973 ;; Unconditional and other jump instructions.
3975 (define_insn "return"
3977 "hppa_can_use_return_insn_p ()"
3979 [(set_attr "type" "branch")
3980 (set_attr "length" "4")])
3982 ;; Use a different pattern for functions which have non-trivial
3983 ;; epilogues so as not to confuse jump and reorg.
3984 (define_insn "return_internal"
3989 [(set_attr "type" "branch")
3990 (set_attr "length" "4")])
3992 (define_expand "prologue"
3995 "hppa_expand_prologue ();DONE;")
3997 (define_expand "epilogue"
4002 /* Try to use the trivial return first. Else use the full
4004 if (hppa_can_use_return_insn_p ())
4005 emit_jump_insn (gen_return ());
4008 hppa_expand_epilogue ();
4009 emit_jump_insn (gen_return_internal ());
4014 ;; Special because we use the value placed in %r2 by the bl instruction
4015 ;; from within its delay slot to set the value for the 2nd parameter to
4017 (define_insn "call_profiler"
4018 [(unspec_volatile [(const_int 0)] 0)
4019 (use (match_operand:SI 0 "const_int_operand" ""))]
4021 "bl _mcount,%%r2\;ldo %0(%%r2),%%r25"
4022 [(set_attr "type" "multi")
4023 (set_attr "length" "8")])
4025 (define_insn "blockage"
4026 [(unspec_volatile [(const_int 2)] 0)]
4029 [(set_attr "length" "0")])
4032 [(set (pc) (label_ref (match_operand 0 "" "")))]
4036 extern int optimize;
4038 if (GET_MODE (insn) == SImode)
4039 return \"bl %l0,0%#\";
4041 /* An unconditional branch which can reach its target. */
4042 if (get_attr_length (insn) != 24
4043 && get_attr_length (insn) != 16)
4044 return \"bl%* %l0,0\";
4046 /* An unconditional branch which can not reach its target.
4048 We need to be able to use %r1 as a scratch register; however,
4049 we can never be sure whether or not it's got a live value in
4050 it. Therefore, we must restore its original value after the
4053 To make matters worse, we don't have a stack slot which we
4054 can always clobber. sp-12/sp-16 shouldn't ever have a live
4055 value during a non-optimizing compilation, so we use those
4056 slots for now. We don't support very long branches when
4057 optimizing -- they should be quite rare when optimizing.
4059 Really the way to go long term is a register scavenger; go to
4060 the target of the jump and find a register which we can use
4061 as a scratch to hold the value in %r1. */
4063 /* We don't know how to register scavenge yet. */
4067 /* First store %r1 into the stack. */
4068 output_asm_insn (\"stw %%r1,-16(%%r30)\", operands);
4070 /* Now load the target address into %r1 and do an indirect jump
4071 to the value specified in %r1. Be careful to generate PIC
4076 xoperands[0] = operands[0];
4077 xoperands[1] = gen_label_rtx ();
4079 output_asm_insn (\"bl .+8,%%r1\\n\\taddil L'%l0-%l1,%%r1\", xoperands);
4080 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, \"L\",
4081 CODE_LABEL_NUMBER (xoperands[1]));
4082 output_asm_insn (\"ldo R'%l0-%l1(%%r1),%%r1\\n\\tbv %%r0(%%r1)\",
4086 output_asm_insn (\"ldil L'%l0,%%r1\\n\\tbe R'%l0(%%sr4,%%r1)\", operands);;
4088 /* And restore the value of %r1 in the delay slot. We're not optimizing,
4089 so we know nothing else can be in the delay slot. */
4090 return \"ldw -16(%%r30),%%r1\";
4092 [(set_attr "type" "uncond_branch")
4093 (set_attr "pa_combine_type" "uncond_branch")
4094 (set (attr "length")
4095 (cond [(eq (symbol_ref "jump_in_call_delay (insn)") (const_int 1))
4096 (if_then_else (lt (abs (minus (match_dup 0)
4097 (plus (pc) (const_int 8))))
4101 (ge (abs (minus (match_dup 0) (plus (pc) (const_int 8))))
4103 (if_then_else (eq (symbol_ref "flag_pic") (const_int 0))
4108 ;; Subroutines of "casesi".
4109 ;; operand 0 is index
4110 ;; operand 1 is the minimum bound
4111 ;; operand 2 is the maximum bound - minimum bound + 1
4112 ;; operand 3 is CODE_LABEL for the table;
4113 ;; operand 4 is the CODE_LABEL to go to if index out of range.
4115 (define_expand "casesi"
4116 [(match_operand:SI 0 "general_operand" "")
4117 (match_operand:SI 1 "const_int_operand" "")
4118 (match_operand:SI 2 "const_int_operand" "")
4119 (match_operand 3 "" "")
4120 (match_operand 4 "" "")]
4124 if (GET_CODE (operands[0]) != REG)
4125 operands[0] = force_reg (SImode, operands[0]);
4127 if (operands[1] != const0_rtx)
4129 rtx reg = gen_reg_rtx (SImode);
4131 operands[1] = GEN_INT (-INTVAL (operands[1]));
4132 if (!INT_14_BITS (operands[1]))
4133 operands[1] = force_reg (SImode, operands[1]);
4134 emit_insn (gen_addsi3 (reg, operands[0], operands[1]));
4139 if (!INT_5_BITS (operands[2]))
4140 operands[2] = force_reg (SImode, operands[2]);
4142 emit_insn (gen_cmpsi (operands[0], operands[2]));
4143 emit_jump_insn (gen_bgtu (operands[4]));
4144 if (TARGET_BIG_SWITCH)
4146 rtx temp = gen_reg_rtx (SImode);
4147 emit_move_insn (temp, gen_rtx_PLUS (SImode, operands[0], operands[0]));
4150 emit_jump_insn (gen_casesi0 (operands[0], operands[3]));
4154 (define_insn "casesi0"
4156 (mem:SI (plus:SI (pc)
4157 (match_operand:SI 0 "register_operand" "r")))
4158 (label_ref (match_operand 1 "" ""))))]
4161 [(set_attr "type" "multi")
4162 (set_attr "length" "8")])
4164 ;; Need nops for the calls because execution is supposed to continue
4165 ;; past; we don't want to nullify an instruction that we need.
4166 ;;- jump to subroutine
4168 (define_expand "call"
4169 [(parallel [(call (match_operand:SI 0 "" "")
4170 (match_operand 1 "" ""))
4171 (clobber (reg:SI 2))])]
4178 if (TARGET_PORTABLE_RUNTIME)
4179 op = force_reg (SImode, XEXP (operands[0], 0));
4181 op = XEXP (operands[0], 0);
4183 /* Use two different patterns for calls to explicitly named functions
4184 and calls through function pointers. This is necessary as these two
4185 types of calls use different calling conventions, and CSE might try
4186 to change the named call into an indirect call in some cases (using
4187 two patterns keeps CSE from performing this optimization). */
4188 if (GET_CODE (op) == SYMBOL_REF)
4189 call_insn = emit_call_insn (gen_call_internal_symref (op, operands[1]));
4192 rtx tmpreg = gen_rtx_REG (SImode, 22);
4193 emit_move_insn (tmpreg, force_reg (SImode, op));
4194 call_insn = emit_call_insn (gen_call_internal_reg (operands[1]));
4199 use_reg (&CALL_INSN_FUNCTION_USAGE (call_insn), pic_offset_table_rtx);
4201 /* After each call we must restore the PIC register, even if it
4202 doesn't appear to be used.
4204 This will set regs_ever_live for the callee saved register we
4205 stored the PIC register in. */
4206 emit_move_insn (pic_offset_table_rtx,
4207 gen_rtx_REG (SImode, PIC_OFFSET_TABLE_REGNUM_SAVED));
4208 emit_insn (gen_rtx_USE (VOIDmode, pic_offset_table_rtx));
4210 /* Gross. We have to keep the scheduler from moving the restore
4211 of the PIC register away from the call. SCHED_GROUP_P is
4212 supposed to do this, but for some reason the compiler will
4213 go into an infinite loop when we use that.
4215 This method (blockage insn) may make worse code (then again
4216 it may not since calls are nearly blockages anyway), but at
4217 least it should work. */
4218 emit_insn (gen_blockage ());
4223 (define_insn "call_internal_symref"
4224 [(call (mem:SI (match_operand:SI 0 "call_operand_address" ""))
4225 (match_operand 1 "" "i"))
4226 (clobber (reg:SI 2))
4227 (use (const_int 0))]
4228 "! TARGET_PORTABLE_RUNTIME"
4231 output_arg_descriptor (insn);
4232 return output_call (insn, operands[0]);
4234 [(set_attr "type" "call")
4235 (set (attr "length")
4236 ;; If we're sure that we can either reach the target or that the
4237 ;; linker can use a long-branch stub, then the length is 4 bytes.
4239 ;; For long-calls the length will be either 52 bytes (non-pic)
4240 ;; or 68 bytes (pic).
4241 ;; Else we have to use a long-call;
4242 (if_then_else (lt (plus (symbol_ref "total_code_bytes") (pc))
4245 (if_then_else (eq (symbol_ref "flag_pic")
4250 (define_insn "call_internal_reg"
4251 [(call (mem:SI (reg:SI 22))
4252 (match_operand 0 "" "i"))
4253 (clobber (reg:SI 2))
4254 (use (const_int 1))]
4260 /* First the special case for kernels, level 0 systems, etc. */
4261 if (TARGET_NO_SPACE_REGS || TARGET_FAST_INDIRECT_CALLS)
4262 return \"ble 0(%%sr4,%%r22)\;copy %%r31,%%r2\";
4264 /* Now the normal case -- we can reach $$dyncall directly or
4265 we're sure that we can get there via a long-branch stub.
4267 No need to check target flags as the length uniquely identifies
4268 the remaining cases. */
4269 if (get_attr_length (insn) == 8)
4270 return \".CALL\\tARGW0=GR\;bl $$dyncall,%%r31\;copy %%r31,%%r2\";
4272 /* Long millicode call, but we are not generating PIC or portable runtime
4274 if (get_attr_length (insn) == 12)
4275 return \".CALL\\tARGW0=GR\;ldil L%%$$dyncall,%%r2\;ble R%%$$dyncall(%%sr4,%%r2)\;copy %%r31,%%r2\";
4277 /* Long millicode call for portable runtime. */
4278 if (get_attr_length (insn) == 20)
4279 return \"ldil L%%$$dyncall,%%r31\;ldo R%%$$dyncall(%%r31),%%r31\;blr 0,%%r2\;bv,n %%r0(%%r31)\;nop\";
4281 /* If we're generating PIC code. */
4282 xoperands[0] = operands[0];
4283 xoperands[1] = gen_label_rtx ();
4284 output_asm_insn (\"bl .+8,%%r1\", xoperands);
4285 output_asm_insn (\"addil L%%$$dyncall-%1,%%r1\", xoperands);
4286 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, \"L\",
4287 CODE_LABEL_NUMBER (xoperands[1]));
4288 output_asm_insn (\"ldo R%%$$dyncall-%1(%%r1),%%r1\", xoperands);
4289 output_asm_insn (\"blr 0,%%r2\", xoperands);
4290 output_asm_insn (\"bv,n %%r0(%%r1)\\n\\tnop\", xoperands);
4293 [(set_attr "type" "dyncall")
4294 (set (attr "length")
4296 ;; First NO_SPACE_REGS
4297 (ne (symbol_ref "TARGET_NO_SPACE_REGS || TARGET_FAST_INDIRECT_CALLS")
4301 ;; Target (or stub) within reach
4302 (and (lt (plus (symbol_ref "total_code_bytes") (pc))
4304 (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
4308 ;; Out of reach, but not PIC or PORTABLE_RUNTIME
4309 (and (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
4311 (eq (symbol_ref "flag_pic")
4315 (ne (symbol_ref "TARGET_PORTABLE_RUNTIME")
4319 ;; Out of range PIC case
4322 (define_expand "call_value"
4323 [(parallel [(set (match_operand 0 "" "")
4324 (call (match_operand:SI 1 "" "")
4325 (match_operand 2 "" "")))
4326 (clobber (reg:SI 2))])]
4333 if (TARGET_PORTABLE_RUNTIME)
4334 op = force_reg (SImode, XEXP (operands[1], 0));
4336 op = XEXP (operands[1], 0);
4338 /* Use two different patterns for calls to explicitly named functions
4339 and calls through function pointers. This is necessary as these two
4340 types of calls use different calling conventions, and CSE might try
4341 to change the named call into an indirect call in some cases (using
4342 two patterns keeps CSE from performing this optimization). */
4343 if (GET_CODE (op) == SYMBOL_REF)
4344 call_insn = emit_call_insn (gen_call_value_internal_symref (operands[0],
4349 rtx tmpreg = gen_rtx_REG (SImode, 22);
4350 emit_move_insn (tmpreg, force_reg (SImode, op));
4351 call_insn = emit_call_insn (gen_call_value_internal_reg (operands[0],
4356 use_reg (&CALL_INSN_FUNCTION_USAGE (call_insn), pic_offset_table_rtx);
4358 /* After each call we must restore the PIC register, even if it
4359 doesn't appear to be used.
4361 This will set regs_ever_live for the callee saved register we
4362 stored the PIC register in. */
4363 emit_move_insn (pic_offset_table_rtx,
4364 gen_rtx_REG (SImode, PIC_OFFSET_TABLE_REGNUM_SAVED));
4365 emit_insn (gen_rtx_USE (VOIDmode, pic_offset_table_rtx));
4367 /* Gross. We have to keep the scheduler from moving the restore
4368 of the PIC register away from the call. SCHED_GROUP_P is
4369 supposed to do this, but for some reason the compiler will
4370 go into an infinite loop when we use that.
4372 This method (blockage insn) may make worse code (then again
4373 it may not since calls are nearly blockages anyway), but at
4374 least it should work. */
4375 emit_insn (gen_blockage ());
4380 (define_insn "call_value_internal_symref"
4381 [(set (match_operand 0 "" "=rf")
4382 (call (mem:SI (match_operand:SI 1 "call_operand_address" ""))
4383 (match_operand 2 "" "i")))
4384 (clobber (reg:SI 2))
4385 (use (const_int 0))]
4386 ;;- Don't use operand 1 for most machines.
4387 "! TARGET_PORTABLE_RUNTIME"
4390 output_arg_descriptor (insn);
4391 return output_call (insn, operands[1]);
4393 [(set_attr "type" "call")
4394 (set (attr "length")
4395 ;; If we're sure that we can either reach the target or that the
4396 ;; linker can use a long-branch stub, then the length is 4 bytes.
4398 ;; For long-calls the length will be either 52 bytes (non-pic)
4399 ;; or 68 bytes (pic).
4400 ;; Else we have to use a long-call;
4401 (if_then_else (lt (plus (symbol_ref "total_code_bytes") (pc))
4404 (if_then_else (eq (symbol_ref "flag_pic")
;; Value-returning indirect call through %r22, normally routed via the
;; $$dyncall millicode helper.  The output template selects one of several
;; call sequences keyed off get_attr_length: fast/no-space-regs direct ble,
;; short bl, long non-PIC ldil/ble, portable-runtime, and PIC (label-relative
;; addil/ldo) cases.  Clobbers the return pointer (%r2).
;; NOTE(review): some lines of this pattern appear elided from this chunk.
4409 (define_insn "call_value_internal_reg"
4410 [(set (match_operand 0 "" "=rf")
4411 (call (mem:SI (reg:SI 22))
4412 (match_operand 1 "" "i")))
4413 (clobber (reg:SI 2))
4414 (use (const_int 1))]
4420 /* First the special case for kernels, level 0 systems, etc. */
4421 if (TARGET_NO_SPACE_REGS || TARGET_FAST_INDIRECT_CALLS)
4422 return \"ble 0(%%sr4,%%r22)\;copy %%r31,%%r2\";
4424 /* Now the normal case -- we can reach $$dyncall directly or
4425 we're sure that we can get there via a long-branch stub.
4427 No need to check target flags as the length uniquely identifies
4428 the remaining cases. */
4429 if (get_attr_length (insn) == 8)
4430 return \".CALL\\tARGW0=GR\;bl $$dyncall,%%r31\;copy %%r31,%%r2\";
4432 /* Long millicode call, but we are not generating PIC or portable runtime
4434 if (get_attr_length (insn) == 12)
4435 return \".CALL\\tARGW0=GR\;ldil L%%$$dyncall,%%r2\;ble R%%$$dyncall(%%sr4,%%r2)\;copy %%r31,%%r2\";
4437 /* Long millicode call for portable runtime. */
4438 if (get_attr_length (insn) == 20)
4439 return \"ldil L%%$$dyncall,%%r31\;ldo R%%$$dyncall(%%r31),%%r31\;blr 0,%%r2\;bv,n %%r0(%%r31)\;nop\";
4441 /* If we're generating PIC code. */
4442 xoperands[0] = operands[1];
4443 xoperands[1] = gen_label_rtx ();
4444 output_asm_insn (\"bl .+8,%%r1\", xoperands);
4445 output_asm_insn (\"addil L%%$$dyncall-%1,%%r1\", xoperands);
4446 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, \"L\",
4447 CODE_LABEL_NUMBER (xoperands[1]));
4448 output_asm_insn (\"ldo R%%$$dyncall-%1(%%r1),%%r1\", xoperands);
4449 output_asm_insn (\"blr 0,%%r2\", xoperands);
4450 output_asm_insn (\"bv,n %%r0(%%r1)\\n\\tnop\", xoperands);
4453 [(set_attr "type" "dyncall")
4454 (set (attr "length")
4456 ;; First NO_SPACE_REGS
4457 (ne (symbol_ref "TARGET_NO_SPACE_REGS || TARGET_FAST_INDIRECT_CALLS")
4461 ;; Target (or stub) within reach
4462 (and (lt (plus (symbol_ref "total_code_bytes") (pc))
4464 (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
4468 ;; Out of reach, but not PIC or PORTABLE_RUNTIME
4469 (and (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
4471 (eq (symbol_ref "flag_pic")
4475 (ne (symbol_ref "TARGET_PORTABLE_RUNTIME")
4479 ;; Out of range PIC case
;; Expander for calls whose return type is unknown at compile time:
;; emits the call, then copies each (result-register, slot) pair out of the
;; result block described by operands[2], then a blockage insn so the
;; optimizer cannot move code past the result copies (the call sets hard
;; registers it does not know about).
;; NOTE(review): some lines of this expander appear elided from this chunk.
4482 ;; Call subroutine returning any type.
4484 (define_expand "untyped_call"
4485 [(parallel [(call (match_operand 0 "" "")
4487 (match_operand 1 "" "")
4488 (match_operand 2 "" "")])]
4494 emit_call_insn (gen_call (operands[0], const0_rtx));
4496 for (i = 0; i < XVECLEN (operands[2], 0); i++)
4498 rtx set = XVECEXP (operands[2], 0, i);
4499 emit_move_insn (SET_DEST (set), SET_SRC (set));
4502 /* The optimizer does not know that the call sets the function value
4503 registers we stored in the result block. We avoid problems by
4504 claiming that all hard registers are used and clobbered at this
4506 emit_insn (gen_blockage ())
4514 [(set_attr "type" "move")
4515 (set_attr "length" "4")])
;; Zero-length marker insn placed at the start of a branch table; expands
;; to the .begin_brtab pseudo-op, which only GAS understands.
4517 ;; These are just placeholders so we know where branch tables
4519 (define_insn "begin_brtab"
4524 /* Only GAS actually supports this pseudo-op. */
4526 return \".begin_brtab\";
4530 [(set_attr "type" "move")
4531 (set_attr "length" "0")])
;; Zero-length marker insn placed at the end of a branch table; expands
;; to the GAS-only .end_brtab pseudo-op.
4533 (define_insn "end_brtab"
4538 /* Only GAS actually supports this pseudo-op. */
4540 return \".end_brtab\";
4544 [(set_attr "type" "move")
4545 (set_attr "length" "0")])
;; Jump through a register (single 4-byte branch); per the note below it is
;; assumed the target lies within the current function.
4547 ;;; Hope this is only within a function...
4548 (define_insn "indirect_jump"
4549 [(set (pc) (match_operand:SI 0 "register_operand" "r"))]
4552 [(set_attr "type" "branch")
4553 (set_attr "length" "4")])
;; Unsigned bit-field extract with constant position/width (both uint5),
;; implemented with a single extru.
4555 (define_insn "extzv"
4556 [(set (match_operand:SI 0 "register_operand" "=r")
4557 (zero_extract:SI (match_operand:SI 1 "register_operand" "r")
4558 (match_operand:SI 2 "uint5_operand" "")
4559 (match_operand:SI 3 "uint5_operand" "")))]
4561 "extru %1,%3+%2-1,%2,%0"
4562 [(set_attr "type" "shift")
4563 (set_attr "length" "4")])
;; Variable-position unsigned bit-field extract; position comes from the
;; "q" (shift-amount/SAR) register operand.
;; NOTE(review): the define_insn header and template lines appear elided
;; from this chunk.
4566 [(set (match_operand:SI 0 "register_operand" "=r")
4567 (zero_extract:SI (match_operand:SI 1 "register_operand" "r")
4569 (match_operand:SI 3 "register_operand" "q")))]
4572 [(set_attr "type" "shift")
4573 (set_attr "length" "4")])
;; Signed bit-field extract with constant position/width (both uint5),
;; implemented with a single extrs.
;; NOTE(review): the define_insn header line appears elided from this chunk.
4576 [(set (match_operand:SI 0 "register_operand" "=r")
4577 (sign_extract:SI (match_operand:SI 1 "register_operand" "r")
4578 (match_operand:SI 2 "uint5_operand" "")
4579 (match_operand:SI 3 "uint5_operand" "")))]
4581 "extrs %1,%3+%2-1,%2,%0"
4582 [(set_attr "type" "shift")
4583 (set_attr "length" "4")])
;; Variable-position signed bit-field extract; position comes from the
;; "q" (shift-amount/SAR) register operand.
;; NOTE(review): the define_insn header and template lines appear elided
;; from this chunk.
4586 [(set (match_operand:SI 0 "register_operand" "=r")
4587 (sign_extract:SI (match_operand:SI 1 "register_operand" "r")
4589 (match_operand:SI 3 "register_operand" "q")))]
4592 [(set_attr "type" "shift")
4593 (set_attr "length" "4")])
;; Bit-field insertion at a constant position/width: dep for a register
;; source, depi for a 5-bit immediate ("L" constraint) source.
;; NOTE(review): the define_insn header line appears elided from this chunk.
4596 [(set (zero_extract:SI (match_operand:SI 0 "register_operand" "+r,r")
4597 (match_operand:SI 1 "uint5_operand" "")
4598 (match_operand:SI 2 "uint5_operand" ""))
4599 (match_operand:SI 3 "arith5_operand" "r,L"))]
4602 dep %3,%2+%1-1,%1,%0
4603 depi %3,%2+%1-1,%1,%0"
4604 [(set_attr "type" "shift,shift")
4605 (set_attr "length" "4,4")])
;; Insert a constant of the form 1...1xxxx in one depi: the condition checks
;; that all bits above the low 4 are ones within the field, and the template
;; rewrites operand 3 to (val & 0xf) - 0x10 so it fits depi's signed 5-bit
;; immediate.
4607 ;; Optimize insertion of const_int values of type 1...1xxxx.
4609 [(set (zero_extract:SI (match_operand:SI 0 "register_operand" "+r")
4610 (match_operand:SI 1 "uint5_operand" "")
4611 (match_operand:SI 2 "uint5_operand" ""))
4612 (match_operand:SI 3 "const_int_operand" ""))]
4613 "(INTVAL (operands[3]) & 0x10) != 0 &&
4614 (~INTVAL (operands[3]) & ((1L << INTVAL (operands[1])) - 1) & ~0xf) == 0"
4617 operands[3] = GEN_INT ((INTVAL (operands[3]) & 0xf) - 0x10);
4618 return \"depi %3,%2+%1-1,%1,%0\";
4620 [(set_attr "type" "shift")
4621 (set_attr "length" "4")])
;; Combined decrement-and-branch loop-test insn (created by combine from
;; reversed loops).  Three alternatives: counter in a general register,
;; in an FP register, or in memory; the latter two need reload goo, hence
;; the "m" constraints, the "!" discouragement, and the scratch clobber.
;; Assembly comes from output_dbra; the length attribute accounts for
;; short vs. long branch distances plus the extra reload insns.
;; NOTE(review): some lines of this pattern appear elided from this chunk.
4623 ;; This insn is used for some loop tests, typically loops reversed when
4624 ;; strength reduction is used. It is actually created when the instruction
4625 ;; combination phase combines the special loop test. Since this insn
4626 ;; is both a jump insn and has an output, it must deal with its own
4627 ;; reloads, hence the `m' constraints. The `!' constraints direct reload
4628 ;; to not choose the register alternatives in the event a reload is needed.
4629 (define_insn "decrement_and_branch_until_zero"
4632 (match_operator 2 "comparison_operator"
4633 [(plus:SI (match_operand:SI 0 "register_operand" "+!r,!*f,!*m")
4634 (match_operand:SI 1 "int5_operand" "L,L,L"))
4636 (label_ref (match_operand 3 "" ""))
4639 (plus:SI (match_dup 0) (match_dup 1)))
4640 (clobber (match_scratch:SI 4 "=X,r,r"))]
4642 "* return output_dbra (operands, insn, which_alternative); "
4643 ;; Do not expect to understand this the first time through.
4644 [(set_attr "type" "cbranch,multi,multi")
4645 (set (attr "length")
4646 (if_then_else (eq_attr "alternative" "0")
4647 ;; Loop counter in register case
4648 ;; Short branch has length of 4
4649 ;; Long branch has length of 8
4650 (if_then_else (lt (abs (minus (match_dup 3) (plus (pc) (const_int 8))))
4655 ;; Loop counter in FP reg case.
4656 ;; Extra goo to deal with additional reload insns.
4657 (if_then_else (eq_attr "alternative" "1")
4658 (if_then_else (lt (match_dup 3) (pc))
4660 (lt (abs (minus (match_dup 3) (plus (pc) (const_int 24))))
4665 (lt (abs (minus (match_dup 3) (plus (pc) (const_int 8))))
4669 ;; Loop counter in memory case.
4670 ;; Extra goo to deal with additional reload insns.
4671 (if_then_else (lt (match_dup 3) (pc))
4673 (lt (abs (minus (match_dup 3) (plus (pc) (const_int 12))))
4678 (lt (abs (minus (match_dup 3) (plus (pc) (const_int 8))))
4681 (const_int 16))))))])
;; Conditional branch combined with a register move (movb-style): compares
;; operand 1 against zero and also copies it into operand 0, which may be a
;; general register, FP register, memory, or SAR ("q") — the non-register
;; alternatives need reload goo.  Assembly comes from output_movb with the
;; negation flag 0.
;; NOTE(review): the define_insn header line and some body lines appear
;; elided from this chunk.
4686 (match_operator 2 "movb_comparison_operator"
4687 [(match_operand:SI 1 "register_operand" "r,r,r,r") (const_int 0)])
4688 (label_ref (match_operand 3 "" ""))
4690 (set (match_operand:SI 0 "register_operand" "=!r,!*f,!*m,!*q")
4693 "* return output_movb (operands, insn, which_alternative, 0); "
4694 ;; Do not expect to understand this the first time through.
4695 [(set_attr "type" "cbranch,multi,multi,multi")
4696 (set (attr "length")
4697 (if_then_else (eq_attr "alternative" "0")
4698 ;; Loop counter in register case
4699 ;; Short branch has length of 4
4700 ;; Long branch has length of 8
4701 (if_then_else (lt (abs (minus (match_dup 3) (plus (pc) (const_int 8))))
4706 ;; Loop counter in FP reg case.
4707 ;; Extra goo to deal with additional reload insns.
4708 (if_then_else (eq_attr "alternative" "1")
4709 (if_then_else (lt (match_dup 3) (pc))
4711 (lt (abs (minus (match_dup 3) (plus (pc) (const_int 12))))
4716 (lt (abs (minus (match_dup 3) (plus (pc) (const_int 8))))
4720 ;; Loop counter in memory or sar case.
4721 ;; Extra goo to deal with additional reload insns.
4723 (lt (abs (minus (match_dup 3) (plus (pc) (const_int 8))))
4726 (const_int 12)))))])
;; Same movb-style branch-and-copy pattern as above but with the branch
;; sense negated (output_movb is called with negation flag 1).
;; NOTE(review): the define_insn header line and some body lines appear
;; elided from this chunk.
4728 ;; Handle negated branch.
4732 (match_operator 2 "movb_comparison_operator"
4733 [(match_operand:SI 1 "register_operand" "r,r,r,r") (const_int 0)])
4735 (label_ref (match_operand 3 "" ""))))
4736 (set (match_operand:SI 0 "register_operand" "=!r,!*f,!*m,!*q")
4739 "* return output_movb (operands, insn, which_alternative, 1); "
4740 ;; Do not expect to understand this the first time through.
4741 [(set_attr "type" "cbranch,multi,multi,multi")
4742 (set (attr "length")
4743 (if_then_else (eq_attr "alternative" "0")
4744 ;; Loop counter in register case
4745 ;; Short branch has length of 4
4746 ;; Long branch has length of 8
4747 (if_then_else (lt (abs (minus (match_dup 3) (plus (pc) (const_int 8))))
4752 ;; Loop counter in FP reg case.
4753 ;; Extra goo to deal with additional reload insns.
4754 (if_then_else (eq_attr "alternative" "1")
4755 (if_then_else (lt (match_dup 3) (pc))
4757 (lt (abs (minus (match_dup 3) (plus (pc) (const_int 12))))
4762 (lt (abs (minus (match_dup 3) (plus (pc) (const_int 8))))
4766 ;; Loop counter in memory or SAR case.
4767 ;; Extra goo to deal with additional reload insns.
4769 (lt (abs (minus (match_dup 3) (plus (pc) (const_int 8))))
4772 (const_int 12)))))])
;; Branch paired with an add executed in the delay slot (addb-style);
;; valid only when the destination equals one of the addends.  Assembly
;; comes from output_parallel_addb, keyed off the computed length.
;; NOTE(review): the define_insn header line and some attribute lines
;; appear elided from this chunk.
4774 ;; The next several patterns (parallel_addb, parallel_movb, fmpyadd and
4775 ;; fmpysub aren't currently used by the FSF sources, but will be soon.
4777 ;; They're in the FSF tree for documentation and to make Cygnus<->FSF
4780 [(set (pc) (label_ref (match_operand 3 "" "" )))
4781 (set (match_operand:SI 0 "register_operand" "=r")
4782 (plus:SI (match_operand:SI 1 "register_operand" "r")
4783 (match_operand:SI 2 "ireg_or_int5_operand" "rL")))]
4784 "(reload_completed && operands[0] == operands[1]) || operands[0] == operands[2]"
4787 return output_parallel_addb (operands, get_attr_length (insn));
4789 [(set_attr "type" "parallel_branch")
4790 (set (attr "length")
4791 (if_then_else (lt (abs (minus (match_dup 3) (plus (pc) (const_int 8))))
;; Branch paired with an SFmode register/immediate move in the delay slot;
;; assembly from output_parallel_movb.
;; NOTE(review): the define_insn header line appears elided from this chunk.
4797 [(set (pc) (label_ref (match_operand 2 "" "" )))
4798 (set (match_operand:SF 0 "register_operand" "=r")
4799 (match_operand:SF 1 "ireg_or_int5_operand" "rL"))]
4803 return output_parallel_movb (operands, get_attr_length (insn));
4805 [(set_attr "type" "parallel_branch")
4806 (set (attr "length")
4807 (if_then_else (lt (abs (minus (match_dup 2) (plus (pc) (const_int 8))))
;; Branch paired with an SImode register/immediate move in the delay slot;
;; assembly from output_parallel_movb.
;; NOTE(review): the define_insn header line appears elided from this chunk.
4813 [(set (pc) (label_ref (match_operand 2 "" "" )))
4814 (set (match_operand:SI 0 "register_operand" "=r")
4815 (match_operand:SI 1 "ireg_or_int5_operand" "rL"))]
4819 return output_parallel_movb (operands, get_attr_length (insn));
4821 [(set_attr "type" "parallel_branch")
4822 (set (attr "length")
4823 (if_then_else (lt (abs (minus (match_dup 2) (plus (pc) (const_int 8))))
;; Branch paired with an HImode register/immediate move in the delay slot;
;; assembly from output_parallel_movb.
;; NOTE(review): the define_insn header line appears elided from this chunk.
4829 [(set (pc) (label_ref (match_operand 2 "" "" )))
4830 (set (match_operand:HI 0 "register_operand" "=r")
4831 (match_operand:HI 1 "ireg_or_int5_operand" "rL"))]
4835 return output_parallel_movb (operands, get_attr_length (insn));
4837 [(set_attr "type" "parallel_branch")
4838 (set (attr "length")
4839 (if_then_else (lt (abs (minus (match_dup 2) (plus (pc) (const_int 8))))
;; Branch paired with a QImode register/immediate move in the delay slot;
;; assembly from output_parallel_movb.
;; NOTE(review): the define_insn header line appears elided from this chunk.
4845 [(set (pc) (label_ref (match_operand 2 "" "" )))
4846 (set (match_operand:QI 0 "register_operand" "=r")
4847 (match_operand:QI 1 "ireg_or_int5_operand" "rL"))]
4851 return output_parallel_movb (operands, get_attr_length (insn));
4853 [(set_attr "type" "parallel_branch")
4854 (set (attr "length")
4855 (if_then_else (lt (abs (minus (match_dup 2) (plus (pc) (const_int 8))))
;; Fused multiply + add pair issued as a single fmpyadd (multiply listed
;; first).  Requires TARGET_SNAKE, hard float, post-reload, and operands
;; validated by fmpyaddoperands; the template picks dbl/sgl by mode and
;; orders the add operands so the destination matches.
;; NOTE(review): the define_insn header line appears elided from this chunk.
4861 [(set (match_operand 0 "register_operand" "=f")
4862 (mult (match_operand 1 "register_operand" "f")
4863 (match_operand 2 "register_operand" "f")))
4864 (set (match_operand 3 "register_operand" "+f")
4865 (plus (match_operand 4 "register_operand" "f")
4866 (match_operand 5 "register_operand" "f")))]
4867 "TARGET_SNAKE && ! TARGET_SOFT_FLOAT
4868 && reload_completed && fmpyaddoperands (operands)"
4871 if (GET_MODE (operands[0]) == DFmode)
4873 if (rtx_equal_p (operands[3], operands[5]))
4874 return \"fmpyadd,dbl %1,%2,%0,%4,%3\";
4876 return \"fmpyadd,dbl %1,%2,%0,%5,%3\";
4880 if (rtx_equal_p (operands[3], operands[5]))
4881 return \"fmpyadd,sgl %1,%2,%0,%4,%3\";
4883 return \"fmpyadd,sgl %1,%2,%0,%5,%3\";
4886 [(set_attr "type" "fpalu")
4887 (set_attr "length" "4")])
;; Same fmpyadd fusion with the constituent sets in the opposite order
;; (add listed first); output template is identical.
;; NOTE(review): the define_insn header line appears elided from this chunk.
4890 [(set (match_operand 3 "register_operand" "+f")
4891 (plus (match_operand 4 "register_operand" "f")
4892 (match_operand 5 "register_operand" "f")))
4893 (set (match_operand 0 "register_operand" "=f")
4894 (mult (match_operand 1 "register_operand" "f")
4895 (match_operand 2 "register_operand" "f")))]
4896 "TARGET_SNAKE && ! TARGET_SOFT_FLOAT
4897 && reload_completed && fmpyaddoperands (operands)"
4900 if (GET_MODE (operands[0]) == DFmode)
4902 if (rtx_equal_p (operands[3], operands[5]))
4903 return \"fmpyadd,dbl %1,%2,%0,%4,%3\";
4905 return \"fmpyadd,dbl %1,%2,%0,%5,%3\";
4909 if (rtx_equal_p (operands[3], operands[5]))
4910 return \"fmpyadd,sgl %1,%2,%0,%4,%3\";
4912 return \"fmpyadd,sgl %1,%2,%0,%5,%3\";
4915 [(set_attr "type" "fpalu")
4916 (set_attr "length" "4")])
;; Fused multiply + subtract pair issued as a single fmpysub (multiply
;; listed first); gated by TARGET_SNAKE, hard float, post-reload, and
;; fmpysuboperands.
;; NOTE(review): the define_insn header line appears elided from this chunk.
4919 [(set (match_operand 0 "register_operand" "=f")
4920 (mult (match_operand 1 "register_operand" "f")
4921 (match_operand 2 "register_operand" "f")))
4922 (set (match_operand 3 "register_operand" "+f")
4923 (minus (match_operand 4 "register_operand" "f")
4924 (match_operand 5 "register_operand" "f")))]
4925 "TARGET_SNAKE && ! TARGET_SOFT_FLOAT
4926 && reload_completed && fmpysuboperands (operands)"
4929 if (GET_MODE (operands[0]) == DFmode)
4930 return \"fmpysub,dbl %1,%2,%0,%5,%3\";
4932 return \"fmpysub,sgl %1,%2,%0,%5,%3\";
4934 [(set_attr "type" "fpalu")
4935 (set_attr "length" "4")])
;; Same fmpysub fusion with the constituent sets in the opposite order
;; (subtract listed first); output template is identical.
;; NOTE(review): the define_insn header line appears elided from this chunk.
4938 [(set (match_operand 3 "register_operand" "+f")
4939 (minus (match_operand 4 "register_operand" "f")
4940 (match_operand 5 "register_operand" "f")))
4941 (set (match_operand 0 "register_operand" "=f")
4942 (mult (match_operand 1 "register_operand" "f")
4943 (match_operand 2 "register_operand" "f")))]
4944 "TARGET_SNAKE && ! TARGET_SOFT_FLOAT
4945 && reload_completed && fmpysuboperands (operands)"
4948 if (GET_MODE (operands[0]) == DFmode)
4949 return \"fmpysub,dbl %1,%2,%0,%5,%3\";
4951 return \"fmpysub,sgl %1,%2,%0,%5,%3\";
4953 [(set_attr "type" "fpalu")
4954 (set_attr "length" "4")])
;; Reload-cleanup peephole: a DFmode store of a register followed by a
;; same-class register copy.  Emits the (FP or integer) double-word store,
;; then the copy unless source and destination are already equal.
;; NOTE(review): the define_peephole header and some body lines appear
;; elided from this chunk.
4956 ;; Clean up turds left by reload.
4958 [(set (match_operand 0 "reg_or_nonsymb_mem_operand" "")
4959 (match_operand 1 "register_operand" "fr"))
4960 (set (match_operand 2 "register_operand" "fr")
4962 "! TARGET_SOFT_FLOAT
4963 && GET_CODE (operands[0]) == MEM
4964 && ! MEM_VOLATILE_P (operands[0])
4965 && GET_MODE (operands[0]) == GET_MODE (operands[1])
4966 && GET_MODE (operands[0]) == GET_MODE (operands[2])
4967 && GET_MODE (operands[0]) == DFmode
4968 && GET_CODE (operands[1]) == REG
4969 && GET_CODE (operands[2]) == REG
4970 && ! side_effects_p (XEXP (operands[0], 0))
4971 && REGNO_REG_CLASS (REGNO (operands[1]))
4972 == REGNO_REG_CLASS (REGNO (operands[2]))"
4977 if (FP_REG_P (operands[1]))
4978 output_asm_insn (output_fp_move_double (operands), operands);
4980 output_asm_insn (output_move_double (operands), operands);
4982 if (rtx_equal_p (operands[1], operands[2]))
4985 xoperands[0] = operands[2];
4986 xoperands[1] = operands[1];
4988 if (FP_REG_P (xoperands[1]))
4989 output_asm_insn (output_fp_move_double (xoperands), xoperands);
4991 output_asm_insn (output_move_double (xoperands), xoperands);
;; Reload-cleanup peephole, load flavor: a DFmode load into a register
;; followed by a same-class register copy; emits the double-word load then
;; the copy.
;; NOTE(review): the define_peephole header and some body lines appear
;; elided from this chunk.
4997 [(set (match_operand 0 "register_operand" "fr")
4998 (match_operand 1 "reg_or_nonsymb_mem_operand" ""))
4999 (set (match_operand 2 "register_operand" "fr")
5001 "! TARGET_SOFT_FLOAT
5002 && GET_CODE (operands[1]) == MEM
5003 && ! MEM_VOLATILE_P (operands[1])
5004 && GET_MODE (operands[0]) == GET_MODE (operands[1])
5005 && GET_MODE (operands[0]) == GET_MODE (operands[2])
5006 && GET_MODE (operands[0]) == DFmode
5007 && GET_CODE (operands[0]) == REG
5008 && GET_CODE (operands[2]) == REG
5009 && ! side_effects_p (XEXP (operands[1], 0))
5010 && REGNO_REG_CLASS (REGNO (operands[0]))
5011 == REGNO_REG_CLASS (REGNO (operands[2]))"
5016 if (FP_REG_P (operands[0]))
5017 output_asm_insn (output_fp_move_double (operands), operands);
5019 output_asm_insn (output_move_double (operands), operands);
5021 xoperands[0] = operands[2];
5022 xoperands[1] = operands[0];
5024 if (FP_REG_P (xoperands[1]))
5025 output_asm_insn (output_fp_move_double (xoperands), xoperands);
5027 output_asm_insn (output_move_double (xoperands), xoperands);
;; Flush two data-cache lines (fdc on each address, then sync); used by the
;; nested-function trampoline code per the comment below.
5032 ;; Flush the I and D cache line found at the address in operand 0.
5033 ;; This is used by the trampoline code for nested functions.
5034 ;; So long as the trampoline itself is less than 32 bytes this
5037 (define_insn "dcacheflush"
5038 [(unspec_volatile [(const_int 1)] 0)
5039 (use (mem:SI (match_operand:SI 0 "register_operand" "r")))
5040 (use (mem:SI (match_operand:SI 1 "register_operand" "r")))]
5042 "fdc 0(%0)\;fdc 0(%1)\;sync"
5043 [(set_attr "type" "multi")
5044 (set_attr "length" "12")])
;; Flush two instruction-cache lines: saves %sr0, loads the space id for
;; the address (ldsid/mtsp), issues fic on each address, syncs, restores
;; %sr0, and pads with nops.  Needs two earlyclobber scratch registers.
5046 (define_insn "icacheflush"
5047 [(unspec_volatile [(const_int 2)] 0)
5048 (use (mem:SI (match_operand:SI 0 "register_operand" "r")))
5049 (use (mem:SI (match_operand:SI 1 "register_operand" "r")))
5050 (use (match_operand:SI 2 "register_operand" "r"))
5051 (clobber (match_operand:SI 3 "register_operand" "=&r"))
5052 (clobber (match_operand:SI 4 "register_operand" "=&r"))]
5054 "mfsp %%sr0,%4\;ldsid (%2),%3\;mtsp %3,%%sr0\;fic 0(%%sr0,%0)\;fic 0(%%sr0,%1)\;sync\;mtsp %4,%%sr0\;nop\;nop\;nop\;nop\;nop\;nop"
5055 [(set_attr "type" "multi")
5056 (set_attr "length" "52")])
;; Call an out-of-line prologue millicode routine (__outline_prologue or
;; __outline_prologue_fp when a frame pointer is needed); imports the
;; routine and uses either a portable-runtime ldil/ble sequence or a plain
;; bl.  Clobbers the listed caller registers.
;; NOTE(review): some lines of this pattern appear elided from this chunk.
5058 ;; An out-of-line prologue.
5059 (define_insn "outline_prologue_call"
5060 [(unspec_volatile [(const_int 0)] 0)
5061 (clobber (reg:SI 31))
5062 (clobber (reg:SI 22))
5063 (clobber (reg:SI 21))
5064 (clobber (reg:SI 20))
5065 (clobber (reg:SI 19))
5066 (clobber (reg:SI 1))]
5070 extern int frame_pointer_needed;
5072 /* We need two different versions depending on whether or not we
5073 need a frame pointer. Also note that we return to the instruction
5074 immediately after the branch rather than two instructions after the
5075 break as normally is the case. */
5076 if (frame_pointer_needed)
5078 /* Must import the magic millicode routine(s). */
5079 output_asm_insn (\".IMPORT __outline_prologue_fp,MILLICODE\", NULL);
5081 if (TARGET_PORTABLE_RUNTIME)
5083 output_asm_insn (\"ldil L'__outline_prologue_fp,%%r31\", NULL);
5084 output_asm_insn (\"ble,n R'__outline_prologue_fp(%%sr0,%%r31)\",
5088 output_asm_insn (\"bl,n __outline_prologue_fp,%%r31\", NULL);
5092 /* Must import the magic millicode routine(s). */
5093 output_asm_insn (\".IMPORT __outline_prologue,MILLICODE\", NULL);
5095 if (TARGET_PORTABLE_RUNTIME)
5097 output_asm_insn (\"ldil L'__outline_prologue,%%r31\", NULL);
5098 output_asm_insn (\"ble,n R'__outline_prologue(%%sr0,%%r31)\", NULL);
5101 output_asm_insn (\"bl,n __outline_prologue,%%r31\", NULL);
5105 [(set_attr "type" "multi")
5106 (set_attr "length" "8")])
;; Call an out-of-line epilogue millicode routine (__outline_epilogue or
;; __outline_epilogue_fp); structure mirrors outline_prologue_call, with
;; %r2 additionally clobbered.
;; NOTE(review): some lines of this pattern appear elided from this chunk.
5108 ;; An out-of-line epilogue.
5109 (define_insn "outline_epilogue_call"
5110 [(unspec_volatile [(const_int 1)] 0)
5113 (clobber (reg:SI 31))
5114 (clobber (reg:SI 22))
5115 (clobber (reg:SI 21))
5116 (clobber (reg:SI 20))
5117 (clobber (reg:SI 19))
5118 (clobber (reg:SI 2))
5119 (clobber (reg:SI 1))]
5123 extern int frame_pointer_needed;
5125 /* We need two different versions depending on whether or not we
5126 need a frame pointer. Also note that we return to the instruction
5127 immediately after the branch rather than two instructions after the
5128 break as normally is the case. */
5129 if (frame_pointer_needed)
5131 /* Must import the magic millicode routine. */
5132 output_asm_insn (\".IMPORT __outline_epilogue_fp,MILLICODE\", NULL);
5134 /* The out-of-line prologue will make sure we return to the right
5136 if (TARGET_PORTABLE_RUNTIME)
5138 output_asm_insn (\"ldil L'__outline_epilogue_fp,%%r31\", NULL);
5139 output_asm_insn (\"ble,n R'__outline_epilogue_fp(%%sr0,%%r31)\",
5143 output_asm_insn (\"bl,n __outline_epilogue_fp,%%r31\", NULL);
5147 /* Must import the magic millicode routine. */
5148 output_asm_insn (\".IMPORT __outline_epilogue,MILLICODE\", NULL);
5150 /* The out-of-line prologue will make sure we return to the right
5152 if (TARGET_PORTABLE_RUNTIME)
5154 output_asm_insn (\"ldil L'__outline_epilogue,%%r31\", NULL);
5155 output_asm_insn (\"ble,n R'__outline_epilogue(%%sr0,%%r31)\", NULL);
5158 output_asm_insn (\"bl,n __outline_epilogue,%%r31\", NULL);
5162 [(set_attr "type" "multi")
5163 (set_attr "length" "8")])
;; Expander: canonicalize a function pointer so two pointers to the same
;; function compare equal.  Copies the input into %r26, allocates a scratch
;; pseudo for operand 2, and emits the parallel that invokes the
;; $$sh_func_adrs helper (see the following insn), with the result in %r29.
;; NOTE(review): some lines of this expander appear elided from this chunk.
5165 ;; Given a function pointer, canonicalize it so it can be
5166 ;; reliably compared to another function pointer.
5167 (define_expand "canonicalize_funcptr_for_compare"
5168 [(set (reg:SI 26) (match_operand:SI 1 "register_operand" ""))
5169 (parallel [(set (reg:SI 29) (unspec:SI [(reg:SI 26)] 0))
5170 (clobber (match_dup 2))
5171 (clobber (reg:SI 26))
5172 (clobber (reg:SI 22))
5173 (clobber (reg:SI 31))])
5174 (set (match_operand:SI 0 "register_operand" "")
5176 "! TARGET_PORTABLE_RUNTIME"
5179 operands[2] = gen_reg_rtx (SImode);
5180 if (GET_CODE (operands[1]) != REG)
5182 rtx tmp = gen_reg_rtx (Pmode);
5183 emit_move_insn (tmp, operands[1]);
;; Insn implementing the canonicalization above: copies %r26 to %r29, then
;; uses the low two bits and a 4096 comparison to decide whether %r26 can be
;; used unchanged; otherwise calls the $$sh_func_adrs millicode routine.
;; The inline branch offsets (.+N) vary with get_attr_length, which the
;; attribute expression below computes from reachability and target flags.
;; NOTE(review): the define_insn header line and some attribute lines
;; appear elided from this chunk.
5189 [(set (reg:SI 29) (unspec:SI [(reg:SI 26)] 0))
5190 (clobber (match_operand:SI 0 "register_operand" "=a"))
5191 (clobber (reg:SI 26))
5192 (clobber (reg:SI 22))
5193 (clobber (reg:SI 31))]
5197 /* Must import the magic millicode routine. */
5198 output_asm_insn (\".IMPORT $$sh_func_adrs,MILLICODE\", NULL);
5200 /* This is absolutely amazing.
5202 First, copy our input parameter into %r29 just in case we don't
5203 need to call $$sh_func_adrs. */
5204 output_asm_insn (\"copy %%r26,%%r29\", NULL);
5206 /* Next, examine the low two bits in %r26, if they aren't 0x2, then
5207 we use %r26 unchanged. */
5208 if (get_attr_length (insn) == 32)
5209 output_asm_insn (\"extru %%r26,31,2,%%r31\;comib,<>,n 2,%%r31,.+24\", NULL);
5210 else if (get_attr_length (insn) == 40)
5211 output_asm_insn (\"extru %%r26,31,2,%%r31\;comib,<>,n 2,%%r31,.+32\", NULL);
5212 else if (get_attr_length (insn) == 44)
5213 output_asm_insn (\"extru %%r26,31,2,%%r31\;comib,<>,n 2,%%r31,.+36\", NULL);
5215 output_asm_insn (\"extru %%r26,31,2,%%r31\;comib,<>,n 2,%%r31,.+20\", NULL);
5217 /* Next, compare %r26 with 4096, if %r26 is less than or equal to
5218 4096, then we use %r26 unchanged. */
5219 if (get_attr_length (insn) == 32)
5220 output_asm_insn (\"ldi 4096,%%r31\;comb,<<,n %%r26,%%r31,.+16\", NULL);
5221 else if (get_attr_length (insn) == 40)
5222 output_asm_insn (\"ldi 4096,%%r31\;comb,<<,n %%r26,%%r31,.+24\", NULL);
5223 else if (get_attr_length (insn) == 44)
5224 output_asm_insn (\"ldi 4096,%%r31\;comb,<<,n %%r26,%%r31,.+28\", NULL);
5226 output_asm_insn (\"ldi 4096,%%r31\;comb,<<,n %%r26,%%r31,.+12\", NULL);
5228 /* Else call $$sh_func_adrs to extract the function's real add24. */
5229 return output_millicode_call (insn,
5230 gen_rtx_SYMBOL_REF (SImode, \"$$sh_func_adrs\"));
5232 [(set_attr "type" "multi")
5233 (set (attr "length")
5235 ;; Target (or stub) within reach
5236 (and (lt (plus (symbol_ref "total_code_bytes") (pc))
5238 (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
5243 (ne (symbol_ref "TARGET_NO_SPACE_REGS || TARGET_FAST_INDIRECT_CALLS")
5247 ;; Out of reach, but not PIC or PORTABLE_RUNTIME
5248 ;; same as NO_SPACE_REGS code
5249 (and (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
5251 (eq (symbol_ref "flag_pic")
5256 (ne (symbol_ref "TARGET_PORTABLE_RUNTIME")
5260 ;; Out of range and PIC
5263 ;; On the PA, the PIC register is call clobbered, so it must
5264 ;; be saved & restored around calls by the caller. If the call
5265 ;; doesn't return normally (nonlocal goto, or an exception is
5266 ;; thrown), then the code at the exception handler label must
5267 ;; restore the PIC register.
5268 (define_expand "exception_receiver"
5270 "!TARGET_PORTABLE_RUNTIME && flag_pic"
5273 /* Load the PIC register from the stack slot (in our caller's
5275 emit_move_insn (pic_offset_table_rtx,
5276 gen_rtx_MEM (SImode, plus_constant (stack_pointer_rtx, -32)));
5277 emit_insn (gen_rtx_USE (VOIDmode, pic_offset_table_rtx));
5278 emit_insn (gen_blockage ());