1 ;;- Machine description for HP PA-RISC architecture for GNU C compiler
2 ;; Copyright (C) 1992, 93-98, 1999 Free Software Foundation, Inc.
3 ;; Contributed by the Center for Software Science at the University
6 ;; This file is part of GNU CC.
8 ;; GNU CC is free software; you can redistribute it and/or modify
9 ;; it under the terms of the GNU General Public License as published by
10 ;; the Free Software Foundation; either version 2, or (at your option)
13 ;; GNU CC is distributed in the hope that it will be useful,
14 ;; but WITHOUT ANY WARRANTY; without even the implied warranty of
15 ;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 ;; GNU General Public License for more details.
18 ;; You should have received a copy of the GNU General Public License
19 ;; along with GNU CC; see the file COPYING. If not, write to
20 ;; the Free Software Foundation, 59 Temple Place - Suite 330,
21 ;; Boston, MA 02111-1307, USA.
23 ;; This gcc Version 2 machine description is inspired by sparc.md and
26 ;;- See file "rtl.def" for documentation on define_insn, match_*, et. al.
28 ;; Insn type. Used to default other attribute values.
30 ;; type "unary" insns have one input operand (1) and one output operand (0)
31 ;; type "binary" insns have two input operands (1,2) and one output (0)
34 "move,unary,binary,shift,nullshift,compare,load,store,uncond_branch,branch,cbranch,fbranch,call,dyncall,fpload,fpstore,fpalu,fpcc,fpmulsgl,fpmuldbl,fpdivsgl,fpdivdbl,fpsqrtsgl,fpsqrtdbl,multi,milli,parallel_branch"
35 (const_string "binary"))
;; Classification consumed by the machine-specific insn-combining pass;
;; "none" (the default) marks insns that take part in no such pairing.
37 (define_attr "pa_combine_type"
38 "fmpy,faddsub,uncond_branch,addmove,none"
39 (const_string "none"))
41 ;; Processor type (for scheduling, not code generation) -- this attribute
42 ;; must exactly match the processor_type enumeration in pa.h.
44 ;; FIXME: Add 800 scheduling for completeness?
;; The default is the C expression pa_cpu_attr (presumably driven by the
;; scheduling command-line option -- confirm in pa.h).
46 (define_attr "cpu" "700,7100,7100LC,7200,8000" (const (symbol_ref "pa_cpu_attr")))
48 ;; Length (in # of bytes).
;; Loads/stores through a symbolic address need an extra insn (8 bytes);
;; binary/shift ops with a non-arith_operand expand to longer sequences.
;; NOTE(review): the cond's trailing default expression and closing parens
;; appear to be missing from this extract -- confirm against full source.
49 (define_attr "length" ""
50 (cond [(eq_attr "type" "load,fpload")
51 (if_then_else (match_operand 1 "symbolic_memory_operand" "")
52 (const_int 8) (const_int 4))
54 (eq_attr "type" "store,fpstore")
55 (if_then_else (match_operand 0 "symbolic_memory_operand" "")
56 (const_int 8) (const_int 4))
58 (eq_attr "type" "binary,shift,nullshift")
59 (if_then_else (match_operand 2 "arith_operand" "")
60 (const_int 4) (const_int 12))
62 (eq_attr "type" "move,unary,shift,nullshift")
63 (if_then_else (match_operand 1 "arith_operand" "")
64 (const_int 4) (const_int 8))]
;; Treat inline asm as a single 4-byte insn of type "multi" (which the
;; delay-slot attributes below exclude from delay slots).
68 (define_asm_attributes
69 [(set_attr "length" "4")
70 (set_attr "type" "multi")])
72 ;; Attributes for instruction and branch scheduling
74 ;; For conditional branches.
;; Only single 4-byte insns that are not themselves branches/calls/multis
;; may occupy a branch delay slot.
;; NOTE(review): the (const_string "true") arm of this if_then_else is
;; missing from this extract.
75 (define_attr "in_branch_delay" "false,true"
76 (if_then_else (and (eq_attr "type" "!uncond_branch,branch,cbranch,fbranch,call,dyncall,multi,milli,parallel_branch")
77 (eq_attr "length" "4"))
79 (const_string "false")))
81 ;; Disallow instructions which use the FPU since they will tie up the FPU
82 ;; even if the instruction is nullified.
;; Same as in_branch_delay but additionally excludes all fp insn types.
;; NOTE(review): the "true" arm of this if_then_else is missing here.
83 (define_attr "in_nullified_branch_delay" "false,true"
84 (if_then_else (and (eq_attr "type" "!uncond_branch,branch,cbranch,fbranch,call,dyncall,multi,milli,fpcc,fpalu,fpmulsgl,fpmuldbl,fpdivsgl,fpdivdbl,fpsqrtsgl,fpsqrtdbl,parallel_branch")
85 (eq_attr "length" "4"))
87 (const_string "false")))
89 ;; For calls and millicode calls. Allow unconditional branches in the
;; delay slot only when TARGET_JUMP_IN_DELAY is set.
;; NOTE(review): several interior lines of this cond (the "true" results)
;; are missing from this extract.
91 (define_attr "in_call_delay" "false,true"
92 (cond [(and (eq_attr "type" "!uncond_branch,branch,cbranch,fbranch,call,dyncall,multi,milli,parallel_branch")
93 (eq_attr "length" "4"))
95 (eq_attr "type" "uncond_branch")
96 (if_then_else (ne (symbol_ref "TARGET_JUMP_IN_DELAY")
99 (const_string "false"))]
100 (const_string "false")))
103 ;; Call delay slot description.
;; One delay slot, no annulling in either direction (the two nils).
104 (define_delay (eq_attr "type" "call")
105 [(eq_attr "in_call_delay" "true") (nil) (nil)])
107 ;; millicode call delay slot description. Note it disallows delay slot
108 ;; when TARGET_PORTABLE_RUNTIME is true.
;; NOTE(review): the closing "(nil) (nil)])" of this define_delay appears
;; to be missing from this extract.
109 (define_delay (eq_attr "type" "milli")
110 [(and (eq_attr "in_call_delay" "true")
111 (eq (symbol_ref "TARGET_PORTABLE_RUNTIME") (const_int 0)))
114 ;; Return and other similar instructions.
;; Plain (non-nullifying) delay slot for returns and parallel branches.
115 (define_delay (eq_attr "type" "branch,parallel_branch")
116 [(eq_attr "in_branch_delay" "true") (nil) (nil)])
118 ;; Floating point conditional branch delay slot description and
;; nullification: an fp branch can annul its slot when taken, so the
;; annul-if-true candidate must also satisfy in_nullified_branch_delay.
;; NOTE(review): the final "(nil)])" of this define_delay is missing here.
119 (define_delay (eq_attr "type" "fbranch")
120 [(eq_attr "in_branch_delay" "true")
121 (eq_attr "in_nullified_branch_delay" "true")
124 ;; Integer conditional branch delay slot description.
125 ;; Nullification of conditional branches on the PA is dependent on the
126 ;; direction of the branch. Forward branches nullify true and
127 ;; backward branches nullify false. If the direction is unknown
128 ;; then nullification is not allowed.
;; The three entries are: plain slot, annul-if-true (forward only),
;; annul-if-false (backward only).
129 (define_delay (eq_attr "type" "cbranch")
130 [(eq_attr "in_branch_delay" "true")
131 (and (eq_attr "in_nullified_branch_delay" "true")
132 (attr_flag "forward"))
133 (and (eq_attr "in_nullified_branch_delay" "true")
134 (attr_flag "backward"))])
;; Unconditional branches get a delay slot only when not immediately
;; followed by a call (see following_call).
;; NOTE(review): the "(const_int 0)))" closing the eq test is missing
;; from this extract.
136 (define_delay (and (eq_attr "type" "uncond_branch")
137 (eq (symbol_ref "following_call (insn)")
139 [(eq_attr "in_branch_delay" "true") (nil) (nil)])
141 ;; Function units of the HPPA. The following data is for the 700 CPUs
142 ;; (Mustang CPU + Timex FPU aka PA-89) because that's what I have the docs for.
143 ;; Scheduling instructions for PA-83 machines according to the Snake
144 ;; constraints shouldn't hurt.
146 ;; (define_function_unit {name} {num-units} {n-users} {test}
147 ;; {ready-delay} {issue-delay} [{conflict-list}])
150 ;; (Noted only for documentation; units that take one cycle do not need to
153 ;; (define_function_unit "alu" 1 0
154 ;; (and (eq_attr "type" "unary,shift,nullshift,binary,move,address")
155 ;; (eq_attr "cpu" "700"))
159 ;; Memory. Disregarding Cache misses, the Mustang memory times are:
160 ;; load: 2, fpload: 3
161 ;; store, fpstore: 3, no D-cache operations should be scheduled.
;; Trailing numbers are {ready-delay} {issue-delay} in cycles.
163 (define_function_unit "pa700memory" 1 0
164 (and (eq_attr "type" "load,fpload")
165 (eq_attr "cpu" "700")) 2 0)
166 (define_function_unit "pa700memory" 1 0
167 (and (eq_attr "type" "store,fpstore")
168 (eq_attr "cpu" "700")) 3 3)
170 ;; The Timex (aka 700) has two floating-point units: ALU, and MUL/DIV/SQRT.
172 ;; Instruction Time Unit Minimum Distance (unit contention)
179 ;; fmpyadd 3 ALU,MPY 2
180 ;; fmpysub 3 ALU,MPY 2
181 ;; fmpycfxt 3 ALU,MPY 2
184 ;; fdiv,sgl 10 MPY 10
185 ;; fdiv,dbl 12 MPY 12
186 ;; fsqrt,sgl 14 MPY 14
187 ;; fsqrt,dbl 18 MPY 18
;; div/sqrt fully occupy the MPY unit (issue-delay == ready-delay),
;; i.e. they are not pipelined.
189 (define_function_unit "pa700fp_alu" 1 0
190 (and (eq_attr "type" "fpcc")
191 (eq_attr "cpu" "700")) 4 2)
192 (define_function_unit "pa700fp_alu" 1 0
193 (and (eq_attr "type" "fpalu")
194 (eq_attr "cpu" "700")) 3 2)
195 (define_function_unit "pa700fp_mpy" 1 0
196 (and (eq_attr "type" "fpmulsgl,fpmuldbl")
197 (eq_attr "cpu" "700")) 3 2)
198 (define_function_unit "pa700fp_mpy" 1 0
199 (and (eq_attr "type" "fpdivsgl")
200 (eq_attr "cpu" "700")) 10 10)
201 (define_function_unit "pa700fp_mpy" 1 0
202 (and (eq_attr "type" "fpdivdbl")
203 (eq_attr "cpu" "700")) 12 12)
204 (define_function_unit "pa700fp_mpy" 1 0
205 (and (eq_attr "type" "fpsqrtsgl")
206 (eq_attr "cpu" "700")) 14 14)
207 (define_function_unit "pa700fp_mpy" 1 0
208 (and (eq_attr "type" "fpsqrtdbl")
209 (eq_attr "cpu" "700")) 18 18)
211 ;; Function units for the 7100 and 7150. The 7100/7150 can dual-issue
212 ;; floating point computations with non-floating point computations (fp loads
213 ;; and stores are not fp computations).
216 ;; Memory. Disregarding Cache misses, memory loads take two cycles; stores also
217 ;; take two cycles, during which no Dcache operations should be scheduled.
218 ;; Any special cases are handled in pa_adjust_cost. The 7100, 7150 and 7100LC
219 ;; all have the same memory characteristics if one disregards cache misses.
;; Shared by the 7100 and 7100LC (see "cpu" test below).
220 (define_function_unit "pa7100memory" 1 0
221 (and (eq_attr "type" "load,fpload")
222 (eq_attr "cpu" "7100,7100LC")) 2 0)
223 (define_function_unit "pa7100memory" 1 0
224 (and (eq_attr "type" "store,fpstore")
225 (eq_attr "cpu" "7100,7100LC")) 2 2)
227 ;; The 7100/7150 has three floating-point units: ALU, MUL, and DIV.
229 ;; Instruction Time Unit Minimum Distance (unit contention)
236 ;; fmpyadd 2 ALU,MPY 1
237 ;; fmpysub 2 ALU,MPY 1
238 ;; fmpycfxt 2 ALU,MPY 1
242 ;; fdiv,dbl 15 DIV 15
244 ;; fsqrt,dbl 15 DIV 15
;; ALU and MPY are pipelined (issue-delay 1); DIV is not (issue-delay
;; equals latency).
246 (define_function_unit "pa7100fp_alu" 1 0
247 (and (eq_attr "type" "fpcc,fpalu")
248 (eq_attr "cpu" "7100")) 2 1)
249 (define_function_unit "pa7100fp_mpy" 1 0
250 (and (eq_attr "type" "fpmulsgl,fpmuldbl")
251 (eq_attr "cpu" "7100")) 2 1)
252 (define_function_unit "pa7100fp_div" 1 0
253 (and (eq_attr "type" "fpdivsgl,fpsqrtsgl")
254 (eq_attr "cpu" "7100")) 8 8)
255 (define_function_unit "pa7100fp_div" 1 0
256 (and (eq_attr "type" "fpdivdbl,fpsqrtdbl")
257 (eq_attr "cpu" "7100")) 15 15)
259 ;; To encourage dual issue we define function units corresponding to
260 ;; the instructions which can be dual issued. This is a rather crude
261 ;; approximation, the "pa7100nonflop" test in particular could be refined.
;; NOTE(review): the "(and" line joining the type and cpu tests of each
;; unit appears to be missing from this extract.
262 (define_function_unit "pa7100flop" 1 1
264 (eq_attr "type" "fpcc,fpalu,fpmulsgl,fpmuldbl,fpdivsgl,fpsqrtsgl,fpdivdbl,fpsqrtdbl")
265 (eq_attr "cpu" "7100")) 1 1)
267 (define_function_unit "pa7100nonflop" 1 1
269 (eq_attr "type" "!fpcc,fpalu,fpmulsgl,fpmuldbl,fpdivsgl,fpsqrtsgl,fpdivdbl,fpsqrtdbl")
270 (eq_attr "cpu" "7100")) 1 1)
273 ;; Memory subsystem works just like 7100/7150 (except for cache miss times which
274 ;; we don't model here).
276 ;; The 7100LC has three floating-point units: ALU, MUL, and DIV.
277 ;; Note divides and sqrt flops lock the cpu until the flop is
278 ;; finished. fmpy and xmpyu (fmpyi) lock the cpu for one cycle.
279 ;; There's no way to avoid the penalty.
281 ;; Instruction Time Unit Minimum Distance (unit contention)
288 ;; fmpyadd,sgl 2 ALU,MPY 1
289 ;; fmpyadd,dbl 3 ALU,MPY 2
290 ;; fmpysub,sgl 2 ALU,MPY 1
291 ;; fmpysub,dbl 3 ALU,MPY 2
292 ;; fmpycfxt,sgl 2 ALU,MPY 1
293 ;; fmpycfxt,dbl 3 ALU,MPY 2
298 ;; fdiv,dbl 15 DIV 15
300 ;; fsqrt,dbl 15 DIV 15
;; These units also cover the 7200 (see the "cpu" tests), which reuses
;; the 7100LC fp model.
302 (define_function_unit "pa7100LCfp_alu" 1 0
303 (and (eq_attr "type" "fpcc,fpalu")
304 (eq_attr "cpu" "7100LC,7200")) 2 1)
305 (define_function_unit "pa7100LCfp_mpy" 1 0
306 (and (eq_attr "type" "fpmulsgl")
307 (eq_attr "cpu" "7100LC,7200")) 2 1)
308 (define_function_unit "pa7100LCfp_mpy" 1 0
309 (and (eq_attr "type" "fpmuldbl")
310 (eq_attr "cpu" "7100LC,7200")) 3 2)
311 (define_function_unit "pa7100LCfp_div" 1 0
312 (and (eq_attr "type" "fpdivsgl,fpsqrtsgl")
313 (eq_attr "cpu" "7100LC,7200")) 8 8)
314 (define_function_unit "pa7100LCfp_div" 1 0
315 (and (eq_attr "type" "fpdivdbl,fpsqrtdbl")
316 (eq_attr "cpu" "7100LC,7200")) 15 15)
318 ;; Define the various functional units for dual-issue.
320 ;; There's only one floating point unit.
;; NOTE(review): the "(and" line joining the type and cpu tests of each
;; unit appears to be missing from this extract.
321 (define_function_unit "pa7100LCflop" 1 1
323 (eq_attr "type" "fpcc,fpalu,fpmulsgl,fpmuldbl,fpdivsgl,fpsqrtsgl,fpdivdbl,fpsqrtdbl")
324 (eq_attr "cpu" "7100LC,7200")) 1 1)
326 ;; Shifts and memory ops execute in only one of the integer ALUs
327 (define_function_unit "pa7100LCshiftmem" 1 1
329 (eq_attr "type" "shift,nullshift,load,fpload,store,fpstore")
330 (eq_attr "cpu" "7100LC,7200")) 1 1)
332 ;; We have two basic ALUs.
333 (define_function_unit "pa7100LCalu" 2 1
335 (eq_attr "type" "!fpcc,fpalu,fpmulsgl,fpmuldbl,fpdivsgl,fpsqrtsgl,fpdivdbl,fpsqrtdbl")
336 (eq_attr "cpu" "7100LC,7200")) 1 1)
338 ;; I don't have complete information on the PA7200; however, most of
339 ;; what I've heard makes it look like a 7100LC without the store-store
340 ;; penalty. So that's how we'll model it.
342 ;; Memory. Disregarding Cache misses, memory loads and stores take
343 ;; two cycles. Any special cases are handled in pa_adjust_cost.
;; Unlike pa7100memory, stores have issue-delay 0 (no store-store penalty).
344 (define_function_unit "pa7200memory" 1 0
345 (and (eq_attr "type" "load,fpload,store,fpstore")
346 (eq_attr "cpu" "7200")) 2 0)
348 ;; I don't have detailed information on the PA7200 FP pipeline, so I
349 ;; treat it just like the 7100LC pipeline.
350 ;; Similarly for the multi-issue fake units.
353 ;; Scheduling for the PA8000 is somewhat different than scheduling for a
354 ;; traditional architecture.
356 ;; The PA8000 has a large (56) entry reorder buffer that is split between
357 ;; memory and non-memory operations.
359 ;; The PA8000 can issue two memory and two non-memory operations per cycle to
360 ;; the function units. Similarly, the PA8000 can retire two memory and two
361 ;; non-memory operations per cycle.
363 ;; Given the large reorder buffer, the processor can hide most latencies.
364 ;; According to HP, they've got the best results by scheduling for retirement
365 ;; bandwidth with limited latency scheduling for floating point operations.
366 ;; Latency for integer operations and memory references is ignored.
368 ;; We claim floating point operations have a 2 cycle latency and are
369 ;; fully pipelined, except for div and sqrt which are not pipelined.
371 ;; It is not necessary to define the shifter and integer alu units.
373 ;; These first two define_function_unit descriptions model retirement from
374 ;; the reorder buffer.
;; NOTE(review): the "(and" line joining the type and cpu tests of each
;; unit appears to be missing from this extract.
375 (define_function_unit "pa8000lsu" 2 1
377 (eq_attr "type" "load,fpload,store,fpstore")
378 (eq_attr "cpu" "8000")) 1 1)
380 (define_function_unit "pa8000alu" 2 1
382 (eq_attr "type" "!load,fpload,store,fpstore")
383 (eq_attr "cpu" "8000")) 1 1)
385 ;; Claim floating point ops have a 2 cycle latency, excluding div and
386 ;; sqrt, which are not pipelined and issue to different units.
387 (define_function_unit "pa8000fmac" 2 0
389 (eq_attr "type" "fpcc,fpalu,fpmulsgl,fpmuldbl")
390 (eq_attr "cpu" "8000")) 2 1)
392 (define_function_unit "pa8000fdiv" 2 1
394 (eq_attr "type" "fpdivsgl,fpsqrtsgl")
395 (eq_attr "cpu" "8000")) 17 17)
397 (define_function_unit "pa8000fdiv" 2 1
399 (eq_attr "type" "fpdivdbl,fpsqrtdbl")
400 (eq_attr "cpu" "8000")) 31 31)
403 ;; Compare instructions.
404 ;; This controls RTL generation and register allocation.
406 ;; We generate RTL for comparisons and branches by having the cmpxx
407 ;; patterns store away the operands. Then, the scc and bcc patterns
408 ;; emit RTL for both the compare and the branch.
;; cmpsi stashes its operands in the hppa_compare_op* globals and records
;; CMP_SI so the later sCC/bCC expanders know this was an integer compare.
;; NOTE(review): the pattern's set, condition string and preparation-
;; statement braces are missing from this extract.
411 (define_expand "cmpsi"
413 (compare:CC (match_operand:SI 0 "reg_or_0_operand" "")
414 (match_operand:SI 1 "arith5_operand" "")))]
418 hppa_compare_op0 = operands[0];
419 hppa_compare_op1 = operands[1];
420 hppa_branch_type = CMP_SI;
;; SFmode compare: records operands and CMP_SF for the later bCC
;; expanders; only available with hardware floating point.
424 (define_expand "cmpsf"
426 (compare:CCFP (match_operand:SF 0 "reg_or_0_operand" "")
427 (match_operand:SF 1 "reg_or_0_operand" "")))]
428 "! TARGET_SOFT_FLOAT"
431 hppa_compare_op0 = operands[0];
432 hppa_compare_op1 = operands[1];
433 hppa_branch_type = CMP_SF;
;; DFmode compare: records operands and CMP_DF for the later bCC
;; expanders; only available with hardware floating point.
437 (define_expand "cmpdf"
439 (compare:CCFP (match_operand:DF 0 "reg_or_0_operand" "")
440 (match_operand:DF 1 "reg_or_0_operand" "")))]
441 "! TARGET_SOFT_FLOAT"
444 hppa_compare_op0 = operands[0];
445 hppa_compare_op1 = operands[1];
446 hppa_branch_type = CMP_DF;
452 (match_operator:CCFP 2 "comparison_operator"
453 [(match_operand:SF 0 "reg_or_0_operand" "fG")
454 (match_operand:SF 1 "reg_or_0_operand" "fG")]))]
455 "! TARGET_SOFT_FLOAT"
456 "fcmp,sgl,%Y2 %f0,%f1"
457 [(set_attr "length" "4")
458 (set_attr "type" "fpcc")])
462 (match_operator:CCFP 2 "comparison_operator"
463 [(match_operand:DF 0 "reg_or_0_operand" "fG")
464 (match_operand:DF 1 "reg_or_0_operand" "fG")]))]
465 "! TARGET_SOFT_FLOAT"
466 "fcmp,dbl,%Y2 %f0,%f1"
467 [(set_attr "length" "4")
468 (set_attr "type" "fpcc")])
473 [(set (match_operand:SI 0 "register_operand" "")
479 /* fp scc patterns rarely match, and are not a win on the PA. */
480 if (hppa_branch_type != CMP_SI)
482 /* set up operands from compare. */
483 operands[1] = hppa_compare_op0;
484 operands[2] = hppa_compare_op1;
485 /* fall through and generate default code */
489 [(set (match_operand:SI 0 "register_operand" "")
495 /* fp scc patterns rarely match, and are not a win on the PA. */
496 if (hppa_branch_type != CMP_SI)
498 operands[1] = hppa_compare_op0;
499 operands[2] = hppa_compare_op1;
503 [(set (match_operand:SI 0 "register_operand" "")
509 /* fp scc patterns rarely match, and are not a win on the PA. */
510 if (hppa_branch_type != CMP_SI)
512 operands[1] = hppa_compare_op0;
513 operands[2] = hppa_compare_op1;
517 [(set (match_operand:SI 0 "register_operand" "")
523 /* fp scc patterns rarely match, and are not a win on the PA. */
524 if (hppa_branch_type != CMP_SI)
526 operands[1] = hppa_compare_op0;
527 operands[2] = hppa_compare_op1;
531 [(set (match_operand:SI 0 "register_operand" "")
537 /* fp scc patterns rarely match, and are not a win on the PA. */
538 if (hppa_branch_type != CMP_SI)
540 operands[1] = hppa_compare_op0;
541 operands[2] = hppa_compare_op1;
545 [(set (match_operand:SI 0 "register_operand" "")
551 /* fp scc patterns rarely match, and are not a win on the PA. */
552 if (hppa_branch_type != CMP_SI)
554 operands[1] = hppa_compare_op0;
555 operands[2] = hppa_compare_op1;
;; Set-on-unsigned-less-than; operands 1/2 come from the preceding cmpsi
;; via the hppa_compare_op* globals (only integer compares are handled).
;; NOTE(review): interior lines (match_dup 2, condition, braces and the
;; body of the if) are missing from this extract.
558 (define_expand "sltu"
559 [(set (match_operand:SI 0 "register_operand" "")
560 (ltu:SI (match_dup 1)
565 if (hppa_branch_type != CMP_SI)
567 operands[1] = hppa_compare_op0;
568 operands[2] = hppa_compare_op1;
;; Set-on-unsigned-greater-than; same stored-compare scheme as sltu.
;; NOTE(review): interior lines are missing from this extract.
571 (define_expand "sgtu"
572 [(set (match_operand:SI 0 "register_operand" "")
573 (gtu:SI (match_dup 1)
578 if (hppa_branch_type != CMP_SI)
580 operands[1] = hppa_compare_op0;
581 operands[2] = hppa_compare_op1;
;; Set-on-unsigned-less-or-equal; same stored-compare scheme as sltu.
;; NOTE(review): interior lines are missing from this extract.
584 (define_expand "sleu"
585 [(set (match_operand:SI 0 "register_operand" "")
586 (leu:SI (match_dup 1)
591 if (hppa_branch_type != CMP_SI)
593 operands[1] = hppa_compare_op0;
594 operands[2] = hppa_compare_op1;
;; Set-on-unsigned-greater-or-equal; same stored-compare scheme as sltu.
;; NOTE(review): interior lines are missing from this extract.
597 (define_expand "sgeu"
598 [(set (match_operand:SI 0 "register_operand" "")
599 (geu:SI (match_dup 1)
604 if (hppa_branch_type != CMP_SI)
606 operands[1] = hppa_compare_op0;
607 operands[2] = hppa_compare_op1;
610 ;; Instruction canonicalization puts immediate operands second, which
611 ;; is the reverse of what we want.
614 [(set (match_operand:SI 0 "register_operand" "=r")
615 (match_operator:SI 3 "comparison_operator"
616 [(match_operand:SI 1 "register_operand" "r")
617 (match_operand:SI 2 "arith11_operand" "rI")]))]
619 "com%I2clr,%B3 %2,%1,%0\;ldi 1,%0"
620 [(set_attr "type" "binary")
621 (set_attr "length" "8")])
;; OR of two scc results: three insns (two nullifying compare-and-clears
;; plus ldi 1), hence length 12. %S3 is the first comparison un-negated,
;; %B6 the second negated.
;; NOTE(review): the pattern's condition-string line is missing from this
;; extract.
623 (define_insn "iorscc"
624 [(set (match_operand:SI 0 "register_operand" "=r")
625 (ior:SI (match_operator:SI 3 "comparison_operator"
626 [(match_operand:SI 1 "register_operand" "r")
627 (match_operand:SI 2 "arith11_operand" "rI")])
628 (match_operator:SI 6 "comparison_operator"
629 [(match_operand:SI 4 "register_operand" "r")
630 (match_operand:SI 5 "arith11_operand" "rI")])))]
632 "com%I2clr,%S3 %2,%1,%%r0\;com%I5clr,%B6 %5,%4,%0\;ldi 1,%0"
633 [(set_attr "type" "binary")
634 (set_attr "length" "12")])
636 ;; Combiner patterns for common operations performed with the output
637 ;; from an scc insn (negscc and incscc).
;; -(a OP b): two insns (comclr + ldi -1), length 8.
;; NOTE(review): the condition-string line is missing from this extract.
638 (define_insn "negscc"
639 [(set (match_operand:SI 0 "register_operand" "=r")
640 (neg:SI (match_operator:SI 3 "comparison_operator"
641 [(match_operand:SI 1 "register_operand" "r")
642 (match_operand:SI 2 "arith11_operand" "rI")])))]
644 "com%I2clr,%B3 %2,%1,%0\;ldi -1,%0"
645 [(set_attr "type" "binary")
646 (set_attr "length" "8")])
648 ;; Patterns for adding/subtracting the result of a boolean expression from
649 ;; a register. First we have special patterns that make use of the carry
650 ;; bit, and output only two instructions. For the cases we can't in
651 ;; general do in two instructions, the incscc pattern at the end outputs
652 ;; two or three instructions.
655 [(set (match_operand:SI 0 "register_operand" "=r")
656 (plus:SI (leu:SI (match_operand:SI 2 "register_operand" "r")
657 (match_operand:SI 3 "arith11_operand" "rI"))
658 (match_operand:SI 1 "register_operand" "r")))]
660 "sub%I3 %3,%2,%%r0\;addc %%r0,%1,%0"
661 [(set_attr "type" "binary")
662 (set_attr "length" "8")])
664 ; This need only accept registers for op3, since canonicalization
665 ; replaces geu with gtu when op3 is an integer.
667 [(set (match_operand:SI 0 "register_operand" "=r")
668 (plus:SI (geu:SI (match_operand:SI 2 "register_operand" "r")
669 (match_operand:SI 3 "register_operand" "r"))
670 (match_operand:SI 1 "register_operand" "r")))]
672 "sub %2,%3,%%r0\;addc %%r0,%1,%0"
673 [(set_attr "type" "binary")
674 (set_attr "length" "8")])
676 ; Match only integers for op3 here. This is used as canonical form of the
677 ; geu pattern when op3 is an integer. Don't match registers since we can't
678 ; make better code than the general incscc pattern.
680 [(set (match_operand:SI 0 "register_operand" "=r")
681 (plus:SI (gtu:SI (match_operand:SI 2 "register_operand" "r")
682 (match_operand:SI 3 "int11_operand" "I"))
683 (match_operand:SI 1 "register_operand" "r")))]
685 "addi %k3,%2,%%r0\;addc %%r0,%1,%0"
686 [(set_attr "type" "binary")
687 (set_attr "length" "8")])
;; General (a OP b) + c. First alternative (op1 tied to op0): two insns;
;; second alternative needs an extra copy, hence lengths 8 and 12.
;; NOTE(review): the condition string and the opening of the multi-
;; alternative template are missing from this extract.
689 (define_insn "incscc"
690 [(set (match_operand:SI 0 "register_operand" "=r,r")
691 (plus:SI (match_operator:SI 4 "comparison_operator"
692 [(match_operand:SI 2 "register_operand" "r,r")
693 (match_operand:SI 3 "arith11_operand" "rI,rI")])
694 (match_operand:SI 1 "register_operand" "0,?r")))]
697 com%I3clr,%B4 %3,%2,%%r0\;addi 1,%0,%0
698 com%I3clr,%B4 %3,%2,%%r0\;addi,tr 1,%1,%0\;copy %1,%0"
699 [(set_attr "type" "binary,binary")
700 (set_attr "length" "8,12")])
703 [(set (match_operand:SI 0 "register_operand" "=r")
704 (minus:SI (match_operand:SI 1 "register_operand" "r")
705 (gtu:SI (match_operand:SI 2 "register_operand" "r")
706 (match_operand:SI 3 "arith11_operand" "rI"))))]
708 "sub%I3 %3,%2,%%r0\;subb %1,0,%0"
709 [(set_attr "type" "binary")
710 (set_attr "length" "8")])
713 [(set (match_operand:SI 0 "register_operand" "=r")
714 (minus:SI (minus:SI (match_operand:SI 1 "register_operand" "r")
715 (gtu:SI (match_operand:SI 2 "register_operand" "r")
716 (match_operand:SI 3 "arith11_operand" "rI")))
717 (match_operand:SI 4 "register_operand" "r")))]
719 "sub%I3 %3,%2,%%r0\;subb %1,%4,%0"
720 [(set_attr "type" "binary")
721 (set_attr "length" "8")])
723 ; This need only accept registers for op3, since canonicalization
724 ; replaces ltu with leu when op3 is an integer.
726 [(set (match_operand:SI 0 "register_operand" "=r")
727 (minus:SI (match_operand:SI 1 "register_operand" "r")
728 (ltu:SI (match_operand:SI 2 "register_operand" "r")
729 (match_operand:SI 3 "register_operand" "r"))))]
731 "sub %2,%3,%%r0\;subb %1,0,%0"
732 [(set_attr "type" "binary")
733 (set_attr "length" "8")])
736 [(set (match_operand:SI 0 "register_operand" "=r")
737 (minus:SI (minus:SI (match_operand:SI 1 "register_operand" "r")
738 (ltu:SI (match_operand:SI 2 "register_operand" "r")
739 (match_operand:SI 3 "register_operand" "r")))
740 (match_operand:SI 4 "register_operand" "r")))]
742 "sub %2,%3,%%r0\;subb %1,%4,%0"
743 [(set_attr "type" "binary")
744 (set_attr "length" "8")])
746 ; Match only integers for op3 here. This is used as canonical form of the
747 ; ltu pattern when op3 is an integer. Don't match registers since we can't
748 ; make better code than the general incscc pattern.
750 [(set (match_operand:SI 0 "register_operand" "=r")
751 (minus:SI (match_operand:SI 1 "register_operand" "r")
752 (leu:SI (match_operand:SI 2 "register_operand" "r")
753 (match_operand:SI 3 "int11_operand" "I"))))]
755 "addi %k3,%2,%%r0\;subb %1,0,%0"
756 [(set_attr "type" "binary")
757 (set_attr "length" "8")])
760 [(set (match_operand:SI 0 "register_operand" "=r")
761 (minus:SI (minus:SI (match_operand:SI 1 "register_operand" "r")
762 (leu:SI (match_operand:SI 2 "register_operand" "r")
763 (match_operand:SI 3 "int11_operand" "I")))
764 (match_operand:SI 4 "register_operand" "r")))]
766 "addi %k3,%2,%%r0\;subb %1,%4,%0"
767 [(set_attr "type" "binary")
768 (set_attr "length" "8")])
;; General c - (a OP b), mirror of incscc: two insns when op1 is tied to
;; op0, three (extra copy) otherwise -- lengths 8 and 12.
;; NOTE(review): the condition string and opening of the template are
;; missing from this extract.
770 (define_insn "decscc"
771 [(set (match_operand:SI 0 "register_operand" "=r,r")
772 (minus:SI (match_operand:SI 1 "register_operand" "0,?r")
773 (match_operator:SI 4 "comparison_operator"
774 [(match_operand:SI 2 "register_operand" "r,r")
775 (match_operand:SI 3 "arith11_operand" "rI,rI")])))]
778 com%I3clr,%B4 %3,%2,%%r0\;addi -1,%0,%0
779 com%I3clr,%B4 %3,%2,%%r0\;addi,tr -1,%1,%0\;copy %1,%0"
780 [(set_attr "type" "binary,binary")
781 (set_attr "length" "8,12")])
783 ; Patterns for max and min. (There is no need for an earlyclobber in the
784 ; last alternative since the middle alternative will match if op0 == op1.)
;; Signed min via nullifying compare-and-clear plus conditional copy/ldi;
;; every alternative is two insns (length 8).
;; NOTE(review): the condition string and template opening are missing
;; from this extract.
786 (define_insn "sminsi3"
787 [(set (match_operand:SI 0 "register_operand" "=r,r,r")
788 (smin:SI (match_operand:SI 1 "register_operand" "%0,0,r")
789 (match_operand:SI 2 "arith11_operand" "r,I,M")))]
792 comclr,> %2,%0,%%r0\;copy %2,%0
793 comiclr,> %2,%0,%%r0\;ldi %2,%0
794 comclr,> %1,%r2,%0\;copy %1,%0"
795 [(set_attr "type" "multi,multi,multi")
796 (set_attr "length" "8,8,8")])
;; Unsigned min -- same scheme as sminsi3 but with the unsigned ,>>
;; completer; two insns per alternative.
;; NOTE(review): condition string / template opening missing from extract.
798 (define_insn "uminsi3"
799 [(set (match_operand:SI 0 "register_operand" "=r,r")
800 (umin:SI (match_operand:SI 1 "register_operand" "%0,0")
801 (match_operand:SI 2 "arith11_operand" "r,I")))]
804 comclr,>> %2,%0,%%r0\;copy %2,%0
805 comiclr,>> %2,%0,%%r0\;ldi %2,%0"
806 [(set_attr "type" "multi,multi")
807 (set_attr "length" "8,8")])
;; Signed max -- mirror of sminsi3 with the ,< completer.
;; NOTE(review): condition string / template opening missing from extract.
809 (define_insn "smaxsi3"
810 [(set (match_operand:SI 0 "register_operand" "=r,r,r")
811 (smax:SI (match_operand:SI 1 "register_operand" "%0,0,r")
812 (match_operand:SI 2 "arith11_operand" "r,I,M")))]
815 comclr,< %2,%0,%%r0\;copy %2,%0
816 comiclr,< %2,%0,%%r0\;ldi %2,%0
817 comclr,< %1,%r2,%0\;copy %1,%0"
818 [(set_attr "type" "multi,multi,multi")
819 (set_attr "length" "8,8,8")])
;; Unsigned max -- mirror of uminsi3 with the ,<< completer.
;; NOTE(review): condition string / template opening missing from extract.
821 (define_insn "umaxsi3"
822 [(set (match_operand:SI 0 "register_operand" "=r,r")
823 (umax:SI (match_operand:SI 1 "register_operand" "%0,0")
824 (match_operand:SI 2 "arith11_operand" "r,I")))]
827 comclr,<< %2,%0,%%r0\;copy %2,%0
828 comiclr,<< %2,%0,%%r0\;ldi %2,%0"
829 [(set_attr "type" "multi,multi")
830 (set_attr "length" "8,8")])
;; Absolute value: copy with ,>= nullifying the following negate when the
;; source is already non-negative; two insns, length 8.
;; NOTE(review): the condition-string line is missing from this extract.
832 (define_insn "abssi2"
833 [(set (match_operand:SI 0 "register_operand" "=r")
834 (abs:SI (match_operand:SI 1 "register_operand" "r")))]
836 "or,>= %%r0,%1,%0\;subi 0,%0,%0"
837 [(set_attr "type" "multi")
838 (set_attr "length" "8")])
840 ;;; Experimental conditional move patterns
;; movsicc rebuilds the comparison from the stored hppa_compare_op*
;; globals (integer compares only) and exposes it as operand 1 for the
;; conditional-move insns below.
;; NOTE(review): several interior lines (if_then_else wrapper, condition
;; string, braces, FAIL body) are missing from this extract.
842 (define_expand "movsicc"
843 [(set (match_operand:SI 0 "register_operand" "")
845 (match_operator 1 "comparison_operator"
848 (match_operand:SI 2 "reg_or_cint_move_operand" "")
849 (match_operand:SI 3 "reg_or_cint_move_operand" "")))]
853 enum rtx_code code = GET_CODE (operands[1]);
855 if (hppa_branch_type != CMP_SI)
858 /* operands[1] is currently the result of compare_from_rtx. We want to
859 emit a compare of the original operands. */
860 operands[1] = gen_rtx_fmt_ee (code, SImode, hppa_compare_op0, hppa_compare_op1);
861 operands[4] = hppa_compare_op0;
862 operands[5] = hppa_compare_op1;
865 ; We need the first constraint alternative in order to avoid
866 ; earlyclobbers on all other alternatives.
868 [(set (match_operand:SI 0 "register_operand" "=r,r,r,r,r")
870 (match_operator 5 "comparison_operator"
871 [(match_operand:SI 3 "register_operand" "r,r,r,r,r")
872 (match_operand:SI 4 "arith11_operand" "rI,rI,rI,rI,rI")])
873 (match_operand:SI 1 "reg_or_cint_move_operand" "0,r,J,N,K")
877 com%I4clr,%S5 %4,%3,%%r0\;ldi 0,%0
878 com%I4clr,%B5 %4,%3,%0\;copy %1,%0
879 com%I4clr,%B5 %4,%3,%0\;ldi %1,%0
880 com%I4clr,%B5 %4,%3,%0\;ldil L'%1,%0
881 com%I4clr,%B5 %4,%3,%0\;zdepi %Z1,%0"
882 [(set_attr "type" "multi,multi,multi,multi,nullshift")
883 (set_attr "length" "8,8,8,8,8")])
886 [(set (match_operand:SI 0 "register_operand" "=r,r,r,r,r,r,r,r")
888 (match_operator 5 "comparison_operator"
889 [(match_operand:SI 3 "register_operand" "r,r,r,r,r,r,r,r")
890 (match_operand:SI 4 "arith11_operand" "rI,rI,rI,rI,rI,rI,rI,rI")])
891 (match_operand:SI 1 "reg_or_cint_move_operand" "0,0,0,0,r,J,N,K")
892 (match_operand:SI 2 "reg_or_cint_move_operand" "r,J,N,K,0,0,0,0")))]
895 com%I4clr,%S5 %4,%3,%%r0\;copy %2,%0
896 com%I4clr,%S5 %4,%3,%%r0\;ldi %2,%0
897 com%I4clr,%S5 %4,%3,%%r0\;ldil L'%2,%0
898 com%I4clr,%S5 %4,%3,%%r0\;zdepi %Z2,%0
899 com%I4clr,%B5 %4,%3,%%r0\;copy %1,%0
900 com%I4clr,%B5 %4,%3,%%r0\;ldi %1,%0
901 com%I4clr,%B5 %4,%3,%%r0\;ldil L'%1,%0
902 com%I4clr,%B5 %4,%3,%%r0\;zdepi %Z1,%0"
903 [(set_attr "type" "multi,multi,multi,nullshift,multi,multi,multi,nullshift")
904 (set_attr "length" "8,8,8,8,8,8,8,8")])
906 ;; Conditional Branches
910 (if_then_else (eq (match_dup 1) (match_dup 2))
911 (label_ref (match_operand 0 "" ""))
916 if (hppa_branch_type != CMP_SI)
918 emit_insn (gen_cmp_fp (EQ, hppa_compare_op0, hppa_compare_op1));
919 emit_bcond_fp (NE, operands[0]);
922 /* set up operands from compare. */
923 operands[1] = hppa_compare_op0;
924 operands[2] = hppa_compare_op1;
925 /* fall through and generate default code */
930 (if_then_else (ne (match_dup 1) (match_dup 2))
931 (label_ref (match_operand 0 "" ""))
936 if (hppa_branch_type != CMP_SI)
938 emit_insn (gen_cmp_fp (NE, hppa_compare_op0, hppa_compare_op1));
939 emit_bcond_fp (NE, operands[0]);
942 operands[1] = hppa_compare_op0;
943 operands[2] = hppa_compare_op1;
948 (if_then_else (gt (match_dup 1) (match_dup 2))
949 (label_ref (match_operand 0 "" ""))
954 if (hppa_branch_type != CMP_SI)
956 emit_insn (gen_cmp_fp (GT, hppa_compare_op0, hppa_compare_op1));
957 emit_bcond_fp (NE, operands[0]);
960 operands[1] = hppa_compare_op0;
961 operands[2] = hppa_compare_op1;
966 (if_then_else (lt (match_dup 1) (match_dup 2))
967 (label_ref (match_operand 0 "" ""))
972 if (hppa_branch_type != CMP_SI)
974 emit_insn (gen_cmp_fp (LT, hppa_compare_op0, hppa_compare_op1));
975 emit_bcond_fp (NE, operands[0]);
978 operands[1] = hppa_compare_op0;
979 operands[2] = hppa_compare_op1;
984 (if_then_else (ge (match_dup 1) (match_dup 2))
985 (label_ref (match_operand 0 "" ""))
990 if (hppa_branch_type != CMP_SI)
992 emit_insn (gen_cmp_fp (GE, hppa_compare_op0, hppa_compare_op1));
993 emit_bcond_fp (NE, operands[0]);
996 operands[1] = hppa_compare_op0;
997 operands[2] = hppa_compare_op1;
;; Branch on signed less-or-equal. For fp compares, emit the deferred
;; fcmp (gen_cmp_fp) and an ftest-based branch (emit_bcond_fp); for
;; integer compares, recover the stored operands and fall through.
;; NOTE(review): interior lines (set (pc), pc fallthrough, condition,
;; braces, DONE) are missing from this extract.
1000 (define_expand "ble"
1002 (if_then_else (le (match_dup 1) (match_dup 2))
1003 (label_ref (match_operand 0 "" ""))
1008 if (hppa_branch_type != CMP_SI)
1010 emit_insn (gen_cmp_fp (LE, hppa_compare_op0, hppa_compare_op1));
1011 emit_bcond_fp (NE, operands[0]);
1014 operands[1] = hppa_compare_op0;
1015 operands[2] = hppa_compare_op1;
;; Branch on unsigned greater-than; integer compares only (no fp path),
;; recovering operands from the stored compare.
;; NOTE(review): interior lines are missing from this extract.
1018 (define_expand "bgtu"
1020 (if_then_else (gtu (match_dup 1) (match_dup 2))
1021 (label_ref (match_operand 0 "" ""))
1026 if (hppa_branch_type != CMP_SI)
1028 operands[1] = hppa_compare_op0;
1029 operands[2] = hppa_compare_op1;
;; Branch on unsigned less-than; same stored-compare scheme as bgtu.
;; NOTE(review): interior lines are missing from this extract.
1032 (define_expand "bltu"
1034 (if_then_else (ltu (match_dup 1) (match_dup 2))
1035 (label_ref (match_operand 0 "" ""))
1040 if (hppa_branch_type != CMP_SI)
1042 operands[1] = hppa_compare_op0;
1043 operands[2] = hppa_compare_op1;
;; Branch on unsigned greater-or-equal; same stored-compare scheme.
;; NOTE(review): interior lines are missing from this extract.
1046 (define_expand "bgeu"
1048 (if_then_else (geu (match_dup 1) (match_dup 2))
1049 (label_ref (match_operand 0 "" ""))
1054 if (hppa_branch_type != CMP_SI)
1056 operands[1] = hppa_compare_op0;
1057 operands[2] = hppa_compare_op1;
;; Branch on unsigned less-or-equal; same stored-compare scheme.
;; NOTE(review): interior lines are missing from this extract.
1060 (define_expand "bleu"
1062 (if_then_else (leu (match_dup 1) (match_dup 2))
1063 (label_ref (match_operand 0 "" ""))
1068 if (hppa_branch_type != CMP_SI)
1070 operands[1] = hppa_compare_op0;
1071 operands[2] = hppa_compare_op1;
1074 ;; Match the branch patterns.
1077 ;; Note a long backward conditional branch with an annulled delay slot
1078 ;; has a length of 12.
1082 (match_operator 3 "comparison_operator"
1083 [(match_operand:SI 1 "reg_or_0_operand" "rM")
1084 (match_operand:SI 2 "arith5_operand" "rL")])
1085 (label_ref (match_operand 0 "" ""))
1090 return output_cbranch (operands, INSN_ANNULLED_BRANCH_P (insn),
1091 get_attr_length (insn), 0, insn);
1093 [(set_attr "type" "cbranch")
1094 (set (attr "length")
1095 (cond [(lt (abs (minus (match_dup 0) (plus (pc) (const_int 8))))
1098 (lt (abs (minus (match_dup 0) (plus (pc) (const_int 8))))
1101 (eq (symbol_ref "flag_pic") (const_int 0))
1105 ;; Match the negated branch.
1110 (match_operator 3 "comparison_operator"
1111 [(match_operand:SI 1 "reg_or_0_operand" "rM")
1112 (match_operand:SI 2 "arith5_operand" "rL")])
1114 (label_ref (match_operand 0 "" ""))))]
1118 return output_cbranch (operands, INSN_ANNULLED_BRANCH_P (insn),
1119 get_attr_length (insn), 1, insn);
1121 [(set_attr "type" "cbranch")
1122 (set (attr "length")
1123 (cond [(lt (abs (minus (match_dup 0) (plus (pc) (const_int 8))))
1126 (lt (abs (minus (match_dup 0) (plus (pc) (const_int 8))))
1129 (eq (symbol_ref "flag_pic") (const_int 0))
1133 ;; Branch on Bit patterns.
1137 (ne (zero_extract:SI (match_operand:SI 0 "register_operand" "r")
1139 (match_operand:SI 1 "uint5_operand" ""))
1141 (label_ref (match_operand 2 "" ""))
1146 return output_bb (operands, INSN_ANNULLED_BRANCH_P (insn),
1147 get_attr_length (insn), 0, insn, 0);
1149 [(set_attr "type" "cbranch")
1150 (set (attr "length")
1151 (if_then_else (lt (abs (minus (match_dup 2) (plus (pc) (const_int 8))))
1159 (ne (zero_extract:SI (match_operand:SI 0 "register_operand" "r")
1161 (match_operand:SI 1 "uint5_operand" ""))
1164 (label_ref (match_operand 2 "" ""))))]
1168 return output_bb (operands, INSN_ANNULLED_BRANCH_P (insn),
1169 get_attr_length (insn), 1, insn, 0);
1171 [(set_attr "type" "cbranch")
1172 (set (attr "length")
1173 (if_then_else (lt (abs (minus (match_dup 2) (plus (pc) (const_int 8))))
1181 (eq (zero_extract:SI (match_operand:SI 0 "register_operand" "r")
1183 (match_operand:SI 1 "uint5_operand" ""))
1185 (label_ref (match_operand 2 "" ""))
1190 return output_bb (operands, INSN_ANNULLED_BRANCH_P (insn),
1191 get_attr_length (insn), 0, insn, 1);
1193 [(set_attr "type" "cbranch")
1194 (set (attr "length")
1195 (if_then_else (lt (abs (minus (match_dup 2) (plus (pc) (const_int 8))))
1203 (eq (zero_extract:SI (match_operand:SI 0 "register_operand" "r")
1205 (match_operand:SI 1 "uint5_operand" ""))
1208 (label_ref (match_operand 2 "" ""))))]
1212 return output_bb (operands, INSN_ANNULLED_BRANCH_P (insn),
1213 get_attr_length (insn), 1, insn, 1);
1215 [(set_attr "type" "cbranch")
1216 (set (attr "length")
1217 (if_then_else (lt (abs (minus (match_dup 2) (plus (pc) (const_int 8))))
1222 ;; Branch on Variable Bit patterns.
1226 (ne (zero_extract:SI (match_operand:SI 0 "register_operand" "r")
1228 (match_operand:SI 1 "register_operand" "q"))
1230 (label_ref (match_operand 2 "" ""))
1235 return output_bvb (operands, INSN_ANNULLED_BRANCH_P (insn),
1236 get_attr_length (insn), 0, insn, 0);
1238 [(set_attr "type" "cbranch")
1239 (set (attr "length")
1240 (if_then_else (lt (abs (minus (match_dup 2) (plus (pc) (const_int 8))))
1248 (ne (zero_extract:SI (match_operand:SI 0 "register_operand" "r")
1250 (match_operand:SI 1 "register_operand" "q"))
1253 (label_ref (match_operand 2 "" ""))))]
1257 return output_bvb (operands, INSN_ANNULLED_BRANCH_P (insn),
1258 get_attr_length (insn), 1, insn, 0);
1260 [(set_attr "type" "cbranch")
1261 (set (attr "length")
1262 (if_then_else (lt (abs (minus (match_dup 2) (plus (pc) (const_int 8))))
1270 (eq (zero_extract:SI (match_operand:SI 0 "register_operand" "r")
1272 (match_operand:SI 1 "register_operand" "q"))
1274 (label_ref (match_operand 2 "" ""))
1279 return output_bvb (operands, INSN_ANNULLED_BRANCH_P (insn),
1280 get_attr_length (insn), 0, insn, 1);
1282 [(set_attr "type" "cbranch")
1283 (set (attr "length")
1284 (if_then_else (lt (abs (minus (match_dup 2) (plus (pc) (const_int 8))))
1292 (eq (zero_extract:SI (match_operand:SI 0 "register_operand" "r")
1294 (match_operand:SI 1 "register_operand" "q"))
1297 (label_ref (match_operand 2 "" ""))))]
1301 return output_bvb (operands, INSN_ANNULLED_BRANCH_P (insn),
1302 get_attr_length (insn), 1, insn, 1);
1304 [(set_attr "type" "cbranch")
1305 (set (attr "length")
1306 (if_then_else (lt (abs (minus (match_dup 2) (plus (pc) (const_int 8))))
1311 ;; Floating point branches
1313 [(set (pc) (if_then_else (ne (reg:CCFP 0) (const_int 0))
1314 (label_ref (match_operand 0 "" ""))
1316 "! TARGET_SOFT_FLOAT"
1319 if (INSN_ANNULLED_BRANCH_P (insn))
1320 return \"ftest\;b,n %0\";
1322 return \"ftest\;b%* %0\";
1324 [(set_attr "type" "fbranch")
1325 (set_attr "length" "8")])
1328 [(set (pc) (if_then_else (ne (reg:CCFP 0) (const_int 0))
1330 (label_ref (match_operand 0 "" ""))))]
1331 "! TARGET_SOFT_FLOAT"
1334 if (INSN_ANNULLED_BRANCH_P (insn))
1335 return \"ftest\;add,tr %%r0,%%r0,%%r0\;b,n %0\";
1337 return \"ftest\;add,tr %%r0,%%r0,%%r0\;b%* %0\";
1339 [(set_attr "type" "fbranch")
1340 (set_attr "length" "12")])
1342 ;; Move instructions
;; "movsi" expander: general SImode move.  Delegates all the hard cases
;; (constants, symbolic addresses, FP<->integer copies) to
;; emit_move_sequence with no scratch register (third argument 0).
;; NOTE(review): the DONE/fall-through tail of this expander is missing
;; from this extract.
1344 (define_expand "movsi"
1345 [(set (match_operand:SI 0 "general_operand" "")
1346 (match_operand:SI 1 "general_operand" ""))]
1350 if (emit_move_sequence (operands, SImode, 0))
1354 ;; Reloading an SImode or DImode value requires a scratch register if
1355 ;; going in to or out of float point registers.
;; "reload_insi": reload an SImode value into a register of class Z,
;; with operand 2 as the scratch register reload provides (needed when
;; the value moves in/out of floating point registers).  The clobber is
;; only a contract with reload; the emitted RTL is a plain SET.
;; NOTE(review): interior lines of this expander are missing from this
;; extract.
1357 (define_expand "reload_insi"
1358 [(set (match_operand:SI 0 "register_operand" "=Z")
1359 (match_operand:SI 1 "non_hard_reg_operand" ""))
1360 (clobber (match_operand:SI 2 "register_operand" "=&r"))]
1364 if (emit_move_sequence (operands, SImode, operands[2]))
1367 /* We don't want the clobber emitted, so handle this ourselves. */
1368 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
;; "reload_outsi": store-direction counterpart of reload_insi; moves an
;; SImode value out of a class-Z register using scratch operand 2.  As
;; above, the clobber is for reload's benefit only and is not emitted.
;; NOTE(review): interior lines of this expander are missing from this
;; extract.
1372 (define_expand "reload_outsi"
1373 [(set (match_operand:SI 0 "non_hard_reg_operand" "")
1374 (match_operand:SI 1 "register_operand" "Z"))
1375 (clobber (match_operand:SI 2 "register_operand" "=&r"))]
1379 if (emit_move_sequence (operands, SImode, operands[2]))
1382 /* We don't want the clobber emitted, so handle this ourselves. */
1383 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
1387 ;;; pic symbol references
1390 [(set (match_operand:SI 0 "register_operand" "=r")
1391 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "r")
1392 (match_operand:SI 2 "symbolic_operand" ""))))]
1393 "flag_pic && operands[1] == pic_offset_table_rtx"
1395 [(set_attr "type" "load")
1396 (set_attr "length" "4")])
1399 [(set (match_operand:SI 0 "reg_or_nonsymb_mem_operand"
1400 "=r,r,r,r,r,Q,*q,!f,f,*TR")
1401 (match_operand:SI 1 "move_operand"
1402 "r,J,N,K,RQ,rM,rM,!fM,*RT,f"))]
1403 "(register_operand (operands[0], SImode)
1404 || reg_or_0_operand (operands[1], SImode))
1405 && ! TARGET_SOFT_FLOAT"
1417 [(set_attr "type" "move,move,move,shift,load,store,move,fpalu,fpload,fpstore")
1418 (set_attr "pa_combine_type" "addmove")
1419 (set_attr "length" "4,4,4,4,4,4,4,4,4,4")])
1422 [(set (match_operand:SI 0 "reg_or_nonsymb_mem_operand"
1424 (match_operand:SI 1 "move_operand"
1425 "r,J,N,K,RQ,rM,rM"))]
1426 "(register_operand (operands[0], SImode)
1427 || reg_or_0_operand (operands[1], SImode))
1428 && TARGET_SOFT_FLOAT"
1437 [(set_attr "type" "move,move,move,move,load,store,move")
1438 (set_attr "pa_combine_type" "addmove")
1439 (set_attr "length" "4,4,4,4,4,4,4")])
1442 [(set (match_operand:SI 0 "register_operand" "=r")
1443 (mem:SI (plus:SI (match_operand:SI 1 "basereg_operand" "r")
1444 (match_operand:SI 2 "register_operand" "r"))))]
1445 "! TARGET_DISABLE_INDEXING"
1448 /* Reload can create backwards (relative to cse) unscaled index
1449 address modes when eliminating registers and possibly for
1450 pseudos that don't get hard registers. Deal with it. */
1451 if (operands[2] == hard_frame_pointer_rtx
1452 || operands[2] == stack_pointer_rtx)
1453 return \"ldwx %1(%2),%0\";
1455 return \"ldwx %2(%1),%0\";
1457 [(set_attr "type" "load")
1458 (set_attr "length" "4")])
1461 [(set (match_operand:SI 0 "register_operand" "=r")
1462 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "r")
1463 (match_operand:SI 2 "basereg_operand" "r"))))]
1464 "! TARGET_DISABLE_INDEXING"
1467 /* Reload can create backwards (relative to cse) unscaled index
1468 address modes when eliminating registers and possibly for
1469 pseudos that don't get hard registers. Deal with it. */
1470 if (operands[1] == hard_frame_pointer_rtx
1471 || operands[1] == stack_pointer_rtx)
1472 return \"ldwx %2(%1),%0\";
1474 return \"ldwx %1(%2),%0\";
1476 [(set_attr "type" "load")
1477 (set_attr "length" "4")])
1479 ;; Load or store with base-register modification.
;; "pre_load" expander: load with base-register pre-modification
;; (base updated by the displacement before the access).  Simply
;; delegates to the pre_ldw insn below.  NOTE(review): interior lines of
;; this expander are missing from this extract.
1481 (define_expand "pre_load"
1482 [(parallel [(set (match_operand:SI 0 "register_operand" "")
1483 (mem (plus (match_operand 1 "register_operand" "")
1484 (match_operand 2 "pre_cint_operand" ""))))
1486 (plus (match_dup 1) (match_dup 2)))])]
1490 emit_insn (gen_pre_ldw (operands[0], operands[1], operands[2]));
;; "pre_ldw": word load that also updates the base register by the
;; displacement.  Negative displacements use ldwm, otherwise ldws,mb
;; (modify-before completer).  NOTE(review): interior lines of this
;; pattern are missing from this extract.
1494 (define_insn "pre_ldw"
1495 [(set (match_operand:SI 0 "register_operand" "=r")
1496 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "+r")
1497 (match_operand:SI 2 "pre_cint_operand" ""))))
1499 (plus:SI (match_dup 1) (match_dup 2)))]
1503 if (INTVAL (operands[2]) < 0)
1504 return \"ldwm %2(%1),%0\";
1505 return \"ldws,mb %2(%1),%0\";
1507 [(set_attr "type" "load")
1508 (set_attr "length" "4")])
1511 [(set (mem:SI (plus:SI (match_operand:SI 0 "register_operand" "+r")
1512 (match_operand:SI 1 "pre_cint_operand" "")))
1513 (match_operand:SI 2 "reg_or_0_operand" "rM"))
1515 (plus:SI (match_dup 0) (match_dup 1)))]
1519 if (INTVAL (operands[1]) < 0)
1520 return \"stwm %r2,%1(%0)\";
1521 return \"stws,mb %r2,%1(%0)\";
1523 [(set_attr "type" "store")
1524 (set_attr "length" "4")])
1527 [(set (match_operand:SI 0 "register_operand" "=r")
1528 (mem:SI (match_operand:SI 1 "register_operand" "+r")))
1530 (plus:SI (match_dup 1)
1531 (match_operand:SI 2 "post_cint_operand" "")))]
1535 if (INTVAL (operands[2]) > 0)
1536 return \"ldwm %2(%1),%0\";
1537 return \"ldws,ma %2(%1),%0\";
1539 [(set_attr "type" "load")
1540 (set_attr "length" "4")])
;; "post_store" expander: store with base-register post-modification
;; (base updated by the displacement after the access).  Delegates to
;; the post_stw insn below.  NOTE(review): interior lines of this
;; expander are missing from this extract.
1542 (define_expand "post_store"
1543 [(parallel [(set (mem (match_operand 0 "register_operand" ""))
1544 (match_operand 1 "reg_or_0_operand" ""))
1547 (match_operand 2 "post_cint_operand" "")))])]
1551 emit_insn (gen_post_stw (operands[0], operands[1], operands[2]));
;; "post_stw": word store that also updates the base register by the
;; displacement after the access.  Positive displacements use stwm,
;; otherwise stws,ma (modify-after completer).  NOTE(review): interior
;; lines of this pattern are missing from this extract.
1555 (define_insn "post_stw"
1556 [(set (mem:SI (match_operand:SI 0 "register_operand" "+r"))
1557 (match_operand:SI 1 "reg_or_0_operand" "rM"))
1559 (plus:SI (match_dup 0)
1560 (match_operand:SI 2 "post_cint_operand" "")))]
1564 if (INTVAL (operands[2]) > 0)
1565 return \"stwm %r1,%2(%0)\";
1566 return \"stws,ma %r1,%2(%0)\";
1568 [(set_attr "type" "store")
1569 (set_attr "length" "4")])
1572 ;; Note since this pattern can be created at reload time (via movsi), all
1573 ;; the same rules for movsi apply here. (no new pseudos, no temporaries).
;; "pic_load_label": materialize the address of a PIC label in
;; operand 0.  Captures the PC with "bl .+8", clears the low two bits
;; with depi, emits an internal label at that point, then adds the
;; displacement to the target: a single ldo when the label is within
;; reach (< 8100 bytes), otherwise an addil/ldo pair.  NOTE(review):
;; some interior (brace/blank) lines are missing from this extract.
1574 (define_insn "pic_load_label"
1575 [(set (match_operand:SI 0 "register_operand" "=a")
1576 (match_operand:SI 1 "pic_label_operand" ""))]
1580 rtx label_rtx = gen_label_rtx ();
1582 extern FILE *asm_out_file;
1584 xoperands[0] = operands[0];
1585 xoperands[1] = operands[1];
1586 xoperands[2] = label_rtx;
1587 output_asm_insn (\"bl .+8,%0\", xoperands);
1588 output_asm_insn (\"depi 0,31,2,%0\", xoperands);
1589 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, \"L\",
1590 CODE_LABEL_NUMBER (label_rtx));
1592 /* If we're trying to load the address of a label that happens to be
1593 close, then we can use a shorter sequence. */
1594 if (GET_CODE (operands[1]) == LABEL_REF
1596 && abs (insn_addresses[INSN_UID (XEXP (operands[1], 0))]
1597 - insn_addresses[INSN_UID (insn)]) < 8100)
1599 /* Prefixing with R% here is wrong; it extracts just 11 bits and is
1600 always non-negative. */
1601 output_asm_insn (\"ldo %1-%2(%0),%0\", xoperands);
1605 output_asm_insn (\"addil L%%%1-%2,%0\", xoperands);
1606 output_asm_insn (\"ldo R%%%1-%2(%0),%0\", xoperands);
1610 [(set_attr "type" "multi")
1611 (set_attr "length" "16")]) ; 12 or 16
1614 [(set (match_operand:SI 0 "register_operand" "=a")
1615 (plus:SI (match_operand:SI 1 "register_operand" "r")
1616 (high:SI (match_operand 2 "" ""))))]
1617 "symbolic_operand (operands[2], Pmode)
1618 && ! function_label_operand (operands[2])
1621 [(set_attr "type" "binary")
1622 (set_attr "length" "4")])
1624 ; We need this to make sure CSE doesn't simplify a memory load with a
1625 ; symbolic address, whose content it thinks it knows. For PIC, what CSE
1626 ; thinks is the real value will be the address of that value.
1628 [(set (match_operand:SI 0 "register_operand" "=r")
1630 (lo_sum:SI (match_operand:SI 1 "register_operand" "r")
1632 [(match_operand:SI 2 "symbolic_operand" "")] 0))))]
1637 return \"ldw RT'%G2(%1),%0\";
1639 [(set_attr "type" "load")
1640 (set_attr "length" "4")])
1642 ;; Always use addil rather than ldil;add sequences. This allows the
1643 ;; HP linker to eliminate the dp relocation if the symbolic operand
1644 ;; lives in the TEXT space.
1646 [(set (match_operand:SI 0 "register_operand" "=a")
1647 (high:SI (match_operand 1 "" "")))]
1648 "symbolic_operand (operands[1], Pmode)
1649 && ! function_label_operand (operands[1])
1650 && ! read_only_operand (operands[1])
1654 if (TARGET_LONG_LOAD_STORE)
1655 return \"addil NLR'%H1,%%r27\;ldo N'%H1(%%r1),%%r1\";
1657 return \"addil LR'%H1,%%r27\";
1659 [(set_attr "type" "binary")
1660 (set (attr "length")
1661 (if_then_else (eq (symbol_ref "TARGET_LONG_LOAD_STORE") (const_int 0))
1666 ;; This is for use in the prologue/epilogue code. We need it
1667 ;; to add large constants to a stack pointer or frame pointer.
1668 ;; Because of the additional %r1 pressure, we probably do not
1669 ;; want to use this in general code, so make it available
1670 ;; only after reload.
1672 [(set (match_operand:SI 0 "register_operand" "=!a,*r")
1673 (plus:SI (match_operand:SI 1 "register_operand" "r,r")
1674 (high:SI (match_operand 2 "const_int_operand" ""))))]
1678 ldil L'%G2,%0\;addl %0,%1,%0"
1679 [(set_attr "type" "binary,binary")
1680 (set_attr "length" "4,8")])
;; Load the high part of a constant or symbolic address with ldil.
;; Symbolic operands use the LR' field selector, integers use L'.
;; FIX(review): the condition previously read
;;   (!flag_pic || !symbolic_operand (operands[1]), Pmode)
;; The misplaced closing paren made the parenthesized expression a C
;; comma expression whose value is Pmode -- i.e. always true -- so the
;; "no symbolic operands under PIC" restriction was never enforced.
;; Pmode belongs inside the symbolic_operand call, as below.
1683 [(set (match_operand:SI 0 "register_operand" "=r")
1684 (high:SI (match_operand 1 "" "")))]
1685 "(!flag_pic || !symbolic_operand (operands[1], Pmode))
1686 && !is_function_label_plus_const (operands[1])"
1689 if (symbolic_operand (operands[1], Pmode))
1690 return \"ldil LR'%H1,%0\";
1692 return \"ldil L'%G1,%0\";
1694 [(set_attr "type" "move")
1695 (set_attr "length" "4")])
1698 [(set (match_operand:SI 0 "register_operand" "=r")
1699 (lo_sum:SI (match_operand:SI 1 "register_operand" "r")
1700 (match_operand:SI 2 "immediate_operand" "i")))]
1701 "!is_function_label_plus_const (operands[2])"
1704 if (flag_pic && symbolic_operand (operands[2], Pmode))
1706 else if (symbolic_operand (operands[2], Pmode))
1707 return \"ldo RR'%G2(%1),%0\";
1709 return \"ldo R'%G2(%1),%0\";
1711 [(set_attr "type" "move")
1712 (set_attr "length" "4")])
1714 ;; Now that a symbolic_address plus a constant is broken up early
1715 ;; in the compilation phase (for better CSE) we need a special
1716 ;; combiner pattern to load the symbolic address plus the constant
1717 ;; in only 2 instructions. (For cases where the symbolic address
1718 ;; was not a common subexpression.)
1720 [(set (match_operand:SI 0 "register_operand" "")
1721 (match_operand:SI 1 "symbolic_operand" ""))
1722 (clobber (match_operand:SI 2 "register_operand" ""))]
1723 "! (flag_pic && pic_label_operand (operands[1], SImode))"
1724 [(set (match_dup 2) (high:SI (match_dup 1)))
1725 (set (match_dup 0) (lo_sum:SI (match_dup 2) (match_dup 1)))]
1728 ;; hppa_legitimize_address goes to a great deal of trouble to
1729 ;; create addresses which use indexing.  In some cases, this
1730 ;; is a loss because there aren't any store instructions which
1731 ;; allow indexed addresses (with integer register source).
1733 ;; These define_splits try to turn a 3 insn store into
1734 ;; a 2 insn store with some creative RTL rewriting.
1736 [(set (mem:SI (plus:SI (mult:SI (match_operand:SI 0 "register_operand" "")
1737 (match_operand:SI 1 "shadd_operand" ""))
1738 (plus:SI (match_operand:SI 2 "register_operand" "")
1739 (match_operand:SI 3 "const_int_operand" ""))))
1740 (match_operand:SI 4 "register_operand" ""))
1741 (clobber (match_operand:SI 5 "register_operand" ""))]
1743 [(set (match_dup 5) (plus:SI (mult:SI (match_dup 0) (match_dup 1))
1745 (set (mem:SI (plus:SI (match_dup 5) (match_dup 3))) (match_dup 4))]
1749 [(set (mem:HI (plus:SI (mult:SI (match_operand:SI 0 "register_operand" "")
1750 (match_operand:SI 1 "shadd_operand" ""))
1751 (plus:SI (match_operand:SI 2 "register_operand" "")
1752 (match_operand:SI 3 "const_int_operand" ""))))
1753 (match_operand:HI 4 "register_operand" ""))
1754 (clobber (match_operand:SI 5 "register_operand" ""))]
1756 [(set (match_dup 5) (plus:SI (mult:SI (match_dup 0) (match_dup 1))
1758 (set (mem:HI (plus:SI (match_dup 5) (match_dup 3))) (match_dup 4))]
1762 [(set (mem:QI (plus:SI (mult:SI (match_operand:SI 0 "register_operand" "")
1763 (match_operand:SI 1 "shadd_operand" ""))
1764 (plus:SI (match_operand:SI 2 "register_operand" "")
1765 (match_operand:SI 3 "const_int_operand" ""))))
1766 (match_operand:QI 4 "register_operand" ""))
1767 (clobber (match_operand:SI 5 "register_operand" ""))]
1769 [(set (match_dup 5) (plus:SI (mult:SI (match_dup 0) (match_dup 1))
1771 (set (mem:QI (plus:SI (match_dup 5) (match_dup 3))) (match_dup 4))]
;; "movhi" expander: general HImode move; delegates the hard cases to
;; emit_move_sequence with no scratch register.  NOTE(review): the
;; DONE/fall-through tail is missing from this extract.
1774 (define_expand "movhi"
1775 [(set (match_operand:HI 0 "general_operand" "")
1776 (match_operand:HI 1 "general_operand" ""))]
1780 if (emit_move_sequence (operands, HImode, 0))
1785 [(set (match_operand:HI 0 "reg_or_nonsymb_mem_operand" "=r,r,r,r,r,Q,*q,!f")
1786 (match_operand:HI 1 "move_operand" "r,J,N,K,RQ,rM,rM,!fM"))]
1787 "register_operand (operands[0], HImode)
1788 || reg_or_0_operand (operands[1], HImode)"
1798 [(set_attr "type" "move,move,move,shift,load,store,move,fpalu")
1799 (set_attr "pa_combine_type" "addmove")
1800 (set_attr "length" "4,4,4,4,4,4,4,4")])
1803 [(set (match_operand:HI 0 "register_operand" "=r")
1804 (mem:HI (plus:SI (match_operand:SI 1 "basereg_operand" "r")
1805 (match_operand:SI 2 "register_operand" "r"))))]
1806 "! TARGET_DISABLE_INDEXING"
1809 /* Reload can create backwards (relative to cse) unscaled index
1810 address modes when eliminating registers and possibly for
1811 pseudos that don't get hard registers. Deal with it. */
1812 if (operands[2] == hard_frame_pointer_rtx
1813 || operands[2] == stack_pointer_rtx)
1814 return \"ldhx %1(%2),%0\";
1816 return \"ldhx %2(%1),%0\";
1818 [(set_attr "type" "load")
1819 (set_attr "length" "4")])
1822 [(set (match_operand:HI 0 "register_operand" "=r")
1823 (mem:HI (plus:SI (match_operand:SI 1 "register_operand" "r")
1824 (match_operand:SI 2 "basereg_operand" "r"))))]
1825 "! TARGET_DISABLE_INDEXING"
1828 /* Reload can create backwards (relative to cse) unscaled index
1829 address modes when eliminating registers and possibly for
1830 pseudos that don't get hard registers. Deal with it. */
1831 if (operands[1] == hard_frame_pointer_rtx
1832 || operands[1] == stack_pointer_rtx)
1833 return \"ldhx %2(%1),%0\";
1835 return \"ldhx %1(%2),%0\";
1837 [(set_attr "type" "load")
1838 (set_attr "length" "4")])
1840 ; Now zero extended variants.
1842 [(set (match_operand:SI 0 "register_operand" "=r")
1843 (zero_extend:SI (mem:HI
1845 (match_operand:SI 1 "basereg_operand" "r")
1846 (match_operand:SI 2 "register_operand" "r")))))]
1847 "! TARGET_DISABLE_INDEXING"
1850 /* Reload can create backwards (relative to cse) unscaled index
1851 address modes when eliminating registers and possibly for
1852 pseudos that don't get hard registers. Deal with it. */
1853 if (operands[2] == hard_frame_pointer_rtx
1854 || operands[2] == stack_pointer_rtx)
1855 return \"ldhx %1(%2),%0\";
1857 return \"ldhx %2(%1),%0\";
1859 [(set_attr "type" "load")
1860 (set_attr "length" "4")])
1863 [(set (match_operand:SI 0 "register_operand" "=r")
1864 (zero_extend:SI (mem:HI
1866 (match_operand:SI 1 "register_operand" "r")
1867 (match_operand:SI 2 "basereg_operand" "r")))))]
1868 "! TARGET_DISABLE_INDEXING"
1871 /* Reload can create backwards (relative to cse) unscaled index
1872 address modes when eliminating registers and possibly for
1873 pseudos that don't get hard registers. Deal with it. */
1874 if (operands[1] == hard_frame_pointer_rtx
1875 || operands[1] == stack_pointer_rtx)
1876 return \"ldhx %2(%1),%0\";
1878 return \"ldhx %1(%2),%0\";
1880 [(set_attr "type" "load")
1881 (set_attr "length" "4")])
1884 [(set (match_operand:HI 0 "register_operand" "=r")
1885 (mem:HI (plus:SI (match_operand:SI 1 "register_operand" "+r")
1886 (match_operand:SI 2 "int5_operand" "L"))))
1888 (plus:SI (match_dup 1) (match_dup 2)))]
1891 [(set_attr "type" "load")
1892 (set_attr "length" "4")])
1894 ; And a zero extended variant.
1896 [(set (match_operand:SI 0 "register_operand" "=r")
1897 (zero_extend:SI (mem:HI
1899 (match_operand:SI 1 "register_operand" "+r")
1900 (match_operand:SI 2 "int5_operand" "L")))))
1902 (plus:SI (match_dup 1) (match_dup 2)))]
1905 [(set_attr "type" "load")
1906 (set_attr "length" "4")])
1909 [(set (mem:HI (plus:SI (match_operand:SI 0 "register_operand" "+r")
1910 (match_operand:SI 1 "int5_operand" "L")))
1911 (match_operand:HI 2 "reg_or_0_operand" "rM"))
1913 (plus:SI (match_dup 0) (match_dup 1)))]
1915 "sths,mb %r2,%1(%0)"
1916 [(set_attr "type" "store")
1917 (set_attr "length" "4")])
1920 [(set (match_operand:HI 0 "register_operand" "=r")
1921 (high:HI (match_operand 1 "const_int_operand" "")))]
1924 [(set_attr "type" "move")
1925 (set_attr "length" "4")])
1928 [(set (match_operand:HI 0 "register_operand" "=r")
1929 (lo_sum:HI (match_operand:HI 1 "register_operand" "r")
1930 (match_operand 2 "const_int_operand" "")))]
1933 [(set_attr "type" "move")
1934 (set_attr "length" "4")])
;; "movqi" expander: general QImode move; delegates the hard cases to
;; emit_move_sequence with no scratch register.  NOTE(review): the
;; DONE/fall-through tail is missing from this extract.
1936 (define_expand "movqi"
1937 [(set (match_operand:QI 0 "general_operand" "")
1938 (match_operand:QI 1 "general_operand" ""))]
1942 if (emit_move_sequence (operands, QImode, 0))
1947 [(set (match_operand:QI 0 "reg_or_nonsymb_mem_operand" "=r,r,r,r,r,Q,*q,!f")
1948 (match_operand:QI 1 "move_operand" "r,J,N,K,RQ,rM,rM,!fM"))]
1949 "register_operand (operands[0], QImode)
1950 || reg_or_0_operand (operands[1], QImode)"
1960 [(set_attr "type" "move,move,move,shift,load,store,move,fpalu")
1961 (set_attr "pa_combine_type" "addmove")
1962 (set_attr "length" "4,4,4,4,4,4,4,4")])
1965 [(set (match_operand:QI 0 "register_operand" "=r")
1966 (mem:QI (plus:SI (match_operand:SI 1 "basereg_operand" "r")
1967 (match_operand:SI 2 "register_operand" "r"))))]
1968 "! TARGET_DISABLE_INDEXING"
1971 /* Reload can create backwards (relative to cse) unscaled index
1972 address modes when eliminating registers and possibly for
1973 pseudos that don't get hard registers. Deal with it. */
1974 if (operands[2] == hard_frame_pointer_rtx
1975 || operands[2] == stack_pointer_rtx)
1976 return \"ldbx %1(%2),%0\";
1978 return \"ldbx %2(%1),%0\";
1980 [(set_attr "type" "load")
1981 (set_attr "length" "4")])
1984 [(set (match_operand:QI 0 "register_operand" "=r")
1985 (mem:QI (plus:SI (match_operand:SI 1 "register_operand" "r")
1986 (match_operand:SI 2 "basereg_operand" "r"))))]
1987 "! TARGET_DISABLE_INDEXING"
1990 /* Reload can create backwards (relative to cse) unscaled index
1991 address modes when eliminating registers and possibly for
1992 pseudos that don't get hard registers. Deal with it. */
1993 if (operands[1] == hard_frame_pointer_rtx
1994 || operands[1] == stack_pointer_rtx)
1995 return \"ldbx %2(%1),%0\";
1997 return \"ldbx %1(%2),%0\";
1999 [(set_attr "type" "load")
2000 (set_attr "length" "4")])
2002 ; Indexed byte load with zero extension to SImode or HImode.
2004 [(set (match_operand:SI 0 "register_operand" "=r")
2005 (zero_extend:SI (mem:QI
2007 (match_operand:SI 1 "basereg_operand" "r")
2008 (match_operand:SI 2 "register_operand" "r")))))]
2009 "! TARGET_DISABLE_INDEXING"
2012 /* Reload can create backwards (relative to cse) unscaled index
2013 address modes when eliminating registers and possibly for
2014 pseudos that don't get hard registers. Deal with it. */
2015 if (operands[2] == hard_frame_pointer_rtx
2016 || operands[2] == stack_pointer_rtx)
2017 return \"ldbx %1(%2),%0\";
2019 return \"ldbx %2(%1),%0\";
2021 [(set_attr "type" "load")
2022 (set_attr "length" "4")])
2025 [(set (match_operand:SI 0 "register_operand" "=r")
2026 (zero_extend:SI (mem:QI
2028 (match_operand:SI 1 "register_operand" "r")
2029 (match_operand:SI 2 "basereg_operand" "r")))))]
2030 "! TARGET_DISABLE_INDEXING"
2033 /* Reload can create backwards (relative to cse) unscaled index
2034 address modes when eliminating registers and possibly for
2035 pseudos that don't get hard registers. Deal with it. */
2036 if (operands[1] == hard_frame_pointer_rtx
2037 || operands[1] == stack_pointer_rtx)
2038 return \"ldbx %2(%1),%0\";
2040 return \"ldbx %1(%2),%0\";
2042 [(set_attr "type" "load")
2043 (set_attr "length" "4")])
2046 [(set (match_operand:HI 0 "register_operand" "=r")
2047 (zero_extend:HI (mem:QI
2049 (match_operand:SI 1 "basereg_operand" "r")
2050 (match_operand:SI 2 "register_operand" "r")))))]
2051 "! TARGET_DISABLE_INDEXING"
2054 /* Reload can create backwards (relative to cse) unscaled index
2055 address modes when eliminating registers and possibly for
2056 pseudos that don't get hard registers. Deal with it. */
2057 if (operands[2] == hard_frame_pointer_rtx
2058 || operands[2] == stack_pointer_rtx)
2059 return \"ldbx %1(%2),%0\";
2061 return \"ldbx %2(%1),%0\";
2063 [(set_attr "type" "load")
2064 (set_attr "length" "4")])
2067 [(set (match_operand:HI 0 "register_operand" "=r")
2068 (zero_extend:HI (mem:QI
2070 (match_operand:SI 1 "register_operand" "r")
2071 (match_operand:SI 2 "basereg_operand" "r")))))]
2072 "! TARGET_DISABLE_INDEXING"
2075 /* Reload can create backwards (relative to cse) unscaled index
2076 address modes when eliminating registers and possibly for
2077 pseudos that don't get hard registers. Deal with it. */
2078 if (operands[1] == hard_frame_pointer_rtx
2079 || operands[1] == stack_pointer_rtx)
2080 return \"ldbx %2(%1),%0\";
2082 return \"ldbx %1(%2),%0\";
2084 [(set_attr "type" "load")
2085 (set_attr "length" "4")])
2088 [(set (match_operand:QI 0 "register_operand" "=r")
2089 (mem:QI (plus:SI (match_operand:SI 1 "register_operand" "+r")
2090 (match_operand:SI 2 "int5_operand" "L"))))
2091 (set (match_dup 1) (plus:SI (match_dup 1) (match_dup 2)))]
2094 [(set_attr "type" "load")
2095 (set_attr "length" "4")])
2097 ; Now the same thing with zero extensions.
2099 [(set (match_operand:SI 0 "register_operand" "=r")
2100 (zero_extend:SI (mem:QI (plus:SI
2101 (match_operand:SI 1 "register_operand" "+r")
2102 (match_operand:SI 2 "int5_operand" "L")))))
2103 (set (match_dup 1) (plus:SI (match_dup 1) (match_dup 2)))]
2106 [(set_attr "type" "load")
2107 (set_attr "length" "4")])
2110 [(set (match_operand:HI 0 "register_operand" "=r")
2111 (zero_extend:HI (mem:QI (plus:SI
2112 (match_operand:SI 1 "register_operand" "+r")
2113 (match_operand:SI 2 "int5_operand" "L")))))
2114 (set (match_dup 1) (plus:SI (match_dup 1) (match_dup 2)))]
2117 [(set_attr "type" "load")
2118 (set_attr "length" "4")])
2121 [(set (mem:QI (plus:SI (match_operand:SI 0 "register_operand" "+r")
2122 (match_operand:SI 1 "int5_operand" "L")))
2123 (match_operand:QI 2 "reg_or_0_operand" "rM"))
2125 (plus:SI (match_dup 0) (match_dup 1)))]
2127 "stbs,mb %r2,%1(%0)"
2128 [(set_attr "type" "store")
2129 (set_attr "length" "4")])
2131 ;; The definition of this insn does not really explain what it does,
2132 ;; but it should suffice
2133 ;; that anything generated as this insn will be recognized as one
2134 ;; and that it will not successfully combine with anything.
2135 (define_expand "movstrsi"
2136 [(parallel [(set (match_operand:BLK 0 "" "")
2137 (match_operand:BLK 1 "" ""))
2138 (clobber (match_dup 7))
2139 (clobber (match_dup 8))
2140 (clobber (match_dup 4))
2141 (clobber (match_dup 5))
2142 (clobber (match_dup 6))
2143 (use (match_operand:SI 2 "arith_operand" ""))
2144 (use (match_operand:SI 3 "const_int_operand" ""))])]
2150 /* HP provides very fast block move library routine for the PA;
2151 this routine includes:
2153 4x4 byte at a time block moves,
2154 1x4 byte at a time with alignment checked at runtime with
2155 attempts to align the source and destination as needed
2158 With that in mind, here's the heuristics to try and guess when
2159 the inlined block move will be better than the library block
2162 If the size isn't constant, then always use the library routines.
2164 If the size is large in respect to the known alignment, then use
2165 the library routines.
2167 If the size is small in respect to the known alignment, then open
2168 code the copy (since that will lead to better scheduling).
2170 Else use the block move pattern. */
2172 /* Undetermined size, use the library routine. */
2173 if (GET_CODE (operands[2]) != CONST_INT)
2176 size = INTVAL (operands[2]);
2177 align = INTVAL (operands[3]);
2178 align = align > 4 ? 4 : align;
2180 /* If size/alignment > 16 (i.e. the size is large in respect to the
2181 known alignment), then use the library routines. */
2182 if (size / align > 16)
2185 /* This does happen, but not often enough to worry much about. */
2186 if (size / align < MOVE_RATIO)
2189 /* Fall through means we're going to use our block move pattern. */
2191 = change_address (operands[0], VOIDmode,
2192 copy_to_mode_reg (SImode, XEXP (operands[0], 0)));
2194 = change_address (operands[1], VOIDmode,
2195 copy_to_mode_reg (SImode, XEXP (operands[1], 0)));
2196 operands[4] = gen_reg_rtx (SImode);
2197 operands[5] = gen_reg_rtx (SImode);
2198 operands[6] = gen_reg_rtx (SImode);
2199 operands[7] = XEXP (operands[0], 0);
2200 operands[8] = XEXP (operands[1], 0);
2203 ;; The operand constraints are written like this to support both compile-time
2204 ;; and run-time determined byte count. If the count is run-time determined,
2205 ;; the register with the byte count is clobbered by the copying code, and
2206 ;; therefore it is forced to operand 2. If the count is compile-time
2207 ;; determined, we need two scratch registers for the unrolled code.
;; "movstrsi_internal": block move whose assembly is produced by
;; output_block_move.  Alternative 0 handles a compile-time byte count
;; (constraint "J") using two scratch registers (operands 3 and 6);
;; alternative 1 ties the run-time byte count to operand 2 (constraint
;; "2"), which the copy loop clobbers.  Operand 5 is the alignment.
;; NOTE(review): some interior lines of this pattern are missing from
;; this extract.
2208 (define_insn "movstrsi_internal"
2209 [(set (mem:BLK (match_operand:SI 0 "register_operand" "+r,r"))
2210 (mem:BLK (match_operand:SI 1 "register_operand" "+r,r")))
2211 (clobber (match_dup 0))
2212 (clobber (match_dup 1))
2213 (clobber (match_operand:SI 2 "register_operand" "=r,r")) ;loop cnt/tmp
2214 (clobber (match_operand:SI 3 "register_operand" "=&r,&r")) ;item tmp
2215 (clobber (match_operand:SI 6 "register_operand" "=&r,&r")) ;item tmp2
2216 (use (match_operand:SI 4 "arith_operand" "J,2")) ;byte count
2217 (use (match_operand:SI 5 "const_int_operand" "n,n"))] ;alignment
2219 "* return output_block_move (operands, !which_alternative);"
2220 [(set_attr "type" "multi,multi")])
2222 ;; Floating point move insns
2224 ;; This pattern forces (set (reg:DF ...) (const_double ...))
2225 ;; to be reloaded by putting the constant into memory when
2226 ;; reg is a floating point register.
2228 ;; For integer registers we use ldil;ldo to set the appropriate
2231 ;; This must come before the movdf pattern, and it must be present
2232 ;; to handle obscure reloading cases.
2234 [(set (match_operand:DF 0 "register_operand" "=?r,f")
2235 (match_operand:DF 1 "" "?F,m"))]
2236 "GET_CODE (operands[1]) == CONST_DOUBLE
2237 && operands[1] != CONST0_RTX (DFmode)
2238 && ! TARGET_SOFT_FLOAT"
2239 "* return (which_alternative == 0 ? output_move_double (operands)
2240 : \"fldd%F1 %1,%0\");"
2241 [(set_attr "type" "move,fpload")
2242 (set_attr "length" "16,4")])
;; "movdf" expander: general DFmode move; delegates the hard cases to
;; emit_move_sequence with no scratch register.  NOTE(review): the
;; DONE/fall-through tail is missing from this extract.
2244 (define_expand "movdf"
2245 [(set (match_operand:DF 0 "general_operand" "")
2246 (match_operand:DF 1 "general_operand" ""))]
2250 if (emit_move_sequence (operands, DFmode, 0))
2254 ;; Reloading an SImode or DImode value requires a scratch register if
2255 ;; going in to or out of float point registers.
;; "reload_indf": reload a DFmode value into a register of class Z,
;; with operand 2 as the scratch register reload provides (needed when
;; the value moves in/out of floating point registers).  The clobber is
;; a contract with reload only; the emitted RTL is a plain SET.
;; NOTE(review): interior lines of this expander are missing from this
;; extract.
2257 (define_expand "reload_indf"
2258 [(set (match_operand:DF 0 "register_operand" "=Z")
2259 (match_operand:DF 1 "non_hard_reg_operand" ""))
2260 (clobber (match_operand:DF 2 "register_operand" "=&r"))]
2264 if (emit_move_sequence (operands, DFmode, operands[2]))
2267 /* We don't want the clobber emitted, so handle this ourselves. */
2268 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
;; "reload_outdf": store-direction counterpart of reload_indf; moves a
;; DFmode value out of a class-Z register using scratch operand 2.  As
;; above, the clobber is for reload's benefit only and is not emitted.
;; NOTE(review): interior lines of this expander are missing from this
;; extract.
2272 (define_expand "reload_outdf"
2273 [(set (match_operand:DF 0 "non_hard_reg_operand" "")
2274 (match_operand:DF 1 "register_operand" "Z"))
2275 (clobber (match_operand:DF 2 "register_operand" "=&r"))]
2279 if (emit_move_sequence (operands, DFmode, operands[2]))
2282 /* We don't want the clobber emitted, so handle this ourselves. */
2283 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2288 [(set (match_operand:DF 0 "reg_or_nonsymb_mem_operand"
2289 "=f,*r,RQ,?o,?Q,f,*r,*r")
2290 (match_operand:DF 1 "reg_or_0_or_nonsymb_mem_operand"
2291 "fG,*rG,f,*r,*r,RQ,o,RQ"))]
2292 "(register_operand (operands[0], DFmode)
2293 || reg_or_0_operand (operands[1], DFmode))
2294 && ! (GET_CODE (operands[1]) == CONST_DOUBLE
2295 && GET_CODE (operands[0]) == MEM)
2296 && ! TARGET_SOFT_FLOAT"
2299 if (FP_REG_P (operands[0]) || FP_REG_P (operands[1])
2300 || operands[1] == CONST0_RTX (DFmode))
2301 return output_fp_move_double (operands);
2302 return output_move_double (operands);
2304 [(set_attr "type" "fpalu,move,fpstore,store,store,fpload,load,load")
2305 (set_attr "length" "4,8,4,8,16,4,8,16")])
2308 [(set (match_operand:DF 0 "reg_or_nonsymb_mem_operand"
2310 (match_operand:DF 1 "reg_or_0_or_nonsymb_mem_operand"
2312 "(register_operand (operands[0], DFmode)
2313 || reg_or_0_operand (operands[1], DFmode))
2314 && TARGET_SOFT_FLOAT"
2317 return output_move_double (operands);
2319 [(set_attr "type" "move,store,store,load,load")
2320 (set_attr "length" "8,8,16,8,16")])
2323 [(set (match_operand:DF 0 "register_operand" "=fx")
2324 (mem:DF (plus:SI (match_operand:SI 1 "basereg_operand" "r")
2325 (match_operand:SI 2 "register_operand" "r"))))]
2326 "! TARGET_DISABLE_INDEXING && ! TARGET_SOFT_FLOAT"
2329 /* Reload can create backwards (relative to cse) unscaled index
2330 address modes when eliminating registers and possibly for
2331 pseudos that don't get hard registers. Deal with it. */
2332 if (operands[2] == hard_frame_pointer_rtx
2333 || operands[2] == stack_pointer_rtx)
2334 return \"flddx %1(%2),%0\";
2336 return \"flddx %2(%1),%0\";
2338 [(set_attr "type" "fpload")
2339 (set_attr "length" "4")])
2342 [(set (match_operand:DF 0 "register_operand" "=fx")
2343 (mem:DF (plus:SI (match_operand:SI 1 "register_operand" "r")
2344 (match_operand:SI 2 "basereg_operand" "r"))))]
2345 "! TARGET_DISABLE_INDEXING && ! TARGET_SOFT_FLOAT"
2348 /* Reload can create backwards (relative to cse) unscaled index
2349 address modes when eliminating registers and possibly for
2350 pseudos that don't get hard registers. Deal with it. */
2351 if (operands[1] == hard_frame_pointer_rtx
2352 || operands[1] == stack_pointer_rtx)
2353 return \"flddx %2(%1),%0\";
2355 return \"flddx %1(%2),%0\";
2357 [(set_attr "type" "fpload")
2358 (set_attr "length" "4")])
2361 [(set (mem:DF (plus:SI (match_operand:SI 1 "basereg_operand" "r")
2362 (match_operand:SI 2 "register_operand" "r")))
2363 (match_operand:DF 0 "register_operand" "fx"))]
2364 "! TARGET_DISABLE_INDEXING && ! TARGET_SOFT_FLOAT"
2367 /* Reload can create backwards (relative to cse) unscaled index
2368 address modes when eliminating registers and possibly for
2369 pseudos that don't get hard registers. Deal with it. */
2370 if (operands[2] == hard_frame_pointer_rtx
2371 || operands[2] == stack_pointer_rtx)
2372 return \"fstdx %0,%1(%2)\";
2374 return \"fstdx %0,%2(%1)\";
2376 [(set_attr "type" "fpstore")
2377 (set_attr "length" "4")])
2380 [(set (mem:DF (plus:SI (match_operand:SI 1 "register_operand" "r")
2381 (match_operand:SI 2 "basereg_operand" "r")))
2382 (match_operand:DF 0 "register_operand" "fx"))]
2383 "! TARGET_DISABLE_INDEXING && ! TARGET_SOFT_FLOAT"
2386 /* Reload can create backwards (relative to cse) unscaled index
2387 address modes when eliminating registers and possibly for
2388 pseudos that don't get hard registers. Deal with it. */
2389 if (operands[1] == hard_frame_pointer_rtx
2390 || operands[1] == stack_pointer_rtx)
2391 return \"fstdx %0,%2(%1)\";
2393 return \"fstdx %0,%1(%2)\";
2395 [(set_attr "type" "fpstore")
2396 (set_attr "length" "4")])
2398 (define_expand "movdi"
2399 [(set (match_operand:DI 0 "reg_or_nonsymb_mem_operand" "")
2400 (match_operand:DI 1 "general_operand" ""))]
2404 if (emit_move_sequence (operands, DImode, 0))
2408 (define_expand "reload_indi"
2409 [(set (match_operand:DI 0 "register_operand" "=f")
2410 (match_operand:DI 1 "non_hard_reg_operand" ""))
2411 (clobber (match_operand:SI 2 "register_operand" "=&r"))]
2415 if (emit_move_sequence (operands, DImode, operands[2]))
2418 /* We don't want the clobber emitted, so handle this ourselves. */
2419 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2423 (define_expand "reload_outdi"
2424 [(set (match_operand:DI 0 "general_operand" "")
2425 (match_operand:DI 1 "register_operand" "f"))
2426 (clobber (match_operand:SI 2 "register_operand" "=&r"))]
2430 if (emit_move_sequence (operands, DImode, operands[2]))
2433 /* We don't want the clobber emitted, so handle this ourselves. */
2434 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2439 [(set (match_operand:DI 0 "register_operand" "=r")
2440 (high:DI (match_operand 1 "" "")))]
2444 rtx op0 = operands[0];
2445 rtx op1 = operands[1];
2447 if (GET_CODE (op1) == CONST_INT)
2449 operands[0] = operand_subword (op0, 1, 0, DImode);
2450 output_asm_insn (\"ldil L'%1,%0\", operands);
2452 operands[0] = operand_subword (op0, 0, 0, DImode);
2453 if (INTVAL (op1) < 0)
2454 output_asm_insn (\"ldi -1,%0\", operands);
2456 output_asm_insn (\"ldi 0,%0\", operands);
2459 else if (GET_CODE (op1) == CONST_DOUBLE)
2461 operands[0] = operand_subword (op0, 1, 0, DImode);
2462 operands[1] = GEN_INT (CONST_DOUBLE_LOW (op1));
2463 output_asm_insn (\"ldil L'%1,%0\", operands);
2465 operands[0] = operand_subword (op0, 0, 0, DImode);
2466 operands[1] = GEN_INT (CONST_DOUBLE_HIGH (op1));
2467 output_asm_insn (singlemove_string (operands), operands);
2473 [(set_attr "type" "move")
2474 (set_attr "length" "8")])
2477 [(set (match_operand:DI 0 "reg_or_nonsymb_mem_operand"
2478 "=r,o,Q,r,r,r,f,f,*TR")
2479 (match_operand:DI 1 "general_operand"
2480 "rM,r,r,o*R,Q,i,fM,*TR,f"))]
2481 "(register_operand (operands[0], DImode)
2482 || reg_or_0_operand (operands[1], DImode))
2483 && ! TARGET_SOFT_FLOAT"
2486 if (FP_REG_P (operands[0]) || FP_REG_P (operands[1])
2487 || (operands[1] == CONST0_RTX (DImode)))
2488 return output_fp_move_double (operands);
2489 return output_move_double (operands);
2491 [(set_attr "type" "move,store,store,load,load,multi,fpalu,fpload,fpstore")
2492 (set_attr "length" "8,8,16,8,16,16,4,4,4")])
2495 [(set (match_operand:DI 0 "reg_or_nonsymb_mem_operand"
2497 (match_operand:DI 1 "general_operand"
2499 "(register_operand (operands[0], DImode)
2500 || reg_or_0_operand (operands[1], DImode))
2501 && TARGET_SOFT_FLOAT"
2504 return output_move_double (operands);
2506 [(set_attr "type" "move,store,store,load,load,multi")
2507 (set_attr "length" "8,8,16,8,16,16")])
2510 [(set (match_operand:DI 0 "register_operand" "=r,&r")
2511 (lo_sum:DI (match_operand:DI 1 "register_operand" "0,r")
2512 (match_operand:DI 2 "immediate_operand" "i,i")))]
2516 /* Don't output a 64 bit constant, since we can't trust the assembler to
2517 handle it correctly. */
2518 if (GET_CODE (operands[2]) == CONST_DOUBLE)
2519 operands[2] = GEN_INT (CONST_DOUBLE_LOW (operands[2]));
2520 if (which_alternative == 1)
2521 output_asm_insn (\"copy %1,%0\", operands);
2522 return \"ldo R'%G2(%R1),%R0\";
2524 [(set_attr "type" "move,move")
2525 (set_attr "length" "4,8")])
2527 ;; This pattern forces (set (reg:SF ...) (const_double ...))
2528 ;; to be reloaded by putting the constant into memory when
2529 ;; reg is a floating point register.
2531 ;; For integer registers we use ldil;ldo to set the appropriate
2534 ;; This must come before the movsf pattern, and it must be present
2535 ;; to handle obscure reloading cases.
2537 [(set (match_operand:SF 0 "register_operand" "=?r,f")
2538 (match_operand:SF 1 "" "?F,m"))]
2539 "GET_CODE (operands[1]) == CONST_DOUBLE
2540 && operands[1] != CONST0_RTX (SFmode)
2541 && ! TARGET_SOFT_FLOAT"
2542 "* return (which_alternative == 0 ? singlemove_string (operands)
2543 : \" fldw%F1 %1,%0\");"
2544 [(set_attr "type" "move,fpload")
2545 (set_attr "length" "8,4")])
2547 (define_expand "movsf"
2548 [(set (match_operand:SF 0 "general_operand" "")
2549 (match_operand:SF 1 "general_operand" ""))]
2553 if (emit_move_sequence (operands, SFmode, 0))
2557 ;; Reloading an SImode or DImode value requires a scratch register if
2558 ;; it is going into or out of the floating point registers.
2560 (define_expand "reload_insf"
2561 [(set (match_operand:SF 0 "register_operand" "=Z")
2562 (match_operand:SF 1 "non_hard_reg_operand" ""))
2563 (clobber (match_operand:SF 2 "register_operand" "=&r"))]
2567 if (emit_move_sequence (operands, SFmode, operands[2]))
2570 /* We don't want the clobber emitted, so handle this ourselves. */
2571 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2575 (define_expand "reload_outsf"
2576 [(set (match_operand:SF 0 "non_hard_reg_operand" "")
2577 (match_operand:SF 1 "register_operand" "Z"))
2578 (clobber (match_operand:SF 2 "register_operand" "=&r"))]
2582 if (emit_move_sequence (operands, SFmode, operands[2]))
2585 /* We don't want the clobber emitted, so handle this ourselves. */
2586 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2591 [(set (match_operand:SF 0 "reg_or_nonsymb_mem_operand"
2593 (match_operand:SF 1 "reg_or_0_or_nonsymb_mem_operand"
2594 "fG,rG,RQ,RQ,f,rG"))]
2595 "(register_operand (operands[0], SFmode)
2596 || reg_or_0_operand (operands[1], SFmode))
2597 && ! TARGET_SOFT_FLOAT"
2605 [(set_attr "type" "fpalu,move,fpload,load,fpstore,store")
2606 (set_attr "pa_combine_type" "addmove")
2607 (set_attr "length" "4,4,4,4,4,4")])
2610 [(set (match_operand:SF 0 "reg_or_nonsymb_mem_operand"
2612 (match_operand:SF 1 "reg_or_0_or_nonsymb_mem_operand"
2614 "(register_operand (operands[0], SFmode)
2615 || reg_or_0_operand (operands[1], SFmode))
2616 && TARGET_SOFT_FLOAT"
2621 [(set_attr "type" "move,load,store")
2622 (set_attr "pa_combine_type" "addmove")
2623 (set_attr "length" "4,4,4")])
2626 [(set (match_operand:SF 0 "register_operand" "=fx")
2627 (mem:SF (plus:SI (match_operand:SI 1 "basereg_operand" "r")
2628 (match_operand:SI 2 "register_operand" "r"))))]
2629 "! TARGET_DISABLE_INDEXING && ! TARGET_SOFT_FLOAT"
2632 /* Reload can create backwards (relative to cse) unscaled index
2633 address modes when eliminating registers and possibly for
2634 pseudos that don't get hard registers. Deal with it. */
2635 if (operands[2] == hard_frame_pointer_rtx
2636 || operands[2] == stack_pointer_rtx)
2637 return \"fldwx %1(%2),%0\";
2639 return \"fldwx %2(%1),%0\";
2641 [(set_attr "type" "fpload")
2642 (set_attr "length" "4")])
2645 [(set (match_operand:SF 0 "register_operand" "=fx")
2646 (mem:SF (plus:SI (match_operand:SI 1 "register_operand" "r")
2647 (match_operand:SI 2 "basereg_operand" "r"))))]
2648 "! TARGET_DISABLE_INDEXING && ! TARGET_SOFT_FLOAT"
2651 /* Reload can create backwards (relative to cse) unscaled index
2652 address modes when eliminating registers and possibly for
2653 pseudos that don't get hard registers. Deal with it. */
2654 if (operands[1] == hard_frame_pointer_rtx
2655 || operands[1] == stack_pointer_rtx)
2656 return \"fldwx %2(%1),%0\";
2658 return \"fldwx %1(%2),%0\";
2660 [(set_attr "type" "fpload")
2661 (set_attr "length" "4")])
2664 [(set (mem:SF (plus:SI (match_operand:SI 1 "basereg_operand" "r")
2665 (match_operand:SI 2 "register_operand" "r")))
2666 (match_operand:SF 0 "register_operand" "fx"))]
2667 "! TARGET_DISABLE_INDEXING && ! TARGET_SOFT_FLOAT"
2670 /* Reload can create backwards (relative to cse) unscaled index
2671 address modes when eliminating registers and possibly for
2672 pseudos that don't get hard registers. Deal with it. */
2673 if (operands[2] == hard_frame_pointer_rtx
2674 || operands[2] == stack_pointer_rtx)
2675 return \"fstwx %0,%1(%2)\";
2677 return \"fstwx %0,%2(%1)\";
2679 [(set_attr "type" "fpstore")
2680 (set_attr "length" "4")])
2683 [(set (mem:SF (plus:SI (match_operand:SI 1 "register_operand" "r")
2684 (match_operand:SI 2 "basereg_operand" "r")))
2685 (match_operand:SF 0 "register_operand" "fx"))]
2686 "! TARGET_DISABLE_INDEXING && ! TARGET_SOFT_FLOAT"
2689 /* Reload can create backwards (relative to cse) unscaled index
2690 address modes when eliminating registers and possibly for
2691 pseudos that don't get hard registers. Deal with it. */
2692 if (operands[1] == hard_frame_pointer_rtx
2693 || operands[1] == stack_pointer_rtx)
2694 return \"fstwx %0,%2(%1)\";
2696 return \"fstwx %0,%1(%2)\";
2698 [(set_attr "type" "fpstore")
2699 (set_attr "length" "4")])
2702 ;;- zero extension instructions
2703 ;; We have define_expand for zero extension patterns to make sure the
2704 ;; operands get loaded into registers. The define_insns accept
2705 ;; memory operands. This gives us better overall code than just
2706 ;; having a pattern that does or does not accept memory operands.
2708 (define_expand "zero_extendhisi2"
2709 [(set (match_operand:SI 0 "register_operand" "")
2711 (match_operand:HI 1 "register_operand" "")))]
2716 [(set (match_operand:SI 0 "register_operand" "=r,r")
2718 (match_operand:HI 1 "move_operand" "r,RQ")))]
2719 "GET_CODE (operands[1]) != CONST_INT"
2723 [(set_attr "type" "shift,load")
2724 (set_attr "length" "4,4")])
2726 (define_expand "zero_extendqihi2"
2727 [(set (match_operand:HI 0 "register_operand" "")
2729 (match_operand:QI 1 "register_operand" "")))]
2734 [(set (match_operand:HI 0 "register_operand" "=r,r")
2736 (match_operand:QI 1 "move_operand" "r,RQ")))]
2737 "GET_CODE (operands[1]) != CONST_INT"
2741 [(set_attr "type" "shift,load")
2742 (set_attr "length" "4,4")])
2744 (define_expand "zero_extendqisi2"
2745 [(set (match_operand:SI 0 "register_operand" "")
2747 (match_operand:QI 1 "register_operand" "")))]
2752 [(set (match_operand:SI 0 "register_operand" "=r,r")
2754 (match_operand:QI 1 "move_operand" "r,RQ")))]
2755 "GET_CODE (operands[1]) != CONST_INT"
2759 [(set_attr "type" "shift,load")
2760 (set_attr "length" "4,4")])
2762 ;;- sign extension instructions
2764 (define_insn "extendhisi2"
2765 [(set (match_operand:SI 0 "register_operand" "=r")
2766 (sign_extend:SI (match_operand:HI 1 "register_operand" "r")))]
2769 [(set_attr "type" "shift")
2770 (set_attr "length" "4")])
2772 (define_insn "extendqihi2"
2773 [(set (match_operand:HI 0 "register_operand" "=r")
2774 (sign_extend:HI (match_operand:QI 1 "register_operand" "r")))]
2777 [(set_attr "type" "shift")
2778 (set_attr "length" "4")])
2780 (define_insn "extendqisi2"
2781 [(set (match_operand:SI 0 "register_operand" "=r")
2782 (sign_extend:SI (match_operand:QI 1 "register_operand" "r")))]
2785 [(set_attr "type" "shift")
2786 (set_attr "length" "4")])
2788 ;; Conversions between float and double.
2790 (define_insn "extendsfdf2"
2791 [(set (match_operand:DF 0 "register_operand" "=f")
2793 (match_operand:SF 1 "register_operand" "f")))]
2794 "! TARGET_SOFT_FLOAT"
2795 "fcnvff,sgl,dbl %1,%0"
2796 [(set_attr "type" "fpalu")
2797 (set_attr "length" "4")])
2799 (define_insn "truncdfsf2"
2800 [(set (match_operand:SF 0 "register_operand" "=f")
2802 (match_operand:DF 1 "register_operand" "f")))]
2803 "! TARGET_SOFT_FLOAT"
2804 "fcnvff,dbl,sgl %1,%0"
2805 [(set_attr "type" "fpalu")
2806 (set_attr "length" "4")])
2808 ;; Conversion between fixed point and floating point.
2809 ;; Note that among the fix-to-float insns
2810 ;; the ones that start with SImode come first.
2811 ;; That is so that an operand that is a CONST_INT
2812 ;; (and therefore lacks a specific machine mode)
2813 ;; will be recognized as SImode (which is always valid)
2814 ;; rather than as QImode or HImode.
2816 ;; This pattern forces (set (reg:SF ...) (float:SF (const_int ...)))
2817 ;; to be reloaded by putting the constant into memory.
2818 ;; It must come before the more general floatsisf2 pattern.
2820 [(set (match_operand:SF 0 "register_operand" "=f")
2821 (float:SF (match_operand:SI 1 "const_int_operand" "m")))]
2822 "! TARGET_SOFT_FLOAT"
2823 "fldw%F1 %1,%0\;fcnvxf,sgl,sgl %0,%0"
2824 [(set_attr "type" "fpalu")
2825 (set_attr "length" "8")])
2827 (define_insn "floatsisf2"
2828 [(set (match_operand:SF 0 "register_operand" "=f")
2829 (float:SF (match_operand:SI 1 "register_operand" "f")))]
2830 "! TARGET_SOFT_FLOAT"
2831 "fcnvxf,sgl,sgl %1,%0"
2832 [(set_attr "type" "fpalu")
2833 (set_attr "length" "4")])
2835 ;; This pattern forces (set (reg:DF ...) (float:DF (const_int ...)))
2836 ;; to be reloaded by putting the constant into memory.
2837 ;; It must come before the more general floatsidf2 pattern.
2839 [(set (match_operand:DF 0 "register_operand" "=f")
2840 (float:DF (match_operand:SI 1 "const_int_operand" "m")))]
2841 "! TARGET_SOFT_FLOAT"
2842 "fldw%F1 %1,%0\;fcnvxf,sgl,dbl %0,%0"
2843 [(set_attr "type" "fpalu")
2844 (set_attr "length" "8")])
2846 (define_insn "floatsidf2"
2847 [(set (match_operand:DF 0 "register_operand" "=f")
2848 (float:DF (match_operand:SI 1 "register_operand" "f")))]
2849 "! TARGET_SOFT_FLOAT"
2850 "fcnvxf,sgl,dbl %1,%0"
2851 [(set_attr "type" "fpalu")
2852 (set_attr "length" "4")])
2854 (define_expand "floatunssisf2"
2855 [(set (subreg:SI (match_dup 2) 1)
2856 (match_operand:SI 1 "register_operand" ""))
2857 (set (subreg:SI (match_dup 2) 0)
2859 (set (match_operand:SF 0 "register_operand" "")
2860 (float:SF (match_dup 2)))]
2861 "TARGET_PA_11 && ! TARGET_SOFT_FLOAT"
2862 "operands[2] = gen_reg_rtx (DImode);")
2864 (define_expand "floatunssidf2"
2865 [(set (subreg:SI (match_dup 2) 1)
2866 (match_operand:SI 1 "register_operand" ""))
2867 (set (subreg:SI (match_dup 2) 0)
2869 (set (match_operand:DF 0 "register_operand" "")
2870 (float:DF (match_dup 2)))]
2871 "TARGET_PA_11 && ! TARGET_SOFT_FLOAT"
2872 "operands[2] = gen_reg_rtx (DImode);")
2874 (define_insn "floatdisf2"
2875 [(set (match_operand:SF 0 "register_operand" "=f")
2876 (float:SF (match_operand:DI 1 "register_operand" "f")))]
2877 "TARGET_PA_11 && ! TARGET_SOFT_FLOAT"
2878 "fcnvxf,dbl,sgl %1,%0"
2879 [(set_attr "type" "fpalu")
2880 (set_attr "length" "4")])
2882 (define_insn "floatdidf2"
2883 [(set (match_operand:DF 0 "register_operand" "=f")
2884 (float:DF (match_operand:DI 1 "register_operand" "f")))]
2885 "TARGET_PA_11 && ! TARGET_SOFT_FLOAT"
2886 "fcnvxf,dbl,dbl %1,%0"
2887 [(set_attr "type" "fpalu")
2888 (set_attr "length" "4")])
2890 ;; Convert a float to an actual integer.
2891 ;; Truncation is performed as part of the conversion.
2893 (define_insn "fix_truncsfsi2"
2894 [(set (match_operand:SI 0 "register_operand" "=f")
2895 (fix:SI (fix:SF (match_operand:SF 1 "register_operand" "f"))))]
2896 "! TARGET_SOFT_FLOAT"
2897 "fcnvfxt,sgl,sgl %1,%0"
2898 [(set_attr "type" "fpalu")
2899 (set_attr "length" "4")])
2901 (define_insn "fix_truncdfsi2"
2902 [(set (match_operand:SI 0 "register_operand" "=f")
2903 (fix:SI (fix:DF (match_operand:DF 1 "register_operand" "f"))))]
2904 "! TARGET_SOFT_FLOAT"
2905 "fcnvfxt,dbl,sgl %1,%0"
2906 [(set_attr "type" "fpalu")
2907 (set_attr "length" "4")])
2909 (define_insn "fix_truncsfdi2"
2910 [(set (match_operand:DI 0 "register_operand" "=f")
2911 (fix:DI (fix:SF (match_operand:SF 1 "register_operand" "f"))))]
2912 "TARGET_PA_11 && ! TARGET_SOFT_FLOAT"
2913 "fcnvfxt,sgl,dbl %1,%0"
2914 [(set_attr "type" "fpalu")
2915 (set_attr "length" "4")])
2917 (define_insn "fix_truncdfdi2"
2918 [(set (match_operand:DI 0 "register_operand" "=f")
2919 (fix:DI (fix:DF (match_operand:DF 1 "register_operand" "f"))))]
2920 "TARGET_PA_11 && ! TARGET_SOFT_FLOAT"
2921 "fcnvfxt,dbl,dbl %1,%0"
2922 [(set_attr "type" "fpalu")
2923 (set_attr "length" "4")])
2925 ;;- arithmetic instructions
2927 (define_insn "adddi3"
2928 [(set (match_operand:DI 0 "register_operand" "=r")
2929 (plus:DI (match_operand:DI 1 "register_operand" "%r")
2930 (match_operand:DI 2 "arith11_operand" "rI")))]
2934 if (GET_CODE (operands[2]) == CONST_INT)
2936 if (INTVAL (operands[2]) >= 0)
2937 return \"addi %2,%R1,%R0\;addc %1,0,%0\";
2939 return \"addi %2,%R1,%R0\;subb %1,0,%0\";
2942 return \"add %R2,%R1,%R0\;addc %2,%1,%0\";
2944 [(set_attr "type" "binary")
2945 (set_attr "length" "8")])
2948 [(set (match_operand:SI 0 "register_operand" "=r")
2949 (plus:SI (not:SI (match_operand:SI 1 "register_operand" "r"))
2950 (match_operand:SI 2 "register_operand" "r")))]
2953 [(set_attr "type" "binary")
2954 (set_attr "length" "4")])
2956 ;; define_splits to optimize cases of adding a constant integer
2957 ;; to a register when the constant does not fit in 14 bits.
2959 [(set (match_operand:SI 0 "register_operand" "")
2960 (plus:SI (match_operand:SI 1 "register_operand" "")
2961 (match_operand:SI 2 "const_int_operand" "")))
2962 (clobber (match_operand:SI 4 "register_operand" ""))]
2963 "! cint_ok_for_move (INTVAL (operands[2]))
2964 && VAL_14_BITS_P (INTVAL (operands[2]) >> 1)"
2965 [(set (match_dup 4) (plus:SI (match_dup 1) (match_dup 2)))
2966 (set (match_dup 0) (plus:SI (match_dup 4) (match_dup 3)))]
2969 int val = INTVAL (operands[2]);
2970 int low = (val < 0) ? -0x2000 : 0x1fff;
2971 int rest = val - low;
2973 operands[2] = GEN_INT (rest);
2974 operands[3] = GEN_INT (low);
2978 [(set (match_operand:SI 0 "register_operand" "")
2979 (plus:SI (match_operand:SI 1 "register_operand" "")
2980 (match_operand:SI 2 "const_int_operand" "")))
2981 (clobber (match_operand:SI 4 "register_operand" ""))]
2982 "! cint_ok_for_move (INTVAL (operands[2]))"
2983 [(set (match_dup 4) (match_dup 2))
2984 (set (match_dup 0) (plus:SI (mult:SI (match_dup 4) (match_dup 3))
2988 HOST_WIDE_INT intval = INTVAL (operands[2]);
2990 /* Try dividing the constant by 2, then 4, and finally 8 to see
2991 if we can get a constant which can be loaded into a register
2992 in a single instruction (cint_ok_for_move).
2994 If that fails, try to negate the constant and subtract it
2995 from our input operand. */
2996 if (intval % 2 == 0 && cint_ok_for_move (intval / 2))
2998 operands[2] = GEN_INT (intval / 2);
2999 operands[3] = GEN_INT (2);
3001 else if (intval % 4 == 0 && cint_ok_for_move (intval / 4))
3003 operands[2] = GEN_INT (intval / 4);
3004 operands[3] = GEN_INT (4);
3006 else if (intval % 8 == 0 && cint_ok_for_move (intval / 8))
3008 operands[2] = GEN_INT (intval / 8);
3009 operands[3] = GEN_INT (8);
3011 else if (cint_ok_for_move (-intval))
3013 emit_insn (gen_rtx_SET (VOIDmode, operands[4], GEN_INT (-intval)));
3014 emit_insn (gen_subsi3 (operands[0], operands[1], operands[4]));
3021 (define_insn "addsi3"
3022 [(set (match_operand:SI 0 "register_operand" "=r,r")
3023 (plus:SI (match_operand:SI 1 "register_operand" "%r,r")
3024 (match_operand:SI 2 "arith_operand" "r,J")))]
3029 [(set_attr "type" "binary,binary")
3030 (set_attr "pa_combine_type" "addmove")
3031 (set_attr "length" "4,4")])
3033 ;; Disgusting kludge to work around reload bugs with frame pointer
3034 ;; elimination. Similar to other magic reload patterns in the
3035 ;; indexed memory operations.
3037 [(set (match_operand:SI 0 "register_operand" "=&r")
3038 (plus:SI (plus:SI (match_operand:SI 1 "register_operand" "%r")
3039 (match_operand:SI 2 "register_operand" "r"))
3040 (match_operand:SI 3 "const_int_operand" "rL")))]
3041 "reload_in_progress"
3044 if (GET_CODE (operands[3]) == CONST_INT)
3045 return \"ldo %3(%2),%0\;addl %1,%0,%0\";
3047 return \"addl %3,%2,%0\;addl %1,%0,%0\";
3049 [(set_attr "type" "binary")
3050 (set_attr "length" "8")])
3052 (define_insn "subdi3"
3053 [(set (match_operand:DI 0 "register_operand" "=r")
3054 (minus:DI (match_operand:DI 1 "register_operand" "r")
3055 (match_operand:DI 2 "register_operand" "r")))]
3057 "sub %R1,%R2,%R0\;subb %1,%2,%0"
3058 [(set_attr "type" "binary")
3059 (set_attr "length" "8")])
3061 (define_insn "subsi3"
3062 [(set (match_operand:SI 0 "register_operand" "=r,r")
3063 (minus:SI (match_operand:SI 1 "arith11_operand" "r,I")
3064 (match_operand:SI 2 "register_operand" "r,r")))]
3069 [(set_attr "type" "binary,binary")
3070 (set_attr "length" "4,4")])
3072 ;; Clobbering a "register_operand" instead of a match_scratch
3073 ;; in operand3 of millicode calls avoids spilling %r1 and
3074 ;; produces better code.
3076 ;; The mulsi3 insns set up registers for the millicode call.
3077 (define_expand "mulsi3"
3078 [(set (reg:SI 26) (match_operand:SI 1 "move_operand" ""))
3079 (set (reg:SI 25) (match_operand:SI 2 "move_operand" ""))
3080 (parallel [(set (reg:SI 29) (mult:SI (reg:SI 26) (reg:SI 25)))
3081 (clobber (match_dup 3))
3082 (clobber (reg:SI 26))
3083 (clobber (reg:SI 25))
3084 (clobber (reg:SI 31))])
3085 (set (match_operand:SI 0 "general_operand" "") (reg:SI 29))]
3089 if (TARGET_PA_11 && ! TARGET_DISABLE_FPREGS && ! TARGET_SOFT_FLOAT)
3091 rtx scratch = gen_reg_rtx (DImode);
3092 operands[1] = force_reg (SImode, operands[1]);
3093 operands[2] = force_reg (SImode, operands[2]);
3094 emit_insn (gen_umulsidi3 (scratch, operands[1], operands[2]));
3095 emit_insn (gen_rtx_SET (VOIDmode,
3097 gen_rtx_SUBREG (SImode, scratch, 1)));
3100 operands[3] = gen_reg_rtx (SImode);
3103 (define_insn "umulsidi3"
3104 [(set (match_operand:DI 0 "nonimmediate_operand" "=f")
3105 (mult:DI (zero_extend:DI (match_operand:SI 1 "nonimmediate_operand" "f"))
3106 (zero_extend:DI (match_operand:SI 2 "nonimmediate_operand" "f"))))]
3107 "TARGET_PA_11 && ! TARGET_DISABLE_FPREGS && ! TARGET_SOFT_FLOAT"
3109 [(set_attr "type" "fpmuldbl")
3110 (set_attr "length" "4")])
3113 [(set (match_operand:DI 0 "nonimmediate_operand" "=f")
3114 (mult:DI (zero_extend:DI (match_operand:SI 1 "nonimmediate_operand" "f"))
3115 (match_operand:DI 2 "uint32_operand" "f")))]
3116 "TARGET_PA_11 && ! TARGET_DISABLE_FPREGS && ! TARGET_SOFT_FLOAT"
3118 [(set_attr "type" "fpmuldbl")
3119 (set_attr "length" "4")])
3122 [(set (reg:SI 29) (mult:SI (reg:SI 26) (reg:SI 25)))
3123 (clobber (match_operand:SI 0 "register_operand" "=a"))
3124 (clobber (reg:SI 26))
3125 (clobber (reg:SI 25))
3126 (clobber (reg:SI 31))]
3128 "* return output_mul_insn (0, insn);"
3129 [(set_attr "type" "milli")
3130 (set (attr "length")
3132 ;; Target (or stub) within reach
3133 (and (lt (plus (symbol_ref "total_code_bytes") (pc))
3135 (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
3140 (ne (symbol_ref "TARGET_NO_SPACE_REGS || TARGET_FAST_INDIRECT_CALLS")
3144 ;; Out of reach, but not PIC or PORTABLE_RUNTIME
3145 ;; same as NO_SPACE_REGS code
3146 (and (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
3148 (eq (symbol_ref "flag_pic")
3152 ;; Out of range and either PIC or PORTABLE_RUNTIME
3155 ;;; Division and mod.
3156 (define_expand "divsi3"
3157 [(set (reg:SI 26) (match_operand:SI 1 "move_operand" ""))
3158 (set (reg:SI 25) (match_operand:SI 2 "move_operand" ""))
3159 (parallel [(set (reg:SI 29) (div:SI (reg:SI 26) (reg:SI 25)))
3160 (clobber (match_dup 3))
3161 (clobber (reg:SI 26))
3162 (clobber (reg:SI 25))
3163 (clobber (reg:SI 31))])
3164 (set (match_operand:SI 0 "general_operand" "") (reg:SI 29))]
3168 operands[3] = gen_reg_rtx (SImode);
3169 if (GET_CODE (operands[2]) == CONST_INT && emit_hpdiv_const (operands, 0))
3175 (div:SI (reg:SI 26) (match_operand:SI 0 "div_operand" "")))
3176 (clobber (match_operand:SI 1 "register_operand" "=a"))
3177 (clobber (reg:SI 26))
3178 (clobber (reg:SI 25))
3179 (clobber (reg:SI 31))]
3182 return output_div_insn (operands, 0, insn);"
3183 [(set_attr "type" "milli")
3184 (set (attr "length")
3186 ;; Target (or stub) within reach
3187 (and (lt (plus (symbol_ref "total_code_bytes") (pc))
3189 (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
3194 (ne (symbol_ref "TARGET_NO_SPACE_REGS || TARGET_FAST_INDIRECT_CALLS")
3198 ;; Out of reach, but not PIC or PORTABLE_RUNTIME
3199 ;; same as NO_SPACE_REGS code
3200 (and (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
3202 (eq (symbol_ref "flag_pic")
3206 ;; Out of range and either PIC or PORTABLE_RUNTIME
3209 (define_expand "udivsi3"
3210 [(set (reg:SI 26) (match_operand:SI 1 "move_operand" ""))
3211 (set (reg:SI 25) (match_operand:SI 2 "move_operand" ""))
3212 (parallel [(set (reg:SI 29) (udiv:SI (reg:SI 26) (reg:SI 25)))
3213 (clobber (match_dup 3))
3214 (clobber (reg:SI 26))
3215 (clobber (reg:SI 25))
3216 (clobber (reg:SI 31))])
3217 (set (match_operand:SI 0 "general_operand" "") (reg:SI 29))]
3221 operands[3] = gen_reg_rtx (SImode);
3222 if (GET_CODE (operands[2]) == CONST_INT && emit_hpdiv_const (operands, 1))
3228 (udiv:SI (reg:SI 26) (match_operand:SI 0 "div_operand" "")))
3229 (clobber (match_operand:SI 1 "register_operand" "=a"))
3230 (clobber (reg:SI 26))
3231 (clobber (reg:SI 25))
3232 (clobber (reg:SI 31))]
3235 return output_div_insn (operands, 1, insn);"
3236 [(set_attr "type" "milli")
3237 (set (attr "length")
3239 ;; Target (or stub) within reach
3240 (and (lt (plus (symbol_ref "total_code_bytes") (pc))
3242 (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
3247 (ne (symbol_ref "TARGET_NO_SPACE_REGS || TARGET_FAST_INDIRECT_CALLS")
3251 ;; Out of reach, but not PIC or PORTABLE_RUNTIME
3252 ;; same as NO_SPACE_REGS code
3253 (and (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
3255 (eq (symbol_ref "flag_pic")
3259 ;; Out of range and either PIC or PORTABLE_RUNTIME
3262 (define_expand "modsi3"
3263 [(set (reg:SI 26) (match_operand:SI 1 "move_operand" ""))
3264 (set (reg:SI 25) (match_operand:SI 2 "move_operand" ""))
3265 (parallel [(set (reg:SI 29) (mod:SI (reg:SI 26) (reg:SI 25)))
3266 (clobber (match_dup 3))
3267 (clobber (reg:SI 26))
3268 (clobber (reg:SI 25))
3269 (clobber (reg:SI 31))])
3270 (set (match_operand:SI 0 "general_operand" "") (reg:SI 29))]
3274 operands[3] = gen_reg_rtx (SImode);
3278 [(set (reg:SI 29) (mod:SI (reg:SI 26) (reg:SI 25)))
3279 (clobber (match_operand:SI 0 "register_operand" "=a"))
3280 (clobber (reg:SI 26))
3281 (clobber (reg:SI 25))
3282 (clobber (reg:SI 31))]
3285 return output_mod_insn (0, insn);"
3286 [(set_attr "type" "milli")
3287 (set (attr "length")
3289 ;; Target (or stub) within reach
3290 (and (lt (plus (symbol_ref "total_code_bytes") (pc))
3292 (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
3297 (ne (symbol_ref "TARGET_NO_SPACE_REGS || TARGET_FAST_INDIRECT_CALLS")
3301 ;; Out of reach, but not PIC or PORTABLE_RUNTIME
3302 ;; same as NO_SPACE_REGS code
3303 (and (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
3305 (eq (symbol_ref "flag_pic")
3309 ;; Out of range and either PIC or PORTABLE_RUNTIME
3312 (define_expand "umodsi3"
3313 [(set (reg:SI 26) (match_operand:SI 1 "move_operand" ""))
3314 (set (reg:SI 25) (match_operand:SI 2 "move_operand" ""))
3315 (parallel [(set (reg:SI 29) (umod:SI (reg:SI 26) (reg:SI 25)))
3316 (clobber (match_dup 3))
3317 (clobber (reg:SI 26))
3318 (clobber (reg:SI 25))
3319 (clobber (reg:SI 31))])
3320 (set (match_operand:SI 0 "general_operand" "") (reg:SI 29))]
3324 operands[3] = gen_reg_rtx (SImode);
3328 [(set (reg:SI 29) (umod:SI (reg:SI 26) (reg:SI 25)))
3329 (clobber (match_operand:SI 0 "register_operand" "=a"))
3330 (clobber (reg:SI 26))
3331 (clobber (reg:SI 25))
3332 (clobber (reg:SI 31))]
3335 return output_mod_insn (1, insn);"
3336 [(set_attr "type" "milli")
3337 (set (attr "length")
3339 ;; Target (or stub) within reach
3340 (and (lt (plus (symbol_ref "total_code_bytes") (pc))
3342 (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
3347 (ne (symbol_ref "TARGET_NO_SPACE_REGS || TARGET_FAST_INDIRECT_CALLS")
3351 ;; Out of reach, but not PIC or PORTABLE_RUNTIME
3352 ;; same as NO_SPACE_REGS code
3353 (and (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
3355 (eq (symbol_ref "flag_pic")
3359 ;; Out of range and either PIC or PORTABLE_RUNTIME
3362 ;;- and instructions
3363 ;; We define DImode `and` so with DImode `not` we can get
3364 ;; DImode `andn`. Other combinations are possible.
3366 (define_expand "anddi3"
3367 [(set (match_operand:DI 0 "register_operand" "")
3368 (and:DI (match_operand:DI 1 "arith_double_operand" "")
3369 (match_operand:DI 2 "arith_double_operand" "")))]
3373 if (! register_operand (operands[1], DImode)
3374 || ! register_operand (operands[2], DImode))
3375 /* Let GCC break this into word-at-a-time operations. */
3380 [(set (match_operand:DI 0 "register_operand" "=r")
3381 (and:DI (match_operand:DI 1 "register_operand" "%r")
3382 (match_operand:DI 2 "register_operand" "r")))]
3384 "and %1,%2,%0\;and %R1,%R2,%R0"
3385 [(set_attr "type" "binary")
3386 (set_attr "length" "8")])
3388 ; The ? for op1 makes reload prefer zdepi instead of loading a huge
3389 ; constant with ldil;ldo.
3390 (define_insn "andsi3"
3391 [(set (match_operand:SI 0 "register_operand" "=r,r")
3392 (and:SI (match_operand:SI 1 "register_operand" "%?r,0")
3393 (match_operand:SI 2 "and_operand" "rO,P")))]
3395 "* return output_and (operands); "
3396 [(set_attr "type" "binary,shift")
3397 (set_attr "length" "4,4")])
3400 [(set (match_operand:DI 0 "register_operand" "=r")
3401 (and:DI (not:DI (match_operand:DI 1 "register_operand" "r"))
3402 (match_operand:DI 2 "register_operand" "r")))]
3404 "andcm %2,%1,%0\;andcm %R2,%R1,%R0"
3405 [(set_attr "type" "binary")
3406 (set_attr "length" "8")])
3409 [(set (match_operand:SI 0 "register_operand" "=r")
3410 (and:SI (not:SI (match_operand:SI 1 "register_operand" "r"))
3411 (match_operand:SI 2 "register_operand" "r")))]
3414 [(set_attr "type" "binary")
3415 (set_attr "length" "4")])
3417 (define_expand "iordi3"
3418 [(set (match_operand:DI 0 "register_operand" "")
3419 (ior:DI (match_operand:DI 1 "arith_double_operand" "")
3420 (match_operand:DI 2 "arith_double_operand" "")))]
3424 if (! register_operand (operands[1], DImode)
3425 || ! register_operand (operands[2], DImode))
3426 /* Let GCC break this into word-at-a-time operations. */
3431 [(set (match_operand:DI 0 "register_operand" "=r")
3432 (ior:DI (match_operand:DI 1 "register_operand" "%r")
3433 (match_operand:DI 2 "register_operand" "r")))]
3435 "or %1,%2,%0\;or %R1,%R2,%R0"
3436 [(set_attr "type" "binary")
3437 (set_attr "length" "8")])
3439 ;; Need a define_expand because we've run out of CONST_OK... characters.
3440 (define_expand "iorsi3"
3441 [(set (match_operand:SI 0 "register_operand" "")
3442 (ior:SI (match_operand:SI 1 "register_operand" "")
3443 (match_operand:SI 2 "arith32_operand" "")))]
3447 if (! (ior_operand (operands[2], SImode)
3448 || register_operand (operands[2], SImode)))
3449 operands[2] = force_reg (SImode, operands[2]);
3453 [(set (match_operand:SI 0 "register_operand" "=r,r")
3454 (ior:SI (match_operand:SI 1 "register_operand" "0,0")
3455 (match_operand:SI 2 "ior_operand" "M,i")))]
3457 "* return output_ior (operands); "
3458 [(set_attr "type" "binary,shift")
3459 (set_attr "length" "4,4")])
3462 [(set (match_operand:SI 0 "register_operand" "=r")
3463 (ior:SI (match_operand:SI 1 "register_operand" "%r")
3464 (match_operand:SI 2 "register_operand" "r")))]
3467 [(set_attr "type" "binary")
3468 (set_attr "length" "4")])
3470 (define_expand "xordi3"
3471 [(set (match_operand:DI 0 "register_operand" "")
3472 (xor:DI (match_operand:DI 1 "arith_double_operand" "")
3473 (match_operand:DI 2 "arith_double_operand" "")))]
3477 if (! register_operand (operands[1], DImode)
3478 || ! register_operand (operands[2], DImode))
3479 /* Let GCC break this into word-at-a-time operations. */
3484 [(set (match_operand:DI 0 "register_operand" "=r")
3485 (xor:DI (match_operand:DI 1 "register_operand" "%r")
3486 (match_operand:DI 2 "register_operand" "r")))]
3488 "xor %1,%2,%0\;xor %R1,%R2,%R0"
3489 [(set_attr "type" "binary")
3490 (set_attr "length" "8")])
3492 (define_insn "xorsi3"
3493 [(set (match_operand:SI 0 "register_operand" "=r")
3494 (xor:SI (match_operand:SI 1 "register_operand" "%r")
3495 (match_operand:SI 2 "register_operand" "r")))]
3498 [(set_attr "type" "binary")
3499 (set_attr "length" "4")])
3501 (define_insn "negdi2"
3502 [(set (match_operand:DI 0 "register_operand" "=r")
3503 (neg:DI (match_operand:DI 1 "register_operand" "r")))]
3505 "sub %%r0,%R1,%R0\;subb %%r0,%1,%0"
3506 [(set_attr "type" "unary")
3507 (set_attr "length" "8")])
3509 (define_insn "negsi2"
3510 [(set (match_operand:SI 0 "register_operand" "=r")
3511 (neg:SI (match_operand:SI 1 "register_operand" "r")))]
3514 [(set_attr "type" "unary")
3515 (set_attr "length" "4")])
3517 (define_expand "one_cmpldi2"
3518 [(set (match_operand:DI 0 "register_operand" "")
3519 (not:DI (match_operand:DI 1 "arith_double_operand" "")))]
3523 if (! register_operand (operands[1], DImode))
3528 [(set (match_operand:DI 0 "register_operand" "=r")
3529 (not:DI (match_operand:DI 1 "register_operand" "r")))]
3531 "uaddcm %%r0,%1,%0\;uaddcm %%r0,%R1,%R0"
3532 [(set_attr "type" "unary")
3533 (set_attr "length" "8")])
3535 (define_insn "one_cmplsi2"
3536 [(set (match_operand:SI 0 "register_operand" "=r")
3537 (not:SI (match_operand:SI 1 "register_operand" "r")))]
3540 [(set_attr "type" "unary")
3541 (set_attr "length" "4")])
3543 ;; Floating point arithmetic instructions.
3545 (define_insn "adddf3"
3546 [(set (match_operand:DF 0 "register_operand" "=f")
3547 (plus:DF (match_operand:DF 1 "register_operand" "f")
3548 (match_operand:DF 2 "register_operand" "f")))]
3549 "! TARGET_SOFT_FLOAT"
3551 [(set_attr "type" "fpalu")
3552 (set_attr "pa_combine_type" "faddsub")
3553 (set_attr "length" "4")])
3555 (define_insn "addsf3"
3556 [(set (match_operand:SF 0 "register_operand" "=f")
3557 (plus:SF (match_operand:SF 1 "register_operand" "f")
3558 (match_operand:SF 2 "register_operand" "f")))]
3559 "! TARGET_SOFT_FLOAT"
3561 [(set_attr "type" "fpalu")
3562 (set_attr "pa_combine_type" "faddsub")
3563 (set_attr "length" "4")])
3565 (define_insn "subdf3"
3566 [(set (match_operand:DF 0 "register_operand" "=f")
3567 (minus:DF (match_operand:DF 1 "register_operand" "f")
3568 (match_operand:DF 2 "register_operand" "f")))]
3569 "! TARGET_SOFT_FLOAT"
3571 [(set_attr "type" "fpalu")
3572 (set_attr "pa_combine_type" "faddsub")
3573 (set_attr "length" "4")])
3575 (define_insn "subsf3"
3576 [(set (match_operand:SF 0 "register_operand" "=f")
3577 (minus:SF (match_operand:SF 1 "register_operand" "f")
3578 (match_operand:SF 2 "register_operand" "f")))]
3579 "! TARGET_SOFT_FLOAT"
3581 [(set_attr "type" "fpalu")
3582 (set_attr "pa_combine_type" "faddsub")
3583 (set_attr "length" "4")])
3585 (define_insn "muldf3"
3586 [(set (match_operand:DF 0 "register_operand" "=f")
3587 (mult:DF (match_operand:DF 1 "register_operand" "f")
3588 (match_operand:DF 2 "register_operand" "f")))]
3589 "! TARGET_SOFT_FLOAT"
3591 [(set_attr "type" "fpmuldbl")
3592 (set_attr "pa_combine_type" "fmpy")
3593 (set_attr "length" "4")])
3595 (define_insn "mulsf3"
3596 [(set (match_operand:SF 0 "register_operand" "=f")
3597 (mult:SF (match_operand:SF 1 "register_operand" "f")
3598 (match_operand:SF 2 "register_operand" "f")))]
3599 "! TARGET_SOFT_FLOAT"
3601 [(set_attr "type" "fpmulsgl")
3602 (set_attr "pa_combine_type" "fmpy")
3603 (set_attr "length" "4")])
3605 (define_insn "divdf3"
3606 [(set (match_operand:DF 0 "register_operand" "=f")
3607 (div:DF (match_operand:DF 1 "register_operand" "f")
3608 (match_operand:DF 2 "register_operand" "f")))]
3609 "! TARGET_SOFT_FLOAT"
3611 [(set_attr "type" "fpdivdbl")
3612 (set_attr "length" "4")])
3614 (define_insn "divsf3"
3615 [(set (match_operand:SF 0 "register_operand" "=f")
3616 (div:SF (match_operand:SF 1 "register_operand" "f")
3617 (match_operand:SF 2 "register_operand" "f")))]
3618 "! TARGET_SOFT_FLOAT"
3620 [(set_attr "type" "fpdivsgl")
3621 (set_attr "length" "4")])
3623 (define_insn "negdf2"
3624 [(set (match_operand:DF 0 "register_operand" "=f")
3625 (neg:DF (match_operand:DF 1 "register_operand" "f")))]
3626 "! TARGET_SOFT_FLOAT"
3630 return \"fneg,dbl %1,%0\";
3632 return \"fsub,dbl %%fr0,%1,%0\";
3634 [(set_attr "type" "fpalu")
3635 (set_attr "length" "4")])
3637 (define_insn "negsf2"
3638 [(set (match_operand:SF 0 "register_operand" "=f")
3639 (neg:SF (match_operand:SF 1 "register_operand" "f")))]
3640 "! TARGET_SOFT_FLOAT"
3644 return \"fneg,sgl %1,%0\";
3646 return \"fsub,sgl %%fr0,%1,%0\";
3648 [(set_attr "type" "fpalu")
3649 (set_attr "length" "4")])
3651 (define_insn "absdf2"
3652 [(set (match_operand:DF 0 "register_operand" "=f")
3653 (abs:DF (match_operand:DF 1 "register_operand" "f")))]
3654 "! TARGET_SOFT_FLOAT"
3656 [(set_attr "type" "fpalu")
3657 (set_attr "length" "4")])
3659 (define_insn "abssf2"
3660 [(set (match_operand:SF 0 "register_operand" "=f")
3661 (abs:SF (match_operand:SF 1 "register_operand" "f")))]
3662 "! TARGET_SOFT_FLOAT"
3664 [(set_attr "type" "fpalu")
3665 (set_attr "length" "4")])
3667 (define_insn "sqrtdf2"
3668 [(set (match_operand:DF 0 "register_operand" "=f")
3669 (sqrt:DF (match_operand:DF 1 "register_operand" "f")))]
3670 "! TARGET_SOFT_FLOAT"
3672 [(set_attr "type" "fpsqrtdbl")
3673 (set_attr "length" "4")])
3675 (define_insn "sqrtsf2"
3676 [(set (match_operand:SF 0 "register_operand" "=f")
3677 (sqrt:SF (match_operand:SF 1 "register_operand" "f")))]
3678 "! TARGET_SOFT_FLOAT"
3680 [(set_attr "type" "fpsqrtsgl")
3681 (set_attr "length" "4")])
3683 ;; PA 2.0 floating point instructions
3687 [(set (match_operand:DF 0 "register_operand" "=f")
3688 (plus:DF (mult:DF (match_operand:DF 1 "register_operand" "f")
3689 (match_operand:DF 2 "register_operand" "f"))
3690 (match_operand:DF 3 "register_operand" "f")))]
3691 "TARGET_PA_20 && ! TARGET_SOFT_FLOAT"
3692 "fmpyfadd,dbl %1,%2,%3,%0"
3693 [(set_attr "type" "fpmuldbl")
3694 (set_attr "length" "4")])
3697 [(set (match_operand:DF 0 "register_operand" "=f")
3698 (plus:DF (match_operand:DF 1 "register_operand" "f")
3699 (mult:DF (match_operand:DF 2 "register_operand" "f")
3700 (match_operand:DF 3 "register_operand" "f"))))]
3701 "TARGET_PA_20 && ! TARGET_SOFT_FLOAT"
3702 "fmpyfadd,dbl %2,%3,%1,%0"
3703 [(set_attr "type" "fpmuldbl")
3704 (set_attr "length" "4")])
3707 [(set (match_operand:SF 0 "register_operand" "=f")
3708 (plus:SF (mult:SF (match_operand:SF 1 "register_operand" "f")
3709 (match_operand:SF 2 "register_operand" "f"))
3710 (match_operand:SF 3 "register_operand" "f")))]
3711 "TARGET_PA_20 && ! TARGET_SOFT_FLOAT"
3712 "fmpyfadd,sgl %1,%2,%3,%0"
3713 [(set_attr "type" "fpmulsgl")
3714 (set_attr "length" "4")])
3717 [(set (match_operand:SF 0 "register_operand" "=f")
3718 (plus:SF (match_operand:SF 1 "register_operand" "f")
3719 (mult:SF (match_operand:SF 2 "register_operand" "f")
3720 (match_operand:SF 3 "register_operand" "f"))))]
3721 "TARGET_PA_20 && ! TARGET_SOFT_FLOAT"
3722 "fmpyfadd,sgl %2,%3,%1,%0"
3723 [(set_attr "type" "fpmulsgl")
3724 (set_attr "length" "4")])
3726 ; fmpynfadd patterns
3728 [(set (match_operand:DF 0 "register_operand" "=f")
3729 (minus:DF (match_operand:DF 1 "register_operand" "f")
3730 (mult:DF (match_operand:DF 2 "register_operand" "f")
3731 (match_operand:DF 3 "register_operand" "f"))))]
3732 "TARGET_PA_20 && ! TARGET_SOFT_FLOAT"
3733 "fmpynfadd,dbl %2,%3,%1,%0"
3734 [(set_attr "type" "fpmuldbl")
3735 (set_attr "length" "4")])
3738 [(set (match_operand:SF 0 "register_operand" "=f")
3739 (minus:SF (match_operand:SF 1 "register_operand" "f")
3740 (mult:SF (match_operand:SF 2 "register_operand" "f")
3741 (match_operand:SF 3 "register_operand" "f"))))]
3742 "TARGET_PA_20 && ! TARGET_SOFT_FLOAT"
3743 "fmpynfadd,sgl %2,%3,%1,%0"
3744 [(set_attr "type" "fpmulsgl")
3745 (set_attr "length" "4")])
3749 [(set (match_operand:DF 0 "register_operand" "=f")
3750 (neg:DF (abs:DF (match_operand:DF 1 "register_operand" "f"))))]
3751 "TARGET_PA_20 && ! TARGET_SOFT_FLOAT"
3753 [(set_attr "type" "fpalu")
3754 (set_attr "length" "4")])
3757 [(set (match_operand:SF 0 "register_operand" "=f")
3758 (neg:SF (abs:SF (match_operand:SF 1 "register_operand" "f"))))]
3759 "TARGET_PA_20 && ! TARGET_SOFT_FLOAT"
3761 [(set_attr "type" "fpalu")
3762 (set_attr "length" "4")])
3765 ;;- Shift instructions
3767 ;; Optimized special case of shifting.
3770 [(set (match_operand:SI 0 "register_operand" "=r")
3771 (lshiftrt:SI (match_operand:SI 1 "memory_operand" "m")
3775 [(set_attr "type" "load")
3776 (set_attr "length" "4")])
3779 [(set (match_operand:SI 0 "register_operand" "=r")
3780 (lshiftrt:SI (match_operand:SI 1 "memory_operand" "m")
3784 [(set_attr "type" "load")
3785 (set_attr "length" "4")])
3788 [(set (match_operand:SI 0 "register_operand" "=r")
3789 (plus:SI (mult:SI (match_operand:SI 2 "register_operand" "r")
3790 (match_operand:SI 3 "shadd_operand" ""))
3791 (match_operand:SI 1 "register_operand" "r")))]
3793 "sh%O3addl %2,%1,%0"
3794 [(set_attr "type" "binary")
3795 (set_attr "length" "4")])
3797 ;; This variant of the above insn can occur if the first operand
3798 ;; is the frame pointer. This is a kludge, but there doesn't
3799 ;; seem to be a way around it. Only recognize it while reloading.
3800 ;; Note how operand 3 uses a predicate of "const_int_operand", but
3801 ;; has constraints allowing a register. I don't know how this works,
3802 ;; but it somehow makes sure that out-of-range constants are placed
3803 ;; in a register which somehow magically is a "const_int_operand".
3804 ;; (this was stolen from alpha.md, I'm not going to try and change it.)
3807 [(set (match_operand:SI 0 "register_operand" "=&r,r")
3808 (plus:SI (plus:SI (mult:SI (match_operand:SI 2 "register_operand" "r,r")
3809 (match_operand:SI 4 "shadd_operand" ""))
3810 (match_operand:SI 1 "register_operand" "r,r"))
3811 (match_operand:SI 3 "const_int_operand" "r,J")))]
3812 "reload_in_progress"
3814 sh%O4addl %2,%1,%0\;addl %3,%0,%0
3815 sh%O4addl %2,%1,%0\;ldo %3(%0),%0"
3816 [(set_attr "type" "multi")
3817 (set_attr "length" "8")])
3819 ;; This anonymous pattern and splitter wins because it reduces the latency
3820 ;; of the shadd sequence without increasing the latency of the shift.
3822 ;; We want to make sure and split up the operations for the scheduler since
3823 ;; these instructions can (and should) schedule independently.
3825 ;; It would be clearer if combine used the same operator for both expressions,
3826 ;; it's somewhat confusing to have a mult in one operation and an ashift
3829 ;; If this pattern is not split before register allocation, then we must expose
3830 ;; the fact that operand 4 is set before operands 1, 2 and 3 have been read.
3832 [(set (match_operand:SI 0 "register_operand" "=r")
3833 (plus:SI (mult:SI (match_operand:SI 2 "register_operand" "r")
3834 (match_operand:SI 3 "shadd_operand" ""))
3835 (match_operand:SI 1 "register_operand" "r")))
3836 (set (match_operand:SI 4 "register_operand" "=&r")
3837 (ashift:SI (match_dup 2)
3838 (match_operand:SI 5 "const_int_operand" "i")))]
3839 "INTVAL (operands[5]) == exact_log2 (INTVAL (operands[3]))"
3841 [(set_attr "type" "binary")
3842 (set_attr "length" "8")])
3845 [(set (match_operand:SI 0 "register_operand" "=r")
3846 (plus:SI (mult:SI (match_operand:SI 2 "register_operand" "r")
3847 (match_operand:SI 3 "shadd_operand" ""))
3848 (match_operand:SI 1 "register_operand" "r")))
3849 (set (match_operand:SI 4 "register_operand" "=&r")
3850 (ashift:SI (match_dup 2)
3851 (match_operand:SI 5 "const_int_operand" "i")))]
3852 "INTVAL (operands[5]) == exact_log2 (INTVAL (operands[3]))"
3853 [(set (match_dup 4) (ashift:SI (match_dup 2) (match_dup 5)))
3854 (set (match_dup 0) (plus:SI (mult:SI (match_dup 2) (match_dup 3))
3858 (define_expand "ashlsi3"
3859 [(set (match_operand:SI 0 "register_operand" "")
3860 (ashift:SI (match_operand:SI 1 "lhs_lshift_operand" "")
3861 (match_operand:SI 2 "arith32_operand" "")))]
3865 if (GET_CODE (operands[2]) != CONST_INT)
3867 rtx temp = gen_reg_rtx (SImode);
3868 emit_insn (gen_subsi3 (temp, GEN_INT (31), operands[2]));
3869 if (GET_CODE (operands[1]) == CONST_INT)
3870 emit_insn (gen_zvdep_imm (operands[0], operands[1], temp));
3872 emit_insn (gen_zvdep32 (operands[0], operands[1], temp));
3875 /* Make sure both inputs are not constants,
3876 there are no patterns for that. */
3877 operands[1] = force_reg (SImode, operands[1]);
3881 [(set (match_operand:SI 0 "register_operand" "=r")
3882 (ashift:SI (match_operand:SI 1 "register_operand" "r")
3883 (match_operand:SI 2 "const_int_operand" "n")))]
3885 "zdep %1,%P2,%L2,%0"
3886 [(set_attr "type" "shift")
3887 (set_attr "length" "4")])
3889 ; Match cases of op1 a CONST_INT here that zvdep_imm doesn't handle.
3890 ; Doing it like this makes slightly better code since reload can
3891 ; replace a register with a known value in range -16..15 with a
3892 ; constant. Ideally, we would like to merge zvdep32 and zvdep_imm,
3893 ; but since we have no more CONST_OK... characters, that is not
3895 (define_insn "zvdep32"
3896 [(set (match_operand:SI 0 "register_operand" "=r,r")
3897 (ashift:SI (match_operand:SI 1 "arith5_operand" "r,L")
3898 (minus:SI (const_int 31)
3899 (match_operand:SI 2 "register_operand" "q,q"))))]
3904 [(set_attr "type" "shift,shift")
3905 (set_attr "length" "4,4")])
3907 (define_insn "zvdep_imm"
3908 [(set (match_operand:SI 0 "register_operand" "=r")
3909 (ashift:SI (match_operand:SI 1 "lhs_lshift_cint_operand" "")
3910 (minus:SI (const_int 31)
3911 (match_operand:SI 2 "register_operand" "q"))))]
3915 int x = INTVAL (operands[1]);
3916 operands[2] = GEN_INT (4 + exact_log2 ((x >> 4) + 1));
3917 operands[1] = GEN_INT ((x & 0xf) - 0x10);
3918 return \"zvdepi %1,%2,%0\";
3920 [(set_attr "type" "shift")
3921 (set_attr "length" "4")])
3923 (define_insn "vdepi_ior"
3924 [(set (match_operand:SI 0 "register_operand" "=r")
3925 (ior:SI (ashift:SI (match_operand:SI 1 "const_int_operand" "")
3926 (minus:SI (const_int 31)
3927 (match_operand:SI 2 "register_operand" "q")))
3928 (match_operand:SI 3 "register_operand" "0")))]
3929 ; accept ...0001...1, can this be generalized?
3930 "exact_log2 (INTVAL (operands[1]) + 1) >= 0"
3933 int x = INTVAL (operands[1]);
3934 operands[2] = GEN_INT (exact_log2 (x + 1));
3935 return \"vdepi -1,%2,%0\";
3937 [(set_attr "type" "shift")
3938 (set_attr "length" "4")])
3940 (define_insn "vdepi_and"
3941 [(set (match_operand:SI 0 "register_operand" "=r")
3942 (and:SI (rotate:SI (match_operand:SI 1 "const_int_operand" "")
3943 (minus:SI (const_int 31)
3944 (match_operand:SI 2 "register_operand" "q")))
3945 (match_operand:SI 3 "register_operand" "0")))]
3946 ; this can be generalized...!
3947 "INTVAL (operands[1]) == -2"
3950 int x = INTVAL (operands[1]);
3951 operands[2] = GEN_INT (exact_log2 ((~x) + 1));
3952 return \"vdepi 0,%2,%0\";
3954 [(set_attr "type" "shift")
3955 (set_attr "length" "4")])
3957 (define_expand "ashrsi3"
3958 [(set (match_operand:SI 0 "register_operand" "")
3959 (ashiftrt:SI (match_operand:SI 1 "register_operand" "")
3960 (match_operand:SI 2 "arith32_operand" "")))]
3964 if (GET_CODE (operands[2]) != CONST_INT)
3966 rtx temp = gen_reg_rtx (SImode);
3967 emit_insn (gen_subsi3 (temp, GEN_INT (31), operands[2]));
3968 emit_insn (gen_vextrs32 (operands[0], operands[1], temp));
3974 [(set (match_operand:SI 0 "register_operand" "=r")
3975 (ashiftrt:SI (match_operand:SI 1 "register_operand" "r")
3976 (match_operand:SI 2 "const_int_operand" "n")))]
3978 "extrs %1,%P2,%L2,%0"
3979 [(set_attr "type" "shift")
3980 (set_attr "length" "4")])
3982 (define_insn "vextrs32"
3983 [(set (match_operand:SI 0 "register_operand" "=r")
3984 (ashiftrt:SI (match_operand:SI 1 "register_operand" "r")
3985 (minus:SI (const_int 31)
3986 (match_operand:SI 2 "register_operand" "q"))))]
3989 [(set_attr "type" "shift")
3990 (set_attr "length" "4")])
3992 (define_insn "lshrsi3"
3993 [(set (match_operand:SI 0 "register_operand" "=r,r")
3994 (lshiftrt:SI (match_operand:SI 1 "register_operand" "r,r")
3995 (match_operand:SI 2 "arith32_operand" "q,n")))]
3999 extru %1,%P2,%L2,%0"
4000 [(set_attr "type" "shift")
4001 (set_attr "length" "4")])
4003 (define_insn "rotrsi3"
4004 [(set (match_operand:SI 0 "register_operand" "=r,r")
4005 (rotatert:SI (match_operand:SI 1 "register_operand" "r,r")
4006 (match_operand:SI 2 "arith32_operand" "q,n")))]
4010 if (GET_CODE (operands[2]) == CONST_INT)
4012 operands[2] = GEN_INT (INTVAL (operands[2]) & 31);
4013 return \"shd %1,%1,%2,%0\";
4016 return \"vshd %1,%1,%0\";
4018 [(set_attr "type" "shift")
4019 (set_attr "length" "4")])
4021 (define_expand "rotlsi3"
4022 [(set (match_operand:SI 0 "register_operand" "")
4023 (rotate:SI (match_operand:SI 1 "register_operand" "")
4024 (match_operand:SI 2 "arith32_operand" "")))]
4028 if (GET_CODE (operands[2]) != CONST_INT)
4030 rtx temp = gen_reg_rtx (SImode);
4031 emit_insn (gen_subsi3 (temp, GEN_INT (32), operands[2]));
4032 emit_insn (gen_rotrsi3 (operands[0], operands[1], temp));
4035 /* Else expand normally. */
4039 [(set (match_operand:SI 0 "register_operand" "=r")
4040 (rotate:SI (match_operand:SI 1 "register_operand" "r")
4041 (match_operand:SI 2 "const_int_operand" "n")))]
4045 operands[2] = GEN_INT ((32 - INTVAL (operands[2])) & 31);
4046 return \"shd %1,%1,%2,%0\";
4048 [(set_attr "type" "shift")
4049 (set_attr "length" "4")])
4052 [(set (match_operand:SI 0 "register_operand" "=r")
4053 (match_operator:SI 5 "plus_xor_ior_operator"
4054 [(ashift:SI (match_operand:SI 1 "register_operand" "r")
4055 (match_operand:SI 3 "const_int_operand" "n"))
4056 (lshiftrt:SI (match_operand:SI 2 "register_operand" "r")
4057 (match_operand:SI 4 "const_int_operand" "n"))]))]
4058 "INTVAL (operands[3]) + INTVAL (operands[4]) == 32"
4060 [(set_attr "type" "shift")
4061 (set_attr "length" "4")])
4064 [(set (match_operand:SI 0 "register_operand" "=r")
4065 (match_operator:SI 5 "plus_xor_ior_operator"
4066 [(lshiftrt:SI (match_operand:SI 2 "register_operand" "r")
4067 (match_operand:SI 4 "const_int_operand" "n"))
4068 (ashift:SI (match_operand:SI 1 "register_operand" "r")
4069 (match_operand:SI 3 "const_int_operand" "n"))]))]
4070 "INTVAL (operands[3]) + INTVAL (operands[4]) == 32"
4072 [(set_attr "type" "shift")
4073 (set_attr "length" "4")])
4076 [(set (match_operand:SI 0 "register_operand" "=r")
4077 (and:SI (ashift:SI (match_operand:SI 1 "register_operand" "r")
4078 (match_operand:SI 2 "const_int_operand" ""))
4079 (match_operand:SI 3 "const_int_operand" "")))]
4080 "exact_log2 (1 + (INTVAL (operands[3]) >> (INTVAL (operands[2]) & 31))) >= 0"
4083 int cnt = INTVAL (operands[2]) & 31;
4084 operands[3] = GEN_INT (exact_log2 (1 + (INTVAL (operands[3]) >> cnt)));
4085 operands[2] = GEN_INT (31 - cnt);
4086 return \"zdep %1,%2,%3,%0\";
4088 [(set_attr "type" "shift")
4089 (set_attr "length" "4")])
4091 ;; Unconditional and other jump instructions.
4093 (define_insn "return"
4095 "hppa_can_use_return_insn_p ()"
4097 [(set_attr "type" "branch")
4098 (set_attr "length" "4")])
4100 ;; Use a different pattern for functions which have non-trivial
4101 ;; epilogues so as not to confuse jump and reorg.
4102 (define_insn "return_internal"
4107 [(set_attr "type" "branch")
4108 (set_attr "length" "4")])
4110 (define_expand "prologue"
4113 "hppa_expand_prologue ();DONE;")
4115 (define_expand "epilogue"
4120 /* Try to use the trivial return first. Else use the full
4122 if (hppa_can_use_return_insn_p ())
4123 emit_jump_insn (gen_return ());
4126 hppa_expand_epilogue ();
4127 emit_jump_insn (gen_return_internal ());
4132 ;; Special because we use the value placed in %r2 by the bl instruction
4133 ;; from within its delay slot to set the value for the 2nd parameter to
4135 (define_insn "call_profiler"
4136 [(unspec_volatile [(const_int 0)] 0)
4137 (use (match_operand:SI 0 "const_int_operand" ""))]
4139 "bl _mcount,%%r2\;ldo %0(%%r2),%%r25"
4140 [(set_attr "type" "multi")
4141 (set_attr "length" "8")])
4143 (define_insn "blockage"
4144 [(unspec_volatile [(const_int 2)] 0)]
4147 [(set_attr "length" "0")])
4150 [(set (pc) (label_ref (match_operand 0 "" "")))]
4154 extern int optimize;
4156 if (GET_MODE (insn) == SImode)
4159 /* An unconditional branch which can reach its target. */
4160 if (get_attr_length (insn) != 24
4161 && get_attr_length (insn) != 16)
4164 /* An unconditional branch which can not reach its target.
4166 We need to be able to use %r1 as a scratch register; however,
4167 we can never be sure whether or not it's got a live value in
4168 it. Therefore, we must restore its original value after the
4171 To make matters worse, we don't have a stack slot which we
4172 can always clobber. sp-12/sp-16 shouldn't ever have a live
4173 value during a non-optimizing compilation, so we use those
4174 slots for now. We don't support very long branches when
4175 optimizing -- they should be quite rare when optimizing.
4177 Really the way to go long term is a register scavenger; goto
4178 the target of the jump and find a register which we can use
4179 as a scratch to hold the value in %r1. */
4181 /* We don't know how to register scavenge yet. */
4185 /* First store %r1 into the stack. */
4186 output_asm_insn (\"stw %%r1,-16(%%r30)\", operands);
4188 /* Now load the target address into %r1 and do an indirect jump
4189 to the value specified in %r1. Be careful to generate PIC
4194 xoperands[0] = operands[0];
4195 xoperands[1] = gen_label_rtx ();
4197 output_asm_insn (\"bl .+8,%%r1\\n\\taddil L'%l0-%l1,%%r1\", xoperands);
4198 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, \"L\",
4199 CODE_LABEL_NUMBER (xoperands[1]));
4200 output_asm_insn (\"ldo R'%l0-%l1(%%r1),%%r1\\n\\tbv %%r0(%%r1)\",
4204 output_asm_insn (\"ldil L'%l0,%%r1\\n\\tbe R'%l0(%%sr4,%%r1)\", operands);;
4206 /* And restore the value of %r1 in the delay slot. We're not optimizing,
4207 so we know nothing else can be in the delay slot. */
4208 return \"ldw -16(%%r30),%%r1\";
4210 [(set_attr "type" "uncond_branch")
4211 (set_attr "pa_combine_type" "uncond_branch")
4212 (set (attr "length")
4213 (cond [(eq (symbol_ref "jump_in_call_delay (insn)") (const_int 1))
4214 (if_then_else (lt (abs (minus (match_dup 0)
4215 (plus (pc) (const_int 8))))
4219 (ge (abs (minus (match_dup 0) (plus (pc) (const_int 8))))
4221 (if_then_else (eq (symbol_ref "flag_pic") (const_int 0))
4226 ;; Subroutines of "casesi".
4227 ;; operand 0 is index
4228 ;; operand 1 is the minimum bound
4229 ;; operand 2 is the maximum bound - minimum bound + 1
4230 ;; operand 3 is CODE_LABEL for the table;
4231 ;; operand 4 is the CODE_LABEL to go to if index out of range.
4233 (define_expand "casesi"
4234 [(match_operand:SI 0 "general_operand" "")
4235 (match_operand:SI 1 "const_int_operand" "")
4236 (match_operand:SI 2 "const_int_operand" "")
4237 (match_operand 3 "" "")
4238 (match_operand 4 "" "")]
4242 if (GET_CODE (operands[0]) != REG)
4243 operands[0] = force_reg (SImode, operands[0]);
4245 if (operands[1] != const0_rtx)
4247 rtx reg = gen_reg_rtx (SImode);
4249 operands[1] = GEN_INT (-INTVAL (operands[1]));
4250 if (!INT_14_BITS (operands[1]))
4251 operands[1] = force_reg (SImode, operands[1]);
4252 emit_insn (gen_addsi3 (reg, operands[0], operands[1]));
4257 if (!INT_5_BITS (operands[2]))
4258 operands[2] = force_reg (SImode, operands[2]);
4260 emit_insn (gen_cmpsi (operands[0], operands[2]));
4261 emit_jump_insn (gen_bgtu (operands[4]));
4262 if (TARGET_BIG_SWITCH)
4264 rtx temp = gen_reg_rtx (SImode);
4265 emit_move_insn (temp, gen_rtx_PLUS (SImode, operands[0], operands[0]));
4268 emit_jump_insn (gen_casesi0 (operands[0], operands[3]));
4272 (define_insn "casesi0"
4274 (mem:SI (plus:SI (pc)
4275 (match_operand:SI 0 "register_operand" "r")))
4276 (label_ref (match_operand 1 "" ""))))]
4279 [(set_attr "type" "multi")
4280 (set_attr "length" "8")])
4282 ;; Need nops for the calls because execution is supposed to continue
4283 ;; past; we don't want to nullify an instruction that we need.
4284 ;;- jump to subroutine
4286 (define_expand "call"
4287 [(parallel [(call (match_operand:SI 0 "" "")
4288 (match_operand 1 "" ""))
4289 (clobber (reg:SI 2))])]
4296 if (TARGET_PORTABLE_RUNTIME)
4297 op = force_reg (SImode, XEXP (operands[0], 0));
4299 op = XEXP (operands[0], 0);
4301 /* Use two different patterns for calls to explicitly named functions
4302 and calls through function pointers. This is necessary as these two
4303 types of calls use different calling conventions, and CSE might try
4304 to change the named call into an indirect call in some cases (using
4305 two patterns keeps CSE from performing this optimization). */
4306 if (GET_CODE (op) == SYMBOL_REF)
4307 call_insn = emit_call_insn (gen_call_internal_symref (op, operands[1]));
4310 rtx tmpreg = gen_rtx_REG (word_mode, 22);
4311 emit_move_insn (tmpreg, force_reg (word_mode, op));
4312 call_insn = emit_call_insn (gen_call_internal_reg (operands[1]));
4317 use_reg (&CALL_INSN_FUNCTION_USAGE (call_insn), pic_offset_table_rtx);
4319 /* After each call we must restore the PIC register, even if it
4320 doesn't appear to be used.
4322 This will set regs_ever_live for the callee saved register we
4323 stored the PIC register in. */
4324 emit_move_insn (pic_offset_table_rtx,
4325 gen_rtx_REG (word_mode, PIC_OFFSET_TABLE_REGNUM_SAVED));
4326 emit_insn (gen_rtx_USE (VOIDmode, pic_offset_table_rtx));
4328 /* Gross. We have to keep the scheduler from moving the restore
4329 of the PIC register away from the call. SCHED_GROUP_P is
4330 supposed to do this, but for some reason the compiler will
4331 go into an infinite loop when we use that.
4333 This method (blockage insn) may make worse code (then again
4334 it may not since calls are nearly blockages anyway), but at
4335 least it should work. */
4336 emit_insn (gen_blockage ());
4341 (define_insn "call_internal_symref"
4342 [(call (mem:SI (match_operand:SI 0 "call_operand_address" ""))
4343 (match_operand 1 "" "i"))
4344 (clobber (reg:SI 2))
4345 (use (const_int 0))]
4346 "! TARGET_PORTABLE_RUNTIME"
4349 output_arg_descriptor (insn);
4350 return output_call (insn, operands[0]);
4352 [(set_attr "type" "call")
4353 (set (attr "length")
4354 ;; If we're sure that we can either reach the target or that the
4355 ;; linker can use a long-branch stub, then the length is 4 bytes.
4357 ;; For long-calls the length will be either 52 bytes (non-pic)
4358 ;; or 68 bytes (pic). */
4359 ;; Else we have to use a long-call;
4360 (if_then_else (lt (plus (symbol_ref "total_code_bytes") (pc))
4363 (if_then_else (eq (symbol_ref "flag_pic")
4368 (define_insn "call_internal_reg"
4369 [(call (mem:SI (reg:SI 22))
4370 (match_operand 0 "" "i"))
4371 (clobber (reg:SI 2))
4372 (use (const_int 1))]
4378 /* First the special case for kernels, level 0 systems, etc. */
4379 if (TARGET_NO_SPACE_REGS || TARGET_FAST_INDIRECT_CALLS)
4380 return \"ble 0(%%sr4,%%r22)\;copy %%r31,%%r2\";
4382 /* Now the normal case -- we can reach $$dyncall directly or
4383 we're sure that we can get there via a long-branch stub.
4385 No need to check target flags as the length uniquely identifies
4386 the remaining cases. */
4387 if (get_attr_length (insn) == 8)
4388 return \".CALL\\tARGW0=GR\;bl $$dyncall,%%r31\;copy %%r31,%%r2\";
4390 /* Long millicode call, but we are not generating PIC or portable runtime
4392 if (get_attr_length (insn) == 12)
4393 return \".CALL\\tARGW0=GR\;ldil L%%$$dyncall,%%r2\;ble R%%$$dyncall(%%sr4,%%r2)\;copy %%r31,%%r2\";
4395 /* Long millicode call for portable runtime. */
4396 if (get_attr_length (insn) == 20)
4397 return \"ldil L%%$$dyncall,%%r31\;ldo R%%$$dyncall(%%r31),%%r31\;blr %%r0,%%r2\;bv,n %%r0(%%r31)\;nop\";
4399 /* If we're generating PIC code. */
4400 xoperands[0] = operands[0];
4401 xoperands[1] = gen_label_rtx ();
4402 output_asm_insn (\"bl .+8,%%r1\", xoperands);
4403 output_asm_insn (\"addil L%%$$dyncall-%1,%%r1\", xoperands);
4404 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, \"L\",
4405 CODE_LABEL_NUMBER (xoperands[1]));
4406 output_asm_insn (\"ldo R%%$$dyncall-%1(%%r1),%%r1\", xoperands);
4407 output_asm_insn (\"blr %%r0,%%r2\", xoperands);
4408 output_asm_insn (\"bv,n %%r0(%%r1)\\n\\tnop\", xoperands);
4411 [(set_attr "type" "dyncall")
4412 (set (attr "length")
4414 ;; First NO_SPACE_REGS
4415 (ne (symbol_ref "TARGET_NO_SPACE_REGS || TARGET_FAST_INDIRECT_CALLS")
4419 ;; Target (or stub) within reach
4420 (and (lt (plus (symbol_ref "total_code_bytes") (pc))
4422 (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
4426 ;; Out of reach, but not PIC or PORTABLE_RUNTIME
4427 (and (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
4429 (eq (symbol_ref "flag_pic")
4433 (ne (symbol_ref "TARGET_PORTABLE_RUNTIME")
4437 ;; Out of range PIC case
;; NOTE(review): this listing is incomplete — the embedded source line
;; numbers (4440 .. 4493) jump, so parts of this expander's RTL template
;; and C body are missing here.  Visible text kept byte-identical.
;;
;; Expander for a value-returning call.  Emits either the symref pattern
;; (direct, named call) or the reg pattern (indirect call through %r22);
;; the C comment below explains why two patterns are needed.  For PIC,
;; it also restores the PIC register after the call and emits a blockage
;; so the scheduler cannot separate the restore from the call.
4440 (define_expand "call_value"
4441 [(parallel [(set (match_operand 0 "" "")
4442 (call (match_operand:SI 1 "" "")
4443 (match_operand 2 "" "")))
4444 (clobber (reg:SI 2))])]
4451 if (TARGET_PORTABLE_RUNTIME)
4452 op = force_reg (word_mode, XEXP (operands[1], 0));
4454 op = XEXP (operands[1], 0);
4456 /* Use two different patterns for calls to explicitly named functions
4457 and calls through function pointers. This is necessary as these two
4458 types of calls use different calling conventions, and CSE might try
4459 to change the named call into an indirect call in some cases (using
4460 two patterns keeps CSE from performing this optimization). */
4461 if (GET_CODE (op) == SYMBOL_REF)
4462 call_insn = emit_call_insn (gen_call_value_internal_symref (operands[0],
4467 rtx tmpreg = gen_rtx_REG (word_mode, 22);
4468 emit_move_insn (tmpreg, force_reg (word_mode, op));
4469 call_insn = emit_call_insn (gen_call_value_internal_reg (operands[0],
4474 use_reg (&CALL_INSN_FUNCTION_USAGE (call_insn), pic_offset_table_rtx);
4476 /* After each call we must restore the PIC register, even if it
4477 doesn't appear to be used.
4479 This will set regs_ever_live for the callee saved register we
4480 stored the PIC register in. */
4481 emit_move_insn (pic_offset_table_rtx,
4482 gen_rtx_REG (word_mode, PIC_OFFSET_TABLE_REGNUM_SAVED))
4483 emit_insn (gen_rtx_USE (VOIDmode, pic_offset_table_rtx));
4485 /* Gross. We have to keep the scheduler from moving the restore
4486 of the PIC register away from the call. SCHED_GROUP_P is
4487 supposed to do this, but for some reason the compiler will
4488 go into an infinite loop when we use that.
4490 This method (blockage insn) may make worse code (then again
4491 it may not since calls are nearly blockages anyway), but at
4492 least it should work. */
4493 emit_insn (gen_blockage ());
;; NOTE(review): incomplete listing — embedded line numbers 4498 .. 4522
;; jump; the insn's condition/template punctuation and the length-attr
;; constants fell in the gaps.  Visible text kept byte-identical.
;;
;; Direct (named) value-returning call.  Output and argument-descriptor
;; emission are delegated to output_call()/output_arg_descriptor().
4498 (define_insn "call_value_internal_symref"
4499 [(set (match_operand 0 "" "=rf")
4500 (call (mem:SI (match_operand:SI 1 "call_operand_address" ""))
4501 (match_operand 2 "" "i")))
4502 (clobber (reg:SI 2))
4503 (use (const_int 0))]
4504 ;;- Don't use operand 1 for most machines.
4505 "! TARGET_PORTABLE_RUNTIME"
4508 output_arg_descriptor (insn);
4509 return output_call (insn, operands[1]);
4511 [(set_attr "type" "call")
4512 (set (attr "length")
4513 ;; If we're sure that we can either reach the target or that the
4514 ;; linker can use a long-branch stub, then the length is 4 bytes.
4516 ;; For long-calls the length will be either 52 bytes (non-pic)
4517 ;; or 68 bytes (pic).
4518 ;; Else we have to use a long-call;
4519 (if_then_else (lt (plus (symbol_ref "total_code_bytes") (pc))
4522 (if_then_else (eq (symbol_ref "flag_pic")
;; NOTE(review): incomplete listing — embedded line numbers 4527 .. 4597
;; jump; several template/attr lines are missing.  Kept byte-identical.
;;
;; Indirect value-returning call through %r22 via the $$dyncall millicode
;; helper.  The output code dispatches on get_attr_length(insn), which the
;; length attribute below computes from the same target-flag conditions,
;; so the length uniquely identifies which sequence to emit:
;;   8  = direct bl to $$dyncall (target or stub reachable)
;;   12 = long millicode call, non-PIC, non-portable-runtime
;;   20 = long millicode call, portable runtime
;;   else = PIC sequence built with a fresh code label.
4527 (define_insn "call_value_internal_reg"
4528 [(set (match_operand 0 "" "=rf")
4529 (call (mem:SI (reg:SI 22))
4530 (match_operand 1 "" "i")))
4531 (clobber (reg:SI 2))
4532 (use (const_int 1))]
4538 /* First the special case for kernels, level 0 systems, etc. */
4539 if (TARGET_NO_SPACE_REGS || TARGET_FAST_INDIRECT_CALLS)
4540 return \"ble 0(%%sr4,%%r22)\;copy %%r31,%%r2\";
4542 /* Now the normal case -- we can reach $$dyncall directly or
4543 we're sure that we can get there via a long-branch stub.
4545 No need to check target flags as the length uniquely identifies
4546 the remaining cases. */
4547 if (get_attr_length (insn) == 8)
4548 return \".CALL\\tARGW0=GR\;bl $$dyncall,%%r31\;copy %%r31,%%r2\";
4550 /* Long millicode call, but we are not generating PIC or portable runtime
4552 if (get_attr_length (insn) == 12)
4553 return \".CALL\\tARGW0=GR\;ldil L%%$$dyncall,%%r2\;ble R%%$$dyncall(%%sr4,%%r2)\;copy %%r31,%%r2\";
4555 /* Long millicode call for portable runtime. */
4556 if (get_attr_length (insn) == 20)
4557 return \"ldil L%%$$dyncall,%%r31\;ldo R%%$$dyncall(%%r31),%%r31\;blr %%r0,%%r2\;bv,n %%r0(%%r31)\;nop\";
4559 /* If we're generating PIC code. */
4560 xoperands[0] = operands[1];
4561 xoperands[1] = gen_label_rtx ();
4562 output_asm_insn (\"bl .+8,%%r1\", xoperands);
4563 output_asm_insn (\"addil L%%$$dyncall-%1,%%r1\", xoperands);
4564 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, \"L\",
4565 CODE_LABEL_NUMBER (xoperands[1]));
4566 output_asm_insn (\"ldo R%%$$dyncall-%1(%%r1),%%r1\", xoperands);
4567 output_asm_insn (\"blr %%r0,%%r2\", xoperands);
4568 output_asm_insn (\"bv,n %%r0(%%r1)\\n\\tnop\", xoperands);
4571 [(set_attr "type" "dyncall")
4572 (set (attr "length")
4574 ;; First NO_SPACE_REGS
4575 (ne (symbol_ref "TARGET_NO_SPACE_REGS || TARGET_FAST_INDIRECT_CALLS")
4579 ;; Target (or stub) within reach
4580 (and (lt (plus (symbol_ref "total_code_bytes") (pc))
4582 (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
4586 ;; Out of reach, but not PIC or PORTABLE_RUNTIME
4587 (and (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
4589 (eq (symbol_ref "flag_pic")
4593 (ne (symbol_ref "TARGET_PORTABLE_RUNTIME")
4597 ;; Out of range PIC case
4600 ;; Call subroutine returning any type.
;; NOTE(review): incomplete listing — embedded line numbers 4602 .. 4624
;; jump; braces of the C body fell in the gaps.  Kept byte-identical.
;;
;; __builtin_apply support: make the call, then copy each value out of
;; the result block (operand 2 is a parallel of SETs), and finish with a
;; blockage so the optimizer cannot assume the result registers are dead.
4602 (define_expand "untyped_call"
4603 [(parallel [(call (match_operand 0 "" "")
4605 (match_operand 1 "" "")
4606 (match_operand 2 "" "")])]
4612 emit_call_insn (gen_call (operands[0], const0_rtx));
4614 for (i = 0; i < XVECLEN (operands[2], 0); i++)
4616 rtx set = XVECEXP (operands[2], 0, i);
4617 emit_move_insn (SET_DEST (set), SET_SRC (set));
4620 /* The optimizer does not know that the call sets the function value
4621 registers we stored in the result block. We avoid problems by
4622 claiming that all hard registers are used and clobbered at this
4624 emit_insn (gen_blockage ());
4632 [(set_attr "type" "move")
4633 (set_attr "length" "4")])
4635 ;; These are just placeholders so we know where branch tables
;; Marker for the start of a branch table; emits the GAS-only
;; ".begin_brtab" pseudo-op and occupies no code bytes (length 0).
;; NOTE(review): condition and template delimiters fell in listing gaps.
4637 (define_insn "begin_brtab"
4642 /* Only GAS actually supports this pseudo-op. */
4644 return \".begin_brtab\";
4648 [(set_attr "type" "move")
4649 (set_attr "length" "0")])
;; Marker for the end of a branch table; emits the GAS-only
;; ".end_brtab" pseudo-op and occupies no code bytes (length 0).
;; NOTE(review): condition and template delimiters fell in listing gaps.
4651 (define_insn "end_brtab"
4656 /* Only GAS actually supports this pseudo-op. */
4658 return \".end_brtab\";
4662 [(set_attr "type" "move")
4663 (set_attr "length" "0")])
4665 ;;; Hope this is only within a function...
;; Indirect jump through a general register; a single 4-byte branch.
;; NOTE(review): the condition string and output template lines fell in
;; listing gaps (embedded numbers 4666 .. 4671 jump).
4666 (define_insn "indirect_jump"
4667 [(set (pc) (match_operand:SI 0 "register_operand" "r"))]
4670 [(set_attr "type" "branch")
4671 (set_attr "length" "4")])
;; Zero-extract a bit field: operand 2 is the field width, operand 3 the
;; starting bit (both 5-bit constants).  Maps to the PA "extru"
;; instruction, whose bit position argument is the field's rightmost bit,
;; hence the %3+%2-1 arithmetic in the template.
4673 (define_insn "extzv"
4674 [(set (match_operand:SI 0 "register_operand" "=r")
4675 (zero_extract:SI (match_operand:SI 1 "register_operand" "r")
4676 (match_operand:SI 2 "uint5_operand" "")
4677 (match_operand:SI 3 "uint5_operand" "")))]
4679 "extru %1,%3+%2-1,%2,%0"
4680 [(set_attr "type" "shift")
4681 (set_attr "length" "4")])
4684 [(set (match_operand:SI 0 "register_operand" "=r")
4685 (zero_extract:SI (match_operand:SI 1 "register_operand" "r")
4687 (match_operand:SI 3 "register_operand" "q")))]
4690 [(set_attr "type" "shift")
4691 (set_attr "length" "4")])
4694 [(set (match_operand:SI 0 "register_operand" "=r")
4695 (sign_extract:SI (match_operand:SI 1 "register_operand" "r")
4696 (match_operand:SI 2 "uint5_operand" "")
4697 (match_operand:SI 3 "uint5_operand" "")))]
4699 "extrs %1,%3+%2-1,%2,%0"
4700 [(set_attr "type" "shift")
4701 (set_attr "length" "4")])
4704 [(set (match_operand:SI 0 "register_operand" "=r")
4705 (sign_extract:SI (match_operand:SI 1 "register_operand" "r")
4707 (match_operand:SI 3 "register_operand" "q")))]
4710 [(set_attr "type" "shift")
4711 (set_attr "length" "4")])
4714 [(set (zero_extract:SI (match_operand:SI 0 "register_operand" "+r,r")
4715 (match_operand:SI 1 "uint5_operand" "")
4716 (match_operand:SI 2 "uint5_operand" ""))
4717 (match_operand:SI 3 "arith5_operand" "r,L"))]
4720 dep %3,%2+%1-1,%1,%0
4721 depi %3,%2+%1-1,%1,%0"
4722 [(set_attr "type" "shift,shift")
4723 (set_attr "length" "4,4")])
4725 ;; Optimize insertion of const_int values of type 1...1xxxx.
4727 [(set (zero_extract:SI (match_operand:SI 0 "register_operand" "+r")
4728 (match_operand:SI 1 "uint5_operand" "")
4729 (match_operand:SI 2 "uint5_operand" ""))
4730 (match_operand:SI 3 "const_int_operand" ""))]
4731 "(INTVAL (operands[3]) & 0x10) != 0 &&
4732 (~INTVAL (operands[3]) & ((1L << INTVAL (operands[1])) - 1) & ~0xf) == 0"
4735 operands[3] = GEN_INT ((INTVAL (operands[3]) & 0xf) - 0x10);
4736 return \"depi %3,%2+%1-1,%1,%0\";
4738 [(set_attr "type" "shift")
4739 (set_attr "length" "4")])
4741 ;; This insn is used for some loop tests, typically loops reversed when
4742 ;; strength reduction is used. It is actually created when the instruction
4743 ;; combination phase combines the special loop test. Since this insn
4744 ;; is both a jump insn and has an output, it must deal with its own
4745 ;; reloads, hence the `m' constraints. The `!' constraints direct reload
4746 ;; to not choose the register alternatives in the event a reload is needed.
;; NOTE(review): incomplete listing — embedded numbers 4747 .. 4799 jump;
;; the pc/label if_then_else arms and several length constants are
;; missing from this copy.  Visible text kept byte-identical.
;;
;; Counted-loop back-edge: add operand 1 (a 5-bit constant) to the loop
;; counter (operand 0) and branch on the comparison.  The three
;; alternatives keep the counter in a GR, an FP reg, or memory; the
;; latter two need reload fix-ups, which is why output_dbra() does the
;; work and why the length computation below is alternative-dependent.
4747 (define_insn "decrement_and_branch_until_zero"
4750 (match_operator 2 "comparison_operator"
4751 [(plus:SI (match_operand:SI 0 "register_operand" "+!r,!*f,!*m")
4752 (match_operand:SI 1 "int5_operand" "L,L,L"))
4754 (label_ref (match_operand 3 "" ""))
4757 (plus:SI (match_dup 0) (match_dup 1)))
4758 (clobber (match_scratch:SI 4 "=X,r,r"))]
4760 "* return output_dbra (operands, insn, which_alternative); "
4761 ;; Do not expect to understand this the first time through.
4762 [(set_attr "type" "cbranch,multi,multi")
4763 (set (attr "length")
4764 (if_then_else (eq_attr "alternative" "0")
4765 ;; Loop counter in register case
4766 ;; Short branch has length of 4
4767 ;; Long branch has length of 8
4768 (if_then_else (lt (abs (minus (match_dup 3) (plus (pc) (const_int 8))))
4773 ;; Loop counter in FP reg case.
4774 ;; Extra goo to deal with additional reload insns.
4775 (if_then_else (eq_attr "alternative" "1")
4776 (if_then_else (lt (match_dup 3) (pc))
4778 (lt (abs (minus (match_dup 3) (plus (pc) (const_int 24))))
4783 (lt (abs (minus (match_dup 3) (plus (pc) (const_int 8))))
4787 ;; Loop counter in memory case.
4788 ;; Extra goo to deal with additional reload insns.
4789 (if_then_else (lt (match_dup 3) (pc))
4791 (lt (abs (minus (match_dup 3) (plus (pc) (const_int 12))))
4796 (lt (abs (minus (match_dup 3) (plus (pc) (const_int 8))))
4799 (const_int 16))))))])
4804 (match_operator 2 "movb_comparison_operator"
4805 [(match_operand:SI 1 "register_operand" "r,r,r,r") (const_int 0)])
4806 (label_ref (match_operand 3 "" ""))
4808 (set (match_operand:SI 0 "register_operand" "=!r,!*f,!*m,!*q")
4811 "* return output_movb (operands, insn, which_alternative, 0); "
4812 ;; Do not expect to understand this the first time through.
4813 [(set_attr "type" "cbranch,multi,multi,multi")
4814 (set (attr "length")
4815 (if_then_else (eq_attr "alternative" "0")
4816 ;; Loop counter in register case
4817 ;; Short branch has length of 4
4818 ;; Long branch has length of 8
4819 (if_then_else (lt (abs (minus (match_dup 3) (plus (pc) (const_int 8))))
4824 ;; Loop counter in FP reg case.
4825 ;; Extra goo to deal with additional reload insns.
4826 (if_then_else (eq_attr "alternative" "1")
4827 (if_then_else (lt (match_dup 3) (pc))
4829 (lt (abs (minus (match_dup 3) (plus (pc) (const_int 12))))
4834 (lt (abs (minus (match_dup 3) (plus (pc) (const_int 8))))
4838 ;; Loop counter in memory or sar case.
4839 ;; Extra goo to deal with additional reload insns.
4841 (lt (abs (minus (match_dup 3) (plus (pc) (const_int 8))))
4844 (const_int 12)))))])
4846 ;; Handle negated branch.
4850 (match_operator 2 "movb_comparison_operator"
4851 [(match_operand:SI 1 "register_operand" "r,r,r,r") (const_int 0)])
4853 (label_ref (match_operand 3 "" ""))))
4854 (set (match_operand:SI 0 "register_operand" "=!r,!*f,!*m,!*q")
4857 "* return output_movb (operands, insn, which_alternative, 1); "
4858 ;; Do not expect to understand this the first time through.
4859 [(set_attr "type" "cbranch,multi,multi,multi")
4860 (set (attr "length")
4861 (if_then_else (eq_attr "alternative" "0")
4862 ;; Loop counter in register case
4863 ;; Short branch has length of 4
4864 ;; Long branch has length of 8
4865 (if_then_else (lt (abs (minus (match_dup 3) (plus (pc) (const_int 8))))
4870 ;; Loop counter in FP reg case.
4871 ;; Extra goo to deal with additional reload insns.
4872 (if_then_else (eq_attr "alternative" "1")
4873 (if_then_else (lt (match_dup 3) (pc))
4875 (lt (abs (minus (match_dup 3) (plus (pc) (const_int 12))))
4880 (lt (abs (minus (match_dup 3) (plus (pc) (const_int 8))))
4884 ;; Loop counter in memory or SAR case.
4885 ;; Extra goo to deal with additional reload insns.
4887 (lt (abs (minus (match_dup 3) (plus (pc) (const_int 8))))
4890 (const_int 12)))))])
4892 ;; The next several patterns (parallel_addb, parallel_movb, fmpyadd and
4893 ;; fmpysub aren't currently used by the FSF sources, but will be soon.
4895 ;; They're in the FSF tree for documentation and to make Cygnus<->FSF
4898 [(set (pc) (label_ref (match_operand 3 "" "" )))
4899 (set (match_operand:SI 0 "register_operand" "=r")
4900 (plus:SI (match_operand:SI 1 "register_operand" "r")
4901 (match_operand:SI 2 "ireg_or_int5_operand" "rL")))]
4902 "(reload_completed && operands[0] == operands[1]) || operands[0] == operands[2]"
4905 return output_parallel_addb (operands, get_attr_length (insn));
4907 [(set_attr "type" "parallel_branch")
4908 (set (attr "length")
4909 (if_then_else (lt (abs (minus (match_dup 3) (plus (pc) (const_int 8))))
4915 [(set (pc) (label_ref (match_operand 2 "" "" )))
4916 (set (match_operand:SF 0 "register_operand" "=r")
4917 (match_operand:SF 1 "ireg_or_int5_operand" "rL"))]
4921 return output_parallel_movb (operands, get_attr_length (insn));
4923 [(set_attr "type" "parallel_branch")
4924 (set (attr "length")
4925 (if_then_else (lt (abs (minus (match_dup 2) (plus (pc) (const_int 8))))
4931 [(set (pc) (label_ref (match_operand 2 "" "" )))
4932 (set (match_operand:SI 0 "register_operand" "=r")
4933 (match_operand:SI 1 "ireg_or_int5_operand" "rL"))]
4937 return output_parallel_movb (operands, get_attr_length (insn));
4939 [(set_attr "type" "parallel_branch")
4940 (set (attr "length")
4941 (if_then_else (lt (abs (minus (match_dup 2) (plus (pc) (const_int 8))))
4947 [(set (pc) (label_ref (match_operand 2 "" "" )))
4948 (set (match_operand:HI 0 "register_operand" "=r")
4949 (match_operand:HI 1 "ireg_or_int5_operand" "rL"))]
4953 return output_parallel_movb (operands, get_attr_length (insn));
4955 [(set_attr "type" "parallel_branch")
4956 (set (attr "length")
4957 (if_then_else (lt (abs (minus (match_dup 2) (plus (pc) (const_int 8))))
4963 [(set (pc) (label_ref (match_operand 2 "" "" )))
4964 (set (match_operand:QI 0 "register_operand" "=r")
4965 (match_operand:QI 1 "ireg_or_int5_operand" "rL"))]
4969 return output_parallel_movb (operands, get_attr_length (insn));
4971 [(set_attr "type" "parallel_branch")
4972 (set (attr "length")
4973 (if_then_else (lt (abs (minus (match_dup 2) (plus (pc) (const_int 8))))
4979 [(set (match_operand 0 "register_operand" "=f")
4980 (mult (match_operand 1 "register_operand" "f")
4981 (match_operand 2 "register_operand" "f")))
4982 (set (match_operand 3 "register_operand" "+f")
4983 (plus (match_operand 4 "register_operand" "f")
4984 (match_operand 5 "register_operand" "f")))]
4985 "TARGET_PA_11 && ! TARGET_SOFT_FLOAT
4986 && reload_completed && fmpyaddoperands (operands)"
4989 if (GET_MODE (operands[0]) == DFmode)
4991 if (rtx_equal_p (operands[3], operands[5]))
4992 return \"fmpyadd,dbl %1,%2,%0,%4,%3\";
4994 return \"fmpyadd,dbl %1,%2,%0,%5,%3\";
4998 if (rtx_equal_p (operands[3], operands[5]))
4999 return \"fmpyadd,sgl %1,%2,%0,%4,%3\";
5001 return \"fmpyadd,sgl %1,%2,%0,%5,%3\";
5004 [(set_attr "type" "fpalu")
5005 (set_attr "length" "4")])
5008 [(set (match_operand 3 "register_operand" "+f")
5009 (plus (match_operand 4 "register_operand" "f")
5010 (match_operand 5 "register_operand" "f")))
5011 (set (match_operand 0 "register_operand" "=f")
5012 (mult (match_operand 1 "register_operand" "f")
5013 (match_operand 2 "register_operand" "f")))]
5014 "TARGET_PA_11 && ! TARGET_SOFT_FLOAT
5015 && reload_completed && fmpyaddoperands (operands)"
5018 if (GET_MODE (operands[0]) == DFmode)
5020 if (rtx_equal_p (operands[3], operands[5]))
5021 return \"fmpyadd,dbl %1,%2,%0,%4,%3\";
5023 return \"fmpyadd,dbl %1,%2,%0,%5,%3\";
5027 if (rtx_equal_p (operands[3], operands[5]))
5028 return \"fmpyadd,sgl %1,%2,%0,%4,%3\";
5030 return \"fmpyadd,sgl %1,%2,%0,%5,%3\";
5033 [(set_attr "type" "fpalu")
5034 (set_attr "length" "4")])
5037 [(set (match_operand 0 "register_operand" "=f")
5038 (mult (match_operand 1 "register_operand" "f")
5039 (match_operand 2 "register_operand" "f")))
5040 (set (match_operand 3 "register_operand" "+f")
5041 (minus (match_operand 4 "register_operand" "f")
5042 (match_operand 5 "register_operand" "f")))]
5043 "TARGET_PA_11 && ! TARGET_SOFT_FLOAT
5044 && reload_completed && fmpysuboperands (operands)"
5047 if (GET_MODE (operands[0]) == DFmode)
5048 return \"fmpysub,dbl %1,%2,%0,%5,%3\";
5050 return \"fmpysub,sgl %1,%2,%0,%5,%3\";
5052 [(set_attr "type" "fpalu")
5053 (set_attr "length" "4")])
5056 [(set (match_operand 3 "register_operand" "+f")
5057 (minus (match_operand 4 "register_operand" "f")
5058 (match_operand 5 "register_operand" "f")))
5059 (set (match_operand 0 "register_operand" "=f")
5060 (mult (match_operand 1 "register_operand" "f")
5061 (match_operand 2 "register_operand" "f")))]
5062 "TARGET_PA_11 && ! TARGET_SOFT_FLOAT
5063 && reload_completed && fmpysuboperands (operands)"
5066 if (GET_MODE (operands[0]) == DFmode)
5067 return \"fmpysub,dbl %1,%2,%0,%5,%3\";
5069 return \"fmpysub,sgl %1,%2,%0,%5,%3\";
5071 [(set_attr "type" "fpalu")
5072 (set_attr "length" "4")])
5074 ;; Clean up turds left by reload.
5076 [(set (match_operand 0 "reg_or_nonsymb_mem_operand" "")
5077 (match_operand 1 "register_operand" "fr"))
5078 (set (match_operand 2 "register_operand" "fr")
5080 "! TARGET_SOFT_FLOAT
5081 && GET_CODE (operands[0]) == MEM
5082 && ! MEM_VOLATILE_P (operands[0])
5083 && GET_MODE (operands[0]) == GET_MODE (operands[1])
5084 && GET_MODE (operands[0]) == GET_MODE (operands[2])
5085 && GET_MODE (operands[0]) == DFmode
5086 && GET_CODE (operands[1]) == REG
5087 && GET_CODE (operands[2]) == REG
5088 && ! side_effects_p (XEXP (operands[0], 0))
5089 && REGNO_REG_CLASS (REGNO (operands[1]))
5090 == REGNO_REG_CLASS (REGNO (operands[2]))"
5095 if (FP_REG_P (operands[1]))
5096 output_asm_insn (output_fp_move_double (operands), operands);
5098 output_asm_insn (output_move_double (operands), operands);
5100 if (rtx_equal_p (operands[1], operands[2]))
5103 xoperands[0] = operands[2];
5104 xoperands[1] = operands[1];
5106 if (FP_REG_P (xoperands[1]))
5107 output_asm_insn (output_fp_move_double (xoperands), xoperands);
5109 output_asm_insn (output_move_double (xoperands), xoperands);
5115 [(set (match_operand 0 "register_operand" "fr")
5116 (match_operand 1 "reg_or_nonsymb_mem_operand" ""))
5117 (set (match_operand 2 "register_operand" "fr")
5119 "! TARGET_SOFT_FLOAT
5120 && GET_CODE (operands[1]) == MEM
5121 && ! MEM_VOLATILE_P (operands[1])
5122 && GET_MODE (operands[0]) == GET_MODE (operands[1])
5123 && GET_MODE (operands[0]) == GET_MODE (operands[2])
5124 && GET_MODE (operands[0]) == DFmode
5125 && GET_CODE (operands[0]) == REG
5126 && GET_CODE (operands[2]) == REG
5127 && ! side_effects_p (XEXP (operands[1], 0))
5128 && REGNO_REG_CLASS (REGNO (operands[0]))
5129 == REGNO_REG_CLASS (REGNO (operands[2]))"
5134 if (FP_REG_P (operands[0]))
5135 output_asm_insn (output_fp_move_double (operands), operands);
5137 output_asm_insn (output_move_double (operands), operands);
5139 xoperands[0] = operands[2];
5140 xoperands[1] = operands[0];
5142 if (FP_REG_P (xoperands[1]))
5143 output_asm_insn (output_fp_move_double (xoperands), xoperands);
5145 output_asm_insn (output_move_double (xoperands), xoperands);
5150 ;; Flush the I and D cache line found at the address in operand 0.
5151 ;; This is used by the trampoline code for nested functions.
5152 ;; So long as the trampoline itself is less than 32 bytes this
;; Flush two data-cache lines (the addresses in operands 0 and 1) and
;; synchronize.  Used by the trampoline code; three 4-byte insns.
5155 (define_insn "dcacheflush"
5156 [(unspec_volatile [(const_int 1)] 0)
5157 (use (mem:SI (match_operand:SI 0 "register_operand" "r")))
5158 (use (mem:SI (match_operand:SI 1 "register_operand" "r")))]
5160 "fdc 0(%0)\;fdc 0(%1)\;sync"
5161 [(set_attr "type" "multi")
5162 (set_attr "length" "12")])
;; Flush two instruction-cache lines.  Saves %sr0 into scratch operand 4,
;; loads the space id of operand 2 into operand 3 to address the right
;; space for the "fic" ops, then restores %sr0.  The trailing nops pad
;; the sequence; total length 52 bytes (13 insns).
5164 (define_insn "icacheflush"
5165 [(unspec_volatile [(const_int 2)] 0)
5166 (use (mem:SI (match_operand:SI 0 "register_operand" "r")))
5167 (use (mem:SI (match_operand:SI 1 "register_operand" "r")))
5168 (use (match_operand:SI 2 "register_operand" "r"))
5169 (clobber (match_operand:SI 3 "register_operand" "=&r"))
5170 (clobber (match_operand:SI 4 "register_operand" "=&r"))]
5172 "mfsp %%sr0,%4\;ldsid (%2),%3\;mtsp %3,%%sr0\;fic 0(%%sr0,%0)\;fic 0(%%sr0,%1)\;sync\;mtsp %4,%%sr0\;nop\;nop\;nop\;nop\;nop\;nop"
5173 [(set_attr "type" "multi")
5174 (set_attr "length" "52")])
5176 ;; An out-of-line prologue.
;; NOTE(review): incomplete listing — embedded numbers 5177 .. 5224 jump;
;; braces and some NULL argument lines fell in the gaps.
;;
;; Call the out-of-line prologue millicode routine.  Two variants:
;; __outline_prologue_fp when a frame pointer is needed, __outline_prologue
;; otherwise; each emitted either as a ble through %r31 (portable runtime)
;; or a plain bl.  Clobbers the volatile call registers listed below.
5177 (define_insn "outline_prologue_call"
5178 [(unspec_volatile [(const_int 0)] 0)
5179 (clobber (reg:SI 31))
5180 (clobber (reg:SI 22))
5181 (clobber (reg:SI 21))
5182 (clobber (reg:SI 20))
5183 (clobber (reg:SI 19))
5184 (clobber (reg:SI 1))]
5188 extern int frame_pointer_needed;
5190 /* We need two different versions depending on whether or not we
5191 need a frame pointer. Also note that we return to the instruction
5192 immediately after the branch rather than two instructions after the
5193 break as normally is the case. */
5194 if (frame_pointer_needed)
5196 /* Must import the magic millicode routine(s). */
5197 output_asm_insn (\".IMPORT __outline_prologue_fp,MILLICODE\", NULL);
5199 if (TARGET_PORTABLE_RUNTIME)
5201 output_asm_insn (\"ldil L'__outline_prologue_fp,%%r31\", NULL);
5202 output_asm_insn (\"ble,n R'__outline_prologue_fp(%%sr0,%%r31)\",
5206 output_asm_insn (\"bl,n __outline_prologue_fp,%%r31\", NULL);
5210 /* Must import the magic millicode routine(s). */
5211 output_asm_insn (\".IMPORT __outline_prologue,MILLICODE\", NULL);
5213 if (TARGET_PORTABLE_RUNTIME)
5215 output_asm_insn (\"ldil L'__outline_prologue,%%r31\", NULL);
5216 output_asm_insn (\"ble,n R'__outline_prologue(%%sr0,%%r31)\", NULL);
5219 output_asm_insn (\"bl,n __outline_prologue,%%r31\", NULL);
5223 [(set_attr "type" "multi")
5224 (set_attr "length" "8")])
5226 ;; An out-of-line epilogue.
;; NOTE(review): incomplete listing — embedded numbers 5227 .. 5281 jump;
;; braces and some continuation lines fell in the gaps.
;;
;; Call the out-of-line epilogue millicode routine; mirrors
;; outline_prologue_call above but additionally clobbers %r2 (the return
;; pointer) since the epilogue routine handles the final return.
5227 (define_insn "outline_epilogue_call"
5228 [(unspec_volatile [(const_int 1)] 0)
5231 (clobber (reg:SI 31))
5232 (clobber (reg:SI 22))
5233 (clobber (reg:SI 21))
5234 (clobber (reg:SI 20))
5235 (clobber (reg:SI 19))
5236 (clobber (reg:SI 2))
5237 (clobber (reg:SI 1))]
5241 extern int frame_pointer_needed;
5243 /* We need two different versions depending on whether or not we
5244 need a frame pointer. Also note that we return to the instruction
5245 immediately after the branch rather than two instructions after the
5246 break as normally is the case. */
5247 if (frame_pointer_needed)
5249 /* Must import the magic millicode routine. */
5250 output_asm_insn (\".IMPORT __outline_epilogue_fp,MILLICODE\", NULL);
5252 /* The out-of-line prologue will make sure we return to the right
5254 if (TARGET_PORTABLE_RUNTIME)
5256 output_asm_insn (\"ldil L'__outline_epilogue_fp,%%r31\", NULL);
5257 output_asm_insn (\"ble,n R'__outline_epilogue_fp(%%sr0,%%r31)\",
5261 output_asm_insn (\"bl,n __outline_epilogue_fp,%%r31\", NULL);
5265 /* Must import the magic millicode routine. */
5266 output_asm_insn (\".IMPORT __outline_epilogue,MILLICODE\", NULL);
5268 /* The out-of-line prologue will make sure we return to the right
5270 if (TARGET_PORTABLE_RUNTIME)
5272 output_asm_insn (\"ldil L'__outline_epilogue,%%r31\", NULL);
5273 output_asm_insn (\"ble,n R'__outline_epilogue(%%sr0,%%r31)\", NULL);
5276 output_asm_insn (\"bl,n __outline_epilogue,%%r31\", NULL);
5280 [(set_attr "type" "multi")
5281 (set_attr "length" "8")])
5283 ;; Given a function pointer, canonicalize it so it can be
5284 ;; reliably compared to another function pointer.
;; NOTE(review): incomplete listing — embedded numbers 5285 .. 5301 jump
;; and the expander's tail (past 5301) is missing from this copy.
;;
;; Canonicalize a function pointer for comparison: copy it into %r26,
;; run the anonymous unspec insn below (which may call $$sh_func_adrs),
;; and read the canonical value back from %r29 into operand 0.
5285 (define_expand "canonicalize_funcptr_for_compare"
5286 [(set (reg:SI 26) (match_operand:SI 1 "register_operand" ""))
5287 (parallel [(set (reg:SI 29) (unspec:SI [(reg:SI 26)] 0))
5288 (clobber (match_dup 2))
5289 (clobber (reg:SI 26))
5290 (clobber (reg:SI 22))
5291 (clobber (reg:SI 31))])
5292 (set (match_operand:SI 0 "register_operand" "")
5294 "! TARGET_PORTABLE_RUNTIME"
5297 operands[2] = gen_reg_rtx (SImode);
5298 if (GET_CODE (operands[1]) != REG)
5300 rtx tmp = gen_reg_rtx (Pmode);
5301 emit_move_insn (tmp, operands[1]);
;; NOTE(review): the "(define_insn" header line for this pattern fell in
;; a listing gap (embedded numbers jump 5301 -> 5307), as did several
;; body and attribute lines.  Visible text kept byte-identical except a
;; garbled word in one C comment ("add24" -> "address").
;;
;; Body of canonicalize_funcptr_for_compare: leaves the canonical
;; function address in %r29.  Fast paths keep %r26 unchanged when the
;; low two bits are not 0x2 or when the value is below 4096; otherwise
;; it calls the $$sh_func_adrs millicode routine.  The skip distances in
;; the comib/comb branches depend on the length of the millicode call
;; sequence, hence the get_attr_length() dispatch.
5307 [(set (reg:SI 29) (unspec:SI [(reg:SI 26)] 0))
5308 (clobber (match_operand:SI 0 "register_operand" "=a"))
5309 (clobber (reg:SI 26))
5310 (clobber (reg:SI 22))
5311 (clobber (reg:SI 31))]
5315 /* Must import the magic millicode routine. */
5316 output_asm_insn (\".IMPORT $$sh_func_adrs,MILLICODE\", NULL);
5318 /* This is absolutely amazing.
5320 First, copy our input parameter into %r29 just in case we don't
5321 need to call $$sh_func_adrs. */
5322 output_asm_insn (\"copy %%r26,%%r29\", NULL);
5324 /* Next, examine the low two bits in %r26, if they aren't 0x2, then
5325 we use %r26 unchanged. */
5326 if (get_attr_length (insn) == 32)
5327 output_asm_insn (\"extru %%r26,31,2,%%r31\;comib,<>,n 2,%%r31,.+24\", NULL);
5328 else if (get_attr_length (insn) == 40)
5329 output_asm_insn (\"extru %%r26,31,2,%%r31\;comib,<>,n 2,%%r31,.+32\", NULL);
5330 else if (get_attr_length (insn) == 44)
5331 output_asm_insn (\"extru %%r26,31,2,%%r31\;comib,<>,n 2,%%r31,.+36\", NULL);
5333 output_asm_insn (\"extru %%r26,31,2,%%r31\;comib,<>,n 2,%%r31,.+20\", NULL);
5335 /* Next, compare %r26 with 4096, if %r26 is less than or equal to
5336 4096, then we use %r26 unchanged. */
5337 if (get_attr_length (insn) == 32)
5338 output_asm_insn (\"ldi 4096,%%r31\;comb,<<,n %%r26,%%r31,.+16\", NULL);
5339 else if (get_attr_length (insn) == 40)
5340 output_asm_insn (\"ldi 4096,%%r31\;comb,<<,n %%r26,%%r31,.+24\", NULL);
5341 else if (get_attr_length (insn) == 44)
5342 output_asm_insn (\"ldi 4096,%%r31\;comb,<<,n %%r26,%%r31,.+28\", NULL);
5344 output_asm_insn (\"ldi 4096,%%r31\;comb,<<,n %%r26,%%r31,.+12\", NULL);
5346 /* Else call $$sh_func_adrs to extract the function's real address. */
5347 return output_millicode_call (insn,
5348 gen_rtx_SYMBOL_REF (SImode, \"$$sh_func_adrs\"));
5350 [(set_attr "type" "multi")
5351 (set (attr "length")
5353 ;; Target (or stub) within reach
5354 (and (lt (plus (symbol_ref "total_code_bytes") (pc))
5356 (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
5361 (ne (symbol_ref "TARGET_NO_SPACE_REGS || TARGET_FAST_INDIRECT_CALLS")
5365 ;; Out of reach, but not PIC or PORTABLE_RUNTIME
5366 ;; same as NO_SPACE_REGS code
5367 (and (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
5369 (eq (symbol_ref "flag_pic")
5374 (ne (symbol_ref "TARGET_PORTABLE_RUNTIME")
5378 ;; Out of range and PIC
5381 ;; On the PA, the PIC register is call clobbered, so it must
5382 ;; be saved & restored around calls by the caller. If the call
5383 ;; doesn't return normally (nonlocal goto, or an exception is
5384 ;; thrown), then the code at the exception handler label must
5385 ;; restore the PIC register.
5386 (define_expand "exception_receiver"
5388 "!TARGET_PORTABLE_RUNTIME && flag_pic"
5391 /* Load the PIC register from the stack slot (in our caller's
5393 emit_move_insn (pic_offset_table_rtx,
5394 gen_rtx_MEM (SImode, plus_constant (stack_pointer_rtx, -32)));
5395 emit_insn (gen_rtx_USE (VOIDmode, pic_offset_table_rtx));
5396 emit_insn (gen_blockage ());