1 ;;- Machine description for HP PA-RISC architecture for GNU C compiler
2 ;; Copyright (C) 1992, 93-98, 1999 Free Software Foundation, Inc.
3 ;; Contributed by the Center for Software Science at the University
6 ;; This file is part of GNU CC.
8 ;; GNU CC is free software; you can redistribute it and/or modify
9 ;; it under the terms of the GNU General Public License as published by
10 ;; the Free Software Foundation; either version 2, or (at your option)
13 ;; GNU CC is distributed in the hope that it will be useful,
14 ;; but WITHOUT ANY WARRANTY; without even the implied warranty of
15 ;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 ;; GNU General Public License for more details.
18 ;; You should have received a copy of the GNU General Public License
19 ;; along with GNU CC; see the file COPYING. If not, write to
20 ;; the Free Software Foundation, 59 Temple Place - Suite 330,
21 ;; Boston, MA 02111-1307, USA.
23 ;; This gcc Version 2 machine description is inspired by sparc.md and
26 ;;- See file "rtl.def" for documentation on define_insn, match_*, et al.
28 ;; Insn type. Used to default other attribute values.
30 ;; type "unary" insns have one input operand (1) and one output operand (0)
31 ;; type "binary" insns have two input operands (1,2) and one output (0)
;; Value list and default for the "type" insn attribute.
;; NOTE(review): the opening (define_attr "type" line is not visible in this
;; excerpt; the string below is the attribute's value list, and
;; (const_string "binary") is its default value.
34 "move,unary,binary,shift,nullshift,compare,load,store,uncond_branch,branch,cbranch,fbranch,call,dyncall,fpload,fpstore,fpalu,fpcc,fpmulsgl,fpmuldbl,fpdivsgl,fpdivdbl,fpsqrtsgl,fpsqrtdbl,multi,milli,parallel_branch"
35 (const_string "binary"))
;; Classification consulted when combining an insn with a branch;
;; "none" marks insns that are not candidates for such combination.
37 (define_attr "pa_combine_type"
38 "fmpy,faddsub,uncond_branch,addmove,none"
39 (const_string "none"))
41 ;; Processor type (for scheduling, not code generation) -- this attribute
42 ;; must exactly match the processor_type enumeration in pa.h.
44 ;; FIXME: Add 800 scheduling for completeness?
;; The per-insn "cpu" attribute is a constant taken from pa_cpu_attr.
46 (define_attr "cpu" "700,7100,7100LC,7200,8000" (const (symbol_ref "pa_cpu_attr")))
48 ;; Length (in # of bytes).
;; Default insn length: loads/stores whose address is a symbolic memory
;; operand need an extra instruction (8 bytes); binary/shift ops whose
;; second input is not an arith_operand expand to 12 bytes; moves/unary
;; ops whose input is not an arith_operand take 8 bytes.
;; NOTE(review): the cond's trailing default arm and closing parens are
;; not visible in this excerpt.
49 (define_attr "length" ""
50 (cond [(eq_attr "type" "load,fpload")
51 (if_then_else (match_operand 1 "symbolic_memory_operand" "")
52 (const_int 8) (const_int 4))
54 (eq_attr "type" "store,fpstore")
55 (if_then_else (match_operand 0 "symbolic_memory_operand" "")
56 (const_int 8) (const_int 4))
58 (eq_attr "type" "binary,shift,nullshift")
59 (if_then_else (match_operand 2 "arith_operand" "")
60 (const_int 4) (const_int 12))
62 (eq_attr "type" "move,unary,shift,nullshift")
63 (if_then_else (match_operand 1 "arith_operand" "")
64 (const_int 4) (const_int 8))]
;; Inline asm statements default to length 4 and type "multi".
68 (define_asm_attributes
69 [(set_attr "length" "4")
70 (set_attr "type" "multi")])
72 ;; Attributes for instruction and branch scheduling
74 ;; For conditional branches.
;; An insn qualifies for a branch delay slot only if it is a single
;; 4-byte instruction and is not itself a branch/call/multi-style insn
;; (note the leading "!" negating the type list).
75 (define_attr "in_branch_delay" "false,true"
76 (if_then_else (and (eq_attr "type" "!uncond_branch,branch,cbranch,fbranch,call,dyncall,multi,milli,parallel_branch")
77 (eq_attr "length" "4"))
;; NOTE(review): the (const_string "true") arm is not visible in this excerpt.
79 (const_string "false")))
81 ;; Disallow instructions which use the FPU since they will tie up the FPU
82 ;; even if the instruction is nullified.
83 (define_attr "in_nullified_branch_delay" "false,true"
84 (if_then_else (and (eq_attr "type" "!uncond_branch,branch,cbranch,fbranch,call,dyncall,multi,milli,fpcc,fpalu,fpmulsgl,fpmuldbl,fpdivsgl,fpdivdbl,fpsqrtsgl,fpsqrtdbl,parallel_branch")
85 (eq_attr "length" "4"))
;; NOTE(review): the "true" arm is likewise elided here.
87 (const_string "false")))
89 ;; For calls and millicode calls. Allow unconditional branches in the
;; delay slot only when TARGET_JUMP_IN_DELAY is enabled (tested below).
91 (define_attr "in_call_delay" "false,true"
92 (cond [(and (eq_attr "type" "!uncond_branch,branch,cbranch,fbranch,call,dyncall,multi,milli,parallel_branch")
93 (eq_attr "length" "4"))
95 (eq_attr "type" "uncond_branch")
96 (if_then_else (ne (symbol_ref "TARGET_JUMP_IN_DELAY")
99 (const_string "false"))]
100 (const_string "false")))
103 ;; Call delay slot description.
;; Each define_delay triple is [slot-eligibility, annul-if-true,
;; annul-if-false]; (nil) means annulling is not supported for that case.
104 (define_delay (eq_attr "type" "call")
105 [(eq_attr "in_call_delay" "true") (nil) (nil)])
107 ;; millicode call delay slot description. Note it disallows delay slot
108 ;; when TARGET_PORTABLE_RUNTIME is true.
109 (define_delay (eq_attr "type" "milli")
110 [(and (eq_attr "in_call_delay" "true")
111 (eq (symbol_ref "TARGET_PORTABLE_RUNTIME") (const_int 0)))
;; NOTE(review): the closing (nil) (nil)]) of this define_delay is not
;; visible in this excerpt.
114 ;; Return and other similar instructions.
115 (define_delay (eq_attr "type" "branch,parallel_branch")
116 [(eq_attr "in_branch_delay" "true") (nil) (nil)])
118 ;; Floating point conditional branch delay slot description and
119 (define_delay (eq_attr "type" "fbranch")
120 [(eq_attr "in_branch_delay" "true")
121 (eq_attr "in_nullified_branch_delay" "true")
124 ;; Integer conditional branch delay slot description.
125 ;; Nullification of conditional branches on the PA is dependent on the
126 ;; direction of the branch. Forward branches nullify true and
127 ;; backward branches nullify false. If the direction is unknown
128 ;; then nullification is not allowed.
129 (define_delay (eq_attr "type" "cbranch")
130 [(eq_attr "in_branch_delay" "true")
131 (and (eq_attr "in_nullified_branch_delay" "true")
132 (attr_flag "forward"))
133 (and (eq_attr "in_nullified_branch_delay" "true")
134 (attr_flag "backward"))])
;; Unconditional branch delay slot, restricted by following_call (insn).
136 (define_delay (and (eq_attr "type" "uncond_branch")
137 (eq (symbol_ref "following_call (insn)")
139 [(eq_attr "in_branch_delay" "true") (nil) (nil)])
141 ;; Function units of the HPPA. The following data is for the 700 CPUs
142 ;; (Mustang CPU + Timex FPU aka PA-89) because that's what I have the docs for.
143 ;; Scheduling instructions for PA-83 machines according to the Snake
144 ;; constraints shouldn't hurt.
146 ;; (define_function_unit {name} {num-units} {n-users} {test}
147 ;; {ready-delay} {issue-delay} [{conflict-list}])
150 ;; (Noted only for documentation; units that take one cycle do not need to
153 ;; (define_function_unit "alu" 1 0
154 ;; (and (eq_attr "type" "unary,shift,nullshift,binary,move,address")
155 ;; (eq_attr "cpu" "700"))
159 ;; Memory. Disregarding Cache misses, the Mustang memory times are:
160 ;; load: 2, fpload: 3
161 ;; store, fpstore: 3, no D-cache operations should be scheduled.
;; Loads: result ready after 2 cycles, no issue restriction.
163 (define_function_unit "pa700memory" 1 0
164 (and (eq_attr "type" "load,fpload")
165 (eq_attr "cpu" "700")) 2 0)
;; Stores occupy the memory unit for 3 cycles (ready 3, issue-delay 3).
166 (define_function_unit "pa700memory" 1 0
167 (and (eq_attr "type" "store,fpstore")
168 (eq_attr "cpu" "700")) 3 3)
170 ;; The Timex (aka 700) has two floating-point units: ALU, and MUL/DIV/SQRT.
172 ;; Instruction Time Unit Minimum Distance (unit contention)
179 ;; fmpyadd 3 ALU,MPY 2
180 ;; fmpysub 3 ALU,MPY 2
181 ;; fmpycfxt 3 ALU,MPY 2
184 ;; fdiv,sgl 10 MPY 10
185 ;; fdiv,dbl 12 MPY 12
186 ;; fsqrt,sgl 14 MPY 14
187 ;; fsqrt,dbl 18 MPY 18
;; FP compare: latency 4, min 2 cycles between issues to the FP ALU.
189 (define_function_unit "pa700fp_alu" 1 0
190 (and (eq_attr "type" "fpcc")
191 (eq_attr "cpu" "700")) 4 2)
;; FP add/sub/convert: latency 3, issue-delay 2.
192 (define_function_unit "pa700fp_alu" 1 0
193 (and (eq_attr "type" "fpalu")
194 (eq_attr "cpu" "700")) 3 2)
;; FP multiply (single or double): latency 3, issue-delay 2.
195 (define_function_unit "pa700fp_mpy" 1 0
196 (and (eq_attr "type" "fpmulsgl,fpmuldbl")
197 (eq_attr "cpu" "700")) 3 2)
;; Divides and square roots are unpipelined: issue-delay equals latency.
198 (define_function_unit "pa700fp_mpy" 1 0
199 (and (eq_attr "type" "fpdivsgl")
200 (eq_attr "cpu" "700")) 10 10)
201 (define_function_unit "pa700fp_mpy" 1 0
202 (and (eq_attr "type" "fpdivdbl")
203 (eq_attr "cpu" "700")) 12 12)
204 (define_function_unit "pa700fp_mpy" 1 0
205 (and (eq_attr "type" "fpsqrtsgl")
206 (eq_attr "cpu" "700")) 14 14)
207 (define_function_unit "pa700fp_mpy" 1 0
208 (and (eq_attr "type" "fpsqrtdbl")
209 (eq_attr "cpu" "700")) 18 18)
211 ;; Function units for the 7100 and 7150. The 7100/7150 can dual-issue
212 ;; floating point computations with non-floating point computations (fp loads
213 ;; and stores are not fp computations).
216 ;; Memory. Disregarding Cache misses, memory loads take two cycles; stores also
217 ;; take two cycles, during which no Dcache operations should be scheduled.
218 ;; Any special cases are handled in pa_adjust_cost. The 7100, 7150 and 7100LC
219 ;; all have the same memory characteristics if one disregards cache misses.
220 (define_function_unit "pa7100memory" 1 0
221 (and (eq_attr "type" "load,fpload")
222 (eq_attr "cpu" "7100,7100LC")) 2 0)
223 (define_function_unit "pa7100memory" 1 0
224 (and (eq_attr "type" "store,fpstore")
225 (eq_attr "cpu" "7100,7100LC")) 2 2)
227 ;; The 7100/7150 has three floating-point units: ALU, MUL, and DIV.
229 ;; Instruction Time Unit Minimum Distance (unit contention)
236 ;; fmpyadd 2 ALU,MPY 1
237 ;; fmpysub 2 ALU,MPY 1
238 ;; fmpycfxt 2 ALU,MPY 1
242 ;; fdiv,dbl 15 DIV 15
244 ;; fsqrt,dbl 15 DIV 15
;; FP ALU ops (compare/add/sub): latency 2, fully pipelined (issue-delay 1).
246 (define_function_unit "pa7100fp_alu" 1 0
247 (and (eq_attr "type" "fpcc,fpalu")
248 (eq_attr "cpu" "7100")) 2 1)
249 (define_function_unit "pa7100fp_mpy" 1 0
250 (and (eq_attr "type" "fpmulsgl,fpmuldbl")
251 (eq_attr "cpu" "7100")) 2 1)
;; Divide/sqrt are unpipelined: single precision 8 cycles, double 15.
252 (define_function_unit "pa7100fp_div" 1 0
253 (and (eq_attr "type" "fpdivsgl,fpsqrtsgl")
254 (eq_attr "cpu" "7100")) 8 8)
255 (define_function_unit "pa7100fp_div" 1 0
256 (and (eq_attr "type" "fpdivdbl,fpsqrtdbl")
257 (eq_attr "cpu" "7100")) 15 15)
259 ;; To encourage dual issue we define function units corresponding to
260 ;; the instructions which can be dual issued. This is a rather crude
261 ;; approximation, the "pa7100nonflop" test in particular could be refined.
;; NOTE(review): the (and ... line opening each test below is not visible
;; in this excerpt.
262 (define_function_unit "pa7100flop" 1 1
264 (eq_attr "type" "fpcc,fpalu,fpmulsgl,fpmuldbl,fpdivsgl,fpsqrtsgl,fpdivdbl,fpsqrtdbl")
265 (eq_attr "cpu" "7100")) 1 1)
267 (define_function_unit "pa7100nonflop" 1 1
269 (eq_attr "type" "!fpcc,fpalu,fpmulsgl,fpmuldbl,fpdivsgl,fpsqrtsgl,fpdivdbl,fpsqrtdbl")
270 (eq_attr "cpu" "7100")) 1 1)
273 ;; Memory subsystem works just like 7100/7150 (except for cache miss times which
274 ;; we don't model here).
276 ;; The 7100LC has three floating-point units: ALU, MUL, and DIV.
277 ;; Note divides and sqrt flops lock the cpu until the flop is
278 ;; finished. fmpy and xmpyu (fmpyi) lock the cpu for one cycle.
279 ;; There's no way to avoid the penalty.
281 ;; Instruction Time Unit Minimum Distance (unit contention)
288 ;; fmpyadd,sgl 2 ALU,MPY 1
289 ;; fmpyadd,dbl 3 ALU,MPY 2
290 ;; fmpysub,sgl 2 ALU,MPY 1
291 ;; fmpysub,dbl 3 ALU,MPY 2
292 ;; fmpycfxt,sgl 2 ALU,MPY 1
293 ;; fmpycfxt,dbl 3 ALU,MPY 2
298 ;; fdiv,dbl 15 DIV 15
300 ;; fsqrt,dbl 15 DIV 15
;; These units are shared by the 7100LC and the 7200 (see cpu tests).
302 (define_function_unit "pa7100LCfp_alu" 1 0
303 (and (eq_attr "type" "fpcc,fpalu")
304 (eq_attr "cpu" "7100LC,7200")) 2 1)
;; Single-precision multiply: latency 2; double-precision: latency 3.
305 (define_function_unit "pa7100LCfp_mpy" 1 0
306 (and (eq_attr "type" "fpmulsgl")
307 (eq_attr "cpu" "7100LC,7200")) 2 1)
308 (define_function_unit "pa7100LCfp_mpy" 1 0
309 (and (eq_attr "type" "fpmuldbl")
310 (eq_attr "cpu" "7100LC,7200")) 3 2)
;; Divide/sqrt are unpipelined: sgl 8 cycles, dbl 15 cycles.
311 (define_function_unit "pa7100LCfp_div" 1 0
312 (and (eq_attr "type" "fpdivsgl,fpsqrtsgl")
313 (eq_attr "cpu" "7100LC,7200")) 8 8)
314 (define_function_unit "pa7100LCfp_div" 1 0
315 (and (eq_attr "type" "fpdivdbl,fpsqrtdbl")
316 (eq_attr "cpu" "7100LC,7200")) 15 15)
318 ;; Define the various functional units for dual-issue.
320 ;; There's only one floating point unit.
;; NOTE(review): the (and ... line opening each test below is not visible
;; in this excerpt.
321 (define_function_unit "pa7100LCflop" 1 1
323 (eq_attr "type" "fpcc,fpalu,fpmulsgl,fpmuldbl,fpdivsgl,fpsqrtsgl,fpdivdbl,fpsqrtdbl")
324 (eq_attr "cpu" "7100LC,7200")) 1 1)
326 ;; Shifts and memory ops execute in only one of the integer ALUs
327 (define_function_unit "pa7100LCshiftmem" 1 1
329 (eq_attr "type" "shift,nullshift,load,fpload,store,fpstore")
330 (eq_attr "cpu" "7100LC,7200")) 1 1)
332 ;; We have two basic ALUs.
333 (define_function_unit "pa7100LCalu" 2 1
335 (eq_attr "type" "!fpcc,fpalu,fpmulsgl,fpmuldbl,fpdivsgl,fpsqrtsgl,fpdivdbl,fpsqrtdbl")
336 (eq_attr "cpu" "7100LC,7200")) 1 1)
338 ;; I don't have complete information on the PA7200; however, most of
339 ;; what I've heard makes it look like a 7100LC without the store-store
340 ;; penalty. So that's how we'll model it.
342 ;; Memory. Disregarding Cache misses, memory loads and stores take
343 ;; two cycles. Any special cases are handled in pa_adjust_cost.
;; Unlike the 7100 units above, loads and stores share one description
;; here (issue-delay 0, i.e. no store-store penalty).
344 (define_function_unit "pa7200memory" 1 0
345 (and (eq_attr "type" "load,fpload,store,fpstore")
346 (eq_attr "cpu" "7200")) 2 0)
348 ;; I don't have detailed information on the PA7200 FP pipeline, so I
349 ;; treat it just like the 7100LC pipeline.
350 ;; Similarly for the multi-issue fake units.
353 ;; Scheduling for the PA8000 is somewhat different than scheduling for a
354 ;; traditional architecture.
356 ;; The PA8000 has a large (56) entry reorder buffer that is split between
357 ;; memory and non-memory operations.
359 ;; The PA8000 can issue two memory and two non-memory operations per cycle to
360 ;; the function units. Similarly, the PA8000 can retire two memory and two
361 ;; non-memory operations per cycle.
363 ;; Given the large reorder buffer, the processor can hide most latencies.
364 ;; According to HP, they've got the best results by scheduling for retirement
365 ;; bandwidth with limited latency scheduling for floating point operations.
366 ;; Latency for integer operations and memory references is ignored.
368 ;; We claim floating point operations have a 2 cycle latency and are
369 ;; fully pipelined, except for div and sqrt which are not pipelined.
371 ;; It is not necessary to define the shifter and integer alu units.
373 ;; These first two define_function_unit descriptions model retirement from
374 ;; the reorder buffer.
;; Retirement bandwidth: two memory (lsu) and two non-memory (alu)
;; operations per cycle.
;; NOTE(review): the (and ... line opening each test below is not visible
;; in this excerpt.
375 (define_function_unit "pa8000lsu" 2 1
377 (eq_attr "type" "load,fpload,store,fpstore")
378 (eq_attr "cpu" "8000")) 1 1)
380 (define_function_unit "pa8000alu" 2 1
382 (eq_attr "type" "!load,fpload,store,fpstore")
383 (eq_attr "cpu" "8000")) 1 1)
385 ;; Claim floating point ops have a 2 cycle latency, excluding div and
386 ;; sqrt, which are not pipelined and issue to different units.
387 (define_function_unit "pa8000fmac" 2 0
389 (eq_attr "type" "fpcc,fpalu,fpmulsgl,fpmuldbl")
390 (eq_attr "cpu" "8000")) 2 1)
;; Divide/sqrt: unpipelined; sgl 17 cycles, dbl 31 cycles.
392 (define_function_unit "pa8000fdiv" 2 1
394 (eq_attr "type" "fpdivsgl,fpsqrtsgl")
395 (eq_attr "cpu" "8000")) 17 17)
397 (define_function_unit "pa8000fdiv" 2 1
399 (eq_attr "type" "fpdivdbl,fpsqrtdbl")
400 (eq_attr "cpu" "8000")) 31 31)
403 ;; Compare instructions.
404 ;; This controls RTL generation and register allocation.
406 ;; We generate RTL for comparisons and branches by having the cmpxx
407 ;; patterns store away the operands. Then, the scc and bcc patterns
408 ;; emit RTL for both the compare and the branch.
;; NOTE(review): several lines of each expander (the "(define_expand"
;; preamble internals and C-body braces) are not visible in this excerpt.
;; cmpsi: record an SImode comparison in hppa_compare_op0/op1 for a
;; later scc/bcc pattern; the stored branch type is CMP_SI.
411 (define_expand "cmpsi"
413 (compare:CC (match_operand:SI 0 "reg_or_0_operand" "")
414 (match_operand:SI 1 "arith5_operand" "")))]
418 hppa_compare_op0 = operands[0];
419 hppa_compare_op1 = operands[1];
420 hppa_branch_type = CMP_SI;
;; cmpsf: SFmode FP comparison, only without soft-float.
424 (define_expand "cmpsf"
426 (compare:CCFP (match_operand:SF 0 "reg_or_0_operand" "")
427 (match_operand:SF 1 "reg_or_0_operand" "")))]
428 "! TARGET_SOFT_FLOAT"
431 hppa_compare_op0 = operands[0];
432 hppa_compare_op1 = operands[1];
433 hppa_branch_type = CMP_SF;
;; cmpdf: DFmode FP comparison, only without soft-float.
437 (define_expand "cmpdf"
439 (compare:CCFP (match_operand:DF 0 "reg_or_0_operand" "")
440 (match_operand:DF 1 "reg_or_0_operand" "")))]
441 "! TARGET_SOFT_FLOAT"
444 hppa_compare_op0 = operands[0];
445 hppa_compare_op1 = operands[1];
446 hppa_branch_type = CMP_DF;
;; FP compare insns. Each emits a single 4-byte fcmp whose condition
;; field is filled in by the %Y2 operand modifier.
;; NOTE(review): the "(define_insn ..." opening lines are not visible in
;; this excerpt.
452 (match_operator:CCFP 2 "comparison_operator"
453 [(match_operand:SF 0 "reg_or_0_operand" "fG")
454 (match_operand:SF 1 "reg_or_0_operand" "fG")]))]
455 "! TARGET_SOFT_FLOAT"
456 "fcmp,sgl,%Y2 %f0,%f1"
457 [(set_attr "length" "4")
458 (set_attr "type" "fpcc")])
;; Double-precision variant of the fcmp pattern above.
462 (match_operator:CCFP 2 "comparison_operator"
463 [(match_operand:DF 0 "reg_or_0_operand" "fG")
464 (match_operand:DF 1 "reg_or_0_operand" "fG")]))]
465 "! TARGET_SOFT_FLOAT"
466 "fcmp,dbl,%Y2 %f0,%f1"
467 [(set_attr "length" "4")
468 (set_attr "type" "fpcc")])
;; scc (store condition code) expanders. Each retrieves the operands
;; saved by the cmpxx patterns above; FP comparisons FAIL so only the
;; integer (CMP_SI) case proceeds.
;; NOTE(review): these expanders are heavily elided in this excerpt --
;; the "(define_expand" headers for several of them and the C-body
;; braces/FAIL statements are not visible.
473 [(set (match_operand:SI 0 "register_operand" "")
479 /* fp scc patterns rarely match, and are not a win on the PA. */
480 if (hppa_branch_type != CMP_SI)
482 /* set up operands from compare. */
483 operands[1] = hppa_compare_op0;
484 operands[2] = hppa_compare_op1;
485 /* fall through and generate default code */
489 [(set (match_operand:SI 0 "register_operand" "")
495 /* fp scc patterns rarely match, and are not a win on the PA. */
496 if (hppa_branch_type != CMP_SI)
498 operands[1] = hppa_compare_op0;
499 operands[2] = hppa_compare_op1;
503 [(set (match_operand:SI 0 "register_operand" "")
509 /* fp scc patterns rarely match, and are not a win on the PA. */
510 if (hppa_branch_type != CMP_SI)
512 operands[1] = hppa_compare_op0;
513 operands[2] = hppa_compare_op1;
517 [(set (match_operand:SI 0 "register_operand" "")
523 /* fp scc patterns rarely match, and are not a win on the PA. */
524 if (hppa_branch_type != CMP_SI)
526 operands[1] = hppa_compare_op0;
527 operands[2] = hppa_compare_op1;
531 [(set (match_operand:SI 0 "register_operand" "")
537 /* fp scc patterns rarely match, and are not a win on the PA. */
538 if (hppa_branch_type != CMP_SI)
540 operands[1] = hppa_compare_op0;
541 operands[2] = hppa_compare_op1;
545 [(set (match_operand:SI 0 "register_operand" "")
551 /* fp scc patterns rarely match, and are not a win on the PA. */
552 if (hppa_branch_type != CMP_SI)
554 operands[1] = hppa_compare_op0;
555 operands[2] = hppa_compare_op1;
;; Unsigned scc variants follow the same shape as the signed ones above.
558 (define_expand "sltu"
559 [(set (match_operand:SI 0 "register_operand" "")
560 (ltu:SI (match_dup 1)
565 if (hppa_branch_type != CMP_SI)
567 operands[1] = hppa_compare_op0;
568 operands[2] = hppa_compare_op1;
571 (define_expand "sgtu"
572 [(set (match_operand:SI 0 "register_operand" "")
573 (gtu:SI (match_dup 1)
578 if (hppa_branch_type != CMP_SI)
580 operands[1] = hppa_compare_op0;
581 operands[2] = hppa_compare_op1;
584 (define_expand "sleu"
585 [(set (match_operand:SI 0 "register_operand" "")
586 (leu:SI (match_dup 1)
591 if (hppa_branch_type != CMP_SI)
593 operands[1] = hppa_compare_op0;
594 operands[2] = hppa_compare_op1;
597 (define_expand "sgeu"
598 [(set (match_operand:SI 0 "register_operand" "")
599 (geu:SI (match_dup 1)
604 if (hppa_branch_type != CMP_SI)
606 operands[1] = hppa_compare_op0;
607 operands[2] = hppa_compare_op1;
610 ;; Instruction canonicalization puts immediate operands second, which
611 ;; is the reverse of what we want.
;; scc insn: comclr with the inverse condition (%B3) skips the ldi when
;; the comparison is false, leaving 0 in %0; otherwise %0 gets 1.
;; NOTE(review): the "(define_insn ..." opening line is not visible in
;; this excerpt.
614 [(set (match_operand:SI 0 "register_operand" "=r")
615 (match_operator:SI 3 "comparison_operator"
616 [(match_operand:SI 1 "register_operand" "r")
617 (match_operand:SI 2 "arith11_operand" "rI")]))]
619 "com%I2clr,%B3 %2,%1,%0\;ldi 1,%0"
620 [(set_attr "type" "binary")
621 (set_attr "length" "8")])
;; OR of two scc results computed with a three-insn comclr sequence
;; (%S3 = straight condition, %B6 = inverse condition).
623 (define_insn "iorscc"
624 [(set (match_operand:SI 0 "register_operand" "=r")
625 (ior:SI (match_operator:SI 3 "comparison_operator"
626 [(match_operand:SI 1 "register_operand" "r")
627 (match_operand:SI 2 "arith11_operand" "rI")])
628 (match_operator:SI 6 "comparison_operator"
629 [(match_operand:SI 4 "register_operand" "r")
630 (match_operand:SI 5 "arith11_operand" "rI")])))]
632 "com%I2clr,%S3 %2,%1,%%r0\;com%I5clr,%B6 %5,%4,%0\;ldi 1,%0"
633 [(set_attr "type" "binary")
634 (set_attr "length" "12")])
636 ;; Combiner patterns for common operations performed with the output
637 ;; from an scc insn (negscc and incscc).
;; negscc: produce -1 (via ldi -1) when the condition holds, else 0.
638 (define_insn "negscc"
639 [(set (match_operand:SI 0 "register_operand" "=r")
640 (neg:SI (match_operator:SI 3 "comparison_operator"
641 [(match_operand:SI 1 "register_operand" "r")
642 (match_operand:SI 2 "arith11_operand" "rI")])))]
644 "com%I2clr,%B3 %2,%1,%0\;ldi -1,%0"
645 [(set_attr "type" "binary")
646 (set_attr "length" "8")])
648 ;; Patterns for adding/subtracting the result of a boolean expression from
649 ;; a register. First we have special patterns that make use of the carry
650 ;; bit, and output only two instructions. For the cases we can't in
651 ;; general do in two instructions, the incscc pattern at the end outputs
652 ;; two or three instructions.
;; plus(leu,reg): the sub sets carry exactly when %2 <= %3, and addc
;; folds that carry into %1.
;; NOTE(review): the "(define_insn ..." opening lines of these three
;; anonymous patterns are not visible in this excerpt.
655 [(set (match_operand:SI 0 "register_operand" "=r")
656 (plus:SI (leu:SI (match_operand:SI 2 "register_operand" "r")
657 (match_operand:SI 3 "arith11_operand" "rI"))
658 (match_operand:SI 1 "register_operand" "r")))]
660 "sub%I3 %3,%2,%%r0\;addc %%r0,%1,%0"
661 [(set_attr "type" "binary")
662 (set_attr "length" "8")])
664 ; This need only accept registers for op3, since canonicalization
665 ; replaces geu with gtu when op3 is an integer.
667 [(set (match_operand:SI 0 "register_operand" "=r")
668 (plus:SI (geu:SI (match_operand:SI 2 "register_operand" "r")
669 (match_operand:SI 3 "register_operand" "r"))
670 (match_operand:SI 1 "register_operand" "r")))]
672 "sub %2,%3,%%r0\;addc %%r0,%1,%0"
673 [(set_attr "type" "binary")
674 (set_attr "length" "8")])
676 ; Match only integers for op3 here. This is used as canonical form of the
677 ; geu pattern when op3 is an integer. Don't match registers since we can't
678 ; make better code than the general incscc pattern.
680 [(set (match_operand:SI 0 "register_operand" "=r")
681 (plus:SI (gtu:SI (match_operand:SI 2 "register_operand" "r")
682 (match_operand:SI 3 "int11_operand" "I"))
683 (match_operand:SI 1 "register_operand" "r")))]
685 "addi %k3,%2,%%r0\;addc %%r0,%1,%0"
686 [(set_attr "type" "binary")
687 (set_attr "length" "8")])
;; General incscc: add 1 to %1 when the condition holds. The second
;; alternative (%1 not tied to %0) needs a third insn, hence length 12.
689 (define_insn "incscc"
690 [(set (match_operand:SI 0 "register_operand" "=r,r")
691 (plus:SI (match_operator:SI 4 "comparison_operator"
692 [(match_operand:SI 2 "register_operand" "r,r")
693 (match_operand:SI 3 "arith11_operand" "rI,rI")])
694 (match_operand:SI 1 "register_operand" "0,?r")))]
;; NOTE(review): the insn condition and the opening quote of the output
;; template are not visible in this excerpt.
697 com%I3clr,%B4 %3,%2,%%r0\;addi 1,%0,%0
698 com%I3clr,%B4 %3,%2,%%r0\;addi,tr 1,%1,%0\;copy %1,%0"
699 [(set_attr "type" "binary,binary")
700 (set_attr "length" "8,12")])
;; Subtract-with-borrow counterparts of the add-with-carry patterns:
;; the first insn of each pair sets the borrow bit from the comparison,
;; and subb subtracts it from %1.
;; NOTE(review): the "(define_insn ..." opening lines of these anonymous
;; patterns are not visible in this excerpt.
703 [(set (match_operand:SI 0 "register_operand" "=r")
704 (minus:SI (match_operand:SI 1 "register_operand" "r")
705 (gtu:SI (match_operand:SI 2 "register_operand" "r")
706 (match_operand:SI 3 "arith11_operand" "rI"))))]
708 "sub%I3 %3,%2,%%r0\;subb %1,0,%0"
709 [(set_attr "type" "binary")
710 (set_attr "length" "8")])
;; Same, with an additional register %4 subtracted as well.
713 [(set (match_operand:SI 0 "register_operand" "=r")
714 (minus:SI (minus:SI (match_operand:SI 1 "register_operand" "r")
715 (gtu:SI (match_operand:SI 2 "register_operand" "r")
716 (match_operand:SI 3 "arith11_operand" "rI")))
717 (match_operand:SI 4 "register_operand" "r")))]
719 "sub%I3 %3,%2,%%r0\;subb %1,%4,%0"
720 [(set_attr "type" "binary")
721 (set_attr "length" "8")])
723 ; This need only accept registers for op3, since canonicalization
724 ; replaces ltu with leu when op3 is an integer.
726 [(set (match_operand:SI 0 "register_operand" "=r")
727 (minus:SI (match_operand:SI 1 "register_operand" "r")
728 (ltu:SI (match_operand:SI 2 "register_operand" "r")
729 (match_operand:SI 3 "register_operand" "r"))))]
731 "sub %2,%3,%%r0\;subb %1,0,%0"
732 [(set_attr "type" "binary")
733 (set_attr "length" "8")])
736 [(set (match_operand:SI 0 "register_operand" "=r")
737 (minus:SI (minus:SI (match_operand:SI 1 "register_operand" "r")
738 (ltu:SI (match_operand:SI 2 "register_operand" "r")
739 (match_operand:SI 3 "register_operand" "r")))
740 (match_operand:SI 4 "register_operand" "r")))]
742 "sub %2,%3,%%r0\;subb %1,%4,%0"
743 [(set_attr "type" "binary")
744 (set_attr "length" "8")])
746 ; Match only integers for op3 here. This is used as canonical form of the
747 ; ltu pattern when op3 is an integer. Don't match registers since we can't
748 ; make better code than the general incscc pattern.
750 [(set (match_operand:SI 0 "register_operand" "=r")
751 (minus:SI (match_operand:SI 1 "register_operand" "r")
752 (leu:SI (match_operand:SI 2 "register_operand" "r")
753 (match_operand:SI 3 "int11_operand" "I"))))]
755 "addi %k3,%2,%%r0\;subb %1,0,%0"
756 [(set_attr "type" "binary")
757 (set_attr "length" "8")])
760 [(set (match_operand:SI 0 "register_operand" "=r")
761 (minus:SI (minus:SI (match_operand:SI 1 "register_operand" "r")
762 (leu:SI (match_operand:SI 2 "register_operand" "r")
763 (match_operand:SI 3 "int11_operand" "I")))
764 (match_operand:SI 4 "register_operand" "r")))]
766 "addi %k3,%2,%%r0\;subb %1,%4,%0"
767 [(set_attr "type" "binary")
768 (set_attr "length" "8")])
;; decscc: subtract 1 from %1 when the condition holds (mirror image of
;; incscc); second alternative needs a copy, hence length 12.
770 (define_insn "decscc"
771 [(set (match_operand:SI 0 "register_operand" "=r,r")
772 (minus:SI (match_operand:SI 1 "register_operand" "0,?r")
773 (match_operator:SI 4 "comparison_operator"
774 [(match_operand:SI 2 "register_operand" "r,r")
775 (match_operand:SI 3 "arith11_operand" "rI,rI")])))]
;; NOTE(review): the insn condition and the opening quote of the output
;; template are not visible in this excerpt.
778 com%I3clr,%B4 %3,%2,%%r0\;addi -1,%0,%0
779 com%I3clr,%B4 %3,%2,%%r0\;addi,tr -1,%1,%0\;copy %1,%0"
780 [(set_attr "type" "binary,binary")
781 (set_attr "length" "8,12")])
783 ; Patterns for max and min. (There is no need for an earlyclobber in the
784 ; last alternative since the middle alternative will match if op0 == op1.)
;; Each min/max is a conditional-nullify compare followed by a copy/ldi.
;; NOTE(review): the insn conditions and opening output-template quotes
;; are not visible in this excerpt.
786 (define_insn "sminsi3"
787 [(set (match_operand:SI 0 "register_operand" "=r,r,r")
788 (smin:SI (match_operand:SI 1 "register_operand" "%0,0,r")
789 (match_operand:SI 2 "arith11_operand" "r,I,M")))]
792 comclr,> %2,%0,%%r0\;copy %2,%0
793 comiclr,> %2,%0,%%r0\;ldi %2,%0
794 comclr,> %1,%r2,%0\;copy %1,%0"
795 [(set_attr "type" "multi,multi,multi")
796 (set_attr "length" "8,8,8")])
;; Unsigned min uses the unsigned condition ",>>".
798 (define_insn "uminsi3"
799 [(set (match_operand:SI 0 "register_operand" "=r,r")
800 (umin:SI (match_operand:SI 1 "register_operand" "%0,0")
801 (match_operand:SI 2 "arith11_operand" "r,I")))]
804 comclr,>> %2,%0,%%r0\;copy %2,%0
805 comiclr,>> %2,%0,%%r0\;ldi %2,%0"
806 [(set_attr "type" "multi,multi")
807 (set_attr "length" "8,8")])
;; Signed max mirrors smin with the ",<" condition.
809 (define_insn "smaxsi3"
810 [(set (match_operand:SI 0 "register_operand" "=r,r,r")
811 (smax:SI (match_operand:SI 1 "register_operand" "%0,0,r")
812 (match_operand:SI 2 "arith11_operand" "r,I,M")))]
815 comclr,< %2,%0,%%r0\;copy %2,%0
816 comiclr,< %2,%0,%%r0\;ldi %2,%0
817 comclr,< %1,%r2,%0\;copy %1,%0"
818 [(set_attr "type" "multi,multi,multi")
819 (set_attr "length" "8,8,8")])
;; Unsigned max uses the unsigned condition ",<<".
821 (define_insn "umaxsi3"
822 [(set (match_operand:SI 0 "register_operand" "=r,r")
823 (umax:SI (match_operand:SI 1 "register_operand" "%0,0")
824 (match_operand:SI 2 "arith11_operand" "r,I")))]
827 comclr,<< %2,%0,%%r0\;copy %2,%0
828 comiclr,<< %2,%0,%%r0\;ldi %2,%0"
829 [(set_attr "type" "multi,multi")
830 (set_attr "length" "8,8")])
;; abs: copy %1 to %0, nullifying the negate when %1 >= 0.
832 (define_insn "abssi2"
833 [(set (match_operand:SI 0 "register_operand" "=r")
834 (abs:SI (match_operand:SI 1 "register_operand" "r")))]
836 "or,>= %%r0,%1,%0\;subi 0,%0,%0"
837 [(set_attr "type" "multi")
838 (set_attr "length" "8")])
840 ;;; Experimental conditional move patterns
;; movsicc: expand a conditional move. Rebuilds the comparison from
;; hppa_compare_op0/op1 (FP comparisons are rejected via CMP_SI check).
;; NOTE(review): parts of the pattern body and the C-body braces/FAIL
;; statements are not visible in this excerpt.
842 (define_expand "movsicc"
843 [(set (match_operand:SI 0 "register_operand" "")
845 (match_operator 1 "comparison_operator"
848 (match_operand:SI 2 "reg_or_cint_move_operand" "")
849 (match_operand:SI 3 "reg_or_cint_move_operand" "")))]
853 enum rtx_code code = GET_CODE (operands[1]);
855 if (hppa_branch_type != CMP_SI)
858 /* operands[1] is currently the result of compare_from_rtx. We want to
859 emit a compare of the original operands. */
860 operands[1] = gen_rtx_fmt_ee (code, SImode, hppa_compare_op0, hppa_compare_op1);
861 operands[4] = hppa_compare_op0;
862 operands[5] = hppa_compare_op1;
865 ; We need the first constraint alternative in order to avoid
866 ; earlyclobbers on all other alternatives.
;; Conditional move with one source: comclr nullifies the following
;; copy/ldi/ldil/zdepi depending on the constant form of %1.
;; NOTE(review): the "(define_insn ..." opening line, part of the
;; pattern, and the opening output-template quote are not visible in
;; this excerpt.
868 [(set (match_operand:SI 0 "register_operand" "=r,r,r,r,r")
870 (match_operator 5 "comparison_operator"
871 [(match_operand:SI 3 "register_operand" "r,r,r,r,r")
872 (match_operand:SI 4 "arith11_operand" "rI,rI,rI,rI,rI")])
873 (match_operand:SI 1 "reg_or_cint_move_operand" "0,r,J,N,K")
877 com%I4clr,%S5 %4,%3,%%r0\;ldi 0,%0
878 com%I4clr,%B5 %4,%3,%0\;copy %1,%0
879 com%I4clr,%B5 %4,%3,%0\;ldi %1,%0
880 com%I4clr,%B5 %4,%3,%0\;ldil L'%1,%0
881 com%I4clr,%B5 %4,%3,%0\;zdepi %Z1,%0"
882 [(set_attr "type" "multi,multi,multi,multi,nullshift")
883 (set_attr "length" "8,8,8,8,8")])
;; Conditional move with two sources: alternatives 1-4 tie %1 to %0 and
;; conditionally load %2; alternatives 5-8 tie %2 to %0 and
;; conditionally load %1 (inverse condition %B5).
;; NOTE(review): the "(define_insn ..." opening line, part of the
;; pattern, and the opening output-template quote are not visible in
;; this excerpt.
886 [(set (match_operand:SI 0 "register_operand" "=r,r,r,r,r,r,r,r")
888 (match_operator 5 "comparison_operator"
889 [(match_operand:SI 3 "register_operand" "r,r,r,r,r,r,r,r")
890 (match_operand:SI 4 "arith11_operand" "rI,rI,rI,rI,rI,rI,rI,rI")])
891 (match_operand:SI 1 "reg_or_cint_move_operand" "0,0,0,0,r,J,N,K")
892 (match_operand:SI 2 "reg_or_cint_move_operand" "r,J,N,K,0,0,0,0")))]
895 com%I4clr,%S5 %4,%3,%%r0\;copy %2,%0
896 com%I4clr,%S5 %4,%3,%%r0\;ldi %2,%0
897 com%I4clr,%S5 %4,%3,%%r0\;ldil L'%2,%0
898 com%I4clr,%S5 %4,%3,%%r0\;zdepi %Z2,%0
899 com%I4clr,%B5 %4,%3,%%r0\;copy %1,%0
900 com%I4clr,%B5 %4,%3,%%r0\;ldi %1,%0
901 com%I4clr,%B5 %4,%3,%%r0\;ldil L'%1,%0
902 com%I4clr,%B5 %4,%3,%%r0\;zdepi %Z1,%0"
903 [(set_attr "type" "multi,multi,multi,nullshift,multi,multi,multi,nullshift")
904 (set_attr "length" "8,8,8,8,8,8,8,8")])
906 ;; Conditional Branches
;; bcc expanders. Each retrieves operands saved by the cmpxx patterns;
;; the signed ones additionally handle FP comparisons by emitting a
;; cmp_fp insn followed by an fp conditional branch (emit_bcond_fp).
;; NOTE(review): these expanders are heavily elided in this excerpt --
;; several "(define_expand" headers, the DONE statements, and the
;; C-body braces are not visible.
910 (if_then_else (eq (match_dup 1) (match_dup 2))
911 (label_ref (match_operand 0 "" ""))
916 if (hppa_branch_type != CMP_SI)
918 emit_insn (gen_cmp_fp (EQ, hppa_compare_op0, hppa_compare_op1));
919 emit_bcond_fp (NE, operands[0]);
922 /* set up operands from compare. */
923 operands[1] = hppa_compare_op0;
924 operands[2] = hppa_compare_op1;
925 /* fall through and generate default code */
930 (if_then_else (ne (match_dup 1) (match_dup 2))
931 (label_ref (match_operand 0 "" ""))
936 if (hppa_branch_type != CMP_SI)
938 emit_insn (gen_cmp_fp (NE, hppa_compare_op0, hppa_compare_op1));
939 emit_bcond_fp (NE, operands[0]);
942 operands[1] = hppa_compare_op0;
943 operands[2] = hppa_compare_op1;
948 (if_then_else (gt (match_dup 1) (match_dup 2))
949 (label_ref (match_operand 0 "" ""))
954 if (hppa_branch_type != CMP_SI)
956 emit_insn (gen_cmp_fp (GT, hppa_compare_op0, hppa_compare_op1));
957 emit_bcond_fp (NE, operands[0]);
960 operands[1] = hppa_compare_op0;
961 operands[2] = hppa_compare_op1;
966 (if_then_else (lt (match_dup 1) (match_dup 2))
967 (label_ref (match_operand 0 "" ""))
972 if (hppa_branch_type != CMP_SI)
974 emit_insn (gen_cmp_fp (LT, hppa_compare_op0, hppa_compare_op1));
975 emit_bcond_fp (NE, operands[0]);
978 operands[1] = hppa_compare_op0;
979 operands[2] = hppa_compare_op1;
984 (if_then_else (ge (match_dup 1) (match_dup 2))
985 (label_ref (match_operand 0 "" ""))
990 if (hppa_branch_type != CMP_SI)
992 emit_insn (gen_cmp_fp (GE, hppa_compare_op0, hppa_compare_op1));
993 emit_bcond_fp (NE, operands[0]);
996 operands[1] = hppa_compare_op0;
997 operands[2] = hppa_compare_op1;
1000 (define_expand "ble"
1002 (if_then_else (le (match_dup 1) (match_dup 2))
1003 (label_ref (match_operand 0 "" ""))
1008 if (hppa_branch_type != CMP_SI)
1010 emit_insn (gen_cmp_fp (LE, hppa_compare_op0, hppa_compare_op1));
1011 emit_bcond_fp (NE, operands[0]);
1014 operands[1] = hppa_compare_op0;
1015 operands[2] = hppa_compare_op1;
;; Unsigned branches: no FP counterpart, so CMP_SI is simply required.
1018 (define_expand "bgtu"
1020 (if_then_else (gtu (match_dup 1) (match_dup 2))
1021 (label_ref (match_operand 0 "" ""))
1026 if (hppa_branch_type != CMP_SI)
1028 operands[1] = hppa_compare_op0;
1029 operands[2] = hppa_compare_op1;
1032 (define_expand "bltu"
1034 (if_then_else (ltu (match_dup 1) (match_dup 2))
1035 (label_ref (match_operand 0 "" ""))
1040 if (hppa_branch_type != CMP_SI)
1042 operands[1] = hppa_compare_op0;
1043 operands[2] = hppa_compare_op1;
1046 (define_expand "bgeu"
1048 (if_then_else (geu (match_dup 1) (match_dup 2))
1049 (label_ref (match_operand 0 "" ""))
1054 if (hppa_branch_type != CMP_SI)
1056 operands[1] = hppa_compare_op0;
1057 operands[2] = hppa_compare_op1;
1060 (define_expand "bleu"
1062 (if_then_else (leu (match_dup 1) (match_dup 2))
1063 (label_ref (match_operand 0 "" ""))
1068 if (hppa_branch_type != CMP_SI)
1070 operands[1] = hppa_compare_op0;
1071 operands[2] = hppa_compare_op1;
1074 ;; Match the branch patterns.
1077 ;; Note a long backward conditional branch with an annulled delay slot
1078 ;; has a length of 12.
;; Conditional branch insn: assembly is produced by output_cbranch,
;; which is told whether the delay slot is annulled and the computed
;; insn length; the length attribute below depends on branch distance
;; and flag_pic.
;; NOTE(review): the "(define_insn ..." opening lines and the tail of
;; the length cond are not visible in this excerpt.
1082 (match_operator 3 "comparison_operator"
1083 [(match_operand:SI 1 "reg_or_0_operand" "rM")
1084 (match_operand:SI 2 "arith5_operand" "rL")])
1085 (label_ref (match_operand 0 "" ""))
1090 return output_cbranch (operands, INSN_ANNULLED_BRANCH_P (insn),
1091 get_attr_length (insn), 0, insn);
1093 [(set_attr "type" "cbranch")
1094 (set (attr "length")
1095 (cond [(lt (abs (minus (match_dup 0) (plus (pc) (const_int 8))))
1098 (lt (abs (minus (match_dup 0) (plus (pc) (const_int 8))))
1101 (eq (symbol_ref "flag_pic") (const_int 0))
1105 ;; Match the negated branch.
;; Same as the pattern above, but the label is in the else-arm, so
;; output_cbranch is called with negated = 1.
;; NOTE(review): the "(define_insn ..." opening lines and the tail of
;; the length cond are not visible in this excerpt.
1110 (match_operator 3 "comparison_operator"
1111 [(match_operand:SI 1 "reg_or_0_operand" "rM")
1112 (match_operand:SI 2 "arith5_operand" "rL")])
1114 (label_ref (match_operand 0 "" ""))))]
1118 return output_cbranch (operands, INSN_ANNULLED_BRANCH_P (insn),
1119 get_attr_length (insn), 1, insn);
1121 [(set_attr "type" "cbranch")
1122 (set (attr "length")
1123 (cond [(lt (abs (minus (match_dup 0) (plus (pc) (const_int 8))))
1126 (lt (abs (minus (match_dup 0) (plus (pc) (const_int 8))))
1129 (eq (symbol_ref "flag_pic") (const_int 0))
1133 ;; Branch on Bit patterns.
1137 (ne (zero_extract:SI (match_operand:SI 0 "register_operand" "r")
1139 (match_operand:SI 1 "uint5_operand" ""))
1141 (label_ref (match_operand 2 "" ""))
1146 return output_bb (operands, INSN_ANNULLED_BRANCH_P (insn),
1147 get_attr_length (insn), 0, insn, 0);
1149 [(set_attr "type" "cbranch")
1150 (set (attr "length")
1151 (if_then_else (lt (abs (minus (match_dup 2) (plus (pc) (const_int 8))))
1159 (ne (zero_extract:SI (match_operand:SI 0 "register_operand" "r")
1161 (match_operand:SI 1 "uint5_operand" ""))
1164 (label_ref (match_operand 2 "" ""))))]
1168 return output_bb (operands, INSN_ANNULLED_BRANCH_P (insn),
1169 get_attr_length (insn), 1, insn, 0);
1171 [(set_attr "type" "cbranch")
1172 (set (attr "length")
1173 (if_then_else (lt (abs (minus (match_dup 2) (plus (pc) (const_int 8))))
1181 (eq (zero_extract:SI (match_operand:SI 0 "register_operand" "r")
1183 (match_operand:SI 1 "uint5_operand" ""))
1185 (label_ref (match_operand 2 "" ""))
1190 return output_bb (operands, INSN_ANNULLED_BRANCH_P (insn),
1191 get_attr_length (insn), 0, insn, 1);
1193 [(set_attr "type" "cbranch")
1194 (set (attr "length")
1195 (if_then_else (lt (abs (minus (match_dup 2) (plus (pc) (const_int 8))))
1203 (eq (zero_extract:SI (match_operand:SI 0 "register_operand" "r")
1205 (match_operand:SI 1 "uint5_operand" ""))
1208 (label_ref (match_operand 2 "" ""))))]
1212 return output_bb (operands, INSN_ANNULLED_BRANCH_P (insn),
1213 get_attr_length (insn), 1, insn, 1);
1215 [(set_attr "type" "cbranch")
1216 (set (attr "length")
1217 (if_then_else (lt (abs (minus (match_dup 2) (plus (pc) (const_int 8))))
1222 ;; Branch on Variable Bit patterns.
1226 (ne (zero_extract:SI (match_operand:SI 0 "register_operand" "r")
1228 (match_operand:SI 1 "register_operand" "q"))
1230 (label_ref (match_operand 2 "" ""))
1235 return output_bvb (operands, INSN_ANNULLED_BRANCH_P (insn),
1236 get_attr_length (insn), 0, insn, 0);
1238 [(set_attr "type" "cbranch")
1239 (set (attr "length")
1240 (if_then_else (lt (abs (minus (match_dup 2) (plus (pc) (const_int 8))))
1248 (ne (zero_extract:SI (match_operand:SI 0 "register_operand" "r")
1250 (match_operand:SI 1 "register_operand" "q"))
1253 (label_ref (match_operand 2 "" ""))))]
1257 return output_bvb (operands, INSN_ANNULLED_BRANCH_P (insn),
1258 get_attr_length (insn), 1, insn, 0);
1260 [(set_attr "type" "cbranch")
1261 (set (attr "length")
1262 (if_then_else (lt (abs (minus (match_dup 2) (plus (pc) (const_int 8))))
1270 (eq (zero_extract:SI (match_operand:SI 0 "register_operand" "r")
1272 (match_operand:SI 1 "register_operand" "q"))
1274 (label_ref (match_operand 2 "" ""))
1279 return output_bvb (operands, INSN_ANNULLED_BRANCH_P (insn),
1280 get_attr_length (insn), 0, insn, 1);
1282 [(set_attr "type" "cbranch")
1283 (set (attr "length")
1284 (if_then_else (lt (abs (minus (match_dup 2) (plus (pc) (const_int 8))))
1292 (eq (zero_extract:SI (match_operand:SI 0 "register_operand" "r")
1294 (match_operand:SI 1 "register_operand" "q"))
1297 (label_ref (match_operand 2 "" ""))))]
1301 return output_bvb (operands, INSN_ANNULLED_BRANCH_P (insn),
1302 get_attr_length (insn), 1, insn, 1);
1304 [(set_attr "type" "cbranch")
1305 (set (attr "length")
1306 (if_then_else (lt (abs (minus (match_dup 2) (plus (pc) (const_int 8))))
1311 ;; Floating point branches
1313 [(set (pc) (if_then_else (ne (reg:CCFP 0) (const_int 0))
1314 (label_ref (match_operand 0 "" ""))
1316 "! TARGET_SOFT_FLOAT"
1319 if (INSN_ANNULLED_BRANCH_P (insn))
1320 return \"ftest\;b,n %0\";
1322 return \"ftest\;b%* %0\";
1324 [(set_attr "type" "fbranch")
1325 (set_attr "length" "8")])
1328 [(set (pc) (if_then_else (ne (reg:CCFP 0) (const_int 0))
1330 (label_ref (match_operand 0 "" ""))))]
1331 "! TARGET_SOFT_FLOAT"
1334 if (INSN_ANNULLED_BRANCH_P (insn))
1335 return \"ftest\;add,tr %%r0,%%r0,%%r0\;b,n %0\";
1337 return \"ftest\;add,tr %%r0,%%r0,%%r0\;b%* %0\";
1339 [(set_attr "type" "fbranch")
1340 (set_attr "length" "12")])
1342 ;; Move instructions
1344 (define_expand "movsi"
1345 [(set (match_operand:SI 0 "general_operand" "")
1346 (match_operand:SI 1 "general_operand" ""))]
1350 if (emit_move_sequence (operands, SImode, 0))
1354 ;; Reloading an SImode or DImode value requires a scratch register if
1355 ;; going in to or out of float point registers.
1357 (define_expand "reload_insi"
1358 [(set (match_operand:SI 0 "register_operand" "=Z")
1359 (match_operand:SI 1 "non_hard_reg_operand" ""))
1360 (clobber (match_operand:SI 2 "register_operand" "=&r"))]
1364 if (emit_move_sequence (operands, SImode, operands[2]))
1367 /* We don't want the clobber emitted, so handle this ourselves. */
1368 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
1372 (define_expand "reload_outsi"
1373 [(set (match_operand:SI 0 "non_hard_reg_operand" "")
1374 (match_operand:SI 1 "register_operand" "Z"))
1375 (clobber (match_operand:SI 2 "register_operand" "=&r"))]
1379 if (emit_move_sequence (operands, SImode, operands[2]))
1382 /* We don't want the clobber emitted, so handle this ourselves. */
1383 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
1387 ;;; pic symbol references
1390 [(set (match_operand:SI 0 "register_operand" "=r")
1391 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "r")
1392 (match_operand:SI 2 "symbolic_operand" ""))))]
1393 "flag_pic && operands[1] == pic_offset_table_rtx"
1395 [(set_attr "type" "load")
1396 (set_attr "length" "4")])
1399 [(set (match_operand:SI 0 "reg_or_nonsymb_mem_operand"
1400 "=r,r,r,r,r,Q,*q,!f,f,*TR")
1401 (match_operand:SI 1 "move_operand"
1402 "r,J,N,K,RQ,rM,rM,!fM,*RT,f"))]
1403 "(register_operand (operands[0], SImode)
1404 || reg_or_0_operand (operands[1], SImode))
1405 && ! TARGET_SOFT_FLOAT"
1417 [(set_attr "type" "move,move,move,shift,load,store,move,fpalu,fpload,fpstore")
1418 (set_attr "pa_combine_type" "addmove")
1419 (set_attr "length" "4,4,4,4,4,4,4,4,4,4")])
1422 [(set (match_operand:SI 0 "reg_or_nonsymb_mem_operand"
1424 (match_operand:SI 1 "move_operand"
1425 "r,J,N,K,RQ,rM,rM"))]
1426 "(register_operand (operands[0], SImode)
1427 || reg_or_0_operand (operands[1], SImode))
1428 && TARGET_SOFT_FLOAT"
1437 [(set_attr "type" "move,move,move,move,load,store,move")
1438 (set_attr "pa_combine_type" "addmove")
1439 (set_attr "length" "4,4,4,4,4,4,4")])
1442 [(set (match_operand:SI 0 "register_operand" "=r")
1443 (mem:SI (plus:SI (match_operand:SI 1 "basereg_operand" "r")
1444 (match_operand:SI 2 "register_operand" "r"))))]
1445 "! TARGET_DISABLE_INDEXING"
1448 /* Reload can create backwards (relative to cse) unscaled index
1449 address modes when eliminating registers and possibly for
1450 pseudos that don't get hard registers. Deal with it. */
1451 if (operands[2] == hard_frame_pointer_rtx
1452 || operands[2] == stack_pointer_rtx)
1453 return \"ldwx %1(%2),%0\";
1455 return \"ldwx %2(%1),%0\";
1457 [(set_attr "type" "load")
1458 (set_attr "length" "4")])
1461 [(set (match_operand:SI 0 "register_operand" "=r")
1462 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "r")
1463 (match_operand:SI 2 "basereg_operand" "r"))))]
1464 "! TARGET_DISABLE_INDEXING"
1467 /* Reload can create backwards (relative to cse) unscaled index
1468 address modes when eliminating registers and possibly for
1469 pseudos that don't get hard registers. Deal with it. */
1470 if (operands[1] == hard_frame_pointer_rtx
1471 || operands[1] == stack_pointer_rtx)
1472 return \"ldwx %2(%1),%0\";
1474 return \"ldwx %1(%2),%0\";
1476 [(set_attr "type" "load")
1477 (set_attr "length" "4")])
1479 ;; Load or store with base-register modification.
1481 (define_expand "pre_load"
1482 [(parallel [(set (match_operand:SI 0 "register_operand" "")
1483 (mem (plus (match_operand 1 "register_operand" "")
1484 (match_operand 2 "pre_cint_operand" ""))))
1486 (plus (match_dup 1) (match_dup 2)))])]
1490 emit_insn (gen_pre_ldw (operands[0], operands[1], operands[2]));
1494 (define_insn "pre_ldw"
1495 [(set (match_operand:SI 0 "register_operand" "=r")
1496 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "+r")
1497 (match_operand:SI 2 "pre_cint_operand" ""))))
1499 (plus:SI (match_dup 1) (match_dup 2)))]
1503 if (INTVAL (operands[2]) < 0)
1504 return \"ldwm %2(%1),%0\";
1505 return \"ldws,mb %2(%1),%0\";
1507 [(set_attr "type" "load")
1508 (set_attr "length" "4")])
1511 [(set (mem:SI (plus:SI (match_operand:SI 0 "register_operand" "+r")
1512 (match_operand:SI 1 "pre_cint_operand" "")))
1513 (match_operand:SI 2 "reg_or_0_operand" "rM"))
1515 (plus:SI (match_dup 0) (match_dup 1)))]
1519 if (INTVAL (operands[1]) < 0)
1520 return \"stwm %r2,%1(%0)\";
1521 return \"stws,mb %r2,%1(%0)\";
1523 [(set_attr "type" "store")
1524 (set_attr "length" "4")])
1527 [(set (match_operand:SI 0 "register_operand" "=r")
1528 (mem:SI (match_operand:SI 1 "register_operand" "+r")))
1530 (plus:SI (match_dup 1)
1531 (match_operand:SI 2 "post_cint_operand" "")))]
1535 if (INTVAL (operands[2]) > 0)
1536 return \"ldwm %2(%1),%0\";
1537 return \"ldws,ma %2(%1),%0\";
1539 [(set_attr "type" "load")
1540 (set_attr "length" "4")])
1542 (define_expand "post_store"
1543 [(parallel [(set (mem (match_operand 0 "register_operand" ""))
1544 (match_operand 1 "reg_or_0_operand" ""))
1547 (match_operand 2 "post_cint_operand" "")))])]
1551 emit_insn (gen_post_stw (operands[0], operands[1], operands[2]));
1555 (define_insn "post_stw"
1556 [(set (mem:SI (match_operand:SI 0 "register_operand" "+r"))
1557 (match_operand:SI 1 "reg_or_0_operand" "rM"))
1559 (plus:SI (match_dup 0)
1560 (match_operand:SI 2 "post_cint_operand" "")))]
1564 if (INTVAL (operands[2]) > 0)
1565 return \"stwm %r1,%2(%0)\";
1566 return \"stws,ma %r1,%2(%0)\";
1568 [(set_attr "type" "store")
1569 (set_attr "length" "4")])
1572 ;; Note since this pattern can be created at reload time (via movsi), all
1573 ;; the same rules for movsi apply here. (no new pseudos, no temporaries).
1574 (define_insn "pic_load_label"
1575 [(set (match_operand:SI 0 "register_operand" "=a")
1576 (match_operand:SI 1 "pic_label_operand" ""))]
1580 rtx label_rtx = gen_label_rtx ();
1582 extern FILE *asm_out_file;
1584 xoperands[0] = operands[0];
1585 xoperands[1] = operands[1];
1586 xoperands[2] = label_rtx;
1587 output_asm_insn (\"bl .+8,%0\", xoperands);
1588 output_asm_insn (\"depi 0,31,2,%0\", xoperands);
1589 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, \"L\",
1590 CODE_LABEL_NUMBER (label_rtx));
1592 /* If we're trying to load the address of a label that happens to be
1593 close, then we can use a shorter sequence. */
1594 if (GET_CODE (operands[1]) == LABEL_REF
1596 && abs (insn_addresses[INSN_UID (XEXP (operands[1], 0))]
1597 - insn_addresses[INSN_UID (insn)]) < 8100)
1599 /* Prefixing with R% here is wrong; it extracts just 11 bits and is
1600 always non-negative. */
1601 output_asm_insn (\"ldo %1-%2(%0),%0\", xoperands);
1605 output_asm_insn (\"addil L%%%1-%2,%0\", xoperands);
1606 output_asm_insn (\"ldo R%%%1-%2(%0),%0\", xoperands);
1610 [(set_attr "type" "multi")
1611 (set_attr "length" "16")]) ; 12 or 16
1614 [(set (match_operand:SI 0 "register_operand" "=a")
1615 (plus:SI (match_operand:SI 1 "register_operand" "r")
1616 (high:SI (match_operand 2 "" ""))))]
1617 "symbolic_operand (operands[2], Pmode)
1618 && ! function_label_operand (operands[2])
1621 [(set_attr "type" "binary")
1622 (set_attr "length" "4")])
1624 ; We need this to make sure CSE doesn't simplify a memory load with a
1625 ; symbolic address, whose content it thinks it knows. For PIC, what CSE
1626 ; thinks is the real value will be the address of that value.
1628 [(set (match_operand:SI 0 "register_operand" "=r")
1630 (lo_sum:SI (match_operand:SI 1 "register_operand" "r")
1632 [(match_operand:SI 2 "symbolic_operand" "")] 0))))]
1638 return \"ldw RT'%G2(%1),%0\";
1640 [(set_attr "type" "load")
1641 (set_attr "length" "4")])
1643 ;; Always use addil rather than ldil;add sequences. This allows the
1644 ;; HP linker to eliminate the dp relocation if the symbolic operand
1645 ;; lives in the TEXT space.
1647 [(set (match_operand:SI 0 "register_operand" "=a")
1648 (high:SI (match_operand 1 "" "")))]
1649 "symbolic_operand (operands[1], Pmode)
1650 && ! function_label_operand (operands[1])
1651 && ! read_only_operand (operands[1])
1655 if (TARGET_LONG_LOAD_STORE)
1656 return \"addil NLR'%H1,%%r27\;ldo N'%H1(%%r1),%%r1\";
1658 return \"addil LR'%H1,%%r27\";
1660 [(set_attr "type" "binary")
1661 (set (attr "length")
1662 (if_then_else (eq (symbol_ref "TARGET_LONG_LOAD_STORE") (const_int 0))
1667 ;; This is for use in the prologue/epilogue code. We need it
1668 ;; to add large constants to a stack pointer or frame pointer.
1669 ;; Because of the additional %r1 pressure, we probably do not
1670 ;; want to use this in general code, so make it available
1671 ;; only after reload.
1673 [(set (match_operand:SI 0 "register_operand" "=!a,*r")
1674 (plus:SI (match_operand:SI 1 "register_operand" "r,r")
1675 (high:SI (match_operand 2 "const_int_operand" ""))))]
1679 ldil L'%G2,%0\;addl %0,%1,%0"
1680 [(set_attr "type" "binary,binary")
1681 (set_attr "length" "4,8")])
1684 [(set (match_operand:SI 0 "register_operand" "=r")
1685 (high:SI (match_operand 1 "" "")))]
;; Condition fixed: the mode argument belongs inside the symbolic_operand
;; call.  The previous form "(... !symbolic_operand (operands[1]), Pmode)"
;; was a C comma expression evaluating to Pmode (nonzero), which made this
;; clause of the insn condition always true.
1686 "(!flag_pic || !symbolic_operand (operands[1], Pmode))
1687 && !is_function_label_plus_const (operands[1])"
1690 if (symbolic_operand (operands[1], Pmode))
1691 return \"ldil LR'%H1,%0\";
1693 return \"ldil L'%G1,%0\";
1695 [(set_attr "type" "move")
1696 (set_attr "length" "4")])
1699 [(set (match_operand:SI 0 "register_operand" "=r")
1700 (lo_sum:SI (match_operand:SI 1 "register_operand" "r")
1701 (match_operand:SI 2 "immediate_operand" "i")))]
1702 "!is_function_label_plus_const (operands[2])"
1705 if (flag_pic && symbolic_operand (operands[2], Pmode))
1707 else if (symbolic_operand (operands[2], Pmode))
1708 return \"ldo RR'%G2(%1),%0\";
1710 return \"ldo R'%G2(%1),%0\";
1712 [(set_attr "type" "move")
1713 (set_attr "length" "4")])
1715 ;; Now that a symbolic_address plus a constant is broken up early
1716 ;; in the compilation phase (for better CSE) we need a special
1717 ;; combiner pattern to load the symbolic address plus the constant
1718 ;; in only 2 instructions. (For cases where the symbolic address
1719 ;; was not a common subexpression.)
1721 [(set (match_operand:SI 0 "register_operand" "")
1722 (match_operand:SI 1 "symbolic_operand" ""))
1723 (clobber (match_operand:SI 2 "register_operand" ""))]
1724 "! (flag_pic && pic_label_operand (operands[1], SImode))"
1725 [(set (match_dup 2) (high:SI (match_dup 1)))
1726 (set (match_dup 0) (lo_sum:SI (match_dup 2) (match_dup 1)))]
1729 ;; hppa_legitimize_address goes to a great deal of trouble to
1730 ;; create addresses which use indexing. In some cases, this
1731 ;; is a loss because there aren't any store instructions which
1732 ;; allow indexed addresses (with integer register source).
1734 ;; These define_splits try to turn a 3 insn store into
1735 ;; a 2 insn store with some creative RTL rewriting.
1737 [(set (mem:SI (plus:SI (mult:SI (match_operand:SI 0 "register_operand" "")
1738 (match_operand:SI 1 "shadd_operand" ""))
1739 (plus:SI (match_operand:SI 2 "register_operand" "")
1740 (match_operand:SI 3 "const_int_operand" ""))))
1741 (match_operand:SI 4 "register_operand" ""))
1742 (clobber (match_operand:SI 5 "register_operand" ""))]
1744 [(set (match_dup 5) (plus:SI (mult:SI (match_dup 0) (match_dup 1))
1746 (set (mem:SI (plus:SI (match_dup 5) (match_dup 3))) (match_dup 4))]
1750 [(set (mem:HI (plus:SI (mult:SI (match_operand:SI 0 "register_operand" "")
1751 (match_operand:SI 1 "shadd_operand" ""))
1752 (plus:SI (match_operand:SI 2 "register_operand" "")
1753 (match_operand:SI 3 "const_int_operand" ""))))
1754 (match_operand:HI 4 "register_operand" ""))
1755 (clobber (match_operand:SI 5 "register_operand" ""))]
1757 [(set (match_dup 5) (plus:SI (mult:SI (match_dup 0) (match_dup 1))
1759 (set (mem:HI (plus:SI (match_dup 5) (match_dup 3))) (match_dup 4))]
1763 [(set (mem:QI (plus:SI (mult:SI (match_operand:SI 0 "register_operand" "")
1764 (match_operand:SI 1 "shadd_operand" ""))
1765 (plus:SI (match_operand:SI 2 "register_operand" "")
1766 (match_operand:SI 3 "const_int_operand" ""))))
1767 (match_operand:QI 4 "register_operand" ""))
1768 (clobber (match_operand:SI 5 "register_operand" ""))]
1770 [(set (match_dup 5) (plus:SI (mult:SI (match_dup 0) (match_dup 1))
1772 (set (mem:QI (plus:SI (match_dup 5) (match_dup 3))) (match_dup 4))]
1775 (define_expand "movhi"
1776 [(set (match_operand:HI 0 "general_operand" "")
1777 (match_operand:HI 1 "general_operand" ""))]
1781 if (emit_move_sequence (operands, HImode, 0))
1786 [(set (match_operand:HI 0 "reg_or_nonsymb_mem_operand" "=r,r,r,r,r,Q,*q,!f")
1787 (match_operand:HI 1 "move_operand" "r,J,N,K,RQ,rM,rM,!fM"))]
1788 "register_operand (operands[0], HImode)
1789 || reg_or_0_operand (operands[1], HImode)"
1799 [(set_attr "type" "move,move,move,shift,load,store,move,fpalu")
1800 (set_attr "pa_combine_type" "addmove")
1801 (set_attr "length" "4,4,4,4,4,4,4,4")])
1804 [(set (match_operand:HI 0 "register_operand" "=r")
1805 (mem:HI (plus:SI (match_operand:SI 1 "basereg_operand" "r")
1806 (match_operand:SI 2 "register_operand" "r"))))]
1807 "! TARGET_DISABLE_INDEXING"
1810 /* Reload can create backwards (relative to cse) unscaled index
1811 address modes when eliminating registers and possibly for
1812 pseudos that don't get hard registers. Deal with it. */
1813 if (operands[2] == hard_frame_pointer_rtx
1814 || operands[2] == stack_pointer_rtx)
1815 return \"ldhx %1(%2),%0\";
1817 return \"ldhx %2(%1),%0\";
1819 [(set_attr "type" "load")
1820 (set_attr "length" "4")])
1823 [(set (match_operand:HI 0 "register_operand" "=r")
1824 (mem:HI (plus:SI (match_operand:SI 1 "register_operand" "r")
1825 (match_operand:SI 2 "basereg_operand" "r"))))]
1826 "! TARGET_DISABLE_INDEXING"
1829 /* Reload can create backwards (relative to cse) unscaled index
1830 address modes when eliminating registers and possibly for
1831 pseudos that don't get hard registers. Deal with it. */
1832 if (operands[1] == hard_frame_pointer_rtx
1833 || operands[1] == stack_pointer_rtx)
1834 return \"ldhx %2(%1),%0\";
1836 return \"ldhx %1(%2),%0\";
1838 [(set_attr "type" "load")
1839 (set_attr "length" "4")])
1841 ; Now zero extended variants.
1843 [(set (match_operand:SI 0 "register_operand" "=r")
1844 (zero_extend:SI (mem:HI
1846 (match_operand:SI 1 "basereg_operand" "r")
1847 (match_operand:SI 2 "register_operand" "r")))))]
1848 "! TARGET_DISABLE_INDEXING"
1851 /* Reload can create backwards (relative to cse) unscaled index
1852 address modes when eliminating registers and possibly for
1853 pseudos that don't get hard registers. Deal with it. */
1854 if (operands[2] == hard_frame_pointer_rtx
1855 || operands[2] == stack_pointer_rtx)
1856 return \"ldhx %1(%2),%0\";
1858 return \"ldhx %2(%1),%0\";
1860 [(set_attr "type" "load")
1861 (set_attr "length" "4")])
1864 [(set (match_operand:SI 0 "register_operand" "=r")
1865 (zero_extend:SI (mem:HI
1867 (match_operand:SI 1 "register_operand" "r")
1868 (match_operand:SI 2 "basereg_operand" "r")))))]
1869 "! TARGET_DISABLE_INDEXING"
1872 /* Reload can create backwards (relative to cse) unscaled index
1873 address modes when eliminating registers and possibly for
1874 pseudos that don't get hard registers. Deal with it. */
1875 if (operands[1] == hard_frame_pointer_rtx
1876 || operands[1] == stack_pointer_rtx)
1877 return \"ldhx %2(%1),%0\";
1879 return \"ldhx %1(%2),%0\";
1881 [(set_attr "type" "load")
1882 (set_attr "length" "4")])
1885 [(set (match_operand:HI 0 "register_operand" "=r")
1886 (mem:HI (plus:SI (match_operand:SI 1 "register_operand" "+r")
1887 (match_operand:SI 2 "int5_operand" "L"))))
1889 (plus:SI (match_dup 1) (match_dup 2)))]
1892 [(set_attr "type" "load")
1893 (set_attr "length" "4")])
1895 ; And a zero extended variant.
1897 [(set (match_operand:SI 0 "register_operand" "=r")
1898 (zero_extend:SI (mem:HI
1900 (match_operand:SI 1 "register_operand" "+r")
1901 (match_operand:SI 2 "int5_operand" "L")))))
1903 (plus:SI (match_dup 1) (match_dup 2)))]
1906 [(set_attr "type" "load")
1907 (set_attr "length" "4")])
1910 [(set (mem:HI (plus:SI (match_operand:SI 0 "register_operand" "+r")
1911 (match_operand:SI 1 "int5_operand" "L")))
1912 (match_operand:HI 2 "reg_or_0_operand" "rM"))
1914 (plus:SI (match_dup 0) (match_dup 1)))]
1916 "sths,mb %r2,%1(%0)"
1917 [(set_attr "type" "store")
1918 (set_attr "length" "4")])
1921 [(set (match_operand:HI 0 "register_operand" "=r")
1922 (high:HI (match_operand 1 "const_int_operand" "")))]
1925 [(set_attr "type" "move")
1926 (set_attr "length" "4")])
1929 [(set (match_operand:HI 0 "register_operand" "=r")
1930 (lo_sum:HI (match_operand:HI 1 "register_operand" "r")
1931 (match_operand 2 "const_int_operand" "")))]
1934 [(set_attr "type" "move")
1935 (set_attr "length" "4")])
1937 (define_expand "movqi"
1938 [(set (match_operand:QI 0 "general_operand" "")
1939 (match_operand:QI 1 "general_operand" ""))]
1943 if (emit_move_sequence (operands, QImode, 0))
1948 [(set (match_operand:QI 0 "reg_or_nonsymb_mem_operand" "=r,r,r,r,r,Q,*q,!f")
1949 (match_operand:QI 1 "move_operand" "r,J,N,K,RQ,rM,rM,!fM"))]
1950 "register_operand (operands[0], QImode)
1951 || reg_or_0_operand (operands[1], QImode)"
1961 [(set_attr "type" "move,move,move,shift,load,store,move,fpalu")
1962 (set_attr "pa_combine_type" "addmove")
1963 (set_attr "length" "4,4,4,4,4,4,4,4")])
1966 [(set (match_operand:QI 0 "register_operand" "=r")
1967 (mem:QI (plus:SI (match_operand:SI 1 "basereg_operand" "r")
1968 (match_operand:SI 2 "register_operand" "r"))))]
1969 "! TARGET_DISABLE_INDEXING"
1972 /* Reload can create backwards (relative to cse) unscaled index
1973 address modes when eliminating registers and possibly for
1974 pseudos that don't get hard registers. Deal with it. */
1975 if (operands[2] == hard_frame_pointer_rtx
1976 || operands[2] == stack_pointer_rtx)
1977 return \"ldbx %1(%2),%0\";
1979 return \"ldbx %2(%1),%0\";
1981 [(set_attr "type" "load")
1982 (set_attr "length" "4")])
1985 [(set (match_operand:QI 0 "register_operand" "=r")
1986 (mem:QI (plus:SI (match_operand:SI 1 "register_operand" "r")
1987 (match_operand:SI 2 "basereg_operand" "r"))))]
1988 "! TARGET_DISABLE_INDEXING"
1991 /* Reload can create backwards (relative to cse) unscaled index
1992 address modes when eliminating registers and possibly for
1993 pseudos that don't get hard registers. Deal with it. */
1994 if (operands[1] == hard_frame_pointer_rtx
1995 || operands[1] == stack_pointer_rtx)
1996 return \"ldbx %2(%1),%0\";
1998 return \"ldbx %1(%2),%0\";
2000 [(set_attr "type" "load")
2001 (set_attr "length" "4")])
2003 ; Indexed byte load with zero extension to SImode or HImode.
2005 [(set (match_operand:SI 0 "register_operand" "=r")
2006 (zero_extend:SI (mem:QI
2008 (match_operand:SI 1 "basereg_operand" "r")
2009 (match_operand:SI 2 "register_operand" "r")))))]
2010 "! TARGET_DISABLE_INDEXING"
2013 /* Reload can create backwards (relative to cse) unscaled index
2014 address modes when eliminating registers and possibly for
2015 pseudos that don't get hard registers. Deal with it. */
2016 if (operands[2] == hard_frame_pointer_rtx
2017 || operands[2] == stack_pointer_rtx)
2018 return \"ldbx %1(%2),%0\";
2020 return \"ldbx %2(%1),%0\";
2022 [(set_attr "type" "load")
2023 (set_attr "length" "4")])
2026 [(set (match_operand:SI 0 "register_operand" "=r")
2027 (zero_extend:SI (mem:QI
2029 (match_operand:SI 1 "register_operand" "r")
2030 (match_operand:SI 2 "basereg_operand" "r")))))]
2031 "! TARGET_DISABLE_INDEXING"
2034 /* Reload can create backwards (relative to cse) unscaled index
2035 address modes when eliminating registers and possibly for
2036 pseudos that don't get hard registers. Deal with it. */
2037 if (operands[1] == hard_frame_pointer_rtx
2038 || operands[1] == stack_pointer_rtx)
2039 return \"ldbx %2(%1),%0\";
2041 return \"ldbx %1(%2),%0\";
2043 [(set_attr "type" "load")
2044 (set_attr "length" "4")])
2047 [(set (match_operand:HI 0 "register_operand" "=r")
2048 (zero_extend:HI (mem:QI
2050 (match_operand:SI 1 "basereg_operand" "r")
2051 (match_operand:SI 2 "register_operand" "r")))))]
2052 "! TARGET_DISABLE_INDEXING"
2055 /* Reload can create backwards (relative to cse) unscaled index
2056 address modes when eliminating registers and possibly for
2057 pseudos that don't get hard registers. Deal with it. */
2058 if (operands[2] == hard_frame_pointer_rtx
2059 || operands[2] == stack_pointer_rtx)
2060 return \"ldbx %1(%2),%0\";
2062 return \"ldbx %2(%1),%0\";
2064 [(set_attr "type" "load")
2065 (set_attr "length" "4")])
2068 [(set (match_operand:HI 0 "register_operand" "=r")
2069 (zero_extend:HI (mem:QI
2071 (match_operand:SI 1 "register_operand" "r")
2072 (match_operand:SI 2 "basereg_operand" "r")))))]
2073 "! TARGET_DISABLE_INDEXING"
2076 /* Reload can create backwards (relative to cse) unscaled index
2077 address modes when eliminating registers and possibly for
2078 pseudos that don't get hard registers. Deal with it. */
2079 if (operands[1] == hard_frame_pointer_rtx
2080 || operands[1] == stack_pointer_rtx)
2081 return \"ldbx %2(%1),%0\";
2083 return \"ldbx %1(%2),%0\";
2085 [(set_attr "type" "load")
2086 (set_attr "length" "4")])
2089 [(set (match_operand:QI 0 "register_operand" "=r")
2090 (mem:QI (plus:SI (match_operand:SI 1 "register_operand" "+r")
2091 (match_operand:SI 2 "int5_operand" "L"))))
2092 (set (match_dup 1) (plus:SI (match_dup 1) (match_dup 2)))]
2095 [(set_attr "type" "load")
2096 (set_attr "length" "4")])
2098 ; Now the same thing with zero extensions.
2100 [(set (match_operand:SI 0 "register_operand" "=r")
2101 (zero_extend:SI (mem:QI (plus:SI
2102 (match_operand:SI 1 "register_operand" "+r")
2103 (match_operand:SI 2 "int5_operand" "L")))))
2104 (set (match_dup 1) (plus:SI (match_dup 1) (match_dup 2)))]
2107 [(set_attr "type" "load")
2108 (set_attr "length" "4")])
2111 [(set (match_operand:HI 0 "register_operand" "=r")
2112 (zero_extend:HI (mem:QI (plus:SI
2113 (match_operand:SI 1 "register_operand" "+r")
2114 (match_operand:SI 2 "int5_operand" "L")))))
2115 (set (match_dup 1) (plus:SI (match_dup 1) (match_dup 2)))]
2118 [(set_attr "type" "load")
2119 (set_attr "length" "4")])
2122 [(set (mem:QI (plus:SI (match_operand:SI 0 "register_operand" "+r")
2123 (match_operand:SI 1 "int5_operand" "L")))
2124 (match_operand:QI 2 "reg_or_0_operand" "rM"))
2126 (plus:SI (match_dup 0) (match_dup 1)))]
2128 "stbs,mb %r2,%1(%0)"
2129 [(set_attr "type" "store")
2130 (set_attr "length" "4")])
2132 ;; The definition of this insn does not really explain what it does,
2133 ;; but it should suffice
2134 ;; that anything generated as this insn will be recognized as one
2135 ;; and that it will not successfully combine with anything.
2136 (define_expand "movstrsi"
2137 [(parallel [(set (match_operand:BLK 0 "" "")
2138 (match_operand:BLK 1 "" ""))
2139 (clobber (match_dup 7))
2140 (clobber (match_dup 8))
2141 (clobber (match_dup 4))
2142 (clobber (match_dup 5))
2143 (clobber (match_dup 6))
2144 (use (match_operand:SI 2 "arith_operand" ""))
2145 (use (match_operand:SI 3 "const_int_operand" ""))])]
2151 /* HP provides very fast block move library routine for the PA;
2152 this routine includes:
2154 4x4 byte at a time block moves,
2155 1x4 byte at a time with alignment checked at runtime with
2156 attempts to align the source and destination as needed
2159 With that in mind, here's the heuristics to try and guess when
2160 the inlined block move will be better than the library block
2163 If the size isn't constant, then always use the library routines.
2165 If the size is large in respect to the known alignment, then use
2166 the library routines.
2168 If the size is small in respect to the known alignment, then open
2169 code the copy (since that will lead to better scheduling).
2171 Else use the block move pattern. */
2173 /* Undetermined size, use the library routine. */
2174 if (GET_CODE (operands[2]) != CONST_INT)
2177 size = INTVAL (operands[2]);
2178 align = INTVAL (operands[3]);
2179 align = align > 4 ? 4 : align;
2181 /* If size/alignment > 16 (i.e. the size is large in respect to the
2182 known alignment), then use the library routines. */
2183 if (size / align > 16)
2186 /* This does happen, but not often enough to worry much about. */
2187 if (size / align < MOVE_RATIO)
2190 /* Fall through means we're going to use our block move pattern. */
2192 = change_address (operands[0], VOIDmode,
2193 copy_to_mode_reg (SImode, XEXP (operands[0], 0)));
2195 = change_address (operands[1], VOIDmode,
2196 copy_to_mode_reg (SImode, XEXP (operands[1], 0)));
2197 operands[4] = gen_reg_rtx (SImode);
2198 operands[5] = gen_reg_rtx (SImode);
2199 operands[6] = gen_reg_rtx (SImode);
2200 operands[7] = XEXP (operands[0], 0);
2201 operands[8] = XEXP (operands[1], 0);
2204 ;; The operand constraints are written like this to support both compile-time
2205 ;; and run-time determined byte count. If the count is run-time determined,
2206 ;; the register with the byte count is clobbered by the copying code, and
2207 ;; therefore it is forced to operand 2. If the count is compile-time
2208 ;; determined, we need two scratch registers for the unrolled code.
2209 (define_insn "movstrsi_internal"
2210 [(set (mem:BLK (match_operand:SI 0 "register_operand" "+r,r"))
2211 (mem:BLK (match_operand:SI 1 "register_operand" "+r,r")))
2212 (clobber (match_dup 0))
2213 (clobber (match_dup 1))
2214 (clobber (match_operand:SI 2 "register_operand" "=r,r")) ;loop cnt/tmp
2215 (clobber (match_operand:SI 3 "register_operand" "=&r,&r")) ;item tmp
2216 (clobber (match_operand:SI 6 "register_operand" "=&r,&r")) ;item tmp2
2217 (use (match_operand:SI 4 "arith_operand" "J,2")) ;byte count
2218 (use (match_operand:SI 5 "const_int_operand" "n,n"))] ;alignment
2220 "* return output_block_move (operands, !which_alternative);"
2221 [(set_attr "type" "multi,multi")])
2223 ;; Floating point move insns
2225 ;; This pattern forces (set (reg:DF ...) (const_double ...))
2226 ;; to be reloaded by putting the constant into memory when
2227 ;; reg is a floating point register.
2229 ;; For integer registers we use ldil;ldo to set the appropriate
2232 ;; This must come before the movdf pattern, and it must be present
2233 ;; to handle obscure reloading cases.
2235 [(set (match_operand:DF 0 "register_operand" "=?r,f")
2236 (match_operand:DF 1 "" "?F,m"))]
2237 "GET_CODE (operands[1]) == CONST_DOUBLE
2238 && operands[1] != CONST0_RTX (DFmode)
2239 && ! TARGET_SOFT_FLOAT"
2240 "* return (which_alternative == 0 ? output_move_double (operands)
2241 : \"fldd%F1 %1,%0\");"
2242 [(set_attr "type" "move,fpload")
2243 (set_attr "length" "16,4")])
2245 (define_expand "movdf"
2246 [(set (match_operand:DF 0 "general_operand" "")
2247 (match_operand:DF 1 "general_operand" ""))]
2251 if (emit_move_sequence (operands, DFmode, 0))
2255 ;; Reloading an SImode or DImode value requires a scratch register if
2256 ;; going in to or out of float point registers.
;; reload_indf: gives reload a scratch register (operand 2) for moving
;; a DF value into a class-Z register.  When emit_move_sequence handles
;; the move itself, a plain SET is emitted instead so the clobber never
;; reaches the insn stream (see the /* ... */ comment below).
2258 (define_expand "reload_indf"
2259 [(set (match_operand:DF 0 "register_operand" "=Z")
2260 (match_operand:DF 1 "non_hard_reg_operand" ""))
2261 (clobber (match_operand:DF 2 "register_operand" "=&r"))]
2265 if (emit_move_sequence (operands, DFmode, operands[2]))
2268 /* We don't want the clobber emitted, so handle this ourselves. */
2269 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
;; reload_outdf: mirror image of reload_indf for moves out of a
;; class-Z register, with the same scratch/clobber handling.
2273 (define_expand "reload_outdf"
2274 [(set (match_operand:DF 0 "non_hard_reg_operand" "")
2275 (match_operand:DF 1 "register_operand" "Z"))
2276 (clobber (match_operand:DF 2 "register_operand" "=&r"))]
2280 if (emit_move_sequence (operands, DFmode, operands[2]))
2283 /* We don't want the clobber emitted, so handle this ourselves. */
2284 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
;; Main DFmode move for hardware FP ("(define_insn" header lies in a
;; listing gap).  Eight alternatives cover FP reg, integer register
;; pair, and memory combinations.  The condition rejects storing a
;; CONST_DOUBLE directly to memory; FP-register or zero cases go
;; through output_fp_move_double, everything else through
;; output_move_double.
2289 [(set (match_operand:DF 0 "reg_or_nonsymb_mem_operand"
2290 "=f,*r,RQ,?o,?Q,f,*r,*r")
2291 (match_operand:DF 1 "reg_or_0_or_nonsymb_mem_operand"
2292 "fG,*rG,f,*r,*r,RQ,o,RQ"))]
2293 "(register_operand (operands[0], DFmode)
2294 || reg_or_0_operand (operands[1], DFmode))
2295 && ! (GET_CODE (operands[1]) == CONST_DOUBLE
2296 && GET_CODE (operands[0]) == MEM)
2297 && ! TARGET_SOFT_FLOAT"
2300 if (FP_REG_P (operands[0]) || FP_REG_P (operands[1])
2301 || operands[1] == CONST0_RTX (DFmode))
2302 return output_fp_move_double (operands);
2303 return output_move_double (operands);
2305 [(set_attr "type" "fpalu,move,fpstore,store,store,fpload,load,load")
2306 (set_attr "length" "4,8,4,8,16,4,8,16")])

;; DFmode move for TARGET_SOFT_FLOAT: integer register pairs and memory
;; only; always expanded by output_move_double.  (Constraint strings
;; for the operands fall in listing gaps.)
2309 [(set (match_operand:DF 0 "reg_or_nonsymb_mem_operand"
2311 (match_operand:DF 1 "reg_or_0_or_nonsymb_mem_operand"
2313 "(register_operand (operands[0], DFmode)
2314 || reg_or_0_operand (operands[1], DFmode))
2315 && TARGET_SOFT_FLOAT"
2318 return output_move_double (operands);
2320 [(set_attr "type" "move,store,store,load,load")
2321 (set_attr "length" "8,8,16,8,16")])
;; Indexed DFmode loads and stores (flddx/fstdx), four patterns in two
;; mirror-image pairs: reload can create "backwards" (relative to cse)
;; unscaled index addresses, so each address form is matched with the
;; base register in either operand slot.  The C code checks which
;; operand is really the base (frame/stack pointer) and orders the
;; x-form address accordingly.
2324 [(set (match_operand:DF 0 "register_operand" "=fx")
2325 (mem:DF (plus:SI (match_operand:SI 1 "basereg_operand" "r")
2326 (match_operand:SI 2 "register_operand" "r"))))]
2327 "! TARGET_DISABLE_INDEXING && ! TARGET_SOFT_FLOAT"
2330 /* Reload can create backwards (relative to cse) unscaled index
2331 address modes when eliminating registers and possibly for
2332 pseudos that don't get hard registers. Deal with it. */
2333 if (operands[2] == hard_frame_pointer_rtx
2334 || operands[2] == stack_pointer_rtx)
2335 return \"flddx %1(%2),%0\";
2337 return \"flddx %2(%1),%0\";
2339 [(set_attr "type" "fpload")
2340 (set_attr "length" "4")])
;; Same indexed DF load with base/index matched in swapped positions.
2343 [(set (match_operand:DF 0 "register_operand" "=fx")
2344 (mem:DF (plus:SI (match_operand:SI 1 "register_operand" "r")
2345 (match_operand:SI 2 "basereg_operand" "r"))))]
2346 "! TARGET_DISABLE_INDEXING && ! TARGET_SOFT_FLOAT"
2349 /* Reload can create backwards (relative to cse) unscaled index
2350 address modes when eliminating registers and possibly for
2351 pseudos that don't get hard registers. Deal with it. */
2352 if (operands[1] == hard_frame_pointer_rtx
2353 || operands[1] == stack_pointer_rtx)
2354 return \"flddx %2(%1),%0\";
2356 return \"flddx %1(%2),%0\";
2358 [(set_attr "type" "fpload")
2359 (set_attr "length" "4")])
;; Indexed DF store (fstdx), base operand first.
2362 [(set (mem:DF (plus:SI (match_operand:SI 1 "basereg_operand" "r")
2363 (match_operand:SI 2 "register_operand" "r")))
2364 (match_operand:DF 0 "register_operand" "fx"))]
2365 "! TARGET_DISABLE_INDEXING && ! TARGET_SOFT_FLOAT"
2368 /* Reload can create backwards (relative to cse) unscaled index
2369 address modes when eliminating registers and possibly for
2370 pseudos that don't get hard registers. Deal with it. */
2371 if (operands[2] == hard_frame_pointer_rtx
2372 || operands[2] == stack_pointer_rtx)
2373 return \"fstdx %0,%1(%2)\";
2375 return \"fstdx %0,%2(%1)\";
2377 [(set_attr "type" "fpstore")
2378 (set_attr "length" "4")])
;; Same indexed DF store with base/index matched in swapped positions.
2381 [(set (mem:DF (plus:SI (match_operand:SI 1 "register_operand" "r")
2382 (match_operand:SI 2 "basereg_operand" "r")))
2383 (match_operand:DF 0 "register_operand" "fx"))]
2384 "! TARGET_DISABLE_INDEXING && ! TARGET_SOFT_FLOAT"
2387 /* Reload can create backwards (relative to cse) unscaled index
2388 address modes when eliminating registers and possibly for
2389 pseudos that don't get hard registers. Deal with it. */
2390 if (operands[1] == hard_frame_pointer_rtx
2391 || operands[1] == stack_pointer_rtx)
2392 return \"fstdx %0,%2(%1)\";
2394 return \"fstdx %0,%1(%2)\";
2396 [(set_attr "type" "fpstore")
2397 (set_attr "length" "4")])
;; movdi expander: defers to emit_move_sequence, as movdf does above.
2399 (define_expand "movdi"
2400 [(set (match_operand:DI 0 "reg_or_nonsymb_mem_operand" "")
2401 (match_operand:DI 1 "general_operand" ""))]
2405 if (emit_move_sequence (operands, DImode, 0))
;; reload_indi/reload_outdi: DImode analogues of reload_indf/outdf.
;; Note the scratch (operand 2) is SImode here, not DImode.
2409 (define_expand "reload_indi"
2410 [(set (match_operand:DI 0 "register_operand" "=Z")
2411 (match_operand:DI 1 "non_hard_reg_operand" ""))
2412 (clobber (match_operand:SI 2 "register_operand" "=&r"))]
2416 if (emit_move_sequence (operands, DImode, operands[2]))
2419 /* We don't want the clobber emitted, so handle this ourselves. */
2420 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2424 (define_expand "reload_outdi"
2425 [(set (match_operand:DI 0 "general_operand" "")
2426 (match_operand:DI 1 "register_operand" "Z"))
2427 (clobber (match_operand:SI 2 "register_operand" "=&r"))]
2431 if (emit_move_sequence (operands, DImode, operands[2]))
2434 /* We don't want the clobber emitted, so handle this ourselves. */
2435 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
;; high:DI of a constant: uses operand_subword to address the two
;; SImode halves of the destination pair.  For CONST_INT, word 1 gets
;; "ldil L'const" and word 0 gets 0 or -1 by sign; for CONST_DOUBLE the
;; low/high words come from CONST_DOUBLE_LOW/HIGH.  (NOTE(review): the
;; fallthrough for other codes lies in a listing gap -- presumably an
;; abort; confirm against the full file.)
2440 [(set (match_operand:DI 0 "register_operand" "=r")
2441 (high:DI (match_operand 1 "" "")))]
2445 rtx op0 = operands[0];
2446 rtx op1 = operands[1];
2448 if (GET_CODE (op1) == CONST_INT)
2450 operands[0] = operand_subword (op0, 1, 0, DImode);
2451 output_asm_insn (\"ldil L'%1,%0\", operands);
2453 operands[0] = operand_subword (op0, 0, 0, DImode);
2454 if (INTVAL (op1) < 0)
2455 output_asm_insn (\"ldi -1,%0\", operands);
2457 output_asm_insn (\"ldi 0,%0\", operands);
2460 else if (GET_CODE (op1) == CONST_DOUBLE)
2462 operands[0] = operand_subword (op0, 1, 0, DImode);
2463 operands[1] = GEN_INT (CONST_DOUBLE_LOW (op1));
2464 output_asm_insn (\"ldil L'%1,%0\", operands);
2466 operands[0] = operand_subword (op0, 0, 0, DImode);
2467 operands[1] = GEN_INT (CONST_DOUBLE_HIGH (op1));
2468 output_asm_insn (singlemove_string (operands), operands);
2474 [(set_attr "type" "move")
2475 (set_attr "length" "8")])

;; Main DImode move for hardware FP: register pairs, memory, immediates
;; and FP registers.  FP or zero sources use output_fp_move_double.
2478 [(set (match_operand:DI 0 "reg_or_nonsymb_mem_operand"
2479 "=r,o,Q,r,r,r,f,f,*TR")
2480 (match_operand:DI 1 "general_operand"
2481 "rM,r,r,o*R,Q,i,fM,*TR,f"))]
2482 "(register_operand (operands[0], DImode)
2483 || reg_or_0_operand (operands[1], DImode))
2484 && ! TARGET_SOFT_FLOAT"
2487 if (FP_REG_P (operands[0]) || FP_REG_P (operands[1])
2488 || (operands[1] == CONST0_RTX (DImode)))
2489 return output_fp_move_double (operands);
2490 return output_move_double (operands);
2492 [(set_attr "type" "move,store,store,load,load,multi,fpalu,fpload,fpstore")
2493 (set_attr "length" "8,8,16,8,16,16,4,4,4")])

;; DImode move for TARGET_SOFT_FLOAT (constraint strings fall in gaps).
2496 [(set (match_operand:DI 0 "reg_or_nonsymb_mem_operand"
2498 (match_operand:DI 1 "general_operand"
2500 "(register_operand (operands[0], DImode)
2501 || reg_or_0_operand (operands[1], DImode))
2502 && TARGET_SOFT_FLOAT"
2505 return output_move_double (operands);
2507 [(set_attr "type" "move,store,store,load,load,multi")
2508 (set_attr "length" "8,8,16,8,16,16")])

;; lo_sum:DI -- adds the right part (R') of the constant to the low
;; word with ldo (%R prints the second word of a register pair;
;; verify against print_operand in pa.c).  Alternative 1 copies
;; operand 1 first; a 64-bit CONST_DOUBLE is narrowed to its low word
;; because the assembler is not trusted with 64-bit constants.
2511 [(set (match_operand:DI 0 "register_operand" "=r,&r")
2512 (lo_sum:DI (match_operand:DI 1 "register_operand" "0,r")
2513 (match_operand:DI 2 "immediate_operand" "i,i")))]
2517 /* Don't output a 64 bit constant, since we can't trust the assembler to
2518 handle it correctly. */
2519 if (GET_CODE (operands[2]) == CONST_DOUBLE)
2520 operands[2] = GEN_INT (CONST_DOUBLE_LOW (operands[2]));
2521 if (which_alternative == 1)
2522 output_asm_insn (\"copy %1,%0\", operands);
2523 return \"ldo R'%G2(%R1),%R0\";
2525 [(set_attr "type" "move,move")
2526 (set_attr "length" "4,8")])
2528 ;; This pattern forces (set (reg:SF ...) (const_double ...))
2529 ;; to be reloaded by putting the constant into memory when
2530 ;; reg is a floating point register.
2532 ;; For integer registers we use ldil;ldo to set the appropriate
2535 ;; This must come before the movsf pattern, and it must be present
2536 ;; to handle obscure reloading cases.
;; SFmode analogue of the DF CONST_DOUBLE reload pattern above:
;; alternative 0 builds the constant in an integer register via
;; singlemove_string, alternative 1 loads it from memory with fldw.
;; NOTE(review): the fldw template below carries a stray leading space
;; inside the string (\" fldw...\") -- harmless to the assembler but
;; inconsistent with the fldd form earlier.
2538 [(set (match_operand:SF 0 "register_operand" "=?r,f")
2539 (match_operand:SF 1 "" "?F,m"))]
2540 "GET_CODE (operands[1]) == CONST_DOUBLE
2541 && operands[1] != CONST0_RTX (SFmode)
2542 && ! TARGET_SOFT_FLOAT"
2543 "* return (which_alternative == 0 ? singlemove_string (operands)
2544 : \" fldw%F1 %1,%0\");"
2545 [(set_attr "type" "move,fpload")
2546 (set_attr "length" "8,4")])

;; movsf expander: defers to emit_move_sequence, as movdf/movdi do.
2548 (define_expand "movsf"
2549 [(set (match_operand:SF 0 "general_operand" "")
2550 (match_operand:SF 1 "general_operand" ""))]
2554 if (emit_move_sequence (operands, SFmode, 0))
2558 ;; Reloading an SImode or DImode value requires a scratch register if
2559 ;; going into or out of the floating point registers.
;; reload_insf/reload_outsf: SFmode analogues of reload_indf/outdf.
;; Note the scratch (operand 2) is declared SFmode here.
2561 (define_expand "reload_insf"
2562 [(set (match_operand:SF 0 "register_operand" "=Z")
2563 (match_operand:SF 1 "non_hard_reg_operand" ""))
2564 (clobber (match_operand:SF 2 "register_operand" "=&r"))]
2568 if (emit_move_sequence (operands, SFmode, operands[2]))
2571 /* We don't want the clobber emitted, so handle this ourselves. */
2572 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2576 (define_expand "reload_outsf"
2577 [(set (match_operand:SF 0 "non_hard_reg_operand" "")
2578 (match_operand:SF 1 "register_operand" "Z"))
2579 (clobber (match_operand:SF 2 "register_operand" "=&r"))]
2583 if (emit_move_sequence (operands, SFmode, operands[2]))
2586 /* We don't want the clobber emitted, so handle this ourselves. */
2587 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));

;; Main SFmode move for hardware FP; single-word moves, so all
;; alternatives are length 4.  (Output template lies in a listing gap.)
2592 [(set (match_operand:SF 0 "reg_or_nonsymb_mem_operand"
2594 (match_operand:SF 1 "reg_or_0_or_nonsymb_mem_operand"
2595 "fG,rG,RQ,RQ,f,rG"))]
2596 "(register_operand (operands[0], SFmode)
2597 || reg_or_0_operand (operands[1], SFmode))
2598 && ! TARGET_SOFT_FLOAT"
2606 [(set_attr "type" "fpalu,move,fpload,load,fpstore,store")
2607 (set_attr "pa_combine_type" "addmove")
2608 (set_attr "length" "4,4,4,4,4,4")])

;; SFmode move for TARGET_SOFT_FLOAT: integer registers/memory only.
2611 [(set (match_operand:SF 0 "reg_or_nonsymb_mem_operand"
2613 (match_operand:SF 1 "reg_or_0_or_nonsymb_mem_operand"
2615 "(register_operand (operands[0], SFmode)
2616 || reg_or_0_operand (operands[1], SFmode))
2617 && TARGET_SOFT_FLOAT"
2622 [(set_attr "type" "move,load,store")
2623 (set_attr "pa_combine_type" "addmove")
2624 (set_attr "length" "4,4,4")])
;; Indexed SFmode loads and stores (fldwx/fstwx): same four-way
;; base/index mirror structure as the DFmode flddx/fstdx patterns
;; above, for the same reload "backwards index" reason.
2627 [(set (match_operand:SF 0 "register_operand" "=fx")
2628 (mem:SF (plus:SI (match_operand:SI 1 "basereg_operand" "r")
2629 (match_operand:SI 2 "register_operand" "r"))))]
2630 "! TARGET_DISABLE_INDEXING && ! TARGET_SOFT_FLOAT"
2633 /* Reload can create backwards (relative to cse) unscaled index
2634 address modes when eliminating registers and possibly for
2635 pseudos that don't get hard registers. Deal with it. */
2636 if (operands[2] == hard_frame_pointer_rtx
2637 || operands[2] == stack_pointer_rtx)
2638 return \"fldwx %1(%2),%0\";
2640 return \"fldwx %2(%1),%0\";
2642 [(set_attr "type" "fpload")
2643 (set_attr "length" "4")])
;; Same indexed SF load with base/index matched in swapped positions.
2646 [(set (match_operand:SF 0 "register_operand" "=fx")
2647 (mem:SF (plus:SI (match_operand:SI 1 "register_operand" "r")
2648 (match_operand:SI 2 "basereg_operand" "r"))))]
2649 "! TARGET_DISABLE_INDEXING && ! TARGET_SOFT_FLOAT"
2652 /* Reload can create backwards (relative to cse) unscaled index
2653 address modes when eliminating registers and possibly for
2654 pseudos that don't get hard registers. Deal with it. */
2655 if (operands[1] == hard_frame_pointer_rtx
2656 || operands[1] == stack_pointer_rtx)
2657 return \"fldwx %2(%1),%0\";
2659 return \"fldwx %1(%2),%0\";
2661 [(set_attr "type" "fpload")
2662 (set_attr "length" "4")])
;; Indexed SF store (fstwx), base operand first.
2665 [(set (mem:SF (plus:SI (match_operand:SI 1 "basereg_operand" "r")
2666 (match_operand:SI 2 "register_operand" "r")))
2667 (match_operand:SF 0 "register_operand" "fx"))]
2668 "! TARGET_DISABLE_INDEXING && ! TARGET_SOFT_FLOAT"
2671 /* Reload can create backwards (relative to cse) unscaled index
2672 address modes when eliminating registers and possibly for
2673 pseudos that don't get hard registers. Deal with it. */
2674 if (operands[2] == hard_frame_pointer_rtx
2675 || operands[2] == stack_pointer_rtx)
2676 return \"fstwx %0,%1(%2)\";
2678 return \"fstwx %0,%2(%1)\";
2680 [(set_attr "type" "fpstore")
2681 (set_attr "length" "4")])
;; Same indexed SF store with base/index matched in swapped positions.
2684 [(set (mem:SF (plus:SI (match_operand:SI 1 "register_operand" "r")
2685 (match_operand:SI 2 "basereg_operand" "r")))
2686 (match_operand:SF 0 "register_operand" "fx"))]
2687 "! TARGET_DISABLE_INDEXING && ! TARGET_SOFT_FLOAT"
2690 /* Reload can create backwards (relative to cse) unscaled index
2691 address modes when eliminating registers and possibly for
2692 pseudos that don't get hard registers. Deal with it. */
2693 if (operands[1] == hard_frame_pointer_rtx
2694 || operands[1] == stack_pointer_rtx)
2695 return \"fstwx %0,%2(%1)\";
2697 return \"fstwx %0,%1(%2)\";
2699 [(set_attr "type" "fpstore")
2700 (set_attr "length" "4")])
2703 ;;- zero extension instructions
2704 ;; We have define_expand for zero extension patterns to make sure the
2705 ;; operands get loaded into registers. The define_insns accept
2706 ;; memory operands. This gives us better overall code than just
2707 ;; having a pattern that does or does not accept memory operands.
;; Zero-extension expanders restrict operands to registers; the
;; anonymous insns that follow also accept memory (see the section
;; comment above), excluding CONST_INT sources.  Templates and the
;; zero_extend wrappers fall in listing gaps.
2709 (define_expand "zero_extendhisi2"
2710 [(set (match_operand:SI 0 "register_operand" "")
2712 (match_operand:HI 1 "register_operand" "")))]
;; HI->SI zero-extend insn: register alternative is a shift-type op,
;; memory alternative is a load.
2717 [(set (match_operand:SI 0 "register_operand" "=r,r")
2719 (match_operand:HI 1 "move_operand" "r,RQ")))]
2720 "GET_CODE (operands[1]) != CONST_INT"
2724 [(set_attr "type" "shift,load")
2725 (set_attr "length" "4,4")])
2727 (define_expand "zero_extendqihi2"
2728 [(set (match_operand:HI 0 "register_operand" "")
2730 (match_operand:QI 1 "register_operand" "")))]
;; QI->HI zero-extend insn.
2735 [(set (match_operand:HI 0 "register_operand" "=r,r")
2737 (match_operand:QI 1 "move_operand" "r,RQ")))]
2738 "GET_CODE (operands[1]) != CONST_INT"
2742 [(set_attr "type" "shift,load")
2743 (set_attr "length" "4,4")])
2745 (define_expand "zero_extendqisi2"
2746 [(set (match_operand:SI 0 "register_operand" "")
2748 (match_operand:QI 1 "register_operand" "")))]
;; QI->SI zero-extend insn.
2753 [(set (match_operand:SI 0 "register_operand" "=r,r")
2755 (match_operand:QI 1 "move_operand" "r,RQ")))]
2756 "GET_CODE (operands[1]) != CONST_INT"
2760 [(set_attr "type" "shift,load")
2761 (set_attr "length" "4,4")])
2763 ;;- sign extension instructions
;; Sign extensions are register-only, single 4-byte shift-type insns
;; (output templates fall in listing gaps).
2765 (define_insn "extendhisi2"
2766 [(set (match_operand:SI 0 "register_operand" "=r")
2767 (sign_extend:SI (match_operand:HI 1 "register_operand" "r")))]
2770 [(set_attr "type" "shift")
2771 (set_attr "length" "4")])
2773 (define_insn "extendqihi2"
2774 [(set (match_operand:HI 0 "register_operand" "=r")
2775 (sign_extend:HI (match_operand:QI 1 "register_operand" "r")))]
2778 [(set_attr "type" "shift")
2779 (set_attr "length" "4")])
2781 (define_insn "extendqisi2"
2782 [(set (match_operand:SI 0 "register_operand" "=r")
2783 (sign_extend:SI (match_operand:QI 1 "register_operand" "r")))]
2786 [(set_attr "type" "shift")
2787 (set_attr "length" "4")])
2789 ;; Conversions between float and double.
;; Single <-> double precision conversions via the fcnvff instruction;
;; the ",sgl,dbl" / ",dbl,sgl" completers select source/dest formats.
2791 (define_insn "extendsfdf2"
2792 [(set (match_operand:DF 0 "register_operand" "=f")
2794 (match_operand:SF 1 "register_operand" "f")))]
2795 "! TARGET_SOFT_FLOAT"
2796 "fcnvff,sgl,dbl %1,%0"
2797 [(set_attr "type" "fpalu")
2798 (set_attr "length" "4")])
2800 (define_insn "truncdfsf2"
2801 [(set (match_operand:SF 0 "register_operand" "=f")
2803 (match_operand:DF 1 "register_operand" "f")))]
2804 "! TARGET_SOFT_FLOAT"
2805 "fcnvff,dbl,sgl %1,%0"
2806 [(set_attr "type" "fpalu")
2807 (set_attr "length" "4")])
2809 ;; Conversion between fixed point and floating point.
2810 ;; Note that among the fix-to-float insns
2811 ;; the ones that start with SImode come first.
2812 ;; That is so that an operand that is a CONST_INT
2813 ;; (and therefore lacks a specific machine mode),
2814 ;; will be recognized as SImode (which is always valid)
2815 ;; rather than as QImode or HImode.
2817 ;; This pattern forces (set (reg:SF ...) (float:SF (const_int ...)))
2818 ;; to be reloaded by putting the constant into memory.
2819 ;; It must come before the more general floatsisf2 pattern.
;; float:SF of a CONST_INT forced to memory ("m" constraint on a
;; const_int_operand): load with fldw, then convert in place with
;; fcnvxf.  Must precede the general floatsisf2 pattern (see comment
;; above).
2821 [(set (match_operand:SF 0 "register_operand" "=f")
2822 (float:SF (match_operand:SI 1 "const_int_operand" "m")))]
2823 "! TARGET_SOFT_FLOAT"
2824 "fldw%F1 %1,%0\;fcnvxf,sgl,sgl %0,%0"
2825 [(set_attr "type" "fpalu")
2826 (set_attr "length" "8")])
;; General signed SI -> SF conversion.
2828 (define_insn "floatsisf2"
2829 [(set (match_operand:SF 0 "register_operand" "=f")
2830 (float:SF (match_operand:SI 1 "register_operand" "f")))]
2831 "! TARGET_SOFT_FLOAT"
2832 "fcnvxf,sgl,sgl %1,%0"
2833 [(set_attr "type" "fpalu")
2834 (set_attr "length" "4")])
2836 ;; This pattern forces (set (reg:DF ...) (float:DF (const_int ...)))
2837 ;; to be reloaded by putting the constant into memory.
2838 ;; It must come before the more general floatsidf2 pattern.
2840 [(set (match_operand:DF 0 "register_operand" "=f")
2841 (float:DF (match_operand:SI 1 "const_int_operand" "m")))]
2842 "! TARGET_SOFT_FLOAT"
2843 "fldw%F1 %1,%0\;fcnvxf,sgl,dbl %0,%0"
2844 [(set_attr "type" "fpalu")
2845 (set_attr "length" "8")])
;; General signed SI -> DF conversion.
2847 (define_insn "floatsidf2"
2848 [(set (match_operand:DF 0 "register_operand" "=f")
2849 (float:DF (match_operand:SI 1 "register_operand" "f")))]
2850 "! TARGET_SOFT_FLOAT"
2851 "fcnvxf,sgl,dbl %1,%0"
2852 [(set_attr "type" "fpalu")
2853 (set_attr "length" "4")])
;; Unsigned SI -> SF/DF: build a DImode value whose low subword is the
;; operand (the high-subword setting at line 2859 falls in a listing
;; gap -- presumably zero; confirm against the full file), then use the
;; signed DI conversion.  Requires PA 1.1 FP.
2855 (define_expand "floatunssisf2"
2856 [(set (subreg:SI (match_dup 2) 1)
2857 (match_operand:SI 1 "register_operand" ""))
2858 (set (subreg:SI (match_dup 2) 0)
2860 (set (match_operand:SF 0 "register_operand" "")
2861 (float:SF (match_dup 2)))]
2862 "TARGET_PA_11 && ! TARGET_SOFT_FLOAT"
2863 "operands[2] = gen_reg_rtx (DImode);")
2865 (define_expand "floatunssidf2"
2866 [(set (subreg:SI (match_dup 2) 1)
2867 (match_operand:SI 1 "register_operand" ""))
2868 (set (subreg:SI (match_dup 2) 0)
2870 (set (match_operand:DF 0 "register_operand" "")
2871 (float:DF (match_dup 2)))]
2872 "TARGET_PA_11 && ! TARGET_SOFT_FLOAT"
2873 "operands[2] = gen_reg_rtx (DImode);")
;; Signed DI -> SF/DF conversions (PA 1.1 only).
2875 (define_insn "floatdisf2"
2876 [(set (match_operand:SF 0 "register_operand" "=f")
2877 (float:SF (match_operand:DI 1 "register_operand" "f")))]
2878 "TARGET_PA_11 && ! TARGET_SOFT_FLOAT"
2879 "fcnvxf,dbl,sgl %1,%0"
2880 [(set_attr "type" "fpalu")
2881 (set_attr "length" "4")])
2883 (define_insn "floatdidf2"
2884 [(set (match_operand:DF 0 "register_operand" "=f")
2885 (float:DF (match_operand:DI 1 "register_operand" "f")))]
2886 "TARGET_PA_11 && ! TARGET_SOFT_FLOAT"
2887 "fcnvxf,dbl,dbl %1,%0"
2888 [(set_attr "type" "fpalu")
2889 (set_attr "length" "4")])
2891 ;; Convert a float to an actual integer.
2892 ;; Truncation is performed as part of the conversion.
;; Float -> integer conversions with truncation, via fcnvfxt.  Operands
;; live in FP registers ("f"); the DImode variants need PA 1.1.
2894 (define_insn "fix_truncsfsi2"
2895 [(set (match_operand:SI 0 "register_operand" "=f")
2896 (fix:SI (fix:SF (match_operand:SF 1 "register_operand" "f"))))]
2897 "! TARGET_SOFT_FLOAT"
2898 "fcnvfxt,sgl,sgl %1,%0"
2899 [(set_attr "type" "fpalu")
2900 (set_attr "length" "4")])
2902 (define_insn "fix_truncdfsi2"
2903 [(set (match_operand:SI 0 "register_operand" "=f")
2904 (fix:SI (fix:DF (match_operand:DF 1 "register_operand" "f"))))]
2905 "! TARGET_SOFT_FLOAT"
2906 "fcnvfxt,dbl,sgl %1,%0"
2907 [(set_attr "type" "fpalu")
2908 (set_attr "length" "4")])
2910 (define_insn "fix_truncsfdi2"
2911 [(set (match_operand:DI 0 "register_operand" "=f")
2912 (fix:DI (fix:SF (match_operand:SF 1 "register_operand" "f"))))]
2913 "TARGET_PA_11 && ! TARGET_SOFT_FLOAT"
2914 "fcnvfxt,sgl,dbl %1,%0"
2915 [(set_attr "type" "fpalu")
2916 (set_attr "length" "4")])
2918 (define_insn "fix_truncdfdi2"
2919 [(set (match_operand:DI 0 "register_operand" "=f")
2920 (fix:DI (fix:DF (match_operand:DF 1 "register_operand" "f"))))]
2921 "TARGET_PA_11 && ! TARGET_SOFT_FLOAT"
2922 "fcnvfxt,dbl,dbl %1,%0"
2923 [(set_attr "type" "fpalu")
2924 (set_attr "length" "4")])
2926 ;;- arithmetic instructions
;; Double-word add.  The insn emits a two-instruction sequence: add the
;; low words (%R prints the second register of the pair), then add the
;; high words with carry (addc) -- or subtract-with-borrow (subb) of
;; zero for a negative 11-bit immediate, which propagates the borrow.
2928 (define_expand "adddi3"
2929 [(set (match_operand:DI 0 "register_operand" "")
2930 (plus:DI (match_operand:DI 1 "register_operand" "")
2931 (match_operand:DI 2 "arith11_operand" "")))]
2936 [(set (match_operand:DI 0 "register_operand" "=r")
2937 (plus:DI (match_operand:DI 1 "register_operand" "%r")
2938 (match_operand:DI 2 "arith11_operand" "rI")))]
2942 if (GET_CODE (operands[2]) == CONST_INT)
2944 if (INTVAL (operands[2]) >= 0)
2945 return \"addi %2,%R1,%R0\;addc %1,0,%0\";
2947 return \"addi %2,%R1,%R0\;subb %1,0,%0\";
2950 return \"add %R2,%R1,%R0\;addc %2,%1,%0\";
2952 [(set_attr "type" "binary")
2953 (set_attr "length" "8")])
;; (~op1) + op2 in one 4-byte insn (output template falls in a listing
;; gap -- presumably PA's uaddcm; confirm against the full file).
2956 [(set (match_operand:SI 0 "register_operand" "=r")
2957 (plus:SI (not:SI (match_operand:SI 1 "register_operand" "r"))
2958 (match_operand:SI 2 "register_operand" "r")))]
2961 [(set_attr "type" "binary")
2962 (set_attr "length" "4")])
2964 ;; define_splits to optimize cases of adding a constant integer
2965 ;; to a register when the constant does not fit in 14 bits.
;; Split 1: constant too big for a move but whose halved value fits 14
;; bits -- split into two adds: first add "rest" into the scratch, then
;; add the remaining 14-bit "low" part.
2967 [(set (match_operand:SI 0 "register_operand" "")
2968 (plus:SI (match_operand:SI 1 "register_operand" "")
2969 (match_operand:SI 2 "const_int_operand" "")))
2970 (clobber (match_operand:SI 4 "register_operand" ""))]
2971 "! cint_ok_for_move (INTVAL (operands[2]))
2972 && VAL_14_BITS_P (INTVAL (operands[2]) >> 1)"
2973 [(set (match_dup 4) (plus:SI (match_dup 1) (match_dup 2)))
2974 (set (match_dup 0) (plus:SI (match_dup 4) (match_dup 3)))]
2977 int val = INTVAL (operands[2]);
2978 int low = (val < 0) ? -0x2000 : 0x1fff;
2979 int rest = val - low;
2981 operands[2] = GEN_INT (rest);
2982 operands[3] = GEN_INT (low);
;; Split 2: load constant/2, /4 or /8 into the scratch and use a
;; shift-and-add (the plus-of-mult form below); failing that, negate
;; the constant and subtract.
2986 [(set (match_operand:SI 0 "register_operand" "")
2987 (plus:SI (match_operand:SI 1 "register_operand" "")
2988 (match_operand:SI 2 "const_int_operand" "")))
2989 (clobber (match_operand:SI 4 "register_operand" ""))]
2990 "! cint_ok_for_move (INTVAL (operands[2]))"
2991 [(set (match_dup 4) (match_dup 2))
2992 (set (match_dup 0) (plus:SI (mult:SI (match_dup 4) (match_dup 3))
2996 HOST_WIDE_INT intval = INTVAL (operands[2]);
2998 /* Try dividing the constant by 2, then 4, and finally 8 to see
2999 if we can get a constant which can be loaded into a register
3000 in a single instruction (cint_ok_for_move).
3002 If that fails, try to negate the constant and subtract it
3003 from our input operand. */
3004 if (intval % 2 == 0 && cint_ok_for_move (intval / 2))
3006 operands[2] = GEN_INT (intval / 2);
3007 operands[3] = GEN_INT (2);
3009 else if (intval % 4 == 0 && cint_ok_for_move (intval / 4))
3011 operands[2] = GEN_INT (intval / 4);
3012 operands[3] = GEN_INT (4);
3014 else if (intval % 8 == 0 && cint_ok_for_move (intval / 8))
3016 operands[2] = GEN_INT (intval / 8);
3017 operands[3] = GEN_INT (8);
3019 else if (cint_ok_for_move (-intval))
3021 emit_insn (gen_rtx_SET (VOIDmode, operands[4], GEN_INT (-intval)));
3022 emit_insn (gen_subsi3 (operands[0], operands[1], operands[4]));
;; SImode add: register-register or register + 14-bit immediate (J).
;; Output templates fall in a listing gap.
3029 (define_insn "addsi3"
3030 [(set (match_operand:SI 0 "register_operand" "=r,r")
3031 (plus:SI (match_operand:SI 1 "register_operand" "%r,r")
3032 (match_operand:SI 2 "arith_operand" "r,J")))]
3037 [(set_attr "type" "binary,binary")
3038 (set_attr "pa_combine_type" "addmove")
3039 (set_attr "length" "4,4")])
3041 ;; Disgusting kludge to work around reload bugs with frame pointer
3042 ;; elimination. Similar to other magic reload patterns in the
3043 ;; indexed memory operations.
;; Matches only during reload; earlyclobber destination so the ldo/addl
;; pair can use it as a temporary without clobbering the inputs.
3045 [(set (match_operand:SI 0 "register_operand" "=&r")
3046 (plus:SI (plus:SI (match_operand:SI 1 "register_operand" "%r")
3047 (match_operand:SI 2 "register_operand" "r"))
3048 (match_operand:SI 3 "const_int_operand" "rL")))]
3049 "reload_in_progress"
3052 if (GET_CODE (operands[3]) == CONST_INT)
3053 return \"ldo %3(%2),%0\;addl %1,%0,%0\";
3055 return \"addl %3,%2,%0\;addl %1,%0,%0\";
3057 [(set_attr "type" "binary")
3058 (set_attr "length" "8")])
;; Double-word subtract: low words with sub, then high words with
;; subtract-with-borrow (subb).
3060 (define_expand "subdi3"
3061 [(set (match_operand:DI 0 "register_operand" "")
3062 (minus:DI (match_operand:DI 1 "register_operand" "")
3063 (match_operand:DI 2 "register_operand" "")))]
3068 [(set (match_operand:DI 0 "register_operand" "=r")
3069 (minus:DI (match_operand:DI 1 "register_operand" "r")
3070 (match_operand:DI 2 "register_operand" "r")))]
3072 "sub %R1,%R2,%R0\;subb %1,%2,%0"
3073 [(set_attr "type" "binary")
3074 (set_attr "length" "8")])
;; SImode subtract; alternative 1 allows an 11-bit immediate minuend
;; (templates fall in a listing gap).
3076 (define_insn "subsi3"
3077 [(set (match_operand:SI 0 "register_operand" "=r,r")
3078 (minus:SI (match_operand:SI 1 "arith11_operand" "r,I")
3079 (match_operand:SI 2 "register_operand" "r,r")))]
3084 [(set_attr "type" "binary,binary")
3085 (set_attr "length" "4,4")])
3087 ;; Clobbering a "register_operand" instead of a match_scratch
3088 ;; in operand3 of millicode calls avoids spilling %r1 and
3089 ;; produces better code.
3091 ;; The mulsi3 insns set up registers for the millicode call.
;; mulsi3: millicode multiply calling convention -- arguments in r26
;; and r25, result in r29, return pointer in r31, plus a pseudo scratch
;; (operand 3).  On PA 1.1 with FP regs available it instead emits
;; umulsidi3 (FP xmpyu-style multiply) and keeps the low SImode
;; subword of the DImode product.
3092 (define_expand "mulsi3"
3093 [(set (reg:SI 26) (match_operand:SI 1 "move_operand" ""))
3094 (set (reg:SI 25) (match_operand:SI 2 "move_operand" ""))
3095 (parallel [(set (reg:SI 29) (mult:SI (reg:SI 26) (reg:SI 25)))
3096 (clobber (match_dup 3))
3097 (clobber (reg:SI 26))
3098 (clobber (reg:SI 25))
3099 (clobber (reg:SI 31))])
3100 (set (match_operand:SI 0 "general_operand" "") (reg:SI 29))]
3104 if (TARGET_PA_11 && ! TARGET_DISABLE_FPREGS && ! TARGET_SOFT_FLOAT)
3106 rtx scratch = gen_reg_rtx (DImode);
3107 operands[1] = force_reg (SImode, operands[1]);
3108 operands[2] = force_reg (SImode, operands[2]);
3109 emit_insn (gen_umulsidi3 (scratch, operands[1], operands[2]));
3110 emit_insn (gen_rtx_SET (VOIDmode,
3112 gen_rtx_SUBREG (SImode, scratch, 1)));
3115 operands[3] = gen_reg_rtx (SImode);
;; 32x32 -> 64 unsigned multiply in the FP unit (PA 1.1 only; the
;; output template falls in a listing gap).
3118 (define_insn "umulsidi3"
3119 [(set (match_operand:DI 0 "nonimmediate_operand" "=f")
3120 (mult:DI (zero_extend:DI (match_operand:SI 1 "nonimmediate_operand" "f"))
3121 (zero_extend:DI (match_operand:SI 2 "nonimmediate_operand" "f"))))]
3122 "TARGET_PA_11 && ! TARGET_DISABLE_FPREGS && ! TARGET_SOFT_FLOAT"
3124 [(set_attr "type" "fpmuldbl")
3125 (set_attr "length" "4")])
;; Variant with a 32-bit-valued DImode constant second operand.
3128 [(set (match_operand:DI 0 "nonimmediate_operand" "=f")
3129 (mult:DI (zero_extend:DI (match_operand:SI 1 "nonimmediate_operand" "f"))
3130 (match_operand:DI 2 "uint32_operand" "f")))]
3131 "TARGET_PA_11 && ! TARGET_DISABLE_FPREGS && ! TARGET_SOFT_FLOAT"
3133 [(set_attr "type" "fpmuldbl")
3134 (set_attr "length" "4")])
;; The millicode multiply call itself; length varies with whether the
;; millicode target is within branch reach, and with the PIC /
;; portable-runtime / space-register configuration.
3137 [(set (reg:SI 29) (mult:SI (reg:SI 26) (reg:SI 25)))
3138 (clobber (match_operand:SI 0 "register_operand" "=a"))
3139 (clobber (reg:SI 26))
3140 (clobber (reg:SI 25))
3141 (clobber (reg:SI 31))]
3143 "* return output_mul_insn (0, insn);"
3144 [(set_attr "type" "milli")
3145 (set (attr "length")
3147 ;; Target (or stub) within reach
3148 (and (lt (plus (symbol_ref "total_code_bytes") (pc))
3150 (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
3155 (ne (symbol_ref "TARGET_NO_SPACE_REGS || TARGET_FAST_INDIRECT_CALLS")
3159 ;; Out of reach, but not PIC or PORTABLE_RUNTIME
3160 ;; same as NO_SPACE_REGS code
3161 (and (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
3163 (eq (symbol_ref "flag_pic")
3167 ;; Out of range and either PIC or PORTABLE_RUNTIME
3170 ;;; Division and mod.
;; Signed divide via millicode: arguments in r26/r25, result in r29,
;; return pointer r31, plus a pseudo scratch (operand 3).  Small
;; constant divisors are handled inline by emit_hpdiv_const.  The
;; millicode insn's length depends on call reach and the PIC /
;; portable-runtime / space-register configuration, as for mulsi3.
3171 (define_expand "divsi3"
3172 [(set (reg:SI 26) (match_operand:SI 1 "move_operand" ""))
3173 (set (reg:SI 25) (match_operand:SI 2 "move_operand" ""))
3174 (parallel [(set (reg:SI 29) (div:SI (reg:SI 26) (reg:SI 25)))
3175 (clobber (match_dup 3))
3176 (clobber (reg:SI 26))
3177 (clobber (reg:SI 25))
3178 (clobber (reg:SI 31))])
3179 (set (match_operand:SI 0 "general_operand" "") (reg:SI 29))]
3183 operands[3] = gen_reg_rtx (SImode);
3184 if (GET_CODE (operands[2]) == CONST_INT && emit_hpdiv_const (operands, 0))
3190 (div:SI (reg:SI 26) (match_operand:SI 0 "div_operand" "")))
3191 (clobber (match_operand:SI 1 "register_operand" "=a"))
3192 (clobber (reg:SI 26))
3193 (clobber (reg:SI 25))
3194 (clobber (reg:SI 31))]
3197 return output_div_insn (operands, 0, insn);"
3198 [(set_attr "type" "milli")
3199 (set (attr "length")
3201 ;; Target (or stub) within reach
3202 (and (lt (plus (symbol_ref "total_code_bytes") (pc))
3204 (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
3209 (ne (symbol_ref "TARGET_NO_SPACE_REGS || TARGET_FAST_INDIRECT_CALLS")
3213 ;; Out of reach, but not PIC or PORTABLE_RUNTIME
3214 ;; same as NO_SPACE_REGS code
3215 (and (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
3217 (eq (symbol_ref "flag_pic")
3221 ;; Out of range and either PIC or PORTABLE_RUNTIME
;; Unsigned divide via millicode; identical structure to divsi3 above
;; (emit_hpdiv_const/output_div_insn called with 1 = unsigned).
3224 (define_expand "udivsi3"
3225 [(set (reg:SI 26) (match_operand:SI 1 "move_operand" ""))
3226 (set (reg:SI 25) (match_operand:SI 2 "move_operand" ""))
3227 (parallel [(set (reg:SI 29) (udiv:SI (reg:SI 26) (reg:SI 25)))
3228 (clobber (match_dup 3))
3229 (clobber (reg:SI 26))
3230 (clobber (reg:SI 25))
3231 (clobber (reg:SI 31))])
3232 (set (match_operand:SI 0 "general_operand" "") (reg:SI 29))]
3236 operands[3] = gen_reg_rtx (SImode);
3237 if (GET_CODE (operands[2]) == CONST_INT && emit_hpdiv_const (operands, 1))
3243 (udiv:SI (reg:SI 26) (match_operand:SI 0 "div_operand" "")))
3244 (clobber (match_operand:SI 1 "register_operand" "=a"))
3245 (clobber (reg:SI 26))
3246 (clobber (reg:SI 25))
3247 (clobber (reg:SI 31))]
3250 return output_div_insn (operands, 1, insn);"
3251 [(set_attr "type" "milli")
3252 (set (attr "length")
3254 ;; Target (or stub) within reach
3255 (and (lt (plus (symbol_ref "total_code_bytes") (pc))
3257 (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
3262 (ne (symbol_ref "TARGET_NO_SPACE_REGS || TARGET_FAST_INDIRECT_CALLS")
3266 ;; Out of reach, but not PIC or PORTABLE_RUNTIME
3267 ;; same as NO_SPACE_REGS code
3268 (and (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
3270 (eq (symbol_ref "flag_pic")
3274 ;; Out of range and either PIC or PORTABLE_RUNTIME
;; Signed remainder via millicode (output_mod_insn with 0 = signed);
;; same register convention and length computation as divsi3.
3277 (define_expand "modsi3"
3278 [(set (reg:SI 26) (match_operand:SI 1 "move_operand" ""))
3279 (set (reg:SI 25) (match_operand:SI 2 "move_operand" ""))
3280 (parallel [(set (reg:SI 29) (mod:SI (reg:SI 26) (reg:SI 25)))
3281 (clobber (match_dup 3))
3282 (clobber (reg:SI 26))
3283 (clobber (reg:SI 25))
3284 (clobber (reg:SI 31))])
3285 (set (match_operand:SI 0 "general_operand" "") (reg:SI 29))]
3289 operands[3] = gen_reg_rtx (SImode);
3293 [(set (reg:SI 29) (mod:SI (reg:SI 26) (reg:SI 25)))
3294 (clobber (match_operand:SI 0 "register_operand" "=a"))
3295 (clobber (reg:SI 26))
3296 (clobber (reg:SI 25))
3297 (clobber (reg:SI 31))]
3300 return output_mod_insn (0, insn);"
3301 [(set_attr "type" "milli")
3302 (set (attr "length")
3304 ;; Target (or stub) within reach
3305 (and (lt (plus (symbol_ref "total_code_bytes") (pc))
3307 (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
3312 (ne (symbol_ref "TARGET_NO_SPACE_REGS || TARGET_FAST_INDIRECT_CALLS")
3316 ;; Out of reach, but not PIC or PORTABLE_RUNTIME
3317 ;; same as NO_SPACE_REGS code
3318 (and (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
3320 (eq (symbol_ref "flag_pic")
3324 ;; Out of range and either PIC or PORTABLE_RUNTIME
;; Unsigned remainder via millicode (output_mod_insn with 1 =
;; unsigned); same register convention and length computation.
3327 (define_expand "umodsi3"
3328 [(set (reg:SI 26) (match_operand:SI 1 "move_operand" ""))
3329 (set (reg:SI 25) (match_operand:SI 2 "move_operand" ""))
3330 (parallel [(set (reg:SI 29) (umod:SI (reg:SI 26) (reg:SI 25)))
3331 (clobber (match_dup 3))
3332 (clobber (reg:SI 26))
3333 (clobber (reg:SI 25))
3334 (clobber (reg:SI 31))])
3335 (set (match_operand:SI 0 "general_operand" "") (reg:SI 29))]
3339 operands[3] = gen_reg_rtx (SImode);
3343 [(set (reg:SI 29) (umod:SI (reg:SI 26) (reg:SI 25)))
3344 (clobber (match_operand:SI 0 "register_operand" "=a"))
3345 (clobber (reg:SI 26))
3346 (clobber (reg:SI 25))
3347 (clobber (reg:SI 31))]
3350 return output_mod_insn (1, insn);"
3351 [(set_attr "type" "milli")
3352 (set (attr "length")
3354 ;; Target (or stub) within reach
3355 (and (lt (plus (symbol_ref "total_code_bytes") (pc))
3357 (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
3362 (ne (symbol_ref "TARGET_NO_SPACE_REGS || TARGET_FAST_INDIRECT_CALLS")
3366 ;; Out of reach, but not PIC or PORTABLE_RUNTIME
3367 ;; same as NO_SPACE_REGS code
3368 (and (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
3370 (eq (symbol_ref "flag_pic")
3374 ;; Out of range and either PIC or PORTABLE_RUNTIME
3377 ;;- and instructions
3378 ;; We define DImode `and` so with DImode `not` we can get
3379 ;; DImode `andn`. Other combinations are possible.
;; Double-word AND: only register operands are handled here; anything
;; else is left for the middle end to do word-at-a-time.  The insn is
;; two 4-byte "and" instructions, one per word of the pair.
3381 (define_expand "anddi3"
3382 [(set (match_operand:DI 0 "register_operand" "")
3383 (and:DI (match_operand:DI 1 "arith_double_operand" "")
3384 (match_operand:DI 2 "arith_double_operand" "")))]
3388 if (! register_operand (operands[1], DImode)
3389 || ! register_operand (operands[2], DImode))
3390 /* Let GCC break this into word-at-a-time operations. */
3395 [(set (match_operand:DI 0 "register_operand" "=r")
3396 (and:DI (match_operand:DI 1 "register_operand" "%r")
3397 (match_operand:DI 2 "register_operand" "r")))]
3399 "and %1,%2,%0\;and %R1,%R2,%R0"
3400 [(set_attr "type" "binary")
3401 (set_attr "length" "8")])
3403 ; The ? for op1 makes reload prefer zdepi instead of loading a huge
3404 ; constant with ldil;ldo.
3405 (define_insn "andsi3"
3406 [(set (match_operand:SI 0 "register_operand" "=r,r")
3407 (and:SI (match_operand:SI 1 "register_operand" "%?r,0")
3408 (match_operand:SI 2 "and_operand" "rO,P")))]
3410 "* return output_and (operands); "
3411 [(set_attr "type" "binary,shift")
3412 (set_attr "length" "4,4")])
3415 [(set (match_operand:DI 0 "register_operand" "=r")
3416 (and:DI (not:DI (match_operand:DI 1 "register_operand" "r"))
3417 (match_operand:DI 2 "register_operand" "r")))]
3419 "andcm %2,%1,%0\;andcm %R2,%R1,%R0"
3420 [(set_attr "type" "binary")
3421 (set_attr "length" "8")])
3424 [(set (match_operand:SI 0 "register_operand" "=r")
3425 (and:SI (not:SI (match_operand:SI 1 "register_operand" "r"))
3426 (match_operand:SI 2 "register_operand" "r")))]
3429 [(set_attr "type" "binary")
3430 (set_attr "length" "4")])
;; iordi3: 64-bit inclusive OR.  Non-register operands are handed back
;; to GCC to split into word-at-a-time operations.
3432 (define_expand "iordi3"
3433 [(set (match_operand:DI 0 "register_operand" "")
3434 (ior:DI (match_operand:DI 1 "arith_double_operand" "")
3435 (match_operand:DI 2 "arith_double_operand" "")))]
3439 if (! register_operand (operands[1], DImode)
3440 || ! register_operand (operands[2], DImode))
3441 /* Let GCC break this into word-at-a-time operations. */
;; Register-register DImode OR: two 32-bit `or` instructions.
3446 [(set (match_operand:DI 0 "register_operand" "=r")
3447 (ior:DI (match_operand:DI 1 "register_operand" "%r")
3448 (match_operand:DI 2 "register_operand" "r")))]
3450 "or %1,%2,%0\;or %R1,%R2,%R0"
3451 [(set_attr "type" "binary")
3452 (set_attr "length" "8")])
3454 ;; Need a define_expand because we've run out of CONST_OK... characters.
;; iorsi3: force operand 2 into a register unless it is an immediate
;; the ior_operand predicate (and hence output_ior) can handle.
3455 (define_expand "iorsi3"
3456 [(set (match_operand:SI 0 "register_operand" "")
3457 (ior:SI (match_operand:SI 1 "register_operand" "")
3458 (match_operand:SI 2 "arith32_operand" "")))]
3462 if (! (ior_operand (operands[2], SImode)
3463 || register_operand (operands[2], SImode)))
3464 operands[2] = force_reg (SImode, operands[2]);
;; SImode OR with an immediate; output_ior picks the instruction form.
3468 [(set (match_operand:SI 0 "register_operand" "=r,r")
3469 (ior:SI (match_operand:SI 1 "register_operand" "0,0")
3470 (match_operand:SI 2 "ior_operand" "M,i")))]
3472 "* return output_ior (operands); "
3473 [(set_attr "type" "binary,shift")
3474 (set_attr "length" "4,4")])
;; Register-register SImode OR (output template not visible here).
3477 [(set (match_operand:SI 0 "register_operand" "=r")
3478 (ior:SI (match_operand:SI 1 "register_operand" "%r")
3479 (match_operand:SI 2 "register_operand" "r")))]
3482 [(set_attr "type" "binary")
3483 (set_attr "length" "4")])
;; xordi3: 64-bit XOR; non-register operands fall back to GCC's
;; word-at-a-time splitting, same scheme as anddi3/iordi3.
3485 (define_expand "xordi3"
3486 [(set (match_operand:DI 0 "register_operand" "")
3487 (xor:DI (match_operand:DI 1 "arith_double_operand" "")
3488 (match_operand:DI 2 "arith_double_operand" "")))]
3492 if (! register_operand (operands[1], DImode)
3493 || ! register_operand (operands[2], DImode))
3494 /* Let GCC break this into word-at-a-time operations. */
;; Register-register DImode XOR: two 32-bit `xor` instructions.
3499 [(set (match_operand:DI 0 "register_operand" "=r")
3500 (xor:DI (match_operand:DI 1 "register_operand" "%r")
3501 (match_operand:DI 2 "register_operand" "r")))]
3503 "xor %1,%2,%0\;xor %R1,%R2,%R0"
3504 [(set_attr "type" "binary")
3505 (set_attr "length" "8")])
;; xorsi3: SImode XOR (output template not visible in this view).
3507 (define_insn "xorsi3"
3508 [(set (match_operand:SI 0 "register_operand" "=r")
3509 (xor:SI (match_operand:SI 1 "register_operand" "%r")
3510 (match_operand:SI 2 "register_operand" "r")))]
3513 [(set_attr "type" "binary")
3514 (set_attr "length" "4")])
;; negdi2: 64-bit negate as subtract-from-zero.  The low word is
;; subtracted first, then `subb` (subtract with borrow) propagates the
;; borrow into the high word.
3516 (define_insn "negdi2"
3517 [(set (match_operand:DI 0 "register_operand" "=r")
3518 (neg:DI (match_operand:DI 1 "register_operand" "r")))]
3520 "sub %%r0,%R1,%R0\;subb %%r0,%1,%0"
3521 [(set_attr "type" "unary")
3522 (set_attr "length" "8")])
;; negsi2: 32-bit negate (output template not visible in this view).
3524 (define_insn "negsi2"
3525 [(set (match_operand:SI 0 "register_operand" "=r")
3526 (neg:SI (match_operand:SI 1 "register_operand" "r")))]
3529 [(set_attr "type" "unary")
3530 (set_attr "length" "4")])
;; one_cmpldi2: 64-bit bitwise NOT; FAIL for non-register input so GCC
;; splits it into word-sized pieces.
3532 (define_expand "one_cmpldi2"
3533 [(set (match_operand:DI 0 "register_operand" "")
3534 (not:DI (match_operand:DI 1 "arith_double_operand" "")))]
3538 if (! register_operand (operands[1], DImode))
;; Register DImode NOT: `uaddcm %r0,x` computes 0 + ~x, i.e. the
;; one's complement, applied to each word of the pair.
3543 [(set (match_operand:DI 0 "register_operand" "=r")
3544 (not:DI (match_operand:DI 1 "register_operand" "r")))]
3546 "uaddcm %%r0,%1,%0\;uaddcm %%r0,%R1,%R0"
3547 [(set_attr "type" "unary")
3548 (set_attr "length" "8")])
;; one_cmplsi2: 32-bit NOT (output template not visible in this view).
3550 (define_insn "one_cmplsi2"
3551 [(set (match_operand:SI 0 "register_operand" "=r")
3552 (not:SI (match_operand:SI 1 "register_operand" "r")))]
3555 [(set_attr "type" "unary")
3556 (set_attr "length" "4")])
3558 ;; Floating point arithmetic instructions.
;; All of the patterns below require hardware floating point
;; (! TARGET_SOFT_FLOAT).  Output templates for several of them fall
;; outside this view.  The `pa_combine_type` attribute feeds the
;; pa_combine pass that fuses fmpy with fadd/fsub on PA 7xxx parts.
;; adddf3: double-precision add.
3560 (define_insn "adddf3"
3561 [(set (match_operand:DF 0 "register_operand" "=f")
3562 (plus:DF (match_operand:DF 1 "register_operand" "f")
3563 (match_operand:DF 2 "register_operand" "f")))]
3564 "! TARGET_SOFT_FLOAT"
3566 [(set_attr "type" "fpalu")
3567 (set_attr "pa_combine_type" "faddsub")
3568 (set_attr "length" "4")])
;; addsf3: single-precision add.
3570 (define_insn "addsf3"
3571 [(set (match_operand:SF 0 "register_operand" "=f")
3572 (plus:SF (match_operand:SF 1 "register_operand" "f")
3573 (match_operand:SF 2 "register_operand" "f")))]
3574 "! TARGET_SOFT_FLOAT"
3576 [(set_attr "type" "fpalu")
3577 (set_attr "pa_combine_type" "faddsub")
3578 (set_attr "length" "4")])
;; subdf3: double-precision subtract.
3580 (define_insn "subdf3"
3581 [(set (match_operand:DF 0 "register_operand" "=f")
3582 (minus:DF (match_operand:DF 1 "register_operand" "f")
3583 (match_operand:DF 2 "register_operand" "f")))]
3584 "! TARGET_SOFT_FLOAT"
3586 [(set_attr "type" "fpalu")
3587 (set_attr "pa_combine_type" "faddsub")
3588 (set_attr "length" "4")])
;; subsf3: single-precision subtract.
3590 (define_insn "subsf3"
3591 [(set (match_operand:SF 0 "register_operand" "=f")
3592 (minus:SF (match_operand:SF 1 "register_operand" "f")
3593 (match_operand:SF 2 "register_operand" "f")))]
3594 "! TARGET_SOFT_FLOAT"
3596 [(set_attr "type" "fpalu")
3597 (set_attr "pa_combine_type" "faddsub")
3598 (set_attr "length" "4")])
;; muldf3: double-precision multiply.
3600 (define_insn "muldf3"
3601 [(set (match_operand:DF 0 "register_operand" "=f")
3602 (mult:DF (match_operand:DF 1 "register_operand" "f")
3603 (match_operand:DF 2 "register_operand" "f")))]
3604 "! TARGET_SOFT_FLOAT"
3606 [(set_attr "type" "fpmuldbl")
3607 (set_attr "pa_combine_type" "fmpy")
3608 (set_attr "length" "4")])
;; mulsf3: single-precision multiply.
3610 (define_insn "mulsf3"
3611 [(set (match_operand:SF 0 "register_operand" "=f")
3612 (mult:SF (match_operand:SF 1 "register_operand" "f")
3613 (match_operand:SF 2 "register_operand" "f")))]
3614 "! TARGET_SOFT_FLOAT"
3616 [(set_attr "type" "fpmulsgl")
3617 (set_attr "pa_combine_type" "fmpy")
3618 (set_attr "length" "4")])
;; divdf3 / divsf3: FP divides; scheduled separately from multiplies
;; via their own fpdiv* types.
3620 (define_insn "divdf3"
3621 [(set (match_operand:DF 0 "register_operand" "=f")
3622 (div:DF (match_operand:DF 1 "register_operand" "f")
3623 (match_operand:DF 2 "register_operand" "f")))]
3624 "! TARGET_SOFT_FLOAT"
3626 [(set_attr "type" "fpdivdbl")
3627 (set_attr "length" "4")])
3629 (define_insn "divsf3"
3630 [(set (match_operand:SF 0 "register_operand" "=f")
3631 (div:SF (match_operand:SF 1 "register_operand" "f")
3632 (match_operand:SF 2 "register_operand" "f")))]
3633 "! TARGET_SOFT_FLOAT"
3635 [(set_attr "type" "fpdivsgl")
3636 (set_attr "length" "4")])
;; negdf2: double-precision negate.  Uses the PA 2.0 `fneg` when
;; available, otherwise subtracts from %fr0 (which reads as +0.0) —
;; the condition selecting between the two is outside this view.
3638 (define_insn "negdf2"
3639 [(set (match_operand:DF 0 "register_operand" "=f")
3640 (neg:DF (match_operand:DF 1 "register_operand" "f")))]
3641 "! TARGET_SOFT_FLOAT"
3645 return \"fneg,dbl %1,%0\";
3647 return \"fsub,dbl %%fr0,%1,%0\";
3649 [(set_attr "type" "fpalu")
3650 (set_attr "length" "4")])
;; negsf2: single-precision negate, same fneg/fsub-from-zero scheme.
3652 (define_insn "negsf2"
3653 [(set (match_operand:SF 0 "register_operand" "=f")
3654 (neg:SF (match_operand:SF 1 "register_operand" "f")))]
3655 "! TARGET_SOFT_FLOAT"
3659 return \"fneg,sgl %1,%0\";
3661 return \"fsub,sgl %%fr0,%1,%0\";
3663 [(set_attr "type" "fpalu")
3664 (set_attr "length" "4")])
;; absdf2 / abssf2: FP absolute value (templates not visible here).
3666 (define_insn "absdf2"
3667 [(set (match_operand:DF 0 "register_operand" "=f")
3668 (abs:DF (match_operand:DF 1 "register_operand" "f")))]
3669 "! TARGET_SOFT_FLOAT"
3671 [(set_attr "type" "fpalu")
3672 (set_attr "length" "4")])
3674 (define_insn "abssf2"
3675 [(set (match_operand:SF 0 "register_operand" "=f")
3676 (abs:SF (match_operand:SF 1 "register_operand" "f")))]
3677 "! TARGET_SOFT_FLOAT"
3679 [(set_attr "type" "fpalu")
3680 (set_attr "length" "4")])
;; sqrtdf2 / sqrtsf2: hardware square root (templates not visible here).
3682 (define_insn "sqrtdf2"
3683 [(set (match_operand:DF 0 "register_operand" "=f")
3684 (sqrt:DF (match_operand:DF 1 "register_operand" "f")))]
3685 "! TARGET_SOFT_FLOAT"
3687 [(set_attr "type" "fpsqrtdbl")
3688 (set_attr "length" "4")])
3690 (define_insn "sqrtsf2"
3691 [(set (match_operand:SF 0 "register_operand" "=f")
3692 (sqrt:SF (match_operand:SF 1 "register_operand" "f")))]
3693 "! TARGET_SOFT_FLOAT"
3695 [(set_attr "type" "fpsqrtsgl")
3696 (set_attr "length" "4")])
3698 ;; PA 2.0 floating point instructions
;; Fused multiply-add patterns, PA 2.0 only.  Combine generates both
;; operand orders of (plus (mult a b) c), so each precision needs two
;; patterns.  fmpyfadd computes a*b + c in one instruction.
3702 [(set (match_operand:DF 0 "register_operand" "=f")
3703 (plus:DF (mult:DF (match_operand:DF 1 "register_operand" "f")
3704 (match_operand:DF 2 "register_operand" "f"))
3705 (match_operand:DF 3 "register_operand" "f")))]
3706 "TARGET_PA_20 && ! TARGET_SOFT_FLOAT"
3707 "fmpyfadd,dbl %1,%2,%3,%0"
3708 [(set_attr "type" "fpmuldbl")
3709 (set_attr "length" "4")])
;; Same, with the addend first: c + a*b.
3712 [(set (match_operand:DF 0 "register_operand" "=f")
3713 (plus:DF (match_operand:DF 1 "register_operand" "f")
3714 (mult:DF (match_operand:DF 2 "register_operand" "f")
3715 (match_operand:DF 3 "register_operand" "f"))))]
3716 "TARGET_PA_20 && ! TARGET_SOFT_FLOAT"
3717 "fmpyfadd,dbl %2,%3,%1,%0"
3718 [(set_attr "type" "fpmuldbl")
3719 (set_attr "length" "4")])
;; Single-precision fmpyfadd, both operand orders.
3722 [(set (match_operand:SF 0 "register_operand" "=f")
3723 (plus:SF (mult:SF (match_operand:SF 1 "register_operand" "f")
3724 (match_operand:SF 2 "register_operand" "f"))
3725 (match_operand:SF 3 "register_operand" "f")))]
3726 "TARGET_PA_20 && ! TARGET_SOFT_FLOAT"
3727 "fmpyfadd,sgl %1,%2,%3,%0"
3728 [(set_attr "type" "fpmulsgl")
3729 (set_attr "length" "4")])
3732 [(set (match_operand:SF 0 "register_operand" "=f")
3733 (plus:SF (match_operand:SF 1 "register_operand" "f")
3734 (mult:SF (match_operand:SF 2 "register_operand" "f")
3735 (match_operand:SF 3 "register_operand" "f"))))]
3736 "TARGET_PA_20 && ! TARGET_SOFT_FLOAT"
3737 "fmpyfadd,sgl %2,%3,%1,%0"
3738 [(set_attr "type" "fpmulsgl")
3739 (set_attr "length" "4")])
3741 ; fmpynfadd patterns
;; fmpynfadd computes c - a*b (negated multiply, fused add).
3743 [(set (match_operand:DF 0 "register_operand" "=f")
3744 (minus:DF (match_operand:DF 1 "register_operand" "f")
3745 (mult:DF (match_operand:DF 2 "register_operand" "f")
3746 (match_operand:DF 3 "register_operand" "f"))))]
3747 "TARGET_PA_20 && ! TARGET_SOFT_FLOAT"
3748 "fmpynfadd,dbl %2,%3,%1,%0"
3749 [(set_attr "type" "fpmuldbl")
3750 (set_attr "length" "4")])
3753 [(set (match_operand:SF 0 "register_operand" "=f")
3754 (minus:SF (match_operand:SF 1 "register_operand" "f")
3755 (mult:SF (match_operand:SF 2 "register_operand" "f")
3756 (match_operand:SF 3 "register_operand" "f"))))]
3757 "TARGET_PA_20 && ! TARGET_SOFT_FLOAT"
3758 "fmpynfadd,sgl %2,%3,%1,%0"
3759 [(set_attr "type" "fpmulsgl")
3760 (set_attr "length" "4")])
;; Negated absolute value, -(|x|), PA 2.0 (templates not visible here;
;; presumably fnegabs — confirm against pa.md upstream).
3764 [(set (match_operand:DF 0 "register_operand" "=f")
3765 (neg:DF (abs:DF (match_operand:DF 1 "register_operand" "f"))))]
3766 "TARGET_PA_20 && ! TARGET_SOFT_FLOAT"
3768 [(set_attr "type" "fpalu")
3769 (set_attr "length" "4")])
3772 [(set (match_operand:SF 0 "register_operand" "=f")
3773 (neg:SF (abs:SF (match_operand:SF 1 "register_operand" "f"))))]
3774 "TARGET_PA_20 && ! TARGET_SOFT_FLOAT"
3776 [(set_attr "type" "fpalu")
3777 (set_attr "length" "4")])
3780 ;;- Shift instructions
3782 ;; Optimized special case of shifting.
;; Logical right shift of a memory word matched as a narrower load
;; (output templates not visible in this view; the shift counts were
;; on the elided lines).
3785 [(set (match_operand:SI 0 "register_operand" "=r")
3786 (lshiftrt:SI (match_operand:SI 1 "memory_operand" "m")
3790 [(set_attr "type" "load")
3791 (set_attr "length" "4")])
3794 [(set (match_operand:SI 0 "register_operand" "=r")
3795 (lshiftrt:SI (match_operand:SI 1 "memory_operand" "m")
3799 [(set_attr "type" "load")
3800 (set_attr "length" "4")])
;; Shift-and-add: r0 = r2 * {2,4,8} + r1, emitted as sh1addl/sh2addl/
;; sh3addl (%O3 prints the log2 of the shadd_operand constant).
3803 [(set (match_operand:SI 0 "register_operand" "=r")
3804 (plus:SI (mult:SI (match_operand:SI 2 "register_operand" "r")
3805 (match_operand:SI 3 "shadd_operand" ""))
3806 (match_operand:SI 1 "register_operand" "r")))]
3808 "sh%O3addl %2,%1,%0"
3809 [(set_attr "type" "binary")
3810 (set_attr "length" "4")])
3812 ;; This variant of the above insn can occur if the first operand
3813 ;; is the frame pointer. This is a kludge, but there doesn't
3814 ;; seem to be a way around it. Only recognize it while reloading.
3815 ;; Note how operand 3 uses a predicate of "const_int_operand", but
3816 ;; has constraints allowing a register. I don't know how this works,
3817 ;; but it somehow makes sure that out-of-range constants are placed
3818 ;; in a register which somehow magically is a "const_int_operand".
3819 ;; (this was stolen from alpha.md, I'm not going to try and change it.)
;; Reload-only shadd variant: shift-add followed by a constant (or
;; register) displacement add.  Alternative 0 adds a register
;; displacement with addl; alternative 1 folds a 14-bit immediate
;; into ldo.  See the comment block above for why operand 3's
;; predicate/constraint pairing looks odd.
3822 [(set (match_operand:SI 0 "register_operand" "=&r,r")
3823 (plus:SI (plus:SI (mult:SI (match_operand:SI 2 "register_operand" "r,r")
3824 (match_operand:SI 4 "shadd_operand" ""))
3825 (match_operand:SI 1 "register_operand" "r,r"))
3826 (match_operand:SI 3 "const_int_operand" "r,J")))]
3827 "reload_in_progress"
3829 sh%O4addl %2,%1,%0\;addl %3,%0,%0
3830 sh%O4addl %2,%1,%0\;ldo %3(%0),%0"
3831 [(set_attr "type" "multi")
3832 (set_attr "length" "8")])
3834 ;; This anonymous pattern and splitter wins because it reduces the latency
3835 ;; of the shadd sequence without increasing the latency of the shift.
3837 ;; We want to make sure and split up the operations for the scheduler since
3838 ;; these instructions can (and should) schedule independently.
3840 ;; It would be clearer if combine used the same operator for both expressions,
3841 ;; it's somewhat confusing to have a mult in one operation and an ashift
3844 ;; If this pattern is not split before register allocation, then we must expose
3845 ;; the fact that operand 4 is set before operands 1, 2 and 3 have been read.
;; Two-output pattern: a shadd result and the bare shifted value.
;; The condition requires operand 5 (the shift count) to be the log2
;; of operand 3 (the shadd multiplier), so both sets describe the same
;; shifted quantity.  The matching define_split below breaks it into
;; the shift and the shadd as independent insns for the scheduler.
3847 [(set (match_operand:SI 0 "register_operand" "=r")
3848 (plus:SI (mult:SI (match_operand:SI 2 "register_operand" "r")
3849 (match_operand:SI 3 "shadd_operand" ""))
3850 (match_operand:SI 1 "register_operand" "r")))
3851 (set (match_operand:SI 4 "register_operand" "=&r")
3852 (ashift:SI (match_dup 2)
3853 (match_operand:SI 5 "const_int_operand" "i")))]
3854 "INTVAL (operands[5]) == exact_log2 (INTVAL (operands[3]))"
3856 [(set_attr "type" "binary")
3857 (set_attr "length" "8")])
;; The split: do the ashift first (operand 4 is earlyclobbered above
;; because it is written before operands 1-3 are read), then the shadd.
3860 [(set (match_operand:SI 0 "register_operand" "=r")
3861 (plus:SI (mult:SI (match_operand:SI 2 "register_operand" "r")
3862 (match_operand:SI 3 "shadd_operand" ""))
3863 (match_operand:SI 1 "register_operand" "r")))
3864 (set (match_operand:SI 4 "register_operand" "=&r")
3865 (ashift:SI (match_dup 2)
3866 (match_operand:SI 5 "const_int_operand" "i")))]
3867 "INTVAL (operands[5]) == exact_log2 (INTVAL (operands[3]))"
3868 [(set (match_dup 4) (ashift:SI (match_dup 2) (match_dup 5)))
3869 (set (match_dup 0) (plus:SI (mult:SI (match_dup 2) (match_dup 3))
;; ashlsi3: left shift.  The PA's variable-shift instructions (zvdep)
;; deposit relative to bit 31, so a variable count is converted to
;; 31 - count before using zvdep32/zvdep_imm32.  A constant count goes
;; through the zdep pattern below instead.
3873 (define_expand "ashlsi3"
3874 [(set (match_operand:SI 0 "register_operand" "")
3875 (ashift:SI (match_operand:SI 1 "lhs_lshift_operand" "")
3876 (match_operand:SI 2 "arith32_operand" "")))]
3880 if (GET_CODE (operands[2]) != CONST_INT)
3882 rtx temp = gen_reg_rtx (SImode);
3883 emit_insn (gen_subsi3 (temp, GEN_INT (31), operands[2]));
3884 if (GET_CODE (operands[1]) == CONST_INT)
3885 emit_insn (gen_zvdep_imm32 (operands[0], operands[1], temp));
3887 emit_insn (gen_zvdep32 (operands[0], operands[1], temp));
3890 /* Make sure both inputs are not constants,
3891 there are no patterns for that. */
3892 operands[1] = force_reg (SImode, operands[1]);
;; Constant left shift via zdep (zero-and-deposit); %P2/%L2 print the
;; position/length fields derived from the shift count.
3896 [(set (match_operand:SI 0 "register_operand" "=r")
3897 (ashift:SI (match_operand:SI 1 "register_operand" "r")
3898 (match_operand:SI 2 "const_int_operand" "n")))]
3900 "zdep %1,%P2,%L2,%0"
3901 [(set_attr "type" "shift")
3902 (set_attr "length" "4")])
3904 ; Match cases of op1 a CONST_INT here that zvdep_imm32 doesn't handle.
3905 ; Doing it like this makes slightly better code since reload can
3906 ; replace a register with a known value in range -16..15 with a
3907 ; constant. Ideally, we would like to merge zvdep32 and zvdep_imm32,
3908 ; but since we have no more CONST_OK... characters, that is not
;; zvdep32: variable left shift, count supplied as 31 - n in the shift
;; amount register (`q` = the SAR).  Templates are on elided lines.
3910 (define_insn "zvdep32"
3911 [(set (match_operand:SI 0 "register_operand" "=r,r")
3912 (ashift:SI (match_operand:SI 1 "arith5_operand" "r,L")
3913 (minus:SI (const_int 31)
3914 (match_operand:SI 2 "register_operand" "q,q"))))]
3919 [(set_attr "type" "shift,shift")
3920 (set_attr "length" "4,4")])
;; zvdep_imm32: variable shift of a constant of the form m * 2^k with
;; m in -16..-1 (lhs_lshift_cint_operand); the constant is decomposed
;; into a 5-bit immediate plus a field length for zvdepi.
3922 (define_insn "zvdep_imm32"
3923 [(set (match_operand:SI 0 "register_operand" "=r")
3924 (ashift:SI (match_operand:SI 1 "lhs_lshift_cint_operand" "")
3925 (minus:SI (const_int 31)
3926 (match_operand:SI 2 "register_operand" "q"))))]
3930 int x = INTVAL (operands[1]);
3931 operands[2] = GEN_INT (4 + exact_log2 ((x >> 4) + 1));
3932 operands[1] = GEN_INT ((x & 0xf) - 0x10);
3933 return \"zvdepi %1,%2,%0\";
3935 [(set_attr "type" "shift")
3936 (set_attr "length" "4")])
;; vdepi_ior: OR a variable-position mask of the form 000..0111..1
;; (constant is 2^k - 1) into operand 3 by depositing -1 into a k-bit
;; field at the SAR-selected position.
3938 (define_insn "vdepi_ior"
3939 [(set (match_operand:SI 0 "register_operand" "=r")
3940 (ior:SI (ashift:SI (match_operand:SI 1 "const_int_operand" "")
3941 (minus:SI (const_int 31)
3942 (match_operand:SI 2 "register_operand" "q")))
3943 (match_operand:SI 3 "register_operand" "0")))]
3944 ; accept ...0001...1, can this be generalized?
3945 "exact_log2 (INTVAL (operands[1]) + 1) >= 0"
3948 int x = INTVAL (operands[1]);
3949 operands[2] = GEN_INT (exact_log2 (x + 1));
3950 return \"vdepi -1,%2,%0\";
3952 [(set_attr "type" "shift")
3953 (set_attr "length" "4")])
;; vdepi_and: AND with a rotated constant by depositing a field of
;; zeros; currently only matches the constant -2 (see condition).
3955 (define_insn "vdepi_and"
3956 [(set (match_operand:SI 0 "register_operand" "=r")
3957 (and:SI (rotate:SI (match_operand:SI 1 "const_int_operand" "")
3958 (minus:SI (const_int 31)
3959 (match_operand:SI 2 "register_operand" "q")))
3960 (match_operand:SI 3 "register_operand" "0")))]
3961 ; this can be generalized...!
3962 "INTVAL (operands[1]) == -2"
3965 int x = INTVAL (operands[1]);
3966 operands[2] = GEN_INT (exact_log2 ((~x) + 1));
3967 return \"vdepi 0,%2,%0\";
3969 [(set_attr "type" "shift")
3970 (set_attr "length" "4")])
;; ashrsi3: arithmetic right shift.  Variable counts are converted to
;; 31 - count for vextrs32 (extract relative to bit 31); constant
;; counts use the extrs pattern below.
3972 (define_expand "ashrsi3"
3973 [(set (match_operand:SI 0 "register_operand" "")
3974 (ashiftrt:SI (match_operand:SI 1 "register_operand" "")
3975 (match_operand:SI 2 "arith32_operand" "")))]
3979 if (GET_CODE (operands[2]) != CONST_INT)
3981 rtx temp = gen_reg_rtx (SImode);
3982 emit_insn (gen_subsi3 (temp, GEN_INT (31), operands[2]));
3983 emit_insn (gen_vextrs32 (operands[0], operands[1], temp));
;; Constant arithmetic right shift via extrs (sign-extending extract).
3989 [(set (match_operand:SI 0 "register_operand" "=r")
3990 (ashiftrt:SI (match_operand:SI 1 "register_operand" "r")
3991 (match_operand:SI 2 "const_int_operand" "n")))]
3993 "extrs %1,%P2,%L2,%0"
3994 [(set_attr "type" "shift")
3995 (set_attr "length" "4")])
;; vextrs32: variable arithmetic right shift, count in the SAR as
;; 31 - n (template on an elided line).
3997 (define_insn "vextrs32"
3998 [(set (match_operand:SI 0 "register_operand" "=r")
3999 (ashiftrt:SI (match_operand:SI 1 "register_operand" "r")
4000 (minus:SI (const_int 31)
4001 (match_operand:SI 2 "register_operand" "q"))))]
4004 [(set_attr "type" "shift")
4005 (set_attr "length" "4")])
;; lshrsi3: logical right shift; alternative 0 takes the count from
;; the SAR (q), alternative 1 a constant via extru (template for the
;; variable case is on an elided line).
4007 (define_insn "lshrsi3"
4008 [(set (match_operand:SI 0 "register_operand" "=r,r")
4009 (lshiftrt:SI (match_operand:SI 1 "register_operand" "r,r")
4010 (match_operand:SI 2 "arith32_operand" "q,n")))]
4014 extru %1,%P2,%L2,%0"
4015 [(set_attr "type" "shift")
4016 (set_attr "length" "4")])
;; rotrsi3: rotate right, implemented with the double-shift
;; instruction using the same register for both halves:
;; shd (constant count, masked to 0..31) or vshd (SAR count).
4018 (define_insn "rotrsi3"
4019 [(set (match_operand:SI 0 "register_operand" "=r,r")
4020 (rotatert:SI (match_operand:SI 1 "register_operand" "r,r")
4021 (match_operand:SI 2 "arith32_operand" "q,n")))]
4025 if (GET_CODE (operands[2]) == CONST_INT)
4027 operands[2] = GEN_INT (INTVAL (operands[2]) & 31);
4028 return \"shd %1,%1,%2,%0\";
4031 return \"vshd %1,%1,%0\";
4033 [(set_attr "type" "shift")
4034 (set_attr "length" "4")])
;; rotlsi3: rotate left.  A variable left rotate by n is a right
;; rotate by 32 - n, so it is rewritten to use rotrsi3; a constant
;; count falls through to the shd pattern below.
4036 (define_expand "rotlsi3"
4037 [(set (match_operand:SI 0 "register_operand" "")
4038 (rotate:SI (match_operand:SI 1 "register_operand" "")
4039 (match_operand:SI 2 "arith32_operand" "")))]
4043 if (GET_CODE (operands[2]) != CONST_INT)
4045 rtx temp = gen_reg_rtx (SImode);
4046 emit_insn (gen_subsi3 (temp, GEN_INT (32), operands[2]));
4047 emit_insn (gen_rotrsi3 (operands[0], operands[1], temp));
4050 /* Else expand normally. */
;; Constant rotate left: shd with the complemented count
;; ((32 - n) & 31).
4054 [(set (match_operand:SI 0 "register_operand" "=r")
4055 (rotate:SI (match_operand:SI 1 "register_operand" "r")
4056 (match_operand:SI 2 "const_int_operand" "n")))]
4060 operands[2] = GEN_INT ((32 - INTVAL (operands[2])) & 31);
4061 return \"shd %1,%1,%2,%0\";
4063 [(set_attr "type" "shift")
4064 (set_attr "length" "4")])
;; Funnel-shift patterns: (x << k) op (y >> (32-k)) for op in
;; {plus, xor, ior} — all equivalent here because the shifted fields
;; don't overlap — matched in both operand orders (templates on
;; elided lines; presumably shd).
4067 [(set (match_operand:SI 0 "register_operand" "=r")
4068 (match_operator:SI 5 "plus_xor_ior_operator"
4069 [(ashift:SI (match_operand:SI 1 "register_operand" "r")
4070 (match_operand:SI 3 "const_int_operand" "n"))
4071 (lshiftrt:SI (match_operand:SI 2 "register_operand" "r")
4072 (match_operand:SI 4 "const_int_operand" "n"))]))]
4073 "INTVAL (operands[3]) + INTVAL (operands[4]) == 32"
4075 [(set_attr "type" "shift")
4076 (set_attr "length" "4")])
4079 [(set (match_operand:SI 0 "register_operand" "=r")
4080 (match_operator:SI 5 "plus_xor_ior_operator"
4081 [(lshiftrt:SI (match_operand:SI 2 "register_operand" "r")
4082 (match_operand:SI 4 "const_int_operand" "n"))
4083 (ashift:SI (match_operand:SI 1 "register_operand" "r")
4084 (match_operand:SI 3 "const_int_operand" "n"))]))]
4085 "INTVAL (operands[3]) + INTVAL (operands[4]) == 32"
4087 [(set_attr "type" "shift")
4088 (set_attr "length" "4")])
;; (x << k) & mask where mask >> k is 2^m - 1: a single zdep
;; depositing an m-bit field of x at position 31 - k.
4091 [(set (match_operand:SI 0 "register_operand" "=r")
4092 (and:SI (ashift:SI (match_operand:SI 1 "register_operand" "r")
4093 (match_operand:SI 2 "const_int_operand" ""))
4094 (match_operand:SI 3 "const_int_operand" "")))]
4095 "exact_log2 (1 + (INTVAL (operands[3]) >> (INTVAL (operands[2]) & 31))) >= 0"
4098 int cnt = INTVAL (operands[2]) & 31;
4099 operands[3] = GEN_INT (exact_log2 (1 + (INTVAL (operands[3]) >> cnt)));
4100 operands[2] = GEN_INT (31 - cnt);
4101 return \"zdep %1,%2,%3,%0\";
4103 [(set_attr "type" "shift")
4104 (set_attr "length" "4")])
4106 ;; Unconditional and other jump instructions.
;; Trivial return, usable only when the epilogue is empty
;; (hppa_can_use_return_insn_p); template on an elided line.
4108 (define_insn "return"
4110 "hppa_can_use_return_insn_p ()"
4112 [(set_attr "type" "branch")
4113 (set_attr "length" "4")])
4115 ;; Use a different pattern for functions which have non-trivial
4116 ;; epilogues so as not to confuse jump and reorg.
4117 (define_insn "return_internal"
4122 [(set_attr "type" "branch")
4123 (set_attr "length" "4")])
;; prologue/epilogue expanders defer to the C helpers in pa.c; the
;; epilogue emits the trivial return when possible.
4125 (define_expand "prologue"
4128 "hppa_expand_prologue ();DONE;")
4130 (define_expand "epilogue"
4135 /* Try to use the trivial return first. Else use the full
4137 if (hppa_can_use_return_insn_p ())
4138 emit_jump_insn (gen_return ())
4141 hppa_expand_epilogue ();
4142 emit_jump_insn (gen_return_internal ());
4147 ;; Special because we use the value placed in %r2 by the bl instruction
4148 ;; from within its delay slot to set the value for the 2nd parameter to
4150 (define_insn "call_profiler"
4151 [(unspec_volatile [(const_int 0)] 0)
4152 (use (match_operand:SI 0 "const_int_operand" ""))]
4154 "bl _mcount,%%r2\;ldo %0(%%r2),%%r25"
4155 [(set_attr "type" "multi")
4156 (set_attr "length" "8")])
;; Scheduling barrier: emits nothing, keeps insns from moving across.
4158 (define_insn "blockage"
4159 [(unspec_volatile [(const_int 2)] 0)]
4162 [(set_attr "length" "0")])
;; Unconditional jump.  Short branches are emitted directly (on an
;; elided line); for out-of-range targets at -O0 the code below spills
;; %r1 to sp-16, loads the target address into %r1 (PIC or absolute),
;; does an indirect branch, and restores %r1 from the delay slot.
;; Very long branches are not supported when optimizing (the scratch
;; slot trick is only safe unoptimized) — see the comment in the body.
4165 [(set (pc) (label_ref (match_operand 0 "" "")))]
4169 extern int optimize;
4171 if (GET_MODE (insn) == SImode)
4174 /* An unconditional branch which can reach its target. */
4175 if (get_attr_length (insn) != 24
4176 && get_attr_length (insn) != 16)
4179 /* An unconditional branch which can not reach its target.
4181 We need to be able to use %r1 as a scratch register; however,
4182 we can never be sure whether or not it's got a live value in
4183 it. Therefore, we must restore its original value after the
4186 To make matters worse, we don't have a stack slot which we
4187 can always clobber. sp-12/sp-16 shouldn't ever have a live
4188 value during a non-optimizing compilation, so we use those
4189 slots for now. We don't support very long branches when
4190 optimizing -- they should be quite rare when optimizing.
4192 Really the way to go long term is a register scavenger; goto
4193 the target of the jump and find a register which we can use
4194 as a scratch to hold the value in %r1. */
4196 /* We don't know how to register scavenge yet. */
4200 /* First store %r1 into the stack. */
4201 output_asm_insn (\"stw %%r1,-16(%%r30)\", operands);
4203 /* Now load the target address into %r1 and do an indirect jump
4204 to the value specified in %r1. Be careful to generate PIC
4209 xoperands[0] = operands[0];
4210 xoperands[1] = gen_label_rtx ();
4212 output_asm_insn (\"bl .+8,%%r1\\n\\taddil L'%l0-%l1,%%r1\", xoperands);
4213 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, \"L\",
4214 CODE_LABEL_NUMBER (xoperands[1]));
4215 output_asm_insn (\"ldo R'%l0-%l1(%%r1),%%r1\\n\\tbv %%r0(%%r1)\",
4219 output_asm_insn (\"ldil L'%l0,%%r1\\n\\tbe R'%l0(%%sr4,%%r1)\", operands);;
4221 /* And restore the value of %r1 in the delay slot. We're not optimizing,
4222 so we know nothing else can be in the delay slot. */
4223 return \"ldw -16(%%r30),%%r1\";
4225 [(set_attr "type" "uncond_branch")
4226 (set_attr "pa_combine_type" "uncond_branch")
4227 (set (attr "length")
4228 (cond [(eq (symbol_ref "jump_in_call_delay (insn)") (const_int 1))
4229 (if_then_else (lt (abs (minus (match_dup 0)
4230 (plus (pc) (const_int 8))))
4234 (ge (abs (minus (match_dup 0) (plus (pc) (const_int 8))))
4236 (if_then_else (eq (symbol_ref "flag_pic") (const_int 0))
4241 ;; Subroutines of "casesi".
4242 ;; operand 0 is index
4243 ;; operand 1 is the minimum bound
4244 ;; operand 2 is the maximum bound - minimum bound + 1
4245 ;; operand 3 is CODE_LABEL for the table;
4246 ;; operand 4 is the CODE_LABEL to go to if index out of range.
;; casesi: dispatch-table switch.  Normalizes the index to zero-based
;; (by adding -min), does an unsigned bounds check branching to the
;; default label, then jumps through the table via casesi0.  With
;; TARGET_BIG_SWITCH the index is first doubled (table entries are
;; larger); that path's tail is on elided lines.
4248 (define_expand "casesi"
4249 [(match_operand:SI 0 "general_operand" "")
4250 (match_operand:SI 1 "const_int_operand" "")
4251 (match_operand:SI 2 "const_int_operand" "")
4252 (match_operand 3 "" "")
4253 (match_operand 4 "" "")]
4257 if (GET_CODE (operands[0]) != REG)
4258 operands[0] = force_reg (SImode, operands[0]);
4260 if (operands[1] != const0_rtx)
4262 rtx reg = gen_reg_rtx (SImode);
4264 operands[1] = GEN_INT (-INTVAL (operands[1]));
4265 if (!INT_14_BITS (operands[1]))
4266 operands[1] = force_reg (SImode, operands[1]);
4267 emit_insn (gen_addsi3 (reg, operands[0], operands[1]));
4272 if (!INT_5_BITS (operands[2]))
4273 operands[2] = force_reg (SImode, operands[2]);
4275 emit_insn (gen_cmpsi (operands[0], operands[2]));
4276 emit_jump_insn (gen_bgtu (operands[4]));
4277 if (TARGET_BIG_SWITCH)
4279 rtx temp = gen_reg_rtx (SImode);
4280 emit_move_insn (temp, gen_rtx_PLUS (SImode, operands[0], operands[0]));
4283 emit_jump_insn (gen_casesi0 (operands[0], operands[3]));
;; casesi0: the PC-relative jump through the dispatch table
;; (template on an elided line).
4287 (define_insn "casesi0"
4289 (mem:SI (plus:SI (pc)
4290 (match_operand:SI 0 "register_operand" "r")))
4291 (label_ref (match_operand 1 "" ""))))]
4294 [(set_attr "type" "multi")
4295 (set_attr "length" "8")])
4297 ;; Need nops for the calls because execution is supposed to continue
4298 ;; past; we don't want to nullify an instruction that we need.
4299 ;;- jump to subroutine
;; call: splits into call_internal_symref (direct, named function) vs
;; call_internal_reg (indirect through %r22), because the two use
;; different calling conventions and must not be CSEd into each other.
;; For PIC, the PIC register is restored after the call and a blockage
;; insn pins the restore next to the call.
4301 (define_expand "call"
4302 [(parallel [(call (match_operand:SI 0 "" "")
4303 (match_operand 1 "" ""))
4304 (clobber (reg:SI 2))])]
4311 if (TARGET_PORTABLE_RUNTIME)
4312 op = force_reg (SImode, XEXP (operands[0], 0));
4314 op = XEXP (operands[0], 0);
4316 /* Use two different patterns for calls to explicitly named functions
4317 and calls through function pointers. This is necessary as these two
4318 types of calls use different calling conventions, and CSE might try
4319 to change the named call into an indirect call in some cases (using
4320 two patterns keeps CSE from performing this optimization). */
4321 if (GET_CODE (op) == SYMBOL_REF)
4322 call_insn = emit_call_insn (gen_call_internal_symref (op, operands[1]));
4325 rtx tmpreg = gen_rtx_REG (word_mode, 22);
4326 emit_move_insn (tmpreg, force_reg (word_mode, op));
4327 call_insn = emit_call_insn (gen_call_internal_reg (operands[1]));
4332 use_reg (&CALL_INSN_FUNCTION_USAGE (call_insn), pic_offset_table_rtx);
4334 /* After each call we must restore the PIC register, even if it
4335 doesn't appear to be used.
4337 This will set regs_ever_live for the callee saved register we
4338 stored the PIC register in. */
4339 emit_move_insn (pic_offset_table_rtx,
4340 gen_rtx_REG (word_mode, PIC_OFFSET_TABLE_REGNUM_SAVED));
4341 emit_insn (gen_rtx_USE (VOIDmode, pic_offset_table_rtx));
4343 /* Gross. We have to keep the scheduler from moving the restore
4344 of the PIC register away from the call. SCHED_GROUP_P is
4345 supposed to do this, but for some reason the compiler will
4346 go into an infinite loop when we use that.
4348 This method (blockage insn) may make worse code (then again
4349 it may not since calls are nearly blockages anyway), but at
4350 least it should work. */
4351 emit_insn (gen_blockage ());
;; Direct call to a named function.  output_call emits the branch (and
;; any long-call sequence); output_arg_descriptor emits the .CALL
;; argument-location pseudo-op.  Length: 4 bytes when the target (or a
;; linker stub) is reachable, else a long call whose size depends on
;; flag_pic (see the attr expression).
4356 (define_insn "call_internal_symref"
4357 [(call (mem:SI (match_operand:SI 0 "call_operand_address" ""))
4358 (match_operand 1 "" "i"))
4359 (clobber (reg:SI 2))
4360 (use (const_int 0))]
4361 "! TARGET_PORTABLE_RUNTIME"
4364 output_arg_descriptor (insn);
4365 return output_call (insn, operands[0]);
4367 [(set_attr "type" "call")
4368 (set (attr "length")
4369 ;; If we're sure that we can either reach the target or that the
4370 ;; linker can use a long-branch stub, then the length is 4 bytes.
4372 ;; For long-calls the length will be either 52 bytes (non-pic)
4373 ;; or 68 bytes (pic). */
4374 ;; Else we have to use a long-call;
4375 (if_then_else (lt (plus (symbol_ref "total_code_bytes") (pc))
4378 (if_then_else (eq (symbol_ref "flag_pic")
;; Indirect call through %r22 via the $$dyncall millicode routine.
;; The emitted sequence is selected by the computed insn length:
;;   kernels/level-0 (no space regs or fast indirect): ble direct;
;;   8  = $$dyncall reachable (or stub) -> bl;
;;   12 = long non-PIC ldil/ble;
;;   20 = portable-runtime long sequence;
;;   otherwise the PIC bl/addil/ldo sequence at the end.
4383 (define_insn "call_internal_reg"
4384 [(call (mem:SI (reg:SI 22))
4385 (match_operand 0 "" "i"))
4386 (clobber (reg:SI 2))
4387 (use (const_int 1))]
4393 /* First the special case for kernels, level 0 systems, etc. */
4394 if (TARGET_NO_SPACE_REGS || TARGET_FAST_INDIRECT_CALLS)
4395 return \"ble 0(%%sr4,%%r22)\;copy %%r31,%%r2\";
4397 /* Now the normal case -- we can reach $$dyncall directly or
4398 we're sure that we can get there via a long-branch stub.
4400 No need to check target flags as the length uniquely identifies
4401 the remaining cases. */
4402 if (get_attr_length (insn) == 8)
4403 return \".CALL\\tARGW0=GR\;bl $$dyncall,%%r31\;copy %%r31,%%r2\";
4405 /* Long millicode call, but we are not generating PIC or portable runtime
4407 if (get_attr_length (insn) == 12)
4408 return \".CALL\\tARGW0=GR\;ldil L%%$$dyncall,%%r2\;ble R%%$$dyncall(%%sr4,%%r2)\;copy %%r31,%%r2\";
4410 /* Long millicode call for portable runtime. */
4411 if (get_attr_length (insn) == 20)
4412 return \"ldil L%%$$dyncall,%%r31\;ldo R%%$$dyncall(%%r31),%%r31\;blr %%r0,%%r2\;bv,n %%r0(%%r31)\;nop\";
4414 /* If we're generating PIC code. */
4415 xoperands[0] = operands[0];
4416 xoperands[1] = gen_label_rtx ();
4417 output_asm_insn (\"bl .+8,%%r1\", xoperands);
4418 output_asm_insn (\"addil L%%$$dyncall-%1,%%r1\", xoperands);
4419 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, \"L\",
4420 CODE_LABEL_NUMBER (xoperands[1]));
4421 output_asm_insn (\"ldo R%%$$dyncall-%1(%%r1),%%r1\", xoperands);
4422 output_asm_insn (\"blr %%r0,%%r2\", xoperands);
4423 output_asm_insn (\"bv,n %%r0(%%r1)\\n\\tnop\", xoperands);
4426 [(set_attr "type" "dyncall")
4427 (set (attr "length")
4429 ;; First NO_SPACE_REGS
4430 (ne (symbol_ref "TARGET_NO_SPACE_REGS || TARGET_FAST_INDIRECT_CALLS")
4434 ;; Target (or stub) within reach
4435 (and (lt (plus (symbol_ref "total_code_bytes") (pc))
4437 (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
4441 ;; Out of reach, but not PIC or PORTABLE_RUNTIME
4442 (and (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
4444 (eq (symbol_ref "flag_pic")
4448 (ne (symbol_ref "TARGET_PORTABLE_RUNTIME")
4452 ;; Out of range PIC case
;; Expander for value-returning calls.
;; NOTE(review): interior lines appear elided in this extraction (content
;; numbering jumps); statements such as closing braces may be missing here.
;;
;; Visible behavior: under TARGET_PORTABLE_RUNTIME the call address is forced
;; into a register.  SYMBOL_REF targets use call_value_internal_symref, while
;; indirect calls copy the address into %r22 and use call_value_internal_reg;
;; the in-line comment below explains why two patterns are kept (different
;; calling conventions, and defeating CSE's named->indirect conversion).
;; When generating PIC code the PIC register is restored after the call and a
;; blockage insn pins the restore next to the call.
4455 (define_expand "call_value"
4456 [(parallel [(set (match_operand 0 "" "")
4457 (call (match_operand:SI 1 "" "")
4458 (match_operand 2 "" "")))
4459 (clobber (reg:SI 2))])]
4466 if (TARGET_PORTABLE_RUNTIME)
4467 op = force_reg (word_mode, XEXP (operands[1], 0));
4469 op = XEXP (operands[1], 0);
4471 /* Use two different patterns for calls to explicitly named functions
4472 and calls through function pointers. This is necessary as these two
4473 types of calls use different calling conventions, and CSE might try
4474 to change the named call into an indirect call in some cases (using
4475 two patterns keeps CSE from performing this optimization). */
4476 if (GET_CODE (op) == SYMBOL_REF)
4477 call_insn = emit_call_insn (gen_call_value_internal_symref (operands[0],
4482 rtx tmpreg = gen_rtx_REG (word_mode, 22);
4483 emit_move_insn (tmpreg, force_reg (word_mode, op));
4484 call_insn = emit_call_insn (gen_call_value_internal_reg (operands[0],
4489 use_reg (&CALL_INSN_FUNCTION_USAGE (call_insn), pic_offset_table_rtx);
4491 /* After each call we must restore the PIC register, even if it
4492 doesn't appear to be used.
4494 This will set regs_ever_live for the callee saved register we
4495 stored the PIC register in. */
4496 emit_move_insn (pic_offset_table_rtx,
4497 gen_rtx_REG (word_mode, PIC_OFFSET_TABLE_REGNUM_SAVED));
4498 emit_insn (gen_rtx_USE (VOIDmode, pic_offset_table_rtx));
4500 /* Gross. We have to keep the scheduler from moving the restore
4501 of the PIC register away from the call. SCHED_GROUP_P is
4502 supposed to do this, but for some reason the compiler will
4503 go into an infinite loop when we use that.
4505 This method (blockage insn) may make worse code (then again
4506 it may not since calls are nearly blockages anyway), but at
4507 least it should work. */
4508 emit_insn (gen_blockage ())
;; Value-returning call to an explicitly named function (SYMBOL_REF target).
;; Not used for the portable runtime (see condition below).  Emits the HP
;; argument-relocation descriptor, then lets output_call pick the actual
;; branch sequence; the length attribute chooses short vs. long call based
;; on estimated code size (see comments inside the attribute).
;; NOTE(review): some interior lines of the template and attribute appear
;; elided in this extraction.
4513 (define_insn "call_value_internal_symref"
4514 [(set (match_operand 0 "" "=rf")
4515 (call (mem:SI (match_operand:SI 1 "call_operand_address" ""))
4516 (match_operand 2 "" "i")))
4517 (clobber (reg:SI 2))
4518 (use (const_int 0))]
4519 ;;- Don't use operand 1 for most machines.
4520 "! TARGET_PORTABLE_RUNTIME"
4523 output_arg_descriptor (insn);
4524 return output_call (insn, operands[1]);
4526 [(set_attr "type" "call")
4527 (set (attr "length")
4528 ;; If we're sure that we can either reach the target or that the
4529 ;; linker can use a long-branch stub, then the length is 4 bytes.
4531 ;; For long-calls the length will be either 52 bytes (non-pic)
4532 ;; or 68 bytes (pic).
4533 ;; Else we have to use a long-call;
4534 (if_then_else (lt (plus (symbol_ref "total_code_bytes") (pc))
4537 (if_then_else (eq (symbol_ref "flag_pic")
;; Value-returning indirect call: the target address has been copied into
;; %r22 by the call_value expander, and the actual call goes through the
;; $$dyncall millicode helper.  The output code keys off get_attr_length:
;;   - NO_SPACE_REGS / FAST_INDIRECT_CALLS: direct "ble 0(%sr4,%r22)".
;;   - length 8:  $$dyncall reachable directly (or via linker stub).
;;   - length 12: long millicode call, non-PIC, non-portable-runtime.
;;   - length 20: long millicode call for the portable runtime.
;;   - otherwise: PIC sequence computing $$dyncall's address PC-relatively.
;; NOTE(review): interior lines of the length attribute appear elided in
;; this extraction.
4542 (define_insn "call_value_internal_reg"
4543 [(set (match_operand 0 "" "=rf")
4544 (call (mem:SI (reg:SI 22))
4545 (match_operand 1 "" "i")))
4546 (clobber (reg:SI 2))
4547 (use (const_int 1))]
4553 /* First the special case for kernels, level 0 systems, etc. */
4554 if (TARGET_NO_SPACE_REGS || TARGET_FAST_INDIRECT_CALLS)
4555 return \"ble 0(%%sr4,%%r22)\;copy %%r31,%%r2\";
4557 /* Now the normal case -- we can reach $$dyncall directly or
4558 we're sure that we can get there via a long-branch stub.
4560 No need to check target flags as the length uniquely identifies
4561 the remaining cases. */
4562 if (get_attr_length (insn) == 8)
4563 return \".CALL\\tARGW0=GR\;bl $$dyncall,%%r31\;copy %%r31,%%r2\";
4565 /* Long millicode call, but we are not generating PIC or portable runtime
4567 if (get_attr_length (insn) == 12)
4568 return \".CALL\\tARGW0=GR\;ldil L%%$$dyncall,%%r2\;ble R%%$$dyncall(%%sr4,%%r2)\;copy %%r31,%%r2\";
4570 /* Long millicode call for portable runtime. */
4571 if (get_attr_length (insn) == 20)
4572 return \"ldil L%%$$dyncall,%%r31\;ldo R%%$$dyncall(%%r31),%%r31\;blr %%r0,%%r2\;bv,n %%r0(%%r31)\;nop\";
4574 /* If we're generating PIC code. */
4575 xoperands[0] = operands[1];
4576 xoperands[1] = gen_label_rtx ();
4577 output_asm_insn (\"bl .+8,%%r1\", xoperands);
4578 output_asm_insn (\"addil L%%$$dyncall-%1,%%r1\", xoperands);
4579 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, \"L\",
4580 CODE_LABEL_NUMBER (xoperands[1]));
4581 output_asm_insn (\"ldo R%%$$dyncall-%1(%%r1),%%r1\", xoperands);
4582 output_asm_insn (\"blr %%r0,%%r2\", xoperands);
4583 output_asm_insn (\"bv,n %%r0(%%r1)\\n\\tnop\", xoperands);
4586 [(set_attr "type" "dyncall")
4587 (set (attr "length")
4589 ;; First NO_SPACE_REGS
4590 (ne (symbol_ref "TARGET_NO_SPACE_REGS || TARGET_FAST_INDIRECT_CALLS")
4594 ;; Target (or stub) within reach
4595 (and (lt (plus (symbol_ref "total_code_bytes") (pc))
4597 (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
4601 ;; Out of reach, but not PIC or PORTABLE_RUNTIME
4602 (and (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
4604 (eq (symbol_ref "flag_pic")
4608 (ne (symbol_ref "TARGET_PORTABLE_RUNTIME")
4612 ;; Out of range PIC case
;; Expander for __builtin_apply-style calls: emit the call itself, then copy
;; each (result-register <- result-block slot) set from operands[2], and
;; finally emit a blockage so the optimizer cannot move or delete the copies
;; (it does not know the call defines those registers -- see comment below).
;; NOTE(review): interior lines appear elided in this extraction.
4615 ;; Call subroutine returning any type.
4617 (define_expand "untyped_call"
4618 [(parallel [(call (match_operand 0 "" "")
4620 (match_operand 1 "" "")
4621 (match_operand 2 "" "")])]
4627 emit_call_insn (gen_call (operands[0], const0_rtx));
4629 for (i = 0; i < XVECLEN (operands[2], 0); i++)
4631 rtx set = XVECEXP (operands[2], 0, i);
4632 emit_move_insn (SET_DEST (set), SET_SRC (set));
4635 /* The optimizer does not know that the call sets the function value
4636 registers we stored in the result block. We avoid problems by
4637 claiming that all hard registers are used and clobbered at this
4639 emit_insn (gen_blockage ())
4647 [(set_attr "type" "move")
4648 (set_attr "length" "4")])
;; Marker emitted before a branch table.  Expands to the GAS-only
;; ".begin_brtab" pseudo-op and occupies zero bytes of code.
;; NOTE(review): the insn pattern body between the define_insn line and the
;; output code appears elided in this extraction.
4650 ;; These are just placeholders so we know where branch tables
4652 (define_insn "begin_brtab"
4657 /* Only GAS actually supports this pseudo-op. */
4659 return \".begin_brtab\";
4663 [(set_attr "type" "move")
4664 (set_attr "length" "0")])
;; Companion marker emitted after a branch table: the GAS-only ".end_brtab"
;; pseudo-op, zero length.  See begin_brtab above.
;; NOTE(review): the insn pattern body appears elided in this extraction.
4666 (define_insn "end_brtab"
4671 /* Only GAS actually supports this pseudo-op. */
4673 return \".end_brtab\";
4677 [(set_attr "type" "move")
4678 (set_attr "length" "0")])
;; Indirect jump through a register; the condition restricts the operand to
;; word_mode.  Single 4-byte branch instruction.
;; NOTE(review): the output template line appears elided in this extraction
;; -- presumably a "bv"-style branch; confirm against the full file.
4680 ;;; Hope this is only within a function...
4681 (define_insn "indirect_jump"
4682 [(set (pc) (match_operand 0 "register_operand" "r"))]
4683 "GET_MODE (operands[0]) == word_mode"
4685 [(set_attr "type" "branch")
4686 (set_attr "length" "4")])
;; Unsigned bit-field extract (zero_extract) with constant length (operand 2)
;; and constant starting position (operand 3), both 5-bit unsigned values.
;; Maps directly onto PA "extru src,pos,len,dst", where the hardware "pos"
;; is the bit index of the field's last bit, hence the %3+%2-1 expression.
4688 (define_insn "extzv"
4689 [(set (match_operand:SI 0 "register_operand" "=r")
4690 (zero_extract:SI (match_operand:SI 1 "register_operand" "r")
4691 (match_operand:SI 2 "uint5_operand" "")
4692 (match_operand:SI 3 "uint5_operand" "")))]
4694 "extru %1,%3+%2-1,%2,%0"
4695 [(set_attr "type" "shift")
4696 (set_attr "length" "4")])
4699 [(set (match_operand:SI 0 "register_operand" "=r")
4700 (zero_extract:SI (match_operand:SI 1 "register_operand" "r")
4702 (match_operand:SI 3 "register_operand" "q")))]
4705 [(set_attr "type" "shift")
4706 (set_attr "length" "4")])
4709 [(set (match_operand:SI 0 "register_operand" "=r")
4710 (sign_extract:SI (match_operand:SI 1 "register_operand" "r")
4711 (match_operand:SI 2 "uint5_operand" "")
4712 (match_operand:SI 3 "uint5_operand" "")))]
4714 "extrs %1,%3+%2-1,%2,%0"
4715 [(set_attr "type" "shift")
4716 (set_attr "length" "4")])
4719 [(set (match_operand:SI 0 "register_operand" "=r")
4720 (sign_extract:SI (match_operand:SI 1 "register_operand" "r")
4722 (match_operand:SI 3 "register_operand" "q")))]
4725 [(set_attr "type" "shift")
4726 (set_attr "length" "4")])
4729 [(set (zero_extract:SI (match_operand:SI 0 "register_operand" "+r,r")
4730 (match_operand:SI 1 "uint5_operand" "")
4731 (match_operand:SI 2 "uint5_operand" ""))
4732 (match_operand:SI 3 "arith5_operand" "r,L"))]
4735 dep %3,%2+%1-1,%1,%0
4736 depi %3,%2+%1-1,%1,%0"
4737 [(set_attr "type" "shift,shift")
4738 (set_attr "length" "4,4")])
4740 ;; Optimize insertion of const_int values of type 1...1xxxx.
4742 [(set (zero_extract:SI (match_operand:SI 0 "register_operand" "+r")
4743 (match_operand:SI 1 "uint5_operand" "")
4744 (match_operand:SI 2 "uint5_operand" ""))
4745 (match_operand:SI 3 "const_int_operand" ""))]
4746 "(INTVAL (operands[3]) & 0x10) != 0 &&
4747 (~INTVAL (operands[3]) & ((1L << INTVAL (operands[1])) - 1) & ~0xf) == 0"
4750 operands[3] = GEN_INT ((INTVAL (operands[3]) & 0xf) - 0x10);
4751 return \"depi %3,%2+%1-1,%1,%0\";
4753 [(set_attr "type" "shift")
4754 (set_attr "length" "4")])
4756 ;; This insn is used for some loop tests, typically loops reversed when
4757 ;; strength reduction is used. It is actually created when the instruction
4758 ;; combination phase combines the special loop test. Since this insn
4759 ;; is both a jump insn and has an output, it must deal with its own
4760 ;; reloads, hence the `m' constraints. The `!' constraints direct reload
4761 ;; to not choose the register alternatives in the event a reload is needed.
;; Three alternatives: loop counter in a general register (real dbra-style
;; cbranch), in an FP register, or in memory; the latter two are "multi"
;; sequences that do their own reloading through the scratch in operand 4.
;; The length attribute below picks short vs. long forms from the branch
;; displacement (abs (pc - label)), with extra bytes for the reload goo.
;; NOTE(review): interior lines of the pattern and attribute appear elided
;; in this extraction.
4762 (define_insn "decrement_and_branch_until_zero"
4765 (match_operator 2 "comparison_operator"
4766 [(plus:SI (match_operand:SI 0 "register_operand" "+!r,!*f,!*m")
4767 (match_operand:SI 1 "int5_operand" "L,L,L"))
4769 (label_ref (match_operand 3 "" ""))
4772 (plus:SI (match_dup 0) (match_dup 1)))
4773 (clobber (match_scratch:SI 4 "=X,r,r"))]
4775 "* return output_dbra (operands, insn, which_alternative); "
4776 ;; Do not expect to understand this the first time through.
4777 [(set_attr "type" "cbranch,multi,multi")
4778 (set (attr "length")
4779 (if_then_else (eq_attr "alternative" "0")
4780 ;; Loop counter in register case
4781 ;; Short branch has length of 4
4782 ;; Long branch has length of 8
4783 (if_then_else (lt (abs (minus (match_dup 3) (plus (pc) (const_int 8))))
4788 ;; Loop counter in FP reg case.
4789 ;; Extra goo to deal with additional reload insns.
4790 (if_then_else (eq_attr "alternative" "1")
4791 (if_then_else (lt (match_dup 3) (pc))
4793 (lt (abs (minus (match_dup 3) (plus (pc) (const_int 24))))
4798 (lt (abs (minus (match_dup 3) (plus (pc) (const_int 8))))
4802 ;; Loop counter in memory case.
4803 ;; Extra goo to deal with additional reload insns.
4804 (if_then_else (lt (match_dup 3) (pc))
4806 (lt (abs (minus (match_dup 3) (plus (pc) (const_int 12))))
4811 (lt (abs (minus (match_dup 3) (plus (pc) (const_int 8))))
4814 (const_int 16))))))])
4819 (match_operator 2 "movb_comparison_operator"
4820 [(match_operand:SI 1 "register_operand" "r,r,r,r") (const_int 0)])
4821 (label_ref (match_operand 3 "" ""))
4823 (set (match_operand:SI 0 "register_operand" "=!r,!*f,!*m,!*q")
4826 "* return output_movb (operands, insn, which_alternative, 0); "
4827 ;; Do not expect to understand this the first time through.
4828 [(set_attr "type" "cbranch,multi,multi,multi")
4829 (set (attr "length")
4830 (if_then_else (eq_attr "alternative" "0")
4831 ;; Loop counter in register case
4832 ;; Short branch has length of 4
4833 ;; Long branch has length of 8
4834 (if_then_else (lt (abs (minus (match_dup 3) (plus (pc) (const_int 8))))
4839 ;; Loop counter in FP reg case.
4840 ;; Extra goo to deal with additional reload insns.
4841 (if_then_else (eq_attr "alternative" "1")
4842 (if_then_else (lt (match_dup 3) (pc))
4844 (lt (abs (minus (match_dup 3) (plus (pc) (const_int 12))))
4849 (lt (abs (minus (match_dup 3) (plus (pc) (const_int 8))))
4853 ;; Loop counter in memory or sar case.
4854 ;; Extra goo to deal with additional reload insns.
4856 (lt (abs (minus (match_dup 3) (plus (pc) (const_int 8))))
4859 (const_int 12)))))])
4861 ;; Handle negated branch.
4865 (match_operator 2 "movb_comparison_operator"
4866 [(match_operand:SI 1 "register_operand" "r,r,r,r") (const_int 0)])
4868 (label_ref (match_operand 3 "" ""))))
4869 (set (match_operand:SI 0 "register_operand" "=!r,!*f,!*m,!*q")
4872 "* return output_movb (operands, insn, which_alternative, 1); "
4873 ;; Do not expect to understand this the first time through.
4874 [(set_attr "type" "cbranch,multi,multi,multi")
4875 (set (attr "length")
4876 (if_then_else (eq_attr "alternative" "0")
4877 ;; Loop counter in register case
4878 ;; Short branch has length of 4
4879 ;; Long branch has length of 8
4880 (if_then_else (lt (abs (minus (match_dup 3) (plus (pc) (const_int 8))))
4885 ;; Loop counter in FP reg case.
4886 ;; Extra goo to deal with additional reload insns.
4887 (if_then_else (eq_attr "alternative" "1")
4888 (if_then_else (lt (match_dup 3) (pc))
4890 (lt (abs (minus (match_dup 3) (plus (pc) (const_int 12))))
4895 (lt (abs (minus (match_dup 3) (plus (pc) (const_int 8))))
4899 ;; Loop counter in memory or SAR case.
4900 ;; Extra goo to deal with additional reload insns.
4902 (lt (abs (minus (match_dup 3) (plus (pc) (const_int 8))))
4905 (const_int 12)))))])
4907 ;; The next several patterns (parallel_addb, parallel_movb, fmpyadd and
4908 ;; fmpysub aren't currently used by the FSF sources, but will be soon.
4910 ;; They're in the FSF tree for documentation and to make Cygnus<->FSF
4913 [(set (pc) (label_ref (match_operand 3 "" "" )))
4914 (set (match_operand:SI 0 "register_operand" "=r")
4915 (plus:SI (match_operand:SI 1 "register_operand" "r")
4916 (match_operand:SI 2 "ireg_or_int5_operand" "rL")))]
4917 "(reload_completed && operands[0] == operands[1]) || operands[0] == operands[2]"
4920 return output_parallel_addb (operands, get_attr_length (insn));
4922 [(set_attr "type" "parallel_branch")
4923 (set (attr "length")
4924 (if_then_else (lt (abs (minus (match_dup 3) (plus (pc) (const_int 8))))
4930 [(set (pc) (label_ref (match_operand 2 "" "" )))
4931 (set (match_operand:SF 0 "register_operand" "=r")
4932 (match_operand:SF 1 "ireg_or_int5_operand" "rL"))]
4936 return output_parallel_movb (operands, get_attr_length (insn));
4938 [(set_attr "type" "parallel_branch")
4939 (set (attr "length")
4940 (if_then_else (lt (abs (minus (match_dup 2) (plus (pc) (const_int 8))))
4946 [(set (pc) (label_ref (match_operand 2 "" "" )))
4947 (set (match_operand:SI 0 "register_operand" "=r")
4948 (match_operand:SI 1 "ireg_or_int5_operand" "rL"))]
4952 return output_parallel_movb (operands, get_attr_length (insn));
4954 [(set_attr "type" "parallel_branch")
4955 (set (attr "length")
4956 (if_then_else (lt (abs (minus (match_dup 2) (plus (pc) (const_int 8))))
4962 [(set (pc) (label_ref (match_operand 2 "" "" )))
4963 (set (match_operand:HI 0 "register_operand" "=r")
4964 (match_operand:HI 1 "ireg_or_int5_operand" "rL"))]
4968 return output_parallel_movb (operands, get_attr_length (insn));
4970 [(set_attr "type" "parallel_branch")
4971 (set (attr "length")
4972 (if_then_else (lt (abs (minus (match_dup 2) (plus (pc) (const_int 8))))
4978 [(set (pc) (label_ref (match_operand 2 "" "" )))
4979 (set (match_operand:QI 0 "register_operand" "=r")
4980 (match_operand:QI 1 "ireg_or_int5_operand" "rL"))]
4984 return output_parallel_movb (operands, get_attr_length (insn));
4986 [(set_attr "type" "parallel_branch")
4987 (set (attr "length")
4988 (if_then_else (lt (abs (minus (match_dup 2) (plus (pc) (const_int 8))))
4994 [(set (match_operand 0 "register_operand" "=f")
4995 (mult (match_operand 1 "register_operand" "f")
4996 (match_operand 2 "register_operand" "f")))
4997 (set (match_operand 3 "register_operand" "+f")
4998 (plus (match_operand 4 "register_operand" "f")
4999 (match_operand 5 "register_operand" "f")))]
5000 "TARGET_PA_11 && ! TARGET_SOFT_FLOAT
5001 && reload_completed && fmpyaddoperands (operands)"
5004 if (GET_MODE (operands[0]) == DFmode)
5006 if (rtx_equal_p (operands[3], operands[5]))
5007 return \"fmpyadd,dbl %1,%2,%0,%4,%3\";
5009 return \"fmpyadd,dbl %1,%2,%0,%5,%3\";
5013 if (rtx_equal_p (operands[3], operands[5]))
5014 return \"fmpyadd,sgl %1,%2,%0,%4,%3\";
5016 return \"fmpyadd,sgl %1,%2,%0,%5,%3\";
5019 [(set_attr "type" "fpalu")
5020 (set_attr "length" "4")])
5023 [(set (match_operand 3 "register_operand" "+f")
5024 (plus (match_operand 4 "register_operand" "f")
5025 (match_operand 5 "register_operand" "f")))
5026 (set (match_operand 0 "register_operand" "=f")
5027 (mult (match_operand 1 "register_operand" "f")
5028 (match_operand 2 "register_operand" "f")))]
5029 "TARGET_PA_11 && ! TARGET_SOFT_FLOAT
5030 && reload_completed && fmpyaddoperands (operands)"
5033 if (GET_MODE (operands[0]) == DFmode)
5035 if (rtx_equal_p (operands[3], operands[5]))
5036 return \"fmpyadd,dbl %1,%2,%0,%4,%3\";
5038 return \"fmpyadd,dbl %1,%2,%0,%5,%3\";
5042 if (rtx_equal_p (operands[3], operands[5]))
5043 return \"fmpyadd,sgl %1,%2,%0,%4,%3\";
5045 return \"fmpyadd,sgl %1,%2,%0,%5,%3\";
5048 [(set_attr "type" "fpalu")
5049 (set_attr "length" "4")])
5052 [(set (match_operand 0 "register_operand" "=f")
5053 (mult (match_operand 1 "register_operand" "f")
5054 (match_operand 2 "register_operand" "f")))
5055 (set (match_operand 3 "register_operand" "+f")
5056 (minus (match_operand 4 "register_operand" "f")
5057 (match_operand 5 "register_operand" "f")))]
5058 "TARGET_PA_11 && ! TARGET_SOFT_FLOAT
5059 && reload_completed && fmpysuboperands (operands)"
5062 if (GET_MODE (operands[0]) == DFmode)
5063 return \"fmpysub,dbl %1,%2,%0,%5,%3\";
5065 return \"fmpysub,sgl %1,%2,%0,%5,%3\";
5067 [(set_attr "type" "fpalu")
5068 (set_attr "length" "4")])
5071 [(set (match_operand 3 "register_operand" "+f")
5072 (minus (match_operand 4 "register_operand" "f")
5073 (match_operand 5 "register_operand" "f")))
5074 (set (match_operand 0 "register_operand" "=f")
5075 (mult (match_operand 1 "register_operand" "f")
5076 (match_operand 2 "register_operand" "f")))]
5077 "TARGET_PA_11 && ! TARGET_SOFT_FLOAT
5078 && reload_completed && fmpysuboperands (operands)"
5081 if (GET_MODE (operands[0]) == DFmode)
5082 return \"fmpysub,dbl %1,%2,%0,%5,%3\";
5084 return \"fmpysub,sgl %1,%2,%0,%5,%3\";
5086 [(set_attr "type" "fpalu")
5087 (set_attr "length" "4")])
5089 ;; Clean up turds left by reload.
5091 [(set (match_operand 0 "reg_or_nonsymb_mem_operand" "")
5092 (match_operand 1 "register_operand" "fr"))
5093 (set (match_operand 2 "register_operand" "fr")
5095 "! TARGET_SOFT_FLOAT
5096 && GET_CODE (operands[0]) == MEM
5097 && ! MEM_VOLATILE_P (operands[0])
5098 && GET_MODE (operands[0]) == GET_MODE (operands[1])
5099 && GET_MODE (operands[0]) == GET_MODE (operands[2])
5100 && GET_MODE (operands[0]) == DFmode
5101 && GET_CODE (operands[1]) == REG
5102 && GET_CODE (operands[2]) == REG
5103 && ! side_effects_p (XEXP (operands[0], 0))
5104 && REGNO_REG_CLASS (REGNO (operands[1]))
5105 == REGNO_REG_CLASS (REGNO (operands[2]))"
5110 if (FP_REG_P (operands[1]))
5111 output_asm_insn (output_fp_move_double (operands), operands);
5113 output_asm_insn (output_move_double (operands), operands);
5115 if (rtx_equal_p (operands[1], operands[2]))
5118 xoperands[0] = operands[2];
5119 xoperands[1] = operands[1];
5121 if (FP_REG_P (xoperands[1]))
5122 output_asm_insn (output_fp_move_double (xoperands), xoperands);
5124 output_asm_insn (output_move_double (xoperands), xoperands);
5130 [(set (match_operand 0 "register_operand" "fr")
5131 (match_operand 1 "reg_or_nonsymb_mem_operand" ""))
5132 (set (match_operand 2 "register_operand" "fr")
5134 "! TARGET_SOFT_FLOAT
5135 && GET_CODE (operands[1]) == MEM
5136 && ! MEM_VOLATILE_P (operands[1])
5137 && GET_MODE (operands[0]) == GET_MODE (operands[1])
5138 && GET_MODE (operands[0]) == GET_MODE (operands[2])
5139 && GET_MODE (operands[0]) == DFmode
5140 && GET_CODE (operands[0]) == REG
5141 && GET_CODE (operands[2]) == REG
5142 && ! side_effects_p (XEXP (operands[1], 0))
5143 && REGNO_REG_CLASS (REGNO (operands[0]))
5144 == REGNO_REG_CLASS (REGNO (operands[2]))"
5149 if (FP_REG_P (operands[0]))
5150 output_asm_insn (output_fp_move_double (operands), operands);
5152 output_asm_insn (output_move_double (operands), operands);
5154 xoperands[0] = operands[2];
5155 xoperands[1] = operands[0];
5157 if (FP_REG_P (xoperands[1]))
5158 output_asm_insn (output_fp_move_double (xoperands), xoperands);
5160 output_asm_insn (output_move_double (xoperands), xoperands);
;; Flush two data-cache lines (the addresses in operands 0 and 1) with fdc,
;; then sync.  Three instructions = 12 bytes.  Used by the nested-function
;; trampoline code, per the comment below.
;; NOTE(review): the insn's condition line appears elided in this extraction.
5165 ;; Flush the I and D cache line found at the address in operand 0.
5166 ;; This is used by the trampoline code for nested functions.
5167 ;; So long as the trampoline itself is less than 32 bytes this
5170 (define_insn "dcacheflush"
5171 [(unspec_volatile [(const_int 1)] 0)
5172 (use (mem:SI (match_operand 0 "register_operand" "r")))
5173 (use (mem:SI (match_operand 1 "register_operand" "r")))]
5175 "fdc 0(%0)\;fdc 0(%1)\;sync"
5176 [(set_attr "type" "multi")
5177 (set_attr "length" "12")])
;; Flush two instruction-cache lines.  Saves %sr0 into scratch operand 4,
;; loads the space id of the address in operand 2 (ldsid) into scratch
;; operand 3, installs it in %sr0, issues fic on both addresses, syncs,
;; restores %sr0, then pads with nops -- 13 instructions = 52 bytes.
;; Both scratches are earlyclobber ("=&r") since they are written before
;; all inputs are consumed.
;; NOTE(review): the insn's condition line appears elided in this extraction.
5179 (define_insn "icacheflush"
5180 [(unspec_volatile [(const_int 2)] 0)
5181 (use (mem:SI (match_operand 0 "register_operand" "r")))
5182 (use (mem:SI (match_operand 1 "register_operand" "r")))
5183 (use (match_operand 2 "register_operand" "r"))
5184 (clobber (match_operand 3 "register_operand" "=&r"))
5185 (clobber (match_operand 4 "register_operand" "=&r"))]
5187 "mfsp %%sr0,%4\;ldsid (%2),%3\;mtsp %3,%%sr0\;fic 0(%%sr0,%0)\;fic 0(%%sr0,%1)\;sync\;mtsp %4,%%sr0\;nop\;nop\;nop\;nop\;nop\;nop"
5188 [(set_attr "type" "multi")
5189 (set_attr "length" "52")])
;; Call the __outline_prologue / __outline_prologue_fp millicode routine to
;; perform the function prologue out of line.  Two variants, chosen by
;; frame_pointer_needed; each emits an .IMPORT, then either an ldil/ble pair
;; (portable runtime) or a plain bl.  Clobbers the caller-saved registers
;; listed in the pattern.  Length 8 bytes.
;; NOTE(review): interior lines (condition, braces, some output calls)
;; appear elided in this extraction.
5191 ;; An out-of-line prologue.
5192 (define_insn "outline_prologue_call"
5193 [(unspec_volatile [(const_int 0)] 0)
5194 (clobber (reg:SI 31))
5195 (clobber (reg:SI 22))
5196 (clobber (reg:SI 21))
5197 (clobber (reg:SI 20))
5198 (clobber (reg:SI 19))
5199 (clobber (reg:SI 1))]
5203 extern int frame_pointer_needed;
5205 /* We need two different versions depending on whether or not we
5206 need a frame pointer. Also note that we return to the instruction
5207 immediately after the branch rather than two instructions after the
5208 break as normally is the case. */
5209 if (frame_pointer_needed)
5211 /* Must import the magic millicode routine(s). */
5212 output_asm_insn (\".IMPORT __outline_prologue_fp,MILLICODE\", NULL);
5214 if (TARGET_PORTABLE_RUNTIME)
5216 output_asm_insn (\"ldil L'__outline_prologue_fp,%%r31\", NULL);
5217 output_asm_insn (\"ble,n R'__outline_prologue_fp(%%sr0,%%r31)\",
5221 output_asm_insn (\"bl,n __outline_prologue_fp,%%r31\", NULL);
5225 /* Must import the magic millicode routine(s). */
5226 output_asm_insn (\".IMPORT __outline_prologue,MILLICODE\", NULL);
5228 if (TARGET_PORTABLE_RUNTIME)
5230 output_asm_insn (\"ldil L'__outline_prologue,%%r31\", NULL);
5231 output_asm_insn (\"ble,n R'__outline_prologue(%%sr0,%%r31)\", NULL);
5234 output_asm_insn (\"bl,n __outline_prologue,%%r31\", NULL);
5238 [(set_attr "type" "multi")
5239 (set_attr "length" "8")])
;; Counterpart of outline_prologue_call: call __outline_epilogue /
;; __outline_epilogue_fp millicode to perform the epilogue out of line.
;; Same structure -- .IMPORT plus either ldil/ble (portable runtime) or bl;
;; additionally clobbers %r2 since the millicode handles the return.
;; Length 8 bytes.
;; NOTE(review): interior lines appear elided in this extraction.
5241 ;; An out-of-line epilogue.
5242 (define_insn "outline_epilogue_call"
5243 [(unspec_volatile [(const_int 1)] 0)
5246 (clobber (reg:SI 31))
5247 (clobber (reg:SI 22))
5248 (clobber (reg:SI 21))
5249 (clobber (reg:SI 20))
5250 (clobber (reg:SI 19))
5251 (clobber (reg:SI 2))
5252 (clobber (reg:SI 1))]
5256 extern int frame_pointer_needed;
5258 /* We need two different versions depending on whether or not we
5259 need a frame pointer. Also note that we return to the instruction
5260 immediately after the branch rather than two instructions after the
5261 break as normally is the case. */
5262 if (frame_pointer_needed)
5264 /* Must import the magic millicode routine. */
5265 output_asm_insn (\".IMPORT __outline_epilogue_fp,MILLICODE\", NULL);
5267 /* The out-of-line prologue will make sure we return to the right
5269 if (TARGET_PORTABLE_RUNTIME)
5271 output_asm_insn (\"ldil L'__outline_epilogue_fp,%%r31\", NULL);
5272 output_asm_insn (\"ble,n R'__outline_epilogue_fp(%%sr0,%%r31)\",
5276 output_asm_insn (\"bl,n __outline_epilogue_fp,%%r31\", NULL);
5280 /* Must import the magic millicode routine. */
5281 output_asm_insn (\".IMPORT __outline_epilogue,MILLICODE\", NULL);
5283 /* The out-of-line prologue will make sure we return to the right
5285 if (TARGET_PORTABLE_RUNTIME)
5287 output_asm_insn (\"ldil L'__outline_epilogue,%%r31\", NULL);
5288 output_asm_insn (\"ble,n R'__outline_epilogue(%%sr0,%%r31)\", NULL);
5291 output_asm_insn (\"bl,n __outline_epilogue,%%r31\", NULL);
5295 [(set_attr "type" "multi")
5296 (set_attr "length" "8")])
5298 ;; Given a function pointer, canonicalize it so it can be
5299 ;; reliably compared to another function pointer.
;; Expander: canonicalize a function pointer for comparison.  Copies the
;; input into %r26, runs the unspec insn (which may call the $$sh_func_adrs
;; millicode routine -- see the insn following this expander) producing the
;; canonical value in %r29, then moves %r29 to operand 0.  Non-REG inputs
;; are first forced into a pseudo.  Disabled for the portable runtime.
;; NOTE(review): trailing lines of the preparation code appear elided in
;; this extraction.
5300 (define_expand "canonicalize_funcptr_for_compare"
5301 [(set (reg:SI 26) (match_operand:SI 1 "register_operand" ""))
5302 (parallel [(set (reg:SI 29) (unspec:SI [(reg:SI 26)] 0))
5303 (clobber (match_dup 2))
5304 (clobber (reg:SI 26))
5305 (clobber (reg:SI 22))
5306 (clobber (reg:SI 31))])
5307 (set (match_operand:SI 0 "register_operand" "")
5309 "! TARGET_PORTABLE_RUNTIME"
5312 operands[2] = gen_reg_rtx (SImode);
5313 if (GET_CODE (operands[1]) != REG)
5315 rtx tmp = gen_reg_rtx (Pmode);
5316 emit_move_insn (tmp, operands[1]);
5322 [(set (reg:SI 29) (unspec:SI [(reg:SI 26)] 0))
5323 (clobber (match_operand:SI 0 "register_operand" "=a"))
5324 (clobber (reg:SI 26))
5325 (clobber (reg:SI 22))
5326 (clobber (reg:SI 31))]
5330 /* Must import the magic millicode routine. */
5331 output_asm_insn (\".IMPORT $$sh_func_adrs,MILLICODE\", NULL);
5333 /* This is absolutely amazing.
5335 First, copy our input parameter into %r29 just in case we don't
5336 need to call $$sh_func_adrs. */
5337 output_asm_insn (\"copy %%r26,%%r29\", NULL);
5339 /* Next, examine the low two bits in %r26, if they aren't 0x2, then
5340 we use %r26 unchanged. */
5341 if (get_attr_length (insn) == 32)
5342 output_asm_insn (\"extru %%r26,31,2,%%r31\;comib,<>,n 2,%%r31,.+24\", NULL);
5343 else if (get_attr_length (insn) == 40)
5344 output_asm_insn (\"extru %%r26,31,2,%%r31\;comib,<>,n 2,%%r31,.+32\", NULL);
5345 else if (get_attr_length (insn) == 44)
5346 output_asm_insn (\"extru %%r26,31,2,%%r31\;comib,<>,n 2,%%r31,.+36\", NULL);
5348 output_asm_insn (\"extru %%r26,31,2,%%r31\;comib,<>,n 2,%%r31,.+20\", NULL);
5350 /* Next, compare %r26 with 4096, if %r26 is less than or equal to
5351 4096, then we use %r26 unchanged. */
5352 if (get_attr_length (insn) == 32)
5353 output_asm_insn (\"ldi 4096,%%r31\;comb,<<,n %%r26,%%r31,.+16\", NULL);
5354 else if (get_attr_length (insn) == 40)
5355 output_asm_insn (\"ldi 4096,%%r31\;comb,<<,n %%r26,%%r31,.+24\", NULL);
5356 else if (get_attr_length (insn) == 44)
5357 output_asm_insn (\"ldi 4096,%%r31\;comb,<<,n %%r26,%%r31,.+28\", NULL);
5359 output_asm_insn (\"ldi 4096,%%r31\;comb,<<,n %%r26,%%r31,.+12\", NULL);
5361 /* Else call $$sh_func_adrs to extract the function's real address. */
5362 return output_millicode_call (insn,
5363 gen_rtx_SYMBOL_REF (SImode, \"$$sh_func_adrs\"));
5365 [(set_attr "type" "multi")
5366 (set (attr "length")
5368 ;; Target (or stub) within reach
5369 (and (lt (plus (symbol_ref "total_code_bytes") (pc))
5371 (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
5376 (ne (symbol_ref "TARGET_NO_SPACE_REGS || TARGET_FAST_INDIRECT_CALLS")
5380 ;; Out of reach, but not PIC or PORTABLE_RUNTIME
5381 ;; same as NO_SPACE_REGS code
5382 (and (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
5384 (eq (symbol_ref "flag_pic")
5389 (ne (symbol_ref "TARGET_PORTABLE_RUNTIME")
5393 ;; Out of range and PIC
5396 ;; On the PA, the PIC register is call clobbered, so it must
5397 ;; be saved & restored around calls by the caller. If the call
5398 ;; doesn't return normally (nonlocal goto, or an exception is
5399 ;; thrown), then the code at the exception handler label must
5400 ;; restore the PIC register.
5401 (define_expand "exception_receiver"
5403 "!TARGET_PORTABLE_RUNTIME && flag_pic"
5406 /* Load the PIC register from the stack slot (in our caller's
5408 emit_move_insn (pic_offset_table_rtx,
5409 gen_rtx_MEM (SImode, plus_constant (stack_pointer_rtx, -32)));
5410 emit_insn (gen_rtx_USE (VOIDmode, pic_offset_table_rtx));
5411 emit_insn (gen_blockage ());