1 ;;- Machine description for HP PA-RISC architecture for GNU C compiler
2 ;; Copyright (C) 1992, 93-98, 1999 Free Software Foundation, Inc.
3 ;; Contributed by the Center for Software Science at the University
6 ;; This file is part of GNU CC.
8 ;; GNU CC is free software; you can redistribute it and/or modify
9 ;; it under the terms of the GNU General Public License as published by
10 ;; the Free Software Foundation; either version 2, or (at your option)
13 ;; GNU CC is distributed in the hope that it will be useful,
14 ;; but WITHOUT ANY WARRANTY; without even the implied warranty of
15 ;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 ;; GNU General Public License for more details.
18 ;; You should have received a copy of the GNU General Public License
19 ;; along with GNU CC; see the file COPYING. If not, write to
20 ;; the Free Software Foundation, 59 Temple Place - Suite 330,
21 ;; Boston, MA 02111-1307, USA.
23 ;; This gcc Version 2 machine description is inspired by sparc.md and
26 ;;- See file "rtl.def" for documentation on define_insn, match_*, et. al.
28 ;; Insn type. Used to default other attribute values.
30 ;; type "unary" insns have one input operand (1) and one output operand (0)
31 ;; type "binary" insns have two input operands (1,2) and one output (0)
34 "move,unary,binary,shift,nullshift,compare,load,store,uncond_branch,branch,cbranch,fbranch,call,dyncall,fpload,fpstore,fpalu,fpcc,fpmulsgl,fpmuldbl,fpdivsgl,fpdivdbl,fpsqrtsgl,fpsqrtdbl,multi,milli,parallel_branch"
35 (const_string "binary"))
;; Classifies insns for pairing/combination; "none" (the default) means
;; the insn does not participate.  NOTE(review): presumably consumed by
;; the pa_combine pass in pa.c -- confirm against that file.
37 (define_attr "pa_combine_type"
38 "fmpy,faddsub,uncond_branch,addmove,none"
39 (const_string "none"))
41 ;; Processor type (for scheduling, not code generation) -- this attribute
42 ;; must exactly match the processor_type enumeration in pa.h.
44 ;; FIXME: Add 800 scheduling for completeness?
46 (define_attr "cpu" "700,7100,7100LC,7200,8000" (const (symbol_ref "pa_cpu_attr")))
48 ;; Length (in # of bytes).
49 (define_attr "length" ""
50 (cond [(eq_attr "type" "load,fpload")
51 (if_then_else (match_operand 1 "symbolic_memory_operand" "")
52 (const_int 8) (const_int 4))
54 (eq_attr "type" "store,fpstore")
55 (if_then_else (match_operand 0 "symbolic_memory_operand" "")
56 (const_int 8) (const_int 4))
58 (eq_attr "type" "binary,shift,nullshift")
59 (if_then_else (match_operand 2 "arith_operand" "")
60 (const_int 4) (const_int 12))
62 (eq_attr "type" "move,unary,shift,nullshift")
63 (if_then_else (match_operand 1 "arith_operand" "")
64 (const_int 4) (const_int 8))]
;; Attribute values assumed for inline asm statements: treat each asm as
;; a single 4-byte "multi" insn for length and scheduling purposes.
68 (define_asm_attributes
69 [(set_attr "length" "4")
70 (set_attr "type" "multi")])
72 ;; Attributes for instruction and branch scheduling
74 ;; For conditional branches.
75 (define_attr "in_branch_delay" "false,true"
76 (if_then_else (and (eq_attr "type" "!uncond_branch,branch,cbranch,fbranch,call,dyncall,multi,milli,parallel_branch")
77 (eq_attr "length" "4"))
79 (const_string "false")))
81 ;; Disallow instructions which use the FPU since they will tie up the FPU
82 ;; even if the instruction is nullified.
83 (define_attr "in_nullified_branch_delay" "false,true"
84 (if_then_else (and (eq_attr "type" "!uncond_branch,branch,cbranch,fbranch,call,dyncall,multi,milli,fpcc,fpalu,fpmulsgl,fpmuldbl,fpdivsgl,fpdivdbl,fpsqrtsgl,fpsqrtdbl,parallel_branch")
85 (eq_attr "length" "4"))
87 (const_string "false")))
89 ;; For calls and millicode calls. Allow unconditional branches in the
91 (define_attr "in_call_delay" "false,true"
92 (cond [(and (eq_attr "type" "!uncond_branch,branch,cbranch,fbranch,call,dyncall,multi,milli,parallel_branch")
93 (eq_attr "length" "4"))
95 (eq_attr "type" "uncond_branch")
96 (if_then_else (ne (symbol_ref "TARGET_JUMP_IN_DELAY")
99 (const_string "false"))]
100 (const_string "false")))
103 ;; Call delay slot description.
;; One delay slot after a call; an insn may fill it when its
;; "in_call_delay" attribute is true.  The two (nil)s mean the slot is
;; never annulled.
104 (define_delay (eq_attr "type" "call")
105 [(eq_attr "in_call_delay" "true") (nil) (nil)])
107 ;; millicode call delay slot description. Note it disallows delay slot
108 ;; when TARGET_PORTABLE_RUNTIME is true.
109 (define_delay (eq_attr "type" "milli")
110 [(and (eq_attr "in_call_delay" "true")
111 (eq (symbol_ref "TARGET_PORTABLE_RUNTIME") (const_int 0)))
114 ;; Return and other similar instructions.
;; One non-annulled delay slot, filled by any insn whose
;; "in_branch_delay" attribute is true.
115 (define_delay (eq_attr "type" "branch,parallel_branch")
116 [(eq_attr "in_branch_delay" "true") (nil) (nil)])
118 ;; Floating point conditional branch delay slot description and
119 (define_delay (eq_attr "type" "fbranch")
120 [(eq_attr "in_branch_delay" "true")
121 (eq_attr "in_nullified_branch_delay" "true")
124 ;; Integer conditional branch delay slot description.
125 ;; Nullification of conditional branches on the PA is dependent on the
126 ;; direction of the branch. Forward branches nullify true and
127 ;; backward branches nullify false. If the direction is unknown
128 ;; then nullification is not allowed.
129 (define_delay (eq_attr "type" "cbranch")
130 [(eq_attr "in_branch_delay" "true")
;; Annul-if-true is only permitted on forward branches (see the note
;; above: forward branches nullify true).
131 (and (eq_attr "in_nullified_branch_delay" "true")
132 (attr_flag "forward"))
;; Annul-if-false is only permitted on backward branches.
133 (and (eq_attr "in_nullified_branch_delay" "true")
134 (attr_flag "backward"))])
136 (define_delay (and (eq_attr "type" "uncond_branch")
137 (eq (symbol_ref "following_call (insn)")
139 [(eq_attr "in_branch_delay" "true") (nil) (nil)])
141 ;; Function units of the HPPA. The following data is for the 700 CPUs
142 ;; (Mustang CPU + Timex FPU aka PA-89) because that's what I have the docs for.
143 ;; Scheduling instructions for PA-83 machines according to the Snake
144 ;; constraints shouldn't hurt.
146 ;; (define_function_unit {name} {num-units} {n-users} {test}
147 ;; {ready-delay} {issue-delay} [{conflict-list}])
150 ;; (Noted only for documentation; units that take one cycle do not need to
153 ;; (define_function_unit "alu" 1 0
154 ;; (and (eq_attr "type" "unary,shift,nullshift,binary,move,address")
155 ;; (eq_attr "cpu" "700"))
159 ;; Memory. Disregarding Cache misses, the Mustang memory times are:
160 ;; load: 2, fpload: 3
161 ;; store, fpstore: 3, no D-cache operations should be scheduled.
;; Loads: result ready after 2 cycles, unit not blocked (issue-delay 0).
163 (define_function_unit "pa700memory" 1 0
164 (and (eq_attr "type" "load,fpload")
165 (eq_attr "cpu" "700")) 2 0)
;; Stores: occupy the memory unit for 3 cycles (ready-delay 3,
;; issue-delay 3), matching the note above that no D-cache operation
;; should be scheduled during a store.
166 (define_function_unit "pa700memory" 1 0
167 (and (eq_attr "type" "store,fpstore")
168 (eq_attr "cpu" "700")) 3 3)
170 ;; The Timex (aka 700) has two floating-point units: ALU, and MUL/DIV/SQRT.
172 ;; Instruction Time Unit Minimum Distance (unit contention)
179 ;; fmpyadd 3 ALU,MPY 2
180 ;; fmpysub 3 ALU,MPY 2
181 ;; fmpycfxt 3 ALU,MPY 2
184 ;; fdiv,sgl 10 MPY 10
185 ;; fdiv,dbl 12 MPY 12
186 ;; fsqrt,sgl 14 MPY 14
187 ;; fsqrt,dbl 18 MPY 18
;; PA-700 (Timex) FP units.  In each description the first trailing
;; number is the result latency (ready-delay) and the second is the
;; minimum distance between issues to the same unit (issue-delay), per
;; the define_function_unit format documented above.
189 (define_function_unit "pa700fp_alu" 1 0
190 (and (eq_attr "type" "fpcc")
191 (eq_attr "cpu" "700")) 4 2)
192 (define_function_unit "pa700fp_alu" 1 0
193 (and (eq_attr "type" "fpalu")
194 (eq_attr "cpu" "700")) 3 2)
195 (define_function_unit "pa700fp_mpy" 1 0
196 (and (eq_attr "type" "fpmulsgl,fpmuldbl")
197 (eq_attr "cpu" "700")) 3 2)
;; Divide and sqrt are unpipelined: issue-delay equals latency, so the
;; MPY unit is busy for the whole operation.
198 (define_function_unit "pa700fp_mpy" 1 0
199 (and (eq_attr "type" "fpdivsgl")
200 (eq_attr "cpu" "700")) 10 10)
201 (define_function_unit "pa700fp_mpy" 1 0
202 (and (eq_attr "type" "fpdivdbl")
203 (eq_attr "cpu" "700")) 12 12)
204 (define_function_unit "pa700fp_mpy" 1 0
205 (and (eq_attr "type" "fpsqrtsgl")
206 (eq_attr "cpu" "700")) 14 14)
207 (define_function_unit "pa700fp_mpy" 1 0
208 (and (eq_attr "type" "fpsqrtdbl")
209 (eq_attr "cpu" "700")) 18 18)
211 ;; Function units for the 7100 and 7150. The 7100/7150 can dual-issue
212 ;; floating point computations with non-floating point computations (fp loads
213 ;; and stores are not fp computations).
216 ;; Memory. Disregarding Cache misses, memory loads take two cycles; stores also
217 ;; take two cycles, during which no Dcache operations should be scheduled.
218 ;; Any special cases are handled in pa_adjust_cost. The 7100, 7150 and 7100LC
219 ;; all have the same memory characteristics if one disregards cache misses.
;; Loads: 2-cycle latency, unit free immediately (issue-delay 0).
220 (define_function_unit "pa7100memory" 1 0
221 (and (eq_attr "type" "load,fpload")
222 (eq_attr "cpu" "7100,7100LC")) 2 0)
;; Stores: hold the memory unit for 2 cycles (no D-cache op may be
;; scheduled meanwhile, per the note above).
223 (define_function_unit "pa7100memory" 1 0
224 (and (eq_attr "type" "store,fpstore")
225 (eq_attr "cpu" "7100,7100LC")) 2 2)
227 ;; The 7100/7150 has three floating-point units: ALU, MUL, and DIV.
229 ;; Instruction Time Unit Minimum Distance (unit contention)
236 ;; fmpyadd 2 ALU,MPY 1
237 ;; fmpysub 2 ALU,MPY 1
238 ;; fmpycfxt 2 ALU,MPY 1
242 ;; fdiv,dbl 15 DIV 15
244 ;; fsqrt,dbl 15 DIV 15
;; PA-7100/7150 FP units: 2-cycle fully-pipelined ALU and multiplier
;; (issue-delay 1); the divide/sqrt unit is unpipelined, so issue-delay
;; equals the full latency.
246 (define_function_unit "pa7100fp_alu" 1 0
247 (and (eq_attr "type" "fpcc,fpalu")
248 (eq_attr "cpu" "7100")) 2 1)
249 (define_function_unit "pa7100fp_mpy" 1 0
250 (and (eq_attr "type" "fpmulsgl,fpmuldbl")
251 (eq_attr "cpu" "7100")) 2 1)
252 (define_function_unit "pa7100fp_div" 1 0
253 (and (eq_attr "type" "fpdivsgl,fpsqrtsgl")
254 (eq_attr "cpu" "7100")) 8 8)
255 (define_function_unit "pa7100fp_div" 1 0
256 (and (eq_attr "type" "fpdivdbl,fpsqrtdbl")
257 (eq_attr "cpu" "7100")) 15 15)
259 ;; To encourage dual issue we define function units corresponding to
260 ;; the instructions which can be dual issued. This is a rather crude
261 ;; approximation, the "pa7100nonflop" test in particular could be refined.
262 (define_function_unit "pa7100flop" 1 1
264 (eq_attr "type" "fpcc,fpalu,fpmulsgl,fpmuldbl,fpdivsgl,fpsqrtsgl,fpdivdbl,fpsqrtdbl")
265 (eq_attr "cpu" "7100")) 1 1)
267 (define_function_unit "pa7100nonflop" 1 1
269 (eq_attr "type" "!fpcc,fpalu,fpmulsgl,fpmuldbl,fpdivsgl,fpsqrtsgl,fpdivdbl,fpsqrtdbl")
270 (eq_attr "cpu" "7100")) 1 1)
273 ;; Memory subsystem works just like 7100/7150 (except for cache miss times which
274 ;; we don't model here).
276 ;; The 7100LC has three floating-point units: ALU, MUL, and DIV.
277 ;; Note divides and sqrt flops lock the cpu until the flop is
278 ;; finished. fmpy and xmpyu (fmpyi) lock the cpu for one cycle.
279 ;; There's no way to avoid the penalty.
281 ;; Instruction Time Unit Minimum Distance (unit contention)
288 ;; fmpyadd,sgl 2 ALU,MPY 1
289 ;; fmpyadd,dbl 3 ALU,MPY 2
290 ;; fmpysub,sgl 2 ALU,MPY 1
291 ;; fmpysub,dbl 3 ALU,MPY 2
292 ;; fmpycfxt,sgl 2 ALU,MPY 1
293 ;; fmpycfxt,dbl 3 ALU,MPY 2
298 ;; fdiv,dbl 15 DIV 15
300 ;; fsqrt,dbl 15 DIV 15
;; PA-7100LC (and 7200, modeled identically) FP units.  Single-precision
;; multiplies are 2-cycle/pipelined, double-precision 3-cycle (issue
;; every 2), per the table above; divide/sqrt are unpipelined
;; (issue-delay equals latency).
302 (define_function_unit "pa7100LCfp_alu" 1 0
303 (and (eq_attr "type" "fpcc,fpalu")
304 (eq_attr "cpu" "7100LC,7200")) 2 1)
305 (define_function_unit "pa7100LCfp_mpy" 1 0
306 (and (eq_attr "type" "fpmulsgl")
307 (eq_attr "cpu" "7100LC,7200")) 2 1)
308 (define_function_unit "pa7100LCfp_mpy" 1 0
309 (and (eq_attr "type" "fpmuldbl")
310 (eq_attr "cpu" "7100LC,7200")) 3 2)
311 (define_function_unit "pa7100LCfp_div" 1 0
312 (and (eq_attr "type" "fpdivsgl,fpsqrtsgl")
313 (eq_attr "cpu" "7100LC,7200")) 8 8)
314 (define_function_unit "pa7100LCfp_div" 1 0
315 (and (eq_attr "type" "fpdivdbl,fpsqrtdbl")
316 (eq_attr "cpu" "7100LC,7200")) 15 15)
318 ;; Define the various functional units for dual-issue.
320 ;; There's only one floating point unit.
321 (define_function_unit "pa7100LCflop" 1 1
323 (eq_attr "type" "fpcc,fpalu,fpmulsgl,fpmuldbl,fpdivsgl,fpsqrtsgl,fpdivdbl,fpsqrtdbl")
324 (eq_attr "cpu" "7100LC,7200")) 1 1)
326 ;; Shifts and memory ops execute in only one of the integer ALUs
327 (define_function_unit "pa7100LCshiftmem" 1 1
329 (eq_attr "type" "shift,nullshift,load,fpload,store,fpstore")
330 (eq_attr "cpu" "7100LC,7200")) 1 1)
332 ;; We have two basic ALUs.
333 (define_function_unit "pa7100LCalu" 2 1
335 (eq_attr "type" "!fpcc,fpalu,fpmulsgl,fpmuldbl,fpdivsgl,fpsqrtsgl,fpdivdbl,fpsqrtdbl")
336 (eq_attr "cpu" "7100LC,7200")) 1 1)
338 ;; I don't have complete information on the PA7200; however, most of
339 ;; what I've heard makes it look like a 7100LC without the store-store
340 ;; penalty. So that's how we'll model it.
342 ;; Memory. Disregarding Cache misses, memory loads and stores take
343 ;; two cycles. Any special cases are handled in pa_adjust_cost.
;; Loads and stores both take 2 cycles with no unit contention
;; (issue-delay 0) -- unlike the 7100LC, there is no store-store
;; penalty, per the note above.
344 (define_function_unit "pa7200memory" 1 0
345 (and (eq_attr "type" "load,fpload,store,fpstore")
346 (eq_attr "cpu" "7200")) 2 0)
348 ;; I don't have detailed information on the PA7200 FP pipeline, so I
349 ;; treat it just like the 7100LC pipeline.
350 ;; Similarly for the multi-issue fake units.
353 ;; Scheduling for the PA8000 is somewhat different than scheduling for a
354 ;; traditional architecture.
356 ;; The PA8000 has a large (56) entry reorder buffer that is split between
357 ;; memory and non-memory operations.
359 ;; The PA8000 can issue two memory and two non-memory operations per cycle to
360 ;; the function units. Similarly, the PA8000 can retire two memory and two
361 ;; non-memory operations per cycle.
363 ;; Given the large reorder buffer, the processor can hide most latencies.
364 ;; According to HP, they've got the best results by scheduling for retirement
365 ;; bandwidth with limited latency scheduling for floating point operations.
366 ;; Latency for integer operations and memory references is ignored.
368 ;; We claim floating point operations have a 2 cycle latency and are
369 ;; fully pipelined, except for div and sqrt which are not pipelined.
371 ;; It is not necessary to define the shifter and integer alu units.
373 ;; These first two define_function_unit descriptions model retirement from
374 ;; the reorder buffer.
375 (define_function_unit "pa8000lsu" 2 1
377 (eq_attr "type" "load,fpload,store,fpstore")
378 (eq_attr "cpu" "8000")) 1 1)
380 (define_function_unit "pa8000alu" 2 1
382 (eq_attr "type" "!load,fpload,store,fpstore")
383 (eq_attr "cpu" "8000")) 1 1)
385 ;; Claim floating point ops have a 2 cycle latency, excluding div and
386 ;; sqrt, which are not pipelined and issue to different units.
387 (define_function_unit "pa8000fmac" 2 0
389 (eq_attr "type" "fpcc,fpalu,fpmulsgl,fpmuldbl")
390 (eq_attr "cpu" "8000")) 2 1)
392 (define_function_unit "pa8000fdiv" 2 1
394 (eq_attr "type" "fpdivsgl,fpsqrtsgl")
395 (eq_attr "cpu" "8000")) 17 17)
397 (define_function_unit "pa8000fdiv" 2 1
399 (eq_attr "type" "fpdivdbl,fpsqrtdbl")
400 (eq_attr "cpu" "8000")) 31 31)
403 ;; Compare instructions.
404 ;; This controls RTL generation and register allocation.
406 ;; We generate RTL for comparisons and branches by having the cmpxx
407 ;; patterns store away the operands. Then, the scc and bcc patterns
408 ;; emit RTL for both the compare and the branch.
411 (define_expand "cmpsi"
413 (compare:CC (match_operand:SI 0 "reg_or_0_operand" "")
414 (match_operand:SI 1 "arith5_operand" "")))]
418 hppa_compare_op0 = operands[0];
419 hppa_compare_op1 = operands[1];
420 hppa_branch_type = CMP_SI;
424 (define_expand "cmpsf"
426 (compare:CCFP (match_operand:SF 0 "reg_or_0_operand" "")
427 (match_operand:SF 1 "reg_or_0_operand" "")))]
428 "! TARGET_SOFT_FLOAT"
431 hppa_compare_op0 = operands[0];
432 hppa_compare_op1 = operands[1];
433 hppa_branch_type = CMP_SF;
437 (define_expand "cmpdf"
439 (compare:CCFP (match_operand:DF 0 "reg_or_0_operand" "")
440 (match_operand:DF 1 "reg_or_0_operand" "")))]
441 "! TARGET_SOFT_FLOAT"
444 hppa_compare_op0 = operands[0];
445 hppa_compare_op1 = operands[1];
446 hppa_branch_type = CMP_DF;
452 (match_operator:CCFP 2 "comparison_operator"
453 [(match_operand:SF 0 "reg_or_0_operand" "fG")
454 (match_operand:SF 1 "reg_or_0_operand" "fG")]))]
455 "! TARGET_SOFT_FLOAT"
456 "fcmp,sgl,%Y2 %f0,%f1"
457 [(set_attr "length" "4")
458 (set_attr "type" "fpcc")])
462 (match_operator:CCFP 2 "comparison_operator"
463 [(match_operand:DF 0 "reg_or_0_operand" "fG")
464 (match_operand:DF 1 "reg_or_0_operand" "fG")]))]
465 "! TARGET_SOFT_FLOAT"
466 "fcmp,dbl,%Y2 %f0,%f1"
467 [(set_attr "length" "4")
468 (set_attr "type" "fpcc")])
473 [(set (match_operand:SI 0 "register_operand" "")
479 /* fp scc patterns rarely match, and are not a win on the PA. */
480 if (hppa_branch_type != CMP_SI)
482 /* set up operands from compare. */
483 operands[1] = hppa_compare_op0;
484 operands[2] = hppa_compare_op1;
485 /* fall through and generate default code */
489 [(set (match_operand:SI 0 "register_operand" "")
495 /* fp scc patterns rarely match, and are not a win on the PA. */
496 if (hppa_branch_type != CMP_SI)
498 operands[1] = hppa_compare_op0;
499 operands[2] = hppa_compare_op1;
503 [(set (match_operand:SI 0 "register_operand" "")
509 /* fp scc patterns rarely match, and are not a win on the PA. */
510 if (hppa_branch_type != CMP_SI)
512 operands[1] = hppa_compare_op0;
513 operands[2] = hppa_compare_op1;
517 [(set (match_operand:SI 0 "register_operand" "")
523 /* fp scc patterns rarely match, and are not a win on the PA. */
524 if (hppa_branch_type != CMP_SI)
526 operands[1] = hppa_compare_op0;
527 operands[2] = hppa_compare_op1;
531 [(set (match_operand:SI 0 "register_operand" "")
537 /* fp scc patterns rarely match, and are not a win on the PA. */
538 if (hppa_branch_type != CMP_SI)
540 operands[1] = hppa_compare_op0;
541 operands[2] = hppa_compare_op1;
545 [(set (match_operand:SI 0 "register_operand" "")
551 /* fp scc patterns rarely match, and are not a win on the PA. */
552 if (hppa_branch_type != CMP_SI)
554 operands[1] = hppa_compare_op0;
555 operands[2] = hppa_compare_op1;
558 (define_expand "sltu"
559 [(set (match_operand:SI 0 "register_operand" "")
560 (ltu:SI (match_dup 1)
565 if (hppa_branch_type != CMP_SI)
567 operands[1] = hppa_compare_op0;
568 operands[2] = hppa_compare_op1;
571 (define_expand "sgtu"
572 [(set (match_operand:SI 0 "register_operand" "")
573 (gtu:SI (match_dup 1)
578 if (hppa_branch_type != CMP_SI)
580 operands[1] = hppa_compare_op0;
581 operands[2] = hppa_compare_op1;
584 (define_expand "sleu"
585 [(set (match_operand:SI 0 "register_operand" "")
586 (leu:SI (match_dup 1)
591 if (hppa_branch_type != CMP_SI)
593 operands[1] = hppa_compare_op0;
594 operands[2] = hppa_compare_op1;
597 (define_expand "sgeu"
598 [(set (match_operand:SI 0 "register_operand" "")
599 (geu:SI (match_dup 1)
604 if (hppa_branch_type != CMP_SI)
606 operands[1] = hppa_compare_op0;
607 operands[2] = hppa_compare_op1;
610 ;; Instruction canonicalization puts immediate operands second, which
611 ;; is the reverse of what we want.
614 [(set (match_operand:SI 0 "register_operand" "=r")
615 (match_operator:SI 3 "comparison_operator"
616 [(match_operand:SI 1 "register_operand" "r")
617 (match_operand:SI 2 "arith11_operand" "rI")]))]
619 "com%I2clr,%B3 %2,%1,%0\;ldi 1,%0"
620 [(set_attr "type" "binary")
621 (set_attr "length" "8")])
623 (define_insn "iorscc"
624 [(set (match_operand:SI 0 "register_operand" "=r")
625 (ior:SI (match_operator:SI 3 "comparison_operator"
626 [(match_operand:SI 1 "register_operand" "r")
627 (match_operand:SI 2 "arith11_operand" "rI")])
628 (match_operator:SI 6 "comparison_operator"
629 [(match_operand:SI 4 "register_operand" "r")
630 (match_operand:SI 5 "arith11_operand" "rI")])))]
632 "com%I2clr,%S3 %2,%1,%%r0\;com%I5clr,%B6 %5,%4,%0\;ldi 1,%0"
633 [(set_attr "type" "binary")
634 (set_attr "length" "12")])
636 ;; Combiner patterns for common operations performed with the output
637 ;; from an scc insn (negscc and incscc).
638 (define_insn "negscc"
639 [(set (match_operand:SI 0 "register_operand" "=r")
640 (neg:SI (match_operator:SI 3 "comparison_operator"
641 [(match_operand:SI 1 "register_operand" "r")
642 (match_operand:SI 2 "arith11_operand" "rI")])))]
644 "com%I2clr,%B3 %2,%1,%0\;ldi -1,%0"
645 [(set_attr "type" "binary")
646 (set_attr "length" "8")])
648 ;; Patterns for adding/subtracting the result of a boolean expression from
649 ;; a register. First we have special patterns that make use of the carry
650 ;; bit, and output only two instructions. For the cases we can't in
651 ;; general do in two instructions, the incscc pattern at the end outputs
652 ;; two or three instructions.
655 [(set (match_operand:SI 0 "register_operand" "=r")
656 (plus:SI (leu:SI (match_operand:SI 2 "register_operand" "r")
657 (match_operand:SI 3 "arith11_operand" "rI"))
658 (match_operand:SI 1 "register_operand" "r")))]
660 "sub%I3 %3,%2,%%r0\;addc %%r0,%1,%0"
661 [(set_attr "type" "binary")
662 (set_attr "length" "8")])
664 ; This need only accept registers for op3, since canonicalization
665 ; replaces geu with gtu when op3 is an integer.
667 [(set (match_operand:SI 0 "register_operand" "=r")
668 (plus:SI (geu:SI (match_operand:SI 2 "register_operand" "r")
669 (match_operand:SI 3 "register_operand" "r"))
670 (match_operand:SI 1 "register_operand" "r")))]
672 "sub %2,%3,%%r0\;addc %%r0,%1,%0"
673 [(set_attr "type" "binary")
674 (set_attr "length" "8")])
676 ; Match only integers for op3 here. This is used as canonical form of the
677 ; geu pattern when op3 is an integer. Don't match registers since we can't
678 ; make better code than the general incscc pattern.
680 [(set (match_operand:SI 0 "register_operand" "=r")
681 (plus:SI (gtu:SI (match_operand:SI 2 "register_operand" "r")
682 (match_operand:SI 3 "int11_operand" "I"))
683 (match_operand:SI 1 "register_operand" "r")))]
685 "addi %k3,%2,%%r0\;addc %%r0,%1,%0"
686 [(set_attr "type" "binary")
687 (set_attr "length" "8")])
689 (define_insn "incscc"
690 [(set (match_operand:SI 0 "register_operand" "=r,r")
691 (plus:SI (match_operator:SI 4 "comparison_operator"
692 [(match_operand:SI 2 "register_operand" "r,r")
693 (match_operand:SI 3 "arith11_operand" "rI,rI")])
694 (match_operand:SI 1 "register_operand" "0,?r")))]
697 com%I3clr,%B4 %3,%2,%%r0\;addi 1,%0,%0
698 com%I3clr,%B4 %3,%2,%%r0\;addi,tr 1,%1,%0\;copy %1,%0"
699 [(set_attr "type" "binary,binary")
700 (set_attr "length" "8,12")])
703 [(set (match_operand:SI 0 "register_operand" "=r")
704 (minus:SI (match_operand:SI 1 "register_operand" "r")
705 (gtu:SI (match_operand:SI 2 "register_operand" "r")
706 (match_operand:SI 3 "arith11_operand" "rI"))))]
708 "sub%I3 %3,%2,%%r0\;subb %1,0,%0"
709 [(set_attr "type" "binary")
710 (set_attr "length" "8")])
713 [(set (match_operand:SI 0 "register_operand" "=r")
714 (minus:SI (minus:SI (match_operand:SI 1 "register_operand" "r")
715 (gtu:SI (match_operand:SI 2 "register_operand" "r")
716 (match_operand:SI 3 "arith11_operand" "rI")))
717 (match_operand:SI 4 "register_operand" "r")))]
719 "sub%I3 %3,%2,%%r0\;subb %1,%4,%0"
720 [(set_attr "type" "binary")
721 (set_attr "length" "8")])
723 ; This need only accept registers for op3, since canonicalization
724 ; replaces ltu with leu when op3 is an integer.
726 [(set (match_operand:SI 0 "register_operand" "=r")
727 (minus:SI (match_operand:SI 1 "register_operand" "r")
728 (ltu:SI (match_operand:SI 2 "register_operand" "r")
729 (match_operand:SI 3 "register_operand" "r"))))]
731 "sub %2,%3,%%r0\;subb %1,0,%0"
732 [(set_attr "type" "binary")
733 (set_attr "length" "8")])
736 [(set (match_operand:SI 0 "register_operand" "=r")
737 (minus:SI (minus:SI (match_operand:SI 1 "register_operand" "r")
738 (ltu:SI (match_operand:SI 2 "register_operand" "r")
739 (match_operand:SI 3 "register_operand" "r")))
740 (match_operand:SI 4 "register_operand" "r")))]
742 "sub %2,%3,%%r0\;subb %1,%4,%0"
743 [(set_attr "type" "binary")
744 (set_attr "length" "8")])
746 ; Match only integers for op3 here. This is used as canonical form of the
747 ; ltu pattern when op3 is an integer. Don't match registers since we can't
748 ; make better code than the general incscc pattern.
750 [(set (match_operand:SI 0 "register_operand" "=r")
751 (minus:SI (match_operand:SI 1 "register_operand" "r")
752 (leu:SI (match_operand:SI 2 "register_operand" "r")
753 (match_operand:SI 3 "int11_operand" "I"))))]
755 "addi %k3,%2,%%r0\;subb %1,0,%0"
756 [(set_attr "type" "binary")
757 (set_attr "length" "8")])
760 [(set (match_operand:SI 0 "register_operand" "=r")
761 (minus:SI (minus:SI (match_operand:SI 1 "register_operand" "r")
762 (leu:SI (match_operand:SI 2 "register_operand" "r")
763 (match_operand:SI 3 "int11_operand" "I")))
764 (match_operand:SI 4 "register_operand" "r")))]
766 "addi %k3,%2,%%r0\;subb %1,%4,%0"
767 [(set_attr "type" "binary")
768 (set_attr "length" "8")])
770 (define_insn "decscc"
771 [(set (match_operand:SI 0 "register_operand" "=r,r")
772 (minus:SI (match_operand:SI 1 "register_operand" "0,?r")
773 (match_operator:SI 4 "comparison_operator"
774 [(match_operand:SI 2 "register_operand" "r,r")
775 (match_operand:SI 3 "arith11_operand" "rI,rI")])))]
778 com%I3clr,%B4 %3,%2,%%r0\;addi -1,%0,%0
779 com%I3clr,%B4 %3,%2,%%r0\;addi,tr -1,%1,%0\;copy %1,%0"
780 [(set_attr "type" "binary,binary")
781 (set_attr "length" "8,12")])
783 ; Patterns for max and min. (There is no need for an earlyclobber in the
784 ; last alternative since the middle alternative will match if op0 == op1.)
786 (define_insn "sminsi3"
787 [(set (match_operand:SI 0 "register_operand" "=r,r,r")
788 (smin:SI (match_operand:SI 1 "register_operand" "%0,0,r")
789 (match_operand:SI 2 "arith11_operand" "r,I,M")))]
792 comclr,> %2,%0,%%r0\;copy %2,%0
793 comiclr,> %2,%0,%%r0\;ldi %2,%0
794 comclr,> %1,%r2,%0\;copy %1,%0"
795 [(set_attr "type" "multi,multi,multi")
796 (set_attr "length" "8,8,8")])
798 (define_insn "uminsi3"
799 [(set (match_operand:SI 0 "register_operand" "=r,r")
800 (umin:SI (match_operand:SI 1 "register_operand" "%0,0")
801 (match_operand:SI 2 "arith11_operand" "r,I")))]
804 comclr,>> %2,%0,%%r0\;copy %2,%0
805 comiclr,>> %2,%0,%%r0\;ldi %2,%0"
806 [(set_attr "type" "multi,multi")
807 (set_attr "length" "8,8")])
809 (define_insn "smaxsi3"
810 [(set (match_operand:SI 0 "register_operand" "=r,r,r")
811 (smax:SI (match_operand:SI 1 "register_operand" "%0,0,r")
812 (match_operand:SI 2 "arith11_operand" "r,I,M")))]
815 comclr,< %2,%0,%%r0\;copy %2,%0
816 comiclr,< %2,%0,%%r0\;ldi %2,%0
817 comclr,< %1,%r2,%0\;copy %1,%0"
818 [(set_attr "type" "multi,multi,multi")
819 (set_attr "length" "8,8,8")])
821 (define_insn "umaxsi3"
822 [(set (match_operand:SI 0 "register_operand" "=r,r")
823 (umax:SI (match_operand:SI 1 "register_operand" "%0,0")
824 (match_operand:SI 2 "arith11_operand" "r,I")))]
827 comclr,<< %2,%0,%%r0\;copy %2,%0
828 comiclr,<< %2,%0,%%r0\;ldi %2,%0"
829 [(set_attr "type" "multi,multi")
830 (set_attr "length" "8,8")])
832 (define_insn "abssi2"
833 [(set (match_operand:SI 0 "register_operand" "=r")
834 (abs:SI (match_operand:SI 1 "register_operand" "r")))]
836 "or,>= %%r0,%1,%0\;subi 0,%0,%0"
837 [(set_attr "type" "multi")
838 (set_attr "length" "8")])
840 ;;; Experimental conditional move patterns
842 (define_expand "movsicc"
843 [(set (match_operand:SI 0 "register_operand" "")
845 (match_operator 1 "comparison_operator"
848 (match_operand:SI 2 "reg_or_cint_move_operand" "")
849 (match_operand:SI 3 "reg_or_cint_move_operand" "")))]
853 enum rtx_code code = GET_CODE (operands[1]);
855 if (hppa_branch_type != CMP_SI)
858 /* operands[1] is currently the result of compare_from_rtx. We want to
859 emit a compare of the original operands. */
860 operands[1] = gen_rtx_fmt_ee (code, SImode, hppa_compare_op0, hppa_compare_op1);
861 operands[4] = hppa_compare_op0;
862 operands[5] = hppa_compare_op1;
865 ; We need the first constraint alternative in order to avoid
866 ; earlyclobbers on all other alternatives.
868 [(set (match_operand:SI 0 "register_operand" "=r,r,r,r,r")
870 (match_operator 5 "comparison_operator"
871 [(match_operand:SI 3 "register_operand" "r,r,r,r,r")
872 (match_operand:SI 4 "arith11_operand" "rI,rI,rI,rI,rI")])
873 (match_operand:SI 1 "reg_or_cint_move_operand" "0,r,J,N,K")
877 com%I4clr,%S5 %4,%3,%%r0\;ldi 0,%0
878 com%I4clr,%B5 %4,%3,%0\;copy %1,%0
879 com%I4clr,%B5 %4,%3,%0\;ldi %1,%0
880 com%I4clr,%B5 %4,%3,%0\;ldil L'%1,%0
881 com%I4clr,%B5 %4,%3,%0\;zdepi %Z1,%0"
882 [(set_attr "type" "multi,multi,multi,multi,nullshift")
883 (set_attr "length" "8,8,8,8,8")])
886 [(set (match_operand:SI 0 "register_operand" "=r,r,r,r,r,r,r,r")
888 (match_operator 5 "comparison_operator"
889 [(match_operand:SI 3 "register_operand" "r,r,r,r,r,r,r,r")
890 (match_operand:SI 4 "arith11_operand" "rI,rI,rI,rI,rI,rI,rI,rI")])
891 (match_operand:SI 1 "reg_or_cint_move_operand" "0,0,0,0,r,J,N,K")
892 (match_operand:SI 2 "reg_or_cint_move_operand" "r,J,N,K,0,0,0,0")))]
895 com%I4clr,%S5 %4,%3,%%r0\;copy %2,%0
896 com%I4clr,%S5 %4,%3,%%r0\;ldi %2,%0
897 com%I4clr,%S5 %4,%3,%%r0\;ldil L'%2,%0
898 com%I4clr,%S5 %4,%3,%%r0\;zdepi %Z2,%0
899 com%I4clr,%B5 %4,%3,%%r0\;copy %1,%0
900 com%I4clr,%B5 %4,%3,%%r0\;ldi %1,%0
901 com%I4clr,%B5 %4,%3,%%r0\;ldil L'%1,%0
902 com%I4clr,%B5 %4,%3,%%r0\;zdepi %Z1,%0"
903 [(set_attr "type" "multi,multi,multi,nullshift,multi,multi,multi,nullshift")
904 (set_attr "length" "8,8,8,8,8,8,8,8")])
906 ;; Conditional Branches
910 (if_then_else (eq (match_dup 1) (match_dup 2))
911 (label_ref (match_operand 0 "" ""))
916 if (hppa_branch_type != CMP_SI)
918 emit_insn (gen_cmp_fp (EQ, hppa_compare_op0, hppa_compare_op1));
919 emit_bcond_fp (NE, operands[0]);
922 /* set up operands from compare. */
923 operands[1] = hppa_compare_op0;
924 operands[2] = hppa_compare_op1;
925 /* fall through and generate default code */
930 (if_then_else (ne (match_dup 1) (match_dup 2))
931 (label_ref (match_operand 0 "" ""))
936 if (hppa_branch_type != CMP_SI)
938 emit_insn (gen_cmp_fp (NE, hppa_compare_op0, hppa_compare_op1));
939 emit_bcond_fp (NE, operands[0]);
942 operands[1] = hppa_compare_op0;
943 operands[2] = hppa_compare_op1;
948 (if_then_else (gt (match_dup 1) (match_dup 2))
949 (label_ref (match_operand 0 "" ""))
954 if (hppa_branch_type != CMP_SI)
956 emit_insn (gen_cmp_fp (GT, hppa_compare_op0, hppa_compare_op1));
957 emit_bcond_fp (NE, operands[0]);
960 operands[1] = hppa_compare_op0;
961 operands[2] = hppa_compare_op1;
966 (if_then_else (lt (match_dup 1) (match_dup 2))
967 (label_ref (match_operand 0 "" ""))
972 if (hppa_branch_type != CMP_SI)
974 emit_insn (gen_cmp_fp (LT, hppa_compare_op0, hppa_compare_op1));
975 emit_bcond_fp (NE, operands[0]);
978 operands[1] = hppa_compare_op0;
979 operands[2] = hppa_compare_op1;
984 (if_then_else (ge (match_dup 1) (match_dup 2))
985 (label_ref (match_operand 0 "" ""))
990 if (hppa_branch_type != CMP_SI)
992 emit_insn (gen_cmp_fp (GE, hppa_compare_op0, hppa_compare_op1));
993 emit_bcond_fp (NE, operands[0]);
996 operands[1] = hppa_compare_op0;
997 operands[2] = hppa_compare_op1;
1000 (define_expand "ble"
1002 (if_then_else (le (match_dup 1) (match_dup 2))
1003 (label_ref (match_operand 0 "" ""))
1008 if (hppa_branch_type != CMP_SI)
1010 emit_insn (gen_cmp_fp (LE, hppa_compare_op0, hppa_compare_op1));
1011 emit_bcond_fp (NE, operands[0]);
1014 operands[1] = hppa_compare_op0;
1015 operands[2] = hppa_compare_op1;
1018 (define_expand "bgtu"
1020 (if_then_else (gtu (match_dup 1) (match_dup 2))
1021 (label_ref (match_operand 0 "" ""))
1026 if (hppa_branch_type != CMP_SI)
1028 operands[1] = hppa_compare_op0;
1029 operands[2] = hppa_compare_op1;
1032 (define_expand "bltu"
1034 (if_then_else (ltu (match_dup 1) (match_dup 2))
1035 (label_ref (match_operand 0 "" ""))
1040 if (hppa_branch_type != CMP_SI)
1042 operands[1] = hppa_compare_op0;
1043 operands[2] = hppa_compare_op1;
1046 (define_expand "bgeu"
1048 (if_then_else (geu (match_dup 1) (match_dup 2))
1049 (label_ref (match_operand 0 "" ""))
1054 if (hppa_branch_type != CMP_SI)
1056 operands[1] = hppa_compare_op0;
1057 operands[2] = hppa_compare_op1;
1060 (define_expand "bleu"
1062 (if_then_else (leu (match_dup 1) (match_dup 2))
1063 (label_ref (match_operand 0 "" ""))
1068 if (hppa_branch_type != CMP_SI)
1070 operands[1] = hppa_compare_op0;
1071 operands[2] = hppa_compare_op1;
1074 ;; Match the branch patterns.
1077 ;; Note a long backward conditional branch with an annulled delay slot
1078 ;; has a length of 12.
;; Normal-sense compare-and-branch: assembly is produced by
;; output_cbranch; its next-to-last argument is 0 here and 1 in the
;; negated pattern below, so it appears to select the branch sense
;; (TODO confirm against output_cbranch in pa.c).  The length
;; attribute grows with the branch distance and with flag_pic.
1082 (match_operator 3 "comparison_operator"
1083 [(match_operand:SI 1 "reg_or_0_operand" "rM")
1084 (match_operand:SI 2 "arith5_operand" "rL")])
1085 (label_ref (match_operand 0 "" ""))
1090 return output_cbranch (operands, INSN_ANNULLED_BRANCH_P (insn),
1091 get_attr_length (insn), 0, insn);
1093 [(set_attr "type" "cbranch")
1094 (set (attr "length")
1095 (cond [(lt (abs (minus (match_dup 0) (plus (pc) (const_int 8))))
1098 (lt (abs (minus (match_dup 0) (plus (pc) (const_int 8))))
1101 (eq (symbol_ref "flag_pic") (const_int 0))
1105 ;; Match the negated branch.
1110 (match_operator 3 "comparison_operator"
1111 [(match_operand:SI 1 "reg_or_0_operand" "rM")
1112 (match_operand:SI 2 "arith5_operand" "rL")])
1114 (label_ref (match_operand 0 "" ""))))]
1118 return output_cbranch (operands, INSN_ANNULLED_BRANCH_P (insn),
1119 get_attr_length (insn), 1, insn);
1121 [(set_attr "type" "cbranch")
1122 (set (attr "length")
1123 (cond [(lt (abs (minus (match_dup 0) (plus (pc) (const_int 8))))
1126 (lt (abs (minus (match_dup 0) (plus (pc) (const_int 8))))
1129 (eq (symbol_ref "flag_pic") (const_int 0))
1133 ;; Branch on Bit patterns.
;; Four variants: {ne,eq} x {normal,negated}.  All emit via
;; output_bb; comparing the four calls, the 4th argument (0/1)
;; tracks the negated pattern and the last argument (0/1) tracks
;; the eq variant — presumably the "which sense/which bit value"
;; selectors (TODO confirm against output_bb in pa.c).
1137 (ne (zero_extract:SI (match_operand:SI 0 "register_operand" "r")
1139 (match_operand:SI 1 "uint5_operand" ""))
1141 (label_ref (match_operand 2 "" ""))
1146 return output_bb (operands, INSN_ANNULLED_BRANCH_P (insn),
1147 get_attr_length (insn), 0, insn, 0);
1149 [(set_attr "type" "cbranch")
1150 (set (attr "length")
1151 (if_then_else (lt (abs (minus (match_dup 2) (plus (pc) (const_int 8))))
1159 (ne (zero_extract:SI (match_operand:SI 0 "register_operand" "r")
1161 (match_operand:SI 1 "uint5_operand" ""))
1164 (label_ref (match_operand 2 "" ""))))]
1168 return output_bb (operands, INSN_ANNULLED_BRANCH_P (insn),
1169 get_attr_length (insn), 1, insn, 0);
1171 [(set_attr "type" "cbranch")
1172 (set (attr "length")
1173 (if_then_else (lt (abs (minus (match_dup 2) (plus (pc) (const_int 8))))
1181 (eq (zero_extract:SI (match_operand:SI 0 "register_operand" "r")
1183 (match_operand:SI 1 "uint5_operand" ""))
1185 (label_ref (match_operand 2 "" ""))
1190 return output_bb (operands, INSN_ANNULLED_BRANCH_P (insn),
1191 get_attr_length (insn), 0, insn, 1);
1193 [(set_attr "type" "cbranch")
1194 (set (attr "length")
1195 (if_then_else (lt (abs (minus (match_dup 2) (plus (pc) (const_int 8))))
1203 (eq (zero_extract:SI (match_operand:SI 0 "register_operand" "r")
1205 (match_operand:SI 1 "uint5_operand" ""))
1208 (label_ref (match_operand 2 "" ""))))]
1212 return output_bb (operands, INSN_ANNULLED_BRANCH_P (insn),
1213 get_attr_length (insn), 1, insn, 1);
1215 [(set_attr "type" "cbranch")
1216 (set (attr "length")
1217 (if_then_else (lt (abs (minus (match_dup 2) (plus (pc) (const_int 8))))
1222 ;; Branch on Variable Bit patterns.
;; Same four-way structure as the fixed-bit patterns above, but the
;; bit position comes from a register (constraint "q") and assembly
;; is produced by output_bvb.  The two trailing 0/1 arguments mirror
;; the negated/eq selectors of output_bb (TODO confirm in pa.c).
1226 (ne (zero_extract:SI (match_operand:SI 0 "register_operand" "r")
1228 (match_operand:SI 1 "register_operand" "q"))
1230 (label_ref (match_operand 2 "" ""))
1235 return output_bvb (operands, INSN_ANNULLED_BRANCH_P (insn),
1236 get_attr_length (insn), 0, insn, 0);
1238 [(set_attr "type" "cbranch")
1239 (set (attr "length")
1240 (if_then_else (lt (abs (minus (match_dup 2) (plus (pc) (const_int 8))))
1248 (ne (zero_extract:SI (match_operand:SI 0 "register_operand" "r")
1250 (match_operand:SI 1 "register_operand" "q"))
1253 (label_ref (match_operand 2 "" ""))))]
1257 return output_bvb (operands, INSN_ANNULLED_BRANCH_P (insn),
1258 get_attr_length (insn), 1, insn, 0);
1260 [(set_attr "type" "cbranch")
1261 (set (attr "length")
1262 (if_then_else (lt (abs (minus (match_dup 2) (plus (pc) (const_int 8))))
1270 (eq (zero_extract:SI (match_operand:SI 0 "register_operand" "r")
1272 (match_operand:SI 1 "register_operand" "q"))
1274 (label_ref (match_operand 2 "" ""))
1279 return output_bvb (operands, INSN_ANNULLED_BRANCH_P (insn),
1280 get_attr_length (insn), 0, insn, 1);
1282 [(set_attr "type" "cbranch")
1283 (set (attr "length")
1284 (if_then_else (lt (abs (minus (match_dup 2) (plus (pc) (const_int 8))))
1292 (eq (zero_extract:SI (match_operand:SI 0 "register_operand" "r")
1294 (match_operand:SI 1 "register_operand" "q"))
1297 (label_ref (match_operand 2 "" ""))))]
1301 return output_bvb (operands, INSN_ANNULLED_BRANCH_P (insn),
1302 get_attr_length (insn), 1, insn, 1);
1304 [(set_attr "type" "cbranch")
1305 (set (attr "length")
1306 (if_then_else (lt (abs (minus (match_dup 2) (plus (pc) (const_int 8))))
1311 ;; Floating point branches
;; Branch on the FP condition register (reg:CCFP 0).  The normal
;; form is ftest + branch (length 8); the negated form inserts an
;; add,tr no-op/skip between ftest and the branch (length 12).
;; An annulled delay slot selects the ",n" branch completer.
1313 [(set (pc) (if_then_else (ne (reg:CCFP 0) (const_int 0))
1314 (label_ref (match_operand 0 "" ""))
1316 "! TARGET_SOFT_FLOAT"
1319 if (INSN_ANNULLED_BRANCH_P (insn))
1320 return \"ftest\;b,n %0\";
1322 return \"ftest\;b%* %0\";
1324 [(set_attr "type" "fbranch")
1325 (set_attr "length" "8")])
1328 [(set (pc) (if_then_else (ne (reg:CCFP 0) (const_int 0))
1330 (label_ref (match_operand 0 "" ""))))]
1331 "! TARGET_SOFT_FLOAT"
1334 if (INSN_ANNULLED_BRANCH_P (insn))
1335 return \"ftest\;add,tr %%r0,%%r0,%%r0\;b,n %0\";
1337 return \"ftest\;add,tr %%r0,%%r0,%%r0\;b%* %0\";
1339 [(set_attr "type" "fbranch")
1340 (set_attr "length" "12")])
1342 ;; Move instructions
;; movsi expander: delegates to emit_move_sequence; on success the
;; elided part of the body presumably ends with DONE (not visible
;; in this listing).
1344 (define_expand "movsi"
1345 [(set (match_operand:SI 0 "general_operand" "")
1346 (match_operand:SI 1 "general_operand" ""))]
1350 if (emit_move_sequence (operands, SImode, 0))
1354 ;; Reloading an SImode or DImode value requires a scratch register if
1355 ;; going in to or out of float point registers.
;; reload_insi/reload_outsi: operand 2 is the reload scratch passed
;; to emit_move_sequence; if the sequence is not needed, a plain SET
;; is emitted by hand so the clobber of the scratch is dropped.
1357 (define_expand "reload_insi"
1358 [(set (match_operand:SI 0 "register_operand" "=Z")
1359 (match_operand:SI 1 "non_hard_reg_operand" ""))
1360 (clobber (match_operand:SI 2 "register_operand" "=&r"))]
1364 if (emit_move_sequence (operands, SImode, operands[2]))
1367 /* We don't want the clobber emitted, so handle this ourselves. */
1368 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
1372 (define_expand "reload_outsi"
1373 [(set (match_operand:SI 0 "non_hard_reg_operand" "")
1374 (match_operand:SI 1 "register_operand" "Z"))
1375 (clobber (match_operand:SI 2 "register_operand" "=&r"))]
1379 if (emit_move_sequence (operands, SImode, operands[2]))
1382 /* We don't want the clobber emitted, so handle this ourselves. */
1383 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
1387 ;;; pic symbol references
;; Load through the PIC offset table: only matches when operand 1 is
;; literally the PIC register.
1390 [(set (match_operand:SI 0 "register_operand" "=r")
1391 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "r")
1392 (match_operand:SI 2 "symbolic_operand" ""))))]
1393 "flag_pic && operands[1] == pic_offset_table_rtx"
1395 [(set_attr "type" "load")
1396 (set_attr "length" "4")])
;; Main movsi insns: one for hard-float (includes FP-register
;; alternatives f/T/R) and one for soft-float (integer alternatives
;; only).  The output templates for each alternative are elided in
;; this listing.
1399 [(set (match_operand:SI 0 "reg_or_nonsymb_mem_operand"
1400 "=r,r,r,r,r,Q,*q,!f,f,*TR")
1401 (match_operand:SI 1 "move_operand"
1402 "r,J,N,K,RQ,rM,rM,!fM,*RT,f"))]
1403 "(register_operand (operands[0], SImode)
1404 || reg_or_0_operand (operands[1], SImode))
1405 && ! TARGET_SOFT_FLOAT"
1417 [(set_attr "type" "move,move,move,shift,load,store,move,fpalu,fpload,fpstore")
1418 (set_attr "pa_combine_type" "addmove")
1419 (set_attr "length" "4,4,4,4,4,4,4,4,4,4")])
1422 [(set (match_operand:SI 0 "reg_or_nonsymb_mem_operand"
1424 (match_operand:SI 1 "move_operand"
1425 "r,J,N,K,RQ,rM,rM"))]
1426 "(register_operand (operands[0], SImode)
1427 || reg_or_0_operand (operands[1], SImode))
1428 && TARGET_SOFT_FLOAT"
1437 [(set_attr "type" "move,move,move,move,load,store,move")
1438 (set_attr "pa_combine_type" "addmove")
1439 (set_attr "length" "4,4,4,4,4,4,4")])
;; Unscaled indexed word loads (ldwx), both operand orders.  When
;; reload produced a backwards index (base register in the index
;; slot), the operands are swapped in the output template.
1442 [(set (match_operand:SI 0 "register_operand" "=r")
1443 (mem:SI (plus:SI (match_operand:SI 1 "basereg_operand" "r")
1444 (match_operand:SI 2 "register_operand" "r"))))]
1445 "! TARGET_DISABLE_INDEXING"
1448 /* Reload can create backwards (relative to cse) unscaled index
1449 address modes when eliminating registers and possibly for
1450 pseudos that don't get hard registers. Deal with it. */
1451 if (operands[2] == hard_frame_pointer_rtx
1452 || operands[2] == stack_pointer_rtx)
1453 return \"ldwx %1(%2),%0\";
1455 return \"ldwx %2(%1),%0\";
1457 [(set_attr "type" "load")
1458 (set_attr "length" "4")])
1461 [(set (match_operand:SI 0 "register_operand" "=r")
1462 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "r")
1463 (match_operand:SI 2 "basereg_operand" "r"))))]
1464 "! TARGET_DISABLE_INDEXING"
1467 /* Reload can create backwards (relative to cse) unscaled index
1468 address modes when eliminating registers and possibly for
1469 pseudos that don't get hard registers. Deal with it. */
1470 if (operands[1] == hard_frame_pointer_rtx
1471 || operands[1] == stack_pointer_rtx)
1472 return \"ldwx %2(%1),%0\";
1474 return \"ldwx %1(%2),%0\";
1476 [(set_attr "type" "load")
1477 (set_attr "length" "4")])
1479 ;; Load or store with base-register modification.
;; pre_load/pre_ldw: load with pre-increment of the base.  For a
;; negative displacement ldwm is used, otherwise ldws,mb; the store
;; twin below mirrors this with stwm / stws,mb.
1481 (define_expand "pre_load"
1482 [(parallel [(set (match_operand:SI 0 "register_operand" "")
1483 (mem (plus (match_operand 1 "register_operand" "")
1484 (match_operand 2 "pre_cint_operand" ""))))
1486 (plus (match_dup 1) (match_dup 2)))])]
1490 emit_insn (gen_pre_ldw (operands[0], operands[1], operands[2]));
1494 (define_insn "pre_ldw"
1495 [(set (match_operand:SI 0 "register_operand" "=r")
1496 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "+r")
1497 (match_operand:SI 2 "pre_cint_operand" ""))))
1499 (plus:SI (match_dup 1) (match_dup 2)))]
1503 if (INTVAL (operands[2]) < 0)
1504 return \"ldwm %2(%1),%0\";
1505 return \"ldws,mb %2(%1),%0\";
1507 [(set_attr "type" "load")
1508 (set_attr "length" "4")])
1511 [(set (mem:SI (plus:SI (match_operand:SI 0 "register_operand" "+r")
1512 (match_operand:SI 1 "pre_cint_operand" "")))
1513 (match_operand:SI 2 "reg_or_0_operand" "rM"))
1515 (plus:SI (match_dup 0) (match_dup 1)))]
1519 if (INTVAL (operands[1]) < 0)
1520 return \"stwm %r2,%1(%0)\";
1521 return \"stws,mb %r2,%1(%0)\";
1523 [(set_attr "type" "store")
1524 (set_attr "length" "4")])
;; Post-modify variants: the base is updated after the access.  A
;; positive displacement selects ldwm/stwm, otherwise ldws,ma /
;; stws,ma (note the sign test is inverted relative to pre-modify).
1527 [(set (match_operand:SI 0 "register_operand" "=r")
1528 (mem:SI (match_operand:SI 1 "register_operand" "+r")))
1530 (plus:SI (match_dup 1)
1531 (match_operand:SI 2 "post_cint_operand" "")))]
1535 if (INTVAL (operands[2]) > 0)
1536 return \"ldwm %2(%1),%0\";
1537 return \"ldws,ma %2(%1),%0\";
1539 [(set_attr "type" "load")
1540 (set_attr "length" "4")])
1542 (define_expand "post_store"
1543 [(parallel [(set (mem (match_operand 0 "register_operand" ""))
1544 (match_operand 1 "reg_or_0_operand" ""))
1547 (match_operand 2 "post_cint_operand" "")))])]
1551 emit_insn (gen_post_stw (operands[0], operands[1], operands[2]));
1555 (define_insn "post_stw"
1556 [(set (mem:SI (match_operand:SI 0 "register_operand" "+r"))
1557 (match_operand:SI 1 "reg_or_0_operand" "rM"))
1559 (plus:SI (match_dup 0)
1560 (match_operand:SI 2 "post_cint_operand" "")))]
1564 if (INTVAL (operands[2]) > 0)
1565 return \"stwm %r1,%2(%0)\";
1566 return \"stws,ma %r1,%2(%0)\";
1568 [(set_attr "type" "store")
1569 (set_attr "length" "4")])
1571 ;; For loading the address of a label while generating PIC code.
1572 ;; Note since this pattern can be created at reload time (via movsi), all
1573 ;; the same rules for movsi apply here. (no new pseudos, no temporaries).
;; Emits bl .+8 / depi to materialize the PC, then either a single
;; ldo (label within +-8100 bytes, per insn_addresses) or an
;; addil/ldo pair for the general case.
1575 [(set (match_operand 0 "register_operand" "=a")
1576 (match_operand 1 "pic_label_operand" ""))]
1580 rtx label_rtx = gen_label_rtx ();
1582 extern FILE *asm_out_file;
1584 xoperands[0] = operands[0];
1585 xoperands[1] = operands[1];
1586 xoperands[2] = label_rtx;
1587 output_asm_insn (\"bl .+8,%0\", xoperands);
1588 output_asm_insn (\"depi 0,31,2,%0\", xoperands);
1589 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, \"L\",
1590 CODE_LABEL_NUMBER (label_rtx));
1592 /* If we're trying to load the address of a label that happens to be
1593 close, then we can use a shorter sequence. */
1594 if (GET_CODE (operands[1]) == LABEL_REF
1596 && abs (insn_addresses[INSN_UID (XEXP (operands[1], 0))]
1597 - insn_addresses[INSN_UID (insn)]) < 8100)
1599 /* Prefixing with R% here is wrong, it extracts just 11 bits and is
1600 always non-negative. */
1601 output_asm_insn (\"ldo %1-%2(%0),%0\", xoperands);
1605 output_asm_insn (\"addil L%%%1-%2,%0\", xoperands);
1606 output_asm_insn (\"ldo R%%%1-%2(%0),%0\", xoperands);
1610 [(set_attr "type" "multi")
1611 (set_attr "length" "16")]) ; 12 or 16
;; register + high-part addition for symbolic operands (addil-class
;; pattern; output template elided in this listing).
1614 [(set (match_operand:SI 0 "register_operand" "=a")
1615 (plus:SI (match_operand:SI 1 "register_operand" "r")
1616 (high:SI (match_operand 2 "" ""))))]
1617 "symbolic_operand (operands[2], Pmode)
1618 && ! function_label_operand (operands[2])
1621 [(set_attr "type" "binary")
1622 (set_attr "length" "4")])
1624 ; We need this to make sure CSE doesn't simplify a memory load with a
1625 ; symbolic address, whose content it thinks it knows. For PIC, what CSE
1626 ; thinks is the real value will be the address of that value.
1628 [(set (match_operand:SI 0 "register_operand" "=r")
1630 (lo_sum:SI (match_operand:SI 1 "register_operand" "r")
1632 [(match_operand:SI 2 "symbolic_operand" "")] 0))))]
1638 return \"ldw RT'%G2(%1),%0\";
1640 [(set_attr "type" "load")
1641 (set_attr "length" "4")])
1643 ;; Always use addil rather than ldil;add sequences. This allows the
1644 ;; HP linker to eliminate the dp relocation if the symbolic operand
1645 ;; lives in the TEXT space.
;; high:SI of a symbolic operand via addil against %r27 (the data
;; pointer).  With TARGET_LONG_LOAD_STORE a longer two-insn form is
;; used; the length attribute switches accordingly.
1647 [(set (match_operand:SI 0 "register_operand" "=a")
1648 (high:SI (match_operand 1 "" "")))]
1649 "symbolic_operand (operands[1], Pmode)
1650 && ! function_label_operand (operands[1])
1651 && ! read_only_operand (operands[1])
1655 if (TARGET_LONG_LOAD_STORE)
1656 return \"addil NLR'%H1,%%r27\;ldo N'%H1(%%r1),%%r1\";
1658 return \"addil LR'%H1,%%r27\";
1660 [(set_attr "type" "binary")
1661 (set (attr "length")
1662 (if_then_else (eq (symbol_ref "TARGET_LONG_LOAD_STORE") (const_int 0))
1667 ;; This is for use in the prologue/epilogue code. We need it
1668 ;; to add large constants to a stack pointer or frame pointer.
1669 ;; Because of the additional %r1 pressure, we probably do not
1670 ;; want to use this in general code, so make it available
1671 ;; only after reload.
1673 [(set (match_operand:SI 0 "register_operand" "=!a,*r")
1674 (plus:SI (match_operand:SI 1 "register_operand" "r,r")
1675 (high:SI (match_operand 2 "const_int_operand" ""))))]
1679 ldil L'%G2,%0\;addl %0,%1,%0"
1680 [(set_attr "type" "binary,binary")
1681 (set_attr "length" "4,8")])
;; Load the high part of a constant or symbolic address with ldil.
;; LR' selects the symbolic relocation; L'%G strips function-label
;; decoration for plain constants.
;; BUG FIX(review): the condition previously read
;;   "(!flag_pic || !symbolic_operand (operands[1]), Pmode)"
;; As a C expression that applies the comma operator: the intended
;; test is evaluated and discarded, and the whole condition becomes
;; Pmode (always non-zero), so the pattern wrongly matched PIC
;; symbolic operands too.  The mode argument belongs inside the
;; symbolic_operand call.
1684 [(set (match_operand:SI 0 "register_operand" "=r")
1685 (high:SI (match_operand 1 "" "")))]
1686 "(!flag_pic || !symbolic_operand (operands[1], Pmode))
1687 && !is_function_label_plus_const (operands[1])"
1690 if (symbolic_operand (operands[1], Pmode))
1691 return \"ldil LR'%H1,%0\";
1693 return \"ldil L'%G1,%0\";
1695 [(set_attr "type" "move")
1696 (set_attr "length" "4")])
;; lo_sum: finish a high/lo_sum pair with ldo.  Under PIC a symbolic
;; operand takes a different path (output elided here); otherwise
;; RR'/R' relocations are selected for symbolic vs. plain operands.
1699 [(set (match_operand:SI 0 "register_operand" "=r")
1700 (lo_sum:SI (match_operand:SI 1 "register_operand" "r")
1701 (match_operand:SI 2 "immediate_operand" "i")))]
1702 "!is_function_label_plus_const (operands[2])"
1705 if (flag_pic && symbolic_operand (operands[2], Pmode))
1707 else if (symbolic_operand (operands[2], Pmode))
1708 return \"ldo RR'%G2(%1),%0\";
1710 return \"ldo R'%G2(%1),%0\";
1712 [(set_attr "type" "move")
1713 (set_attr "length" "4")])
1715 ;; Now that a symbolic_address plus a constant is broken up early
1716 ;; in the compilation phase (for better CSE) we need a special
1717 ;; combiner pattern to load the symbolic address plus the constant
1718 ;; in only 2 instructions. (For cases where the symbolic address
1719 ;; was not a common subexpression.)
;; Split a symbolic move with a scratch into high + lo_sum.
1721 [(set (match_operand:SI 0 "register_operand" "")
1722 (match_operand:SI 1 "symbolic_operand" ""))
1723 (clobber (match_operand:SI 2 "register_operand" ""))]
1724 "! (flag_pic && pic_label_operand (operands[1], SImode))"
1725 [(set (match_dup 2) (high:SI (match_dup 1)))
1726 (set (match_dup 0) (lo_sum:SI (match_dup 2) (match_dup 1)))]
1729 ;; hppa_legitimize_address goes to a great deal of trouble to
1730 ;; create addresses which use indexing. In some cases, this
1731 ;; is a loss because there aren't any store instructions which
1732 ;; allow indexed addresses (with integer register source).
1734 ;; These define_splits try to turn a 3 insn store into
1735 ;; a 2 insn store with some creative RTL rewriting.
;; One split each for SI, HI and QI stores: hoist the scaled-index
;; address computation into the scratch (operand 5), then store
;; through scratch + constant displacement.
1737 [(set (mem:SI (plus:SI (mult:SI (match_operand:SI 0 "register_operand" "")
1738 (match_operand:SI 1 "shadd_operand" ""))
1739 (plus:SI (match_operand:SI 2 "register_operand" "")
1740 (match_operand:SI 3 "const_int_operand" ""))))
1741 (match_operand:SI 4 "register_operand" ""))
1742 (clobber (match_operand:SI 5 "register_operand" ""))]
1744 [(set (match_dup 5) (plus:SI (mult:SI (match_dup 0) (match_dup 1))
1746 (set (mem:SI (plus:SI (match_dup 5) (match_dup 3))) (match_dup 4))]
1750 [(set (mem:HI (plus:SI (mult:SI (match_operand:SI 0 "register_operand" "")
1751 (match_operand:SI 1 "shadd_operand" ""))
1752 (plus:SI (match_operand:SI 2 "register_operand" "")
1753 (match_operand:SI 3 "const_int_operand" ""))))
1754 (match_operand:HI 4 "register_operand" ""))
1755 (clobber (match_operand:SI 5 "register_operand" ""))]
1757 [(set (match_dup 5) (plus:SI (mult:SI (match_dup 0) (match_dup 1))
1759 (set (mem:HI (plus:SI (match_dup 5) (match_dup 3))) (match_dup 4))]
1763 [(set (mem:QI (plus:SI (mult:SI (match_operand:SI 0 "register_operand" "")
1764 (match_operand:SI 1 "shadd_operand" ""))
1765 (plus:SI (match_operand:SI 2 "register_operand" "")
1766 (match_operand:SI 3 "const_int_operand" ""))))
1767 (match_operand:QI 4 "register_operand" ""))
1768 (clobber (match_operand:SI 5 "register_operand" ""))]
1770 [(set (match_dup 5) (plus:SI (mult:SI (match_dup 0) (match_dup 1))
1772 (set (mem:QI (plus:SI (match_dup 5) (match_dup 3))) (match_dup 4))]
;; movhi expander and insns — structurally parallel to movsi above.
1775 (define_expand "movhi"
1776 [(set (match_operand:HI 0 "general_operand" "")
1777 (match_operand:HI 1 "general_operand" ""))]
1781 if (emit_move_sequence (operands, HImode, 0))
1786 [(set (match_operand:HI 0 "reg_or_nonsymb_mem_operand" "=r,r,r,r,r,Q,*q,!*f")
1787 (match_operand:HI 1 "move_operand" "r,J,N,K,RQ,rM,rM,!*fM"))]
1788 "register_operand (operands[0], HImode)
1789 || reg_or_0_operand (operands[1], HImode)"
1799 [(set_attr "type" "move,move,move,shift,load,store,move,fpalu")
1800 (set_attr "pa_combine_type" "addmove")
1801 (set_attr "length" "4,4,4,4,4,4,4,4")])
;; Unscaled indexed halfword loads (ldhx); like ldwx, the operands
;; are swapped when reload produced a backwards index.
1804 [(set (match_operand:HI 0 "register_operand" "=r")
1805 (mem:HI (plus:SI (match_operand:SI 1 "basereg_operand" "r")
1806 (match_operand:SI 2 "register_operand" "r"))))]
1807 "! TARGET_DISABLE_INDEXING"
1810 /* Reload can create backwards (relative to cse) unscaled index
1811 address modes when eliminating registers and possibly for
1812 pseudos that don't get hard registers. Deal with it. */
1813 if (operands[2] == hard_frame_pointer_rtx
1814 || operands[2] == stack_pointer_rtx)
1815 return \"ldhx %1(%2),%0\";
1817 return \"ldhx %2(%1),%0\";
1819 [(set_attr "type" "load")
1820 (set_attr "length" "4")])
1823 [(set (match_operand:HI 0 "register_operand" "=r")
1824 (mem:HI (plus:SI (match_operand:SI 1 "register_operand" "r")
1825 (match_operand:SI 2 "basereg_operand" "r"))))]
1826 "! TARGET_DISABLE_INDEXING"
1829 /* Reload can create backwards (relative to cse) unscaled index
1830 address modes when eliminating registers and possibly for
1831 pseudos that don't get hard registers. Deal with it. */
1832 if (operands[1] == hard_frame_pointer_rtx
1833 || operands[1] == stack_pointer_rtx)
1834 return \"ldhx %2(%1),%0\";
1836 return \"ldhx %1(%2),%0\";
1838 [(set_attr "type" "load")
1839 (set_attr "length" "4")])
1841 ; Now zero extended variants.
;; Same ldhx patterns but with the result zero-extended to SImode;
;; ldhx zero-extends naturally, so the templates are identical.
1843 [(set (match_operand:SI 0 "register_operand" "=r")
1844 (zero_extend:SI (mem:HI
1846 (match_operand:SI 1 "basereg_operand" "r")
1847 (match_operand:SI 2 "register_operand" "r")))))]
1848 "! TARGET_DISABLE_INDEXING"
1851 /* Reload can create backwards (relative to cse) unscaled index
1852 address modes when eliminating registers and possibly for
1853 pseudos that don't get hard registers. Deal with it. */
1854 if (operands[2] == hard_frame_pointer_rtx
1855 || operands[2] == stack_pointer_rtx)
1856 return \"ldhx %1(%2),%0\";
1858 return \"ldhx %2(%1),%0\";
1860 [(set_attr "type" "load")
1861 (set_attr "length" "4")])
1864 [(set (match_operand:SI 0 "register_operand" "=r")
1865 (zero_extend:SI (mem:HI
1867 (match_operand:SI 1 "register_operand" "r")
1868 (match_operand:SI 2 "basereg_operand" "r")))))]
1869 "! TARGET_DISABLE_INDEXING"
1872 /* Reload can create backwards (relative to cse) unscaled index
1873 address modes when eliminating registers and possibly for
1874 pseudos that don't get hard registers. Deal with it. */
1875 if (operands[1] == hard_frame_pointer_rtx
1876 || operands[1] == stack_pointer_rtx)
1877 return \"ldhx %2(%1),%0\";
1879 return \"ldhx %1(%2),%0\";
1881 [(set_attr "type" "load")
1882 (set_attr "length" "4")])
;; Halfword load/store with base modification (ldhs,mb / sths,mb),
;; plus a zero-extended load variant.
1885 [(set (match_operand:HI 0 "register_operand" "=r")
1886 (mem:HI (plus:SI (match_operand:SI 1 "register_operand" "+r")
1887 (match_operand:SI 2 "int5_operand" "L"))))
1889 (plus:SI (match_dup 1) (match_dup 2)))]
1892 [(set_attr "type" "load")
1893 (set_attr "length" "4")])
1895 ; And a zero extended variant.
1897 [(set (match_operand:SI 0 "register_operand" "=r")
1898 (zero_extend:SI (mem:HI
1900 (match_operand:SI 1 "register_operand" "+r")
1901 (match_operand:SI 2 "int5_operand" "L")))))
1903 (plus:SI (match_dup 1) (match_dup 2)))]
1906 [(set_attr "type" "load")
1907 (set_attr "length" "4")])
1910 [(set (mem:HI (plus:SI (match_operand:SI 0 "register_operand" "+r")
1911 (match_operand:SI 1 "int5_operand" "L")))
1912 (match_operand:HI 2 "reg_or_0_operand" "rM"))
1914 (plus:SI (match_dup 0) (match_dup 1)))]
1916 "sths,mb %r2,%1(%0)"
1917 [(set_attr "type" "store")
1918 (set_attr "length" "4")])
;; HImode high/lo_sum constant pieces (output templates elided).
1921 [(set (match_operand:HI 0 "register_operand" "=r")
1922 (high:HI (match_operand 1 "const_int_operand" "")))]
1925 [(set_attr "type" "move")
1926 (set_attr "length" "4")])
1929 [(set (match_operand:HI 0 "register_operand" "=r")
1930 (lo_sum:HI (match_operand:HI 1 "register_operand" "r")
1931 (match_operand 2 "const_int_operand" "")))]
1934 [(set_attr "type" "move")
1935 (set_attr "length" "4")])
;; movqi expander and insns — structurally parallel to movsi/movhi.
1937 (define_expand "movqi"
1938 [(set (match_operand:QI 0 "general_operand" "")
1939 (match_operand:QI 1 "general_operand" ""))]
1943 if (emit_move_sequence (operands, QImode, 0))
1948 [(set (match_operand:QI 0 "reg_or_nonsymb_mem_operand" "=r,r,r,r,r,Q,*q,!*f")
1949 (match_operand:QI 1 "move_operand" "r,J,N,K,RQ,rM,rM,!*fM"))]
1950 "register_operand (operands[0], QImode)
1951 || reg_or_0_operand (operands[1], QImode)"
1961 [(set_attr "type" "move,move,move,shift,load,store,move,fpalu")
1962 (set_attr "pa_combine_type" "addmove")
1963 (set_attr "length" "4,4,4,4,4,4,4,4")])
;; Unscaled indexed byte loads (ldbx), with the same backwards-index
;; workaround as ldwx/ldhx.
1966 [(set (match_operand:QI 0 "register_operand" "=r")
1967 (mem:QI (plus:SI (match_operand:SI 1 "basereg_operand" "r")
1968 (match_operand:SI 2 "register_operand" "r"))))]
1969 "! TARGET_DISABLE_INDEXING"
1972 /* Reload can create backwards (relative to cse) unscaled index
1973 address modes when eliminating registers and possibly for
1974 pseudos that don't get hard registers. Deal with it. */
1975 if (operands[2] == hard_frame_pointer_rtx
1976 || operands[2] == stack_pointer_rtx)
1977 return \"ldbx %1(%2),%0\";
1979 return \"ldbx %2(%1),%0\";
1981 [(set_attr "type" "load")
1982 (set_attr "length" "4")])
1985 [(set (match_operand:QI 0 "register_operand" "=r")
1986 (mem:QI (plus:SI (match_operand:SI 1 "register_operand" "r")
1987 (match_operand:SI 2 "basereg_operand" "r"))))]
1988 "! TARGET_DISABLE_INDEXING"
1991 /* Reload can create backwards (relative to cse) unscaled index
1992 address modes when eliminating registers and possibly for
1993 pseudos that don't get hard registers. Deal with it. */
1994 if (operands[1] == hard_frame_pointer_rtx
1995 || operands[1] == stack_pointer_rtx)
1996 return \"ldbx %2(%1),%0\";
1998 return \"ldbx %1(%2),%0\";
2000 [(set_attr "type" "load")
2001 (set_attr "length" "4")])
2003 ; Indexed byte load with zero extension to SImode or HImode.
;; Four ldbx patterns: zero-extend to SI and to HI, each in both
;; operand orders, all sharing the backwards-index workaround.
2005 [(set (match_operand:SI 0 "register_operand" "=r")
2006 (zero_extend:SI (mem:QI
2008 (match_operand:SI 1 "basereg_operand" "r")
2009 (match_operand:SI 2 "register_operand" "r")))))]
2010 "! TARGET_DISABLE_INDEXING"
2013 /* Reload can create backwards (relative to cse) unscaled index
2014 address modes when eliminating registers and possibly for
2015 pseudos that don't get hard registers. Deal with it. */
2016 if (operands[2] == hard_frame_pointer_rtx
2017 || operands[2] == stack_pointer_rtx)
2018 return \"ldbx %1(%2),%0\";
2020 return \"ldbx %2(%1),%0\";
2022 [(set_attr "type" "load")
2023 (set_attr "length" "4")])
2026 [(set (match_operand:SI 0 "register_operand" "=r")
2027 (zero_extend:SI (mem:QI
2029 (match_operand:SI 1 "register_operand" "r")
2030 (match_operand:SI 2 "basereg_operand" "r")))))]
2031 "! TARGET_DISABLE_INDEXING"
2034 /* Reload can create backwards (relative to cse) unscaled index
2035 address modes when eliminating registers and possibly for
2036 pseudos that don't get hard registers. Deal with it. */
2037 if (operands[1] == hard_frame_pointer_rtx
2038 || operands[1] == stack_pointer_rtx)
2039 return \"ldbx %2(%1),%0\";
2041 return \"ldbx %1(%2),%0\";
2043 [(set_attr "type" "load")
2044 (set_attr "length" "4")])
2047 [(set (match_operand:HI 0 "register_operand" "=r")
2048 (zero_extend:HI (mem:QI
2050 (match_operand:SI 1 "basereg_operand" "r")
2051 (match_operand:SI 2 "register_operand" "r")))))]
2052 "! TARGET_DISABLE_INDEXING"
2055 /* Reload can create backwards (relative to cse) unscaled index
2056 address modes when eliminating registers and possibly for
2057 pseudos that don't get hard registers. Deal with it. */
2058 if (operands[2] == hard_frame_pointer_rtx
2059 || operands[2] == stack_pointer_rtx)
2060 return \"ldbx %1(%2),%0\";
2062 return \"ldbx %2(%1),%0\";
2064 [(set_attr "type" "load")
2065 (set_attr "length" "4")])
2068 [(set (match_operand:HI 0 "register_operand" "=r")
2069 (zero_extend:HI (mem:QI
2071 (match_operand:SI 1 "register_operand" "r")
2072 (match_operand:SI 2 "basereg_operand" "r")))))]
2073 "! TARGET_DISABLE_INDEXING"
2076 /* Reload can create backwards (relative to cse) unscaled index
2077 address modes when eliminating registers and possibly for
2078 pseudos that don't get hard registers. Deal with it. */
2079 if (operands[1] == hard_frame_pointer_rtx
2080 || operands[1] == stack_pointer_rtx)
2081 return \"ldbx %2(%1),%0\";
2083 return \"ldbx %1(%2),%0\";
2085 [(set_attr "type" "load")
2086 (set_attr "length" "4")])
;; Byte load/store with base modification (ldbs,mb / stbs,mb),
;; plus zero-extended load variants to SI and HI.
2089 [(set (match_operand:QI 0 "register_operand" "=r")
2090 (mem:QI (plus:SI (match_operand:SI 1 "register_operand" "+r")
2091 (match_operand:SI 2 "int5_operand" "L"))))
2092 (set (match_dup 1) (plus:SI (match_dup 1) (match_dup 2)))]
2095 [(set_attr "type" "load")
2096 (set_attr "length" "4")])
2098 ; Now the same thing with zero extensions.
2100 [(set (match_operand:SI 0 "register_operand" "=r")
2101 (zero_extend:SI (mem:QI (plus:SI
2102 (match_operand:SI 1 "register_operand" "+r")
2103 (match_operand:SI 2 "int5_operand" "L")))))
2104 (set (match_dup 1) (plus:SI (match_dup 1) (match_dup 2)))]
2107 [(set_attr "type" "load")
2108 (set_attr "length" "4")])
2111 [(set (match_operand:HI 0 "register_operand" "=r")
2112 (zero_extend:HI (mem:QI (plus:SI
2113 (match_operand:SI 1 "register_operand" "+r")
2114 (match_operand:SI 2 "int5_operand" "L")))))
2115 (set (match_dup 1) (plus:SI (match_dup 1) (match_dup 2)))]
2118 [(set_attr "type" "load")
2119 (set_attr "length" "4")])
2122 [(set (mem:QI (plus:SI (match_operand:SI 0 "register_operand" "+r")
2123 (match_operand:SI 1 "int5_operand" "L")))
2124 (match_operand:QI 2 "reg_or_0_operand" "rM"))
2126 (plus:SI (match_dup 0) (match_dup 1)))]
2128 "stbs,mb %r2,%1(%0)"
2129 [(set_attr "type" "store")
2130 (set_attr "length" "4")])
2132 ;; The definition of this insn does not really explain what it does,
2133 ;; but it should suffice
2134 ;; that anything generated as this insn will be recognized as one
2135 ;; and that it will not successfully combine with anything.
2136 (define_expand "movstrsi"
2137 [(parallel [(set (match_operand:BLK 0 "" "")
2138 (match_operand:BLK 1 "" ""))
2139 (clobber (match_dup 7))
2140 (clobber (match_dup 8))
2141 (clobber (match_dup 4))
2142 (clobber (match_dup 5))
2143 (clobber (match_dup 6))
2144 (use (match_operand:SI 2 "arith_operand" ""))
2145 (use (match_operand:SI 3 "const_int_operand" ""))])]
2151 /* HP provides very fast block move library routine for the PA;
2152 this routine includes:
2154 4x4 byte at a time block moves,
2155 1x4 byte at a time with alignment checked at runtime with
2156 attempts to align the source and destination as needed
2159 With that in mind, here's the heuristics to try and guess when
2160 the inlined block move will be better than the library block
2163 If the size isn't constant, then always use the library routines.
2165 If the size is large in respect to the known alignment, then use
2166 the library routines.
2168 If the size is small in respect to the known alignment, then open
2169 code the copy (since that will lead to better scheduling).
2171 Else use the block move pattern. */
2173 /* Undetermined size, use the library routine. */
2174 if (GET_CODE (operands[2]) != CONST_INT)
2177 size = INTVAL (operands[2]);
2178 align = INTVAL (operands[3]);
2179 align = align > 4 ? 4 : align;
2181 /* If size/alignment > 8 (eg size is large in respect to alignment),
2182 then use the library routines. */
2183 if (size / align > 16)
2186 /* This does happen, but not often enough to worry much about. */
2187 if (size / align < MOVE_RATIO)
2190 /* Fall through means we're going to use our block move pattern. */
2192 = change_address (operands[0], VOIDmode,
2193 copy_to_mode_reg (SImode, XEXP (operands[0], 0)));
2195 = change_address (operands[1], VOIDmode,
2196 copy_to_mode_reg (SImode, XEXP (operands[1], 0)));
2197 operands[4] = gen_reg_rtx (SImode);
2198 operands[5] = gen_reg_rtx (SImode);
2199 operands[6] = gen_reg_rtx (SImode);
2200 operands[7] = XEXP (operands[0], 0);
2201 operands[8] = XEXP (operands[1], 0);
2204 ;; The operand constraints are written like this to support both compile-time
2205 ;; and run-time determined byte count. If the count is run-time determined,
2206 ;; the register with the byte count is clobbered by the copying code, and
2207 ;; therefore it is forced to operand 2. If the count is compile-time
2208 ;; determined, we need two scratch registers for the unrolled code.
2209 (define_insn "movstrsi_internal"
2210 [(set (mem:BLK (match_operand:SI 0 "register_operand" "+r,r"))
2211 (mem:BLK (match_operand:SI 1 "register_operand" "+r,r")))
2212 (clobber (match_dup 0))
2213 (clobber (match_dup 1))
2214 (clobber (match_operand:SI 2 "register_operand" "=r,r")) ;loop cnt/tmp
2215 (clobber (match_operand:SI 3 "register_operand" "=&r,&r")) ;item tmp
2216 (clobber (match_operand:SI 6 "register_operand" "=&r,&r")) ;item tmp2
2217 (use (match_operand:SI 4 "arith_operand" "J,2")) ;byte count
2218 (use (match_operand:SI 5 "const_int_operand" "n,n"))] ;alignment
2220 "* return output_block_move (operands, !which_alternative);"
2221 [(set_attr "type" "multi,multi")])
2223 ;; Floating point move insns
2225 ;; This pattern forces (set (reg:DF ...) (const_double ...))
2226 ;; to be reloaded by putting the constant into memory when
2227 ;; reg is a floating point register.
2229 ;; For integer registers we use ldil;ldo to set the appropriate
2232 ;; This must come before the movdf pattern, and it must be present
2233 ;; to handle obscure reloading cases.
2235 [(set (match_operand:DF 0 "register_operand" "=?r,f")
2236 (match_operand:DF 1 "" "?F,m"))]
2237 "GET_CODE (operands[1]) == CONST_DOUBLE
2238 && operands[1] != CONST0_RTX (DFmode)
2239 && ! TARGET_SOFT_FLOAT"
2240 "* return (which_alternative == 0 ? output_move_double (operands)
2241 : \"fldd%F1 %1,%0\");"
2242 [(set_attr "type" "move,fpload")
2243 (set_attr "length" "16,4")])
2245 (define_expand "movdf"
2246 [(set (match_operand:DF 0 "general_operand" "")
2247 (match_operand:DF 1 "general_operand" ""))]
2251 if (emit_move_sequence (operands, DFmode, 0))
2255 ;; Reloading an SImode or DImode value requires a scratch register if
2256 ;; going in to or out of float point registers.
;; Secondary-reload expanders for DFmode with an integer scratch register
;; (operand 2).  If emit_move_sequence handles the move, the clobber is
;; dropped; otherwise a plain SET is emitted directly so the scratch
;; clobber is never generated.  (Some interior lines are missing from
;; this excerpt — see the gaps in the original line numbers.)
2258 (define_expand "reload_indf"
2259 [(set (match_operand:DF 0 "register_operand" "=Z")
2260 (match_operand:DF 1 "non_hard_reg_operand" ""))
2261 (clobber (match_operand:DF 2 "register_operand" "=&r"))]
2265 if (emit_move_sequence (operands, DFmode, operands[2]))
2268 /* We don't want the clobber emitted, so handle this ourselves. */
2269 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
;; Same as reload_indf, but for moves out of the hard register class.
2273 (define_expand "reload_outdf"
2274 [(set (match_operand:DF 0 "non_hard_reg_operand" "")
2275 (match_operand:DF 1 "register_operand" "Z"))
2276 (clobber (match_operand:DF 2 "register_operand" "=&r"))]
2280 if (emit_move_sequence (operands, DFmode, operands[2]))
2283 /* We don't want the clobber emitted, so handle this ourselves. */
2284 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
;; DFmode move insn, hard-float variant: FP-register or zero moves go
;; through output_fp_move_double, everything else through
;; output_move_double.  (The (define_insn ...) header line is missing
;; from this excerpt.)
2289 [(set (match_operand:DF 0 "reg_or_nonsymb_mem_operand"
2290 "=f,*r,RQ,?o,?Q,f,*r,*r")
2291 (match_operand:DF 1 "reg_or_0_or_nonsymb_mem_operand"
2292 "fG,*rG,f,*r,*r,RQ,o,RQ"))]
2293 "(register_operand (operands[0], DFmode)
2294 || reg_or_0_operand (operands[1], DFmode))
2295 && ! (GET_CODE (operands[1]) == CONST_DOUBLE
2296 && GET_CODE (operands[0]) == MEM)
2297 && ! TARGET_SOFT_FLOAT"
2300 if (FP_REG_P (operands[0]) || FP_REG_P (operands[1])
2301 || operands[1] == CONST0_RTX (DFmode))
2302 return output_fp_move_double (operands);
2303 return output_move_double (operands);
2305 [(set_attr "type" "fpalu,move,fpstore,store,store,fpload,load,load")
2306 (set_attr "length" "4,8,4,8,16,4,8,16")])
;; DFmode move insn, soft-float variant: always output_move_double.
;; (Constraint strings for the operands are missing from this excerpt.)
2309 [(set (match_operand:DF 0 "reg_or_nonsymb_mem_operand"
2311 (match_operand:DF 1 "reg_or_0_or_nonsymb_mem_operand"
2313 "(register_operand (operands[0], DFmode)
2314 || reg_or_0_operand (operands[1], DFmode))
2315 && TARGET_SOFT_FLOAT"
2318 return output_move_double (operands);
2320 [(set_attr "type" "move,store,store,load,load")
2321 (set_attr "length" "8,8,16,8,16")])
;; Four DFmode indexed load/store patterns (flddx/fstdx), covering both
;; orders of (base, index) in the address.  Each C body checks whether
;; reload put the frame or stack pointer in the "index" slot and, if so,
;; swaps the roles of the two address operands in the emitted template.
;; (The (define_insn ...) header lines are missing from this excerpt.)
2324 [(set (match_operand:DF 0 "register_operand" "=fx")
2325 (mem:DF (plus:SI (match_operand:SI 1 "basereg_operand" "r")
2326 (match_operand:SI 2 "register_operand" "r"))))]
2327 "! TARGET_DISABLE_INDEXING && ! TARGET_SOFT_FLOAT"
2330 /* Reload can create backwards (relative to cse) unscaled index
2331 address modes when eliminating registers and possibly for
2332 pseudos that don't get hard registers.  Deal with it.  */
2333 if (operands[2] == hard_frame_pointer_rtx
2334 || operands[2] == stack_pointer_rtx)
2335 return \"flddx %1(%2),%0\";
2337 return \"flddx %2(%1),%0\";
2339 [(set_attr "type" "fpload")
2340 (set_attr "length" "4")])
;; Indexed DF load, operands in the opposite order.
2343 [(set (match_operand:DF 0 "register_operand" "=fx")
2344 (mem:DF (plus:SI (match_operand:SI 1 "register_operand" "r")
2345 (match_operand:SI 2 "basereg_operand" "r"))))]
2346 "! TARGET_DISABLE_INDEXING && ! TARGET_SOFT_FLOAT"
2349 /* Reload can create backwards (relative to cse) unscaled index
2350 address modes when eliminating registers and possibly for
2351 pseudos that don't get hard registers.  Deal with it.  */
2352 if (operands[1] == hard_frame_pointer_rtx
2353 || operands[1] == stack_pointer_rtx)
2354 return \"flddx %2(%1),%0\";
2356 return \"flddx %1(%2),%0\";
2358 [(set_attr "type" "fpload")
2359 (set_attr "length" "4")])
;; Indexed DF store (base, index).
2362 [(set (mem:DF (plus:SI (match_operand:SI 1 "basereg_operand" "r")
2363 (match_operand:SI 2 "register_operand" "r")))
2364 (match_operand:DF 0 "register_operand" "fx"))]
2365 "! TARGET_DISABLE_INDEXING && ! TARGET_SOFT_FLOAT"
2368 /* Reload can create backwards (relative to cse) unscaled index
2369 address modes when eliminating registers and possibly for
2370 pseudos that don't get hard registers.  Deal with it.  */
2371 if (operands[2] == hard_frame_pointer_rtx
2372 || operands[2] == stack_pointer_rtx)
2373 return \"fstdx %0,%1(%2)\";
2375 return \"fstdx %0,%2(%1)\";
2377 [(set_attr "type" "fpstore")
2378 (set_attr "length" "4")])
;; Indexed DF store, operands in the opposite order.
2381 [(set (mem:DF (plus:SI (match_operand:SI 1 "register_operand" "r")
2382 (match_operand:SI 2 "basereg_operand" "r")))
2383 (match_operand:DF 0 "register_operand" "fx"))]
2384 "! TARGET_DISABLE_INDEXING && ! TARGET_SOFT_FLOAT"
2387 /* Reload can create backwards (relative to cse) unscaled index
2388 address modes when eliminating registers and possibly for
2389 pseudos that don't get hard registers.  Deal with it.  */
2390 if (operands[1] == hard_frame_pointer_rtx
2391 || operands[1] == stack_pointer_rtx)
2392 return \"fstdx %0,%2(%1)\";
2394 return \"fstdx %0,%1(%2)\";
2396 [(set_attr "type" "fpstore")
2397 (set_attr "length" "4")])
;; DImode move expander: defers to emit_move_sequence.
;; (Tail of the body is missing from this excerpt.)
2399 (define_expand "movdi"
2400 [(set (match_operand:DI 0 "reg_or_nonsymb_mem_operand" "")
2401 (match_operand:DI 1 "general_operand" ""))]
2405 if (emit_move_sequence (operands, DImode, 0))
;; Secondary-reload expanders for DImode with an SImode scratch
;; (operand 2); same no-clobber fallback scheme as reload_indf above.
2409 (define_expand "reload_indi"
2410 [(set (match_operand:DI 0 "register_operand" "=Z")
2411 (match_operand:DI 1 "non_hard_reg_operand" ""))
2412 (clobber (match_operand:SI 2 "register_operand" "=&r"))]
2416 if (emit_move_sequence (operands, DImode, operands[2]))
2419 /* We don't want the clobber emitted, so handle this ourselves. */
2420 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2424 (define_expand "reload_outdi"
2425 [(set (match_operand:DI 0 "general_operand" "")
2426 (match_operand:DI 1 "register_operand" "Z"))
2427 (clobber (match_operand:SI 2 "register_operand" "=&r"))]
2431 if (emit_move_sequence (operands, DImode, operands[2]))
2434 /* We don't want the clobber emitted, so handle this ourselves. */
2435 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
;; (high:DI ...) insn: builds the two 32-bit halves of a DI constant.
;; Low word gets ldil of the constant's left part; high word gets
;; ldi -1/0 (sign) for CONST_INT, or the CONST_DOUBLE high word via
;; singlemove_string.  (Header and some brace lines missing here.)
2440 [(set (match_operand:DI 0 "register_operand" "=r")
2441 (high:DI (match_operand 1 "" "")))]
2445 rtx op0 = operands[0];
2446 rtx op1 = operands[1];
2448 if (GET_CODE (op1) == CONST_INT)
2450 operands[0] = operand_subword (op0, 1, 0, DImode);
2451 output_asm_insn (\"ldil L'%1,%0\", operands);
2453 operands[0] = operand_subword (op0, 0, 0, DImode);
2454 if (INTVAL (op1) < 0)
2455 output_asm_insn (\"ldi -1,%0\", operands);
2457 output_asm_insn (\"ldi 0,%0\", operands);
2460 else if (GET_CODE (op1) == CONST_DOUBLE)
2462 operands[0] = operand_subword (op0, 1, 0, DImode);
2463 operands[1] = GEN_INT (CONST_DOUBLE_LOW (op1));
2464 output_asm_insn (\"ldil L'%1,%0\", operands);
2466 operands[0] = operand_subword (op0, 0, 0, DImode);
2467 operands[1] = GEN_INT (CONST_DOUBLE_HIGH (op1));
2468 output_asm_insn (singlemove_string (operands), operands);
2474 [(set_attr "type" "move")
2475 (set_attr "length" "8")])
;; DImode move insn, hard-float variant (header line missing).
2478 [(set (match_operand:DI 0 "reg_or_nonsymb_mem_operand"
2479 "=r,o,Q,r,r,r,f,f,*TR")
2480 (match_operand:DI 1 "general_operand"
2481 "rM,r,r,o*R,Q,i,fM,*TR,f"))]
2482 "(register_operand (operands[0], DImode)
2483 || reg_or_0_operand (operands[1], DImode))
2484 && ! TARGET_SOFT_FLOAT"
2487 if (FP_REG_P (operands[0]) || FP_REG_P (operands[1])
2488 || (operands[1] == CONST0_RTX (DImode)))
2489 return output_fp_move_double (operands);
2490 return output_move_double (operands);
2492 [(set_attr "type" "move,store,store,load,load,multi,fpalu,fpload,fpstore")
2493 (set_attr "length" "8,8,16,8,16,16,4,4,4")])
;; DImode move insn, soft-float variant (constraints missing here).
2496 [(set (match_operand:DI 0 "reg_or_nonsymb_mem_operand"
2498 (match_operand:DI 1 "general_operand"
2500 "(register_operand (operands[0], DImode)
2501 || reg_or_0_operand (operands[1], DImode))
2502 && TARGET_SOFT_FLOAT"
2505 return output_move_double (operands);
2507 [(set_attr "type" "move,store,store,load,load,multi")
2508 (set_attr "length" "8,8,16,8,16,16")])
;; (lo_sum:DI ...): adds the right part of an immediate via ldo; the
;; second alternative first copies operand 1 into the destination.
2511 [(set (match_operand:DI 0 "register_operand" "=r,&r")
2512 (lo_sum:DI (match_operand:DI 1 "register_operand" "0,r")
2513 (match_operand:DI 2 "immediate_operand" "i,i")))]
2517 /* Don't output a 64 bit constant, since we can't trust the assembler to
2518 handle it correctly.  */
2519 if (GET_CODE (operands[2]) == CONST_DOUBLE)
2520 operands[2] = GEN_INT (CONST_DOUBLE_LOW (operands[2]));
2521 if (which_alternative == 1)
2522 output_asm_insn (\"copy %1,%0\", operands);
2523 return \"ldo R'%G2(%R1),%R0\";
2525 [(set_attr "type" "move,move")
2526 (set_attr "length" "4,8")])
2528 ;; This pattern forces (set (reg:SF ...) (const_double ...))
2529 ;; to be reloaded by putting the constant into memory when
2530 ;; reg is a floating point register.
2532 ;; For integer registers we use ldil;ldo to set the appropriate
2535 ;; This must come before the movsf pattern, and it must be present
2536 ;; to handle obscure reloading cases.
;; Reload pattern for (set (reg:SF) (const_double)), analogous to the DF
;; version above: integer registers via singlemove_string, or fldw from
;; memory into an FP register.  (The (define_insn ...) header is missing
;; from this excerpt.)
2538 [(set (match_operand:SF 0 "register_operand" "=?r,f")
2539 (match_operand:SF 1 "" "?F,m"))]
2540 "GET_CODE (operands[1]) == CONST_DOUBLE
2541 && operands[1] != CONST0_RTX (SFmode)
2542 && ! TARGET_SOFT_FLOAT"
2543 "* return (which_alternative == 0 ? singlemove_string (operands)
2544 : \" fldw%F1 %1,%0\");"
2545 [(set_attr "type" "move,fpload")
2546 (set_attr "length" "8,4")])
;; movsf expander: defers to emit_move_sequence
;; (tail of the expander body is missing from this excerpt).
2548 (define_expand "movsf"
2549 [(set (match_operand:SF 0 "general_operand" "")
2550 (match_operand:SF 1 "general_operand" ""))]
2554 if (emit_move_sequence (operands, SFmode, 0))
2558 ;; Reloading an SImode or DImode value requires a scratch register if
2559 ;; going into or out of floating point registers.
;; Secondary-reload expanders for SFmode with a scratch register
;; (operand 2); same no-clobber fallback scheme as the DF/DI versions.
2561 (define_expand "reload_insf"
2562 [(set (match_operand:SF 0 "register_operand" "=Z")
2563 (match_operand:SF 1 "non_hard_reg_operand" ""))
2564 (clobber (match_operand:SF 2 "register_operand" "=&r"))]
2568 if (emit_move_sequence (operands, SFmode, operands[2]))
2571 /* We don't want the clobber emitted, so handle this ourselves. */
2572 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2576 (define_expand "reload_outsf"
2577 [(set (match_operand:SF 0 "non_hard_reg_operand" "")
2578 (match_operand:SF 1 "register_operand" "Z"))
2579 (clobber (match_operand:SF 2 "register_operand" "=&r"))]
2583 if (emit_move_sequence (operands, SFmode, operands[2]))
2586 /* We don't want the clobber emitted, so handle this ourselves. */
2587 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
;; SFmode move insn, hard-float variant (header and output template
;; lines are missing from this excerpt).
2592 [(set (match_operand:SF 0 "reg_or_nonsymb_mem_operand"
2594 (match_operand:SF 1 "reg_or_0_or_nonsymb_mem_operand"
2595 "fG,rG,RQ,RQ,f,rG"))]
2596 "(register_operand (operands[0], SFmode)
2597 || reg_or_0_operand (operands[1], SFmode))
2598 && ! TARGET_SOFT_FLOAT"
2606 [(set_attr "type" "fpalu,move,fpload,load,fpstore,store")
2607 (set_attr "pa_combine_type" "addmove")
2608 (set_attr "length" "4,4,4,4,4,4")])
;; SFmode move insn, soft-float variant (header/constraints/template
;; lines are missing from this excerpt).
2611 [(set (match_operand:SF 0 "reg_or_nonsymb_mem_operand"
2613 (match_operand:SF 1 "reg_or_0_or_nonsymb_mem_operand"
2615 "(register_operand (operands[0], SFmode)
2616 || reg_or_0_operand (operands[1], SFmode))
2617 && TARGET_SOFT_FLOAT"
2622 [(set_attr "type" "move,load,store")
2623 (set_attr "pa_combine_type" "addmove")
2624 (set_attr "length" "4,4,4")])
;; Four SFmode indexed load/store patterns (fldwx/fstwx), mirroring the
;; DF flddx/fstdx group above: each C body swaps the base/index operands
;; when reload placed the frame or stack pointer in the index slot.
;; (The (define_insn ...) header lines are missing from this excerpt.)
2627 [(set (match_operand:SF 0 "register_operand" "=fx")
2628 (mem:SF (plus:SI (match_operand:SI 1 "basereg_operand" "r")
2629 (match_operand:SI 2 "register_operand" "r"))))]
2630 "! TARGET_DISABLE_INDEXING && ! TARGET_SOFT_FLOAT"
2633 /* Reload can create backwards (relative to cse) unscaled index
2634 address modes when eliminating registers and possibly for
2635 pseudos that don't get hard registers.  Deal with it.  */
2636 if (operands[2] == hard_frame_pointer_rtx
2637 || operands[2] == stack_pointer_rtx)
2638 return \"fldwx %1(%2),%0\";
2640 return \"fldwx %2(%1),%0\";
2642 [(set_attr "type" "fpload")
2643 (set_attr "length" "4")])
;; Indexed SF load, operands in the opposite order.
2646 [(set (match_operand:SF 0 "register_operand" "=fx")
2647 (mem:SF (plus:SI (match_operand:SI 1 "register_operand" "r")
2648 (match_operand:SI 2 "basereg_operand" "r"))))]
2649 "! TARGET_DISABLE_INDEXING && ! TARGET_SOFT_FLOAT"
2652 /* Reload can create backwards (relative to cse) unscaled index
2653 address modes when eliminating registers and possibly for
2654 pseudos that don't get hard registers.  Deal with it.  */
2655 if (operands[1] == hard_frame_pointer_rtx
2656 || operands[1] == stack_pointer_rtx)
2657 return \"fldwx %2(%1),%0\";
2659 return \"fldwx %1(%2),%0\";
2661 [(set_attr "type" "fpload")
2662 (set_attr "length" "4")])
;; Indexed SF store (base, index).
2665 [(set (mem:SF (plus:SI (match_operand:SI 1 "basereg_operand" "r")
2666 (match_operand:SI 2 "register_operand" "r")))
2667 (match_operand:SF 0 "register_operand" "fx"))]
2668 "! TARGET_DISABLE_INDEXING && ! TARGET_SOFT_FLOAT"
2671 /* Reload can create backwards (relative to cse) unscaled index
2672 address modes when eliminating registers and possibly for
2673 pseudos that don't get hard registers.  Deal with it.  */
2674 if (operands[2] == hard_frame_pointer_rtx
2675 || operands[2] == stack_pointer_rtx)
2676 return \"fstwx %0,%1(%2)\";
2678 return \"fstwx %0,%2(%1)\";
2680 [(set_attr "type" "fpstore")
2681 (set_attr "length" "4")])
;; Indexed SF store, operands in the opposite order.
2684 [(set (mem:SF (plus:SI (match_operand:SI 1 "register_operand" "r")
2685 (match_operand:SI 2 "basereg_operand" "r")))
2686 (match_operand:SF 0 "register_operand" "fx"))]
2687 "! TARGET_DISABLE_INDEXING && ! TARGET_SOFT_FLOAT"
2690 /* Reload can create backwards (relative to cse) unscaled index
2691 address modes when eliminating registers and possibly for
2692 pseudos that don't get hard registers.  Deal with it.  */
2693 if (operands[1] == hard_frame_pointer_rtx
2694 || operands[1] == stack_pointer_rtx)
2695 return \"fstwx %0,%2(%1)\";
2697 return \"fstwx %0,%1(%2)\";
2699 [(set_attr "type" "fpstore")
2700 (set_attr "length" "4")])
2703 ;;- zero extension instructions
2704 ;; We have define_expand for zero extension patterns to make sure the
2705 ;; operands get loaded into registers. The define_insns accept
2706 ;; memory operands. This gives us better overall code than just
2707 ;; having a pattern that does or does not accept memory operands.
;; Zero-extension expand/insn pairs.  The anonymous insns take either a
;; register (extract/shift form) or memory (load form), as the type
;; attribute "shift,load" shows.  NOTE(review): the (zero_extend ...)
;; lines and the output templates are missing from this excerpt.
2709 (define_expand "zero_extendhisi2"
2710 [(set (match_operand:SI 0 "register_operand" "")
2712 (match_operand:HI 1 "register_operand" "")))]
2717 [(set (match_operand:SI 0 "register_operand" "=r,r")
2719 (match_operand:HI 1 "move_operand" "r,RQ")))]
2720 "GET_CODE (operands[1]) != CONST_INT"
2724 [(set_attr "type" "shift,load")
2725 (set_attr "length" "4,4")])
2727 (define_expand "zero_extendqihi2"
2728 [(set (match_operand:HI 0 "register_operand" "")
2730 (match_operand:QI 1 "register_operand" "")))]
2735 [(set (match_operand:HI 0 "register_operand" "=r,r")
2737 (match_operand:QI 1 "move_operand" "r,RQ")))]
2738 "GET_CODE (operands[1]) != CONST_INT"
2742 [(set_attr "type" "shift,load")
2743 (set_attr "length" "4,4")])
2745 (define_expand "zero_extendqisi2"
2746 [(set (match_operand:SI 0 "register_operand" "")
2748 (match_operand:QI 1 "register_operand" "")))]
2753 [(set (match_operand:SI 0 "register_operand" "=r,r")
2755 (match_operand:QI 1 "move_operand" "r,RQ")))]
2756 "GET_CODE (operands[1]) != CONST_INT"
2760 [(set_attr "type" "shift,load")
2761 (set_attr "length" "4,4")])
2763 ;;- sign extension instructions
;; Sign extensions are register-only, single 4-byte "shift"-class insns
;; (output templates are missing from this excerpt).
2765 (define_insn "extendhisi2"
2766 [(set (match_operand:SI 0 "register_operand" "=r")
2767 (sign_extend:SI (match_operand:HI 1 "register_operand" "r")))]
2770 [(set_attr "type" "shift")
2771 (set_attr "length" "4")])
2773 (define_insn "extendqihi2"
2774 [(set (match_operand:HI 0 "register_operand" "=r")
2775 (sign_extend:HI (match_operand:QI 1 "register_operand" "r")))]
2778 [(set_attr "type" "shift")
2779 (set_attr "length" "4")])
2781 (define_insn "extendqisi2"
2782 [(set (match_operand:SI 0 "register_operand" "=r")
2783 (sign_extend:SI (match_operand:QI 1 "register_operand" "r")))]
2786 [(set_attr "type" "shift")
2787 (set_attr "length" "4")])
2789 ;; Conversions between float and double.
;; SF<->DF conversions via fcnvff with explicit source,destination
;; format completers (sgl = single, dbl = double).
2791 (define_insn "extendsfdf2"
2792 [(set (match_operand:DF 0 "register_operand" "=f")
2794 (match_operand:SF 1 "register_operand" "f")))]
2795 "! TARGET_SOFT_FLOAT"
2796 "fcnvff,sgl,dbl %1,%0"
2797 [(set_attr "type" "fpalu")
2798 (set_attr "length" "4")])
2800 (define_insn "truncdfsf2"
2801 [(set (match_operand:SF 0 "register_operand" "=f")
2803 (match_operand:DF 1 "register_operand" "f")))]
2804 "! TARGET_SOFT_FLOAT"
2805 "fcnvff,dbl,sgl %1,%0"
2806 [(set_attr "type" "fpalu")
2807 (set_attr "length" "4")])
2809 ;; Conversion between fixed point and floating point.
2810 ;; Note that among the fix-to-float insns
2811 ;; the ones that start with SImode come first.
2812 ;; That is so that an operand that is a CONST_INT
2813 ;; (and therefore lacks a specific machine mode)
2814 ;; will be recognized as SImode (which is always valid)
2815 ;; rather than as QImode or HImode.
2817 ;; This pattern forces (set (reg:SF ...) (float:SF (const_int ...)))
2818 ;; to be reloaded by putting the constant into memory.
2819 ;; It must come before the more general floatsisf2 pattern.
2821 [(set (match_operand:SF 0 "register_operand" "=f")
2822 (float:SF (match_operand:SI 1 "const_int_operand" "m")))]
2823 "! TARGET_SOFT_FLOAT"
2824 "fldw%F1 %1,%0\;fcnvxf,sgl,sgl %0,%0"
2825 [(set_attr "type" "fpalu")
2826 (set_attr "length" "8")])
2828 (define_insn "floatsisf2"
2829 [(set (match_operand:SF 0 "register_operand" "=f")
2830 (float:SF (match_operand:SI 1 "register_operand" "f")))]
2831 "! TARGET_SOFT_FLOAT"
2832 "fcnvxf,sgl,sgl %1,%0"
2833 [(set_attr "type" "fpalu")
2834 (set_attr "length" "4")])
2836 ;; This pattern forces (set (reg:DF ...) (float:DF (const_int ...)))
2837 ;; to be reloaded by putting the constant into memory.
2838 ;; It must come before the more general floatsidf2 pattern.
2840 [(set (match_operand:DF 0 "register_operand" "=f")
2841 (float:DF (match_operand:SI 1 "const_int_operand" "m")))]
2842 "! TARGET_SOFT_FLOAT"
2843 "fldw%F1 %1,%0\;fcnvxf,sgl,dbl %0,%0"
2844 [(set_attr "type" "fpalu")
2845 (set_attr "length" "8")])
2847 (define_insn "floatsidf2"
2848 [(set (match_operand:DF 0 "register_operand" "=f")
2849 (float:DF (match_operand:SI 1 "register_operand" "f")))]
2850 "! TARGET_SOFT_FLOAT"
2851 "fcnvxf,sgl,dbl %1,%0"
2852 [(set_attr "type" "fpalu")
2853 (set_attr "length" "4")])
;; Unsigned SI -> float: widen the operand into the low word of a fresh
;; DI temporary (high word set by the line missing from this excerpt),
;; then convert the DImode value.  PA 1.1 only.
2855 (define_expand "floatunssisf2"
2856 [(set (subreg:SI (match_dup 2) 1)
2857 (match_operand:SI 1 "register_operand" ""))
2858 (set (subreg:SI (match_dup 2) 0)
2860 (set (match_operand:SF 0 "register_operand" "")
2861 (float:SF (match_dup 2)))]
2862 "TARGET_PA_11 && ! TARGET_SOFT_FLOAT"
2863 "operands[2] = gen_reg_rtx (DImode);")
2865 (define_expand "floatunssidf2"
2866 [(set (subreg:SI (match_dup 2) 1)
2867 (match_operand:SI 1 "register_operand" ""))
2868 (set (subreg:SI (match_dup 2) 0)
2870 (set (match_operand:DF 0 "register_operand" "")
2871 (float:DF (match_dup 2)))]
2872 "TARGET_PA_11 && ! TARGET_SOFT_FLOAT"
2873 "operands[2] = gen_reg_rtx (DImode);")
2875 (define_insn "floatdisf2"
2876 [(set (match_operand:SF 0 "register_operand" "=f")
2877 (float:SF (match_operand:DI 1 "register_operand" "f")))]
2878 "TARGET_PA_11 && ! TARGET_SOFT_FLOAT"
2879 "fcnvxf,dbl,sgl %1,%0"
2880 [(set_attr "type" "fpalu")
2881 (set_attr "length" "4")])
2883 (define_insn "floatdidf2"
2884 [(set (match_operand:DF 0 "register_operand" "=f")
2885 (float:DF (match_operand:DI 1 "register_operand" "f")))]
2886 "TARGET_PA_11 && ! TARGET_SOFT_FLOAT"
2887 "fcnvxf,dbl,dbl %1,%0"
2888 [(set_attr "type" "fpalu")
2889 (set_attr "length" "4")])
2891 ;; Convert a float to an actual integer.
2892 ;; Truncation is performed as part of the conversion
;; (fcnvfxt = convert float to fixed with truncation).
2894 (define_insn "fix_truncsfsi2"
2895 [(set (match_operand:SI 0 "register_operand" "=f")
2896 (fix:SI (fix:SF (match_operand:SF 1 "register_operand" "f"))))]
2897 "! TARGET_SOFT_FLOAT"
2898 "fcnvfxt,sgl,sgl %1,%0"
2899 [(set_attr "type" "fpalu")
2900 (set_attr "length" "4")])
2902 (define_insn "fix_truncdfsi2"
2903 [(set (match_operand:SI 0 "register_operand" "=f")
2904 (fix:SI (fix:DF (match_operand:DF 1 "register_operand" "f"))))]
2905 "! TARGET_SOFT_FLOAT"
2906 "fcnvfxt,dbl,sgl %1,%0"
2907 [(set_attr "type" "fpalu")
2908 (set_attr "length" "4")])
2910 (define_insn "fix_truncsfdi2"
2911 [(set (match_operand:DI 0 "register_operand" "=f")
2912 (fix:DI (fix:SF (match_operand:SF 1 "register_operand" "f"))))]
2913 "TARGET_PA_11 && ! TARGET_SOFT_FLOAT"
2914 "fcnvfxt,sgl,dbl %1,%0"
2915 [(set_attr "type" "fpalu")
2916 (set_attr "length" "4")])
2918 (define_insn "fix_truncdfdi2"
2919 [(set (match_operand:DI 0 "register_operand" "=f")
2920 (fix:DI (fix:DF (match_operand:DF 1 "register_operand" "f"))))]
2921 "TARGET_PA_11 && ! TARGET_SOFT_FLOAT"
2922 "fcnvfxt,dbl,dbl %1,%0"
2923 [(set_attr "type" "fpalu")
2924 (set_attr "length" "4")])
2926 ;;- arithmetic instructions
;; 64-bit add expander (insn below does the two-word sequence).
2928 (define_expand "adddi3"
2929 [(set (match_operand:DI 0 "register_operand" "")
2930 (plus:DI (match_operand:DI 1 "register_operand" "")
2931 (match_operand:DI 2 "arith11_operand" "")))]
;; adddi3 insn: add low words then propagate the carry with addc, or
;; borrow with subb when the 11-bit immediate addend is negative.
2936 [(set (match_operand:DI 0 "register_operand" "=r")
2937 (plus:DI (match_operand:DI 1 "register_operand" "%r")
2938 (match_operand:DI 2 "arith11_operand" "rI")))]
2942 if (GET_CODE (operands[2]) == CONST_INT)
2944 if (INTVAL (operands[2]) >= 0)
2945 return \"addi %2,%R1,%R0\;addc %1,0,%0\";
2947 return \"addi %2,%R1,%R0\;subb %1,0,%0\";
2950 return \"add %R2,%R1,%R0\;addc %2,%1,%0\";
2952 [(set_attr "type" "binary")
2953 (set_attr "length" "8")])
;; (plus (not x) y): single uaddcm-style insn (template missing here).
2956 [(set (match_operand:SI 0 "register_operand" "=r")
2957 (plus:SI (not:SI (match_operand:SI 1 "register_operand" "r"))
2958 (match_operand:SI 2 "register_operand" "r")))]
2961 [(set_attr "type" "binary")
2962 (set_attr "length" "4")])
2964 ;; define_splits to optimize cases of adding a constant integer
2965 ;; to a register when the constant does not fit in 14 bits.
;; Split 1: constant fits in 15 bits — peel off a 14-bit low part
;; (-0x2000 or 0x1fff by sign) and add the remainder first.
2967 [(set (match_operand:SI 0 "register_operand" "")
2968 (plus:SI (match_operand:SI 1 "register_operand" "")
2969 (match_operand:SI 2 "const_int_operand" "")))
2970 (clobber (match_operand:SI 4 "register_operand" ""))]
2971 "! cint_ok_for_move (INTVAL (operands[2]))
2972 && VAL_14_BITS_P (INTVAL (operands[2]) >> 1)"
2973 [(set (match_dup 4) (plus:SI (match_dup 1) (match_dup 2)))
2974 (set (match_dup 0) (plus:SI (match_dup 4) (match_dup 3)))]
2977 int val = INTVAL (operands[2]);
2978 int low = (val < 0) ? -0x2000 : 0x1fff;
2979 int rest = val - low;
2981 operands[2] = GEN_INT (rest);
2982 operands[3] = GEN_INT (low);
;; Split 2: express the constant as (loadable constant) * {2,4,8} and
;; use a shift-and-add (mult by 2/4/8) form; failing that, negate the
;; constant and subtract instead.
2986 [(set (match_operand:SI 0 "register_operand" "")
2987 (plus:SI (match_operand:SI 1 "register_operand" "")
2988 (match_operand:SI 2 "const_int_operand" "")))
2989 (clobber (match_operand:SI 4 "register_operand" ""))]
2990 "! cint_ok_for_move (INTVAL (operands[2]))"
2991 [(set (match_dup 4) (match_dup 2))
2992 (set (match_dup 0) (plus:SI (mult:SI (match_dup 4) (match_dup 3))
2996 HOST_WIDE_INT intval = INTVAL (operands[2]);
2998 /* Try dividing the constant by 2, then 4, and finally 8 to see
2999 if we can get a constant which can be loaded into a register
3000 in a single instruction (cint_ok_for_move).
3002 If that fails, try to negate the constant and subtract it
3003 from our input operand.  */
3004 if (intval % 2 == 0 && cint_ok_for_move (intval / 2))
3006 operands[2] = GEN_INT (intval / 2);
3007 operands[3] = GEN_INT (2);
3009 else if (intval % 4 == 0 && cint_ok_for_move (intval / 4))
3011 operands[2] = GEN_INT (intval / 4);
3012 operands[3] = GEN_INT (4);
3014 else if (intval % 8 == 0 && cint_ok_for_move (intval / 8))
3016 operands[2] = GEN_INT (intval / 8);
3017 operands[3] = GEN_INT (8);
3019 else if (cint_ok_for_move (-intval))
3021 emit_insn (gen_rtx_SET (VOIDmode, operands[4], GEN_INT (-intval)));
3022 emit_insn (gen_subsi3 (operands[0], operands[1], operands[4]));
;; 32-bit add (output templates missing from this excerpt).
3029 (define_insn "addsi3"
3030 [(set (match_operand:SI 0 "register_operand" "=r,r")
3031 (plus:SI (match_operand:SI 1 "register_operand" "%r,r")
3032 (match_operand:SI 2 "arith_operand" "r,J")))]
3037 [(set_attr "type" "binary,binary")
3038 (set_attr "pa_combine_type" "addmove")
3039 (set_attr "length" "4,4")])
3041 ;; Disgusting kludge to work around reload bugs with frame pointer
3042 ;; elimination.  Similar to other magic reload patterns in the
3043 ;; indexed memory operations.
3045 [(set (match_operand:SI 0 "register_operand" "=&r")
3046 (plus:SI (plus:SI (match_operand:SI 1 "register_operand" "%r")
3047 (match_operand:SI 2 "register_operand" "r"))
3048 (match_operand:SI 3 "const_int_operand" "rL")))]
3049 "reload_in_progress"
3052 if (GET_CODE (operands[3]) == CONST_INT)
3053 return \"ldo %3(%2),%0\;addl %1,%0,%0\";
3055 return \"addl %3,%2,%0\;addl %1,%0,%0\";
3057 [(set_attr "type" "binary")
3058 (set_attr "length" "8")])
;; 64-bit subtract: low words with sub, then subb for the borrow.
3060 (define_expand "subdi3"
3061 [(set (match_operand:DI 0 "register_operand" "")
3062 (minus:DI (match_operand:DI 1 "register_operand" "")
3063 (match_operand:DI 2 "register_operand" "")))]
3068 [(set (match_operand:DI 0 "register_operand" "=r")
3069 (minus:DI (match_operand:DI 1 "register_operand" "r")
3070 (match_operand:DI 2 "register_operand" "r")))]
3072 "sub %R1,%R2,%R0\;subb %1,%2,%0"
3073 [(set_attr "type" "binary")
3074 (set_attr "length" "8")])
;; 32-bit subtract (output templates missing from this excerpt).
3076 (define_insn "subsi3"
3077 [(set (match_operand:SI 0 "register_operand" "=r,r")
3078 (minus:SI (match_operand:SI 1 "arith11_operand" "r,I")
3079 (match_operand:SI 2 "register_operand" "r,r")))]
3084 [(set_attr "type" "binary,binary")
3085 (set_attr "length" "4,4")])
3087 ;; Clobbering a "register_operand" instead of a match_scratch
3088 ;; in operand3 of millicode calls avoids spilling %r1 and
3089 ;; produces better code.
3091 ;; The mulsi3 insns set up registers for the millicode call.
;; mulsi3 expander: on PA 1.1 with FP registers available, do the
;; multiply in the FPU via umulsidi3 and take the low SImode subword;
;; otherwise fall through to the millicode-call pattern below, which
;; passes arguments in %r26/%r25 and returns in %r29.
3092 (define_expand "mulsi3"
3093 [(set (reg:SI 26) (match_operand:SI 1 "move_operand" ""))
3094 (set (reg:SI 25) (match_operand:SI 2 "move_operand" ""))
3095 (parallel [(set (reg:SI 29) (mult:SI (reg:SI 26) (reg:SI 25)))
3096 (clobber (match_dup 3))
3097 (clobber (reg:SI 26))
3098 (clobber (reg:SI 25))
3099 (clobber (reg:SI 31))])
3100 (set (match_operand:SI 0 "general_operand" "") (reg:SI 29))]
3104 if (TARGET_PA_11 && ! TARGET_DISABLE_FPREGS && ! TARGET_SOFT_FLOAT)
3106 rtx scratch = gen_reg_rtx (DImode);
3107 operands[1] = force_reg (SImode, operands[1]);
3108 operands[2] = force_reg (SImode, operands[2]);
3109 emit_insn (gen_umulsidi3 (scratch, operands[1], operands[2]));
3110 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
3111 gen_rtx_SUBREG (SImode, scratch, 1)));
3114 operands[3] = gen_reg_rtx (SImode);
;; Unsigned 32x32->64 multiply in the FP unit (PA 1.1 xmpyu; the
;; output template line is missing from this excerpt).
3117 (define_insn "umulsidi3"
3118 [(set (match_operand:DI 0 "nonimmediate_operand" "=f")
3119 (mult:DI (zero_extend:DI (match_operand:SI 1 "nonimmediate_operand" "f"))
3120 (zero_extend:DI (match_operand:SI 2 "nonimmediate_operand" "f"))))]
3121 "TARGET_PA_11 && ! TARGET_DISABLE_FPREGS && ! TARGET_SOFT_FLOAT"
3123 [(set_attr "type" "fpmuldbl")
3124 (set_attr "length" "4")])
;; Variant with a 32-bit-representable DImode constant operand.
3127 [(set (match_operand:DI 0 "nonimmediate_operand" "=f")
3128 (mult:DI (zero_extend:DI (match_operand:SI 1 "nonimmediate_operand" "f"))
3129 (match_operand:DI 2 "uint32_operand" "f")))]
3130 "TARGET_PA_11 && ! TARGET_DISABLE_FPREGS && ! TARGET_SOFT_FLOAT"
3132 [(set_attr "type" "fpmuldbl")
3133 (set_attr "length" "4")])
;; Millicode multiply call; length depends on how the call must be
;; made (direct in range / no space regs / out-of-range / PIC-or-
;; portable-runtime) — several arms of the cond are missing here.
3136 [(set (reg:SI 29) (mult:SI (reg:SI 26) (reg:SI 25)))
3137 (clobber (match_operand:SI 0 "register_operand" "=a"))
3138 (clobber (reg:SI 26))
3139 (clobber (reg:SI 25))
3140 (clobber (reg:SI 31))]
3142 "* return output_mul_insn (0, insn);"
3143 [(set_attr "type" "milli")
3144 (set (attr "length")
3146 ;; Target (or stub) within reach
3147 (and (lt (plus (symbol_ref "total_code_bytes") (pc))
3149 (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
3154 (ne (symbol_ref "TARGET_NO_SPACE_REGS || TARGET_FAST_INDIRECT_CALLS")
3158 ;; Out of reach, but not PIC or PORTABLE_RUNTIME
3159 ;; same as NO_SPACE_REGS code
3160 (and (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
3162 (eq (symbol_ref "flag_pic")
3166 ;; Out of range and either PIC or PORTABLE_RUNTIME
3169 ;;; Division and mod.
;; Division and modulus all go through millicode calls with the same
;; register convention as mulsi3: dividend in %r26, divisor in %r25,
;; result in %r29; %r31 holds the millicode return address.  Each
;; expander first tries emit_hpdiv_const for constant divisors.
;; The length cond of each millicode insn distinguishes in-range,
;; no-space-regs, out-of-range, and PIC/portable-runtime calls (several
;; arms are missing from this excerpt).
3170 (define_expand "divsi3"
3171 [(set (reg:SI 26) (match_operand:SI 1 "move_operand" ""))
3172 (set (reg:SI 25) (match_operand:SI 2 "move_operand" ""))
3173 (parallel [(set (reg:SI 29) (div:SI (reg:SI 26) (reg:SI 25)))
3174 (clobber (match_dup 3))
3175 (clobber (match_dup 4))
3176 (clobber (reg:SI 26))
3177 (clobber (reg:SI 25))
3178 (clobber (reg:SI 31))])
3179 (set (match_operand:SI 0 "general_operand" "") (reg:SI 29))]
3183 operands[3] = gen_reg_rtx (SImode);
3184 operands[4] = gen_reg_rtx (SImode);
3185 if (GET_CODE (operands[2]) == CONST_INT && emit_hpdiv_const (operands, 0))
;; Signed-divide millicode insn (header line missing here).
3191 (div:SI (reg:SI 26) (match_operand:SI 0 "div_operand" "")))
3192 (clobber (match_operand:SI 1 "register_operand" "=a"))
3193 (clobber (match_operand:SI 2 "register_operand" "=&r"))
3194 (clobber (reg:SI 26))
3195 (clobber (reg:SI 25))
3196 (clobber (reg:SI 31))]
3199 return output_div_insn (operands, 0, insn);"
3200 [(set_attr "type" "milli")
3201 (set (attr "length")
3203 ;; Target (or stub) within reach
3204 (and (lt (plus (symbol_ref "total_code_bytes") (pc))
3206 (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
3211 (ne (symbol_ref "TARGET_NO_SPACE_REGS || TARGET_FAST_INDIRECT_CALLS")
3215 ;; Out of reach, but not PIC or PORTABLE_RUNTIME
3216 ;; same as NO_SPACE_REGS code
3217 (and (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
3219 (eq (symbol_ref "flag_pic")
3223 ;; Out of range and either PIC or PORTABLE_RUNTIME
;; Unsigned divide: same shape, emit_hpdiv_const/output_div_insn arg 1.
3226 (define_expand "udivsi3"
3227 [(set (reg:SI 26) (match_operand:SI 1 "move_operand" ""))
3228 (set (reg:SI 25) (match_operand:SI 2 "move_operand" ""))
3229 (parallel [(set (reg:SI 29) (udiv:SI (reg:SI 26) (reg:SI 25)))
3230 (clobber (match_dup 3))
3231 (clobber (match_dup 4))
3232 (clobber (reg:SI 26))
3233 (clobber (reg:SI 25))
3234 (clobber (reg:SI 31))])
3235 (set (match_operand:SI 0 "general_operand" "") (reg:SI 29))]
3239 operands[3] = gen_reg_rtx (SImode);
3240 operands[4] = gen_reg_rtx (SImode);
3241 if (GET_CODE (operands[2]) == CONST_INT && emit_hpdiv_const (operands, 1))
3247 (udiv:SI (reg:SI 26) (match_operand:SI 0 "div_operand" "")))
3248 (clobber (match_operand:SI 1 "register_operand" "=a"))
3249 (clobber (match_operand:SI 2 "register_operand" "=&r"))
3250 (clobber (reg:SI 26))
3251 (clobber (reg:SI 25))
3252 (clobber (reg:SI 31))]
3255 return output_div_insn (operands, 1, insn);"
3256 [(set_attr "type" "milli")
3257 (set (attr "length")
3259 ;; Target (or stub) within reach
3260 (and (lt (plus (symbol_ref "total_code_bytes") (pc))
3262 (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
3267 (ne (symbol_ref "TARGET_NO_SPACE_REGS || TARGET_FAST_INDIRECT_CALLS")
3271 ;; Out of reach, but not PIC or PORTABLE_RUNTIME
3272 ;; same as NO_SPACE_REGS code
3273 (and (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
3275 (eq (symbol_ref "flag_pic")
3279 ;; Out of range and either PIC or PORTABLE_RUNTIME
;; Signed modulus: same call convention, output_mod_insn (0, insn).
3282 (define_expand "modsi3"
3283 [(set (reg:SI 26) (match_operand:SI 1 "move_operand" ""))
3284 (set (reg:SI 25) (match_operand:SI 2 "move_operand" ""))
3285 (parallel [(set (reg:SI 29) (mod:SI (reg:SI 26) (reg:SI 25)))
3286 (clobber (match_dup 3))
3287 (clobber (match_dup 4))
3288 (clobber (reg:SI 26))
3289 (clobber (reg:SI 25))
3290 (clobber (reg:SI 31))])
3291 (set (match_operand:SI 0 "general_operand" "") (reg:SI 29))]
3295 operands[4] = gen_reg_rtx (SImode);
3296 operands[3] = gen_reg_rtx (SImode);
3300 [(set (reg:SI 29) (mod:SI (reg:SI 26) (reg:SI 25)))
3301 (clobber (match_operand:SI 0 "register_operand" "=a"))
3302 (clobber (match_operand:SI 2 "register_operand" "=&r"))
3303 (clobber (reg:SI 26))
3304 (clobber (reg:SI 25))
3305 (clobber (reg:SI 31))]
3308 return output_mod_insn (0, insn);"
3309 [(set_attr "type" "milli")
3310 (set (attr "length")
3312 ;; Target (or stub) within reach
3313 (and (lt (plus (symbol_ref "total_code_bytes") (pc))
3315 (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
3320 (ne (symbol_ref "TARGET_NO_SPACE_REGS || TARGET_FAST_INDIRECT_CALLS")
3324 ;; Out of reach, but not PIC or PORTABLE_RUNTIME
3325 ;; same as NO_SPACE_REGS code
3326 (and (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
3328 (eq (symbol_ref "flag_pic")
3332 ;; Out of range and either PIC or PORTABLE_RUNTIME
;; Unsigned modulus: output_mod_insn (1, insn).
3335 (define_expand "umodsi3"
3336 [(set (reg:SI 26) (match_operand:SI 1 "move_operand" ""))
3337 (set (reg:SI 25) (match_operand:SI 2 "move_operand" ""))
3338 (parallel [(set (reg:SI 29) (umod:SI (reg:SI 26) (reg:SI 25)))
3339 (clobber (match_dup 3))
3340 (clobber (match_dup 4))
3341 (clobber (reg:SI 26))
3342 (clobber (reg:SI 25))
3343 (clobber (reg:SI 31))])
3344 (set (match_operand:SI 0 "general_operand" "") (reg:SI 29))]
3348 operands[4] = gen_reg_rtx (SImode);
3349 operands[3] = gen_reg_rtx (SImode);
3353 [(set (reg:SI 29) (umod:SI (reg:SI 26) (reg:SI 25)))
3354 (clobber (match_operand:SI 0 "register_operand" "=a"))
3355 (clobber (match_operand:SI 2 "register_operand" "=&r"))
3356 (clobber (reg:SI 26))
3357 (clobber (reg:SI 25))
3358 (clobber (reg:SI 31))]
3361 return output_mod_insn (1, insn);"
3362 [(set_attr "type" "milli")
3363 (set (attr "length")
3365 ;; Target (or stub) within reach
3366 (and (lt (plus (symbol_ref "total_code_bytes") (pc))
3368 (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
3373 (ne (symbol_ref "TARGET_NO_SPACE_REGS || TARGET_FAST_INDIRECT_CALLS")
3377 ;; Out of reach, but not PIC or PORTABLE_RUNTIME
3378 ;; same as NO_SPACE_REGS code
3379 (and (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
3381 (eq (symbol_ref "flag_pic")
3385 ;; Out of range and either PIC or PORTABLE_RUNTIME
3388 ;;- and instructions
3389 ;; We define DImode `and` so with DImode `not` we can get
3390 ;; DImode `andn`. Other combinations are possible.
;; DImode AND expander: if either input is not already a register,
;; FAIL so GCC splits the operation into word-at-a-time SImode pieces.
3392 (define_expand "anddi3"
3393 [(set (match_operand:DI 0 "register_operand" "")
3394 (and:DI (match_operand:DI 1 "arith_double_operand" "")
3395 (match_operand:DI 2 "arith_double_operand" "")))]
3399 if (! register_operand (operands[1], DImode)
3400 || ! register_operand (operands[2], DImode))
3401 /* Let GCC break this into word-at-a-time operations. */
;; Register-only DImode AND: two 32-bit `and` instructions, one per
;; word (the %R operand forms name the second register of the pair).
3406 [(set (match_operand:DI 0 "register_operand" "=r")
3407 (and:DI (match_operand:DI 1 "register_operand" "%r")
3408 (match_operand:DI 2 "register_operand" "r")))]
3410 "and %1,%2,%0\;and %R1,%R2,%R0"
3411 [(set_attr "type" "binary")
3412 (set_attr "length" "8")])
3414 ; The ? for op1 makes reload prefer zdepi instead of loading a huge
3415 ; constant with ldil;ldo.
;; SImode AND: output_and (in pa.c) selects the template; the second
;; alternative handles mask-like constants as a deposit/extract insn
;; (hence the "shift" type for that alternative).
3416 (define_insn "andsi3"
3417 [(set (match_operand:SI 0 "register_operand" "=r,r")
3418 (and:SI (match_operand:SI 1 "register_operand" "%?r,0")
3419 (match_operand:SI 2 "and_operand" "rO,P")))]
3421 "* return output_and (operands); "
3422 [(set_attr "type" "binary,shift")
3423 (set_attr "length" "4,4")])
;; DImode and-complement (op0 = op2 & ~op1): two `andcm` insns.
3426 [(set (match_operand:DI 0 "register_operand" "=r")
3427 (and:DI (not:DI (match_operand:DI 1 "register_operand" "r"))
3428 (match_operand:DI 2 "register_operand" "r")))]
3430 "andcm %2,%1,%0\;andcm %R2,%R1,%R0"
3431 [(set_attr "type" "binary")
3432 (set_attr "length" "8")])
;; SImode and-complement, single `andcm`.
3435 [(set (match_operand:SI 0 "register_operand" "=r")
3436 (and:SI (not:SI (match_operand:SI 1 "register_operand" "r"))
3437 (match_operand:SI 2 "register_operand" "r")))]
3440 [(set_attr "type" "binary")
3441 (set_attr "length" "4")])
;; DImode IOR expander: same word-at-a-time fallback as anddi3.
3443 (define_expand "iordi3"
3444 [(set (match_operand:DI 0 "register_operand" "")
3445 (ior:DI (match_operand:DI 1 "arith_double_operand" "")
3446 (match_operand:DI 2 "arith_double_operand" "")))]
3450 if (! register_operand (operands[1], DImode)
3451 || ! register_operand (operands[2], DImode))
3452 /* Let GCC break this into word-at-a-time operations. */
;; Register-only DImode IOR: two 32-bit `or` instructions.
3457 [(set (match_operand:DI 0 "register_operand" "=r")
3458 (ior:DI (match_operand:DI 1 "register_operand" "%r")
3459 (match_operand:DI 2 "register_operand" "r")))]
3461 "or %1,%2,%0\;or %R1,%R2,%R0"
3462 [(set_attr "type" "binary")
3463 (set_attr "length" "8")])
3465 ;; Need a define_expand because we've run out of CONST_OK... characters.
3466 (define_expand "iorsi3"
3467 [(set (match_operand:SI 0 "register_operand" "")
3468 (ior:SI (match_operand:SI 1 "register_operand" "")
3469 (match_operand:SI 2 "arith32_operand" "")))]
3473 if (! (ior_operand (operands[2], SImode)
3474 || register_operand (operands[2], SImode)))
3475 operands[2] = force_reg (SImode, operands[2]);
;; IOR with a mask-like constant: output_ior (in pa.c) emits a
;; deposit-style insn (the "shift"-typed alternative) rather than
;; materializing the constant in a register.
3479 [(set (match_operand:SI 0 "register_operand" "=r,r")
3480 (ior:SI (match_operand:SI 1 "register_operand" "0,0")
3481 (match_operand:SI 2 "ior_operand" "M,i")))]
3483 "* return output_ior (operands); "
3484 [(set_attr "type" "binary,shift")
3485 (set_attr "length" "4,4")])
;; Plain register-register SImode IOR.
3488 [(set (match_operand:SI 0 "register_operand" "=r")
3489 (ior:SI (match_operand:SI 1 "register_operand" "%r")
3490 (match_operand:SI 2 "register_operand" "r")))]
3493 [(set_attr "type" "binary")
3494 (set_attr "length" "4")])
;; DImode XOR expander: same word-at-a-time fallback as anddi3.
3496 (define_expand "xordi3"
3497 [(set (match_operand:DI 0 "register_operand" "")
3498 (xor:DI (match_operand:DI 1 "arith_double_operand" "")
3499 (match_operand:DI 2 "arith_double_operand" "")))]
3503 if (! register_operand (operands[1], DImode)
3504 || ! register_operand (operands[2], DImode))
3505 /* Let GCC break this into word-at-a-time operations. */
;; Register-only DImode XOR: two 32-bit `xor` instructions.
3510 [(set (match_operand:DI 0 "register_operand" "=r")
3511 (xor:DI (match_operand:DI 1 "register_operand" "%r")
3512 (match_operand:DI 2 "register_operand" "r")))]
3514 "xor %1,%2,%0\;xor %R1,%R2,%R0"
3515 [(set_attr "type" "binary")
3516 (set_attr "length" "8")])
;; SImode XOR, register operands only.
3518 (define_insn "xorsi3"
3519 [(set (match_operand:SI 0 "register_operand" "=r")
3520 (xor:SI (match_operand:SI 1 "register_operand" "%r")
3521 (match_operand:SI 2 "register_operand" "r")))]
3524 [(set_attr "type" "binary")
3525 (set_attr "length" "4")])
;; DImode negate: subtract low word from zero, then subtract the high
;; word with borrow (sub/subb pair).
3527 (define_insn "negdi2"
3528 [(set (match_operand:DI 0 "register_operand" "=r")
3529 (neg:DI (match_operand:DI 1 "register_operand" "r")))]
3531 "sub %%r0,%R1,%R0\;subb %%r0,%1,%0"
3532 [(set_attr "type" "unary")
3533 (set_attr "length" "8")])
;; SImode negate (single instruction; template line not visible here).
3535 (define_insn "negsi2"
3536 [(set (match_operand:SI 0 "register_operand" "=r")
3537 (neg:SI (match_operand:SI 1 "register_operand" "r")))]
3540 [(set_attr "type" "unary")
3541 (set_attr "length" "4")])
;; DImode one's-complement expander: force a non-register input into
;; a register first.
3543 (define_expand "one_cmpldi2"
3544 [(set (match_operand:DI 0 "register_operand" "")
3545 (not:DI (match_operand:DI 1 "arith_double_operand" "")))]
3549 if (! register_operand (operands[1], DImode))
;; Register-only DImode NOT: one `uaddcm` per word.
3554 [(set (match_operand:DI 0 "register_operand" "=r")
3555 (not:DI (match_operand:DI 1 "register_operand" "r")))]
3557 "uaddcm %%r0,%1,%0\;uaddcm %%r0,%R1,%R0"
3558 [(set_attr "type" "unary")
3559 (set_attr "length" "8")])
;; SImode one's complement.
3561 (define_insn "one_cmplsi2"
3562 [(set (match_operand:SI 0 "register_operand" "=r")
3563 (not:SI (match_operand:SI 1 "register_operand" "r")))]
3566 [(set_attr "type" "unary")
3567 (set_attr "length" "4")])
3569 ;; Floating point arithmetic instructions.
;; All binary FP patterns below share one shape: FP-register operands
;; (class `f'), guarded by ! TARGET_SOFT_FLOAT, one 4-byte insn each.
;; pa_combine_type tags the add/sub and multiply patterns for the
;; machine-specific combine pass -- presumably so multiplies can be
;; paired with adds/subs; confirm against pa.c.
3571 (define_insn "adddf3"
3572 [(set (match_operand:DF 0 "register_operand" "=f")
3573 (plus:DF (match_operand:DF 1 "register_operand" "f")
3574 (match_operand:DF 2 "register_operand" "f")))]
3575 "! TARGET_SOFT_FLOAT"
3577 [(set_attr "type" "fpalu")
3578 (set_attr "pa_combine_type" "faddsub")
3579 (set_attr "length" "4")])
;; Single-precision add.
3581 (define_insn "addsf3"
3582 [(set (match_operand:SF 0 "register_operand" "=f")
3583 (plus:SF (match_operand:SF 1 "register_operand" "f")
3584 (match_operand:SF 2 "register_operand" "f")))]
3585 "! TARGET_SOFT_FLOAT"
3587 [(set_attr "type" "fpalu")
3588 (set_attr "pa_combine_type" "faddsub")
3589 (set_attr "length" "4")])
;; Double-precision subtract.
3591 (define_insn "subdf3"
3592 [(set (match_operand:DF 0 "register_operand" "=f")
3593 (minus:DF (match_operand:DF 1 "register_operand" "f")
3594 (match_operand:DF 2 "register_operand" "f")))]
3595 "! TARGET_SOFT_FLOAT"
3597 [(set_attr "type" "fpalu")
3598 (set_attr "pa_combine_type" "faddsub")
3599 (set_attr "length" "4")])
;; Single-precision subtract.
3601 (define_insn "subsf3"
3602 [(set (match_operand:SF 0 "register_operand" "=f")
3603 (minus:SF (match_operand:SF 1 "register_operand" "f")
3604 (match_operand:SF 2 "register_operand" "f")))]
3605 "! TARGET_SOFT_FLOAT"
3607 [(set_attr "type" "fpalu")
3608 (set_attr "pa_combine_type" "faddsub")
3609 (set_attr "length" "4")])
;; Double-precision multiply (scheduling type fpmuldbl).
3611 (define_insn "muldf3"
3612 [(set (match_operand:DF 0 "register_operand" "=f")
3613 (mult:DF (match_operand:DF 1 "register_operand" "f")
3614 (match_operand:DF 2 "register_operand" "f")))]
3615 "! TARGET_SOFT_FLOAT"
3617 [(set_attr "type" "fpmuldbl")
3618 (set_attr "pa_combine_type" "fmpy")
3619 (set_attr "length" "4")])
;; Single-precision multiply (scheduling type fpmulsgl).
3621 (define_insn "mulsf3"
3622 [(set (match_operand:SF 0 "register_operand" "=f")
3623 (mult:SF (match_operand:SF 1 "register_operand" "f")
3624 (match_operand:SF 2 "register_operand" "f")))]
3625 "! TARGET_SOFT_FLOAT"
3627 [(set_attr "type" "fpmulsgl")
3628 (set_attr "pa_combine_type" "fmpy")
3629 (set_attr "length" "4")])
;; Double-precision divide; no pa_combine_type (divides don't fuse).
3631 (define_insn "divdf3"
3632 [(set (match_operand:DF 0 "register_operand" "=f")
3633 (div:DF (match_operand:DF 1 "register_operand" "f")
3634 (match_operand:DF 2 "register_operand" "f")))]
3635 "! TARGET_SOFT_FLOAT"
3637 [(set_attr "type" "fpdivdbl")
3638 (set_attr "length" "4")])
;; Single-precision divide.
3640 (define_insn "divsf3"
3641 [(set (match_operand:SF 0 "register_operand" "=f")
3642 (div:SF (match_operand:SF 1 "register_operand" "f")
3643 (match_operand:SF 2 "register_operand" "f")))]
3644 "! TARGET_SOFT_FLOAT"
3646 [(set_attr "type" "fpdivsgl")
3647 (set_attr "length" "4")])
;; Double-precision negate: either a real `fneg,dbl` or a subtract
;; from %fr0 (which reads as +0.0).  The guard that selects between
;; the two templates is not visible in this extract -- apparently a
;; TARGET_PA_20 test; confirm against the full file.
3649 (define_insn "negdf2"
3650 [(set (match_operand:DF 0 "register_operand" "=f")
3651 (neg:DF (match_operand:DF 1 "register_operand" "f")))]
3652 "! TARGET_SOFT_FLOAT"
3656 return \"fneg,dbl %1,%0\";
3658 return \"fsub,dbl %%fr0,%1,%0\";
3660 [(set_attr "type" "fpalu")
3661 (set_attr "length" "4")])
;; Single-precision negate, same fneg-vs-fsub selection as negdf2.
3663 (define_insn "negsf2"
3664 [(set (match_operand:SF 0 "register_operand" "=f")
3665 (neg:SF (match_operand:SF 1 "register_operand" "f")))]
3666 "! TARGET_SOFT_FLOAT"
3670 return \"fneg,sgl %1,%0\";
3672 return \"fsub,sgl %%fr0,%1,%0\";
3674 [(set_attr "type" "fpalu")
3675 (set_attr "length" "4")])
;; Double-precision absolute value.
3677 (define_insn "absdf2"
3678 [(set (match_operand:DF 0 "register_operand" "=f")
3679 (abs:DF (match_operand:DF 1 "register_operand" "f")))]
3680 "! TARGET_SOFT_FLOAT"
3682 [(set_attr "type" "fpalu")
3683 (set_attr "length" "4")])
;; Single-precision absolute value.
3685 (define_insn "abssf2"
3686 [(set (match_operand:SF 0 "register_operand" "=f")
3687 (abs:SF (match_operand:SF 1 "register_operand" "f")))]
3688 "! TARGET_SOFT_FLOAT"
3690 [(set_attr "type" "fpalu")
3691 (set_attr "length" "4")])
;; Double-precision square root.
3693 (define_insn "sqrtdf2"
3694 [(set (match_operand:DF 0 "register_operand" "=f")
3695 (sqrt:DF (match_operand:DF 1 "register_operand" "f")))]
3696 "! TARGET_SOFT_FLOAT"
3698 [(set_attr "type" "fpsqrtdbl")
3699 (set_attr "length" "4")])
;; Single-precision square root.
3701 (define_insn "sqrtsf2"
3702 [(set (match_operand:SF 0 "register_operand" "=f")
3703 (sqrt:SF (match_operand:SF 1 "register_operand" "f")))]
3704 "! TARGET_SOFT_FLOAT"
3706 [(set_attr "type" "fpsqrtsgl")
3707 (set_attr "length" "4")])
3709 ;; PA 2.0 floating point instructions
;; Fused multiply-add, double precision: op0 = op1*op2 + op3 via
;; fmpyfadd.  Guarded by TARGET_PA_20 (PA 2.0 only).  The define_insn
;; header lines for these anonymous patterns are not visible in this
;; extract.
3713 [(set (match_operand:DF 0 "register_operand" "=f")
3714 (plus:DF (mult:DF (match_operand:DF 1 "register_operand" "f")
3715 (match_operand:DF 2 "register_operand" "f"))
3716 (match_operand:DF 3 "register_operand" "f")))]
3717 "TARGET_PA_20 && ! TARGET_SOFT_FLOAT"
3718 "fmpyfadd,dbl %1,%2,%3,%0"
3719 [(set_attr "type" "fpmuldbl")
3720 (set_attr "length" "4")])
;; Same operation with the addend on the left (plus is commutative,
;; but combine may present either order): op0 = op1 + op2*op3.
3723 [(set (match_operand:DF 0 "register_operand" "=f")
3724 (plus:DF (match_operand:DF 1 "register_operand" "f")
3725 (mult:DF (match_operand:DF 2 "register_operand" "f")
3726 (match_operand:DF 3 "register_operand" "f"))))]
3727 "TARGET_PA_20 && ! TARGET_SOFT_FLOAT"
3728 "fmpyfadd,dbl %2,%3,%1,%0"
3729 [(set_attr "type" "fpmuldbl")
3730 (set_attr "length" "4")])
;; Fused multiply-add, single precision, multiply on the left.
3733 [(set (match_operand:SF 0 "register_operand" "=f")
3734 (plus:SF (mult:SF (match_operand:SF 1 "register_operand" "f")
3735 (match_operand:SF 2 "register_operand" "f"))
3736 (match_operand:SF 3 "register_operand" "f")))]
3737 "TARGET_PA_20 && ! TARGET_SOFT_FLOAT"
3738 "fmpyfadd,sgl %1,%2,%3,%0"
3739 [(set_attr "type" "fpmulsgl")
3740 (set_attr "length" "4")])
;; Fused multiply-add, single precision, addend on the left.
3743 [(set (match_operand:SF 0 "register_operand" "=f")
3744 (plus:SF (match_operand:SF 1 "register_operand" "f")
3745 (mult:SF (match_operand:SF 2 "register_operand" "f")
3746 (match_operand:SF 3 "register_operand" "f"))))]
3747 "TARGET_PA_20 && ! TARGET_SOFT_FLOAT"
3748 "fmpyfadd,sgl %2,%3,%1,%0"
3749 [(set_attr "type" "fpmulsgl")
3750 (set_attr "length" "4")])
3752 ; fmpynfadd patterns
;; Fused negate-multiply-add, double: op0 = op1 - op2*op3, emitted as
;; fmpynfadd (negated product added to op1).
3754 [(set (match_operand:DF 0 "register_operand" "=f")
3755 (minus:DF (match_operand:DF 1 "register_operand" "f")
3756 (mult:DF (match_operand:DF 2 "register_operand" "f")
3757 (match_operand:DF 3 "register_operand" "f"))))]
3758 "TARGET_PA_20 && ! TARGET_SOFT_FLOAT"
3759 "fmpynfadd,dbl %2,%3,%1,%0"
3760 [(set_attr "type" "fpmuldbl")
3761 (set_attr "length" "4")])
;; Fused negate-multiply-add, single precision.
3764 [(set (match_operand:SF 0 "register_operand" "=f")
3765 (minus:SF (match_operand:SF 1 "register_operand" "f")
3766 (mult:SF (match_operand:SF 2 "register_operand" "f")
3767 (match_operand:SF 3 "register_operand" "f"))))]
3768 "TARGET_PA_20 && ! TARGET_SOFT_FLOAT"
3769 "fmpynfadd,sgl %2,%3,%1,%0"
3770 [(set_attr "type" "fpmulsgl")
3771 (set_attr "length" "4")])
;; Negated absolute value, double precision (-|x|); the output
;; template line is not visible here -- presumably fnegabs,dbl.
3775 [(set (match_operand:DF 0 "register_operand" "=f")
3776 (neg:DF (abs:DF (match_operand:DF 1 "register_operand" "f"))))]
3777 "TARGET_PA_20 && ! TARGET_SOFT_FLOAT"
3779 [(set_attr "type" "fpalu")
3780 (set_attr "length" "4")])
;; Negated absolute value, single precision.
3783 [(set (match_operand:SF 0 "register_operand" "=f")
3784 (neg:SF (abs:SF (match_operand:SF 1 "register_operand" "f"))))]
3785 "TARGET_PA_20 && ! TARGET_SOFT_FLOAT"
3787 [(set_attr "type" "fpalu")
3788 (set_attr "length" "4")])
3791 ;;- Shift instructions
3793 ;; Optimized special case of shifting.
;; Two anonymous patterns folding a right shift of a memory operand
;; into a narrowing load.  The shift counts and output templates are
;; not visible in this extract -- presumably counts 24 and 16 mapping
;; to ldb/ldh; confirm against the full file.
3796 [(set (match_operand:SI 0 "register_operand" "=r")
3797 (lshiftrt:SI (match_operand:SI 1 "memory_operand" "m")
3801 [(set_attr "type" "load")
3802 (set_attr "length" "4")])
3805 [(set (match_operand:SI 0 "register_operand" "=r")
3806 (lshiftrt:SI (match_operand:SI 1 "memory_operand" "m")
3810 [(set_attr "type" "load")
3811 (set_attr "length" "4")])
;; Shift-and-add: op0 = op2 * {2,4,8} + op1 via sh1addl/sh2addl/
;; sh3addl (%O3 prints the log2 of the shadd_operand constant).
3814 [(set (match_operand:SI 0 "register_operand" "=r")
3815 (plus:SI (mult:SI (match_operand:SI 2 "register_operand" "r")
3816 (match_operand:SI 3 "shadd_operand" ""))
3817 (match_operand:SI 1 "register_operand" "r")))]
3819 "sh%O3addl %2,%1,%0"
3820 [(set_attr "type" "binary")
3821 (set_attr "length" "4")])
3823 ;; This variant of the above insn can occur if the first operand
3824 ;; is the frame pointer. This is a kludge, but there doesn't
3825 ;; seem to be a way around it. Only recognize it while reloading.
3826 ;; Note how operand 3 uses a predicate of "const_int_operand", but
3827 ;; has constraints allowing a register. I don't know how this works,
3828 ;; but it somehow makes sure that out-of-range constants are placed
3829 ;; in a register which somehow magically is a "const_int_operand".
3830 ;; (this was stolen from alpha.md, I'm not going to try and change it.
;; shadd plus a constant offset, recognized only during reload:
;; emits the shNaddl followed by either addl (register alternative)
;; or ldo (14-bit immediate alternative, constraint J).
3833 [(set (match_operand:SI 0 "register_operand" "=&r,r")
3834 (plus:SI (plus:SI (mult:SI (match_operand:SI 2 "register_operand" "r,r")
3835 (match_operand:SI 4 "shadd_operand" ""))
3836 (match_operand:SI 1 "register_operand" "r,r"))
3837 (match_operand:SI 3 "const_int_operand" "r,J")))]
3838 "reload_in_progress"
3840 sh%O4addl %2,%1,%0\;addl %3,%0,%0
3841 sh%O4addl %2,%1,%0\;ldo %3(%0),%0"
3842 [(set_attr "type" "multi")
3843 (set_attr "length" "8")])
3845 ;; This anonymous pattern and splitter wins because it reduces the latency
3846 ;; of the shadd sequence without increasing the latency of the shift.
3848 ;; We want to make sure and split up the operations for the scheduler since
3849 ;; these instructions can (and should) schedule independently.
3851 ;; It would be clearer if combine used the same operator for both expressions,
3852 ;; it's somewhat confusing to have a mult in one operation and an ashift
3855 ;; If this pattern is not split before register allocation, then we must expose
3856 ;; the fact that operand 4 is set before operands 1, 2 and 3 have been read.
;; Combined shadd + explicit shift of the same source register; the
;; condition checks that the ashift count matches log2 of the shadd
;; multiplier, i.e. both parts compute the same scaled value.
3858 [(set (match_operand:SI 0 "register_operand" "=r")
3859 (plus:SI (mult:SI (match_operand:SI 2 "register_operand" "r")
3860 (match_operand:SI 3 "shadd_operand" ""))
3861 (match_operand:SI 1 "register_operand" "r")))
3862 (set (match_operand:SI 4 "register_operand" "=&r")
3863 (ashift:SI (match_dup 2)
3864 (match_operand:SI 5 "const_int_operand" "i")))]
3865 "INTVAL (operands[5]) == exact_log2 (INTVAL (operands[3]))"
3867 [(set_attr "type" "binary")
3868 (set_attr "length" "8")])
;; Matching define_split: break the pattern above into the shift and
;; the shadd so the scheduler can order them independently.
3871 [(set (match_operand:SI 0 "register_operand" "=r")
3872 (plus:SI (mult:SI (match_operand:SI 2 "register_operand" "r")
3873 (match_operand:SI 3 "shadd_operand" ""))
3874 (match_operand:SI 1 "register_operand" "r")))
3875 (set (match_operand:SI 4 "register_operand" "=&r")
3876 (ashift:SI (match_dup 2)
3877 (match_operand:SI 5 "const_int_operand" "i")))]
3878 "INTVAL (operands[5]) == exact_log2 (INTVAL (operands[3]))"
3879 [(set (match_dup 4) (ashift:SI (match_dup 2) (match_dup 5)))
3880 (set (match_dup 0) (plus:SI (mult:SI (match_dup 2) (match_dup 3))
;; ashlsi3: SImode left shift.  Constant counts fall through to the
;; zdep pattern below; variable counts compute 31 - count into the
;; shift-amount register and use the variable-deposit patterns
;; (zvdep32 / zvdep_imm32), since PA's variable deposit uses a
;; 31-minus-count bit position.
3884 (define_expand "ashlsi3"
3885 [(set (match_operand:SI 0 "register_operand" "")
3886 (ashift:SI (match_operand:SI 1 "lhs_lshift_operand" "")
3887 (match_operand:SI 2 "arith32_operand" "")))]
3891 if (GET_CODE (operands[2]) != CONST_INT)
3893 rtx temp = gen_reg_rtx (SImode);
3894 emit_insn (gen_subsi3 (temp, GEN_INT (31), operands[2]));
3895 if (GET_CODE (operands[1]) == CONST_INT)
3896 emit_insn (gen_zvdep_imm32 (operands[0], operands[1], temp));
3898 emit_insn (gen_zvdep32 (operands[0], operands[1], temp));
3901 /* Make sure both inputs are not constants,
3902 there are no patterns for that. */
3903 operands[1] = force_reg (SImode, operands[1]);
;; Constant-count left shift as a zero-deposit: zdep with position %P2
;; and length %L2 derived from the shift count.
3907 [(set (match_operand:SI 0 "register_operand" "=r")
3908 (ashift:SI (match_operand:SI 1 "register_operand" "r")
3909 (match_operand:SI 2 "const_int_operand" "n")))]
3911 "zdep %1,%P2,%L2,%0"
3912 [(set_attr "type" "shift")
3913 (set_attr "length" "4")])
3915 ; Match cases of op1 a CONST_INT here that zvdep_imm32 doesn't handle.
3916 ; Doing it like this makes slightly better code since reload can
3917 ; replace a register with a known value in range -16..15 with a
3918 ; constant. Ideally, we would like to merge zvdep32 and zvdep_imm32,
3919 ; but since we have no more CONST_OK... characters, that is not
;; zvdep32: variable left shift expressed as (31 - count) deposit;
;; operand 2 must live in the shift-amount register (constraint `q').
3921 (define_insn "zvdep32"
3922 [(set (match_operand:SI 0 "register_operand" "=r,r")
3923 (ashift:SI (match_operand:SI 1 "arith5_operand" "r,L")
3924 (minus:SI (const_int 31)
3925 (match_operand:SI 2 "register_operand" "q,q"))))]
3930 [(set_attr "type" "shift,shift")
3931 (set_attr "length" "4,4")])
;; zvdep_imm32: variable shift of a constant whose value zvdepi can
;; encode; the C fragment re-splits the constant into the 5-bit
;; signed immediate and deposit length that zvdepi takes.
3933 (define_insn "zvdep_imm32"
3934 [(set (match_operand:SI 0 "register_operand" "=r")
3935 (ashift:SI (match_operand:SI 1 "lhs_lshift_cint_operand" "")
3936 (minus:SI (const_int 31)
3937 (match_operand:SI 2 "register_operand" "q"))))]
3941 int x = INTVAL (operands[1]);
3942 operands[2] = GEN_INT (4 + exact_log2 ((x >> 4) + 1));
3943 operands[1] = GEN_INT ((x & 0xf) - 0x10);
3944 return \"zvdepi %1,%2,%0\";
3946 [(set_attr "type" "shift")
3947 (set_attr "length" "4")])
;; vdepi_ior: OR-in a variably-positioned run of one bits
;; (constant of the form 2**n - 1) with a single `vdepi -1'.
3949 (define_insn "vdepi_ior"
3950 [(set (match_operand:SI 0 "register_operand" "=r")
3951 (ior:SI (ashift:SI (match_operand:SI 1 "const_int_operand" "")
3952 (minus:SI (const_int 31)
3953 (match_operand:SI 2 "register_operand" "q")))
3954 (match_operand:SI 3 "register_operand" "0")))]
3955 ; accept ...0001...1, can this be generalized?
3956 "exact_log2 (INTVAL (operands[1]) + 1) >= 0"
3959 int x = INTVAL (operands[1]);
3960 operands[2] = GEN_INT (exact_log2 (x + 1));
3961 return \"vdepi -1,%2,%0\";
3963 [(set_attr "type" "shift")
3964 (set_attr "length" "4")])
;; vdepi_and: clear a variably-positioned bit (operand 1 == -2, i.e.
;; rotate of ...11110) with a single `vdepi 0'.
3966 (define_insn "vdepi_and"
3967 [(set (match_operand:SI 0 "register_operand" "=r")
3968 (and:SI (rotate:SI (match_operand:SI 1 "const_int_operand" "")
3969 (minus:SI (const_int 31)
3970 (match_operand:SI 2 "register_operand" "q")))
3971 (match_operand:SI 3 "register_operand" "0")))]
3972 ; this can be generalized...!
3973 "INTVAL (operands[1]) == -2"
3976 int x = INTVAL (operands[1]);
3977 operands[2] = GEN_INT (exact_log2 ((~x) + 1));
3978 return \"vdepi 0,%2,%0\";
3980 [(set_attr "type" "shift")
3981 (set_attr "length" "4")])
;; ashrsi3: arithmetic right shift.  Variable counts go through the
;; same 31-minus-count trick as ashlsi3, using vextrs32; constant
;; counts fall through to the extrs pattern below.
3983 (define_expand "ashrsi3"
3984 [(set (match_operand:SI 0 "register_operand" "")
3985 (ashiftrt:SI (match_operand:SI 1 "register_operand" "")
3986 (match_operand:SI 2 "arith32_operand" "")))]
3990 if (GET_CODE (operands[2]) != CONST_INT)
3992 rtx temp = gen_reg_rtx (SImode);
3993 emit_insn (gen_subsi3 (temp, GEN_INT (31), operands[2]));
3994 emit_insn (gen_vextrs32 (operands[0], operands[1], temp));
;; Constant-count arithmetic right shift via sign-extending extract.
4000 [(set (match_operand:SI 0 "register_operand" "=r")
4001 (ashiftrt:SI (match_operand:SI 1 "register_operand" "r")
4002 (match_operand:SI 2 "const_int_operand" "n")))]
4004 "extrs %1,%P2,%L2,%0"
4005 [(set_attr "type" "shift")
4006 (set_attr "length" "4")])
;; Variable-count arithmetic right shift; count register in the
;; shift-amount register class `q' (template line not visible here).
4008 (define_insn "vextrs32"
4009 [(set (match_operand:SI 0 "register_operand" "=r")
4010 (ashiftrt:SI (match_operand:SI 1 "register_operand" "r")
4011 (minus:SI (const_int 31)
4012 (match_operand:SI 2 "register_operand" "q"))))]
4015 [(set_attr "type" "shift")
4016 (set_attr "length" "4")])
;; Logical right shift: variable (q) and constant (n) alternatives;
;; the constant form is an unsigned extract (extru).
4018 (define_insn "lshrsi3"
4019 [(set (match_operand:SI 0 "register_operand" "=r,r")
4020 (lshiftrt:SI (match_operand:SI 1 "register_operand" "r,r")
4021 (match_operand:SI 2 "arith32_operand" "q,n")))]
4025 extru %1,%P2,%L2,%0"
4026 [(set_attr "type" "shift")
4027 (set_attr "length" "4")])
;; Rotate right: constant counts use the double-shift `shd' of the
;; operand with itself, variable counts use `vshd'.
4029 (define_insn "rotrsi3"
4030 [(set (match_operand:SI 0 "register_operand" "=r,r")
4031 (rotatert:SI (match_operand:SI 1 "register_operand" "r,r")
4032 (match_operand:SI 2 "arith32_operand" "q,n")))]
4036 if (GET_CODE (operands[2]) == CONST_INT)
4038 operands[2] = GEN_INT (INTVAL (operands[2]) & 31);
4039 return \"shd %1,%1,%2,%0\";
4042 return \"vshd %1,%1,%0\";
4044 [(set_attr "type" "shift")
4045 (set_attr "length" "4")])
;; Rotate left: a variable count is rewritten as rotate-right by
;; (32 - count); constant counts fall through to the pattern below.
4047 (define_expand "rotlsi3"
4048 [(set (match_operand:SI 0 "register_operand" "")
4049 (rotate:SI (match_operand:SI 1 "register_operand" "")
4050 (match_operand:SI 2 "arith32_operand" "")))]
4054 if (GET_CODE (operands[2]) != CONST_INT)
4056 rtx temp = gen_reg_rtx (SImode);
4057 emit_insn (gen_subsi3 (temp, GEN_INT (32), operands[2]));
4058 emit_insn (gen_rotrsi3 (operands[0], operands[1], temp));
4061 /* Else expand normally. */
;; Constant rotate left, converted to the equivalent right-rotate
;; count for `shd'.
4065 [(set (match_operand:SI 0 "register_operand" "=r")
4066 (rotate:SI (match_operand:SI 1 "register_operand" "r")
4067 (match_operand:SI 2 "const_int_operand" "n")))]
4071 operands[2] = GEN_INT ((32 - INTVAL (operands[2])) & 31);
4072 return \"shd %1,%1,%2,%0\";
4074 [(set_attr "type" "shift")
4075 (set_attr "length" "4")])
;; Funnel shift: (op1 << n) op (op2 >> (32-n)) for op in {plus, xor,
;; ior} maps onto a single `shd'.  Two patterns cover both argument
;; orders that combine may produce.
4078 [(set (match_operand:SI 0 "register_operand" "=r")
4079 (match_operator:SI 5 "plus_xor_ior_operator"
4080 [(ashift:SI (match_operand:SI 1 "register_operand" "r")
4081 (match_operand:SI 3 "const_int_operand" "n"))
4082 (lshiftrt:SI (match_operand:SI 2 "register_operand" "r")
4083 (match_operand:SI 4 "const_int_operand" "n"))]))]
4084 "INTVAL (operands[3]) + INTVAL (operands[4]) == 32"
4086 [(set_attr "type" "shift")
4087 (set_attr "length" "4")])
;; Mirror image of the previous pattern (lshiftrt first).
4090 [(set (match_operand:SI 0 "register_operand" "=r")
4091 (match_operator:SI 5 "plus_xor_ior_operator"
4092 [(lshiftrt:SI (match_operand:SI 2 "register_operand" "r")
4093 (match_operand:SI 4 "const_int_operand" "n"))
4094 (ashift:SI (match_operand:SI 1 "register_operand" "r")
4095 (match_operand:SI 3 "const_int_operand" "n"))]))]
4096 "INTVAL (operands[3]) + INTVAL (operands[4]) == 32"
4098 [(set_attr "type" "shift")
4099 (set_attr "length" "4")])
;; (x << n) & mask, where mask >> n is 2**m - 1: a single zdep whose
;; position/length are recomputed in the C fragment.
4102 [(set (match_operand:SI 0 "register_operand" "=r")
4103 (and:SI (ashift:SI (match_operand:SI 1 "register_operand" "r")
4104 (match_operand:SI 2 "const_int_operand" ""))
4105 (match_operand:SI 3 "const_int_operand" "")))]
4106 "exact_log2 (1 + (INTVAL (operands[3]) >> (INTVAL (operands[2]) & 31))) >= 0"
4109 int cnt = INTVAL (operands[2]) & 31;
4110 operands[3] = GEN_INT (exact_log2 (1 + (INTVAL (operands[3]) >> cnt)));
4111 operands[2] = GEN_INT (31 - cnt);
4112 return \"zdep %1,%2,%3,%0\";
4114 [(set_attr "type" "shift")
4115 (set_attr "length" "4")])
4117 ;; Unconditional and other jump instructions.
;; Trivial return, usable only when the epilogue is empty
;; (hppa_can_use_return_insn_p).
4119 (define_insn "return"
4121 "hppa_can_use_return_insn_p ()"
4123 [(set_attr "type" "branch")
4124 (set_attr "length" "4")])
4126 ;; Use a different pattern for functions which have non-trivial
4127 ;; epilogues so as not to confuse jump and reorg.
4128 (define_insn "return_internal"
4133 [(set_attr "type" "branch")
4134 (set_attr "length" "4")])
;; Prologue is emitted entirely from C code in pa.c.
4136 (define_expand "prologue"
4139 "hppa_expand_prologue ();DONE;")
;; Epilogue: use the trivial return when possible, otherwise emit the
;; full epilogue sequence followed by return_internal.
4141 (define_expand "epilogue"
4146 /* Try to use the trivial return first. Else use the full
4148 if (hppa_can_use_return_insn_p ())
4149 emit_jump_insn (gen_return ())
4152 hppa_expand_epilogue ();
4153 emit_jump_insn (gen_return_internal ());
4158 ;; Special because we use the value placed in %r2 by the bl instruction
4159 ;; from within its delay slot to set the value for the 2nd parameter to
;; _mcount profiling call; operand 0 is the label-offset constant
;; folded into %r25 in the delay slot.
4161 (define_insn "call_profiler"
4162 [(unspec_volatile [(const_int 0)] 0)
4163 (use (match_operand:SI 0 "const_int_operand" ""))]
4165 "bl _mcount,%%r2\;ldo %0(%%r2),%%r25"
4166 [(set_attr "type" "multi")
4167 (set_attr "length" "8")])
;; Zero-length scheduling barrier; emitted around PIC-register
;; restores after calls (see the call expanders below).
4169 (define_insn "blockage"
4170 [(unspec_volatile [(const_int 2)] 0)]
4173 [(set_attr "length" "0")])
;; Unconditional jump.  Short branches are handled by the (elided)
;; template; the C fragment handles only the out-of-range case for
;; non-optimizing compilations: spill %r1 to sp-16, load the target
;; address into %r1 (PIC-relative or absolute ldil/be), branch, and
;; reload %r1 in the delay slot.
;; NOTE(review): the `be` output_asm_insn line below ends in a stray
;; double semicolon (a harmless empty C statement) -- worth cleaning
;; up in the full file.
4176 [(set (pc) (label_ref (match_operand 0 "" "")))]
4180 extern int optimize;
4182 if (GET_MODE (insn) == SImode)
4185 /* An unconditional branch which can reach its target. */
4186 if (get_attr_length (insn) != 24
4187 && get_attr_length (insn) != 16)
4190 /* An unconditional branch which can not reach its target.
4192 We need to be able to use %r1 as a scratch register; however,
4193 we can never be sure whether or not it's got a live value in
4194 it. Therefore, we must restore its original value after the
4197 To make matters worse, we don't have a stack slot which we
4198 can always clobber. sp-12/sp-16 shouldn't ever have a live
4199 value during a non-optimizing compilation, so we use those
4200 slots for now. We don't support very long branches when
4201 optimizing -- they should be quite rare when optimizing.
4203 Really the way to go long term is a register scavenger; goto
4204 the target of the jump and find a register which we can use
4205 as a scratch to hold the value in %r1. */
4207 /* We don't know how to register scavenge yet. */
4211 /* First store %r1 into the stack. */
4212 output_asm_insn (\"stw %%r1,-16(%%r30)\", operands);
4214 /* Now load the target address into %r1 and do an indirect jump
4215 to the value specified in %r1. Be careful to generate PIC
4220 xoperands[0] = operands[0];
4221 xoperands[1] = gen_label_rtx ();
4223 output_asm_insn (\"bl .+8,%%r1\\n\\taddil L'%l0-%l1,%%r1\", xoperands);
4224 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, \"L\",
4225 CODE_LABEL_NUMBER (xoperands[1]));
4226 output_asm_insn (\"ldo R'%l0-%l1(%%r1),%%r1\\n\\tbv %%r0(%%r1)\",
4230 output_asm_insn (\"ldil L'%l0,%%r1\\n\\tbe R'%l0(%%sr4,%%r1)\", operands);;
4232 /* And restore the value of %r1 in the delay slot. We're not optimizing,
4233 so we know nothing else can be in the delay slot. */
4234 return \"ldw -16(%%r30),%%r1\";
4236 [(set_attr "type" "uncond_branch")
4237 (set_attr "pa_combine_type" "uncond_branch")
4238 (set (attr "length")
4239 (cond [(eq (symbol_ref "jump_in_call_delay (insn)") (const_int 1))
4240 (if_then_else (lt (abs (minus (match_dup 0)
4241 (plus (pc) (const_int 8))))
4245 (ge (abs (minus (match_dup 0) (plus (pc) (const_int 8))))
4247 (if_then_else (eq (symbol_ref "flag_pic") (const_int 0))
4252 ;; Subroutines of "casesi".
4253 ;; operand 0 is index
4254 ;; operand 1 is the minimum bound
4255 ;; operand 2 is the maximum bound - minimum bound + 1
4256 ;; operand 3 is CODE_LABEL for the table;
4257 ;; operand 4 is the CODE_LABEL to go to if index out of range.
;; casesi: normalize the index to zero-based (subtracting the minimum
;; bound when nonzero), range-check with an unsigned compare and bgtu
;; to the default label, then dispatch through the table.  With
;; -mbig-switch a scaled pc-relative load is used (the temp = 2*index
;; computation below); otherwise casesi0 does an in-line table jump.
4259 (define_expand "casesi"
4260 [(match_operand:SI 0 "general_operand" "")
4261 (match_operand:SI 1 "const_int_operand" "")
4262 (match_operand:SI 2 "const_int_operand" "")
4263 (match_operand 3 "" "")
4264 (match_operand 4 "" "")]
4268 if (GET_CODE (operands[0]) != REG)
4269 operands[0] = force_reg (SImode, operands[0]);
4271 if (operands[1] != const0_rtx)
4273 rtx reg = gen_reg_rtx (SImode);
4275 operands[1] = GEN_INT (-INTVAL (operands[1]));
4276 if (!INT_14_BITS (operands[1]))
4277 operands[1] = force_reg (SImode, operands[1]);
4278 emit_insn (gen_addsi3 (reg, operands[0], operands[1]));
4283 if (!INT_5_BITS (operands[2]))
4284 operands[2] = force_reg (SImode, operands[2]);
4286 emit_insn (gen_cmpsi (operands[0], operands[2]));
4287 emit_jump_insn (gen_bgtu (operands[4]));
4288 if (TARGET_BIG_SWITCH)
4290 rtx temp = gen_reg_rtx (SImode);
4291 emit_move_insn (temp, gen_rtx_PLUS (SImode, operands[0], operands[0]));
4294 emit_jump_insn (gen_casesi0 (operands[0], operands[3]));
;; casesi0: jump through the dispatch table at pc + scaled index
;; (two insns; the output template line is not visible here).
4298 (define_insn "casesi0"
4300 (mem:SI (plus:SI (pc)
4301 (match_operand:SI 0 "register_operand" "r")))
4302 (label_ref (match_operand 1 "" ""))))]
4305 [(set_attr "type" "multi")
4306 (set_attr "length" "8")])
4308 ;; Need nops for the calls because execution is supposed to continue
4309 ;; past; we don't want to nullify an instruction that we need.
4310 ;;- jump to subroutine
;; call expander.  Direct calls to a SYMBOL_REF use
;; call_internal_symref; indirect calls load the target into %r22 and
;; use call_internal_reg, because named and pointer calls follow
;; different PA calling conventions (see the comment in the body).
;; Under PIC, the PIC register is restored after the call and pinned
;; in place with a blockage insn.
4312 (define_expand "call"
4313 [(parallel [(call (match_operand:SI 0 "" "")
4314 (match_operand 1 "" ""))
4315 (clobber (reg:SI 2))])]
4322 if (TARGET_PORTABLE_RUNTIME)
4323 op = force_reg (SImode, XEXP (operands[0], 0));
4325 op = XEXP (operands[0], 0);
4327 /* Use two different patterns for calls to explicitly named functions
4328 and calls through function pointers. This is necessary as these two
4329 types of calls use different calling conventions, and CSE might try
4330 to change the named call into an indirect call in some cases (using
4331 two patterns keeps CSE from performing this optimization). */
4332 if (GET_CODE (op) == SYMBOL_REF)
4333 call_insn = emit_call_insn (gen_call_internal_symref (op, operands[1]));
4336 rtx tmpreg = gen_rtx_REG (word_mode, 22);
4337 emit_move_insn (tmpreg, force_reg (word_mode, op));
4338 call_insn = emit_call_insn (gen_call_internal_reg (operands[1]));
4343 use_reg (&CALL_INSN_FUNCTION_USAGE (call_insn), pic_offset_table_rtx);
4345 /* After each call we must restore the PIC register, even if it
4346 doesn't appear to be used.
4348 This will set regs_ever_live for the callee saved register we
4349 stored the PIC register in. */
4350 emit_move_insn (pic_offset_table_rtx,
4351 gen_rtx_REG (word_mode, PIC_OFFSET_TABLE_REGNUM_SAVED));
4352 emit_insn (gen_rtx_USE (VOIDmode, pic_offset_table_rtx));
4354 /* Gross. We have to keep the scheduler from moving the restore
4355 of the PIC register away from the call. SCHED_GROUP_P is
4356 supposed to do this, but for some reason the compiler will
4357 go into an infinite loop when we use that.
4359 This method (blockage insn) may make worse code (then again
4360 it may not since calls are nearly blockages anyway), but at
4361 least it should work. */
4362 emit_insn (gen_blockage ());
;; Direct call to a named function: output_arg_descriptor emits the
;; .CALL argument-location pseudo-op, output_call the branch itself.
;; (use (const_int 0)) distinguishes this pattern from the indirect
;; one.  Length: 4 bytes when reachable, else a long-call sequence
;; whose size depends on PIC (see the comments in the attr below).
4367 (define_insn "call_internal_symref"
4368 [(call (mem:SI (match_operand:SI 0 "call_operand_address" ""))
4369 (match_operand 1 "" "i"))
4370 (clobber (reg:SI 2))
4371 (use (const_int 0))]
4372 "! TARGET_PORTABLE_RUNTIME"
4375 output_arg_descriptor (insn);
4376 return output_call (insn, operands[0]);
4378 [(set_attr "type" "call")
4379 (set (attr "length")
4380 ;; If we're sure that we can either reach the target or that the
4381 ;; linker can use a long-branch stub, then the length is 4 bytes.
4383 ;; For long-calls the length will be either 52 bytes (non-pic)
4384 ;; or 68 bytes (pic). */
4385 ;; Else we have to use a long-call;
4386 (if_then_else (lt (plus (symbol_ref "total_code_bytes") (pc))
4389 (if_then_else (eq (symbol_ref "flag_pic")
;; Indirect call through %r22 via the $$dyncall millicode helper.
;; The template is picked by the computed length attribute:
;;   - NO_SPACE_REGS / fast-indirect: direct ble through %sr4;
;;   - 8 bytes: $$dyncall reachable (or via stub);
;;   - 12 bytes: out-of-range, non-PIC, non-portable-runtime;
;;   - 20 bytes: portable runtime;
;;   - otherwise: PIC sequence built with bl/addil/ldo.
4394 (define_insn "call_internal_reg"
4395 [(call (mem:SI (reg:SI 22))
4396 (match_operand 0 "" "i"))
4397 (clobber (reg:SI 2))
4398 (use (const_int 1))]
4404 /* First the special case for kernels, level 0 systems, etc. */
4405 if (TARGET_NO_SPACE_REGS || TARGET_FAST_INDIRECT_CALLS)
4406 return \"ble 0(%%sr4,%%r22)\;copy %%r31,%%r2\";
4408 /* Now the normal case -- we can reach $$dyncall directly or
4409 we're sure that we can get there via a long-branch stub.
4411 No need to check target flags as the length uniquely identifies
4412 the remaining cases. */
4413 if (get_attr_length (insn) == 8)
4414 return \".CALL\\tARGW0=GR\;bl $$dyncall,%%r31\;copy %%r31,%%r2\";
4416 /* Long millicode call, but we are not generating PIC or portable runtime
4418 if (get_attr_length (insn) == 12)
4419 return \".CALL\\tARGW0=GR\;ldil L%%$$dyncall,%%r2\;ble R%%$$dyncall(%%sr4,%%r2)\;copy %%r31,%%r2\";
4421 /* Long millicode call for portable runtime. */
4422 if (get_attr_length (insn) == 20)
4423 return \"ldil L%%$$dyncall,%%r31\;ldo R%%$$dyncall(%%r31),%%r31\;blr %%r0,%%r2\;bv,n %%r0(%%r31)\;nop\";
4425 /* If we're generating PIC code. */
4426 xoperands[0] = operands[0];
4427 xoperands[1] = gen_label_rtx ();
4428 output_asm_insn (\"bl .+8,%%r1\", xoperands);
4429 output_asm_insn (\"addil L%%$$dyncall-%1,%%r1\", xoperands);
4430 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, \"L\",
4431 CODE_LABEL_NUMBER (xoperands[1]));
4432 output_asm_insn (\"ldo R%%$$dyncall-%1(%%r1),%%r1\", xoperands);
4433 output_asm_insn (\"blr %%r0,%%r2\", xoperands);
4434 output_asm_insn (\"bv,n %%r0(%%r1)\\n\\tnop\", xoperands);
4437 [(set_attr "type" "dyncall")
4438 (set (attr "length")
4440 ;; First NO_SPACE_REGS
4441 (ne (symbol_ref "TARGET_NO_SPACE_REGS || TARGET_FAST_INDIRECT_CALLS")
4445 ;; Target (or stub) within reach
4446 (and (lt (plus (symbol_ref "total_code_bytes") (pc))
4448 (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
4452 ;; Out of reach, but not PIC or PORTABLE_RUNTIME
4453 (and (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
4455 (eq (symbol_ref "flag_pic")
4459 (ne (symbol_ref "TARGET_PORTABLE_RUNTIME")
4463 ;; Out of range PIC case
4466 (define_expand "call_value"
4467 [(parallel [(set (match_operand 0 "" "")
4468 (call (match_operand:SI 1 "" "")
4469 (match_operand 2 "" "")))
4470 (clobber (reg:SI 2))])]
4477 if (TARGET_PORTABLE_RUNTIME)
4478 op = force_reg (word_mode, XEXP (operands[1], 0));
4480 op = XEXP (operands[1], 0);
4482 /* Use two different patterns for calls to explicitly named functions
4483 and calls through function pointers. This is necessary as these two
4484 types of calls use different calling conventions, and CSE might try
4485 to change the named call into an indirect call in some cases (using
4486 two patterns keeps CSE from performing this optimization). */
4487 if (GET_CODE (op) == SYMBOL_REF)
4488 call_insn = emit_call_insn (gen_call_value_internal_symref (operands[0],
4493 rtx tmpreg = gen_rtx_REG (word_mode, 22);
4494 emit_move_insn (tmpreg, force_reg (word_mode, op));
4495 call_insn = emit_call_insn (gen_call_value_internal_reg (operands[0],
4500 use_reg (&CALL_INSN_FUNCTION_USAGE (call_insn), pic_offset_table_rtx);
4502 /* After each call we must restore the PIC register, even if it
4503 doesn't appear to be used.
4505 This will set regs_ever_live for the callee saved register we
4506 stored the PIC register in. */
4507 emit_move_insn (pic_offset_table_rtx,
4508 gen_rtx_REG (word_mode, PIC_OFFSET_TABLE_REGNUM_SAVED));
4509 emit_insn (gen_rtx_USE (VOIDmode, pic_offset_table_rtx));
4511 /* Gross. We have to keep the scheduler from moving the restore
4512 of the PIC register away from the call. SCHED_GROUP_P is
4513 supposed to do this, but for some reason the compiler will
4514 go into an infinite loop when we use that.
4516 This method (blockage insn) may make worse code (then again
4517 it may not since calls are nearly blockages anyway), but at
4518 least it should work. */
4519 emit_insn (gen_blockage ());
4524 (define_insn "call_value_internal_symref"
4525 [(set (match_operand 0 "" "=rf")
4526 (call (mem:SI (match_operand:SI 1 "call_operand_address" ""))
4527 (match_operand 2 "" "i")))
4528 (clobber (reg:SI 2))
4529 (use (const_int 0))]
4530 ;;- Don't use operand 1 for most machines.
4531 "! TARGET_PORTABLE_RUNTIME"
4534 output_arg_descriptor (insn);
4535 return output_call (insn, operands[1]);
4537 [(set_attr "type" "call")
4538 (set (attr "length")
4539 ;; If we're sure that we can either reach the target or that the
4540 ;; linker can use a long-branch stub, then the length is 4 bytes.
4542 ;; For long-calls the length will be either 52 bytes (non-pic)
4543 ;; or 68 bytes (pic). */
4544 ;; Else we have to use a long-call;
4545 (if_then_else (lt (plus (symbol_ref "total_code_bytes") (pc))
4548 (if_then_else (eq (symbol_ref "flag_pic")
4553 (define_insn "call_value_internal_reg"
4554 [(set (match_operand 0 "" "=rf")
4555 (call (mem:SI (reg:SI 22))
4556 (match_operand 1 "" "i")))
4557 (clobber (reg:SI 2))
4558 (use (const_int 1))]
4564 /* First the special case for kernels, level 0 systems, etc. */
4565 if (TARGET_NO_SPACE_REGS || TARGET_FAST_INDIRECT_CALLS)
4566 return \"ble 0(%%sr4,%%r22)\;copy %%r31,%%r2\";
4568 /* Now the normal case -- we can reach $$dyncall directly or
4569 we're sure that we can get there via a long-branch stub.
4571 No need to check target flags as the length uniquely identifies
4572 the remaining cases. */
4573 if (get_attr_length (insn) == 8)
4574 return \".CALL\\tARGW0=GR\;bl $$dyncall,%%r31\;copy %%r31,%%r2\";
4576 /* Long millicode call, but we are not generating PIC or portable runtime
4578 if (get_attr_length (insn) == 12)
4579 return \".CALL\\tARGW0=GR\;ldil L%%$$dyncall,%%r2\;ble R%%$$dyncall(%%sr4,%%r2)\;copy %%r31,%%r2\";
4581 /* Long millicode call for portable runtime. */
4582 if (get_attr_length (insn) == 20)
4583 return \"ldil L%%$$dyncall,%%r31\;ldo R%%$$dyncall(%%r31),%%r31\;blr %%r0,%%r2\;bv,n %%r0(%%r31)\;nop\";
4585 /* If we're generating PIC code. */
4586 xoperands[0] = operands[1];
4587 xoperands[1] = gen_label_rtx ();
4588 output_asm_insn (\"bl .+8,%%r1\", xoperands);
4589 output_asm_insn (\"addil L%%$$dyncall-%1,%%r1\", xoperands);
4590 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, \"L\",
4591 CODE_LABEL_NUMBER (xoperands[1]));
4592 output_asm_insn (\"ldo R%%$$dyncall-%1(%%r1),%%r1\", xoperands);
4593 output_asm_insn (\"blr %%r0,%%r2\", xoperands);
4594 output_asm_insn (\"bv,n %%r0(%%r1)\\n\\tnop\", xoperands);
4597 [(set_attr "type" "dyncall")
4598 (set (attr "length")
4600 ;; First NO_SPACE_REGS
4601 (ne (symbol_ref "TARGET_NO_SPACE_REGS || TARGET_FAST_INDIRECT_CALLS")
4605 ;; Target (or stub) within reach
4606 (and (lt (plus (symbol_ref "total_code_bytes") (pc))
4608 (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
4612 ;; Out of reach, but not PIC or PORTABLE_RUNTIME
4613 (and (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
4615 (eq (symbol_ref "flag_pic")
4619 (ne (symbol_ref "TARGET_PORTABLE_RUNTIME")
4623 ;; Out of range PIC case
4626 ;; Call subroutine returning any type.
4628 (define_expand "untyped_call"
4629 [(parallel [(call (match_operand 0 "" "")
4631 (match_operand 1 "" "")
4632 (match_operand 2 "" "")])]
4638 emit_call_insn (gen_call (operands[0], const0_rtx));
4640 for (i = 0; i < XVECLEN (operands[2], 0); i++)
4642 rtx set = XVECEXP (operands[2], 0, i);
4643 emit_move_insn (SET_DEST (set), SET_SRC (set));
4646 /* The optimizer does not know that the call sets the function value
4647 registers we stored in the result block. We avoid problems by
4648 claiming that all hard registers are used and clobbered at this
4650 emit_insn (gen_blockage ());
4658 [(set_attr "type" "move")
4659 (set_attr "length" "4")])
4661 ;; These are just placeholders so we know where branch tables
4663 (define_insn "begin_brtab"
4668 /* Only GAS actually supports this pseudo-op. */
4670 return \".begin_brtab\";
4674 [(set_attr "type" "move")
4675 (set_attr "length" "0")])
4677 (define_insn "end_brtab"
4682 /* Only GAS actually supports this pseudo-op. */
4684 return \".end_brtab\";
4688 [(set_attr "type" "move")
4689 (set_attr "length" "0")])
4691 ;;; Hope this is only within a function...
4692 (define_insn "indirect_jump"
4693 [(set (pc) (match_operand 0 "register_operand" "r"))]
4694 "GET_MODE (operands[0]) == word_mode"
4696 [(set_attr "type" "branch")
4697 (set_attr "length" "4")])
4699 ;;; EH does longjmp's from and within the data section. Thus,
4700 ;;; an interspace branch is required for the longjmp implementation.
4701 ;;; Registers r1 and r2 are not saved in the jmpbuf environment.
4702 ;;; Thus, they can be used as scratch registers for the jump.
4703 (define_insn "interspace_jump"
4704 [(set (pc) (match_operand:SI 0 "register_operand" "a"))
4705 (clobber (reg:SI 2))]
4707 "ldsid (%%sr0,%0),%%r2\; mtsp %%r2,%%sr0\; be%* 0(%%sr0,%0)"
4708 [(set_attr "type" "branch")
4709 (set_attr "length" "12")])
4711 (define_expand "builtin_longjmp"
4712 [(unspec_volatile [(match_operand 0 "register_operand" "r")] 3)]
4716 /* The elements of the buffer are, in order: */
4717 rtx fp = gen_rtx_MEM (Pmode, operands[0]);
4718 rtx lab = gen_rtx_MEM (Pmode, plus_constant (operands[0], 4));
4719 rtx stack = gen_rtx_MEM (Pmode, plus_constant (operands[0], 8));
4720 rtx pv = gen_rtx_REG (Pmode, 1);
4722 /* This bit is the same as expand_builtin_longjmp. */
4723 emit_move_insn (hard_frame_pointer_rtx, fp);
4724 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
4725 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
4726 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
4728 /* Load the label we are jumping through into r1 so that we know
4729 where to look for it when we get back to setjmp's function for
4730 restoring the gp. */
4731 emit_move_insn (pv, lab);
4732 emit_jump_insn (gen_interspace_jump (pv));
4737 (define_insn "extzv"
4738 [(set (match_operand:SI 0 "register_operand" "=r")
4739 (zero_extract:SI (match_operand:SI 1 "register_operand" "r")
4740 (match_operand:SI 2 "uint5_operand" "")
4741 (match_operand:SI 3 "uint5_operand" "")))]
4743 "extru %1,%3+%2-1,%2,%0"
4744 [(set_attr "type" "shift")
4745 (set_attr "length" "4")])
4748 [(set (match_operand:SI 0 "register_operand" "=r")
4749 (zero_extract:SI (match_operand:SI 1 "register_operand" "r")
4751 (match_operand:SI 3 "register_operand" "q")))]
4754 [(set_attr "type" "shift")
4755 (set_attr "length" "4")])
4758 [(set (match_operand:SI 0 "register_operand" "=r")
4759 (sign_extract:SI (match_operand:SI 1 "register_operand" "r")
4760 (match_operand:SI 2 "uint5_operand" "")
4761 (match_operand:SI 3 "uint5_operand" "")))]
4763 "extrs %1,%3+%2-1,%2,%0"
4764 [(set_attr "type" "shift")
4765 (set_attr "length" "4")])
4768 [(set (match_operand:SI 0 "register_operand" "=r")
4769 (sign_extract:SI (match_operand:SI 1 "register_operand" "r")
4771 (match_operand:SI 3 "register_operand" "q")))]
4774 [(set_attr "type" "shift")
4775 (set_attr "length" "4")])
4778 [(set (zero_extract:SI (match_operand:SI 0 "register_operand" "+r,r")
4779 (match_operand:SI 1 "uint5_operand" "")
4780 (match_operand:SI 2 "uint5_operand" ""))
4781 (match_operand:SI 3 "arith5_operand" "r,L"))]
4784 dep %3,%2+%1-1,%1,%0
4785 depi %3,%2+%1-1,%1,%0"
4786 [(set_attr "type" "shift,shift")
4787 (set_attr "length" "4,4")])
4789 ;; Optimize insertion of const_int values of type 1...1xxxx.
4791 [(set (zero_extract:SI (match_operand:SI 0 "register_operand" "+r")
4792 (match_operand:SI 1 "uint5_operand" "")
4793 (match_operand:SI 2 "uint5_operand" ""))
4794 (match_operand:SI 3 "const_int_operand" ""))]
4795 "(INTVAL (operands[3]) & 0x10) != 0 &&
4796 (~INTVAL (operands[3]) & ((1L << INTVAL (operands[1])) - 1) & ~0xf) == 0"
4799 operands[3] = GEN_INT ((INTVAL (operands[3]) & 0xf) - 0x10);
4800 return \"depi %3,%2+%1-1,%1,%0\";
4802 [(set_attr "type" "shift")
4803 (set_attr "length" "4")])
4805 ;; This insn is used for some loop tests, typically loops reversed when
4806 ;; strength reduction is used. It is actually created when the instruction
4807 ;; combination phase combines the special loop test. Since this insn
4808 ;; is both a jump insn and has an output, it must deal with its own
4809 ;; reloads, hence the `m' constraints. The `!' constraints direct reload
4810 ;; to not choose the register alternatives in the event a reload is needed.
4811 (define_insn "decrement_and_branch_until_zero"
4814 (match_operator 2 "comparison_operator"
4815 [(plus:SI (match_operand:SI 0 "register_operand" "+!r,!*f,!*m")
4816 (match_operand:SI 1 "int5_operand" "L,L,L"))
4818 (label_ref (match_operand 3 "" ""))
4821 (plus:SI (match_dup 0) (match_dup 1)))
4822 (clobber (match_scratch:SI 4 "=X,r,r"))]
4824 "* return output_dbra (operands, insn, which_alternative); "
4825 ;; Do not expect to understand this the first time through.
4826 [(set_attr "type" "cbranch,multi,multi")
4827 (set (attr "length")
4828 (if_then_else (eq_attr "alternative" "0")
4829 ;; Loop counter in register case
4830 ;; Short branch has length of 4
4831 ;; Long branch has length of 8
4832 (if_then_else (lt (abs (minus (match_dup 3) (plus (pc) (const_int 8))))
4837 ;; Loop counter in FP reg case.
4838 ;; Extra goo to deal with additional reload insns.
4839 (if_then_else (eq_attr "alternative" "1")
4840 (if_then_else (lt (match_dup 3) (pc))
4842 (lt (abs (minus (match_dup 3) (plus (pc) (const_int 24))))
4847 (lt (abs (minus (match_dup 3) (plus (pc) (const_int 8))))
4851 ;; Loop counter in memory case.
4852 ;; Extra goo to deal with additional reload insns.
4853 (if_then_else (lt (match_dup 3) (pc))
4855 (lt (abs (minus (match_dup 3) (plus (pc) (const_int 12))))
4860 (lt (abs (minus (match_dup 3) (plus (pc) (const_int 8))))
4863 (const_int 16))))))])
4868 (match_operator 2 "movb_comparison_operator"
4869 [(match_operand:SI 1 "register_operand" "r,r,r,r") (const_int 0)])
4870 (label_ref (match_operand 3 "" ""))
4872 (set (match_operand:SI 0 "register_operand" "=!r,!*f,!*m,!*q")
4875 "* return output_movb (operands, insn, which_alternative, 0); "
4876 ;; Do not expect to understand this the first time through.
4877 [(set_attr "type" "cbranch,multi,multi,multi")
4878 (set (attr "length")
4879 (if_then_else (eq_attr "alternative" "0")
4880 ;; Loop counter in register case
4881 ;; Short branch has length of 4
4882 ;; Long branch has length of 8
4883 (if_then_else (lt (abs (minus (match_dup 3) (plus (pc) (const_int 8))))
4888 ;; Loop counter in FP reg case.
4889 ;; Extra goo to deal with additional reload insns.
4890 (if_then_else (eq_attr "alternative" "1")
4891 (if_then_else (lt (match_dup 3) (pc))
4893 (lt (abs (minus (match_dup 3) (plus (pc) (const_int 12))))
4898 (lt (abs (minus (match_dup 3) (plus (pc) (const_int 8))))
4902 ;; Loop counter in memory or sar case.
4903 ;; Extra goo to deal with additional reload insns.
4905 (lt (abs (minus (match_dup 3) (plus (pc) (const_int 8))))
4908 (const_int 12)))))])
4910 ;; Handle negated branch.
4914 (match_operator 2 "movb_comparison_operator"
4915 [(match_operand:SI 1 "register_operand" "r,r,r,r") (const_int 0)])
4917 (label_ref (match_operand 3 "" ""))))
4918 (set (match_operand:SI 0 "register_operand" "=!r,!*f,!*m,!*q")
4921 "* return output_movb (operands, insn, which_alternative, 1); "
4922 ;; Do not expect to understand this the first time through.
4923 [(set_attr "type" "cbranch,multi,multi,multi")
4924 (set (attr "length")
4925 (if_then_else (eq_attr "alternative" "0")
4926 ;; Loop counter in register case
4927 ;; Short branch has length of 4
4928 ;; Long branch has length of 8
4929 (if_then_else (lt (abs (minus (match_dup 3) (plus (pc) (const_int 8))))
4934 ;; Loop counter in FP reg case.
4935 ;; Extra goo to deal with additional reload insns.
4936 (if_then_else (eq_attr "alternative" "1")
4937 (if_then_else (lt (match_dup 3) (pc))
4939 (lt (abs (minus (match_dup 3) (plus (pc) (const_int 12))))
4944 (lt (abs (minus (match_dup 3) (plus (pc) (const_int 8))))
4948 ;; Loop counter in memory or SAR case.
4949 ;; Extra goo to deal with additional reload insns.
4951 (lt (abs (minus (match_dup 3) (plus (pc) (const_int 8))))
4954 (const_int 12)))))])
4956 ;; The next several patterns (parallel_addb, parallel_movb, fmpyadd and
4957 ;; fmpysub aren't currently used by the FSF sources, but will be soon.
4959 ;; They're in the FSF tree for documentation and to make Cygnus<->FSF
4962 [(set (pc) (label_ref (match_operand 3 "" "" )))
4963 (set (match_operand:SI 0 "register_operand" "=r")
4964 (plus:SI (match_operand:SI 1 "register_operand" "r")
4965 (match_operand:SI 2 "ireg_or_int5_operand" "rL")))]
4966 "(reload_completed && operands[0] == operands[1]) || operands[0] == operands[2]"
4969 return output_parallel_addb (operands, get_attr_length (insn));
4971 [(set_attr "type" "parallel_branch")
4972 (set (attr "length")
4973 (if_then_else (lt (abs (minus (match_dup 3) (plus (pc) (const_int 8))))
4979 [(set (pc) (label_ref (match_operand 2 "" "" )))
4980 (set (match_operand:SF 0 "register_operand" "=r")
4981 (match_operand:SF 1 "ireg_or_int5_operand" "rL"))]
4985 return output_parallel_movb (operands, get_attr_length (insn));
4987 [(set_attr "type" "parallel_branch")
4988 (set (attr "length")
4989 (if_then_else (lt (abs (minus (match_dup 2) (plus (pc) (const_int 8))))
4995 [(set (pc) (label_ref (match_operand 2 "" "" )))
4996 (set (match_operand:SI 0 "register_operand" "=r")
4997 (match_operand:SI 1 "ireg_or_int5_operand" "rL"))]
5001 return output_parallel_movb (operands, get_attr_length (insn));
5003 [(set_attr "type" "parallel_branch")
5004 (set (attr "length")
5005 (if_then_else (lt (abs (minus (match_dup 2) (plus (pc) (const_int 8))))
5011 [(set (pc) (label_ref (match_operand 2 "" "" )))
5012 (set (match_operand:HI 0 "register_operand" "=r")
5013 (match_operand:HI 1 "ireg_or_int5_operand" "rL"))]
5017 return output_parallel_movb (operands, get_attr_length (insn));
5019 [(set_attr "type" "parallel_branch")
5020 (set (attr "length")
5021 (if_then_else (lt (abs (minus (match_dup 2) (plus (pc) (const_int 8))))
5027 [(set (pc) (label_ref (match_operand 2 "" "" )))
5028 (set (match_operand:QI 0 "register_operand" "=r")
5029 (match_operand:QI 1 "ireg_or_int5_operand" "rL"))]
5033 return output_parallel_movb (operands, get_attr_length (insn));
5035 [(set_attr "type" "parallel_branch")
5036 (set (attr "length")
5037 (if_then_else (lt (abs (minus (match_dup 2) (plus (pc) (const_int 8))))
5043 [(set (match_operand 0 "register_operand" "=f")
5044 (mult (match_operand 1 "register_operand" "f")
5045 (match_operand 2 "register_operand" "f")))
5046 (set (match_operand 3 "register_operand" "+f")
5047 (plus (match_operand 4 "register_operand" "f")
5048 (match_operand 5 "register_operand" "f")))]
5049 "TARGET_PA_11 && ! TARGET_SOFT_FLOAT
5050 && reload_completed && fmpyaddoperands (operands)"
5053 if (GET_MODE (operands[0]) == DFmode)
5055 if (rtx_equal_p (operands[3], operands[5]))
5056 return \"fmpyadd,dbl %1,%2,%0,%4,%3\";
5058 return \"fmpyadd,dbl %1,%2,%0,%5,%3\";
5062 if (rtx_equal_p (operands[3], operands[5]))
5063 return \"fmpyadd,sgl %1,%2,%0,%4,%3\";
5065 return \"fmpyadd,sgl %1,%2,%0,%5,%3\";
5068 [(set_attr "type" "fpalu")
5069 (set_attr "length" "4")])
5072 [(set (match_operand 3 "register_operand" "+f")
5073 (plus (match_operand 4 "register_operand" "f")
5074 (match_operand 5 "register_operand" "f")))
5075 (set (match_operand 0 "register_operand" "=f")
5076 (mult (match_operand 1 "register_operand" "f")
5077 (match_operand 2 "register_operand" "f")))]
5078 "TARGET_PA_11 && ! TARGET_SOFT_FLOAT
5079 && reload_completed && fmpyaddoperands (operands)"
5082 if (GET_MODE (operands[0]) == DFmode)
5084 if (rtx_equal_p (operands[3], operands[5]))
5085 return \"fmpyadd,dbl %1,%2,%0,%4,%3\";
5087 return \"fmpyadd,dbl %1,%2,%0,%5,%3\";
5091 if (rtx_equal_p (operands[3], operands[5]))
5092 return \"fmpyadd,sgl %1,%2,%0,%4,%3\";
5094 return \"fmpyadd,sgl %1,%2,%0,%5,%3\";
5097 [(set_attr "type" "fpalu")
5098 (set_attr "length" "4")])
5101 [(set (match_operand 0 "register_operand" "=f")
5102 (mult (match_operand 1 "register_operand" "f")
5103 (match_operand 2 "register_operand" "f")))
5104 (set (match_operand 3 "register_operand" "+f")
5105 (minus (match_operand 4 "register_operand" "f")
5106 (match_operand 5 "register_operand" "f")))]
5107 "TARGET_PA_11 && ! TARGET_SOFT_FLOAT
5108 && reload_completed && fmpysuboperands (operands)"
5111 if (GET_MODE (operands[0]) == DFmode)
5112 return \"fmpysub,dbl %1,%2,%0,%5,%3\";
5114 return \"fmpysub,sgl %1,%2,%0,%5,%3\";
5116 [(set_attr "type" "fpalu")
5117 (set_attr "length" "4")])
5120 [(set (match_operand 3 "register_operand" "+f")
5121 (minus (match_operand 4 "register_operand" "f")
5122 (match_operand 5 "register_operand" "f")))
5123 (set (match_operand 0 "register_operand" "=f")
5124 (mult (match_operand 1 "register_operand" "f")
5125 (match_operand 2 "register_operand" "f")))]
5126 "TARGET_PA_11 && ! TARGET_SOFT_FLOAT
5127 && reload_completed && fmpysuboperands (operands)"
5130 if (GET_MODE (operands[0]) == DFmode)
5131 return \"fmpysub,dbl %1,%2,%0,%5,%3\";
5133 return \"fmpysub,sgl %1,%2,%0,%5,%3\";
5135 [(set_attr "type" "fpalu")
5136 (set_attr "length" "4")])
5138 ;; Clean up turds left by reload.
5140 [(set (match_operand 0 "reg_or_nonsymb_mem_operand" "")
5141 (match_operand 1 "register_operand" "fr"))
5142 (set (match_operand 2 "register_operand" "fr")
5144 "! TARGET_SOFT_FLOAT
5145 && GET_CODE (operands[0]) == MEM
5146 && ! MEM_VOLATILE_P (operands[0])
5147 && GET_MODE (operands[0]) == GET_MODE (operands[1])
5148 && GET_MODE (operands[0]) == GET_MODE (operands[2])
5149 && GET_MODE (operands[0]) == DFmode
5150 && GET_CODE (operands[1]) == REG
5151 && GET_CODE (operands[2]) == REG
5152 && ! side_effects_p (XEXP (operands[0], 0))
5153 && REGNO_REG_CLASS (REGNO (operands[1]))
5154 == REGNO_REG_CLASS (REGNO (operands[2]))"
5159 if (FP_REG_P (operands[1]))
5160 output_asm_insn (output_fp_move_double (operands), operands);
5162 output_asm_insn (output_move_double (operands), operands);
5164 if (rtx_equal_p (operands[1], operands[2]))
5167 xoperands[0] = operands[2];
5168 xoperands[1] = operands[1];
5170 if (FP_REG_P (xoperands[1]))
5171 output_asm_insn (output_fp_move_double (xoperands), xoperands);
5173 output_asm_insn (output_move_double (xoperands), xoperands);
5179 [(set (match_operand 0 "register_operand" "fr")
5180 (match_operand 1 "reg_or_nonsymb_mem_operand" ""))
5181 (set (match_operand 2 "register_operand" "fr")
5183 "! TARGET_SOFT_FLOAT
5184 && GET_CODE (operands[1]) == MEM
5185 && ! MEM_VOLATILE_P (operands[1])
5186 && GET_MODE (operands[0]) == GET_MODE (operands[1])
5187 && GET_MODE (operands[0]) == GET_MODE (operands[2])
5188 && GET_MODE (operands[0]) == DFmode
5189 && GET_CODE (operands[0]) == REG
5190 && GET_CODE (operands[2]) == REG
5191 && ! side_effects_p (XEXP (operands[1], 0))
5192 && REGNO_REG_CLASS (REGNO (operands[0]))
5193 == REGNO_REG_CLASS (REGNO (operands[2]))"
5198 if (FP_REG_P (operands[0]))
5199 output_asm_insn (output_fp_move_double (operands), operands);
5201 output_asm_insn (output_move_double (operands), operands);
5203 xoperands[0] = operands[2];
5204 xoperands[1] = operands[0];
5206 if (FP_REG_P (xoperands[1]))
5207 output_asm_insn (output_fp_move_double (xoperands), xoperands);
5209 output_asm_insn (output_move_double (xoperands), xoperands);
5214 ;; Flush the I and D cache line found at the address in operand 0.
5215 ;; This is used by the trampoline code for nested functions.
5216 ;; So long as the trampoline itself is less than 32 bytes this
5219 (define_insn "dcacheflush"
5220 [(unspec_volatile [(const_int 1)] 0)
5221 (use (mem:SI (match_operand 0 "register_operand" "r")))
5222 (use (mem:SI (match_operand 1 "register_operand" "r")))]
5224 "fdc 0(%0)\;fdc 0(%1)\;sync"
5225 [(set_attr "type" "multi")
5226 (set_attr "length" "12")])
5228 (define_insn "icacheflush"
5229 [(unspec_volatile [(const_int 2)] 0)
5230 (use (mem:SI (match_operand 0 "register_operand" "r")))
5231 (use (mem:SI (match_operand 1 "register_operand" "r")))
5232 (use (match_operand 2 "register_operand" "r"))
5233 (clobber (match_operand 3 "register_operand" "=&r"))
5234 (clobber (match_operand 4 "register_operand" "=&r"))]
5236 "mfsp %%sr0,%4\;ldsid (%2),%3\;mtsp %3,%%sr0\;fic 0(%%sr0,%0)\;fic 0(%%sr0,%1)\;sync\;mtsp %4,%%sr0\;nop\;nop\;nop\;nop\;nop\;nop"
5237 [(set_attr "type" "multi")
5238 (set_attr "length" "52")])
5240 ;; An out-of-line prologue.
5241 (define_insn "outline_prologue_call"
5242 [(unspec_volatile [(const_int 0)] 0)
5243 (clobber (reg:SI 31))
5244 (clobber (reg:SI 22))
5245 (clobber (reg:SI 21))
5246 (clobber (reg:SI 20))
5247 (clobber (reg:SI 19))
5248 (clobber (reg:SI 1))]
5252 extern int frame_pointer_needed;
5254 /* We need two different versions depending on whether or not we
5255 need a frame pointer. Also note that we return to the instruction
5256 immediately after the branch rather than two instructions after the
5257 break as normally is the case. */
5258 if (frame_pointer_needed)
5260 /* Must import the magic millicode routine(s). */
5261 output_asm_insn (\".IMPORT __outline_prologue_fp,MILLICODE\", NULL);
5263 if (TARGET_PORTABLE_RUNTIME)
5265 output_asm_insn (\"ldil L'__outline_prologue_fp,%%r31\", NULL);
5266 output_asm_insn (\"ble,n R'__outline_prologue_fp(%%sr0,%%r31)\",
5270 output_asm_insn (\"bl,n __outline_prologue_fp,%%r31\", NULL);
5274 /* Must import the magic millicode routine(s). */
5275 output_asm_insn (\".IMPORT __outline_prologue,MILLICODE\", NULL);
5277 if (TARGET_PORTABLE_RUNTIME)
5279 output_asm_insn (\"ldil L'__outline_prologue,%%r31\", NULL);
5280 output_asm_insn (\"ble,n R'__outline_prologue(%%sr0,%%r31)\", NULL);
5283 output_asm_insn (\"bl,n __outline_prologue,%%r31\", NULL);
5287 [(set_attr "type" "multi")
5288 (set_attr "length" "8")])
5290 ;; An out-of-line epilogue.
5291 (define_insn "outline_epilogue_call"
5292 [(unspec_volatile [(const_int 1)] 0)
5295 (clobber (reg:SI 31))
5296 (clobber (reg:SI 22))
5297 (clobber (reg:SI 21))
5298 (clobber (reg:SI 20))
5299 (clobber (reg:SI 19))
5300 (clobber (reg:SI 2))
5301 (clobber (reg:SI 1))]
5305 extern int frame_pointer_needed;
5307 /* We need two different versions depending on whether or not we
5308 need a frame pointer. Also note that we return to the instruction
5309 immediately after the branch rather than two instructions after the
5310 break as normally is the case. */
5311 if (frame_pointer_needed)
5313 /* Must import the magic millicode routine. */
5314 output_asm_insn (\".IMPORT __outline_epilogue_fp,MILLICODE\", NULL);
5316 /* The out-of-line prologue will make sure we return to the right
5318 if (TARGET_PORTABLE_RUNTIME)
5320 output_asm_insn (\"ldil L'__outline_epilogue_fp,%%r31\", NULL);
5321 output_asm_insn (\"ble,n R'__outline_epilogue_fp(%%sr0,%%r31)\",
5325 output_asm_insn (\"bl,n __outline_epilogue_fp,%%r31\", NULL);
5329 /* Must import the magic millicode routine. */
5330 output_asm_insn (\".IMPORT __outline_epilogue,MILLICODE\", NULL);
5332 /* The out-of-line prologue will make sure we return to the right
5334 if (TARGET_PORTABLE_RUNTIME)
5336 output_asm_insn (\"ldil L'__outline_epilogue,%%r31\", NULL);
5337 output_asm_insn (\"ble,n R'__outline_epilogue(%%sr0,%%r31)\", NULL);
5340 output_asm_insn (\"bl,n __outline_epilogue,%%r31\", NULL);
5344 [(set_attr "type" "multi")
5345 (set_attr "length" "8")])
5347 ;; Given a function pointer, canonicalize it so it can be
5348 ;; reliably compared to another function pointer. */
5349 (define_expand "canonicalize_funcptr_for_compare"
5350 [(set (reg:SI 26) (match_operand:SI 1 "register_operand" ""))
5351 (parallel [(set (reg:SI 29) (unspec:SI [(reg:SI 26)] 0))
5352 (clobber (match_dup 2))
5353 (clobber (reg:SI 26))
5354 (clobber (reg:SI 22))
5355 (clobber (reg:SI 31))])
5356 (set (match_operand:SI 0 "register_operand" "")
5358 "! TARGET_PORTABLE_RUNTIME"
5361 operands[2] = gen_reg_rtx (SImode);
5362 if (GET_CODE (operands[1]) != REG)
5364 rtx tmp = gen_reg_rtx (Pmode);
5365 emit_move_insn (tmp, operands[1]);
5371 [(set (reg:SI 29) (unspec:SI [(reg:SI 26)] 0))
5372 (clobber (match_operand:SI 0 "register_operand" "=a"))
5373 (clobber (reg:SI 26))
5374 (clobber (reg:SI 22))
5375 (clobber (reg:SI 31))]
5379 /* Must import the magic millicode routine. */
5380 output_asm_insn (\".IMPORT $$sh_func_adrs,MILLICODE\", NULL);
5382 /* This is absolutely amazing.
5384 First, copy our input parameter into %r29 just in case we don't
5385 need to call $$sh_func_adrs. */
5386 output_asm_insn (\"copy %%r26,%%r29\", NULL);
5388 /* Next, examine the low two bits in %r26, if they aren't 0x2, then
5389 we use %r26 unchanged. */
5390 if (get_attr_length (insn) == 32)
5391 output_asm_insn (\"extru %%r26,31,2,%%r31\;comib,<>,n 2,%%r31,.+24\", NULL);
5392 else if (get_attr_length (insn) == 40)
5393 output_asm_insn (\"extru %%r26,31,2,%%r31\;comib,<>,n 2,%%r31,.+32\", NULL);
5394 else if (get_attr_length (insn) == 44)
5395 output_asm_insn (\"extru %%r26,31,2,%%r31\;comib,<>,n 2,%%r31,.+36\", NULL);
5397 output_asm_insn (\"extru %%r26,31,2,%%r31\;comib,<>,n 2,%%r31,.+20\", NULL);
5399 /* Next, compare %r26 with 4096, if %r26 is less than or equal to
5400 4096, then we use %r26 unchanged. */
5401 if (get_attr_length (insn) == 32)
5402 output_asm_insn (\"ldi 4096,%%r31\;comb,<<,n %%r26,%%r31,.+16\", NULL);
5403 else if (get_attr_length (insn) == 40)
5404 output_asm_insn (\"ldi 4096,%%r31\;comb,<<,n %%r26,%%r31,.+24\", NULL);
5405 else if (get_attr_length (insn) == 44)
5406 output_asm_insn (\"ldi 4096,%%r31\;comb,<<,n %%r26,%%r31,.+28\", NULL);
5408 output_asm_insn (\"ldi 4096,%%r31\;comb,<<,n %%r26,%%r31,.+12\", NULL);
5410 /* Else call $$sh_func_adrs to extract the function's real add24. */
5411 return output_millicode_call (insn,
5412 gen_rtx_SYMBOL_REF (SImode,
5413 \"$$sh_func_adrs\"));
5415 [(set_attr "type" "multi")
5416 (set (attr "length")
5418 ;; Target (or stub) within reach
5419 (and (lt (plus (symbol_ref "total_code_bytes") (pc))
5421 (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
5426 (ne (symbol_ref "TARGET_NO_SPACE_REGS || TARGET_FAST_INDIRECT_CALLS")
5430 ;; Out of reach, but not PIC or PORTABLE_RUNTIME
5431 ;; same as NO_SPACE_REGS code
5432 (and (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
5434 (eq (symbol_ref "flag_pic")
5439 (ne (symbol_ref "TARGET_PORTABLE_RUNTIME")
5443 ;; Out of range and PIC
5446 ;; On the PA, the PIC register is call clobbered, so it must
5447 ;; be saved & restored around calls by the caller. If the call
5448 ;; doesn't return normally (nonlocal goto, or an exception is
5449 ;; thrown), then the code at the exception handler label must
5450 ;; restore the PIC register.
5451 (define_expand "exception_receiver"
5453 "!TARGET_PORTABLE_RUNTIME && flag_pic"
5456 /* Load the PIC register from the stack slot (in our caller's
5458 emit_move_insn (pic_offset_table_rtx,
5459 gen_rtx_MEM (SImode,
5460 plus_constant (stack_pointer_rtx, -32)));
5461 emit_insn (gen_rtx (USE, VOIDmode, pic_offset_table_rtx));
5462 emit_insn (gen_blockage ());