1 ;;- Machine description for HP PA-RISC architecture for GNU C compiler
2 ;; Copyright (C) 1992, 93-98, 1999 Free Software Foundation, Inc.
3 ;; Contributed by the Center for Software Science at the University
6 ;; This file is part of GNU CC.
8 ;; GNU CC is free software; you can redistribute it and/or modify
9 ;; it under the terms of the GNU General Public License as published by
10 ;; the Free Software Foundation; either version 2, or (at your option)
13 ;; GNU CC is distributed in the hope that it will be useful,
14 ;; but WITHOUT ANY WARRANTY; without even the implied warranty of
15 ;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 ;; GNU General Public License for more details.
18 ;; You should have received a copy of the GNU General Public License
19 ;; along with GNU CC; see the file COPYING. If not, write to
20 ;; the Free Software Foundation, 59 Temple Place - Suite 330,
21 ;; Boston, MA 02111-1307, USA.
23 ;; This gcc Version 2 machine description is inspired by sparc.md and
26 ;;- See file "rtl.def" for documentation on define_insn, match_*, et. al.
28 ;; Insn type. Used to default other attribute values.
30 ;; type "unary" insns have one input operand (1) and one output operand (0)
31 ;; type "binary" insns have two input operands (1,2) and one output (0)
34 "move,unary,binary,shift,nullshift,compare,load,store,uncond_branch,branch,cbranch,fbranch,call,dyncall,fpload,fpstore,fpalu,fpcc,fpmulsgl,fpmuldbl,fpdivsgl,fpdivdbl,fpsqrtsgl,fpsqrtdbl,multi,milli,parallel_branch"
35 (const_string "binary"))
;; Classification used when fusing an insn with a neighbouring insn
;; (presumably consumed by the PA-specific combine pass in pa.c --
;; confirm).  Every insn defaults to "none", i.e. not a candidate.
37 (define_attr "pa_combine_type"
38 "fmpy,faddsub,uncond_branch,addmove,none"
39 (const_string "none"))
41 ;; Processor type (for scheduling, not code generation) -- this attribute
42 ;; must exactly match the processor_type enumeration in pa.h.
44 ;; FIXME: Add 800 scheduling for completeness?
46 (define_attr "cpu" "700,7100,7100LC,7200,8000" (const (symbol_ref "pa_cpu_attr")))
48 ;; Length (in # of insns).
49 (define_attr "length" ""
50 (cond [(eq_attr "type" "load,fpload")
51 (if_then_else (match_operand 1 "symbolic_memory_operand" "")
52 (const_int 8) (const_int 4))
54 (eq_attr "type" "store,fpstore")
55 (if_then_else (match_operand 0 "symbolic_memory_operand" "")
56 (const_int 8) (const_int 4))
58 (eq_attr "type" "binary,shift,nullshift")
59 (if_then_else (match_operand 2 "arith_operand" "")
60 (const_int 4) (const_int 12))
62 (eq_attr "type" "move,unary,shift,nullshift")
63 (if_then_else (match_operand 1 "arith_operand" "")
64 (const_int 4) (const_int 8))]
;; Inline asm statements are modeled as a single 4-byte insn of type
;; "multi"; "multi" is excluded from every delay-slot attribute above,
;; so asm never lands in a delay slot.
68 (define_asm_attributes
69 [(set_attr "length" "4")
70 (set_attr "type" "multi")])
72 ;; Attributes for instruction and branch scheduling
74 ;; For conditional branches.
75 (define_attr "in_branch_delay" "false,true"
76 (if_then_else (and (eq_attr "type" "!uncond_branch,branch,cbranch,fbranch,call,dyncall,multi,milli,parallel_branch")
77 (eq_attr "length" "4"))
79 (const_string "false")))
81 ;; Disallow instructions which use the FPU since they will tie up the FPU
82 ;; even if the instruction is nullified.
83 (define_attr "in_nullified_branch_delay" "false,true"
84 (if_then_else (and (eq_attr "type" "!uncond_branch,branch,cbranch,fbranch,call,dyncall,multi,milli,fpcc,fpalu,fpmulsgl,fpmuldbl,fpdivsgl,fpdivdbl,fpsqrtsgl,fpsqrtdbl,parallel_branch")
85 (eq_attr "length" "4"))
87 (const_string "false")))
89 ;; For calls and millicode calls. Allow unconditional branches in the
91 (define_attr "in_call_delay" "false,true"
92 (cond [(and (eq_attr "type" "!uncond_branch,branch,cbranch,fbranch,call,dyncall,multi,milli,parallel_branch")
93 (eq_attr "length" "4"))
95 (eq_attr "type" "uncond_branch")
96 (if_then_else (ne (symbol_ref "TARGET_JUMP_IN_DELAY")
99 (const_string "false"))]
100 (const_string "false")))
103 ;; Call delay slot description.
;; Call delay slot description: one slot, fillable by any insn whose
;; in_call_delay attribute is true.  The two (nil) entries mean no
;; annul-if-true / annul-if-false variants are available.
104 (define_delay (eq_attr "type" "call")
105 [(eq_attr "in_call_delay" "true") (nil) (nil)])
107 ;; millicode call delay slot description. Note it disallows delay slot
108 ;; when TARGET_PORTABLE_RUNTIME is true.
109 (define_delay (eq_attr "type" "milli")
110 [(and (eq_attr "in_call_delay" "true")
111 (eq (symbol_ref "TARGET_PORTABLE_RUNTIME") (const_int 0)))
114 ;; Return and other similar instructions.
;; Return and other similar instructions: one delay slot, no
;; annulling, fillable per the in_branch_delay attribute.
115 (define_delay (eq_attr "type" "branch,parallel_branch")
116 [(eq_attr "in_branch_delay" "true") (nil) (nil)])
118 ;; Floating point conditional branch delay slot description and
119 (define_delay (eq_attr "type" "fbranch")
120 [(eq_attr "in_branch_delay" "true")
121 (eq_attr "in_nullified_branch_delay" "true")
124 ;; Integer conditional branch delay slot description.
125 ;; Nullification of conditional branches on the PA is dependent on the
126 ;; direction of the branch. Forward branches nullify true and
127 ;; backward branches nullify false. If the direction is unknown
128 ;; then nullification is not allowed.
;; Integer conditional branch delay slot description.
;; Nullification of conditional branches on the PA is dependent on the
;; direction of the branch.  Forward branches nullify true and
;; backward branches nullify false.  If the direction is unknown
;; then nullification is not allowed.
;; The triple below is [slot-fillable, annul-if-true, annul-if-false]:
;; annulling candidates are only accepted when the branch direction
;; is known (attr_flag "forward"/"backward").
129 (define_delay (eq_attr "type" "cbranch")
130 [(eq_attr "in_branch_delay" "true")
131 (and (eq_attr "in_nullified_branch_delay" "true")
132 (attr_flag "forward"))
133 (and (eq_attr "in_nullified_branch_delay" "true")
134 (attr_flag "backward"))])
136 (define_delay (and (eq_attr "type" "uncond_branch")
137 (eq (symbol_ref "following_call (insn)")
139 [(eq_attr "in_branch_delay" "true") (nil) (nil)])
141 ;; Function units of the HPPA. The following data is for the 700 CPUs
142 ;; (Mustang CPU + Timex FPU aka PA-89) because that's what I have the docs for.
143 ;; Scheduling instructions for PA-83 machines according to the Snake
144 ;; constraints shouldn't hurt.
146 ;; (define_function_unit {name} {num-units} {n-users} {test}
147 ;; {ready-delay} {issue-delay} [{conflict-list}])
150 ;; (Noted only for documentation; units that take one cycle do not need to
153 ;; (define_function_unit "alu" 1 0
154 ;; (and (eq_attr "type" "unary,shift,nullshift,binary,move,address")
155 ;; (eq_attr "cpu" "700"))
159 ;; Memory. Disregarding Cache misses, the Mustang memory times are:
160 ;; load: 2, fpload: 3
161 ;; store, fpstore: 3, no D-cache operations should be scheduled.
;; 700-series memory unit.  Per the define_function_unit template
;; documented above ({ready-delay} {issue-delay}): loads are ready
;; after 2 cycles with no issue restriction; stores take 3 cycles and
;; keep the unit busy for 3 (no D-cache op may be scheduled meanwhile,
;; matching the note above).
163 (define_function_unit "pa700memory" 1 0
164 (and (eq_attr "type" "load,fpload")
165 (eq_attr "cpu" "700")) 2 0)
166 (define_function_unit "pa700memory" 1 0
167 (and (eq_attr "type" "store,fpstore")
168 (eq_attr "cpu" "700")) 3 3)
170 ;; The Timex (aka 700) has two floating-point units: ALU, and MUL/DIV/SQRT.
172 ;; Instruction Time Unit Minimum Distance (unit contention)
179 ;; fmpyadd 3 ALU,MPY 2
180 ;; fmpysub 3 ALU,MPY 2
181 ;; fmpycfxt 3 ALU,MPY 2
184 ;; fdiv,sgl 10 MPY 10
185 ;; fdiv,dbl 12 MPY 12
186 ;; fsqrt,sgl 14 MPY 14
187 ;; fsqrt,dbl 18 MPY 18
;; 700-series FP ALU: fcmp results ready after 4 cycles, other FP ALU
;; ops after 3; minimum distance (issue delay) of 2 in both cases,
;; matching the unit-contention table above.
189 (define_function_unit "pa700fp_alu" 1 0
190 (and (eq_attr "type" "fpcc")
191 (eq_attr "cpu" "700")) 4 2)
192 (define_function_unit "pa700fp_alu" 1 0
193 (and (eq_attr "type" "fpalu")
194 (eq_attr "cpu" "700")) 3 2)
;; 700-series FP MUL/DIV/SQRT unit: multiplies are 3 cycles (issue
;; delay 2); divide and sqrt have issue-delay equal to their latency,
;; i.e. they occupy the unit for their full duration (not pipelined).
195 (define_function_unit "pa700fp_mpy" 1 0
196 (and (eq_attr "type" "fpmulsgl,fpmuldbl")
197 (eq_attr "cpu" "700")) 3 2)
198 (define_function_unit "pa700fp_mpy" 1 0
199 (and (eq_attr "type" "fpdivsgl")
200 (eq_attr "cpu" "700")) 10 10)
201 (define_function_unit "pa700fp_mpy" 1 0
202 (and (eq_attr "type" "fpdivdbl")
203 (eq_attr "cpu" "700")) 12 12)
204 (define_function_unit "pa700fp_mpy" 1 0
205 (and (eq_attr "type" "fpsqrtsgl")
206 (eq_attr "cpu" "700")) 14 14)
207 (define_function_unit "pa700fp_mpy" 1 0
208 (and (eq_attr "type" "fpsqrtdbl")
209 (eq_attr "cpu" "700")) 18 18)
211 ;; Function units for the 7100 and 7150. The 7100/7150 can dual-issue
212 ;; floating point computations with non-floating point computations (fp loads
213 ;; and stores are not fp computations).
216 ;; Memory. Disregarding Cache misses, memory loads take two cycles; stores also
217 ;; take two cycles, during which no Dcache operations should be scheduled.
218 ;; Any special cases are handled in pa_adjust_cost. The 7100, 7150 and 7100LC
219 ;; all have the same memory characteristics if one disregards cache misses.
;; 7100/7150/7100LC shared memory unit: loads ready in 2 cycles with
;; no issue restriction; stores take 2 cycles and block further
;; D-cache operations for those 2 cycles (issue delay 2).
220 (define_function_unit "pa7100memory" 1 0
221 (and (eq_attr "type" "load,fpload")
222 (eq_attr "cpu" "7100,7100LC")) 2 0)
223 (define_function_unit "pa7100memory" 1 0
224 (and (eq_attr "type" "store,fpstore")
225 (eq_attr "cpu" "7100,7100LC")) 2 2)
227 ;; The 7100/7150 has three floating-point units: ALU, MUL, and DIV.
229 ;; Instruction Time Unit Minimum Distance (unit contention)
236 ;; fmpyadd 2 ALU,MPY 1
237 ;; fmpysub 2 ALU,MPY 1
238 ;; fmpycfxt 2 ALU,MPY 1
242 ;; fdiv,dbl 15 DIV 15
244 ;; fsqrt,dbl 15 DIV 15
;; 7100/7150 FP units.  ALU and MUL are pipelined (latency 2, issue
;; delay 1); the DIV unit handles divide and sqrt and is fully
;; occupied for the whole operation (issue delay == latency):
;; 8 cycles single precision, 15 cycles double.
246 (define_function_unit "pa7100fp_alu" 1 0
247 (and (eq_attr "type" "fpcc,fpalu")
248 (eq_attr "cpu" "7100")) 2 1)
249 (define_function_unit "pa7100fp_mpy" 1 0
250 (and (eq_attr "type" "fpmulsgl,fpmuldbl")
251 (eq_attr "cpu" "7100")) 2 1)
252 (define_function_unit "pa7100fp_div" 1 0
253 (and (eq_attr "type" "fpdivsgl,fpsqrtsgl")
254 (eq_attr "cpu" "7100")) 8 8)
255 (define_function_unit "pa7100fp_div" 1 0
256 (and (eq_attr "type" "fpdivdbl,fpsqrtdbl")
257 (eq_attr "cpu" "7100")) 15 15)
259 ;; To encourage dual issue we define function units corresponding to
260 ;; the instructions which can be dual issued. This is a rather crude
261 ;; approximation, the "pa7100nonflop" test in particular could be refined.
262 (define_function_unit "pa7100flop" 1 1
264 (eq_attr "type" "fpcc,fpalu,fpmulsgl,fpmuldbl,fpdivsgl,fpsqrtsgl,fpdivdbl,fpsqrtdbl")
265 (eq_attr "cpu" "7100")) 1 1)
267 (define_function_unit "pa7100nonflop" 1 1
269 (eq_attr "type" "!fpcc,fpalu,fpmulsgl,fpmuldbl,fpdivsgl,fpsqrtsgl,fpdivdbl,fpsqrtdbl")
270 (eq_attr "cpu" "7100")) 1 1)
273 ;; Memory subsystem works just like 7100/7150 (except for cache miss times which
274 ;; we don't model here).
276 ;; The 7100LC has three floating-point units: ALU, MUL, and DIV.
277 ;; Note divides and sqrt flops lock the cpu until the flop is
278 ;; finished. fmpy and xmpyu (fmpyi) lock the cpu for one cycle.
279 ;; There's no way to avoid the penalty.
281 ;; Instruction Time Unit Minimum Distance (unit contention)
288 ;; fmpyadd,sgl 2 ALU,MPY 1
289 ;; fmpyadd,dbl 3 ALU,MPY 2
290 ;; fmpysub,sgl 2 ALU,MPY 1
291 ;; fmpysub,dbl 3 ALU,MPY 2
292 ;; fmpycfxt,sgl 2 ALU,MPY 1
293 ;; fmpycfxt,dbl 3 ALU,MPY 2
298 ;; fdiv,dbl 15 DIV 15
300 ;; fsqrt,dbl 15 DIV 15
;; 7100LC/7200 FP units (the 7200 reuses the 7100LC model, see notes
;; below).  Unlike the 7100, single- and double-precision multiplies
;; differ: sgl is 2 cycles / issue delay 1, dbl is 3 / 2, matching the
;; table above.  Divide/sqrt again occupy the DIV unit for their full
;; latency (8 sgl, 15 dbl).
302 (define_function_unit "pa7100LCfp_alu" 1 0
303 (and (eq_attr "type" "fpcc,fpalu")
304 (eq_attr "cpu" "7100LC,7200")) 2 1)
305 (define_function_unit "pa7100LCfp_mpy" 1 0
306 (and (eq_attr "type" "fpmulsgl")
307 (eq_attr "cpu" "7100LC,7200")) 2 1)
308 (define_function_unit "pa7100LCfp_mpy" 1 0
309 (and (eq_attr "type" "fpmuldbl")
310 (eq_attr "cpu" "7100LC,7200")) 3 2)
311 (define_function_unit "pa7100LCfp_div" 1 0
312 (and (eq_attr "type" "fpdivsgl,fpsqrtsgl")
313 (eq_attr "cpu" "7100LC,7200")) 8 8)
314 (define_function_unit "pa7100LCfp_div" 1 0
315 (and (eq_attr "type" "fpdivdbl,fpsqrtdbl")
316 (eq_attr "cpu" "7100LC,7200")) 15 15)
318 ;; Define the various functional units for dual-issue.
320 ;; There's only one floating point unit.
321 (define_function_unit "pa7100LCflop" 1 1
323 (eq_attr "type" "fpcc,fpalu,fpmulsgl,fpmuldbl,fpdivsgl,fpsqrtsgl,fpdivdbl,fpsqrtdbl")
324 (eq_attr "cpu" "7100LC,7200")) 1 1)
326 ;; Shifts and memory ops actually execute in one of the integer
327 ;; ALUs, but we can't really model that.
328 (define_function_unit "pa7100LCshiftmem" 1 1
330 (eq_attr "type" "shift,nullshift,load,fpload,store,fpstore")
331 (eq_attr "cpu" "7100LC,7200")) 1 1)
333 ;; We have two basic ALUs.
334 (define_function_unit "pa7100LCalu" 2 2
336 (eq_attr "type" "!fpcc,fpalu,fpmulsgl,fpmuldbl,fpdivsgl,fpsqrtsgl,fpdivdbl,fpsqrtdbl,load,fpload,store,fpstore,shift,nullshift")
337 (eq_attr "cpu" "7100LC,7200")) 1 1)
339 ;; I don't have complete information on the PA7200; however, most of
340 ;; what I've heard makes it look like a 7100LC without the store-store
341 ;; penalty. So that's how we'll model it.
343 ;; Memory. Disregarding Cache misses, memory loads and stores take
344 ;; two cycles. Any special cases are handled in pa_adjust_cost.
;; 7200 memory unit: loads and stores both ready in 2 cycles with no
;; issue restriction -- i.e. the 7100LC model minus the store-store
;; penalty, per the comment above.
345 (define_function_unit "pa7200memory" 1 0
346 (and (eq_attr "type" "load,fpload,store,fpstore")
347 (eq_attr "cpu" "7200")) 2 0)
349 ;; I don't have detailed information on the PA7200 FP pipeline, so I
350 ;; treat it just like the 7100LC pipeline.
351 ;; Similarly for the multi-issue fake units.
354 ;; Scheduling for the PA8000 is somewhat different than scheduling for a
355 ;; traditional architecture.
357 ;; The PA8000 has a large (56) entry reorder buffer that is split between
358 ;; memory and non-memory operations.
360 ;; The PA8000 can issue two memory and two non-memory operations per cycle to
361 ;; the function units. Similarly, the PA8000 can retire two memory and two
362 ;; non-memory operations per cycle.
364 ;; Given the large reorder buffer, the processor can hide most latencies.
365 ;; According to HP, they've got the best results by scheduling for retirement
366 ;; bandwidth with limited latency scheduling for floating point operations.
367 ;; Latency for integer operations and memory references is ignored.
369 ;; We claim floating point operations have a 2 cycle latency and are
370 ;; fully pipelined, except for div and sqrt which are not pipelined.
372 ;; It is not necessary to define the shifter and integer alu units.
374 ;; These first two define_function_unit descriptions model retirement from
375 ;; the reorder buffer.
376 (define_function_unit "pa8000lsu" 2 1
378 (eq_attr "type" "load,fpload,store,fpstore")
379 (eq_attr "cpu" "8000")) 1 1)
381 (define_function_unit "pa8000alu" 2 1
383 (eq_attr "type" "!load,fpload,store,fpstore")
384 (eq_attr "cpu" "8000")) 1 1)
386 ;; Claim floating point ops have a 2 cycle latency, excluding div and
387 ;; sqrt, which are not pipelined and issue to different units.
388 (define_function_unit "pa8000fmac" 2 0
390 (eq_attr "type" "fpcc,fpalu,fpmulsgl,fpmuldbl")
391 (eq_attr "cpu" "8000")) 2 1)
393 (define_function_unit "pa8000fdiv" 2 1
395 (eq_attr "type" "fpdivsgl,fpsqrtsgl")
396 (eq_attr "cpu" "8000")) 17 17)
398 (define_function_unit "pa8000fdiv" 2 1
400 (eq_attr "type" "fpdivdbl,fpsqrtdbl")
401 (eq_attr "cpu" "8000")) 31 31)
404 ;; Compare instructions.
405 ;; This controls RTL generation and register allocation.
407 ;; We generate RTL for comparisons and branches by having the cmpxx
408 ;; patterns store away the operands. Then, the scc and bcc patterns
409 ;; emit RTL for both the compare and the branch.
412 (define_expand "cmpsi"
414 (compare:CC (match_operand:SI 0 "reg_or_0_operand" "")
415 (match_operand:SI 1 "arith5_operand" "")))]
419 hppa_compare_op0 = operands[0];
420 hppa_compare_op1 = operands[1];
421 hppa_branch_type = CMP_SI;
425 (define_expand "cmpsf"
427 (compare:CCFP (match_operand:SF 0 "reg_or_0_operand" "")
428 (match_operand:SF 1 "reg_or_0_operand" "")))]
429 "! TARGET_SOFT_FLOAT"
432 hppa_compare_op0 = operands[0];
433 hppa_compare_op1 = operands[1];
434 hppa_branch_type = CMP_SF;
438 (define_expand "cmpdf"
440 (compare:CCFP (match_operand:DF 0 "reg_or_0_operand" "")
441 (match_operand:DF 1 "reg_or_0_operand" "")))]
442 "! TARGET_SOFT_FLOAT"
445 hppa_compare_op0 = operands[0];
446 hppa_compare_op1 = operands[1];
447 hppa_branch_type = CMP_DF;
453 (match_operator:CCFP 2 "comparison_operator"
454 [(match_operand:SF 0 "reg_or_0_operand" "fG")
455 (match_operand:SF 1 "reg_or_0_operand" "fG")]))]
456 "! TARGET_SOFT_FLOAT"
457 "fcmp,sgl,%Y2 %r0,%r1"
458 [(set_attr "length" "4")
459 (set_attr "type" "fpcc")])
463 (match_operator:CCFP 2 "comparison_operator"
464 [(match_operand:DF 0 "reg_or_0_operand" "fG")
465 (match_operand:DF 1 "reg_or_0_operand" "fG")]))]
466 "! TARGET_SOFT_FLOAT"
467 "fcmp,dbl,%Y2 %r0,%r1"
468 [(set_attr "length" "4")
469 (set_attr "type" "fpcc")])
474 [(set (match_operand:SI 0 "register_operand" "")
480 /* fp scc patterns rarely match, and are not a win on the PA. */
481 if (hppa_branch_type != CMP_SI)
483 /* set up operands from compare. */
484 operands[1] = hppa_compare_op0;
485 operands[2] = hppa_compare_op1;
486 /* fall through and generate default code */
490 [(set (match_operand:SI 0 "register_operand" "")
496 /* fp scc patterns rarely match, and are not a win on the PA. */
497 if (hppa_branch_type != CMP_SI)
499 operands[1] = hppa_compare_op0;
500 operands[2] = hppa_compare_op1;
504 [(set (match_operand:SI 0 "register_operand" "")
510 /* fp scc patterns rarely match, and are not a win on the PA. */
511 if (hppa_branch_type != CMP_SI)
513 operands[1] = hppa_compare_op0;
514 operands[2] = hppa_compare_op1;
518 [(set (match_operand:SI 0 "register_operand" "")
524 /* fp scc patterns rarely match, and are not a win on the PA. */
525 if (hppa_branch_type != CMP_SI)
527 operands[1] = hppa_compare_op0;
528 operands[2] = hppa_compare_op1;
532 [(set (match_operand:SI 0 "register_operand" "")
538 /* fp scc patterns rarely match, and are not a win on the PA. */
539 if (hppa_branch_type != CMP_SI)
541 operands[1] = hppa_compare_op0;
542 operands[2] = hppa_compare_op1;
546 [(set (match_operand:SI 0 "register_operand" "")
552 /* fp scc patterns rarely match, and are not a win on the PA. */
553 if (hppa_branch_type != CMP_SI)
555 operands[1] = hppa_compare_op0;
556 operands[2] = hppa_compare_op1;
559 (define_expand "sltu"
560 [(set (match_operand:SI 0 "register_operand" "")
561 (ltu:SI (match_dup 1)
566 if (hppa_branch_type != CMP_SI)
568 operands[1] = hppa_compare_op0;
569 operands[2] = hppa_compare_op1;
572 (define_expand "sgtu"
573 [(set (match_operand:SI 0 "register_operand" "")
574 (gtu:SI (match_dup 1)
579 if (hppa_branch_type != CMP_SI)
581 operands[1] = hppa_compare_op0;
582 operands[2] = hppa_compare_op1;
585 (define_expand "sleu"
586 [(set (match_operand:SI 0 "register_operand" "")
587 (leu:SI (match_dup 1)
592 if (hppa_branch_type != CMP_SI)
594 operands[1] = hppa_compare_op0;
595 operands[2] = hppa_compare_op1;
598 (define_expand "sgeu"
599 [(set (match_operand:SI 0 "register_operand" "")
600 (geu:SI (match_dup 1)
605 if (hppa_branch_type != CMP_SI)
607 operands[1] = hppa_compare_op0;
608 operands[2] = hppa_compare_op1;
611 ;; Instruction canonicalization puts immediate operands second, which
612 ;; is the reverse of what we want.
615 [(set (match_operand:SI 0 "register_operand" "=r")
616 (match_operator:SI 3 "comparison_operator"
617 [(match_operand:SI 1 "register_operand" "r")
618 (match_operand:SI 2 "arith11_operand" "rI")]))]
620 "com%I2clr,%B3 %2,%1,%0\;ldi 1,%0"
621 [(set_attr "type" "binary")
622 (set_attr "length" "8")])
624 (define_insn "iorscc"
625 [(set (match_operand:SI 0 "register_operand" "=r")
626 (ior:SI (match_operator:SI 3 "comparison_operator"
627 [(match_operand:SI 1 "register_operand" "r")
628 (match_operand:SI 2 "arith11_operand" "rI")])
629 (match_operator:SI 6 "comparison_operator"
630 [(match_operand:SI 4 "register_operand" "r")
631 (match_operand:SI 5 "arith11_operand" "rI")])))]
633 "com%I2clr,%S3 %2,%1,0\;com%I5clr,%B6 %5,%4,%0\;ldi 1,%0"
634 [(set_attr "type" "binary")
635 (set_attr "length" "12")])
637 ;; Combiner patterns for common operations performed with the output
638 ;; from an scc insn (negscc and incscc).
;; Combiner patterns for common operations performed with the output
;; from an scc insn (negscc and incscc).
;; negscc: %0 = -(cond).  The comclr (with negated condition %B3)
;; clears %0 and nullifies the following ldi when the comparison
;; fails, so %0 ends up -1 when the comparison holds and 0 otherwise.
639 (define_insn "negscc"
640 [(set (match_operand:SI 0 "register_operand" "=r")
641 (neg:SI (match_operator:SI 3 "comparison_operator"
642 [(match_operand:SI 1 "register_operand" "r")
643 (match_operand:SI 2 "arith11_operand" "rI")])))]
645 "com%I2clr,%B3 %2,%1,%0\;ldi -1,%0"
646 [(set_attr "type" "binary")
647 (set_attr "length" "8")])
649 ;; Patterns for adding/subtracting the result of a boolean expression from
650 ;; a register. First we have special patterns that make use of the carry
651 ;; bit, and output only two instructions. For the cases we can't in
652 ;; general do in two instructions, the incscc pattern at the end outputs
653 ;; two or three instructions.
656 [(set (match_operand:SI 0 "register_operand" "=r")
657 (plus:SI (leu:SI (match_operand:SI 2 "register_operand" "r")
658 (match_operand:SI 3 "arith11_operand" "rI"))
659 (match_operand:SI 1 "register_operand" "r")))]
661 "sub%I3 %3,%2,0\;addc 0,%1,%0"
662 [(set_attr "type" "binary")
663 (set_attr "length" "8")])
665 ; This need only accept registers for op3, since canonicalization
666 ; replaces geu with gtu when op3 is an integer.
668 [(set (match_operand:SI 0 "register_operand" "=r")
669 (plus:SI (geu:SI (match_operand:SI 2 "register_operand" "r")
670 (match_operand:SI 3 "register_operand" "r"))
671 (match_operand:SI 1 "register_operand" "r")))]
673 "sub %2,%3,0\;addc 0,%1,%0"
674 [(set_attr "type" "binary")
675 (set_attr "length" "8")])
677 ; Match only integers for op3 here. This is used as canonical form of the
678 ; geu pattern when op3 is an integer. Don't match registers since we can't
679 ; make better code than the general incscc pattern.
681 [(set (match_operand:SI 0 "register_operand" "=r")
682 (plus:SI (gtu:SI (match_operand:SI 2 "register_operand" "r")
683 (match_operand:SI 3 "int11_operand" "I"))
684 (match_operand:SI 1 "register_operand" "r")))]
686 "addi %k3,%2,0\;addc 0,%1,%0"
687 [(set_attr "type" "binary")
688 (set_attr "length" "8")])
;; incscc: %0 = %1 + (cond).  Alternative 0 (op1 tied to op0, "0")
;; uses the two-insn comclr/addi form (length 8); alternative 1
;; (distinct registers, "?r") needs the three-insn addi,tr/copy form
;; (length 12) to leave %1 unmodified.
690 (define_insn "incscc"
691 [(set (match_operand:SI 0 "register_operand" "=r,r")
692 (plus:SI (match_operator:SI 4 "comparison_operator"
693 [(match_operand:SI 2 "register_operand" "r,r")
694 (match_operand:SI 3 "arith11_operand" "rI,rI")])
695 (match_operand:SI 1 "register_operand" "0,?r")))]
698 com%I3clr,%B4 %3,%2,0\;addi 1,%0,%0
699 com%I3clr,%B4 %3,%2,0\;addi,tr 1,%1,%0\;copy %1,%0"
700 [(set_attr "type" "binary,binary")
701 (set_attr "length" "8,12")])
704 [(set (match_operand:SI 0 "register_operand" "=r")
705 (minus:SI (match_operand:SI 1 "register_operand" "r")
706 (gtu:SI (match_operand:SI 2 "register_operand" "r")
707 (match_operand:SI 3 "arith11_operand" "rI"))))]
709 "sub%I3 %3,%2,0\;subb %1,0,%0"
710 [(set_attr "type" "binary")
711 (set_attr "length" "8")])
714 [(set (match_operand:SI 0 "register_operand" "=r")
715 (minus:SI (minus:SI (match_operand:SI 1 "register_operand" "r")
716 (gtu:SI (match_operand:SI 2 "register_operand" "r")
717 (match_operand:SI 3 "arith11_operand" "rI")))
718 (match_operand:SI 4 "register_operand" "r")))]
720 "sub%I3 %3,%2,0\;subb %1,%4,%0"
721 [(set_attr "type" "binary")
722 (set_attr "length" "8")])
724 ; This need only accept registers for op3, since canonicalization
725 ; replaces ltu with leu when op3 is an integer.
727 [(set (match_operand:SI 0 "register_operand" "=r")
728 (minus:SI (match_operand:SI 1 "register_operand" "r")
729 (ltu:SI (match_operand:SI 2 "register_operand" "r")
730 (match_operand:SI 3 "register_operand" "r"))))]
732 "sub %2,%3,0\;subb %1,0,%0"
733 [(set_attr "type" "binary")
734 (set_attr "length" "8")])
737 [(set (match_operand:SI 0 "register_operand" "=r")
738 (minus:SI (minus:SI (match_operand:SI 1 "register_operand" "r")
739 (ltu:SI (match_operand:SI 2 "register_operand" "r")
740 (match_operand:SI 3 "register_operand" "r")))
741 (match_operand:SI 4 "register_operand" "r")))]
743 "sub %2,%3,0\;subb %1,%4,%0"
744 [(set_attr "type" "binary")
745 (set_attr "length" "8")])
747 ; Match only integers for op3 here. This is used as canonical form of the
748 ; ltu pattern when op3 is an integer. Don't match registers since we can't
749 ; make better code than the general incscc pattern.
751 [(set (match_operand:SI 0 "register_operand" "=r")
752 (minus:SI (match_operand:SI 1 "register_operand" "r")
753 (leu:SI (match_operand:SI 2 "register_operand" "r")
754 (match_operand:SI 3 "int11_operand" "I"))))]
756 "addi %k3,%2,0\;subb %1,0,%0"
757 [(set_attr "type" "binary")
758 (set_attr "length" "8")])
761 [(set (match_operand:SI 0 "register_operand" "=r")
762 (minus:SI (minus:SI (match_operand:SI 1 "register_operand" "r")
763 (leu:SI (match_operand:SI 2 "register_operand" "r")
764 (match_operand:SI 3 "int11_operand" "I")))
765 (match_operand:SI 4 "register_operand" "r")))]
767 "addi %k3,%2,0\;subb %1,%4,%0"
768 [(set_attr "type" "binary")
769 (set_attr "length" "8")])
;; decscc: %0 = %1 - (cond), the mirror of incscc above.  Same two
;; alternatives: op1 tied to op0 gives the two-insn form (length 8),
;; distinct registers need addi,tr plus copy (length 12).
771 (define_insn "decscc"
772 [(set (match_operand:SI 0 "register_operand" "=r,r")
773 (minus:SI (match_operand:SI 1 "register_operand" "0,?r")
774 (match_operator:SI 4 "comparison_operator"
775 [(match_operand:SI 2 "register_operand" "r,r")
776 (match_operand:SI 3 "arith11_operand" "rI,rI")])))]
779 com%I3clr,%B4 %3,%2,0\;addi -1,%0,%0
780 com%I3clr,%B4 %3,%2,0\;addi,tr -1,%1,%0\;copy %1,%0"
781 [(set_attr "type" "binary,binary")
782 (set_attr "length" "8,12")])
784 ; Patterns for max and min. (There is no need for an earlyclobber in the
785 ; last alternative since the middle alternative will match if op0 == op1.)
;; Min/max patterns.  Each is a comclr/comiclr (register / 11-bit
;; immediate alternative) that conditionally nullifies the following
;; copy or ldi.  Signed forms use the ,> / ,< completers; unsigned
;; forms use ,>> / ,<<.  The signed patterns have a third alternative
;; ("M", op1 in a distinct register) writing through the comclr target.
787 (define_insn "sminsi3"
788 [(set (match_operand:SI 0 "register_operand" "=r,r,r")
789 (smin:SI (match_operand:SI 1 "register_operand" "%0,0,r")
790 (match_operand:SI 2 "arith11_operand" "r,I,M")))]
793 comclr,> %2,%0,0\;copy %2,%0
794 comiclr,> %2,%0,0\;ldi %2,%0
795 comclr,> %1,%2,%0\;copy %1,%0"
796 [(set_attr "type" "multi,multi,multi")
797 (set_attr "length" "8,8,8")])
;; Unsigned minimum: as sminsi3 but with the unsigned ,>> completer
;; and no third (M) alternative.
799 (define_insn "uminsi3"
800 [(set (match_operand:SI 0 "register_operand" "=r,r")
801 (umin:SI (match_operand:SI 1 "register_operand" "%0,0")
802 (match_operand:SI 2 "arith11_operand" "r,I")))]
805 comclr,>> %2,%0,0\;copy %2,%0
806 comiclr,>> %2,%0,0\;ldi %2,%0"
807 [(set_attr "type" "multi,multi")
808 (set_attr "length" "8,8")])
;; Signed maximum: mirror of sminsi3 with the ,< completer.
810 (define_insn "smaxsi3"
811 [(set (match_operand:SI 0 "register_operand" "=r,r,r")
812 (smax:SI (match_operand:SI 1 "register_operand" "%0,0,r")
813 (match_operand:SI 2 "arith11_operand" "r,I,M")))]
816 comclr,< %2,%0,0\;copy %2,%0
817 comiclr,< %2,%0,0\;ldi %2,%0
818 comclr,< %1,%2,%0\;copy %1,%0"
819 [(set_attr "type" "multi,multi,multi")
820 (set_attr "length" "8,8,8")])
;; Unsigned maximum: mirror of uminsi3 with the ,<< completer.
822 (define_insn "umaxsi3"
823 [(set (match_operand:SI 0 "register_operand" "=r,r")
824 (umax:SI (match_operand:SI 1 "register_operand" "%0,0")
825 (match_operand:SI 2 "arith11_operand" "r,I")))]
828 comclr,<< %2,%0,0\;copy %2,%0
829 comiclr,<< %2,%0,0\;ldi %2,%0"
830 [(set_attr "type" "multi,multi")
831 (set_attr "length" "8,8")])
;; abssi2: the "or,>=" copies %1 into %0 and nullifies the following
;; subi when the value is already non-negative; otherwise the subi
;; computes %0 = 0 - %0, i.e. the negation.
833 (define_insn "abssi2"
834 [(set (match_operand:SI 0 "register_operand" "=r")
835 (abs:SI (match_operand:SI 1 "register_operand" "r")))]
837 "or,>= %%r0,%1,%0\;subi 0,%0,%0"
838 [(set_attr "type" "multi")
839 (set_attr "length" "8")])
841 ;;; Experimental conditional move patterns
843 (define_expand "movsicc"
844 [(set (match_operand:SI 0 "register_operand" "")
846 (match_operator 1 "comparison_operator"
849 (match_operand:SI 2 "reg_or_cint_move_operand" "")
850 (match_operand:SI 3 "reg_or_cint_move_operand" "")))]
854 enum rtx_code code = GET_CODE (operands[1]);
856 if (hppa_branch_type != CMP_SI)
859 /* operands[1] is currently the result of compare_from_rtx. We want to
860 emit a compare of the original operands. */
861 operands[1] = gen_rtx_fmt_ee (code, SImode, hppa_compare_op0, hppa_compare_op1);
862 operands[4] = hppa_compare_op0;
863 operands[5] = hppa_compare_op1;
866 ; We need the first constraint alternative in order to avoid
867 ; earlyclobbers on all other alternatives.
869 [(set (match_operand:SI 0 "register_operand" "=r,r,r,r,r")
871 (match_operator 5 "comparison_operator"
872 [(match_operand:SI 3 "register_operand" "r,r,r,r,r")
873 (match_operand:SI 4 "arith11_operand" "rI,rI,rI,rI,rI")])
874 (match_operand:SI 1 "reg_or_cint_move_operand" "0,r,J,N,K")
878 com%I4clr,%S5 %4,%3,0\;ldi 0,%0
879 com%I4clr,%B5 %4,%3,%0\;copy %1,%0
880 com%I4clr,%B5 %4,%3,%0\;ldi %1,%0
881 com%I4clr,%B5 %4,%3,%0\;ldil L'%1,%0
882 com%I4clr,%B5 %4,%3,%0\;zdepi %Z1,%0"
883 [(set_attr "type" "multi,multi,multi,multi,nullshift")
884 (set_attr "length" "8,8,8,8,8")])
887 [(set (match_operand:SI 0 "register_operand" "=r,r,r,r,r,r,r,r")
889 (match_operator 5 "comparison_operator"
890 [(match_operand:SI 3 "register_operand" "r,r,r,r,r,r,r,r")
891 (match_operand:SI 4 "arith11_operand" "rI,rI,rI,rI,rI,rI,rI,rI")])
892 (match_operand:SI 1 "reg_or_cint_move_operand" "0,0,0,0,r,J,N,K")
893 (match_operand:SI 2 "reg_or_cint_move_operand" "r,J,N,K,0,0,0,0")))]
896 com%I4clr,%S5 %4,%3,0\;copy %2,%0
897 com%I4clr,%S5 %4,%3,0\;ldi %2,%0
898 com%I4clr,%S5 %4,%3,0\;ldil L'%2,%0
899 com%I4clr,%S5 %4,%3,0\;zdepi %Z2,%0
900 com%I4clr,%B5 %4,%3,0\;copy %1,%0
901 com%I4clr,%B5 %4,%3,0\;ldi %1,%0
902 com%I4clr,%B5 %4,%3,0\;ldil L'%1,%0
903 com%I4clr,%B5 %4,%3,0\;zdepi %Z1,%0"
904 [(set_attr "type" "multi,multi,multi,nullshift,multi,multi,multi,nullshift")
905 (set_attr "length" "8,8,8,8,8,8,8,8")])
907 ;; Conditional Branches
911 (if_then_else (eq (match_dup 1) (match_dup 2))
912 (label_ref (match_operand 0 "" ""))
917 if (hppa_branch_type != CMP_SI)
919 emit_insn (gen_cmp_fp (EQ, hppa_compare_op0, hppa_compare_op1));
920 emit_bcond_fp (NE, operands[0]);
923 /* set up operands from compare. */
924 operands[1] = hppa_compare_op0;
925 operands[2] = hppa_compare_op1;
926 /* fall through and generate default code */
931 (if_then_else (ne (match_dup 1) (match_dup 2))
932 (label_ref (match_operand 0 "" ""))
937 if (hppa_branch_type != CMP_SI)
939 emit_insn (gen_cmp_fp (NE, hppa_compare_op0, hppa_compare_op1));
940 emit_bcond_fp (NE, operands[0]);
943 operands[1] = hppa_compare_op0;
944 operands[2] = hppa_compare_op1;
949 (if_then_else (gt (match_dup 1) (match_dup 2))
950 (label_ref (match_operand 0 "" ""))
955 if (hppa_branch_type != CMP_SI)
957 emit_insn (gen_cmp_fp (GT, hppa_compare_op0, hppa_compare_op1));
958 emit_bcond_fp (NE, operands[0]);
961 operands[1] = hppa_compare_op0;
962 operands[2] = hppa_compare_op1;
967 (if_then_else (lt (match_dup 1) (match_dup 2))
968 (label_ref (match_operand 0 "" ""))
973 if (hppa_branch_type != CMP_SI)
975 emit_insn (gen_cmp_fp (LT, hppa_compare_op0, hppa_compare_op1));
976 emit_bcond_fp (NE, operands[0]);
979 operands[1] = hppa_compare_op0;
980 operands[2] = hppa_compare_op1;
985 (if_then_else (ge (match_dup 1) (match_dup 2))
986 (label_ref (match_operand 0 "" ""))
991 if (hppa_branch_type != CMP_SI)
993 emit_insn (gen_cmp_fp (GE, hppa_compare_op0, hppa_compare_op1));
994 emit_bcond_fp (NE, operands[0]);
997 operands[1] = hppa_compare_op0;
998 operands[2] = hppa_compare_op1;
1001 (define_expand "ble"
1003 (if_then_else (le (match_dup 1) (match_dup 2))
1004 (label_ref (match_operand 0 "" ""))
1009 if (hppa_branch_type != CMP_SI)
1011 emit_insn (gen_cmp_fp (LE, hppa_compare_op0, hppa_compare_op1));
1012 emit_bcond_fp (NE, operands[0]);
1015 operands[1] = hppa_compare_op0;
1016 operands[2] = hppa_compare_op1;
1019 (define_expand "bgtu"
1021 (if_then_else (gtu (match_dup 1) (match_dup 2))
1022 (label_ref (match_operand 0 "" ""))
1027 if (hppa_branch_type != CMP_SI)
1029 operands[1] = hppa_compare_op0;
1030 operands[2] = hppa_compare_op1;
1033 (define_expand "bltu"
1035 (if_then_else (ltu (match_dup 1) (match_dup 2))
1036 (label_ref (match_operand 0 "" ""))
1041 if (hppa_branch_type != CMP_SI)
1043 operands[1] = hppa_compare_op0;
1044 operands[2] = hppa_compare_op1;
1047 (define_expand "bgeu"
1049 (if_then_else (geu (match_dup 1) (match_dup 2))
1050 (label_ref (match_operand 0 "" ""))
1055 if (hppa_branch_type != CMP_SI)
1057 operands[1] = hppa_compare_op0;
1058 operands[2] = hppa_compare_op1;
1061 (define_expand "bleu"
1063 (if_then_else (leu (match_dup 1) (match_dup 2))
1064 (label_ref (match_operand 0 "" ""))
1069 if (hppa_branch_type != CMP_SI)
1071 operands[1] = hppa_compare_op0;
1072 operands[2] = hppa_compare_op1;
1075 ;; Match the branch patterns.
1078 ;; Note a long backward conditional branch with an annulled delay slot
1079 ;; has a length of 12.
1083 (match_operator 3 "comparison_operator"
1084 [(match_operand:SI 1 "reg_or_0_operand" "rM")
1085 (match_operand:SI 2 "arith5_operand" "rL")])
1086 (label_ref (match_operand 0 "" ""))
1091 return output_cbranch (operands, INSN_ANNULLED_BRANCH_P (insn),
1092 get_attr_length (insn), 0, insn);
1094 [(set_attr "type" "cbranch")
1095 (set (attr "length")
1096 (cond [(lt (abs (minus (match_dup 0) (plus (pc) (const_int 8))))
1099 (lt (abs (minus (match_dup 0) (plus (pc) (const_int 8))))
1102 (eq (symbol_ref "flag_pic") (const_int 0))
1106 ;; Match the negated branch.
1111 (match_operator 3 "comparison_operator"
1112 [(match_operand:SI 1 "reg_or_0_operand" "rM")
1113 (match_operand:SI 2 "arith5_operand" "rL")])
1115 (label_ref (match_operand 0 "" ""))))]
1119 return output_cbranch (operands, INSN_ANNULLED_BRANCH_P (insn),
1120 get_attr_length (insn), 1, insn);
1122 [(set_attr "type" "cbranch")
1123 (set (attr "length")
1124 (cond [(lt (abs (minus (match_dup 0) (plus (pc) (const_int 8))))
1127 (lt (abs (minus (match_dup 0) (plus (pc) (const_int 8))))
1130 (eq (symbol_ref "flag_pic") (const_int 0))
1134 ;; Branch on Bit patterns.
1138 (ne (zero_extract:SI (match_operand:SI 0 "register_operand" "r")
1140 (match_operand:SI 1 "uint5_operand" ""))
1142 (label_ref (match_operand 2 "" ""))
1147 return output_bb (operands, INSN_ANNULLED_BRANCH_P (insn),
1148 get_attr_length (insn), 0, insn, 0);
1150 [(set_attr "type" "cbranch")
1151 (set (attr "length")
1152 (if_then_else (lt (abs (minus (match_dup 2) (plus (pc) (const_int 8))))
1160 (ne (zero_extract:SI (match_operand:SI 0 "register_operand" "r")
1162 (match_operand:SI 1 "uint5_operand" ""))
1165 (label_ref (match_operand 2 "" ""))))]
1169 return output_bb (operands, INSN_ANNULLED_BRANCH_P (insn),
1170 get_attr_length (insn), 1, insn, 0);
1172 [(set_attr "type" "cbranch")
1173 (set (attr "length")
1174 (if_then_else (lt (abs (minus (match_dup 2) (plus (pc) (const_int 8))))
1182 (eq (zero_extract:SI (match_operand:SI 0 "register_operand" "r")
1184 (match_operand:SI 1 "uint5_operand" ""))
1186 (label_ref (match_operand 2 "" ""))
1191 return output_bb (operands, INSN_ANNULLED_BRANCH_P (insn),
1192 get_attr_length (insn), 0, insn, 1);
1194 [(set_attr "type" "cbranch")
1195 (set (attr "length")
1196 (if_then_else (lt (abs (minus (match_dup 2) (plus (pc) (const_int 8))))
1204 (eq (zero_extract:SI (match_operand:SI 0 "register_operand" "r")
1206 (match_operand:SI 1 "uint5_operand" ""))
1209 (label_ref (match_operand 2 "" ""))))]
1213 return output_bb (operands, INSN_ANNULLED_BRANCH_P (insn),
1214 get_attr_length (insn), 1, insn, 1);
1216 [(set_attr "type" "cbranch")
1217 (set (attr "length")
1218 (if_then_else (lt (abs (minus (match_dup 2) (plus (pc) (const_int 8))))
1223 ;; Branch on Variable Bit patterns.
1227 (ne (zero_extract:SI (match_operand:SI 0 "register_operand" "r")
1229 (match_operand:SI 1 "register_operand" "q"))
1231 (label_ref (match_operand 2 "" ""))
1236 return output_bvb (operands, INSN_ANNULLED_BRANCH_P (insn),
1237 get_attr_length (insn), 0, insn, 0);
1239 [(set_attr "type" "cbranch")
1240 (set (attr "length")
1241 (if_then_else (lt (abs (minus (match_dup 2) (plus (pc) (const_int 8))))
1249 (ne (zero_extract:SI (match_operand:SI 0 "register_operand" "r")
1251 (match_operand:SI 1 "register_operand" "q"))
1254 (label_ref (match_operand 2 "" ""))))]
1258 return output_bvb (operands, INSN_ANNULLED_BRANCH_P (insn),
1259 get_attr_length (insn), 1, insn, 0);
1261 [(set_attr "type" "cbranch")
1262 (set (attr "length")
1263 (if_then_else (lt (abs (minus (match_dup 2) (plus (pc) (const_int 8))))
1271 (eq (zero_extract:SI (match_operand:SI 0 "register_operand" "r")
1273 (match_operand:SI 1 "register_operand" "q"))
1275 (label_ref (match_operand 2 "" ""))
1280 return output_bvb (operands, INSN_ANNULLED_BRANCH_P (insn),
1281 get_attr_length (insn), 0, insn, 1);
1283 [(set_attr "type" "cbranch")
1284 (set (attr "length")
1285 (if_then_else (lt (abs (minus (match_dup 2) (plus (pc) (const_int 8))))
1293 (eq (zero_extract:SI (match_operand:SI 0 "register_operand" "r")
1295 (match_operand:SI 1 "register_operand" "q"))
1298 (label_ref (match_operand 2 "" ""))))]
1302 return output_bvb (operands, INSN_ANNULLED_BRANCH_P (insn),
1303 get_attr_length (insn), 1, insn, 1);
1305 [(set_attr "type" "cbranch")
1306 (set (attr "length")
1307 (if_then_else (lt (abs (minus (match_dup 2) (plus (pc) (const_int 8))))
1312 ;; Floating point branches
1314 [(set (pc) (if_then_else (ne (reg:CCFP 0) (const_int 0))
1315 (label_ref (match_operand 0 "" ""))
1317 "! TARGET_SOFT_FLOAT"
1320 if (INSN_ANNULLED_BRANCH_P (insn))
1321 return \"ftest\;bl,n %0,0\";
1323 return \"ftest\;bl%* %0,0\";
1325 [(set_attr "type" "fbranch")
1326 (set_attr "length" "8")])
1329 [(set (pc) (if_then_else (ne (reg:CCFP 0) (const_int 0))
1331 (label_ref (match_operand 0 "" ""))))]
1332 "! TARGET_SOFT_FLOAT"
1335 if (INSN_ANNULLED_BRANCH_P (insn))
1336 return \"ftest\;add,tr 0,0,0\;bl,n %0,0\";
1338 return \"ftest\;add,tr 0,0,0\;bl%* %0,0\";
1340 [(set_attr "type" "fbranch")
1341 (set_attr "length" "12")])
1343 ;; Move instructions
;; Expand SImode moves.  All the real work (legitimizing constants,
;; symbolic addresses, secondary-reload cases) is done by
;; emit_move_sequence () in pa.c; the DONE/fall-through tail of the
;; preparation code is elided in this copy.
1345 (define_expand "movsi"
1346 [(set (match_operand:SI 0 "general_operand" "")
1347 (match_operand:SI 1 "general_operand" ""))]
1351 if (emit_move_sequence (operands, SImode, 0))
1355 ;; Reloading an SImode or DImode value requires a scratch register if
1356 ;; going into or out of floating point registers.
;; Reload helper: load an SImode value that is not in a hard register into
;; operand 0, using operand 2 as the integer scratch required for moves in
;; or out of floating-point registers.  When emit_move_sequence already
;; emitted the whole sequence, a bare SET is emitted by hand so the
;; scratch clobber never reaches the insn stream (see comment in body).
1358 (define_expand "reload_insi"
1359 [(set (match_operand:SI 0 "register_operand" "=Z")
1360 (match_operand:SI 1 "non_hard_reg_operand" ""))
1361 (clobber (match_operand:SI 2 "register_operand" "=&r"))]
1365 if (emit_move_sequence (operands, SImode, operands[2]))
1368 /* We don't want the clobber emitted, so handle this ourselves. */
1369 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
;; Reload helper, output direction: store an SImode register into a
;; non-hard-register destination using scratch operand 2.  Mirrors
;; reload_insi: if emit_move_sequence handled it, emit a plain SET so the
;; clobber is suppressed.
1373 (define_expand "reload_outsi"
1374 [(set (match_operand:SI 0 "non_hard_reg_operand" "")
1375 (match_operand:SI 1 "register_operand" "Z"))
1376 (clobber (match_operand:SI 2 "register_operand" "=&r"))]
1380 if (emit_move_sequence (operands, SImode, operands[2]))
1383 /* We don't want the clobber emitted, so handle this ourselves. */
1384 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
1388 ;;; pic symbol references
1391 [(set (match_operand:SI 0 "register_operand" "=r")
1392 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "r")
1393 (match_operand:SI 2 "symbolic_operand" ""))))]
1394 "flag_pic && operands[1] == pic_offset_table_rtx"
1396 [(set_attr "type" "load")
1397 (set_attr "length" "4")])
1400 [(set (match_operand:SI 0 "reg_or_nonsymb_mem_operand"
1401 "=r,r,r,r,r,Q,*q,!f,f,*TR")
1402 (match_operand:SI 1 "move_operand"
1403 "r,J,N,K,RQ,rM,rM,!fM,*RT,f"))]
1404 "(register_operand (operands[0], SImode)
1405 || reg_or_0_operand (operands[1], SImode))
1406 && ! TARGET_SOFT_FLOAT"
1418 [(set_attr "type" "move,move,move,shift,load,store,move,fpalu,fpload,fpstore")
1419 (set_attr "pa_combine_type" "addmove")
1420 (set_attr "length" "4,4,4,4,4,4,4,4,4,4")])
1423 [(set (match_operand:SI 0 "reg_or_nonsymb_mem_operand"
1425 (match_operand:SI 1 "move_operand"
1426 "r,J,N,K,RQ,rM,rM"))]
1427 "(register_operand (operands[0], SImode)
1428 || reg_or_0_operand (operands[1], SImode))
1429 && TARGET_SOFT_FLOAT"
1438 [(set_attr "type" "move,move,move,move,load,store,move")
1439 (set_attr "pa_combine_type" "addmove")
1440 (set_attr "length" "4,4,4,4,4,4,4")])
1443 [(set (match_operand:SI 0 "register_operand" "=r")
1444 (mem:SI (plus:SI (match_operand:SI 1 "basereg_operand" "r")
1445 (match_operand:SI 2 "register_operand" "r"))))]
1446 "! TARGET_DISABLE_INDEXING"
1449 /* Reload can create backwards (relative to cse) unscaled index
1450 address modes when eliminating registers and possibly for
1451 pseudos that don't get hard registers. Deal with it. */
1452 if (operands[2] == hard_frame_pointer_rtx
1453 || operands[2] == stack_pointer_rtx)
1454 return \"ldwx %1(0,%2),%0\";
1456 return \"ldwx %2(0,%1),%0\";
1458 [(set_attr "type" "load")
1459 (set_attr "length" "4")])
1462 [(set (match_operand:SI 0 "register_operand" "=r")
1463 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "r")
1464 (match_operand:SI 2 "basereg_operand" "r"))))]
1465 "! TARGET_DISABLE_INDEXING"
1468 /* Reload can create backwards (relative to cse) unscaled index
1469 address modes when eliminating registers and possibly for
1470 pseudos that don't get hard registers. Deal with it. */
1471 if (operands[1] == hard_frame_pointer_rtx
1472 || operands[1] == stack_pointer_rtx)
1473 return \"ldwx %2(0,%1),%0\";
1475 return \"ldwx %1(0,%2),%0\";
1477 [(set_attr "type" "load")
1478 (set_attr "length" "4")])
1480 ;; Load or store with base-register modification.
;; Word load with pre-modification of the base register (operand 1 is
;; bumped by the constant displacement as part of the load).  A negative
;; displacement is encoded with ldwm, a non-negative one with ldws,mb.
1482 (define_insn "pre_ldwm"
1483 [(set (match_operand:SI 0 "register_operand" "=r")
1484 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "+r")
1485 (match_operand:SI 2 "pre_cint_operand" ""))))
1487 (plus:SI (match_dup 1) (match_dup 2)))]
1491 if (INTVAL (operands[2]) < 0)
1492 return \"ldwm %2(0,%1),%0\";
1493 return \"ldws,mb %2(0,%1),%0\";
1495 [(set_attr "type" "load")
1496 (set_attr "length" "4")])
;; Word store with pre-modification of the base register (operand 0 is
;; bumped by the displacement).  Negative displacement -> stwm, otherwise
;; stws,mb.  %r2 emits %r0 for a (const_int 0) source.
1498 (define_insn "pre_stwm"
1499 [(set (mem:SI (plus:SI (match_operand:SI 0 "register_operand" "+r")
1500 (match_operand:SI 1 "pre_cint_operand" "")))
1501 (match_operand:SI 2 "reg_or_0_operand" "rM"))
1503 (plus:SI (match_dup 0) (match_dup 1)))]
1507 if (INTVAL (operands[1]) < 0)
1508 return \"stwm %r2,%1(0,%0)\";
1509 return \"stws,mb %r2,%1(0,%0)\";
1511 [(set_attr "type" "store")
1512 (set_attr "length" "4")])
;; Word load with post-modification: the load uses the old base, then
;; operand 1 is bumped by the displacement.  Positive displacement ->
;; ldwm, otherwise ldws,ma.
1514 (define_insn "post_ldwm"
1515 [(set (match_operand:SI 0 "register_operand" "=r")
1516 (mem:SI (match_operand:SI 1 "register_operand" "+r")))
1518 (plus:SI (match_dup 1)
1519 (match_operand:SI 2 "post_cint_operand" "")))]
1523 if (INTVAL (operands[2]) > 0)
1524 return \"ldwm %2(0,%1),%0\";
1525 return \"ldws,ma %2(0,%1),%0\";
1527 [(set_attr "type" "load")
1528 (set_attr "length" "4")])
;; Word store with post-modification of the base register.  Positive
;; displacement -> stwm, otherwise stws,ma.  %r1 emits %r0 for a
;; (const_int 0) source.
1530 (define_insn "post_stwm"
1531 [(set (mem:SI (match_operand:SI 0 "register_operand" "+r"))
1532 (match_operand:SI 1 "reg_or_0_operand" "rM"))
1534 (plus:SI (match_dup 0)
1535 (match_operand:SI 2 "post_cint_operand" "")))]
1539 if (INTVAL (operands[2]) > 0)
1540 return \"stwm %r1,%2(0,%0)\";
1541 return \"stws,ma %r1,%2(0,%0)\";
1543 [(set_attr "type" "store")
1544 (set_attr "length" "4")])
1547 ;; Note since this pattern can be created at reload time (via movsi), all
1548 ;; the same rules for movsi apply here. (no new pseudos, no temporaries).
;; Load a label's address under PIC.  "bl .+8,%0" captures the PC in
;; operand 0, "depi 0,31,2,%0" clears the two low (privilege-level) bits,
;; and a freshly emitted local label serves as the reference point for a
;; label-difference: a single ldo when the target label is known to be
;; within reach (< 8100 bytes of this insn), otherwise an addil L%%/
;; ldo R%% pair.  Actual length is 12 or 16; the attribute conservatively
;; claims 16.  NOTE(review): some lines of the output code are elided in
;; this copy.
1549 (define_insn "pic_load_label"
1550 [(set (match_operand:SI 0 "register_operand" "=a")
1551 (match_operand:SI 1 "pic_label_operand" ""))]
1555 rtx label_rtx = gen_label_rtx ();
1557 extern FILE *asm_out_file;
1559 xoperands[0] = operands[0];
1560 xoperands[1] = operands[1];
1561 xoperands[2] = label_rtx;
1562 output_asm_insn (\"bl .+8,%0\", xoperands);
1563 output_asm_insn (\"depi 0,31,2,%0\", xoperands);
1564 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, \"L\",
1565 CODE_LABEL_NUMBER (label_rtx));
1567 /* If we're trying to load the address of a label that happens to be
1568 close, then we can use a shorter sequence. */
1569 if (GET_CODE (operands[1]) == LABEL_REF
1571 && abs (insn_addresses[INSN_UID (XEXP (operands[1], 0))]
1572 - insn_addresses[INSN_UID (insn)]) < 8100)
1574 /* Prefixing with R% here is wrong, it extracts just 11 bits and is
1575 always non-negative. */
1576 output_asm_insn (\"ldo %1-%2(%0),%0\", xoperands);
1580 output_asm_insn (\"addil L%%%1-%2,%0\", xoperands);
1581 output_asm_insn (\"ldo R%%%1-%2(%0),%0\", xoperands);
1585 [(set_attr "type" "multi")
1586 (set_attr "length" "16")]) ; 12 or 16
;; PIC: add the high part of symbolic address operand 2 to register
;; operand 1 (normally the PIC register); result constrained to %r1
;; ("=a").  Remaining condition lines and the output template are elided
;; in this copy.
;; NOTE(review): function_label_operand is called with a single argument
;; here -- confirm against its declaration in pa.c (predicates normally
;; also take a machine mode).
1588 (define_insn "pic2_highpart"
1589 [(set (match_operand:SI 0 "register_operand" "=a")
1590 (plus:SI (match_operand:SI 1 "register_operand" "r")
1591 (high:SI (match_operand 2 "" ""))))]
1592 "symbolic_operand (operands[2], Pmode)
1593 && ! function_label_operand (operands[2])
1596 [(set_attr "type" "binary")
1597 (set_attr "length" "4")])
1599 ; We need this to make sure CSE doesn't simplify a memory load with a
1600 ; symbolic address, whose content it thinks it knows. For PIC, what CSE
1601 ; thinks is the real value will be the address of that value.
;; PIC: load the word addressed by base + low part of symbolic operand 2,
;; emitted as ldw RT'%G2.  The unspec wrapper keeps CSE from assuming it
;; knows the loaded contents -- under PIC the location actually holds the
;; symbol's address (see the comment preceding this pattern).
1602 (define_insn "pic2_lo_sum"
1603 [(set (match_operand:SI 0 "register_operand" "=r")
1604 (mem:SI (lo_sum:SI (match_operand:SI 1 "register_operand" "r")
1605 (unspec:SI [(match_operand:SI 2 "symbolic_operand" "")] 0))))]
1611 return \"ldw RT'%G2(%1),%0\";
1613 [(set_attr "type" "load")
1614 (set_attr "length" "4")])
1617 ;; Always use addil rather than ldil;add sequences. This allows the
1618 ;; HP linker to eliminate the dp relocation if the symbolic operand
1619 ;; lives in the TEXT space.
1621 [(set (match_operand:SI 0 "register_operand" "=a")
1622 (high:SI (match_operand 1 "" "")))]
1623 "symbolic_operand (operands[1], Pmode)
1624 && ! function_label_operand (operands[1])
1625 && ! read_only_operand (operands[1])
1629 if (TARGET_LONG_LOAD_STORE)
1630 return \"addil NLR'%H1,%%r27\;ldo N'%H1(%%r1),%%r1\";
1632 return \"addil LR'%H1,%%r27\";
1634 [(set_attr "type" "binary")
1635 (set (attr "length")
1636 (if_then_else (eq (symbol_ref "TARGET_LONG_LOAD_STORE") (const_int 0))
1641 ;; This is for use in the prologue/epilogue code. We need it
1642 ;; to add large constants to a stack pointer or frame pointer.
1643 ;; Because of the additional %r1 pressure, we probably do not
1644 ;; want to use this in general code, so make it available
1645 ;; only after reload.
;; Add the high part of a large constant (operand 2) to register operand
;; 1; used by prologue/epilogue code to adjust the stack or frame pointer
;; (it burns %r1, so it is kept out of general code -- see preceding
;; comment).  Alternative 1 synthesizes the value with ldil;addl (length
;; 8); alternative 0's addil template line is elided in this copy
;; (length 4).
1646 (define_insn "add_high_const"
1647 [(set (match_operand:SI 0 "register_operand" "=!a,*r")
1648 (plus:SI (match_operand:SI 1 "register_operand" "r,r")
1649 (high:SI (match_operand 2 "const_int_operand" ""))))]
1653 ldil L'%G2,%0\;addl %0,%1,%0"
1654 [(set_attr "type" "binary,binary")
1655 (set_attr "length" "4,8")])
;; Set a register to the high part of operand 1: ldil LR' for symbolic
;; operands, ldil L' for plain constants.  Under PIC, symbolic high parts
;; must not be formed this way, hence the !flag_pic guard.
;; FIX(review): the insn condition used to read
;;   (!flag_pic || !symbolic_operand (operands[1]), Pmode)
;; The misplaced parenthesis made this a C comma expression that always
;; evaluates to Pmode (non-zero), so the insn matched even for symbolic
;; operands under PIC, and symbolic_operand was called with only one
;; argument.  The parenthesis is moved so the mode reaches the predicate.
1658 [(set (match_operand:SI 0 "register_operand" "=r")
1659 (high:SI (match_operand 1 "" "")))]
1660 "(!flag_pic || !symbolic_operand (operands[1], Pmode))
1661 && !is_function_label_plus_const (operands[1])"
1664 if (symbolic_operand (operands[1], Pmode))
1665 return \"ldil LR'%H1,%0\";
1667 return \"ldil L'%G1,%0\";
1669 [(set_attr "type" "move")
1670 (set_attr "length" "4")])
1673 [(set (match_operand:SI 0 "register_operand" "=r")
1674 (lo_sum:SI (match_operand:SI 1 "register_operand" "r")
1675 (match_operand:SI 2 "immediate_operand" "i")))]
1676 "!is_function_label_plus_const (operands[2])"
1679 if (flag_pic && symbolic_operand (operands[2], Pmode))
1681 else if (symbolic_operand (operands[2], Pmode))
1682 return \"ldo RR'%G2(%1),%0\";
1684 return \"ldo R'%G2(%1),%0\";
1686 [(set_attr "type" "move")
1687 (set_attr "length" "4")])
1689 ;; Now that a symbolic_address plus a constant is broken up early
1690 ;; in the compilation phase (for better CSE) we need a special
1691 ;; combiner pattern to load the symbolic address plus the constant
1692 ;; in only 2 instructions. (For cases where the symbolic address
1693 ;; was not a common subexpression.)
1695 [(set (match_operand:SI 0 "register_operand" "")
1696 (match_operand:SI 1 "symbolic_operand" ""))
1697 (clobber (match_operand:SI 2 "register_operand" ""))]
1698 "! (flag_pic && pic_label_operand (operands[1], SImode))"
1699 [(set (match_dup 2) (high:SI (match_dup 1)))
1700 (set (match_dup 0) (lo_sum:SI (match_dup 2) (match_dup 1)))]
1703 ;; hppa_legitimize_address goes to a great deal of trouble to
1704 ;; create addresses which use indexing. In some cases, this
1705 ;; is a loss because there are no store instructions which
1706 ;; allow indexed addresses (with integer register source).
1708 ;; These define_splits try to turn a 3 insn store into
1709 ;; a 2 insn store with some creative RTL rewriting.
1711 [(set (mem:SI (plus:SI (mult:SI (match_operand:SI 0 "register_operand" "")
1712 (match_operand:SI 1 "shadd_operand" ""))
1713 (plus:SI (match_operand:SI 2 "register_operand" "")
1714 (match_operand:SI 3 "const_int_operand" ""))))
1715 (match_operand:SI 4 "register_operand" ""))
1716 (clobber (match_operand:SI 5 "register_operand" ""))]
1718 [(set (match_dup 5) (plus:SI (mult:SI (match_dup 0) (match_dup 1))
1720 (set (mem:SI (plus:SI (match_dup 5) (match_dup 3))) (match_dup 4))]
1724 [(set (mem:HI (plus:SI (mult:SI (match_operand:SI 0 "register_operand" "")
1725 (match_operand:SI 1 "shadd_operand" ""))
1726 (plus:SI (match_operand:SI 2 "register_operand" "")
1727 (match_operand:SI 3 "const_int_operand" ""))))
1728 (match_operand:HI 4 "register_operand" ""))
1729 (clobber (match_operand:SI 5 "register_operand" ""))]
1731 [(set (match_dup 5) (plus:SI (mult:SI (match_dup 0) (match_dup 1))
1733 (set (mem:HI (plus:SI (match_dup 5) (match_dup 3))) (match_dup 4))]
1737 [(set (mem:QI (plus:SI (mult:SI (match_operand:SI 0 "register_operand" "")
1738 (match_operand:SI 1 "shadd_operand" ""))
1739 (plus:SI (match_operand:SI 2 "register_operand" "")
1740 (match_operand:SI 3 "const_int_operand" ""))))
1741 (match_operand:QI 4 "register_operand" ""))
1742 (clobber (match_operand:SI 5 "register_operand" ""))]
1744 [(set (match_dup 5) (plus:SI (mult:SI (match_dup 0) (match_dup 1))
1746 (set (mem:QI (plus:SI (match_dup 5) (match_dup 3))) (match_dup 4))]
;; Expand HImode moves via emit_move_sequence (pa.c); the DONE/
;; fall-through tail of the preparation code is elided in this copy.
1749 (define_expand "movhi"
1750 [(set (match_operand:HI 0 "general_operand" "")
1751 (match_operand:HI 1 "general_operand" ""))]
1755 if (emit_move_sequence (operands, HImode, 0))
1760 [(set (match_operand:HI 0 "reg_or_nonsymb_mem_operand" "=r,r,r,r,r,Q,*q,!f")
1761 (match_operand:HI 1 "move_operand" "r,J,N,K,RQ,rM,rM,!fM"))]
1762 "register_operand (operands[0], HImode)
1763 || reg_or_0_operand (operands[1], HImode)"
1773 [(set_attr "type" "move,move,move,shift,load,store,move,fpalu")
1774 (set_attr "pa_combine_type" "addmove")
1775 (set_attr "length" "4,4,4,4,4,4,4,4")])
1778 [(set (match_operand:HI 0 "register_operand" "=r")
1779 (mem:HI (plus:SI (match_operand:SI 1 "basereg_operand" "r")
1780 (match_operand:SI 2 "register_operand" "r"))))]
1781 "! TARGET_DISABLE_INDEXING"
1784 /* Reload can create backwards (relative to cse) unscaled index
1785 address modes when eliminating registers and possibly for
1786 pseudos that don't get hard registers. Deal with it. */
1787 if (operands[2] == hard_frame_pointer_rtx
1788 || operands[2] == stack_pointer_rtx)
1789 return \"ldhx %1(0,%2),%0\";
1791 return \"ldhx %2(0,%1),%0\";
1793 [(set_attr "type" "load")
1794 (set_attr "length" "4")])
1797 [(set (match_operand:HI 0 "register_operand" "=r")
1798 (mem:HI (plus:SI (match_operand:SI 1 "register_operand" "r")
1799 (match_operand:SI 2 "basereg_operand" "r"))))]
1800 "! TARGET_DISABLE_INDEXING"
1803 /* Reload can create backwards (relative to cse) unscaled index
1804 address modes when eliminating registers and possibly for
1805 pseudos that don't get hard registers. Deal with it. */
1806 if (operands[1] == hard_frame_pointer_rtx
1807 || operands[1] == stack_pointer_rtx)
1808 return \"ldhx %2(0,%1),%0\";
1810 return \"ldhx %1(0,%2),%0\";
1812 [(set_attr "type" "load")
1813 (set_attr "length" "4")])
1815 ; Now zero extended variants.
1817 [(set (match_operand:SI 0 "register_operand" "=r")
1818 (zero_extend:SI (mem:HI
1820 (match_operand:SI 1 "basereg_operand" "r")
1821 (match_operand:SI 2 "register_operand" "r")))))]
1822 "! TARGET_DISABLE_INDEXING"
1825 /* Reload can create backwards (relative to cse) unscaled index
1826 address modes when eliminating registers and possibly for
1827 pseudos that don't get hard registers. Deal with it. */
1828 if (operands[2] == hard_frame_pointer_rtx
1829 || operands[2] == stack_pointer_rtx)
1830 return \"ldhx %1(0,%2),%0\";
1832 return \"ldhx %2(0,%1),%0\";
1834 [(set_attr "type" "load")
1835 (set_attr "length" "4")])
1838 [(set (match_operand:SI 0 "register_operand" "=r")
1839 (zero_extend:SI (mem:HI
1841 (match_operand:SI 1 "register_operand" "r")
1842 (match_operand:SI 2 "basereg_operand" "r")))))]
1843 "! TARGET_DISABLE_INDEXING"
1846 /* Reload can create backwards (relative to cse) unscaled index
1847 address modes when eliminating registers and possibly for
1848 pseudos that don't get hard registers. Deal with it. */
1849 if (operands[1] == hard_frame_pointer_rtx
1850 || operands[1] == stack_pointer_rtx)
1851 return \"ldhx %2(0,%1),%0\";
1853 return \"ldhx %1(0,%2),%0\";
1855 [(set_attr "type" "load")
1856 (set_attr "length" "4")])
1859 [(set (match_operand:HI 0 "register_operand" "=r")
1860 (mem:HI (plus:SI (match_operand:SI 1 "register_operand" "+r")
1861 (match_operand:SI 2 "int5_operand" "L"))))
1863 (plus:SI (match_dup 1) (match_dup 2)))]
1865 "ldhs,mb %2(0,%1),%0"
1866 [(set_attr "type" "load")
1867 (set_attr "length" "4")])
1869 ; And a zero extended variant.
1871 [(set (match_operand:SI 0 "register_operand" "=r")
1872 (zero_extend:SI (mem:HI
1874 (match_operand:SI 1 "register_operand" "+r")
1875 (match_operand:SI 2 "int5_operand" "L")))))
1877 (plus:SI (match_dup 1) (match_dup 2)))]
1879 "ldhs,mb %2(0,%1),%0"
1880 [(set_attr "type" "load")
1881 (set_attr "length" "4")])
1884 [(set (mem:HI (plus:SI (match_operand:SI 0 "register_operand" "+r")
1885 (match_operand:SI 1 "int5_operand" "L")))
1886 (match_operand:HI 2 "reg_or_0_operand" "rM"))
1888 (plus:SI (match_dup 0) (match_dup 1)))]
1890 "sths,mb %r2,%1(0,%0)"
1891 [(set_attr "type" "store")
1892 (set_attr "length" "4")])
1895 [(set (match_operand:HI 0 "register_operand" "=r")
1896 (high:HI (match_operand 1 "const_int_operand" "")))]
1899 [(set_attr "type" "move")
1900 (set_attr "length" "4")])
1903 [(set (match_operand:HI 0 "register_operand" "=r")
1904 (lo_sum:HI (match_operand:HI 1 "register_operand" "r")
1905 (match_operand 2 "const_int_operand" "")))]
1908 [(set_attr "type" "move")
1909 (set_attr "length" "4")])
;; Expand QImode moves via emit_move_sequence (pa.c); the DONE/
;; fall-through tail of the preparation code is elided in this copy.
1911 (define_expand "movqi"
1912 [(set (match_operand:QI 0 "general_operand" "")
1913 (match_operand:QI 1 "general_operand" ""))]
1917 if (emit_move_sequence (operands, QImode, 0))
1922 [(set (match_operand:QI 0 "reg_or_nonsymb_mem_operand" "=r,r,r,r,r,Q,*q,!f")
1923 (match_operand:QI 1 "move_operand" "r,J,N,K,RQ,rM,rM,!fM"))]
1924 "register_operand (operands[0], QImode)
1925 || reg_or_0_operand (operands[1], QImode)"
1935 [(set_attr "type" "move,move,move,shift,load,store,move,fpalu")
1936 (set_attr "pa_combine_type" "addmove")
1937 (set_attr "length" "4,4,4,4,4,4,4,4")])
1940 [(set (match_operand:QI 0 "register_operand" "=r")
1941 (mem:QI (plus:SI (match_operand:SI 1 "basereg_operand" "r")
1942 (match_operand:SI 2 "register_operand" "r"))))]
1943 "! TARGET_DISABLE_INDEXING"
1946 /* Reload can create backwards (relative to cse) unscaled index
1947 address modes when eliminating registers and possibly for
1948 pseudos that don't get hard registers. Deal with it. */
1949 if (operands[2] == hard_frame_pointer_rtx
1950 || operands[2] == stack_pointer_rtx)
1951 return \"ldbx %1(0,%2),%0\";
1953 return \"ldbx %2(0,%1),%0\";
1955 [(set_attr "type" "load")
1956 (set_attr "length" "4")])
1959 [(set (match_operand:QI 0 "register_operand" "=r")
1960 (mem:QI (plus:SI (match_operand:SI 1 "register_operand" "r")
1961 (match_operand:SI 2 "basereg_operand" "r"))))]
1962 "! TARGET_DISABLE_INDEXING"
1965 /* Reload can create backwards (relative to cse) unscaled index
1966 address modes when eliminating registers and possibly for
1967 pseudos that don't get hard registers. Deal with it. */
1968 if (operands[1] == hard_frame_pointer_rtx
1969 || operands[1] == stack_pointer_rtx)
1970 return \"ldbx %2(0,%1),%0\";
1972 return \"ldbx %1(0,%2),%0\";
1974 [(set_attr "type" "load")
1975 (set_attr "length" "4")])
1977 ; Indexed byte load with zero extension to SImode or HImode.
1979 [(set (match_operand:SI 0 "register_operand" "=r")
1980 (zero_extend:SI (mem:QI
1982 (match_operand:SI 1 "basereg_operand" "r")
1983 (match_operand:SI 2 "register_operand" "r")))))]
1984 "! TARGET_DISABLE_INDEXING"
1987 /* Reload can create backwards (relative to cse) unscaled index
1988 address modes when eliminating registers and possibly for
1989 pseudos that don't get hard registers. Deal with it. */
1990 if (operands[2] == hard_frame_pointer_rtx
1991 || operands[2] == stack_pointer_rtx)
1992 return \"ldbx %1(0,%2),%0\";
1994 return \"ldbx %2(0,%1),%0\";
1996 [(set_attr "type" "load")
1997 (set_attr "length" "4")])
2000 [(set (match_operand:SI 0 "register_operand" "=r")
2001 (zero_extend:SI (mem:QI
2003 (match_operand:SI 1 "register_operand" "r")
2004 (match_operand:SI 2 "basereg_operand" "r")))))]
2005 "! TARGET_DISABLE_INDEXING"
2008 /* Reload can create backwards (relative to cse) unscaled index
2009 address modes when eliminating registers and possibly for
2010 pseudos that don't get hard registers. Deal with it. */
2011 if (operands[1] == hard_frame_pointer_rtx
2012 || operands[1] == stack_pointer_rtx)
2013 return \"ldbx %2(0,%1),%0\";
2015 return \"ldbx %1(0,%2),%0\";
2017 [(set_attr "type" "load")
2018 (set_attr "length" "4")])
2021 [(set (match_operand:HI 0 "register_operand" "=r")
2022 (zero_extend:HI (mem:QI
2024 (match_operand:SI 1 "basereg_operand" "r")
2025 (match_operand:SI 2 "register_operand" "r")))))]
2026 "! TARGET_DISABLE_INDEXING"
2029 /* Reload can create backwards (relative to cse) unscaled index
2030 address modes when eliminating registers and possibly for
2031 pseudos that don't get hard registers. Deal with it. */
2032 if (operands[2] == hard_frame_pointer_rtx
2033 || operands[2] == stack_pointer_rtx)
2034 return \"ldbx %1(0,%2),%0\";
2036 return \"ldbx %2(0,%1),%0\";
2038 [(set_attr "type" "load")
2039 (set_attr "length" "4")])
2042 [(set (match_operand:HI 0 "register_operand" "=r")
2043 (zero_extend:HI (mem:QI
2045 (match_operand:SI 1 "register_operand" "r")
2046 (match_operand:SI 2 "basereg_operand" "r")))))]
2047 "! TARGET_DISABLE_INDEXING"
2050 /* Reload can create backwards (relative to cse) unscaled index
2051 address modes when eliminating registers and possibly for
2052 pseudos that don't get hard registers. Deal with it. */
2053 if (operands[1] == hard_frame_pointer_rtx
2054 || operands[1] == stack_pointer_rtx)
2055 return \"ldbx %2(0,%1),%0\";
2057 return \"ldbx %1(0,%2),%0\";
2059 [(set_attr "type" "load")
2060 (set_attr "length" "4")])
2063 [(set (match_operand:QI 0 "register_operand" "=r")
2064 (mem:QI (plus:SI (match_operand:SI 1 "register_operand" "+r")
2065 (match_operand:SI 2 "int5_operand" "L"))))
2066 (set (match_dup 1) (plus:SI (match_dup 1) (match_dup 2)))]
2068 "ldbs,mb %2(0,%1),%0"
2069 [(set_attr "type" "load")
2070 (set_attr "length" "4")])
2072 ; Now the same thing with zero extensions.
2074 [(set (match_operand:SI 0 "register_operand" "=r")
2075 (zero_extend:SI (mem:QI (plus:SI
2076 (match_operand:SI 1 "register_operand" "+r")
2077 (match_operand:SI 2 "int5_operand" "L")))))
2078 (set (match_dup 1) (plus:SI (match_dup 1) (match_dup 2)))]
2080 "ldbs,mb %2(0,%1),%0"
2081 [(set_attr "type" "load")
2082 (set_attr "length" "4")])
2085 [(set (match_operand:HI 0 "register_operand" "=r")
2086 (zero_extend:HI (mem:QI (plus:SI
2087 (match_operand:SI 1 "register_operand" "+r")
2088 (match_operand:SI 2 "int5_operand" "L")))))
2089 (set (match_dup 1) (plus:SI (match_dup 1) (match_dup 2)))]
2091 "ldbs,mb %2(0,%1),%0"
2092 [(set_attr "type" "load")
2093 (set_attr "length" "4")])
2096 [(set (mem:QI (plus:SI (match_operand:SI 0 "register_operand" "+r")
2097 (match_operand:SI 1 "int5_operand" "L")))
2098 (match_operand:QI 2 "reg_or_0_operand" "rM"))
2100 (plus:SI (match_dup 0) (match_dup 1)))]
2102 "stbs,mb %r2,%1(0,%0)"
2103 [(set_attr "type" "store")
2104 (set_attr "length" "4")])
2106 ;; The definition of this insn does not really explain what it does,
2107 ;; but it should suffice
2108 ;; that anything generated as this insn will be recognized as one
2109 ;; and that it will not successfully combine with anything.
;; Expand a block copy (memcpy): operand 2 = byte count, operand 3 =
;; known alignment (capped at 4 below).  Heuristic: a non-constant count
;; or a large copy relative to alignment (size/align > 16) goes to the HP
;; library routine; a very small one (size/align < MOVE_RATIO) is open
;; coded; everything else uses the movstrsi_internal pattern with three
;; fresh scratch registers (operands 4-6) and both addresses forced into
;; pseudo registers (operands 7-8 are the original address rtxs, which
;; the pattern clobbers).  The library/open-code branches are elided in
;; this copy.
2110 (define_expand "movstrsi"
2111 [(parallel [(set (match_operand:BLK 0 "" "")
2112 (match_operand:BLK 1 "" ""))
2113 (clobber (match_dup 7))
2114 (clobber (match_dup 8))
2115 (clobber (match_dup 4))
2116 (clobber (match_dup 5))
2117 (clobber (match_dup 6))
2118 (use (match_operand:SI 2 "arith_operand" ""))
2119 (use (match_operand:SI 3 "const_int_operand" ""))])]
2125 /* HP provides very fast block move library routine for the PA;
2126 this routine includes:
2128 4x4 byte at a time block moves,
2129 1x4 byte at a time with alignment checked at runtime with
2130 attempts to align the source and destination as needed
2133 With that in mind, here's the heuristics to try and guess when
2134 the inlined block move will be better than the library block
2137 If the size isn't constant, then always use the library routines.
2139 If the size is large in respect to the known alignment, then use
2140 the library routines.
2142 If the size is small in respect to the known alignment, then open
2143 code the copy (since that will lead to better scheduling).
2145 Else use the block move pattern. */
2147 /* Undetermined size, use the library routine. */
2148 if (GET_CODE (operands[2]) != CONST_INT)
2151 size = INTVAL (operands[2]);
2152 align = INTVAL (operands[3]);
2153 align = align > 4 ? 4 : align;
2155 /* If size/alignment > 16 (eg size is large in respect to alignment),
2156 then use the library routines. */
2157 if (size / align > 16)
2160 /* This does happen, but not often enough to worry much about. */
2161 if (size / align < MOVE_RATIO)
2164 /* Fall through means we're going to use our block move pattern. */
2166 = change_address (operands[0], VOIDmode,
2167 copy_to_mode_reg (SImode, XEXP (operands[0], 0)));
2169 = change_address (operands[1], VOIDmode,
2170 copy_to_mode_reg (SImode, XEXP (operands[1], 0)));
2171 operands[4] = gen_reg_rtx (SImode);
2172 operands[5] = gen_reg_rtx (SImode);
2173 operands[6] = gen_reg_rtx (SImode);
2174 operands[7] = XEXP (operands[0], 0);
2175 operands[8] = XEXP (operands[1], 0);
2178 ;; The operand constraints are written like this to support both compile-time
2179 ;; and run-time determined byte count. If the count is run-time determined,
2180 ;; the register with the byte count is clobbered by the copying code, and
2181 ;; therefore it is forced to operand 2. If the count is compile-time
2182 ;; determined, we need two scratch registers for the unrolled code.
;; The block-move insn itself; output_block_move (pa.c) produces the
;; code.  Two alternatives, selected by the byte count (operand 4): a
;; compile-time constant count ("J", unrolled copy using scratch
;; operands 3 and 6) or a run-time count, which the copy loop consumes
;; and is therefore tied to clobbered operand 2 (see preceding comment).
;; Both address registers (operands 0/1) are clobbered by the copy.
2183 (define_insn "movstrsi_internal"
2184 [(set (mem:BLK (match_operand:SI 0 "register_operand" "+r,r"))
2185 (mem:BLK (match_operand:SI 1 "register_operand" "+r,r")))
2186 (clobber (match_dup 0))
2187 (clobber (match_dup 1))
2188 (clobber (match_operand:SI 2 "register_operand" "=r,r")) ;loop cnt/tmp
2189 (clobber (match_operand:SI 3 "register_operand" "=&r,&r")) ;item tmp
2190 (clobber (match_operand:SI 6 "register_operand" "=&r,&r")) ;item tmp2
2191 (use (match_operand:SI 4 "arith_operand" "J,2")) ;byte count
2192 (use (match_operand:SI 5 "const_int_operand" "n,n"))] ;alignment
2194 "* return output_block_move (operands, !which_alternative);"
2195 [(set_attr "type" "multi,multi")])
2197 ;; Floating point move insns
2199 ;; This pattern forces (set (reg:DF ...) (const_double ...))
2200 ;; to be reloaded by putting the constant into memory when
2201 ;; reg is a floating point register.
2203 ;; For integer registers we use ldil;ldo to set the appropriate
2206 ;; This must come before the movdf pattern, and it must be present
2207 ;; to handle obscure reloading cases.
2209 [(set (match_operand:DF 0 "register_operand" "=?r,f")
2210 (match_operand:DF 1 "" "?F,m"))]
2211 "GET_CODE (operands[1]) == CONST_DOUBLE
2212 && operands[1] != CONST0_RTX (DFmode)
2213 && ! TARGET_SOFT_FLOAT"
2214 "* return (which_alternative == 0 ? output_move_double (operands)
2215 : \"fldd%F1 %1,%0\");"
2216 [(set_attr "type" "move,fpload")
2217 (set_attr "length" "16,4")])
;; Expand DFmode moves via emit_move_sequence (pa.c); the DONE/
;; fall-through tail of the preparation code is elided in this copy.
2219 (define_expand "movdf"
2220 [(set (match_operand:DF 0 "general_operand" "")
2221 (match_operand:DF 1 "general_operand" ""))]
2225 if (emit_move_sequence (operands, DFmode, 0))
2229 ;; Reloading a DFmode value requires a scratch register if
2230 ;; going into or out of floating point registers.
;; Reload helper: load a DFmode value that is not in a hard register into
;; operand 0, with integer scratch operand 2 (needed for moves in or out
;; of floating-point registers).  As with reload_insi, a bare SET is
;; emitted by hand when emit_move_sequence did the work, so the scratch
;; clobber never reaches the insn stream.
2232 (define_expand "reload_indf"
2233 [(set (match_operand:DF 0 "register_operand" "=Z")
2234 (match_operand:DF 1 "non_hard_reg_operand" ""))
2235 (clobber (match_operand:DF 2 "register_operand" "=&r"))]
2239 if (emit_move_sequence (operands, DFmode, operands[2]))
2242 /* We don't want the clobber emitted, so handle this ourselves. */
2243 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
;; Reload helper, output direction: store a DFmode register into a
;; non-hard-register destination using scratch operand 2.  Mirrors
;; reload_indf: a plain SET is emitted when emit_move_sequence handled
;; the move, suppressing the clobber.
2247 (define_expand "reload_outdf"
2248 [(set (match_operand:DF 0 "non_hard_reg_operand" "")
2249 (match_operand:DF 1 "register_operand" "Z"))
2250 (clobber (match_operand:DF 2 "register_operand" "=&r"))]
2254 if (emit_move_sequence (operands, DFmode, operands[2]))
2257 /* We don't want the clobber emitted, so handle this ourselves. */
2258 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2263 [(set (match_operand:DF 0 "reg_or_nonsymb_mem_operand"
2264 "=f,*r,RQ,?o,?Q,f,*r,*r")
2265 (match_operand:DF 1 "reg_or_0_or_nonsymb_mem_operand"
2266 "fG,*rG,f,*r,*r,RQ,o,RQ"))]
2267 "(register_operand (operands[0], DFmode)
2268 || reg_or_0_operand (operands[1], DFmode))
2269 && ! (GET_CODE (operands[1]) == CONST_DOUBLE
2270 && GET_CODE (operands[0]) == MEM)
2271 && ! TARGET_SOFT_FLOAT"
2274 if (FP_REG_P (operands[0]) || FP_REG_P (operands[1])
2275 || operands[1] == CONST0_RTX (DFmode))
2276 return output_fp_move_double (operands);
2277 return output_move_double (operands);
2279 [(set_attr "type" "fpalu,move,fpstore,store,store,fpload,load,load")
2280 (set_attr "length" "4,8,4,8,16,4,8,16")])
2283 [(set (match_operand:DF 0 "reg_or_nonsymb_mem_operand"
2285 (match_operand:DF 1 "reg_or_0_or_nonsymb_mem_operand"
2287 "(register_operand (operands[0], DFmode)
2288 || reg_or_0_operand (operands[1], DFmode))
2289 && TARGET_SOFT_FLOAT"
2292 return output_move_double (operands);
2294 [(set_attr "type" "move,store,store,load,load")
2295 (set_attr "length" "8,8,16,8,16")])
2298 [(set (match_operand:DF 0 "register_operand" "=fx")
2299 (mem:DF (plus:SI (match_operand:SI 1 "basereg_operand" "r")
2300 (match_operand:SI 2 "register_operand" "r"))))]
2301 "! TARGET_DISABLE_INDEXING && ! TARGET_SOFT_FLOAT"
2304 /* Reload can create backwards (relative to cse) unscaled index
2305 address modes when eliminating registers and possibly for
2306 pseudos that don't get hard registers. Deal with it. */
2307 if (operands[2] == hard_frame_pointer_rtx
2308 || operands[2] == stack_pointer_rtx)
2309 return \"flddx %1(0,%2),%0\";
2311 return \"flddx %2(0,%1),%0\";
2313 [(set_attr "type" "fpload")
2314 (set_attr "length" "4")])
2317 [(set (match_operand:DF 0 "register_operand" "=fx")
2318 (mem:DF (plus:SI (match_operand:SI 1 "register_operand" "r")
2319 (match_operand:SI 2 "basereg_operand" "r"))))]
2320 "! TARGET_DISABLE_INDEXING && ! TARGET_SOFT_FLOAT"
2323 /* Reload can create backwards (relative to cse) unscaled index
2324 address modes when eliminating registers and possibly for
2325 pseudos that don't get hard registers. Deal with it. */
2326 if (operands[1] == hard_frame_pointer_rtx
2327 || operands[1] == stack_pointer_rtx)
2328 return \"flddx %2(0,%1),%0\";
2330 return \"flddx %1(0,%2),%0\";
2332 [(set_attr "type" "fpload")
2333 (set_attr "length" "4")])
2336 [(set (mem:DF (plus:SI (match_operand:SI 1 "basereg_operand" "r")
2337 (match_operand:SI 2 "register_operand" "r")))
2338 (match_operand:DF 0 "register_operand" "fx"))]
2339 "! TARGET_DISABLE_INDEXING && ! TARGET_SOFT_FLOAT"
2342 /* Reload can create backwards (relative to cse) unscaled index
2343 address modes when eliminating registers and possibly for
2344 pseudos that don't get hard registers. Deal with it. */
2345 if (operands[2] == hard_frame_pointer_rtx
2346 || operands[2] == stack_pointer_rtx)
2347 return \"fstdx %0,%1(0,%2)\";
2349 return \"fstdx %0,%2(0,%1)\";
2351 [(set_attr "type" "fpstore")
2352 (set_attr "length" "4")])
2355 [(set (mem:DF (plus:SI (match_operand:SI 1 "register_operand" "r")
2356 (match_operand:SI 2 "basereg_operand" "r")))
2357 (match_operand:DF 0 "register_operand" "fx"))]
2358 "! TARGET_DISABLE_INDEXING && ! TARGET_SOFT_FLOAT"
2361 /* Reload can create backwards (relative to cse) unscaled index
2362 address modes when eliminating registers and possibly for
2363 pseudos that don't get hard registers. Deal with it. */
2364 if (operands[1] == hard_frame_pointer_rtx
2365 || operands[1] == stack_pointer_rtx)
2366 return \"fstdx %0,%2(0,%1)\";
2368 return \"fstdx %0,%1(0,%2)\";
2370 [(set_attr "type" "fpstore")
2371 (set_attr "length" "4")])
2373 (define_expand "movdi"
2374 [(set (match_operand:DI 0 "reg_or_nonsymb_mem_operand" "")
2375 (match_operand:DI 1 "general_operand" ""))]
2379 if (emit_move_sequence (operands, DImode, 0))
2383 (define_expand "reload_indi"
2384 [(set (match_operand:DI 0 "register_operand" "=f")
2385 (match_operand:DI 1 "non_hard_reg_operand" ""))
2386 (clobber (match_operand:SI 2 "register_operand" "=&r"))]
2390 if (emit_move_sequence (operands, DImode, operands[2]))
2393 /* We don't want the clobber emitted, so handle this ourselves. */
2394 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2398 (define_expand "reload_outdi"
2399 [(set (match_operand:DI 0 "general_operand" "")
2400 (match_operand:DI 1 "register_operand" "f"))
2401 (clobber (match_operand:SI 2 "register_operand" "=&r"))]
2405 if (emit_move_sequence (operands, DImode, operands[2]))
2408 /* We don't want the clobber emitted, so handle this ourselves. */
2409 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2414 [(set (match_operand:DI 0 "register_operand" "=r")
2415 (high:DI (match_operand 1 "" "")))]
2419 rtx op0 = operands[0];
2420 rtx op1 = operands[1];
2422 if (GET_CODE (op1) == CONST_INT)
2424 operands[0] = operand_subword (op0, 1, 0, DImode);
2425 output_asm_insn (\"ldil L'%1,%0\", operands);
2427 operands[0] = operand_subword (op0, 0, 0, DImode);
2428 if (INTVAL (op1) < 0)
2429 output_asm_insn (\"ldi -1,%0\", operands);
2431 output_asm_insn (\"ldi 0,%0\", operands);
2434 else if (GET_CODE (op1) == CONST_DOUBLE)
2436 operands[0] = operand_subword (op0, 1, 0, DImode);
2437 operands[1] = GEN_INT (CONST_DOUBLE_LOW (op1));
2438 output_asm_insn (\"ldil L'%1,%0\", operands);
2440 operands[0] = operand_subword (op0, 0, 0, DImode);
2441 operands[1] = GEN_INT (CONST_DOUBLE_HIGH (op1));
2442 output_asm_insn (singlemove_string (operands), operands);
2448 [(set_attr "type" "move")
2449 (set_attr "length" "8")])
2454 [(set (match_operand:DI 0 "reg_or_nonsymb_mem_operand"
2455 "=r,o,Q,r,r,r,f,f,*TR")
2456 (match_operand:DI 1 "general_operand"
2457 "rM,r,r,o*R,Q,i,fM,*TR,f"))]
2458 "(register_operand (operands[0], DImode)
2459 || reg_or_0_operand (operands[1], DImode))
2460 && ! TARGET_SOFT_FLOAT"
2463 if (FP_REG_P (operands[0]) || FP_REG_P (operands[1])
2464 || (operands[1] == CONST0_RTX (DImode)))
2465 return output_fp_move_double (operands);
2466 return output_move_double (operands);
2468 [(set_attr "type" "move,store,store,load,load,multi,fpalu,fpload,fpstore")
2469 (set_attr "length" "8,8,16,8,16,16,4,4,4")])
2472 [(set (match_operand:DI 0 "reg_or_nonsymb_mem_operand"
2474 (match_operand:DI 1 "general_operand"
2476 "(register_operand (operands[0], DImode)
2477 || reg_or_0_operand (operands[1], DImode))
2478 && TARGET_SOFT_FLOAT"
2481 return output_move_double (operands);
2483 [(set_attr "type" "move,store,store,load,load,multi")
2484 (set_attr "length" "8,8,16,8,16,16")])
2487 [(set (match_operand:DI 0 "register_operand" "=r,&r")
2488 (lo_sum:DI (match_operand:DI 1 "register_operand" "0,r")
2489 (match_operand:DI 2 "immediate_operand" "i,i")))]
2493 /* Don't output a 64 bit constant, since we can't trust the assembler to
2494 handle it correctly. */
2495 if (GET_CODE (operands[2]) == CONST_DOUBLE)
2496 operands[2] = GEN_INT (CONST_DOUBLE_LOW (operands[2]));
2497 if (which_alternative == 1)
2498 output_asm_insn (\"copy %1,%0\", operands);
2499 return \"ldo R'%G2(%R1),%R0\";
2501 [(set_attr "type" "move,move")
2502 (set_attr "length" "4,8")])
2504 ;; This pattern forces (set (reg:SF ...) (const_double ...))
2505 ;; to be reloaded by putting the constant into memory when
2506 ;; reg is a floating point register.
2508 ;; For integer registers we use ldil;ldo to set the appropriate
2511 ;; This must come before the movsf pattern, and it must be present
2512 ;; to handle obscure reloading cases.
2514 [(set (match_operand:SF 0 "register_operand" "=?r,f")
2515 (match_operand:SF 1 "" "?F,m"))]
2516 "GET_CODE (operands[1]) == CONST_DOUBLE
2517 && operands[1] != CONST0_RTX (SFmode)
2518 && ! TARGET_SOFT_FLOAT"
2519 "* return (which_alternative == 0 ? singlemove_string (operands)
2520 : \" fldw%F1 %1,%0\");"
2521 [(set_attr "type" "move,fpload")
2522 (set_attr "length" "8,4")])
2524 (define_expand "movsf"
2525 [(set (match_operand:SF 0 "general_operand" "")
2526 (match_operand:SF 1 "general_operand" ""))]
2530 if (emit_move_sequence (operands, SFmode, 0))
2534 ;; Reloading an SImode or DImode value requires a scratch register if
2535 ;; going into or out of floating point registers.
2537 (define_expand "reload_insf"
2538 [(set (match_operand:SF 0 "register_operand" "=Z")
2539 (match_operand:SF 1 "non_hard_reg_operand" ""))
2540 (clobber (match_operand:SF 2 "register_operand" "=&r"))]
2544 if (emit_move_sequence (operands, SFmode, operands[2]))
2547 /* We don't want the clobber emitted, so handle this ourselves. */
2548 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2552 (define_expand "reload_outsf"
2553 [(set (match_operand:SF 0 "non_hard_reg_operand" "")
2554 (match_operand:SF 1 "register_operand" "Z"))
2555 (clobber (match_operand:SF 2 "register_operand" "=&r"))]
2559 if (emit_move_sequence (operands, SFmode, operands[2]))
2562 /* We don't want the clobber emitted, so handle this ourselves. */
2563 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2568 [(set (match_operand:SF 0 "reg_or_nonsymb_mem_operand"
2570 (match_operand:SF 1 "reg_or_0_or_nonsymb_mem_operand"
2571 "fG,rG,RQ,RQ,f,rG"))]
2572 "(register_operand (operands[0], SFmode)
2573 || reg_or_0_operand (operands[1], SFmode))
2574 && ! TARGET_SOFT_FLOAT"
2582 [(set_attr "type" "fpalu,move,fpload,load,fpstore,store")
2583 (set_attr "pa_combine_type" "addmove")
2584 (set_attr "length" "4,4,4,4,4,4")])
2587 [(set (match_operand:SF 0 "reg_or_nonsymb_mem_operand"
2589 (match_operand:SF 1 "reg_or_0_or_nonsymb_mem_operand"
2591 "(register_operand (operands[0], SFmode)
2592 || reg_or_0_operand (operands[1], SFmode))
2593 && TARGET_SOFT_FLOAT"
2598 [(set_attr "type" "move,load,store")
2599 (set_attr "pa_combine_type" "addmove")
2600 (set_attr "length" "4,4,4")])
2603 [(set (match_operand:SF 0 "register_operand" "=fx")
2604 (mem:SF (plus:SI (match_operand:SI 1 "basereg_operand" "r")
2605 (match_operand:SI 2 "register_operand" "r"))))]
2606 "! TARGET_DISABLE_INDEXING && ! TARGET_SOFT_FLOAT"
2609 /* Reload can create backwards (relative to cse) unscaled index
2610 address modes when eliminating registers and possibly for
2611 pseudos that don't get hard registers. Deal with it. */
2612 if (operands[2] == hard_frame_pointer_rtx
2613 || operands[2] == stack_pointer_rtx)
2614 return \"fldwx %1(0,%2),%0\";
2616 return \"fldwx %2(0,%1),%0\";
2618 [(set_attr "type" "fpload")
2619 (set_attr "length" "4")])
2622 [(set (match_operand:SF 0 "register_operand" "=fx")
2623 (mem:SF (plus:SI (match_operand:SI 1 "register_operand" "r")
2624 (match_operand:SI 2 "basereg_operand" "r"))))]
2625 "! TARGET_DISABLE_INDEXING && ! TARGET_SOFT_FLOAT"
2628 /* Reload can create backwards (relative to cse) unscaled index
2629 address modes when eliminating registers and possibly for
2630 pseudos that don't get hard registers. Deal with it. */
2631 if (operands[1] == hard_frame_pointer_rtx
2632 || operands[1] == stack_pointer_rtx)
2633 return \"fldwx %2(0,%1),%0\";
2635 return \"fldwx %1(0,%2),%0\";
2637 [(set_attr "type" "fpload")
2638 (set_attr "length" "4")])
2641 [(set (mem:SF (plus:SI (match_operand:SI 1 "basereg_operand" "r")
2642 (match_operand:SI 2 "register_operand" "r")))
2643 (match_operand:SF 0 "register_operand" "fx"))]
2644 "! TARGET_DISABLE_INDEXING && ! TARGET_SOFT_FLOAT"
2647 /* Reload can create backwards (relative to cse) unscaled index
2648 address modes when eliminating registers and possibly for
2649 pseudos that don't get hard registers. Deal with it. */
2650 if (operands[2] == hard_frame_pointer_rtx
2651 || operands[2] == stack_pointer_rtx)
2652 return \"fstwx %0,%1(0,%2)\";
2654 return \"fstwx %0,%2(0,%1)\";
2656 [(set_attr "type" "fpstore")
2657 (set_attr "length" "4")])
2660 [(set (mem:SF (plus:SI (match_operand:SI 1 "register_operand" "r")
2661 (match_operand:SI 2 "basereg_operand" "r")))
2662 (match_operand:SF 0 "register_operand" "fx"))]
2663 "! TARGET_DISABLE_INDEXING && ! TARGET_SOFT_FLOAT"
2666 /* Reload can create backwards (relative to cse) unscaled index
2667 address modes when eliminating registers and possibly for
2668 pseudos that don't get hard registers. Deal with it. */
2669 if (operands[1] == hard_frame_pointer_rtx
2670 || operands[1] == stack_pointer_rtx)
2671 return \"fstwx %0,%2(0,%1)\";
2673 return \"fstwx %0,%1(0,%2)\";
2675 [(set_attr "type" "fpstore")
2676 (set_attr "length" "4")])
2679 ;;- zero extension instructions
2680 ;; We have define_expand for zero extension patterns to make sure the
2681 ;; operands get loaded into registers. The define_insns accept
2682 ;; memory operands. This gives us better overall code than just
2683 ;; having a pattern that does or does not accept memory operands.
2685 (define_expand "zero_extendhisi2"
2686 [(set (match_operand:SI 0 "register_operand" "")
2688 (match_operand:HI 1 "register_operand" "")))]
2693 [(set (match_operand:SI 0 "register_operand" "=r,r")
2695 (match_operand:HI 1 "move_operand" "r,RQ")))]
2696 "GET_CODE (operands[1]) != CONST_INT"
2700 [(set_attr "type" "shift,load")
2701 (set_attr "length" "4,4")])
2703 (define_expand "zero_extendqihi2"
2704 [(set (match_operand:HI 0 "register_operand" "")
2706 (match_operand:QI 1 "register_operand" "")))]
2711 [(set (match_operand:HI 0 "register_operand" "=r,r")
2713 (match_operand:QI 1 "move_operand" "r,RQ")))]
2714 "GET_CODE (operands[1]) != CONST_INT"
2718 [(set_attr "type" "shift,load")
2719 (set_attr "length" "4,4")])
2721 (define_expand "zero_extendqisi2"
2722 [(set (match_operand:SI 0 "register_operand" "")
2724 (match_operand:QI 1 "register_operand" "")))]
2729 [(set (match_operand:SI 0 "register_operand" "=r,r")
2731 (match_operand:QI 1 "move_operand" "r,RQ")))]
2732 "GET_CODE (operands[1]) != CONST_INT"
2736 [(set_attr "type" "shift,load")
2737 (set_attr "length" "4,4")])
2739 ;;- sign extension instructions
2741 (define_insn "extendhisi2"
2742 [(set (match_operand:SI 0 "register_operand" "=r")
2743 (sign_extend:SI (match_operand:HI 1 "register_operand" "r")))]
2746 [(set_attr "type" "shift")
2747 (set_attr "length" "4")])
2749 (define_insn "extendqihi2"
2750 [(set (match_operand:HI 0 "register_operand" "=r")
2751 (sign_extend:HI (match_operand:QI 1 "register_operand" "r")))]
2754 [(set_attr "type" "shift")
2755 (set_attr "length" "4")])
2757 (define_insn "extendqisi2"
2758 [(set (match_operand:SI 0 "register_operand" "=r")
2759 (sign_extend:SI (match_operand:QI 1 "register_operand" "r")))]
2762 [(set_attr "type" "shift")
2763 (set_attr "length" "4")])
2765 ;; Conversions between float and double.
2767 (define_insn "extendsfdf2"
2768 [(set (match_operand:DF 0 "register_operand" "=f")
2770 (match_operand:SF 1 "register_operand" "f")))]
2771 "! TARGET_SOFT_FLOAT"
2772 "fcnvff,sgl,dbl %1,%0"
2773 [(set_attr "type" "fpalu")
2774 (set_attr "length" "4")])
2776 (define_insn "truncdfsf2"
2777 [(set (match_operand:SF 0 "register_operand" "=f")
2779 (match_operand:DF 1 "register_operand" "f")))]
2780 "! TARGET_SOFT_FLOAT"
2781 "fcnvff,dbl,sgl %1,%0"
2782 [(set_attr "type" "fpalu")
2783 (set_attr "length" "4")])
2785 ;; Conversion between fixed point and floating point.
2786 ;; Note that among the fix-to-float insns
2787 ;; the ones that start with SImode come first.
2788 ;; That is so that an operand that is a CONST_INT
2789 ;; (and therefore lacks a specific machine mode).
2790 ;; will be recognized as SImode (which is always valid)
2791 ;; rather than as QImode or HImode.
2793 ;; This pattern forces (set (reg:SF ...) (float:SF (const_int ...)))
2794 ;; to be reloaded by putting the constant into memory.
2795 ;; It must come before the more general floatsisf2 pattern.
2797 [(set (match_operand:SF 0 "register_operand" "=f")
2798 (float:SF (match_operand:SI 1 "const_int_operand" "m")))]
2799 "! TARGET_SOFT_FLOAT"
2800 "fldw%F1 %1,%0\;fcnvxf,sgl,sgl %0,%0"
2801 [(set_attr "type" "fpalu")
2802 (set_attr "length" "8")])
;; floatsisf2 -- convert a signed SImode integer to single-precision
;; float.  The "f" constraints require both operands in FP registers;
;; the FPU fcnvxf (fixed-to-float) instruction does the conversion.
;; Disabled when generating soft-float code.
2804 (define_insn "floatsisf2"
2805 [(set (match_operand:SF 0 "register_operand" "=f")
2806 (float:SF (match_operand:SI 1 "register_operand" "f")))]
2807 "! TARGET_SOFT_FLOAT"
2808 "fcnvxf,sgl,sgl %1,%0"
2809 [(set_attr "type" "fpalu")
2810 (set_attr "length" "4")])
2812 ;; This pattern forces (set (reg:DF ...) (float:DF (const_int ...)))
2813 ;; to be reloaded by putting the constant into memory.
2814 ;; It must come before the more general floatsidf2 pattern.
2816 [(set (match_operand:DF 0 "register_operand" "=f")
2817 (float:DF (match_operand:SI 1 "const_int_operand" "m")))]
2818 "! TARGET_SOFT_FLOAT"
2819 "fldw%F1 %1,%0\;fcnvxf,sgl,dbl %0,%0"
2820 [(set_attr "type" "fpalu")
2821 (set_attr "length" "8")])
;; floatsidf2 -- convert a signed SImode integer (in an FP register)
;; to double-precision float via fcnvxf with a sgl source format and
;; dbl destination format.  Disabled for soft-float.
2823 (define_insn "floatsidf2"
2824 [(set (match_operand:DF 0 "register_operand" "=f")
2825 (float:DF (match_operand:SI 1 "register_operand" "f")))]
2826 "! TARGET_SOFT_FLOAT"
2827 "fcnvxf,sgl,dbl %1,%0"
2828 [(set_attr "type" "fpalu")
2829 (set_attr "length" "4")])
2831 (define_expand "floatunssisf2"
2832 [(set (subreg:SI (match_dup 2) 1)
2833 (match_operand:SI 1 "register_operand" ""))
2834 (set (subreg:SI (match_dup 2) 0)
2836 (set (match_operand:SF 0 "register_operand" "")
2837 (float:SF (match_dup 2)))]
2838 "TARGET_SNAKE && ! TARGET_SOFT_FLOAT"
2839 "operands[2] = gen_reg_rtx (DImode);")
2841 (define_expand "floatunssidf2"
2842 [(set (subreg:SI (match_dup 2) 1)
2843 (match_operand:SI 1 "register_operand" ""))
2844 (set (subreg:SI (match_dup 2) 0)
2846 (set (match_operand:DF 0 "register_operand" "")
2847 (float:DF (match_dup 2)))]
2848 "TARGET_SNAKE && ! TARGET_SOFT_FLOAT"
2849 "operands[2] = gen_reg_rtx (DImode);")
;; floatdisf2 -- convert a signed DImode integer to single-precision
;; float.  Requires TARGET_SNAKE (PA-RISC 1.1) for the dbl fixed-point
;; source format of fcnvxf; disabled for soft-float.
2851 (define_insn "floatdisf2"
2852 [(set (match_operand:SF 0 "register_operand" "=f")
2853 (float:SF (match_operand:DI 1 "register_operand" "f")))]
2854 "TARGET_SNAKE && ! TARGET_SOFT_FLOAT"
2855 "fcnvxf,dbl,sgl %1,%0"
2856 [(set_attr "type" "fpalu")
2857 (set_attr "length" "4")])
;; floatdidf2 -- convert a signed DImode integer to double-precision
;; float (fcnvxf, dbl source and dbl destination formats).
;; Requires TARGET_SNAKE; disabled for soft-float.
2859 (define_insn "floatdidf2"
2860 [(set (match_operand:DF 0 "register_operand" "=f")
2861 (float:DF (match_operand:DI 1 "register_operand" "f")))]
2862 "TARGET_SNAKE && ! TARGET_SOFT_FLOAT"
2863 "fcnvxf,dbl,dbl %1,%0"
2864 [(set_attr "type" "fpalu")
2865 (set_attr "length" "4")])
2867 ;; Convert a float to an actual integer.
2868 ;; Truncation is performed as part of the conversion.
;; fix_truncsfsi2 -- convert single-precision float to a signed SImode
;; integer, truncating toward zero (fcnvfxt performs the truncation as
;; part of the conversion, hence the nested (fix:SI (fix:SF ...))).
;; Result lands in an FP register; disabled for soft-float.
2870 (define_insn "fix_truncsfsi2"
2871 [(set (match_operand:SI 0 "register_operand" "=f")
2872 (fix:SI (fix:SF (match_operand:SF 1 "register_operand" "f"))))]
2873 "! TARGET_SOFT_FLOAT"
2874 "fcnvfxt,sgl,sgl %1,%0"
2875 [(set_attr "type" "fpalu")
2876 (set_attr "length" "4")])
;; fix_truncdfsi2 -- convert double-precision float to a signed SImode
;; integer with truncation (fcnvfxt, dbl source format, sgl fixed-point
;; destination format).  Disabled for soft-float.
2878 (define_insn "fix_truncdfsi2"
2879 [(set (match_operand:SI 0 "register_operand" "=f")
2880 (fix:SI (fix:DF (match_operand:DF 1 "register_operand" "f"))))]
2881 "! TARGET_SOFT_FLOAT"
2882 "fcnvfxt,dbl,sgl %1,%0"
2883 [(set_attr "type" "fpalu")
2884 (set_attr "length" "4")])
;; fix_truncsfdi2 -- convert single-precision float to a signed DImode
;; integer with truncation.  The dbl fixed-point destination format
;; requires TARGET_SNAKE; disabled for soft-float.
2886 (define_insn "fix_truncsfdi2"
2887 [(set (match_operand:DI 0 "register_operand" "=f")
2888 (fix:DI (fix:SF (match_operand:SF 1 "register_operand" "f"))))]
2889 "TARGET_SNAKE && ! TARGET_SOFT_FLOAT"
2890 "fcnvfxt,sgl,dbl %1,%0"
2891 [(set_attr "type" "fpalu")
2892 (set_attr "length" "4")])
;; fix_truncdfdi2 -- convert double-precision float to a signed DImode
;; integer with truncation (fcnvfxt, dbl in both formats).
;; Requires TARGET_SNAKE; disabled for soft-float.
2894 (define_insn "fix_truncdfdi2"
2895 [(set (match_operand:DI 0 "register_operand" "=f")
2896 (fix:DI (fix:DF (match_operand:DF 1 "register_operand" "f"))))]
2897 "TARGET_SNAKE && ! TARGET_SOFT_FLOAT"
2898 "fcnvfxt,dbl,dbl %1,%0"
2899 [(set_attr "type" "fpalu")
2900 (set_attr "length" "4")])
2902 ;;- arithmetic instructions
;; adddi3 -- 64-bit add synthesized from two 32-bit adds with carry
;; propagation (%R1/%R0 select the low words of the DI register pairs).
;; For a small (11-bit) constant operand, addi adds it to the low word
;; and addc/subb folds the carry/borrow into the high word; for a
;; register operand, a full add/addc pair is emitted.
;; NOTE(review): several interior lines of this pattern (the insn
;; condition and brace/else lines) are missing from this excerpt of the
;; file -- confirm against the full source before modifying.
2904 (define_insn "adddi3"
2905 [(set (match_operand:DI 0 "register_operand" "=r")
2906 (plus:DI (match_operand:DI 1 "register_operand" "%r")
2907 (match_operand:DI 2 "arith11_operand" "rI")))]
2911 if (GET_CODE (operands[2]) == CONST_INT)
2913 if (INTVAL (operands[2]) >= 0)
2914 return \"addi %2,%R1,%R0\;addc %1,0,%0\";
2916 return \"addi %2,%R1,%R0\;subb %1,0,%0\";
2919 return \"add %R2,%R1,%R0\;addc %2,%1,%0\";
2921 [(set_attr "type" "binary")
2922 (set_attr "length" "8")])
2925 [(set (match_operand:SI 0 "register_operand" "=r")
2926 (plus:SI (not:SI (match_operand:SI 1 "register_operand" "r"))
2927 (match_operand:SI 2 "register_operand" "r")))]
2930 [(set_attr "type" "binary")
2931 (set_attr "length" "4")])
2933 ;; define_splits to optimize cases of adding a constant integer
2934 ;; to a register when the constant does not fit in 14 bits.
2936 [(set (match_operand:SI 0 "register_operand" "")
2937 (plus:SI (match_operand:SI 1 "register_operand" "")
2938 (match_operand:SI 2 "const_int_operand" "")))
2939 (clobber (match_operand:SI 4 "register_operand" ""))]
2940 "! cint_ok_for_move (INTVAL (operands[2]))
2941 && VAL_14_BITS_P (INTVAL (operands[2]) >> 1)"
2942 [(set (match_dup 4) (plus:SI (match_dup 1) (match_dup 2)))
2943 (set (match_dup 0) (plus:SI (match_dup 4) (match_dup 3)))]
2946 int val = INTVAL (operands[2]);
2947 int low = (val < 0) ? -0x2000 : 0x1fff;
2948 int rest = val - low;
2950 operands[2] = GEN_INT (rest);
2951 operands[3] = GEN_INT (low);
2955 [(set (match_operand:SI 0 "register_operand" "")
2956 (plus:SI (match_operand:SI 1 "register_operand" "")
2957 (match_operand:SI 2 "const_int_operand" "")))
2958 (clobber (match_operand:SI 4 "register_operand" ""))]
2959 "! cint_ok_for_move (INTVAL (operands[2]))"
2960 [(set (match_dup 4) (match_dup 2))
2961 (set (match_dup 0) (plus:SI (mult:SI (match_dup 4) (match_dup 3))
2965 HOST_WIDE_INT intval = INTVAL (operands[2]);
2967 /* Try dividing the constant by 2, then 4, and finally 8 to see
2968 if we can get a constant which can be loaded into a register
2969 in a single instruction (cint_ok_for_move).
2971 If that fails, try to negate the constant and subtract it
2972 from our input operand. */
2973 if (intval % 2 == 0 && cint_ok_for_move (intval / 2))
2975 operands[2] = GEN_INT (intval / 2);
2976 operands[3] = GEN_INT (2);
2978 else if (intval % 4 == 0 && cint_ok_for_move (intval / 4))
2980 operands[2] = GEN_INT (intval / 4);
2981 operands[3] = GEN_INT (4);
2983 else if (intval % 8 == 0 && cint_ok_for_move (intval / 8))
2985 operands[2] = GEN_INT (intval / 8);
2986 operands[3] = GEN_INT (8);
2988 else if (cint_ok_for_move (-intval))
2990 emit_insn (gen_rtx_SET (VOIDmode, operands[4], GEN_INT (-intval)));
2991 emit_insn (gen_subsi3 (operands[0], operands[1], operands[4]));
2998 (define_insn "addsi3"
2999 [(set (match_operand:SI 0 "register_operand" "=r,r")
3000 (plus:SI (match_operand:SI 1 "register_operand" "%r,r")
3001 (match_operand:SI 2 "arith_operand" "r,J")))]
3006 [(set_attr "type" "binary,binary")
3007 (set_attr "pa_combine_type" "addmove")
3008 (set_attr "length" "4,4")])
3010 ;; Disgusting kludge to work around reload bugs with frame pointer
3011 ;; elimination. Similar to other magic reload patterns in the
3012 ;; indexed memory operations.
3014 [(set (match_operand:SI 0 "register_operand" "=&r")
3015 (plus:SI (plus:SI (match_operand:SI 1 "register_operand" "%r")
3016 (match_operand:SI 2 "register_operand" "r"))
3017 (match_operand:SI 3 "const_int_operand" "rL")))]
3018 "reload_in_progress"
3021 if (GET_CODE (operands[3]) == CONST_INT)
3022 return \"ldo %3(%2),%0\;addl %1,%0,%0\";
3024 return \"addl %3,%2,%0\;addl %1,%0,%0\";
3026 [(set_attr "type" "binary")
3027 (set_attr "length" "8")])
;; subdi3 -- 64-bit subtract synthesized from a low-word sub followed
;; by a high-word subb (subtract with borrow); %R1/%R2/%R0 name the low
;; words of the DI register pairs.  Two 4-byte insns, hence length 8.
;; NOTE(review): the insn condition line is missing from this excerpt
;; of the file -- confirm against the full source before modifying.
3029 (define_insn "subdi3"
3030 [(set (match_operand:DI 0 "register_operand" "=r")
3031 (minus:DI (match_operand:DI 1 "register_operand" "r")
3032 (match_operand:DI 2 "register_operand" "r")))]
3034 "sub %R1,%R2,%R0\;subb %1,%2,%0"
3035 [(set_attr "type" "binary")
3036 (set_attr "length" "8")])
3038 (define_insn "subsi3"
3039 [(set (match_operand:SI 0 "register_operand" "=r,r")
3040 (minus:SI (match_operand:SI 1 "arith11_operand" "r,I")
3041 (match_operand:SI 2 "register_operand" "r,r")))]
3046 [(set_attr "type" "binary,binary")
3047 (set_attr "length" "4,4")])
3049 ;; Clobbering a "register_operand" instead of a match_scratch
3050 ;; in operand3 of millicode calls avoids spilling %r1 and
3051 ;; produces better code.
3053 ;; The mulsi3 insns set up registers for the millicode call.
3054 (define_expand "mulsi3"
3055 [(set (reg:SI 26) (match_operand:SI 1 "move_operand" ""))
3056 (set (reg:SI 25) (match_operand:SI 2 "move_operand" ""))
3057 (parallel [(set (reg:SI 29) (mult:SI (reg:SI 26) (reg:SI 25)))
3058 (clobber (match_dup 3))
3059 (clobber (reg:SI 26))
3060 (clobber (reg:SI 25))
3061 (clobber (reg:SI 31))])
3062 (set (match_operand:SI 0 "general_operand" "") (reg:SI 29))]
3066 if (TARGET_SNAKE && ! TARGET_DISABLE_FPREGS && ! TARGET_SOFT_FLOAT)
3068 rtx scratch = gen_reg_rtx (DImode);
3069 operands[1] = force_reg (SImode, operands[1]);
3070 operands[2] = force_reg (SImode, operands[2]);
3071 emit_insn (gen_umulsidi3 (scratch, operands[1], operands[2]));
3072 emit_insn (gen_rtx_SET (VOIDmode,
3074 gen_rtx_SUBREG (SImode, scratch, 1)));
3077 operands[3] = gen_reg_rtx (SImode);
3080 (define_insn "umulsidi3"
3081 [(set (match_operand:DI 0 "nonimmediate_operand" "=f")
3082 (mult:DI (zero_extend:DI (match_operand:SI 1 "nonimmediate_operand" "f"))
3083 (zero_extend:DI (match_operand:SI 2 "nonimmediate_operand" "f"))))]
3084 "TARGET_SNAKE && ! TARGET_DISABLE_FPREGS && ! TARGET_SOFT_FLOAT"
3086 [(set_attr "type" "fpmuldbl")
3087 (set_attr "length" "4")])
3090 [(set (match_operand:DI 0 "nonimmediate_operand" "=f")
3091 (mult:DI (zero_extend:DI (match_operand:SI 1 "nonimmediate_operand" "f"))
3092 (match_operand:DI 2 "uint32_operand" "f")))]
3093 "TARGET_SNAKE && ! TARGET_DISABLE_FPREGS && ! TARGET_SOFT_FLOAT"
3095 [(set_attr "type" "fpmuldbl")
3096 (set_attr "length" "4")])
3099 [(set (reg:SI 29) (mult:SI (reg:SI 26) (reg:SI 25)))
3100 (clobber (match_operand:SI 0 "register_operand" "=a"))
3101 (clobber (reg:SI 26))
3102 (clobber (reg:SI 25))
3103 (clobber (reg:SI 31))]
3105 "* return output_mul_insn (0, insn);"
3106 [(set_attr "type" "milli")
3107 (set (attr "length")
3109 ;; Target (or stub) within reach
3110 (and (lt (plus (symbol_ref "total_code_bytes") (pc))
3112 (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
3117 (ne (symbol_ref "TARGET_NO_SPACE_REGS || TARGET_FAST_INDIRECT_CALLS")
3121 ;; Out of reach, but not PIC or PORTABLE_RUNTIME
3122 ;; same as NO_SPACE_REGS code
3123 (and (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
3125 (eq (symbol_ref "flag_pic")
3129 ;; Out of range and either PIC or PORTABLE_RUNTIME
3132 ;;; Division and mod.
3133 (define_expand "divsi3"
3134 [(set (reg:SI 26) (match_operand:SI 1 "move_operand" ""))
3135 (set (reg:SI 25) (match_operand:SI 2 "move_operand" ""))
3136 (parallel [(set (reg:SI 29) (div:SI (reg:SI 26) (reg:SI 25)))
3137 (clobber (match_dup 3))
3138 (clobber (reg:SI 26))
3139 (clobber (reg:SI 25))
3140 (clobber (reg:SI 31))])
3141 (set (match_operand:SI 0 "general_operand" "") (reg:SI 29))]
3145 operands[3] = gen_reg_rtx (SImode);
3146 if (GET_CODE (operands[2]) == CONST_INT && emit_hpdiv_const (operands, 0))
3152 (div:SI (reg:SI 26) (match_operand:SI 0 "div_operand" "")))
3153 (clobber (match_operand:SI 1 "register_operand" "=a"))
3154 (clobber (reg:SI 26))
3155 (clobber (reg:SI 25))
3156 (clobber (reg:SI 31))]
3159 return output_div_insn (operands, 0, insn);"
3160 [(set_attr "type" "milli")
3161 (set (attr "length")
3163 ;; Target (or stub) within reach
3164 (and (lt (plus (symbol_ref "total_code_bytes") (pc))
3166 (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
3171 (ne (symbol_ref "TARGET_NO_SPACE_REGS || TARGET_FAST_INDIRECT_CALLS")
3175 ;; Out of reach, but not PIC or PORTABLE_RUNTIME
3176 ;; same as NO_SPACE_REGS code
3177 (and (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
3179 (eq (symbol_ref "flag_pic")
3183 ;; Out of range and either PIC or PORTABLE_RUNTIME
3186 (define_expand "udivsi3"
3187 [(set (reg:SI 26) (match_operand:SI 1 "move_operand" ""))
3188 (set (reg:SI 25) (match_operand:SI 2 "move_operand" ""))
3189 (parallel [(set (reg:SI 29) (udiv:SI (reg:SI 26) (reg:SI 25)))
3190 (clobber (match_dup 3))
3191 (clobber (reg:SI 26))
3192 (clobber (reg:SI 25))
3193 (clobber (reg:SI 31))])
3194 (set (match_operand:SI 0 "general_operand" "") (reg:SI 29))]
3198 operands[3] = gen_reg_rtx (SImode);
3199 if (GET_CODE (operands[2]) == CONST_INT && emit_hpdiv_const (operands, 1))
3205 (udiv:SI (reg:SI 26) (match_operand:SI 0 "div_operand" "")))
3206 (clobber (match_operand:SI 1 "register_operand" "=a"))
3207 (clobber (reg:SI 26))
3208 (clobber (reg:SI 25))
3209 (clobber (reg:SI 31))]
3212 return output_div_insn (operands, 1, insn);"
3213 [(set_attr "type" "milli")
3214 (set (attr "length")
3216 ;; Target (or stub) within reach
3217 (and (lt (plus (symbol_ref "total_code_bytes") (pc))
3219 (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
3224 (ne (symbol_ref "TARGET_NO_SPACE_REGS || TARGET_FAST_INDIRECT_CALLS")
3228 ;; Out of reach, but not PIC or PORTABLE_RUNTIME
3229 ;; same as NO_SPACE_REGS code
3230 (and (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
3232 (eq (symbol_ref "flag_pic")
3236 ;; Out of range and either PIC or PORTABLE_RUNTIME
3239 (define_expand "modsi3"
3240 [(set (reg:SI 26) (match_operand:SI 1 "move_operand" ""))
3241 (set (reg:SI 25) (match_operand:SI 2 "move_operand" ""))
3242 (parallel [(set (reg:SI 29) (mod:SI (reg:SI 26) (reg:SI 25)))
3243 (clobber (match_dup 3))
3244 (clobber (reg:SI 26))
3245 (clobber (reg:SI 25))
3246 (clobber (reg:SI 31))])
3247 (set (match_operand:SI 0 "general_operand" "") (reg:SI 29))]
3251 operands[3] = gen_reg_rtx (SImode);
3255 [(set (reg:SI 29) (mod:SI (reg:SI 26) (reg:SI 25)))
3256 (clobber (match_operand:SI 0 "register_operand" "=a"))
3257 (clobber (reg:SI 26))
3258 (clobber (reg:SI 25))
3259 (clobber (reg:SI 31))]
3262 return output_mod_insn (0, insn);"
3263 [(set_attr "type" "milli")
3264 (set (attr "length")
3266 ;; Target (or stub) within reach
3267 (and (lt (plus (symbol_ref "total_code_bytes") (pc))
3269 (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
3274 (ne (symbol_ref "TARGET_NO_SPACE_REGS || TARGET_FAST_INDIRECT_CALLS")
3278 ;; Out of reach, but not PIC or PORTABLE_RUNTIME
3279 ;; same as NO_SPACE_REGS code
3280 (and (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
3282 (eq (symbol_ref "flag_pic")
3286 ;; Out of range and either PIC or PORTABLE_RUNTIME
3289 (define_expand "umodsi3"
3290 [(set (reg:SI 26) (match_operand:SI 1 "move_operand" ""))
3291 (set (reg:SI 25) (match_operand:SI 2 "move_operand" ""))
3292 (parallel [(set (reg:SI 29) (umod:SI (reg:SI 26) (reg:SI 25)))
3293 (clobber (match_dup 3))
3294 (clobber (reg:SI 26))
3295 (clobber (reg:SI 25))
3296 (clobber (reg:SI 31))])
3297 (set (match_operand:SI 0 "general_operand" "") (reg:SI 29))]
3301 operands[3] = gen_reg_rtx (SImode);
3305 [(set (reg:SI 29) (umod:SI (reg:SI 26) (reg:SI 25)))
3306 (clobber (match_operand:SI 0 "register_operand" "=a"))
3307 (clobber (reg:SI 26))
3308 (clobber (reg:SI 25))
3309 (clobber (reg:SI 31))]
3312 return output_mod_insn (1, insn);"
3313 [(set_attr "type" "milli")
3314 (set (attr "length")
3316 ;; Target (or stub) within reach
3317 (and (lt (plus (symbol_ref "total_code_bytes") (pc))
3319 (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
3324 (ne (symbol_ref "TARGET_NO_SPACE_REGS || TARGET_FAST_INDIRECT_CALLS")
3328 ;; Out of reach, but not PIC or PORTABLE_RUNTIME
3329 ;; same as NO_SPACE_REGS code
3330 (and (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
3332 (eq (symbol_ref "flag_pic")
3336 ;; Out of range and either PIC or PORTABLE_RUNTIME
3339 ;;- and instructions
3340 ;; We define DImode `and` so with DImode `not` we can get
3341 ;; DImode `andn`. Other combinations are possible.
;; Bitwise AND patterns.
;; NOTE(review): several lines (insn conditions, output templates, FAIL/DONE
;; tails) are missing from this extract; comments describe only what is visible.
;;
;; anddi3: DImode AND.  If either input is not already a register, the
;; expander lets GCC decompose the operation word-at-a-time (see the
;; comment in the preamble below).
3343 (define_expand "anddi3"
3344   [(set (match_operand:DI 0 "register_operand" "")
3345 	(and:DI (match_operand:DI 1 "arith_double_operand" "")
3346 		(match_operand:DI 2 "arith_double_operand" "")))]
3350   if (! register_operand (operands[1], DImode)
3351       || ! register_operand (operands[2], DImode))
3352     /* Let GCC break this into word-at-a-time operations.  */
;; Anonymous DImode AND insn: two 32-bit "and" instructions, one per word
;; (%R1/%R2/%R0 name the low-order word registers) -- hence length 8.
3357   [(set (match_operand:DI 0 "register_operand" "=r")
3358 	(and:DI (match_operand:DI 1 "register_operand" "%r")
3359 		(match_operand:DI 2 "register_operand" "r")))]
3361   "and %1,%2,%0\;and %R1,%R2,%R0"
3362   [(set_attr "type" "binary")
3363    (set_attr "length" "8")])
3365 ; The ? for op1 makes reload prefer zdepi instead of loading a huge
3366 ; constant with ldil;ldo.
;; andsi3: SImode AND; alternative 2 handles mask constants via output_and,
;; which may emit an extract/deposit (hence type "shift" there).
3367 (define_insn "andsi3"
3368   [(set (match_operand:SI 0 "register_operand" "=r,r")
3369 	(and:SI (match_operand:SI 1 "register_operand" "%?r,0")
3370 		(match_operand:SI 2 "and_operand" "rO,P")))]
3372   "* return output_and (operands); "
3373   [(set_attr "type" "binary,shift")
3374    (set_attr "length" "4,4")])
;; Anonymous and-complement (DImode): maps (and (not x) y) onto PA's
;; "andcm" instruction, word-at-a-time.
3377   [(set (match_operand:DI 0 "register_operand" "=r")
3378 	(and:DI (not:DI (match_operand:DI 1 "register_operand" "r"))
3379 		(match_operand:DI 2 "register_operand" "r")))]
3381   "andcm %2,%1,%0\;andcm %R2,%R1,%R0"
3382   [(set_attr "type" "binary")
3383    (set_attr "length" "8")])
;; Anonymous and-complement (SImode); output template is on a dropped line,
;; presumably a single "andcm" (length 4).
3386   [(set (match_operand:SI 0 "register_operand" "=r")
3387 	(and:SI (not:SI (match_operand:SI 1 "register_operand" "r"))
3388 		(match_operand:SI 2 "register_operand" "r")))]
3391   [(set_attr "type" "binary")
3392    (set_attr "length" "4")])
;; Inclusive-OR patterns.  Structure parallels the AND patterns above.
;; NOTE(review): insn conditions and some output templates sit on lines
;; missing from this extract.
3394 (define_expand "iordi3"
3395   [(set (match_operand:DI 0 "register_operand" "")
3396 	(ior:DI (match_operand:DI 1 "arith_double_operand" "")
3397 		(match_operand:DI 2 "arith_double_operand" "")))]
3401   if (! register_operand (operands[1], DImode)
3402       || ! register_operand (operands[2], DImode))
3403     /* Let GCC break this into word-at-a-time operations.  */
;; Anonymous DImode IOR insn: one "or" per 32-bit word, length 8.
3408   [(set (match_operand:DI 0 "register_operand" "=r")
3409 	(ior:DI (match_operand:DI 1 "register_operand" "%r")
3410 		(match_operand:DI 2 "register_operand" "r")))]
3412   "or %1,%2,%0\;or %R1,%R2,%R0"
3413   [(set_attr "type" "binary")
3414    (set_attr "length" "8")])
3416 ;; Need a define_expand because we've run out of CONST_OK... characters.
;; iorsi3 expander: forces operand 2 into a register unless it is an
;; ior_operand (a mask constant output_ior can handle directly).
3417 (define_expand "iorsi3"
3418   [(set (match_operand:SI 0 "register_operand" "")
3419 	(ior:SI (match_operand:SI 1 "register_operand" "")
3420 		(match_operand:SI 2 "arith32_operand" "")))]
3424   if (! (ior_operand (operands[2], SImode)
3425          || register_operand (operands[2], SImode)))
3426     operands[2] = force_reg (SImode, operands[2]);
;; Anonymous SImode IOR-with-constant insn; output_ior may emit a deposit
;; instruction, hence the "shift" type on the second alternative.
;; NOTE(review): its (define_insn line is on a dropped line.
3430   [(set (match_operand:SI 0 "register_operand" "=r,r")
3431 	(ior:SI (match_operand:SI 1 "register_operand" "0,0")
3432 		(match_operand:SI 2 "ior_operand" "M,i")))]
3434   "* return output_ior (operands); "
3435   [(set_attr "type" "binary,shift")
3436    (set_attr "length" "4,4")])
;; Anonymous register-register SImode IOR; output template dropped,
;; presumably a single "or" (length 4).
3439   [(set (match_operand:SI 0 "register_operand" "=r")
3440 	(ior:SI (match_operand:SI 1 "register_operand" "%r")
3441 		(match_operand:SI 2 "register_operand" "r")))]
3444   [(set_attr "type" "binary")
3445    (set_attr "length" "4")])
;; XOR, negation and one's-complement patterns.  DImode versions emit two
;; 32-bit instructions (length 8); SImode versions one (length 4).
;; NOTE(review): insn conditions / some templates are on dropped lines.
3447 (define_expand "xordi3"
3448   [(set (match_operand:DI 0 "register_operand" "")
3449 	(xor:DI (match_operand:DI 1 "arith_double_operand" "")
3450 		(match_operand:DI 2 "arith_double_operand" "")))]
3454   if (! register_operand (operands[1], DImode)
3455       || ! register_operand (operands[2], DImode))
3456     /* Let GCC break this into word-at-a-time operations.  */
;; Anonymous DImode XOR insn, word-at-a-time.
3461   [(set (match_operand:DI 0 "register_operand" "=r")
3462 	(xor:DI (match_operand:DI 1 "register_operand" "%r")
3463 		(match_operand:DI 2 "register_operand" "r")))]
3465   "xor %1,%2,%0\;xor %R1,%R2,%R0"
3466   [(set_attr "type" "binary")
3467    (set_attr "length" "8")])
;; xorsi3: SImode XOR (template on a dropped line).
3469 (define_insn "xorsi3"
3470   [(set (match_operand:SI 0 "register_operand" "=r")
3471 	(xor:SI (match_operand:SI 1 "register_operand" "%r")
3472 		(match_operand:SI 2 "register_operand" "r")))]
3475   [(set_attr "type" "binary")
3476    (set_attr "length" "4")])
;; negdi2: two's-complement negate of a double word -- negate the low word,
;; then subtract-with-borrow into the high word ("subb").
3478 (define_insn "negdi2"
3479   [(set (match_operand:DI 0 "register_operand" "=r")
3480 	(neg:DI (match_operand:DI 1 "register_operand" "r")))]
3482   "sub 0,%R1,%R0\;subb 0,%1,%0"
3483   [(set_attr "type" "unary")
3484    (set_attr "length" "8")])
;; negsi2: single-word negate (template on a dropped line).
3486 (define_insn "negsi2"
3487   [(set (match_operand:SI 0 "register_operand" "=r")
3488 	(neg:SI (match_operand:SI 1 "register_operand" "r")))]
3491   [(set_attr "type" "unary")
3492    (set_attr "length" "4")])
;; one_cmpldi2 expander: requires a register input (non-register operands
;; are handled on the dropped lines that follow).
3494 (define_expand "one_cmpldi2"
3495   [(set (match_operand:DI 0 "register_operand" "")
3496 	(not:DI (match_operand:DI 1 "arith_double_operand" "")))]
3500   if (! register_operand (operands[1], DImode))
;; Anonymous DImode NOT insn: "uaddcm 0,x" computes 0 + ~x, i.e. the
;; one's complement, applied to each word.
3505   [(set (match_operand:DI 0 "register_operand" "=r")
3506 	(not:DI (match_operand:DI 1 "register_operand" "r")))]
3508   "uaddcm 0,%1,%0\;uaddcm 0,%R1,%R0"
3509   [(set_attr "type" "unary")
3510    (set_attr "length" "8")])
;; one_cmplsi2: single-word one's complement (template dropped).
3512 (define_insn "one_cmplsi2"
3513   [(set (match_operand:SI 0 "register_operand" "=r")
3514 	(not:SI (match_operand:SI 1 "register_operand" "r")))]
3517   [(set_attr "type" "unary")
3518    (set_attr "length" "4")])
3520 ;; Floating point arithmetic instructions.
;;
;; All patterns below are guarded by "! TARGET_SOFT_FLOAT", take FP
;; registers only ("f" constraints), and are 4 bytes long.  The insn
;; "type" feeds the scheduler (fpalu vs. fpmul/fpdiv/fpsqrt, single vs.
;; double); "pa_combine_type" marks add/sub and multiply insns as
;; candidates for the PA7100LC/PA7200 combine pass.
;; NOTE(review): the assembler output templates sit on lines missing from
;; this extract (presumably fadd/fsub/fmpy/fdiv/fsqrt/fneg/fabs forms).
3522 (define_insn "adddf3"
3523   [(set (match_operand:DF 0 "register_operand" "=f")
3524 	(plus:DF (match_operand:DF 1 "register_operand" "f")
3525 		 (match_operand:DF 2 "register_operand" "f")))]
3526   "! TARGET_SOFT_FLOAT"
3528   [(set_attr "type" "fpalu")
3529    (set_attr "pa_combine_type" "faddsub")
3530    (set_attr "length" "4")])
3532 (define_insn "addsf3"
3533   [(set (match_operand:SF 0 "register_operand" "=f")
3534 	(plus:SF (match_operand:SF 1 "register_operand" "f")
3535 		 (match_operand:SF 2 "register_operand" "f")))]
3536   "! TARGET_SOFT_FLOAT"
3538   [(set_attr "type" "fpalu")
3539    (set_attr "pa_combine_type" "faddsub")
3540    (set_attr "length" "4")])
3542 (define_insn "subdf3"
3543   [(set (match_operand:DF 0 "register_operand" "=f")
3544 	(minus:DF (match_operand:DF 1 "register_operand" "f")
3545 		  (match_operand:DF 2 "register_operand" "f")))]
3546   "! TARGET_SOFT_FLOAT"
3548   [(set_attr "type" "fpalu")
3549    (set_attr "pa_combine_type" "faddsub")
3550    (set_attr "length" "4")])
3552 (define_insn "subsf3"
3553   [(set (match_operand:SF 0 "register_operand" "=f")
3554 	(minus:SF (match_operand:SF 1 "register_operand" "f")
3555 		  (match_operand:SF 2 "register_operand" "f")))]
3556   "! TARGET_SOFT_FLOAT"
3558   [(set_attr "type" "fpalu")
3559    (set_attr "pa_combine_type" "faddsub")
3560    (set_attr "length" "4")])
3562 (define_insn "muldf3"
3563   [(set (match_operand:DF 0 "register_operand" "=f")
3564 	(mult:DF (match_operand:DF 1 "register_operand" "f")
3565 		 (match_operand:DF 2 "register_operand" "f")))]
3566   "! TARGET_SOFT_FLOAT"
3568   [(set_attr "type" "fpmuldbl")
3569    (set_attr "pa_combine_type" "fmpy")
3570    (set_attr "length" "4")])
3572 (define_insn "mulsf3"
3573   [(set (match_operand:SF 0 "register_operand" "=f")
3574 	(mult:SF (match_operand:SF 1 "register_operand" "f")
3575 		 (match_operand:SF 2 "register_operand" "f")))]
3576   "! TARGET_SOFT_FLOAT"
3578   [(set_attr "type" "fpmulsgl")
3579    (set_attr "pa_combine_type" "fmpy")
3580    (set_attr "length" "4")])
3582 (define_insn "divdf3"
3583   [(set (match_operand:DF 0 "register_operand" "=f")
3584 	(div:DF (match_operand:DF 1 "register_operand" "f")
3585 		(match_operand:DF 2 "register_operand" "f")))]
3586   "! TARGET_SOFT_FLOAT"
3588   [(set_attr "type" "fpdivdbl")
3589    (set_attr "length" "4")])
3591 (define_insn "divsf3"
3592   [(set (match_operand:SF 0 "register_operand" "=f")
3593 	(div:SF (match_operand:SF 1 "register_operand" "f")
3594 		(match_operand:SF 2 "register_operand" "f")))]
3595   "! TARGET_SOFT_FLOAT"
3597   [(set_attr "type" "fpdivsgl")
3598    (set_attr "length" "4")])
3600 (define_insn "negdf2"
3601   [(set (match_operand:DF 0 "register_operand" "=f")
3602 	(neg:DF (match_operand:DF 1 "register_operand" "f")))]
3603   "! TARGET_SOFT_FLOAT"
3605   [(set_attr "type" "fpalu")
3606    (set_attr "length" "4")])
3608 (define_insn "negsf2"
3609   [(set (match_operand:SF 0 "register_operand" "=f")
3610 	(neg:SF (match_operand:SF 1 "register_operand" "f")))]
3611   "! TARGET_SOFT_FLOAT"
3613   [(set_attr "type" "fpalu")
3614    (set_attr "length" "4")])
3616 (define_insn "absdf2"
3617   [(set (match_operand:DF 0 "register_operand" "=f")
3618 	(abs:DF (match_operand:DF 1 "register_operand" "f")))]
3619   "! TARGET_SOFT_FLOAT"
3621   [(set_attr "type" "fpalu")
3622    (set_attr "length" "4")])
3624 (define_insn "abssf2"
3625   [(set (match_operand:SF 0 "register_operand" "=f")
3626 	(abs:SF (match_operand:SF 1 "register_operand" "f")))]
3627   "! TARGET_SOFT_FLOAT"
3629   [(set_attr "type" "fpalu")
3630    (set_attr "length" "4")])
3632 (define_insn "sqrtdf2"
3633   [(set (match_operand:DF 0 "register_operand" "=f")
3634 	(sqrt:DF (match_operand:DF 1 "register_operand" "f")))]
3635   "! TARGET_SOFT_FLOAT"
3637   [(set_attr "type" "fpsqrtdbl")
3638    (set_attr "length" "4")])
3640 (define_insn "sqrtsf2"
3641   [(set (match_operand:SF 0 "register_operand" "=f")
3642 	(sqrt:SF (match_operand:SF 1 "register_operand" "f")))]
3643   "! TARGET_SOFT_FLOAT"
3645   [(set_attr "type" "fpsqrtsgl")
3646    (set_attr "length" "4")])
3648 ;;- Shift instructions
3650 ;; Optimized special case of shifting.
;; Two anonymous load patterns: a logical right shift of a memory word can
;; sometimes be done as a (half)word load.  Templates/conditions are on
;; dropped lines.
3653   [(set (match_operand:SI 0 "register_operand" "=r")
3654 	(lshiftrt:SI (match_operand:SI 1 "memory_operand" "m")
3658   [(set_attr "type" "load")
3659    (set_attr "length" "4")])
3662   [(set (match_operand:SI 0 "register_operand" "=r")
3663 	(lshiftrt:SI (match_operand:SI 1 "memory_operand" "m")
3667   [(set_attr "type" "load")
3668    (set_attr "length" "4")])
;; Shift-and-add: (x * {2,4,8}) + y maps onto sh1addl/sh2addl/sh3addl;
;; %O3 prints the shift amount derived from the shadd_operand constant.
3671   [(set (match_operand:SI 0 "register_operand" "=r")
3672 	(plus:SI (mult:SI (match_operand:SI 2 "register_operand" "r")
3673 			  (match_operand:SI 3 "shadd_operand" ""))
3674 		 (match_operand:SI 1 "register_operand" "r")))]
3676   "sh%O3addl %2,%1,%0"
3677   [(set_attr "type" "binary")
3678    (set_attr "length" "4")])
3680 ;; This variant of the above insn can occur if the first operand
3681 ;; is the frame pointer.  This is a kludge, but there doesn't
3682 ;; seem to be a way around it.  Only recognize it while reloading.
3683 ;; Note how operand 3 uses a predicate of "const_int_operand", but
3684 ;; has constraints allowing a register.  I don't know how this works,
3685 ;; but it somehow makes sure that out-of-range constants are placed
3686 ;; in a register which somehow magically is a "const_int_operand".
3687 ;; (this was stolen from alpha.md, I'm not going to try and change it.
3690   [(set (match_operand:SI 0 "register_operand" "=&r,r")
3691 	(plus:SI (plus:SI (mult:SI (match_operand:SI 2 "register_operand" "r,r")
3692 				   (match_operand:SI 4 "shadd_operand" ""))
3693 			  (match_operand:SI 1 "register_operand" "r,r"))
3694 		 (match_operand:SI 3 "const_int_operand" "r,J")))]
3695   "reload_in_progress"
3697    sh%O4addl %2,%1,%0\;addl %3,%0,%0
3698    sh%O4addl %2,%1,%0\;ldo %3(%0),%0"
3699   [(set_attr "type" "multi")
3700    (set_attr "length" "8")])
3702 ;; This anonymous pattern and splitter wins because it reduces the latency
3703 ;; of the shadd sequence without increasing the latency of the shift.
3705 ;; We want to make sure and split up the operations for the scheduler since
3706 ;; these instructions can (and should) schedule independently.
3708 ;; It would be clearer if combine used the same operator for both expressions,
3709 ;; it's somewhat confusing to have a mult in one operation and an ashift
3712 ;; If this pattern is not split before register allocation, then we must expose
3713 ;; the fact that operand 4 is set before operands 1, 2 and 3 have been read.
;; Combined shadd + shift pattern (two parallel sets); the condition ties
;; the ashift count (op 5) to log2 of the shadd multiplier (op 3).
3715   [(set (match_operand:SI 0 "register_operand" "=r")
3716 	(plus:SI (mult:SI (match_operand:SI 2 "register_operand" "r")
3717 			  (match_operand:SI 3 "shadd_operand" ""))
3718 		 (match_operand:SI 1 "register_operand" "r")))
3719    (set (match_operand:SI 4 "register_operand" "=&r")
3720 	(ashift:SI (match_dup 2)
3721 		   (match_operand:SI 5 "const_int_operand" "i")))]
3722   "INTVAL (operands[5]) == exact_log2 (INTVAL (operands[3]))"
3724   [(set_attr "type" "binary")
3725    (set_attr "length" "8")])
;; Matching define_split: emit the shift first, then the shadd, so the two
;; schedule independently (the shift result feeds nothing in the shadd).
3728   [(set (match_operand:SI 0 "register_operand" "=r")
3729 	(plus:SI (mult:SI (match_operand:SI 2 "register_operand" "r")
3730 			  (match_operand:SI 3 "shadd_operand" ""))
3731 		 (match_operand:SI 1 "register_operand" "r")))
3732    (set (match_operand:SI 4 "register_operand" "=&r")
3733 	(ashift:SI (match_dup 2)
3734 		   (match_operand:SI 5 "const_int_operand" "i")))]
3735   "INTVAL (operands[5]) == exact_log2 (INTVAL (operands[3]))"
3736   [(set (match_dup 4) (ashift:SI (match_dup 2) (match_dup 5)))
3737    (set (match_dup 0) (plus:SI (mult:SI (match_dup 2) (match_dup 3))
;; Left-shift patterns.  Variable shifts are implemented with the PA
;; "zvdep" (variable deposit) instruction, which deposits relative to
;; 31 - SAR; hence the expander computes temp = 31 - count first.
;; NOTE(review): some conditions/templates are on dropped lines.
3741 (define_expand "ashlsi3"
3742   [(set (match_operand:SI 0 "register_operand" "")
3743 	(ashift:SI (match_operand:SI 1 "lhs_lshift_operand" "")
3744 		   (match_operand:SI 2 "arith32_operand" "")))]
3748   if (GET_CODE (operands[2]) != CONST_INT)
3750       rtx temp = gen_reg_rtx (SImode);
3751       emit_insn (gen_subsi3 (temp, GEN_INT (31), operands[2]));
3752       if (GET_CODE (operands[1]) == CONST_INT)
3753 	emit_insn (gen_zvdep_imm (operands[0], operands[1], temp));
3755 	emit_insn (gen_zvdep32 (operands[0], operands[1], temp));
3758   /* Make sure both inputs are not constants,
3759      there are no patterns for that.  */
3760   operands[1] = force_reg (SImode, operands[1]);
;; Constant-count left shift: a single "zdep" (%P2/%L2 print the derived
;; bit position and length).
3764   [(set (match_operand:SI 0 "register_operand" "=r")
3765 	(ashift:SI (match_operand:SI 1 "register_operand" "r")
3766 		   (match_operand:SI 2 "const_int_operand" "n")))]
3768   "zdep %1,%P2,%L2,%0"
3769   [(set_attr "type" "shift")
3770    (set_attr "length" "4")])
3772 ; Match cases of op1 a CONST_INT here that zvdep_imm doesn't handle.
3773 ; Doing it like this makes slightly better code since reload can
3774 ; replace a register with a known value in range -16..15 with a
3775 ; constant.  Ideally, we would like to merge zvdep32 and zvdep_imm,
3776 ; but since we have no more CONST_OK... characters, that is not
3778 (define_insn "zvdep32"
3779   [(set (match_operand:SI 0 "register_operand" "=r,r")
3780 	(ashift:SI (match_operand:SI 1 "arith5_operand" "r,L")
3781 		   (minus:SI (const_int 31)
3782 			     (match_operand:SI 2 "register_operand" "q"))))]
3787   [(set_attr "type" "shift,shift")
3788    (set_attr "length" "4,4")])
;; zvdep_imm: variable left shift of a constant of the form x...x0...0
;; (lhs_lshift_cint_operand).  The C code re-encodes the constant into the
;; 5-bit immediate + length form that "zvdepi" wants.
3790 (define_insn "zvdep_imm"
3791   [(set (match_operand:SI 0 "register_operand" "=r")
3792 	(ashift:SI (match_operand:SI 1 "lhs_lshift_cint_operand" "")
3793 		   (minus:SI (const_int 31)
3794 			     (match_operand:SI 2 "register_operand" "q"))))]
3798   int x = INTVAL (operands[1]);
3799   operands[2] = GEN_INT (4 + exact_log2 ((x >> 4) + 1));
3800   operands[1] = GEN_INT ((x & 0xf) - 0x10);
3801   return \"zvdepi %1,%2,%0\";
3803   [(set_attr "type" "shift")
3804    (set_attr "length" "4")])
;; vdepi_ior: OR-in a variable-position run of ones (constant 0...01...1)
;; using "vdepi -1": deposit log2(x+1) one-bits at the SAR position.
3806 (define_insn "vdepi_ior"
3807   [(set (match_operand:SI 0 "register_operand" "=r")
3808 	(ior:SI (ashift:SI (match_operand:SI 1 "const_int_operand" "")
3809 			   (minus:SI (const_int 31)
3810 				     (match_operand:SI 2 "register_operand" "q")))
3811 		(match_operand:SI 3 "register_operand" "0")))]
3812   ; accept ...0001...1, can this be generalized?
3813   "exact_log2 (INTVAL (operands[1]) + 1) >= 0"
3816   int x = INTVAL (operands[1]);
3817   operands[2] = GEN_INT (exact_log2 (x + 1));
3818   return \"vdepi -1,%2,%0\";
3820   [(set_attr "type" "shift")
3821    (set_attr "length" "4")])
;; vdepi_and: AND with a rotated mask of -2 -- clear bits at the SAR
;; position with "vdepi 0".  Currently restricted to INTVAL == -2.
3823 (define_insn "vdepi_and"
3824   [(set (match_operand:SI 0 "register_operand" "=r")
3825 	(and:SI (rotate:SI (match_operand:SI 1 "const_int_operand" "")
3826 			   (minus:SI (const_int 31)
3827 				     (match_operand:SI 2 "register_operand" "q")))
3828 		(match_operand:SI 3 "register_operand" "0")))]
3829   ; this can be generalized...!
3830   "INTVAL (operands[1]) == -2"
3833   int x = INTVAL (operands[1]);
3834   operands[2] = GEN_INT (exact_log2 ((~x) + 1));
3835   return \"vdepi 0,%2,%0\";
3837   [(set_attr "type" "shift")
3838    (set_attr "length" "4")])
;; Right shifts, rotates and double-shift (shd) patterns.
;; Variable right shifts go through SAR ("q" register class): the expander
;; stores 31 - count into SAR, then vextrs/vextru/vshd operate relative to it.
;; NOTE(review): some conditions/templates are on dropped lines.
3840 (define_expand "ashrsi3"
3841   [(set (match_operand:SI 0 "register_operand" "")
3842 	(ashiftrt:SI (match_operand:SI 1 "register_operand" "")
3843 		     (match_operand:SI 2 "arith32_operand" "")))]
3847   if (GET_CODE (operands[2]) != CONST_INT)
3849       rtx temp = gen_reg_rtx (SImode);
3850       emit_insn (gen_subsi3 (temp, GEN_INT (31), operands[2]));
3851       emit_insn (gen_vextrs32 (operands[0], operands[1], temp));
;; Constant-count arithmetic right shift: one "extrs" (signed extract).
3857   [(set (match_operand:SI 0 "register_operand" "=r")
3858 	(ashiftrt:SI (match_operand:SI 1 "register_operand" "r")
3859 		     (match_operand:SI 2 "const_int_operand" "n")))]
3861   "extrs %1,%P2,%L2,%0"
3862   [(set_attr "type" "shift")
3863    (set_attr "length" "4")])
;; Variable arithmetic right shift via SAR (template dropped; presumably
;; "vextrs").
3865 (define_insn "vextrs32"
3866   [(set (match_operand:SI 0 "register_operand" "=r")
3867 	(ashiftrt:SI (match_operand:SI 1 "register_operand" "r")
3868 		     (minus:SI (const_int 31)
3869 			       (match_operand:SI 2 "register_operand" "q"))))]
3872   [(set_attr "type" "shift")
3873    (set_attr "length" "4")])
;; Logical right shift: SAR-based alternative (dropped line) or constant
;; count via "extru" (unsigned extract).
3875 (define_insn "lshrsi3"
3876   [(set (match_operand:SI 0 "register_operand" "=r,r")
3877 	(lshiftrt:SI (match_operand:SI 1 "register_operand" "r,r")
3878 		     (match_operand:SI 2 "arith32_operand" "q,n")))]
3882    extru %1,%P2,%L2,%0"
3883   [(set_attr "type" "shift")
3884    (set_attr "length" "4")])
;; Rotate right: constant counts use "shd x,x,n"; variable counts use
;; "vshd x,x" after SAR has been loaded (setup on dropped lines).
3886 (define_insn "rotrsi3"
3887   [(set (match_operand:SI 0 "register_operand" "=r,r")
3888 	(rotatert:SI (match_operand:SI 1 "register_operand" "r,r")
3889 		     (match_operand:SI 2 "arith32_operand" "q,n")))]
3893   if (GET_CODE (operands[2]) == CONST_INT)
3895       operands[2] = GEN_INT (INTVAL (operands[2]) & 31);
3896       return \"shd %1,%1,%2,%0\";
3899     return \"vshd %1,%1,%0\";
3901   [(set_attr "type" "shift")
3902    (set_attr "length" "4")])
;; Two anonymous patterns recognizing (x << n) op (y >> (32-n)) for
;; op in {plus,xor,ior} -- a funnel shift, matched in either operand
;; order -- which maps onto a single "shd".  Template lines dropped.
3905   [(set (match_operand:SI 0 "register_operand" "=r")
3906 	(match_operator:SI 5 "plus_xor_ior_operator"
3907 	  [(ashift:SI (match_operand:SI 1 "register_operand" "r")
3908 		      (match_operand:SI 3 "const_int_operand" "n"))
3909 	   (lshiftrt:SI (match_operand:SI 2 "register_operand" "r")
3910 			(match_operand:SI 4 "const_int_operand" "n"))]))]
3911   "INTVAL (operands[3]) + INTVAL (operands[4]) == 32"
3913   [(set_attr "type" "shift")
3914    (set_attr "length" "4")])
3917   [(set (match_operand:SI 0 "register_operand" "=r")
3918 	(match_operator:SI 5 "plus_xor_ior_operator"
3919 	  [(lshiftrt:SI (match_operand:SI 2 "register_operand" "r")
3920 			(match_operand:SI 4 "const_int_operand" "n"))
3921 	   (ashift:SI (match_operand:SI 1 "register_operand" "r")
3922 		      (match_operand:SI 3 "const_int_operand" "n"))]))]
3923   "INTVAL (operands[3]) + INTVAL (operands[4]) == 32"
3925   [(set_attr "type" "shift")
3926    (set_attr "length" "4")])
;; (x << n) & mask where mask >> n is 2^k - 1: a single "zdep" deposits
;; the low k bits of x at position 31-n.
3929   [(set (match_operand:SI 0 "register_operand" "=r")
3930 	(and:SI (ashift:SI (match_operand:SI 1 "register_operand" "r")
3931 			   (match_operand:SI 2 "const_int_operand" ""))
3932 		(match_operand:SI 3 "const_int_operand" "")))]
3933   "exact_log2 (1 + (INTVAL (operands[3]) >> (INTVAL (operands[2]) & 31))) >= 0"
3936   int cnt = INTVAL (operands[2]) & 31;
3937   operands[3] = GEN_INT (exact_log2 (1 + (INTVAL (operands[3]) >> cnt)));
3938   operands[2] = GEN_INT (31 - cnt);
3939   return \"zdep %1,%2,%3,%0\";
3941   [(set_attr "type" "shift")
3942    (set_attr "length" "4")])
3944 ;; Unconditional and other jump instructions.
;; Simple return, usable only when the epilogue is trivial
;; (hppa_can_use_return_insn_p).  Template on a dropped line.
3946 (define_insn "return"
3948   "hppa_can_use_return_insn_p ()"
3950   [(set_attr "type" "branch")
3951    (set_attr "length" "4")])
3953 ;; Use a different pattern for functions which have non-trivial
3954 ;; epilogues so as not to confuse jump and reorg.
3955 (define_insn "return_internal"
3960   [(set_attr "type" "branch")
3961    (set_attr "length" "4")])
;; Prologue/epilogue are expanded entirely in C (pa.c helpers).
3963 (define_expand "prologue"
3966   "hppa_expand_prologue ();DONE;")
3968 (define_expand "epilogue"
3973   /* Try to use the trivial return first.  Else use the full
3975   if (hppa_can_use_return_insn_p ())
3976     emit_jump_insn (gen_return ());
3979       hppa_expand_epilogue ();
3980       emit_jump_insn (gen_return_internal ());
3985 ;; Special because we use the value placed in %r2 by the bl instruction
3986 ;; from within its delay slot to set the value for the 2nd parameter to
;; call_profiler: bl to _mcount with the label offset loaded into %r25
;; from the return address left in %r2 by the bl's delay slot.
3988 (define_insn "call_profiler"
3989   [(unspec_volatile [(const_int 0)] 0)
3990    (use (match_operand:SI 0 "const_int_operand" ""))]
3992   "bl _mcount,%%r2\;ldo %0(%%r2),%%r25"
3993   [(set_attr "type" "multi")
3994    (set_attr "length" "8")])
;; blockage: zero-length scheduling barrier; emitted around PIC-register
;; restores and untyped calls to pin instruction order.
3996 (define_insn "blockage"
3997   [(unspec_volatile [(const_int 2)] 0)]
4000   [(set_attr "length" "0")])
;; Unconditional jump.  Short branches use "bl"; out-of-range branches
;; spill %r1 to sp-16, load the target into %r1 (PIC-relative or via
;; ldil/be for absolute), branch indirect, and restore %r1 in the delay
;; slot.  Long branches are only supported when not optimizing (see the
;; comment below).  The length attr picks 4/16/24 bytes accordingly.
;; NOTE(review): the (define_insn line and several interior lines are
;; missing from this extract; fix below removes a stray empty statement
;; (";;") after the non-PIC output_asm_insn call -- no behavior change.
4003   [(set (pc) (label_ref (match_operand 0 "" "")))]
4007   extern int optimize;
4009   if (GET_MODE (insn) == SImode)
4010     return \"bl %l0,0%#\";
4012   /* An unconditional branch which can reach its target.  */
4013   if (get_attr_length (insn) != 24
4014       && get_attr_length (insn) != 16)
4015     return \"bl%* %l0,0\";
4017   /* An unconditional branch which can not reach its target.
4019      We need to be able to use %r1 as a scratch register; however,
4020      we can never be sure whether or not it's got a live value in
4021      it.  Therefore, we must restore its original value after the
4024      To make matters worse, we don't have a stack slot which we
4025      can always clobber.  sp-12/sp-16 shouldn't ever have a live
4026      value during a non-optimizing compilation, so we use those
4027      slots for now.  We don't support very long branches when
4028      optimizing -- they should be quite rare when optimizing.
4030      Really the way to go long term is a register scavenger; goto
4031      the target of the jump and find a register which we can use
4032      as a scratch to hold the value in %r1.  */
4034   /* We don't know how to register scavenge yet.  */
4038   /* First store %r1 into the stack.  */
4039   output_asm_insn (\"stw %%r1,-16(%%r30)\", operands);
4041   /* Now load the target address into %r1 and do an indirect jump
4042      to the value specified in %r1.  Be careful to generate PIC
4047       xoperands[0] = operands[0];
4048       xoperands[1] = gen_label_rtx ();
4050       output_asm_insn (\"bl .+8,%%r1\\n\\taddil L'%l0-%l1,%%r1\", xoperands);
4051       ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, \"L\",
4052 				 CODE_LABEL_NUMBER (xoperands[1]));
4053       output_asm_insn (\"ldo R'%l0-%l1(%%r1),%%r1\\n\\tbv 0(%%r1)\",
4057     output_asm_insn (\"ldil L'%l0,%%r1\\n\\tbe R'%l0(%%sr4,%%r1)\", operands);
4059   /* And restore the value of %r1 in the delay slot.  We're not optimizing,
4060      so we know nothing else can be in the delay slot.  */
4061   return \"ldw -16(%%r30),%%r1\";
4063  [(set_attr "type" "uncond_branch")
4064   (set_attr "pa_combine_type" "uncond_branch")
4065   (set (attr "length")
4066     (cond [(eq (symbol_ref "jump_in_call_delay (insn)") (const_int 1))
4067 	   (if_then_else (lt (abs (minus (match_dup 0)
4068 					 (plus (pc) (const_int 8))))
4072 	   (ge (abs (minus (match_dup 0) (plus (pc) (const_int 8))))
4074 	   (if_then_else (eq (symbol_ref "flag_pic") (const_int 0))
4079 ;; Subroutines of "casesi".
4080 ;; operand 0 is index
4081 ;; operand 1 is the minimum bound
4082 ;; operand 2 is the maximum bound - minimum bound + 1
4083 ;; operand 3 is CODE_LABEL for the table;
4084 ;; operand 4 is the CODE_LABEL to go to if index out of range.
;; casesi expander: normalize the index to be zero-based (subtract the
;; minimum via an add of its negation), range-check against operand 2
;; (branching to operand 4 when out of range), then dispatch through the
;; table.  TARGET_BIG_SWITCH doubles the index for 8-byte table entries.
;; NOTE(review): several emit lines (including the big-switch dispatch and
;; the expander's DONE) are on dropped lines.
4086 (define_expand "casesi"
4087   [(match_operand:SI 0 "general_operand" "")
4088    (match_operand:SI 1 "const_int_operand" "")
4089    (match_operand:SI 2 "const_int_operand" "")
4090    (match_operand 3 "" "")
4091    (match_operand 4 "" "")]
4095   if (GET_CODE (operands[0]) != REG)
4096     operands[0] = force_reg (SImode, operands[0]);
4098   if (operands[1] != const0_rtx)
4100       rtx reg = gen_reg_rtx (SImode);
4102       operands[1] = GEN_INT (-INTVAL (operands[1]));
4103       if (!INT_14_BITS (operands[1]))
4104 	operands[1] = force_reg (SImode, operands[1]);
4105       emit_insn (gen_addsi3 (reg, operands[0], operands[1]));
4110   if (!INT_5_BITS (operands[2]))
4111     operands[2] = force_reg (SImode, operands[2]);
4113   emit_insn (gen_cmpsi (operands[0], operands[2]));
4114   emit_jump_insn (gen_bgtu (operands[4]));
4115   if (TARGET_BIG_SWITCH)
4117       rtx temp = gen_reg_rtx (SImode);
4118       emit_move_insn (temp, gen_rtx_PLUS (SImode, operands[0], operands[0]));
4121   emit_jump_insn (gen_casesi0 (operands[0], operands[3]));
;; casesi0: pc-relative indexed dispatch through the branch table
;; (template on a dropped line; length 8 => two instructions).
4125 (define_insn "casesi0"
4127 			(mem:SI (plus:SI (pc)
4128 					 (match_operand:SI 0 "register_operand" "r")))
4129 			(label_ref (match_operand 1 "" ""))))]
4132   [(set_attr "type" "multi")
4133    (set_attr "length" "8")])
4135 ;; Need nops for the calls because execution is supposed to continue
4136 ;; past; we don't want to nullify an instruction that we need.
4137 ;;- jump to subroutine
;; call expander: routes named calls (SYMBOL_REF) to call_internal_symref
;; and pointer calls -- which use a different calling convention -- to
;; call_internal_reg via %r22, deliberately keeping the two forms distinct
;; so CSE cannot merge them.  When PIC, the PIC register is marked used on
;; the call and restored afterwards, fenced by a blockage insn.
;; NOTE(review): interior lines (declarations, else branches, DONE) are
;; on dropped lines in this extract.
4139 (define_expand "call"
4140   [(parallel [(call (match_operand:SI 0 "" "")
4141 		    (match_operand 1 "" ""))
4142 	      (clobber (reg:SI 2))])]
4149   if (TARGET_PORTABLE_RUNTIME)
4150     op = force_reg (SImode, XEXP (operands[0], 0));
4152     op = XEXP (operands[0], 0);
4154   /* Use two different patterns for calls to explicitly named functions
4155      and calls through function pointers.  This is necessary as these two
4156      types of calls use different calling conventions, and CSE might try
4157      to change the named call into an indirect call in some cases (using
4158      two patterns keeps CSE from performing this optimization).  */
4159   if (GET_CODE (op) == SYMBOL_REF)
4160     call_insn = emit_call_insn (gen_call_internal_symref (op, operands[1]));
4163       rtx tmpreg = gen_rtx_REG (SImode, 22);
4164       emit_move_insn (tmpreg, force_reg (SImode, op));
4165       call_insn = emit_call_insn (gen_call_internal_reg (operands[1]));
4170       use_reg (&CALL_INSN_FUNCTION_USAGE (call_insn), pic_offset_table_rtx);
4172       /* After each call we must restore the PIC register, even if it
4173 	 doesn't appear to be used.
4175 	 This will set regs_ever_live for the callee saved register we
4176 	 stored the PIC register in.  */
4177       emit_move_insn (pic_offset_table_rtx,
4178 		      gen_rtx_REG (SImode, PIC_OFFSET_TABLE_REGNUM_SAVED));
4179       emit_insn (gen_rtx_USE (VOIDmode, pic_offset_table_rtx));
4181       /* Gross.  We have to keep the scheduler from moving the restore
4182 	 of the PIC register away from the call.  SCHED_GROUP_P is
4183 	 supposed to do this, but for some reason the compiler will
4184 	 go into an infinite loop when we use that.
4186 	 This method (blockage insn) may make worse code (then again
4187 	 it may not since calls are nearly blockages anyway), but at
4188 	 least it should work.  */
4189       emit_insn (gen_blockage ());
;; Direct (named) call: output_call emits the actual sequence; length is
;; 4 when the target/stub is reachable, else a 52-byte (non-PIC) or
;; 68-byte (PIC) long-call sequence -- see the attr cond below.
4194 (define_insn "call_internal_symref"
4195   [(call (mem:SI (match_operand:SI 0 "call_operand_address" ""))
4196 	 (match_operand 1 "" "i"))
4197    (clobber (reg:SI 2))
4198    (use (const_int 0))]
4199   "! TARGET_PORTABLE_RUNTIME"
4202   output_arg_descriptor (insn);
4203   return output_call (insn, operands[0]);
4205   [(set_attr "type" "call")
4206    (set (attr "length")
4207 ;; If we're sure that we can either reach the target or that the
4208 ;; linker can use a long-branch stub, then the length is 4 bytes.
4210 ;; For long-calls the length will be either 52 bytes (non-pic)
4211 ;; or 68 bytes (pic).  */
4212 ;; Else we have to use a long-call;
4213 	(if_then_else (lt (plus (symbol_ref "total_code_bytes") (pc))
4216 		      (if_then_else (eq (symbol_ref "flag_pic")
;; Indirect call through %r22 via the $$dyncall millicode routine.
;; The computed length selects among: direct ble (NO_SPACE_REGS /
;; FAST_INDIRECT, 8), reachable $$dyncall (8), absolute long form (12),
;; portable-runtime long form (20), and the PIC sequence emitted by the
;; trailing C code.
4221 (define_insn "call_internal_reg"
4222   [(call (mem:SI (reg:SI 22))
4223 	 (match_operand 0 "" "i"))
4224    (clobber (reg:SI 2))
4225    (use (const_int 1))]
4231   /* First the special case for kernels, level 0 systems, etc.  */
4232   if (TARGET_NO_SPACE_REGS || TARGET_FAST_INDIRECT_CALLS)
4233     return \"ble 0(%%sr4,%%r22)\;copy %%r31,%%r2\";
4235   /* Now the normal case -- we can reach $$dyncall directly or
4236      we're sure that we can get there via a long-branch stub.
4238      No need to check target flags as the length uniquely identifies
4239      the remaining cases.  */
4240   if (get_attr_length (insn) == 8)
4241     return \".CALL\\tARGW0=GR\;bl $$dyncall,%%r31\;copy %%r31,%%r2\";
4243   /* Long millicode call, but we are not generating PIC or portable runtime
4245   if (get_attr_length (insn) == 12)
4246     return \".CALL\\tARGW0=GR\;ldil L%%$$dyncall,%%r2\;ble R%%$$dyncall(%%sr4,%%r2)\;copy %%r31,%%r2\";
4248   /* Long millicode call for portable runtime.  */
4249   if (get_attr_length (insn) == 20)
4250     return \"ldil L%%$$dyncall,%%r31\;ldo R%%$$dyncall(%%r31),%%r31\;blr 0,%%r2\;bv,n 0(%%r31)\;nop\";
4252   /* If we're generating PIC code.  */
4253   xoperands[0] = operands[0];
4254   xoperands[1] = gen_label_rtx ();
4255   output_asm_insn (\"bl .+8,%%r1\", xoperands);
4256   output_asm_insn (\"addil L%%$$dyncall-%1,%%r1\", xoperands);
4257   ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, \"L\",
4258 			     CODE_LABEL_NUMBER (xoperands[1]));
4259   output_asm_insn (\"ldo R%%$$dyncall-%1(%%r1),%%r1\", xoperands);
4260   output_asm_insn (\"blr 0,%%r2\", xoperands);
4261   output_asm_insn (\"bv,n 0(%%r1)\\n\\tnop\", xoperands);
4264  [(set_attr "type" "dyncall")
4265   (set (attr "length")
4267 	  ;; First NO_SPACE_REGS
4268 	  (ne (symbol_ref "TARGET_NO_SPACE_REGS || TARGET_FAST_INDIRECT_CALLS")
4272 	  ;; Target (or stub) within reach
4273 	  (and (lt (plus (symbol_ref "total_code_bytes") (pc))
4275 	       (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
4279 	  ;; Out of reach, but not PIC or PORTABLE_RUNTIME
4280 	  (and (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
4282 	       (eq (symbol_ref "flag_pic")
4286 	  (ne (symbol_ref "TARGET_PORTABLE_RUNTIME")
4290 	  ;; Out of range PIC case
;; call_value expander: identical structure to "call" above, but with a
;; value destination (operand 0); named calls go to
;; call_value_internal_symref, pointer calls through %r22 to
;; call_value_internal_reg, with the same PIC-register restore + blockage
;; dance.  NOTE(review): interior lines are dropped in this extract.
4293 (define_expand "call_value"
4294   [(parallel [(set (match_operand 0 "" "")
4295 		   (call (match_operand:SI 1 "" "")
4296 			 (match_operand 2 "" "")))
4297 	      (clobber (reg:SI 2))])]
4304   if (TARGET_PORTABLE_RUNTIME)
4305     op = force_reg (SImode, XEXP (operands[1], 0));
4307     op = XEXP (operands[1], 0);
4309   /* Use two different patterns for calls to explicitly named functions
4310      and calls through function pointers.  This is necessary as these two
4311      types of calls use different calling conventions, and CSE might try
4312      to change the named call into an indirect call in some cases (using
4313      two patterns keeps CSE from performing this optimization).  */
4314   if (GET_CODE (op) == SYMBOL_REF)
4315     call_insn = emit_call_insn (gen_call_value_internal_symref (operands[0],
4320       rtx tmpreg = gen_rtx_REG (SImode, 22);
4321       emit_move_insn (tmpreg, force_reg (SImode, op));
4322       call_insn = emit_call_insn (gen_call_value_internal_reg (operands[0],
4327       use_reg (&CALL_INSN_FUNCTION_USAGE (call_insn), pic_offset_table_rtx);
4329       /* After each call we must restore the PIC register, even if it
4330 	 doesn't appear to be used.
4332 	 This will set regs_ever_live for the callee saved register we
4333 	 stored the PIC register in.  */
4334       emit_move_insn (pic_offset_table_rtx,
4335 		      gen_rtx_REG (SImode, PIC_OFFSET_TABLE_REGNUM_SAVED));
4336       emit_insn (gen_rtx_USE (VOIDmode, pic_offset_table_rtx));
4338       /* Gross.  We have to keep the scheduler from moving the restore
4339 	 of the PIC register away from the call.  SCHED_GROUP_P is
4340 	 supposed to do this, but for some reason the compiler will
4341 	 go into an infinite loop when we use that.
4343 	 This method (blockage insn) may make worse code (then again
4344 	 it may not since calls are nearly blockages anyway), but at
4345 	 least it should work.  */
4346       emit_insn (gen_blockage ());
;; Value-returning variants of call_internal_symref / call_internal_reg.
;; Bodies and length computations mirror those patterns exactly; see the
;; comments there.  Operand 0 is the value destination ("=rf").
4351 (define_insn "call_value_internal_symref"
4352   [(set (match_operand 0 "" "=rf")
4353 	(call (mem:SI (match_operand:SI 1 "call_operand_address" ""))
4354 	      (match_operand 2 "" "i")))
4355    (clobber (reg:SI 2))
4356    (use (const_int 0))]
4357   ;;- Don't use operand 1 for most machines.
4358   "! TARGET_PORTABLE_RUNTIME"
4361   output_arg_descriptor (insn);
4362   return output_call (insn, operands[1]);
4364   [(set_attr "type" "call")
4365    (set (attr "length")
4366 ;; If we're sure that we can either reach the target or that the
4367 ;; linker can use a long-branch stub, then the length is 4 bytes.
4369 ;; For long-calls the length will be either 52 bytes (non-pic)
4370 ;; or 68 bytes (pic).  */
4371 ;; Else we have to use a long-call;
4372 	(if_then_else (lt (plus (symbol_ref "total_code_bytes") (pc))
4375 		      (if_then_else (eq (symbol_ref "flag_pic")
;; Indirect value-returning call through %r22 / $$dyncall; same length
;; cases as call_internal_reg (8 / 8 / 12 / 20 / PIC sequence).
4380 (define_insn "call_value_internal_reg"
4381   [(set (match_operand 0 "" "=rf")
4382 	(call (mem:SI (reg:SI 22))
4383 	      (match_operand 1 "" "i")))
4384    (clobber (reg:SI 2))
4385    (use (const_int 1))]
4391   /* First the special case for kernels, level 0 systems, etc.  */
4392   if (TARGET_NO_SPACE_REGS || TARGET_FAST_INDIRECT_CALLS)
4393     return \"ble 0(%%sr4,%%r22)\;copy %%r31,%%r2\";
4395   /* Now the normal case -- we can reach $$dyncall directly or
4396      we're sure that we can get there via a long-branch stub.
4398      No need to check target flags as the length uniquely identifies
4399      the remaining cases.  */
4400   if (get_attr_length (insn) == 8)
4401     return \".CALL\\tARGW0=GR\;bl $$dyncall,%%r31\;copy %%r31,%%r2\";
4403   /* Long millicode call, but we are not generating PIC or portable runtime
4405   if (get_attr_length (insn) == 12)
4406     return \".CALL\\tARGW0=GR\;ldil L%%$$dyncall,%%r2\;ble R%%$$dyncall(%%sr4,%%r2)\;copy %%r31,%%r2\";
4408   /* Long millicode call for portable runtime.  */
4409   if (get_attr_length (insn) == 20)
4410     return \"ldil L%%$$dyncall,%%r31\;ldo R%%$$dyncall(%%r31),%%r31\;blr 0,%%r2\;bv,n 0(%%r31)\;nop\";
4412   /* If we're generating PIC code.  */
4413   xoperands[0] = operands[1];
4414   xoperands[1] = gen_label_rtx ();
4415   output_asm_insn (\"bl .+8,%%r1\", xoperands);
4416   output_asm_insn (\"addil L%%$$dyncall-%1,%%r1\", xoperands);
4417   ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, \"L\",
4418 			     CODE_LABEL_NUMBER (xoperands[1]));
4419   output_asm_insn (\"ldo R%%$$dyncall-%1(%%r1),%%r1\", xoperands);
4420   output_asm_insn (\"blr 0,%%r2\", xoperands);
4421   output_asm_insn (\"bv,n 0(%%r1)\\n\\tnop\", xoperands);
4424  [(set_attr "type" "dyncall")
4425   (set (attr "length")
4427 	  ;; First NO_SPACE_REGS
4428 	  (ne (symbol_ref "TARGET_NO_SPACE_REGS || TARGET_FAST_INDIRECT_CALLS")
4432 	  ;; Target (or stub) within reach
4433 	  (and (lt (plus (symbol_ref "total_code_bytes") (pc))
4435 	       (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
4439 	  ;; Out of reach, but not PIC or PORTABLE_RUNTIME
4440 	  (and (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
4442 	       (eq (symbol_ref "flag_pic")
4446 	  (ne (symbol_ref "TARGET_PORTABLE_RUNTIME")
4450 	  ;; Out of range PIC case
4453 ;; Call subroutine returning any type.
;; Expand a call whose return type is unknown (used by __builtin_apply):
;; emit the call, then copy each (set ...) in the result-block vector
;; (operands[2]) so the stored function-value registers are materialized,
;; and finally emit a blockage so the optimizer cannot move code across
;; the point where all hard registers are assumed used/clobbered.
4455 (define_expand "untyped_call"
4456 [(parallel [(call (match_operand 0 "" "")
4458 (match_operand 1 "" "")
4459 (match_operand 2 "" "")])]
4465 emit_call_insn (gen_call (operands[0], const0_rtx));
4467 for (i = 0; i < XVECLEN (operands[2], 0); i++)
4469 rtx set = XVECEXP (operands[2], 0, i);
4470 emit_move_insn (SET_DEST (set), SET_SRC (set));
4473 /* The optimizer does not know that the call sets the function value
4474 registers we stored in the result block. We avoid problems by
4475 claiming that all hard registers are used and clobbered at this
4477 emit_insn (gen_blockage ());
;; NOTE(review): the two attr lines below look like the tail of a following
;; define_insn whose opening lines were dropped from this excerpt -- confirm
;; against the original pa.md.
4485 [(set_attr "type" "move")
4486 (set_attr "length" "4")])
4488 ;; These are just placeholders so we know where branch tables begin and end.
;; Marker emitting the GAS-only ".begin_brtab" pseudo-op before a branch
;; table; occupies no code space (length 0).
4490 (define_insn "begin_brtab"
4495 /* Only GAS actually supports this pseudo-op. */
4497 return \".begin_brtab\";
4501 [(set_attr "type" "move")
4502 (set_attr "length" "0")])
;; Marker emitting the GAS-only ".end_brtab" pseudo-op after a branch
;; table; occupies no code space (length 0).
4504 (define_insn "end_brtab"
4509 /* Only GAS actually supports this pseudo-op. */
4511 return \".end_brtab\";
4515 [(set_attr "type" "move")
4516 (set_attr "length" "0")])
4518 ;;; Hope this is only within a function...
;; Jump to the address held in a general register.
;; NOTE(review): the condition and output-template lines appear to have been
;; dropped from this excerpt.
4519 (define_insn "indirect_jump"
4520 [(set (pc) (match_operand:SI 0 "register_operand" "r"))]
4523 [(set_attr "type" "branch")
4524 (set_attr "length" "4")])
;; Zero-extract a fixed-position bit field: operand 2 is the width and
;; operand 3 the starting bit position (both 5-bit constants); maps
;; directly onto the PA "extru" instruction.
4526 (define_insn "extzv"
4527 [(set (match_operand:SI 0 "register_operand" "=r")
4528 (zero_extract:SI (match_operand:SI 1 "register_operand" "r")
4529 (match_operand:SI 2 "uint5_operand" "")
4530 (match_operand:SI 3 "uint5_operand" "")))]
4532 "extru %1,%3+%2-1,%2,%0"
4533 [(set_attr "type" "shift")
4534 (set_attr "length" "4")])
;; Variable-position zero-extract: position comes from a "q" register
;; (the shift-amount register, SAR).
;; NOTE(review): the '(define_insn ""' header lines for these anonymous
;; patterns appear to have been dropped from this excerpt.
4537 [(set (match_operand:SI 0 "register_operand" "=r")
4538 (zero_extract:SI (match_operand:SI 1 "register_operand" "r")
4540 (match_operand:SI 3 "register_operand" "q")))]
4543 [(set_attr "type" "shift")
4544 (set_attr "length" "4")])
;; Sign-extract with constant width/position; maps onto "extrs".
4547 [(set (match_operand:SI 0 "register_operand" "=r")
4548 (sign_extract:SI (match_operand:SI 1 "register_operand" "r")
4549 (match_operand:SI 2 "uint5_operand" "")
4550 (match_operand:SI 3 "uint5_operand" "")))]
4552 "extrs %1,%3+%2-1,%2,%0"
4553 [(set_attr "type" "shift")
4554 (set_attr "length" "4")])
;; Variable-position sign-extract, position in the SAR ("q") register.
4557 [(set (match_operand:SI 0 "register_operand" "=r")
4558 (sign_extract:SI (match_operand:SI 1 "register_operand" "r")
4560 (match_operand:SI 3 "register_operand" "q")))]
4563 [(set_attr "type" "shift")
4564 (set_attr "length" "4")])
;; Bit-field insert (insv): store a register ("dep") or 5-bit immediate
;; ("depi") into the field of width %1 at position %2 of operand 0.
4567 [(set (zero_extract:SI (match_operand:SI 0 "register_operand" "+r,r")
4568 (match_operand:SI 1 "uint5_operand" "")
4569 (match_operand:SI 2 "uint5_operand" ""))
4570 (match_operand:SI 3 "arith5_operand" "r,L"))]
4573 dep %3,%2+%1-1,%1,%0
4574 depi %3,%2+%1-1,%1,%0"
4575 [(set_attr "type" "shift,shift")
4576 (set_attr "length" "4,4")])
4578 ;; Optimize insertion of const_int values of type 1...1xxxx.
;; The condition checks that bit 4 of the constant is set and every bit
;; above the low nibble (within the field width) is also set, so the value
;; fits depi's sign-extended 5-bit immediate after the (val & 0xf) - 0x10
;; rewrite below.
4580 [(set (zero_extract:SI (match_operand:SI 0 "register_operand" "+r")
4581 (match_operand:SI 1 "uint5_operand" "")
4582 (match_operand:SI 2 "uint5_operand" ""))
4583 (match_operand:SI 3 "const_int_operand" ""))]
4584 "(INTVAL (operands[3]) & 0x10) != 0 &&
4585 (~INTVAL (operands[3]) & ((1L << INTVAL (operands[1])) - 1) & ~0xf) == 0"
4588 operands[3] = GEN_INT ((INTVAL (operands[3]) & 0xf) - 0x10);
4589 return \"depi %3,%2+%1-1,%1,%0\";
4591 [(set_attr "type" "shift")
4592 (set_attr "length" "4")])
4594 ;; This insn is used for some loop tests, typically loops reversed when
4595 ;; strength reduction is used. It is actually created when the instruction
4596 ;; combination phase combines the special loop test. Since this insn
4597 ;; is both a jump insn and has an output, it must deal with its own
4598 ;; reloads, hence the `m' constraints. The `!' constraints direct reload
4599 ;; to not choose the register alternatives in the event a reload is needed.
;; Combined "add constant to loop counter and conditionally branch" insn.
;; Three alternatives keep the counter in a general register, an FP
;; register, or memory; the latter two need the scratch in operand 4 and
;; extra reload-style instructions, which the length cond accounts for.
4600 (define_insn "decrement_and_branch_until_zero"
4603 (match_operator 2 "comparison_operator"
4604 [(plus:SI (match_operand:SI 0 "register_operand" "+!r,!*f,!*m")
4605 (match_operand:SI 1 "int5_operand" "L,L,L"))
4607 (label_ref (match_operand 3 "" ""))
4610 (plus:SI (match_dup 0) (match_dup 1)))
4611 (clobber (match_scratch:SI 4 "=X,r,r"))]
4613 "* return output_dbra (operands, insn, which_alternative); "
4614 ;; Do not expect to understand this the first time through.
4615 [(set_attr "type" "cbranch,multi,multi")
4616 (set (attr "length")
4617 (if_then_else (eq_attr "alternative" "0")
4618 ;; Loop counter in register case
4619 ;; Short branch has length of 4
4620 ;; Long branch has length of 8
4621 (if_then_else (lt (abs (minus (match_dup 3) (plus (pc) (const_int 8))))
4626 ;; Loop counter in FP reg case.
4627 ;; Extra goo to deal with additional reload insns.
4628 (if_then_else (eq_attr "alternative" "1")
4629 (if_then_else (lt (match_dup 3) (pc))
4631 (lt (abs (minus (match_dup 3) (plus (pc) (const_int 24))))
4636 (lt (abs (minus (match_dup 3) (plus (pc) (const_int 8))))
4640 ;; Loop counter in memory case.
4641 ;; Extra goo to deal with additional reload insns.
4642 (if_then_else (lt (match_dup 3) (pc))
4644 (lt (abs (minus (match_dup 3) (plus (pc) (const_int 12))))
4649 (lt (abs (minus (match_dup 3) (plus (pc) (const_int 8))))
4652 (const_int 16))))))])
;; "movb"-style patterns: move a register and conditionally branch on the
;; moved value, emitted via output_movb.  Alternatives cover destination in
;; a general register, FP register, memory, or the SAR ("q") register.
;; NOTE(review): the '(define_insn ""' header and (set (pc) ...) lines
;; appear to have been dropped from this excerpt.
4657 (match_operator 2 "movb_comparison_operator"
4658 [(match_operand:SI 1 "register_operand" "r,r,r,r") (const_int 0)])
4659 (label_ref (match_operand 3 "" ""))
4661 (set (match_operand:SI 0 "register_operand" "=!r,!*f,!*m,!*q")
4664 "* return output_movb (operands, insn, which_alternative, 0); "
4665 ;; Do not expect to understand this the first time through.
4666 [(set_attr "type" "cbranch,multi,multi,multi")
4667 (set (attr "length")
4668 (if_then_else (eq_attr "alternative" "0")
4669 ;; Loop counter in register case
4670 ;; Short branch has length of 4
4671 ;; Long branch has length of 8
4672 (if_then_else (lt (abs (minus (match_dup 3) (plus (pc) (const_int 8))))
4677 ;; Loop counter in FP reg case.
4678 ;; Extra goo to deal with additional reload insns.
4679 (if_then_else (eq_attr "alternative" "1")
4680 (if_then_else (lt (match_dup 3) (pc))
4682 (lt (abs (minus (match_dup 3) (plus (pc) (const_int 12))))
4687 (lt (abs (minus (match_dup 3) (plus (pc) (const_int 8))))
4691 ;; Loop counter in memory or sar case.
4692 ;; Extra goo to deal with additional reload insns.
4694 (lt (abs (minus (match_dup 3) (plus (pc) (const_int 8))))
4697 (const_int 12)))))])
4699 ;; Handle negated branch.
;; Same as above but with the branch sense inverted (output_movb's final
;; argument is 1).
4703 (match_operator 2 "movb_comparison_operator"
4704 [(match_operand:SI 1 "register_operand" "r,r,r,r") (const_int 0)])
4706 (label_ref (match_operand 3 "" ""))))
4707 (set (match_operand:SI 0 "register_operand" "=!r,!*f,!*m,!*q")
4710 "* return output_movb (operands, insn, which_alternative, 1); "
4711 ;; Do not expect to understand this the first time through.
4712 [(set_attr "type" "cbranch,multi,multi,multi")
4713 (set (attr "length")
4714 (if_then_else (eq_attr "alternative" "0")
4715 ;; Loop counter in register case
4716 ;; Short branch has length of 4
4717 ;; Long branch has length of 8
4718 (if_then_else (lt (abs (minus (match_dup 3) (plus (pc) (const_int 8))))
4723 ;; Loop counter in FP reg case.
4724 ;; Extra goo to deal with additional reload insns.
4725 (if_then_else (eq_attr "alternative" "1")
4726 (if_then_else (lt (match_dup 3) (pc))
4728 (lt (abs (minus (match_dup 3) (plus (pc) (const_int 12))))
4733 (lt (abs (minus (match_dup 3) (plus (pc) (const_int 8))))
4737 ;; Loop counter in memory or SAR case.
4738 ;; Extra goo to deal with additional reload insns.
4740 (lt (abs (minus (match_dup 3) (plus (pc) (const_int 8))))
4743 (const_int 12)))))])
4745 ;; The next several patterns (parallel_addb, parallel_movb, fmpyadd and
4746 ;; fmpysub aren't currently used by the FSF sources, but will be soon.
4748 ;; They're in the FSF tree for documentation and to make Cygnus<->FSF merging easier.
;; parallel_addb: an unconditional branch executed in parallel with an add,
;; emitted via output_parallel_addb.  The condition requires the destination
;; to equal one of the sources so the add fits the PA addb/add forms.
;; NOTE(review): the '(define_insn ...' header lines for these anonymous
;; patterns appear to have been dropped from this excerpt.
4751 [(set (pc) (label_ref (match_operand 3 "" "" )))
4752 (set (match_operand:SI 0 "register_operand" "=r")
4753 (plus:SI (match_operand:SI 1 "register_operand" "r")
4754 (match_operand:SI 2 "ireg_or_int5_operand" "rL")))]
4755 "(reload_completed && operands[0] == operands[1]) || operands[0] == operands[2]"
4758 return output_parallel_addb (operands, get_attr_length (insn));
4760 [(set_attr "type" "parallel_branch")
4761 (set (attr "length")
4762 (if_then_else (lt (abs (minus (match_dup 3) (plus (pc) (const_int 8))))
;; parallel_movb variants: branch executed in parallel with a move, one
;; pattern per mode (SF, SI, HI, QI), all emitted via output_parallel_movb.
4768 [(set (pc) (label_ref (match_operand 2 "" "" )))
4769 (set (match_operand:SF 0 "register_operand" "=r")
4770 (match_operand:SF 1 "ireg_or_int5_operand" "rL"))]
4774 return output_parallel_movb (operands, get_attr_length (insn));
4776 [(set_attr "type" "parallel_branch")
4777 (set (attr "length")
4778 (if_then_else (lt (abs (minus (match_dup 2) (plus (pc) (const_int 8))))
4784 [(set (pc) (label_ref (match_operand 2 "" "" )))
4785 (set (match_operand:SI 0 "register_operand" "=r")
4786 (match_operand:SI 1 "ireg_or_int5_operand" "rL"))]
4790 return output_parallel_movb (operands, get_attr_length (insn));
4792 [(set_attr "type" "parallel_branch")
4793 (set (attr "length")
4794 (if_then_else (lt (abs (minus (match_dup 2) (plus (pc) (const_int 8))))
4800 [(set (pc) (label_ref (match_operand 2 "" "" )))
4801 (set (match_operand:HI 0 "register_operand" "=r")
4802 (match_operand:HI 1 "ireg_or_int5_operand" "rL"))]
4806 return output_parallel_movb (operands, get_attr_length (insn));
4808 [(set_attr "type" "parallel_branch")
4809 (set (attr "length")
4810 (if_then_else (lt (abs (minus (match_dup 2) (plus (pc) (const_int 8))))
4816 [(set (pc) (label_ref (match_operand 2 "" "" )))
4817 (set (match_operand:QI 0 "register_operand" "=r")
4818 (match_operand:QI 1 "ireg_or_int5_operand" "rL"))]
4822 return output_parallel_movb (operands, get_attr_length (insn));
4824 [(set_attr "type" "parallel_branch")
4825 (set (attr "length")
4826 (if_then_else (lt (abs (minus (match_dup 2) (plus (pc) (const_int 8))))
;; Fused multiply/add and multiply/sub patterns for PA 1.1 ("SNAKE")
;; hardware: after reload, a float multiply paired with an add (or sub)
;; collapses into a single fmpyadd/fmpysub instruction.  Each pairing
;; exists in both orders (multiply first, add/sub first); the ,dbl vs
;; ,sgl completer is chosen from the operand mode, and the %4-vs-%5
;; choice handles which add source equals the add destination.
;; NOTE(review): the '(define_insn ""' header lines appear to have been
;; dropped from this excerpt.
4832 [(set (match_operand 0 "register_operand" "=f")
4833 (mult (match_operand 1 "register_operand" "f")
4834 (match_operand 2 "register_operand" "f")))
4835 (set (match_operand 3 "register_operand" "+f")
4836 (plus (match_operand 4 "register_operand" "f")
4837 (match_operand 5 "register_operand" "f")))]
4838 "TARGET_SNAKE && ! TARGET_SOFT_FLOAT
4839 && reload_completed && fmpyaddoperands (operands)"
4842 if (GET_MODE (operands[0]) == DFmode)
4844 if (rtx_equal_p (operands[3], operands[5]))
4845 return \"fmpyadd,dbl %1,%2,%0,%4,%3\";
4847 return \"fmpyadd,dbl %1,%2,%0,%5,%3\";
4851 if (rtx_equal_p (operands[3], operands[5]))
4852 return \"fmpyadd,sgl %1,%2,%0,%4,%3\";
4854 return \"fmpyadd,sgl %1,%2,%0,%5,%3\";
4857 [(set_attr "type" "fpalu")
4858 (set_attr "length" "4")])
;; Same fmpyadd pairing with the add listed before the multiply.
4861 [(set (match_operand 3 "register_operand" "+f")
4862 (plus (match_operand 4 "register_operand" "f")
4863 (match_operand 5 "register_operand" "f")))
4864 (set (match_operand 0 "register_operand" "=f")
4865 (mult (match_operand 1 "register_operand" "f")
4866 (match_operand 2 "register_operand" "f")))]
4867 "TARGET_SNAKE && ! TARGET_SOFT_FLOAT
4868 && reload_completed && fmpyaddoperands (operands)"
4871 if (GET_MODE (operands[0]) == DFmode)
4873 if (rtx_equal_p (operands[3], operands[5]))
4874 return \"fmpyadd,dbl %1,%2,%0,%4,%3\";
4876 return \"fmpyadd,dbl %1,%2,%0,%5,%3\";
4880 if (rtx_equal_p (operands[3], operands[5]))
4881 return \"fmpyadd,sgl %1,%2,%0,%4,%3\";
4883 return \"fmpyadd,sgl %1,%2,%0,%5,%3\";
4886 [(set_attr "type" "fpalu")
4887 (set_attr "length" "4")])
;; fmpysub: multiply paired with a subtract.
4890 [(set (match_operand 0 "register_operand" "=f")
4891 (mult (match_operand 1 "register_operand" "f")
4892 (match_operand 2 "register_operand" "f")))
4893 (set (match_operand 3 "register_operand" "+f")
4894 (minus (match_operand 4 "register_operand" "f")
4895 (match_operand 5 "register_operand" "f")))]
4896 "TARGET_SNAKE && ! TARGET_SOFT_FLOAT
4897 && reload_completed && fmpysuboperands (operands)"
4900 if (GET_MODE (operands[0]) == DFmode)
4901 return \"fmpysub,dbl %1,%2,%0,%5,%3\";
4903 return \"fmpysub,sgl %1,%2,%0,%5,%3\";
4905 [(set_attr "type" "fpalu")
4906 (set_attr "length" "4")])
;; Same fmpysub pairing with the subtract listed before the multiply.
4909 [(set (match_operand 3 "register_operand" "+f")
4910 (minus (match_operand 4 "register_operand" "f")
4911 (match_operand 5 "register_operand" "f")))
4912 (set (match_operand 0 "register_operand" "=f")
4913 (mult (match_operand 1 "register_operand" "f")
4914 (match_operand 2 "register_operand" "f")))]
4915 "TARGET_SNAKE && ! TARGET_SOFT_FLOAT
4916 && reload_completed && fmpysuboperands (operands)"
4919 if (GET_MODE (operands[0]) == DFmode)
4920 return \"fmpysub,dbl %1,%2,%0,%5,%3\";
4922 return \"fmpysub,sgl %1,%2,%0,%5,%3\";
4924 [(set_attr "type" "fpalu")
4925 (set_attr "length" "4")])
4927 ;; Clean up turds left by reload.
;; Reload-cleanup peepholes for DFmode: a store of a register pair to
;; memory followed by a register-to-register copy of the same class
;; (and the mirror case, load followed by copy).  The conditions require
;; non-volatile MEMs with no side-effecting addresses and matching
;; modes/register classes; output goes through output_fp_move_double or
;; output_move_double depending on the register class.
;; NOTE(review): the '(define_peephole' header lines and several
;; match_dup/brace lines appear to have been dropped from this excerpt.
4929 [(set (match_operand 0 "reg_or_nonsymb_mem_operand" "")
4930 (match_operand 1 "register_operand" "fr"))
4931 (set (match_operand 2 "register_operand" "fr")
4933 "! TARGET_SOFT_FLOAT
4934 && GET_CODE (operands[0]) == MEM
4935 && ! MEM_VOLATILE_P (operands[0])
4936 && GET_MODE (operands[0]) == GET_MODE (operands[1])
4937 && GET_MODE (operands[0]) == GET_MODE (operands[2])
4938 && GET_MODE (operands[0]) == DFmode
4939 && GET_CODE (operands[1]) == REG
4940 && GET_CODE (operands[2]) == REG
4941 && ! side_effects_p (XEXP (operands[0], 0))
4942 && REGNO_REG_CLASS (REGNO (operands[1]))
4943 == REGNO_REG_CLASS (REGNO (operands[2]))"
4948 if (FP_REG_P (operands[1]))
4949 output_asm_insn (output_fp_move_double (operands), operands);
4951 output_asm_insn (output_move_double (operands), operands);
4953 if (rtx_equal_p (operands[1], operands[2]))
4956 xoperands[0] = operands[2];
4957 xoperands[1] = operands[1];
4959 if (FP_REG_P (xoperands[1]))
4960 output_asm_insn (output_fp_move_double (xoperands), xoperands);
4962 output_asm_insn (output_move_double (xoperands), xoperands);
;; Mirror peephole: DFmode load from memory followed by a same-class
;; register copy of the loaded value.
4968 [(set (match_operand 0 "register_operand" "fr")
4969 (match_operand 1 "reg_or_nonsymb_mem_operand" ""))
4970 (set (match_operand 2 "register_operand" "fr")
4972 "! TARGET_SOFT_FLOAT
4973 && GET_CODE (operands[1]) == MEM
4974 && ! MEM_VOLATILE_P (operands[1])
4975 && GET_MODE (operands[0]) == GET_MODE (operands[1])
4976 && GET_MODE (operands[0]) == GET_MODE (operands[2])
4977 && GET_MODE (operands[0]) == DFmode
4978 && GET_CODE (operands[0]) == REG
4979 && GET_CODE (operands[2]) == REG
4980 && ! side_effects_p (XEXP (operands[1], 0))
4981 && REGNO_REG_CLASS (REGNO (operands[0]))
4982 == REGNO_REG_CLASS (REGNO (operands[2]))"
4987 if (FP_REG_P (operands[0]))
4988 output_asm_insn (output_fp_move_double (operands), operands);
4990 output_asm_insn (output_move_double (operands), operands);
4992 xoperands[0] = operands[2];
4993 xoperands[1] = operands[0];
4995 if (FP_REG_P (xoperands[1]))
4996 output_asm_insn (output_fp_move_double (xoperands), xoperands);
4998 output_asm_insn (output_move_double (xoperands), xoperands);
5003 ;; Flush the I and D cache line found at the address in operand 0.
5004 ;; This is used by the trampoline code for nested functions.
5005 ;; So long as the trampoline itself is less than 32 bytes this
;; Flush two data-cache lines (addresses in operands 0 and 1) and sync;
;; used by the nested-function trampoline code.  Three instructions,
;; hence length 12.
5008 (define_insn "dcacheflush"
5009 [(unspec_volatile [(const_int 1)] 0)
5010 (use (mem:SI (match_operand:SI 0 "register_operand" "r")))
5011 (use (mem:SI (match_operand:SI 1 "register_operand" "r")))]
5013 "fdc 0(0,%0)\;fdc 0(0,%1)\;sync"
5014 [(set_attr "type" "multi")
5015 (set_attr "length" "12")])
;; Flush two instruction-cache lines: save %sr0, load the space id of the
;; address in operand 2, flush both lines via fic, sync, restore %sr0, and
;; pad with nops.  Operands 3 and 4 are early-clobber scratch registers.
;; Thirteen instructions, hence length 52.
5017 (define_insn "icacheflush"
5018 [(unspec_volatile [(const_int 2)] 0)
5019 (use (mem:SI (match_operand:SI 0 "register_operand" "r")))
5020 (use (mem:SI (match_operand:SI 1 "register_operand" "r")))
5021 (use (match_operand:SI 2 "register_operand" "r"))
5022 (clobber (match_operand:SI 3 "register_operand" "=&r"))
5023 (clobber (match_operand:SI 4 "register_operand" "=&r"))]
5025 "mfsp %%sr0,%4\;ldsid (0,%2),%3\;mtsp %3,%%sr0\;fic 0(%%sr0,%0)\;fic 0(%%sr0,%1)\;sync\;mtsp %4,%%sr0\;nop\;nop\;nop\;nop\;nop\;nop"
5026 [(set_attr "type" "multi")
5027 (set_attr "length" "52")])
5029 ;; An out-of-line prologue.
;; Call the out-of-line prologue millicode routine.  Two variants are
;; emitted depending on frame_pointer_needed; each variant uses either the
;; portable-runtime ldil/ble sequence or a plain bl.  Clobbers the listed
;; caller registers.
5030 (define_insn "outline_prologue_call"
5031 [(unspec_volatile [(const_int 0)] 0)
5032 (clobber (reg:SI 31))
5033 (clobber (reg:SI 22))
5034 (clobber (reg:SI 21))
5035 (clobber (reg:SI 20))
5036 (clobber (reg:SI 19))
5037 (clobber (reg:SI 1))]
5041 extern int frame_pointer_needed;
5043 /* We need two different versions depending on whether or not we
5044 need a frame pointer. Also note that we return to the instruction
5045 immediately after the branch rather than two instructions after the
5046 break as normally is the case. */
5047 if (frame_pointer_needed)
5049 /* Must import the magic millicode routine(s). */
5050 output_asm_insn (\".IMPORT __outline_prologue_fp,MILLICODE\", NULL);
5052 if (TARGET_PORTABLE_RUNTIME)
5054 output_asm_insn (\"ldil L'__outline_prologue_fp,%%r31\", NULL);
5055 output_asm_insn (\"ble,n R'__outline_prologue_fp(%%sr0,%%r31)\",
5059 output_asm_insn (\"bl,n __outline_prologue_fp,%%r31\", NULL);
5063 /* Must import the magic millicode routine(s). */
5064 output_asm_insn (\".IMPORT __outline_prologue,MILLICODE\", NULL);
5066 if (TARGET_PORTABLE_RUNTIME)
5068 output_asm_insn (\"ldil L'__outline_prologue,%%r31\", NULL);
5069 output_asm_insn (\"ble,n R'__outline_prologue(%%sr0,%%r31)\", NULL);
5072 output_asm_insn (\"bl,n __outline_prologue,%%r31\", NULL);
5076 [(set_attr "type" "multi")
5077 (set_attr "length" "8")])
5079 ;; An out-of-line epilogue.
;; Call the out-of-line epilogue millicode routine; structure mirrors
;; outline_prologue_call (frame-pointer and portable-runtime variants),
;; additionally clobbering %r2 since the epilogue handles the return.
5080 (define_insn "outline_epilogue_call"
5081 [(unspec_volatile [(const_int 1)] 0)
5084 (clobber (reg:SI 31))
5085 (clobber (reg:SI 22))
5086 (clobber (reg:SI 21))
5087 (clobber (reg:SI 20))
5088 (clobber (reg:SI 19))
5089 (clobber (reg:SI 2))
5090 (clobber (reg:SI 1))]
5094 extern int frame_pointer_needed;
5096 /* We need two different versions depending on whether or not we
5097 need a frame pointer. Also note that we return to the instruction
5098 immediately after the branch rather than two instructions after the
5099 break as normally is the case. */
5100 if (frame_pointer_needed)
5102 /* Must import the magic millicode routine. */
5103 output_asm_insn (\".IMPORT __outline_epilogue_fp,MILLICODE\", NULL);
5105 /* The out-of-line prologue will make sure we return to the right
5107 if (TARGET_PORTABLE_RUNTIME)
5109 output_asm_insn (\"ldil L'__outline_epilogue_fp,%%r31\", NULL);
5110 output_asm_insn (\"ble,n R'__outline_epilogue_fp(%%sr0,%%r31)\", NULL);
5114 output_asm_insn (\"bl,n __outline_epilogue_fp,%%r31\", NULL);
5118 /* Must import the magic millicode routine. */
5119 output_asm_insn (\".IMPORT __outline_epilogue,MILLICODE\", NULL);
5121 /* The out-of-line prologue will make sure we return to the right
5123 if (TARGET_PORTABLE_RUNTIME)
5125 output_asm_insn (\"ldil L'__outline_epilogue,%%r31\", NULL);
5126 output_asm_insn (\"ble,n R'__outline_epilogue(%%sr0,%%r31)\", NULL);
5129 output_asm_insn (\"bl,n __outline_epilogue,%%r31\", NULL);
5133 [(set_attr "type" "multi")
5134 (set_attr "length" "8")])
5136 ;; Given a function pointer, canonicalize it so it can be
5137 ;; reliably compared to another function pointer.
;; Expand canonicalization of a function pointer for comparison: copy the
;; pointer into %r26, run the unspec insn below (which may call the
;; $$sh_func_adrs millicode routine), and copy the canonical result out of
;; %r29.  Disabled for the portable runtime.  A non-REG operand 1 is first
;; copied into a fresh pseudo.
5138 (define_expand "canonicalize_funcptr_for_compare"
5139 [(set (reg:SI 26) (match_operand:SI 1 "register_operand" ""))
5140 (parallel [(set (reg:SI 29) (unspec:SI [(reg:SI 26)] 0))
5141 (clobber (match_dup 2))
5142 (clobber (reg:SI 26))
5143 (clobber (reg:SI 22))
5144 (clobber (reg:SI 31))])
5145 (set (match_operand:SI 0 "register_operand" "")
5147 "! TARGET_PORTABLE_RUNTIME"
5150 operands[2] = gen_reg_rtx (SImode);
5151 if (GET_CODE (operands[1]) != REG)
5153 rtx tmp = gen_reg_rtx (Pmode);
5154 emit_move_insn (tmp, operands[1]);
;; The worker insn for canonicalize_funcptr_for_compare.  It copies %r26 to
;; %r29, then tests whether the pointer needs canonicalizing (low two bits
;; == 2 and value > 4096); if so it calls the $$sh_func_adrs millicode
;; routine.  The branch displacements in the inline tests depend on the
;; total sequence length, so each get_attr_length case emits matching
;; skip offsets.
;; NOTE(review): the '(define_insn' header and several cond arms/constants
;; in the length attribute appear to have been dropped from this excerpt.
5160 [(set (reg:SI 29) (unspec:SI [(reg:SI 26)] 0))
5161 (clobber (match_operand:SI 0 "register_operand" "=a"))
5162 (clobber (reg:SI 26))
5163 (clobber (reg:SI 22))
5164 (clobber (reg:SI 31))]
5168 /* Must import the magic millicode routine. */
5169 output_asm_insn (\".IMPORT $$sh_func_adrs,MILLICODE\", NULL);
5171 /* This is absolutely amazing.
5173 First, copy our input parameter into %r29 just in case we don't
5174 need to call $$sh_func_adrs. */
5175 output_asm_insn (\"copy %%r26,%%r29\", NULL);
5177 /* Next, examine the low two bits in %r26, if they aren't 0x2, then
5178 we use %r26 unchanged. */
5179 if (get_attr_length (insn) == 32)
5180 output_asm_insn (\"extru %%r26,31,2,%%r31\;comib,<>,n 2,%%r31,.+24\", NULL);
5181 else if (get_attr_length (insn) == 40)
5182 output_asm_insn (\"extru %%r26,31,2,%%r31\;comib,<>,n 2,%%r31,.+32\", NULL);
5183 else if (get_attr_length (insn) == 44)
5184 output_asm_insn (\"extru %%r26,31,2,%%r31\;comib,<>,n 2,%%r31,.+36\", NULL);
5186 output_asm_insn (\"extru %%r26,31,2,%%r31\;comib,<>,n 2,%%r31,.+20\", NULL);
5188 /* Next, compare %r26 with 4096, if %r26 is less than or equal to
5189 4096, then we use %r26 unchanged. */
5190 if (get_attr_length (insn) == 32)
5191 output_asm_insn (\"ldi 4096,%%r31\;comb,<<,n %%r26,%%r31,.+16\", NULL);
5192 else if (get_attr_length (insn) == 40)
5193 output_asm_insn (\"ldi 4096,%%r31\;comb,<<,n %%r26,%%r31,.+24\", NULL);
5194 else if (get_attr_length (insn) == 44)
5195 output_asm_insn (\"ldi 4096,%%r31\;comb,<<,n %%r26,%%r31,.+28\", NULL);
5197 output_asm_insn (\"ldi 4096,%%r31\;comb,<<,n %%r26,%%r31,.+12\", NULL);
5199 /* Else call $$sh_func_adrs to extract the function's real add24. */
5200 return output_millicode_call (insn,
5201 gen_rtx_SYMBOL_REF (SImode, \"$$sh_func_adrs\"));
5203 [(set_attr "type" "multi")
5204 (set (attr "length")
5206 ;; Target (or stub) within reach
5207 (and (lt (plus (symbol_ref "total_code_bytes") (pc))
5209 (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
5214 (ne (symbol_ref "TARGET_NO_SPACE_REGS || TARGET_FAST_INDIRECT_CALLS")
5218 ;; Out of reach, but not PIC or PORTABLE_RUNTIME
5219 ;; same as NO_SPACE_REGS code
5220 (and (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
5222 (eq (symbol_ref "flag_pic")
5227 (ne (symbol_ref "TARGET_PORTABLE_RUNTIME")
5231 ;; Out of range and PIC
5234 ;; On the PA, the PIC register is call clobbered, so it must
5235 ;; be saved & restored around calls by the caller. If the call
5236 ;; doesn't return normally (nonlocal goto, or an exception is
5237 ;; thrown), then the code at the exception handler label must
5238 ;; restore the PIC register.
5239 (define_expand "exception_receiver"
5241 "!TARGET_PORTABLE_RUNTIME && flag_pic"
5244 /* Load the PIC register from the stack slot (in our caller's
5246 emit_move_insn (pic_offset_table_rtx,
5247 gen_rtx_MEM (SImode, plus_constant (stack_pointer_rtx, -32)));
5248 emit_insn (gen_rtx_USE (VOIDmode, pic_offset_table_rtx));
5249 emit_insn (gen_blockage ());