1 /* Subroutines used for code generation on IBM RS/6000.
2 Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
4 Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it
9 under the terms of the GNU General Public License as published
10 by the Free Software Foundation; either version 2, or (at your
11 option) any later version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT
14 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
15 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
16 License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the
20 Free Software Foundation, 59 Temple Place - Suite 330, Boston,
21 MA 02111-1307, USA. */
25 #include "coretypes.h"
29 #include "hard-reg-set.h"
31 #include "insn-config.h"
32 #include "conditions.h"
33 #include "insn-attr.h"
43 #include "basic-block.h"
44 #include "integrate.h"
50 #include "target-def.h"
51 #include "langhooks.h"
53 #include "cfglayout.h"
54 #include "sched-int.h"
55 #include "tree-gimple.h"
57 #include "xcoffout.h" /* get declarations of xcoff_*_section_name */
60 #ifndef TARGET_NO_PROTOTYPE
61 #define TARGET_NO_PROTOTYPE 0
64 #define EASY_VECTOR_15(n) ((n) >= -16 && (n) <= 15)
65 #define EASY_VECTOR_15_ADD_SELF(n) ((n) >= 0x10 && (n) <= 0x1e \
68 #define min(A,B) ((A) < (B) ? (A) : (B))
69 #define max(A,B) ((A) > (B) ? (A) : (B))
71 /* Structure used to define the rs6000 stack */
72 typedef struct rs6000_stack {
73 int first_gp_reg_save; /* first callee saved GP register used */
74 int first_fp_reg_save; /* first callee saved FP register used */
75 int first_altivec_reg_save; /* first callee saved AltiVec register used */
76 int lr_save_p; /* true if the link reg needs to be saved */
77 int cr_save_p; /* true if the CR reg needs to be saved */
78 unsigned int vrsave_mask; /* mask of vec registers to save */
79 int toc_save_p; /* true if the TOC needs to be saved */
80 int push_p; /* true if we need to allocate stack space */
81 int calls_p; /* true if the function makes any calls */
82 int world_save_p; /* true if we're saving *everything*:
83 r13-r31, cr, f14-f31, vrsave, v20-v31 */
84 enum rs6000_abi abi; /* which ABI to use */
85 int gp_save_offset; /* offset to save GP regs from initial SP */
86 int fp_save_offset; /* offset to save FP regs from initial SP */
87 int altivec_save_offset; /* offset to save AltiVec regs from initial SP */
88 int lr_save_offset; /* offset to save LR from initial SP */
89 int cr_save_offset; /* offset to save CR from initial SP */
90 int vrsave_save_offset; /* offset to save VRSAVE from initial SP */
91 int spe_gp_save_offset; /* offset to save spe 64-bit gprs */
92 int toc_save_offset; /* offset to save the TOC pointer */
93 int varargs_save_offset; /* offset to save the varargs registers */
94 int ehrd_offset; /* offset to EH return data */
95 int reg_size; /* register size (4 or 8) */
96 int varargs_size; /* size to hold V.4 args passed in regs */
97 HOST_WIDE_INT vars_size; /* variable save area size */
98 int parm_size; /* outgoing parameter size */
99 int save_size; /* save area size */
100 int fixed_size; /* fixed size of stack frame */
101 int gp_size; /* size of saved GP registers */
102 int fp_size; /* size of saved FP registers */
103 int altivec_size; /* size of saved AltiVec registers */
104 int cr_size; /* size to hold CR if not in save_size */
105 int lr_size; /* size to hold LR if not in save_size */
106 int vrsave_size; /* size to hold VRSAVE if not in save_size */
107 int altivec_padding_size; /* size of altivec alignment padding if
109 int spe_gp_size; /* size of 64-bit GPR save size for SPE */
110 int spe_padding_size;
111 int toc_size; /* size to hold TOC if not in save_size */
112 HOST_WIDE_INT total_size; /* total bytes allocated for stack */
113 int spe_64bit_regs_used;
116 /* Target cpu type */
118 enum processor_type rs6000_cpu;
119 struct rs6000_cpu_select rs6000_select[3] =
121 /* switch name, tune arch */
122 { (const char *)0, "--with-cpu=", 1, 1 },
123 { (const char *)0, "-mcpu=", 1, 1 },
124 { (const char *)0, "-mtune=", 1, 0 },
127 /* Always emit branch hint bits. */
128 static GTY(()) bool rs6000_always_hint;
130 /* Schedule instructions for group formation. */
131 static GTY(()) bool rs6000_sched_groups;
133 /* Support adjust_priority scheduler hook
134 and -mprioritize-restricted-insns= option. */
135 const char *rs6000_sched_restricted_insns_priority_str;
136 int rs6000_sched_restricted_insns_priority;
138 /* Support for -msched-costly-dep option. */
139 const char *rs6000_sched_costly_dep_str;
140 enum rs6000_dependence_cost rs6000_sched_costly_dep;
142 /* Support for -minsert-sched-nops option. */
143 const char *rs6000_sched_insert_nops_str;
144 enum rs6000_nop_insertion rs6000_sched_insert_nops;
146 /* Size of long double */
147 const char *rs6000_long_double_size_string;
148 int rs6000_long_double_type_size;
150 /* Whether -mabi=altivec has appeared */
151 int rs6000_altivec_abi;
153 /* Whether VRSAVE instructions should be generated. */
154 int rs6000_altivec_vrsave;
156 /* String from -mvrsave= option. */
157 const char *rs6000_altivec_vrsave_string;
159 /* Nonzero if we want SPE ABI extensions. */
162 /* Whether isel instructions should be generated. */
165 /* Whether SPE simd instructions should be generated. */
168 /* Nonzero if floating point operations are done in the GPRs. */
169 int rs6000_float_gprs = 0;
171 /* String from -mfloat-gprs=. */
172 const char *rs6000_float_gprs_string;
174 /* String from -misel=. */
175 const char *rs6000_isel_string;
177 /* String from -mspe=. */
178 const char *rs6000_spe_string;
180 /* Set to nonzero once AIX common-mode calls have been defined. */
181 static GTY(()) int common_mode_defined;
183 /* Save information from a "cmpxx" operation until the branch or scc is
185 rtx rs6000_compare_op0, rs6000_compare_op1;
186 int rs6000_compare_fp_p;
188 /* Label number of label created for -mrelocatable, to call to so we can
189 get the address of the GOT section */
190 int rs6000_pic_labelno;
193 /* Which abi to adhere to */
194 const char *rs6000_abi_name;
196 /* Semantics of the small data area */
197 enum rs6000_sdata_type rs6000_sdata = SDATA_DATA;
199 /* Which small data model to use */
200 const char *rs6000_sdata_name = (char *)0;
202 /* Counter for labels which are to be placed in .fixup. */
203 int fixuplabelno = 0;
206 /* Bit size of immediate TLS offsets and string from which it is decoded. */
207 int rs6000_tls_size = 32;
208 const char *rs6000_tls_size_string;
210 /* ABI enumeration available for subtarget to use. */
211 enum rs6000_abi rs6000_current_abi;
213 /* ABI string from -mabi= option. */
214 const char *rs6000_abi_string;
216 /* Whether to use variant of AIX ABI for PowerPC64 Linux. */
220 const char *rs6000_debug_name;
221 int rs6000_debug_stack; /* debug stack applications */
222 int rs6000_debug_arg; /* debug argument handling */
224 /* Value is TRUE if register/mode pair is accepatable. */
225 bool rs6000_hard_regno_mode_ok_p[NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
228 static GTY(()) tree opaque_V2SI_type_node;
229 static GTY(()) tree opaque_V2SF_type_node;
230 static GTY(()) tree opaque_p_V2SI_type_node;
231 static GTY(()) tree V16QI_type_node;
232 static GTY(()) tree V2SI_type_node;
233 static GTY(()) tree V2SF_type_node;
234 static GTY(()) tree V4HI_type_node;
235 static GTY(()) tree V4SI_type_node;
236 static GTY(()) tree V4SF_type_node;
237 static GTY(()) tree V8HI_type_node;
238 static GTY(()) tree unsigned_V16QI_type_node;
239 static GTY(()) tree unsigned_V8HI_type_node;
240 static GTY(()) tree unsigned_V4SI_type_node;
241 static GTY(()) tree bool_char_type_node; /* __bool char */
242 static GTY(()) tree bool_short_type_node; /* __bool short */
243 static GTY(()) tree bool_int_type_node; /* __bool int */
244 static GTY(()) tree pixel_type_node; /* __pixel */
245 static GTY(()) tree bool_V16QI_type_node; /* __vector __bool char */
246 static GTY(()) tree bool_V8HI_type_node; /* __vector __bool short */
247 static GTY(()) tree bool_V4SI_type_node; /* __vector __bool int */
248 static GTY(()) tree pixel_V8HI_type_node; /* __vector __pixel */
250 int rs6000_warn_altivec_long = 1; /* On by default. */
251 const char *rs6000_warn_altivec_long_switch;
253 const char *rs6000_traceback_name;
255 traceback_default = 0,
261 /* Flag to say the TOC is initialized */
263 char toc_label_name[10];
265 /* Alias set for saves and restores from the rs6000 stack. */
266 static GTY(()) int rs6000_sr_alias_set;
268 /* Call distance, overridden by -mlongcall and #pragma longcall(1).
269 The only place that looks at this is rs6000_set_default_type_attributes;
270 everywhere else should rely on the presence or absence of a longcall
271 attribute on the function declaration. */
272 int rs6000_default_long_calls;
273 const char *rs6000_longcall_switch;
275 /* Control alignment for fields within structures. */
276 /* String from -malign-XXXXX. */
277 const char *rs6000_alignment_string;
278 int rs6000_alignment_flags;
280 struct builtin_description
282 /* mask is not const because we're going to alter it below. This
283 nonsense will go away when we rewrite the -march infrastructure
284 to give us more target flag bits. */
286 const enum insn_code icode;
287 const char *const name;
288 const enum rs6000_builtins code;
291 /* Target cpu costs. */
293 struct processor_costs {
294 const int mulsi; /* cost of SImode multiplication. */
295 const int mulsi_const; /* cost of SImode multiplication by constant. */
296 const int mulsi_const9; /* cost of SImode mult by short constant. */
297 const int muldi; /* cost of DImode multiplication. */
298 const int divsi; /* cost of SImode division. */
299 const int divdi; /* cost of DImode division. */
300 const int fp; /* cost of simple SFmode and DFmode insns. */
301 const int dmul; /* cost of DFmode multiplication (and fmadd). */
302 const int sdiv; /* cost of SFmode division (fdivs). */
303 const int ddiv; /* cost of DFmode division (fdiv). */
306 const struct processor_costs *rs6000_cost;
308 /* Processor costs (relative to an add) */
310 /* Instruction size costs on 32bit processors. */
312 struct processor_costs size32_cost = {
313 COSTS_N_INSNS (1), /* mulsi */
314 COSTS_N_INSNS (1), /* mulsi_const */
315 COSTS_N_INSNS (1), /* mulsi_const9 */
316 COSTS_N_INSNS (1), /* muldi */
317 COSTS_N_INSNS (1), /* divsi */
318 COSTS_N_INSNS (1), /* divdi */
319 COSTS_N_INSNS (1), /* fp */
320 COSTS_N_INSNS (1), /* dmul */
321 COSTS_N_INSNS (1), /* sdiv */
322 COSTS_N_INSNS (1), /* ddiv */
325 /* Instruction size costs on 64bit processors. */
327 struct processor_costs size64_cost = {
328 COSTS_N_INSNS (1), /* mulsi */
329 COSTS_N_INSNS (1), /* mulsi_const */
330 COSTS_N_INSNS (1), /* mulsi_const9 */
331 COSTS_N_INSNS (1), /* muldi */
332 COSTS_N_INSNS (1), /* divsi */
333 COSTS_N_INSNS (1), /* divdi */
334 COSTS_N_INSNS (1), /* fp */
335 COSTS_N_INSNS (1), /* dmul */
336 COSTS_N_INSNS (1), /* sdiv */
337 COSTS_N_INSNS (1), /* ddiv */
340 /* Instruction costs on RIOS1 processors. */
342 struct processor_costs rios1_cost = {
343 COSTS_N_INSNS (5), /* mulsi */
344 COSTS_N_INSNS (4), /* mulsi_const */
345 COSTS_N_INSNS (3), /* mulsi_const9 */
346 COSTS_N_INSNS (5), /* muldi */
347 COSTS_N_INSNS (19), /* divsi */
348 COSTS_N_INSNS (19), /* divdi */
349 COSTS_N_INSNS (2), /* fp */
350 COSTS_N_INSNS (2), /* dmul */
351 COSTS_N_INSNS (19), /* sdiv */
352 COSTS_N_INSNS (19), /* ddiv */
355 /* Instruction costs on RIOS2 processors. */
357 struct processor_costs rios2_cost = {
358 COSTS_N_INSNS (2), /* mulsi */
359 COSTS_N_INSNS (2), /* mulsi_const */
360 COSTS_N_INSNS (2), /* mulsi_const9 */
361 COSTS_N_INSNS (2), /* muldi */
362 COSTS_N_INSNS (13), /* divsi */
363 COSTS_N_INSNS (13), /* divdi */
364 COSTS_N_INSNS (2), /* fp */
365 COSTS_N_INSNS (2), /* dmul */
366 COSTS_N_INSNS (17), /* sdiv */
367 COSTS_N_INSNS (17), /* ddiv */
370 /* Instruction costs on RS64A processors. */
372 struct processor_costs rs64a_cost = {
373 COSTS_N_INSNS (20), /* mulsi */
374 COSTS_N_INSNS (12), /* mulsi_const */
375 COSTS_N_INSNS (8), /* mulsi_const9 */
376 COSTS_N_INSNS (34), /* muldi */
377 COSTS_N_INSNS (65), /* divsi */
378 COSTS_N_INSNS (67), /* divdi */
379 COSTS_N_INSNS (4), /* fp */
380 COSTS_N_INSNS (4), /* dmul */
381 COSTS_N_INSNS (31), /* sdiv */
382 COSTS_N_INSNS (31), /* ddiv */
385 /* Instruction costs on MPCCORE processors. */
387 struct processor_costs mpccore_cost = {
388 COSTS_N_INSNS (2), /* mulsi */
389 COSTS_N_INSNS (2), /* mulsi_const */
390 COSTS_N_INSNS (2), /* mulsi_const9 */
391 COSTS_N_INSNS (2), /* muldi */
392 COSTS_N_INSNS (6), /* divsi */
393 COSTS_N_INSNS (6), /* divdi */
394 COSTS_N_INSNS (4), /* fp */
395 COSTS_N_INSNS (5), /* dmul */
396 COSTS_N_INSNS (10), /* sdiv */
397 COSTS_N_INSNS (17), /* ddiv */
400 /* Instruction costs on PPC403 processors. */
402 struct processor_costs ppc403_cost = {
403 COSTS_N_INSNS (4), /* mulsi */
404 COSTS_N_INSNS (4), /* mulsi_const */
405 COSTS_N_INSNS (4), /* mulsi_const9 */
406 COSTS_N_INSNS (4), /* muldi */
407 COSTS_N_INSNS (33), /* divsi */
408 COSTS_N_INSNS (33), /* divdi */
409 COSTS_N_INSNS (11), /* fp */
410 COSTS_N_INSNS (11), /* dmul */
411 COSTS_N_INSNS (11), /* sdiv */
412 COSTS_N_INSNS (11), /* ddiv */
415 /* Instruction costs on PPC405 processors. */
417 struct processor_costs ppc405_cost = {
418 COSTS_N_INSNS (5), /* mulsi */
419 COSTS_N_INSNS (4), /* mulsi_const */
420 COSTS_N_INSNS (3), /* mulsi_const9 */
421 COSTS_N_INSNS (5), /* muldi */
422 COSTS_N_INSNS (35), /* divsi */
423 COSTS_N_INSNS (35), /* divdi */
424 COSTS_N_INSNS (11), /* fp */
425 COSTS_N_INSNS (11), /* dmul */
426 COSTS_N_INSNS (11), /* sdiv */
427 COSTS_N_INSNS (11), /* ddiv */
430 /* Instruction costs on PPC440 processors. */
432 struct processor_costs ppc440_cost = {
433 COSTS_N_INSNS (3), /* mulsi */
434 COSTS_N_INSNS (2), /* mulsi_const */
435 COSTS_N_INSNS (2), /* mulsi_const9 */
436 COSTS_N_INSNS (3), /* muldi */
437 COSTS_N_INSNS (34), /* divsi */
438 COSTS_N_INSNS (34), /* divdi */
439 COSTS_N_INSNS (5), /* fp */
440 COSTS_N_INSNS (5), /* dmul */
441 COSTS_N_INSNS (19), /* sdiv */
442 COSTS_N_INSNS (33), /* ddiv */
445 /* Instruction costs on PPC601 processors. */
447 struct processor_costs ppc601_cost = {
448 COSTS_N_INSNS (5), /* mulsi */
449 COSTS_N_INSNS (5), /* mulsi_const */
450 COSTS_N_INSNS (5), /* mulsi_const9 */
451 COSTS_N_INSNS (5), /* muldi */
452 COSTS_N_INSNS (36), /* divsi */
453 COSTS_N_INSNS (36), /* divdi */
454 COSTS_N_INSNS (4), /* fp */
455 COSTS_N_INSNS (5), /* dmul */
456 COSTS_N_INSNS (17), /* sdiv */
457 COSTS_N_INSNS (31), /* ddiv */
460 /* Instruction costs on PPC603 processors. */
462 struct processor_costs ppc603_cost = {
463 COSTS_N_INSNS (5), /* mulsi */
464 COSTS_N_INSNS (3), /* mulsi_const */
465 COSTS_N_INSNS (2), /* mulsi_const9 */
466 COSTS_N_INSNS (5), /* muldi */
467 COSTS_N_INSNS (37), /* divsi */
468 COSTS_N_INSNS (37), /* divdi */
469 COSTS_N_INSNS (3), /* fp */
470 COSTS_N_INSNS (4), /* dmul */
471 COSTS_N_INSNS (18), /* sdiv */
472 COSTS_N_INSNS (33), /* ddiv */
475 /* Instruction costs on PPC604 processors. */
477 struct processor_costs ppc604_cost = {
478 COSTS_N_INSNS (4), /* mulsi */
479 COSTS_N_INSNS (4), /* mulsi_const */
480 COSTS_N_INSNS (4), /* mulsi_const9 */
481 COSTS_N_INSNS (4), /* muldi */
482 COSTS_N_INSNS (20), /* divsi */
483 COSTS_N_INSNS (20), /* divdi */
484 COSTS_N_INSNS (3), /* fp */
485 COSTS_N_INSNS (3), /* dmul */
486 COSTS_N_INSNS (18), /* sdiv */
487 COSTS_N_INSNS (32), /* ddiv */
490 /* Instruction costs on PPC604e processors. */
492 struct processor_costs ppc604e_cost = {
493 COSTS_N_INSNS (2), /* mulsi */
494 COSTS_N_INSNS (2), /* mulsi_const */
495 COSTS_N_INSNS (2), /* mulsi_const9 */
496 COSTS_N_INSNS (2), /* muldi */
497 COSTS_N_INSNS (20), /* divsi */
498 COSTS_N_INSNS (20), /* divdi */
499 COSTS_N_INSNS (3), /* fp */
500 COSTS_N_INSNS (3), /* dmul */
501 COSTS_N_INSNS (18), /* sdiv */
502 COSTS_N_INSNS (32), /* ddiv */
505 /* Instruction costs on PPC620 processors. */
507 struct processor_costs ppc620_cost = {
508 COSTS_N_INSNS (5), /* mulsi */
509 COSTS_N_INSNS (4), /* mulsi_const */
510 COSTS_N_INSNS (3), /* mulsi_const9 */
511 COSTS_N_INSNS (7), /* muldi */
512 COSTS_N_INSNS (21), /* divsi */
513 COSTS_N_INSNS (37), /* divdi */
514 COSTS_N_INSNS (3), /* fp */
515 COSTS_N_INSNS (3), /* dmul */
516 COSTS_N_INSNS (18), /* sdiv */
517 COSTS_N_INSNS (32), /* ddiv */
520 /* Instruction costs on PPC630 processors. */
522 struct processor_costs ppc630_cost = {
523 COSTS_N_INSNS (5), /* mulsi */
524 COSTS_N_INSNS (4), /* mulsi_const */
525 COSTS_N_INSNS (3), /* mulsi_const9 */
526 COSTS_N_INSNS (7), /* muldi */
527 COSTS_N_INSNS (21), /* divsi */
528 COSTS_N_INSNS (37), /* divdi */
529 COSTS_N_INSNS (3), /* fp */
530 COSTS_N_INSNS (3), /* dmul */
531 COSTS_N_INSNS (17), /* sdiv */
532 COSTS_N_INSNS (21), /* ddiv */
535 /* Instruction costs on PPC750 and PPC7400 processors. */
537 struct processor_costs ppc750_cost = {
538 COSTS_N_INSNS (5), /* mulsi */
539 COSTS_N_INSNS (3), /* mulsi_const */
540 COSTS_N_INSNS (2), /* mulsi_const9 */
541 COSTS_N_INSNS (5), /* muldi */
542 COSTS_N_INSNS (17), /* divsi */
543 COSTS_N_INSNS (17), /* divdi */
544 COSTS_N_INSNS (3), /* fp */
545 COSTS_N_INSNS (3), /* dmul */
546 COSTS_N_INSNS (17), /* sdiv */
547 COSTS_N_INSNS (31), /* ddiv */
550 /* Instruction costs on PPC7450 processors. */
552 struct processor_costs ppc7450_cost = {
553 COSTS_N_INSNS (4), /* mulsi */
554 COSTS_N_INSNS (3), /* mulsi_const */
555 COSTS_N_INSNS (3), /* mulsi_const9 */
556 COSTS_N_INSNS (4), /* muldi */
557 COSTS_N_INSNS (23), /* divsi */
558 COSTS_N_INSNS (23), /* divdi */
559 COSTS_N_INSNS (5), /* fp */
560 COSTS_N_INSNS (5), /* dmul */
561 COSTS_N_INSNS (21), /* sdiv */
562 COSTS_N_INSNS (35), /* ddiv */
565 /* Instruction costs on PPC8540 processors. */
567 struct processor_costs ppc8540_cost = {
568 COSTS_N_INSNS (4), /* mulsi */
569 COSTS_N_INSNS (4), /* mulsi_const */
570 COSTS_N_INSNS (4), /* mulsi_const9 */
571 COSTS_N_INSNS (4), /* muldi */
572 COSTS_N_INSNS (19), /* divsi */
573 COSTS_N_INSNS (19), /* divdi */
574 COSTS_N_INSNS (4), /* fp */
575 COSTS_N_INSNS (4), /* dmul */
576 COSTS_N_INSNS (29), /* sdiv */
577 COSTS_N_INSNS (29), /* ddiv */
580 /* Instruction costs on POWER4 and POWER5 processors. */
582 struct processor_costs power4_cost = {
583 COSTS_N_INSNS (3), /* mulsi */
584 COSTS_N_INSNS (2), /* mulsi_const */
585 COSTS_N_INSNS (2), /* mulsi_const9 */
586 COSTS_N_INSNS (4), /* muldi */
587 COSTS_N_INSNS (18), /* divsi */
588 COSTS_N_INSNS (34), /* divdi */
589 COSTS_N_INSNS (3), /* fp */
590 COSTS_N_INSNS (3), /* dmul */
591 COSTS_N_INSNS (17), /* sdiv */
592 COSTS_N_INSNS (17), /* ddiv */
596 static bool rs6000_function_ok_for_sibcall (tree, tree);
597 static int num_insns_constant_wide (HOST_WIDE_INT);
598 static void validate_condition_mode (enum rtx_code, enum machine_mode);
599 static rtx rs6000_generate_compare (enum rtx_code);
600 static void rs6000_maybe_dead (rtx);
601 static void rs6000_emit_stack_tie (void);
602 static void rs6000_frame_related (rtx, rtx, HOST_WIDE_INT, rtx, rtx);
603 static rtx spe_synthesize_frame_save (rtx);
604 static bool spe_func_has_64bit_regs_p (void);
605 static void emit_frame_save (rtx, rtx, enum machine_mode, unsigned int,
607 static rtx gen_frame_mem_offset (enum machine_mode, rtx, int);
608 static void rs6000_emit_allocate_stack (HOST_WIDE_INT, int);
609 static unsigned rs6000_hash_constant (rtx);
610 static unsigned toc_hash_function (const void *);
611 static int toc_hash_eq (const void *, const void *);
612 static int constant_pool_expr_1 (rtx, int *, int *);
613 static bool constant_pool_expr_p (rtx);
614 static bool toc_relative_expr_p (rtx);
615 static bool legitimate_small_data_p (enum machine_mode, rtx);
616 static bool legitimate_indexed_address_p (rtx, int);
617 static bool legitimate_indirect_address_p (rtx, int);
618 static bool macho_lo_sum_memory_operand (rtx x, enum machine_mode mode);
619 static bool legitimate_lo_sum_address_p (enum machine_mode, rtx, int);
620 static struct machine_function * rs6000_init_machine_status (void);
621 static bool rs6000_assemble_integer (rtx, unsigned int, int);
622 #ifdef HAVE_GAS_HIDDEN
623 static void rs6000_assemble_visibility (tree, int);
625 static int rs6000_ra_ever_killed (void);
626 static tree rs6000_handle_longcall_attribute (tree *, tree, tree, int, bool *);
627 static tree rs6000_handle_altivec_attribute (tree *, tree, tree, int, bool *);
628 static void rs6000_eliminate_indexed_memrefs (rtx operands[2]);
629 static const char *rs6000_mangle_fundamental_type (tree);
630 extern const struct attribute_spec rs6000_attribute_table[];
631 static void rs6000_set_default_type_attributes (tree);
632 static void rs6000_output_function_prologue (FILE *, HOST_WIDE_INT);
633 static void rs6000_output_function_epilogue (FILE *, HOST_WIDE_INT);
634 static void rs6000_output_mi_thunk (FILE *, tree, HOST_WIDE_INT, HOST_WIDE_INT,
636 static rtx rs6000_emit_set_long_const (rtx, HOST_WIDE_INT, HOST_WIDE_INT);
637 static bool rs6000_return_in_memory (tree, tree);
638 static void rs6000_file_start (void);
640 static unsigned int rs6000_elf_section_type_flags (tree, const char *, int);
641 static void rs6000_elf_asm_out_constructor (rtx, int);
642 static void rs6000_elf_asm_out_destructor (rtx, int);
643 static void rs6000_elf_select_section (tree, int, unsigned HOST_WIDE_INT);
644 static void rs6000_elf_unique_section (tree, int);
645 static void rs6000_elf_select_rtx_section (enum machine_mode, rtx,
646 unsigned HOST_WIDE_INT);
647 static void rs6000_elf_encode_section_info (tree, rtx, int)
649 static bool rs6000_elf_in_small_data_p (tree);
652 static void rs6000_xcoff_asm_globalize_label (FILE *, const char *);
653 static void rs6000_xcoff_asm_named_section (const char *, unsigned int);
654 static void rs6000_xcoff_select_section (tree, int, unsigned HOST_WIDE_INT);
655 static void rs6000_xcoff_unique_section (tree, int);
656 static void rs6000_xcoff_select_rtx_section (enum machine_mode, rtx,
657 unsigned HOST_WIDE_INT);
658 static const char * rs6000_xcoff_strip_name_encoding (const char *);
659 static unsigned int rs6000_xcoff_section_type_flags (tree, const char *, int);
660 static void rs6000_xcoff_file_start (void);
661 static void rs6000_xcoff_file_end (void);
664 static bool rs6000_binds_local_p (tree);
666 static int rs6000_variable_issue (FILE *, int, rtx, int);
667 static bool rs6000_rtx_costs (rtx, int, int, int *);
668 static int rs6000_adjust_cost (rtx, rtx, rtx, int);
669 static bool is_microcoded_insn (rtx);
670 static int is_dispatch_slot_restricted (rtx);
671 static bool is_cracked_insn (rtx);
672 static bool is_branch_slot_insn (rtx);
673 static int rs6000_adjust_priority (rtx, int);
674 static int rs6000_issue_rate (void);
675 static bool rs6000_is_costly_dependence (rtx, rtx, rtx, int, int);
676 static rtx get_next_active_insn (rtx, rtx);
677 static bool insn_terminates_group_p (rtx , enum group_termination);
678 static bool is_costly_group (rtx *, rtx);
679 static int force_new_group (int, FILE *, rtx *, rtx, bool *, int, int *);
680 static int redefine_groups (FILE *, int, rtx, rtx);
681 static int pad_groups (FILE *, int, rtx, rtx);
682 static void rs6000_sched_finish (FILE *, int);
683 static int rs6000_use_sched_lookahead (void);
685 static void rs6000_init_builtins (void);
686 static rtx rs6000_expand_unop_builtin (enum insn_code, tree, rtx);
687 static rtx rs6000_expand_binop_builtin (enum insn_code, tree, rtx);
688 static rtx rs6000_expand_ternop_builtin (enum insn_code, tree, rtx);
689 static rtx rs6000_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
690 static void altivec_init_builtins (void);
691 static void rs6000_common_init_builtins (void);
692 static void rs6000_init_libfuncs (void);
694 static void enable_mask_for_builtins (struct builtin_description *, int,
695 enum rs6000_builtins,
696 enum rs6000_builtins);
697 static tree build_opaque_vector_type (tree, int);
698 static void spe_init_builtins (void);
699 static rtx spe_expand_builtin (tree, rtx, bool *);
700 static rtx spe_expand_stv_builtin (enum insn_code, tree);
701 static rtx spe_expand_predicate_builtin (enum insn_code, tree, rtx);
702 static rtx spe_expand_evsel_builtin (enum insn_code, tree, rtx);
703 static int rs6000_emit_int_cmove (rtx, rtx, rtx, rtx);
704 static rs6000_stack_t *rs6000_stack_info (void);
705 static void debug_stack_info (rs6000_stack_t *);
707 static rtx altivec_expand_builtin (tree, rtx, bool *);
708 static rtx altivec_expand_ld_builtin (tree, rtx, bool *);
709 static rtx altivec_expand_st_builtin (tree, rtx, bool *);
710 static rtx altivec_expand_dst_builtin (tree, rtx, bool *);
711 static rtx altivec_expand_abs_builtin (enum insn_code, tree, rtx);
712 static rtx altivec_expand_predicate_builtin (enum insn_code,
713 const char *, tree, rtx);
714 static rtx altivec_expand_lv_builtin (enum insn_code, tree, rtx);
715 static rtx altivec_expand_stv_builtin (enum insn_code, tree);
716 static void rs6000_parse_abi_options (void);
717 static void rs6000_parse_alignment_option (void);
718 static void rs6000_parse_tls_size_option (void);
719 static void rs6000_parse_yes_no_option (const char *, const char *, int *);
720 static int first_altivec_reg_to_save (void);
721 static unsigned int compute_vrsave_mask (void);
722 static void compute_save_world_info(rs6000_stack_t *info_ptr);
723 static void is_altivec_return_reg (rtx, void *);
724 static rtx generate_set_vrsave (rtx, rs6000_stack_t *, int);
725 int easy_vector_constant (rtx, enum machine_mode);
726 static int easy_vector_same (rtx, enum machine_mode);
727 static int easy_vector_splat_const (int, enum machine_mode);
728 static bool is_ev64_opaque_type (tree);
729 static rtx rs6000_dwarf_register_span (rtx);
730 static rtx rs6000_legitimize_tls_address (rtx, enum tls_model);
731 static rtx rs6000_tls_get_addr (void);
732 static rtx rs6000_got_sym (void);
733 static inline int rs6000_tls_symbol_ref_1 (rtx *, void *);
734 static const char *rs6000_get_some_local_dynamic_name (void);
735 static int rs6000_get_some_local_dynamic_name_1 (rtx *, void *);
736 static rtx rs6000_complex_function_value (enum machine_mode);
737 static rtx rs6000_spe_function_arg (CUMULATIVE_ARGS *,
738 enum machine_mode, tree);
739 static rtx rs6000_mixed_function_arg (enum machine_mode, tree, int);
740 static void rs6000_move_block_from_reg (int regno, rtx x, int nregs);
741 static void setup_incoming_varargs (CUMULATIVE_ARGS *,
742 enum machine_mode, tree,
744 static bool rs6000_pass_by_reference (CUMULATIVE_ARGS *, enum machine_mode,
747 static void macho_branch_islands (void);
748 static void add_compiler_branch_island (tree, tree, int);
749 static int no_previous_def (tree function_name);
750 static tree get_prev_label (tree function_name);
753 static tree rs6000_build_builtin_va_list (void);
754 static tree rs6000_gimplify_va_arg (tree, tree, tree *, tree *);
755 static bool rs6000_must_pass_in_stack (enum machine_mode, tree);
756 static bool rs6000_vector_mode_supported_p (enum machine_mode);
758 static enum machine_mode rs6000_eh_return_filter_mode (void);
760 /* Hash table stuff for keeping track of TOC entries. */
762 struct toc_hash_struct GTY(())
764 /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
765 ASM_OUTPUT_SPECIAL_POOL_ENTRY_P. */
767 enum machine_mode key_mode;
771 static GTY ((param_is (struct toc_hash_struct))) htab_t toc_hash_table;
773 /* Default register names. */
774 char rs6000_reg_names[][8] =
776 "0", "1", "2", "3", "4", "5", "6", "7",
777 "8", "9", "10", "11", "12", "13", "14", "15",
778 "16", "17", "18", "19", "20", "21", "22", "23",
779 "24", "25", "26", "27", "28", "29", "30", "31",
780 "0", "1", "2", "3", "4", "5", "6", "7",
781 "8", "9", "10", "11", "12", "13", "14", "15",
782 "16", "17", "18", "19", "20", "21", "22", "23",
783 "24", "25", "26", "27", "28", "29", "30", "31",
784 "mq", "lr", "ctr","ap",
785 "0", "1", "2", "3", "4", "5", "6", "7",
787 /* AltiVec registers. */
788 "0", "1", "2", "3", "4", "5", "6", "7",
789 "8", "9", "10", "11", "12", "13", "14", "15",
790 "16", "17", "18", "19", "20", "21", "22", "23",
791 "24", "25", "26", "27", "28", "29", "30", "31",
797 #ifdef TARGET_REGNAMES
798 static const char alt_reg_names[][8] =
800 "%r0", "%r1", "%r2", "%r3", "%r4", "%r5", "%r6", "%r7",
801 "%r8", "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
802 "%r16", "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
803 "%r24", "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
804 "%f0", "%f1", "%f2", "%f3", "%f4", "%f5", "%f6", "%f7",
805 "%f8", "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
806 "%f16", "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
807 "%f24", "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
808 "mq", "lr", "ctr", "ap",
809 "%cr0", "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
811 /* AltiVec registers. */
812 "%v0", "%v1", "%v2", "%v3", "%v4", "%v5", "%v6", "%v7",
813 "%v8", "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
814 "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
815 "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
822 #ifndef MASK_STRICT_ALIGN
823 #define MASK_STRICT_ALIGN 0
825 #ifndef TARGET_PROFILE_KERNEL
826 #define TARGET_PROFILE_KERNEL 0
829 /* The VRSAVE bitmask puts bit %v0 as the most significant bit. */
830 #define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))
832 /* Return 1 for a symbol ref for a thread-local storage symbol. */
833 #define RS6000_SYMBOL_REF_TLS_P(RTX) \
834 (GET_CODE (RTX) == SYMBOL_REF && SYMBOL_REF_TLS_MODEL (RTX) != 0)
836 /* Initialize the GCC target structure. */
837 #undef TARGET_ATTRIBUTE_TABLE
838 #define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
839 #undef TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
840 #define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES rs6000_set_default_type_attributes
842 #undef TARGET_ASM_ALIGNED_DI_OP
843 #define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP
845 /* Default unaligned ops are only provided for ELF. Find the ops needed
846 for non-ELF systems. */
847 #ifndef OBJECT_FORMAT_ELF
849 /* For XCOFF. rs6000_assemble_integer will handle unaligned DIs on
851 #undef TARGET_ASM_UNALIGNED_HI_OP
852 #define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
853 #undef TARGET_ASM_UNALIGNED_SI_OP
854 #define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
855 #undef TARGET_ASM_UNALIGNED_DI_OP
856 #define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
859 #undef TARGET_ASM_UNALIGNED_HI_OP
860 #define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
861 #undef TARGET_ASM_UNALIGNED_SI_OP
862 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
863 #undef TARGET_ASM_UNALIGNED_DI_OP
864 #define TARGET_ASM_UNALIGNED_DI_OP "\t.quad\t"
865 #undef TARGET_ASM_ALIGNED_DI_OP
866 #define TARGET_ASM_ALIGNED_DI_OP "\t.quad\t"
870 /* This hook deals with fixups for relocatable code and DI-mode objects
872 #undef TARGET_ASM_INTEGER
873 #define TARGET_ASM_INTEGER rs6000_assemble_integer
875 #ifdef HAVE_GAS_HIDDEN
876 #undef TARGET_ASM_ASSEMBLE_VISIBILITY
877 #define TARGET_ASM_ASSEMBLE_VISIBILITY rs6000_assemble_visibility
880 #undef TARGET_HAVE_TLS
881 #define TARGET_HAVE_TLS HAVE_AS_TLS
883 #undef TARGET_CANNOT_FORCE_CONST_MEM
884 #define TARGET_CANNOT_FORCE_CONST_MEM rs6000_tls_referenced_p
886 #undef TARGET_ASM_FUNCTION_PROLOGUE
887 #define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
888 #undef TARGET_ASM_FUNCTION_EPILOGUE
889 #define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue
891 #undef TARGET_SCHED_VARIABLE_ISSUE
892 #define TARGET_SCHED_VARIABLE_ISSUE rs6000_variable_issue
894 #undef TARGET_SCHED_ISSUE_RATE
895 #define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
896 #undef TARGET_SCHED_ADJUST_COST
897 #define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
898 #undef TARGET_SCHED_ADJUST_PRIORITY
899 #define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority
900 #undef TARGET_SCHED_IS_COSTLY_DEPENDENCE
901 #define TARGET_SCHED_IS_COSTLY_DEPENDENCE rs6000_is_costly_dependence
902 #undef TARGET_SCHED_FINISH
903 #define TARGET_SCHED_FINISH rs6000_sched_finish
905 #undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
906 #define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD rs6000_use_sched_lookahead
908 #undef TARGET_INIT_BUILTINS
909 #define TARGET_INIT_BUILTINS rs6000_init_builtins
911 #undef TARGET_EXPAND_BUILTIN
912 #define TARGET_EXPAND_BUILTIN rs6000_expand_builtin
914 #undef TARGET_MANGLE_FUNDAMENTAL_TYPE
915 #define TARGET_MANGLE_FUNDAMENTAL_TYPE rs6000_mangle_fundamental_type
917 #undef TARGET_INIT_LIBFUNCS
918 #define TARGET_INIT_LIBFUNCS rs6000_init_libfuncs
921 #undef TARGET_BINDS_LOCAL_P
922 #define TARGET_BINDS_LOCAL_P rs6000_binds_local_p
925 #undef TARGET_ASM_OUTPUT_MI_THUNK
926 #define TARGET_ASM_OUTPUT_MI_THUNK rs6000_output_mi_thunk
928 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
929 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_tree_hwi_hwi_tree_true
931 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
932 #define TARGET_FUNCTION_OK_FOR_SIBCALL rs6000_function_ok_for_sibcall
934 #undef TARGET_RTX_COSTS
935 #define TARGET_RTX_COSTS rs6000_rtx_costs
936 #undef TARGET_ADDRESS_COST
937 #define TARGET_ADDRESS_COST hook_int_rtx_0
939 #undef TARGET_VECTOR_OPAQUE_P
940 #define TARGET_VECTOR_OPAQUE_P is_ev64_opaque_type
942 #undef TARGET_DWARF_REGISTER_SPAN
943 #define TARGET_DWARF_REGISTER_SPAN rs6000_dwarf_register_span
945 /* On rs6000, function arguments are promoted, as are function return
947 #undef TARGET_PROMOTE_FUNCTION_ARGS
948 #define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_tree_true
949 #undef TARGET_PROMOTE_FUNCTION_RETURN
950 #define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_tree_true
952 #undef TARGET_RETURN_IN_MEMORY
953 #define TARGET_RETURN_IN_MEMORY rs6000_return_in_memory
955 #undef TARGET_SETUP_INCOMING_VARARGS
956 #define TARGET_SETUP_INCOMING_VARARGS setup_incoming_varargs
958 /* Always strict argument naming on rs6000. */
959 #undef TARGET_STRICT_ARGUMENT_NAMING
960 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
961 #undef TARGET_PRETEND_OUTGOING_VARARGS_NAMED
962 #define TARGET_PRETEND_OUTGOING_VARARGS_NAMED hook_bool_CUMULATIVE_ARGS_true
963 #undef TARGET_SPLIT_COMPLEX_ARG
964 #define TARGET_SPLIT_COMPLEX_ARG hook_bool_tree_true
965 #undef TARGET_MUST_PASS_IN_STACK
966 #define TARGET_MUST_PASS_IN_STACK rs6000_must_pass_in_stack
967 #undef TARGET_PASS_BY_REFERENCE
968 #define TARGET_PASS_BY_REFERENCE rs6000_pass_by_reference
970 #undef TARGET_BUILD_BUILTIN_VA_LIST
971 #define TARGET_BUILD_BUILTIN_VA_LIST rs6000_build_builtin_va_list
973 #undef TARGET_GIMPLIFY_VA_ARG_EXPR
974 #define TARGET_GIMPLIFY_VA_ARG_EXPR rs6000_gimplify_va_arg
976 #undef TARGET_EH_RETURN_FILTER_MODE
977 #define TARGET_EH_RETURN_FILTER_MODE rs6000_eh_return_filter_mode
979 #undef TARGET_VECTOR_MODE_SUPPORTED_P
980 #define TARGET_VECTOR_MODE_SUPPORTED_P rs6000_vector_mode_supported_p
982 struct gcc_target targetm = TARGET_INITIALIZER;
985 /* Value is 1 if hard register REGNO can hold a value of machine-mode
988 rs6000_hard_regno_mode_ok (int regno, enum machine_mode mode)
990 /* The GPRs can hold any mode, but values bigger than one register
991 cannot go past R31. */
992 if (INT_REGNO_P (regno))
993 return INT_REGNO_P (regno + HARD_REGNO_NREGS (regno, mode) - 1);
995 /* The float registers can only hold floating modes and DImode. */
996 if (FP_REGNO_P (regno))
998 (GET_MODE_CLASS (mode) == MODE_FLOAT
999 && FP_REGNO_P (regno + HARD_REGNO_NREGS (regno, mode) - 1))
1000 || (GET_MODE_CLASS (mode) == MODE_INT
1001 && GET_MODE_SIZE (mode) == UNITS_PER_FP_WORD);
1003 /* The CR register can only hold CC modes. */
1004 if (CR_REGNO_P (regno))
1005 return GET_MODE_CLASS (mode) == MODE_CC;
1007 if (XER_REGNO_P (regno))
1008 return mode == PSImode;
1010 /* AltiVec only in AldyVec registers. */
1011 if (ALTIVEC_REGNO_P (regno))
1012 return ALTIVEC_VECTOR_MODE (mode);
1014 /* ...but GPRs can hold SIMD data on the SPE in one register. */
1015 if (SPE_SIMD_REGNO_P (regno) && TARGET_SPE && SPE_VECTOR_MODE (mode))
1018 /* We cannot put TImode anywhere except general register and it must be
1019 able to fit within the register set. */
1021 return GET_MODE_SIZE (mode) <= UNITS_PER_WORD;
1024 /* Initialize rs6000_hard_regno_mode_ok_p table. */
1026 rs6000_init_hard_regno_mode_ok (void)
1030 for (r = 0; r < FIRST_PSEUDO_REGISTER; ++r)
1031 for (m = 0; m < NUM_MACHINE_MODES; ++m)
1032 if (rs6000_hard_regno_mode_ok (r, m))
1033 rs6000_hard_regno_mode_ok_p[m][r] = true;
1036 /* If not otherwise specified by a target, make 'long double' equivalent to
1039 #ifndef RS6000_DEFAULT_LONG_DOUBLE_SIZE
1040 #define RS6000_DEFAULT_LONG_DOUBLE_SIZE 64
1043 /* Override command line options. Mostly we process the processor
1044 type and sometimes adjust other TARGET_ options. */
1047 rs6000_override_options (const char *default_cpu)
1050 struct rs6000_cpu_select *ptr;
1053 /* Simplifications for entries below. */
1056 POWERPC_BASE_MASK = MASK_POWERPC | MASK_NEW_MNEMONICS,
1057 POWERPC_7400_MASK = POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_ALTIVEC
1060 /* This table occasionally claims that a processor does not support
1061 a particular feature even though it does, but the feature is slower
1062 than the alternative. Thus, it shouldn't be relied on as a
1063 complete description of the processor's support.
1065 Please keep this list in order, and don't forget to update the
1066 documentation in invoke.texi when adding a new processor or
1070 const char *const name; /* Canonical processor name. */
1071 const enum processor_type processor; /* Processor type enum value. */
1072 const int target_enable; /* Target flags to enable. */
1073 } const processor_target_table[]
1074 = {{"401", PROCESSOR_PPC403, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1075 {"403", PROCESSOR_PPC403,
1076 POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_STRICT_ALIGN},
1077 {"405", PROCESSOR_PPC405, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1078 {"405fp", PROCESSOR_PPC405, POWERPC_BASE_MASK},
1079 {"440", PROCESSOR_PPC440, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1080 {"440fp", PROCESSOR_PPC440, POWERPC_BASE_MASK},
1081 {"505", PROCESSOR_MPCCORE, POWERPC_BASE_MASK},
1082 {"601", PROCESSOR_PPC601,
1083 MASK_POWER | POWERPC_BASE_MASK | MASK_MULTIPLE | MASK_STRING},
1084 {"602", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1085 {"603", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1086 {"603e", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1087 {"604", PROCESSOR_PPC604, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1088 {"604e", PROCESSOR_PPC604e, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1089 {"620", PROCESSOR_PPC620,
1090 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
1091 {"630", PROCESSOR_PPC630,
1092 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
1093 {"740", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1094 {"7400", PROCESSOR_PPC7400, POWERPC_7400_MASK},
1095 {"7450", PROCESSOR_PPC7450, POWERPC_7400_MASK},
1096 {"750", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1097 {"801", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1098 {"821", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1099 {"823", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1100 {"8540", PROCESSOR_PPC8540, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1101 {"860", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1102 {"970", PROCESSOR_POWER4,
1103 POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
1104 {"common", PROCESSOR_COMMON, MASK_NEW_MNEMONICS},
1105 {"ec603e", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1106 {"G3", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1107 {"G4", PROCESSOR_PPC7450, POWERPC_7400_MASK},
1108 {"G5", PROCESSOR_POWER4,
1109 POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
1110 {"power", PROCESSOR_POWER, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1111 {"power2", PROCESSOR_POWER,
1112 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING},
1113 {"power3", PROCESSOR_PPC630,
1114 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
1115 {"power4", PROCESSOR_POWER4,
1116 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_MFCRF | MASK_POWERPC64},
1117 {"power5", PROCESSOR_POWER5,
1118 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_MFCRF | MASK_POWERPC64},
1119 {"powerpc", PROCESSOR_POWERPC, POWERPC_BASE_MASK},
1120 {"powerpc64", PROCESSOR_POWERPC64,
1121 POWERPC_BASE_MASK | MASK_POWERPC64},
1122 {"rios", PROCESSOR_RIOS1, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1123 {"rios1", PROCESSOR_RIOS1, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1124 {"rios2", PROCESSOR_RIOS2,
1125 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING},
1126 {"rsc", PROCESSOR_PPC601, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1127 {"rsc1", PROCESSOR_PPC601, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1128 {"rs64a", PROCESSOR_RS64A, POWERPC_BASE_MASK | MASK_POWERPC64},
1131 const size_t ptt_size = ARRAY_SIZE (processor_target_table);
1133 /* Some OSs don't support saving the high part of 64-bit registers on
1134 context switch. Other OSs don't support saving Altivec registers.
1135 On those OSs, we don't touch the MASK_POWERPC64 or MASK_ALTIVEC
1136 settings; if the user wants either, the user must explicitly specify
1137 them and we won't interfere with the user's specification. */
1140 POWER_MASKS = MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING,
1141 POWERPC_MASKS = (POWERPC_BASE_MASK | MASK_PPC_GPOPT
1142 | MASK_PPC_GFXOPT | MASK_POWERPC64 | MASK_ALTIVEC
1146 rs6000_init_hard_regno_mode_ok ();
1148 set_masks = POWER_MASKS | POWERPC_MASKS | MASK_SOFT_FLOAT;
1149 #ifdef OS_MISSING_POWERPC64
1150 if (OS_MISSING_POWERPC64)
1151 set_masks &= ~MASK_POWERPC64;
1153 #ifdef OS_MISSING_ALTIVEC
1154 if (OS_MISSING_ALTIVEC)
1155 set_masks &= ~MASK_ALTIVEC;
1158 /* Don't override these by the processor default if given explicitly. */
1159 set_masks &= ~(target_flags_explicit
1160 & (MASK_MULTIPLE | MASK_STRING | MASK_SOFT_FLOAT));
1162 /* Identify the processor type. */
1163 rs6000_select[0].string = default_cpu;
1164 rs6000_cpu = TARGET_POWERPC64 ? PROCESSOR_DEFAULT64 : PROCESSOR_DEFAULT;
1166 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
1168 ptr = &rs6000_select[i];
1169 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
1171 for (j = 0; j < ptt_size; j++)
1172 if (! strcmp (ptr->string, processor_target_table[j].name))
1174 if (ptr->set_tune_p)
1175 rs6000_cpu = processor_target_table[j].processor;
1177 if (ptr->set_arch_p)
1179 target_flags &= ~set_masks;
1180 target_flags |= (processor_target_table[j].target_enable
1187 error ("bad value (%s) for %s switch", ptr->string, ptr->name);
1194 /* If we are optimizing big endian systems for space, use the load/store
1195 multiple and string instructions. */
1196 if (BYTES_BIG_ENDIAN && optimize_size)
1197 target_flags |= ~target_flags_explicit & (MASK_MULTIPLE | MASK_STRING);
1199 /* Don't allow -mmultiple or -mstring on little endian systems
1200 unless the cpu is a 750, because the hardware doesn't support the
1201 instructions used in little endian mode, and causes an alignment
1202 trap. The 750 does not cause an alignment trap (except when the
1203 target is unaligned). */
1205 if (!BYTES_BIG_ENDIAN && rs6000_cpu != PROCESSOR_PPC750)
1207 if (TARGET_MULTIPLE)
1209 target_flags &= ~MASK_MULTIPLE;
1210 if ((target_flags_explicit & MASK_MULTIPLE) != 0)
1211 warning ("-mmultiple is not supported on little endian systems");
1216 target_flags &= ~MASK_STRING;
1217 if ((target_flags_explicit & MASK_STRING) != 0)
1218 warning ("-mstring is not supported on little endian systems");
1222 /* Set debug flags */
1223 if (rs6000_debug_name)
1225 if (! strcmp (rs6000_debug_name, "all"))
1226 rs6000_debug_stack = rs6000_debug_arg = 1;
1227 else if (! strcmp (rs6000_debug_name, "stack"))
1228 rs6000_debug_stack = 1;
1229 else if (! strcmp (rs6000_debug_name, "arg"))
1230 rs6000_debug_arg = 1;
1232 error ("unknown -mdebug-%s switch", rs6000_debug_name);
1235 if (rs6000_traceback_name)
1237 if (! strncmp (rs6000_traceback_name, "full", 4))
1238 rs6000_traceback = traceback_full;
1239 else if (! strncmp (rs6000_traceback_name, "part", 4))
1240 rs6000_traceback = traceback_part;
1241 else if (! strncmp (rs6000_traceback_name, "no", 2))
1242 rs6000_traceback = traceback_none;
1244 error ("unknown -mtraceback arg `%s'; expecting `full', `partial' or `none'",
1245 rs6000_traceback_name);
1248 /* Set size of long double */
1249 rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
1250 if (rs6000_long_double_size_string)
1253 int size = strtol (rs6000_long_double_size_string, &tail, 10);
1254 if (*tail != '\0' || (size != 64 && size != 128))
1255 error ("Unknown switch -mlong-double-%s",
1256 rs6000_long_double_size_string);
1258 rs6000_long_double_type_size = size;
1261 /* Set Altivec ABI as default for powerpc64 linux. */
1262 if (TARGET_ELF && TARGET_64BIT)
1264 rs6000_altivec_abi = 1;
1265 rs6000_altivec_vrsave = 1;
1268 /* Handle -mabi= options. */
1269 rs6000_parse_abi_options ();
1271 /* Handle -malign-XXXXX option. */
1272 rs6000_parse_alignment_option ();
1274 /* Handle generic -mFOO=YES/NO options. */
1275 rs6000_parse_yes_no_option ("vrsave", rs6000_altivec_vrsave_string,
1276 &rs6000_altivec_vrsave);
1277 rs6000_parse_yes_no_option ("isel", rs6000_isel_string,
1279 rs6000_parse_yes_no_option ("spe", rs6000_spe_string, &rs6000_spe);
1280 rs6000_parse_yes_no_option ("float-gprs", rs6000_float_gprs_string,
1281 &rs6000_float_gprs);
1283 /* Handle -mtls-size option. */
1284 rs6000_parse_tls_size_option ();
1286 #ifdef SUBTARGET_OVERRIDE_OPTIONS
1287 SUBTARGET_OVERRIDE_OPTIONS;
1289 #ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
1290 SUBSUBTARGET_OVERRIDE_OPTIONS;
1296 error ("AltiVec and E500 instructions cannot coexist");
1298 /* The e500 does not have string instructions, and we set
1299 MASK_STRING above when optimizing for size. */
1300 if ((target_flags & MASK_STRING) != 0)
1301 target_flags = target_flags & ~MASK_STRING;
1303 /* No SPE means 64-bit long doubles, even if an E500. */
1304 if (rs6000_spe_string != 0
1305 && !strcmp (rs6000_spe_string, "no"))
1306 rs6000_long_double_type_size = 64;
1308 else if (rs6000_select[1].string != NULL)
1310 /* For the powerpc-eabispe configuration, we set all these by
1311 default, so let's unset them if we manually set another
1312 CPU that is not the E500. */
1313 if (rs6000_abi_string == 0)
1315 if (rs6000_spe_string == 0)
1317 if (rs6000_float_gprs_string == 0)
1318 rs6000_float_gprs = 0;
1319 if (rs6000_isel_string == 0)
1321 if (rs6000_long_double_size_string == 0)
1322 rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
1325 rs6000_always_hint = (rs6000_cpu != PROCESSOR_POWER4
1326 && rs6000_cpu != PROCESSOR_POWER5);
1327 rs6000_sched_groups = (rs6000_cpu == PROCESSOR_POWER4
1328 || rs6000_cpu == PROCESSOR_POWER5);
1330 /* Handle -m(no-)longcall option. This is a bit of a cheap hack,
1331 using TARGET_OPTIONS to handle a toggle switch, but we're out of
1332 bits in target_flags so TARGET_SWITCHES cannot be used.
1333 Assumption here is that rs6000_longcall_switch points into the
1334 text of the complete option, rather than being a copy, so we can
1335 scan back for the presence or absence of the no- modifier. */
1336 if (rs6000_longcall_switch)
1338 const char *base = rs6000_longcall_switch;
1339 while (base[-1] != 'm') base--;
1341 if (*rs6000_longcall_switch != '\0')
1342 error ("invalid option `%s'", base);
1343 rs6000_default_long_calls = (base[0] != 'n');
1346 /* Handle -m(no-)warn-altivec-long similarly. */
1347 if (rs6000_warn_altivec_long_switch)
1349 const char *base = rs6000_warn_altivec_long_switch;
1350 while (base[-1] != 'm') base--;
1352 if (*rs6000_warn_altivec_long_switch != '\0')
1353 error ("invalid option `%s'", base);
1354 rs6000_warn_altivec_long = (base[0] != 'n');
1357 /* Handle -mprioritize-restricted-insns option. */
1358 rs6000_sched_restricted_insns_priority
1359 = (rs6000_sched_groups ? 1 : 0);
1360 if (rs6000_sched_restricted_insns_priority_str)
1361 rs6000_sched_restricted_insns_priority =
1362 atoi (rs6000_sched_restricted_insns_priority_str);
1364 /* Handle -msched-costly-dep option. */
1365 rs6000_sched_costly_dep
1366 = (rs6000_sched_groups ? store_to_load_dep_costly : no_dep_costly);
1367 if (rs6000_sched_costly_dep_str)
1369 if (! strcmp (rs6000_sched_costly_dep_str, "no"))
1370 rs6000_sched_costly_dep = no_dep_costly;
1371 else if (! strcmp (rs6000_sched_costly_dep_str, "all"))
1372 rs6000_sched_costly_dep = all_deps_costly;
1373 else if (! strcmp (rs6000_sched_costly_dep_str, "true_store_to_load"))
1374 rs6000_sched_costly_dep = true_store_to_load_dep_costly;
1375 else if (! strcmp (rs6000_sched_costly_dep_str, "store_to_load"))
1376 rs6000_sched_costly_dep = store_to_load_dep_costly;
1378 rs6000_sched_costly_dep = atoi (rs6000_sched_costly_dep_str);
1381 /* Handle -minsert-sched-nops option. */
1382 rs6000_sched_insert_nops
1383 = (rs6000_sched_groups ? sched_finish_regroup_exact : sched_finish_none);
1384 if (rs6000_sched_insert_nops_str)
1386 if (! strcmp (rs6000_sched_insert_nops_str, "no"))
1387 rs6000_sched_insert_nops = sched_finish_none;
1388 else if (! strcmp (rs6000_sched_insert_nops_str, "pad"))
1389 rs6000_sched_insert_nops = sched_finish_pad_groups;
1390 else if (! strcmp (rs6000_sched_insert_nops_str, "regroup_exact"))
1391 rs6000_sched_insert_nops = sched_finish_regroup_exact;
1393 rs6000_sched_insert_nops = atoi (rs6000_sched_insert_nops_str);
1396 #ifdef TARGET_REGNAMES
1397 /* If the user desires alternate register names, copy in the
1398 alternate names now. */
1399 if (TARGET_REGNAMES)
1400 memcpy (rs6000_reg_names, alt_reg_names, sizeof (rs6000_reg_names));
1403 /* Set TARGET_AIX_STRUCT_RET last, after the ABI is determined.
1404 If -maix-struct-return or -msvr4-struct-return was explicitly
1405 used, don't override with the ABI default. */
1406 if ((target_flags_explicit & MASK_AIX_STRUCT_RET) == 0)
1408 if (DEFAULT_ABI == ABI_V4 && !DRAFT_V4_STRUCT_RET)
1409 target_flags = (target_flags & ~MASK_AIX_STRUCT_RET);
1411 target_flags |= MASK_AIX_STRUCT_RET;
1414 if (TARGET_LONG_DOUBLE_128
1415 && (DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_DARWIN))
1416 REAL_MODE_FORMAT (TFmode) = &ibm_extended_format;
1418 /* Allocate an alias set for register saves & restores from stack. */
1419 rs6000_sr_alias_set = new_alias_set ();
1422 ASM_GENERATE_INTERNAL_LABEL (toc_label_name, "LCTOC", 1);
1424 /* We can only guarantee the availability of DI pseudo-ops when
1425 assembling for 64-bit targets. */
1428 targetm.asm_out.aligned_op.di = NULL;
1429 targetm.asm_out.unaligned_op.di = NULL;
1432 /* Set maximum branch target alignment at two instructions, eight bytes. */
1433 align_jumps_max_skip = 8;
1434 align_loops_max_skip = 8;
1436 /* Arrange to save and restore machine status around nested functions. */
1437 init_machine_status = rs6000_init_machine_status;
1439 /* We should always be splitting complex arguments, but we can't break
1440 Linux and Darwin ABIs at the moment. For now, only AIX is fixed. */
1441 if (DEFAULT_ABI != ABI_AIX)
1442 targetm.calls.split_complex_arg = NULL;
1444 /* Initialize rs6000_cost with the appropriate target costs. */
1446 rs6000_cost = TARGET_POWERPC64 ? &size64_cost : &size32_cost;
1450 case PROCESSOR_RIOS1:
1451 rs6000_cost = &rios1_cost;
1454 case PROCESSOR_RIOS2:
1455 rs6000_cost = &rios2_cost;
1458 case PROCESSOR_RS64A:
1459 rs6000_cost = &rs64a_cost;
1462 case PROCESSOR_MPCCORE:
1463 rs6000_cost = &mpccore_cost;
1466 case PROCESSOR_PPC403:
1467 rs6000_cost = &ppc403_cost;
1470 case PROCESSOR_PPC405:
1471 rs6000_cost = &ppc405_cost;
1474 case PROCESSOR_PPC440:
1475 rs6000_cost = &ppc440_cost;
1478 case PROCESSOR_PPC601:
1479 rs6000_cost = &ppc601_cost;
1482 case PROCESSOR_PPC603:
1483 rs6000_cost = &ppc603_cost;
1486 case PROCESSOR_PPC604:
1487 rs6000_cost = &ppc604_cost;
1490 case PROCESSOR_PPC604e:
1491 rs6000_cost = &ppc604e_cost;
1494 case PROCESSOR_PPC620:
1495 rs6000_cost = &ppc620_cost;
1498 case PROCESSOR_PPC630:
1499 rs6000_cost = &ppc630_cost;
1502 case PROCESSOR_PPC750:
1503 case PROCESSOR_PPC7400:
1504 rs6000_cost = &ppc750_cost;
1507 case PROCESSOR_PPC7450:
1508 rs6000_cost = &ppc7450_cost;
1511 case PROCESSOR_PPC8540:
1512 rs6000_cost = &ppc8540_cost;
1515 case PROCESSOR_POWER4:
1516 case PROCESSOR_POWER5:
1517 rs6000_cost = &power4_cost;
1525 /* Handle generic options of the form -mfoo=yes/no.
1526 NAME is the option name.
1527 VALUE is the option value.
1528 FLAG is the pointer to the flag where to store a 1 or 0, depending on
1529 whether the option value is 'yes' or 'no' respectively. */
1531 rs6000_parse_yes_no_option (const char *name, const char *value, int *flag)
1535 else if (!strcmp (value, "yes"))
1537 else if (!strcmp (value, "no"))
1540 error ("unknown -m%s= option specified: '%s'", name, value);
1543 /* Handle -mabi= options. */
1545 rs6000_parse_abi_options (void)
1547 if (rs6000_abi_string == 0)
1549 else if (! strcmp (rs6000_abi_string, "altivec"))
1551 rs6000_altivec_abi = 1;
1554 else if (! strcmp (rs6000_abi_string, "no-altivec"))
1555 rs6000_altivec_abi = 0;
1556 else if (! strcmp (rs6000_abi_string, "spe"))
1559 rs6000_altivec_abi = 0;
1560 if (!TARGET_SPE_ABI)
1561 error ("not configured for ABI: '%s'", rs6000_abi_string);
1564 else if (! strcmp (rs6000_abi_string, "no-spe"))
1567 error ("unknown ABI specified: '%s'", rs6000_abi_string);
1570 /* Handle -malign-XXXXXX options. */
1572 rs6000_parse_alignment_option (void)
1574 if (rs6000_alignment_string == 0)
1576 else if (! strcmp (rs6000_alignment_string, "power"))
1577 rs6000_alignment_flags = MASK_ALIGN_POWER;
1578 else if (! strcmp (rs6000_alignment_string, "natural"))
1579 rs6000_alignment_flags = MASK_ALIGN_NATURAL;
1581 error ("unknown -malign-XXXXX option specified: '%s'",
1582 rs6000_alignment_string);
1585 /* Validate and record the size specified with the -mtls-size option. */
1588 rs6000_parse_tls_size_option (void)
1590 if (rs6000_tls_size_string == 0)
1592 else if (strcmp (rs6000_tls_size_string, "16") == 0)
1593 rs6000_tls_size = 16;
1594 else if (strcmp (rs6000_tls_size_string, "32") == 0)
1595 rs6000_tls_size = 32;
1596 else if (strcmp (rs6000_tls_size_string, "64") == 0)
1597 rs6000_tls_size = 64;
1599 error ("bad value `%s' for -mtls-size switch", rs6000_tls_size_string);
1603 optimization_options (int level ATTRIBUTE_UNUSED, int size ATTRIBUTE_UNUSED)
1607 /* Do anything needed at the start of the asm file. */
1610 rs6000_file_start (void)
1614 const char *start = buffer;
1615 struct rs6000_cpu_select *ptr;
1616 const char *default_cpu = TARGET_CPU_DEFAULT;
1617 FILE *file = asm_out_file;
1619 default_file_start ();
1621 #ifdef TARGET_BI_ARCH
1622 if ((TARGET_DEFAULT ^ target_flags) & MASK_64BIT)
1626 if (flag_verbose_asm)
1628 sprintf (buffer, "\n%s rs6000/powerpc options:", ASM_COMMENT_START);
1629 rs6000_select[0].string = default_cpu;
1631 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
1633 ptr = &rs6000_select[i];
1634 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
1636 fprintf (file, "%s %s%s", start, ptr->name, ptr->string);
1641 #ifdef USING_ELFOS_H
1642 switch (rs6000_sdata)
1644 case SDATA_NONE: fprintf (file, "%s -msdata=none", start); start = ""; break;
1645 case SDATA_DATA: fprintf (file, "%s -msdata=data", start); start = ""; break;
1646 case SDATA_SYSV: fprintf (file, "%s -msdata=sysv", start); start = ""; break;
1647 case SDATA_EABI: fprintf (file, "%s -msdata=eabi", start); start = ""; break;
1650 if (rs6000_sdata && g_switch_value)
1652 fprintf (file, "%s -G " HOST_WIDE_INT_PRINT_UNSIGNED, start,
1663 /* Return nonzero if this function is known to have a null epilogue. */
1666 direct_return (void)
1668 if (reload_completed)
1670 rs6000_stack_t *info = rs6000_stack_info ();
1672 if (info->first_gp_reg_save == 32
1673 && info->first_fp_reg_save == 64
1674 && info->first_altivec_reg_save == LAST_ALTIVEC_REGNO + 1
1675 && ! info->lr_save_p
1676 && ! info->cr_save_p
1677 && info->vrsave_mask == 0
1685 /* Returns 1 always. */
1688 any_operand (rtx op ATTRIBUTE_UNUSED,
1689 enum machine_mode mode ATTRIBUTE_UNUSED)
1694 /* Returns 1 always. */
1697 any_parallel_operand (rtx op ATTRIBUTE_UNUSED,
1698 enum machine_mode mode ATTRIBUTE_UNUSED)
1703 /* Returns 1 if op is the count register. */
1706 count_register_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1708 if (GET_CODE (op) != REG)
1711 if (REGNO (op) == COUNT_REGISTER_REGNUM)
1714 if (REGNO (op) > FIRST_PSEUDO_REGISTER)
1720 /* Returns 1 if op is an altivec register. */
1723 altivec_register_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1726 return (register_operand (op, mode)
1727 && (GET_CODE (op) != REG
1728 || REGNO (op) > FIRST_PSEUDO_REGISTER
1729 || ALTIVEC_REGNO_P (REGNO (op))));
1733 xer_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1735 if (GET_CODE (op) != REG)
1738 if (XER_REGNO_P (REGNO (op)))
1744 /* Return 1 if OP is a signed 8-bit constant. Int multiplication
1745 by such constants completes more quickly. */
1748 s8bit_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1750 return ( GET_CODE (op) == CONST_INT
1751 && (INTVAL (op) >= -128 && INTVAL (op) <= 127));
1754 /* Return 1 if OP is a constant that can fit in a D field. */
1757 short_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1759 return (GET_CODE (op) == CONST_INT
1760 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'I'));
1763 /* Similar for an unsigned D field. */
1766 u_short_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1768 return (GET_CODE (op) == CONST_INT
1769 && CONST_OK_FOR_LETTER_P (INTVAL (op) & GET_MODE_MASK (mode), 'K'));
1772 /* Return 1 if OP is a CONST_INT that cannot fit in a signed D field. */
1775 non_short_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1777 return (GET_CODE (op) == CONST_INT
1778 && (unsigned HOST_WIDE_INT) (INTVAL (op) + 0x8000) >= 0x10000);
1781 /* Returns 1 if OP is a CONST_INT that is a positive value
1782 and an exact power of 2. */
1785 exact_log2_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1787 return (GET_CODE (op) == CONST_INT
1789 && exact_log2 (INTVAL (op)) >= 0);
1792 /* Returns 1 if OP is a register that is not special (i.e., not MQ,
1796 gpc_reg_operand (rtx op, enum machine_mode mode)
1798 return (register_operand (op, mode)
1799 && (GET_CODE (op) != REG
1800 || (REGNO (op) >= ARG_POINTER_REGNUM
1801 && !XER_REGNO_P (REGNO (op)))
1802 || REGNO (op) < MQ_REGNO));
1805 /* Returns 1 if OP is either a pseudo-register or a register denoting a
1809 cc_reg_operand (rtx op, enum machine_mode mode)
1811 return (register_operand (op, mode)
1812 && (GET_CODE (op) != REG
1813 || REGNO (op) >= FIRST_PSEUDO_REGISTER
1814 || CR_REGNO_P (REGNO (op))));
1817 /* Returns 1 if OP is either a pseudo-register or a register denoting a
1818 CR field that isn't CR0. */
1821 cc_reg_not_cr0_operand (rtx op, enum machine_mode mode)
1823 return (register_operand (op, mode)
1824 && (GET_CODE (op) != REG
1825 || REGNO (op) >= FIRST_PSEUDO_REGISTER
1826 || CR_REGNO_NOT_CR0_P (REGNO (op))));
1829 /* Returns 1 if OP is either a constant integer valid for a D-field or
1830 a non-special register. If a register, it must be in the proper
1831 mode unless MODE is VOIDmode. */
1834 reg_or_short_operand (rtx op, enum machine_mode mode)
1836 return short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
1839 /* Similar, except check if the negation of the constant would be
1840 valid for a D-field. Don't allow a constant zero, since all the
1841 patterns that call this predicate use "addic r1,r2,-constant" on
1842 a constant value to set a carry when r2 is greater or equal to
1843 "constant". That doesn't work for zero. */
1846 reg_or_neg_short_operand (rtx op, enum machine_mode mode)
1848 if (GET_CODE (op) == CONST_INT)
1849 return CONST_OK_FOR_LETTER_P (INTVAL (op), 'P') && INTVAL (op) != 0;
1851 return gpc_reg_operand (op, mode);
1854 /* Returns 1 if OP is either a constant integer valid for a DS-field or
1855 a non-special register. If a register, it must be in the proper
1856 mode unless MODE is VOIDmode. */
1859 reg_or_aligned_short_operand (rtx op, enum machine_mode mode)
1861 if (gpc_reg_operand (op, mode))
1863 else if (short_cint_operand (op, mode) && !(INTVAL (op) & 3))
1870 /* Return 1 if the operand is either a register or an integer whose
1871 high-order 16 bits are zero. */
1874 reg_or_u_short_operand (rtx op, enum machine_mode mode)
1876 return u_short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
1879 /* Return 1 is the operand is either a non-special register or ANY
1880 constant integer. */
1883 reg_or_cint_operand (rtx op, enum machine_mode mode)
1885 return (GET_CODE (op) == CONST_INT || gpc_reg_operand (op, mode));
1888 /* Return 1 is the operand is either a non-special register or ANY
1889 32-bit signed constant integer. */
1892 reg_or_arith_cint_operand (rtx op, enum machine_mode mode)
1894 return (gpc_reg_operand (op, mode)
1895 || (GET_CODE (op) == CONST_INT
1896 #if HOST_BITS_PER_WIDE_INT != 32
1897 && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80000000)
1898 < (unsigned HOST_WIDE_INT) 0x100000000ll)
1903 /* Return 1 is the operand is either a non-special register or a 32-bit
1904 signed constant integer valid for 64-bit addition. */
1907 reg_or_add_cint64_operand (rtx op, enum machine_mode mode)
1909 return (gpc_reg_operand (op, mode)
1910 || (GET_CODE (op) == CONST_INT
1911 #if HOST_BITS_PER_WIDE_INT == 32
1912 && INTVAL (op) < 0x7fff8000
1914 && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80008000)
1920 /* Return 1 is the operand is either a non-special register or a 32-bit
1921 signed constant integer valid for 64-bit subtraction. */
1924 reg_or_sub_cint64_operand (rtx op, enum machine_mode mode)
1926 return (gpc_reg_operand (op, mode)
1927 || (GET_CODE (op) == CONST_INT
1928 #if HOST_BITS_PER_WIDE_INT == 32
1929 && (- INTVAL (op)) < 0x7fff8000
1931 && ((unsigned HOST_WIDE_INT) ((- INTVAL (op)) + 0x80008000)
1937 /* Return 1 is the operand is either a non-special register or ANY
1938 32-bit unsigned constant integer. */
1941 reg_or_logical_cint_operand (rtx op, enum machine_mode mode)
1943 if (GET_CODE (op) == CONST_INT)
1945 if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
1947 if (GET_MODE_BITSIZE (mode) <= 32)
1950 if (INTVAL (op) < 0)
1954 return ((INTVAL (op) & GET_MODE_MASK (mode)
1955 & (~ (unsigned HOST_WIDE_INT) 0xffffffff)) == 0);
1957 else if (GET_CODE (op) == CONST_DOUBLE)
1959 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
1963 return CONST_DOUBLE_HIGH (op) == 0;
1966 return gpc_reg_operand (op, mode);
1969 /* Return 1 if the operand is an operand that can be loaded via the GOT. */
1972 got_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1974 return (GET_CODE (op) == SYMBOL_REF
1975 || GET_CODE (op) == CONST
1976 || GET_CODE (op) == LABEL_REF);
1979 /* Return 1 if the operand is a simple references that can be loaded via
1980 the GOT (labels involving addition aren't allowed). */
1983 got_no_const_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1985 return (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF);
1988 /* Return the number of instructions it takes to form a constant in an
1989 integer register. */
1992 num_insns_constant_wide (HOST_WIDE_INT value)
1994 /* signed constant loadable with {cal|addi} */
1995 if (CONST_OK_FOR_LETTER_P (value, 'I'))
1998 /* constant loadable with {cau|addis} */
1999 else if (CONST_OK_FOR_LETTER_P (value, 'L'))
2002 #if HOST_BITS_PER_WIDE_INT == 64
2003 else if (TARGET_POWERPC64)
2005 HOST_WIDE_INT low = ((value & 0xffffffff) ^ 0x80000000) - 0x80000000;
2006 HOST_WIDE_INT high = value >> 31;
2008 if (high == 0 || high == -1)
2014 return num_insns_constant_wide (high) + 1;
2016 return (num_insns_constant_wide (high)
2017 + num_insns_constant_wide (low) + 1);
2026 num_insns_constant (rtx op, enum machine_mode mode)
2028 if (GET_CODE (op) == CONST_INT)
2030 #if HOST_BITS_PER_WIDE_INT == 64
2031 if ((INTVAL (op) >> 31) != 0 && (INTVAL (op) >> 31) != -1
2032 && mask64_operand (op, mode))
2036 return num_insns_constant_wide (INTVAL (op));
2039 else if (GET_CODE (op) == CONST_DOUBLE && mode == SFmode)
2044 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
2045 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
2046 return num_insns_constant_wide ((HOST_WIDE_INT) l);
2049 else if (GET_CODE (op) == CONST_DOUBLE)
2055 int endian = (WORDS_BIG_ENDIAN == 0);
2057 if (mode == VOIDmode || mode == DImode)
2059 high = CONST_DOUBLE_HIGH (op);
2060 low = CONST_DOUBLE_LOW (op);
2064 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
2065 REAL_VALUE_TO_TARGET_DOUBLE (rv, l);
2067 low = l[1 - endian];
2071 return (num_insns_constant_wide (low)
2072 + num_insns_constant_wide (high));
2076 if (high == 0 && low >= 0)
2077 return num_insns_constant_wide (low);
2079 else if (high == -1 && low < 0)
2080 return num_insns_constant_wide (low);
2082 else if (mask64_operand (op, mode))
2086 return num_insns_constant_wide (high) + 1;
2089 return (num_insns_constant_wide (high)
2090 + num_insns_constant_wide (low) + 1);
2098 /* Return 1 if the operand is a CONST_DOUBLE and it can be put into a
2099 register with one instruction per word. We only do this if we can
2100 safely read CONST_DOUBLE_{LOW,HIGH}. */
2103 easy_fp_constant (rtx op, enum machine_mode mode)
2105 if (GET_CODE (op) != CONST_DOUBLE
2106 || GET_MODE (op) != mode
2107 || (GET_MODE_CLASS (mode) != MODE_FLOAT && mode != DImode))
2110 /* Consider all constants with -msoft-float to be easy. */
2111 if ((TARGET_SOFT_FLOAT || !TARGET_FPRS)
2115 /* If we are using V.4 style PIC, consider all constants to be hard. */
2116 if (flag_pic && DEFAULT_ABI == ABI_V4)
2119 #ifdef TARGET_RELOCATABLE
2120 /* Similarly if we are using -mrelocatable, consider all constants
2122 if (TARGET_RELOCATABLE)
2131 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
2132 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);
2134 return (num_insns_constant_wide ((HOST_WIDE_INT) k[0]) == 1
2135 && num_insns_constant_wide ((HOST_WIDE_INT) k[1]) == 1
2136 && num_insns_constant_wide ((HOST_WIDE_INT) k[2]) == 1
2137 && num_insns_constant_wide ((HOST_WIDE_INT) k[3]) == 1);
2140 else if (mode == DFmode)
2145 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
2146 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
2148 return (num_insns_constant_wide ((HOST_WIDE_INT) k[0]) == 1
2149 && num_insns_constant_wide ((HOST_WIDE_INT) k[1]) == 1);
2152 else if (mode == SFmode)
2157 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
2158 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
2160 return num_insns_constant_wide (l) == 1;
2163 else if (mode == DImode)
2164 return ((TARGET_POWERPC64
2165 && GET_CODE (op) == CONST_DOUBLE && CONST_DOUBLE_LOW (op) == 0)
2166 || (num_insns_constant (op, DImode) <= 2));
2168 else if (mode == SImode)
2174 /* Returns the constant for the splat instruction, if exists. */
2177 easy_vector_splat_const (int cst, enum machine_mode mode)
2182 if (EASY_VECTOR_15 (cst)
2183 || EASY_VECTOR_15_ADD_SELF (cst))
2185 if ((cst & 0xffff) != ((cst >> 16) & 0xffff))
2189 if (EASY_VECTOR_15 (cst)
2190 || EASY_VECTOR_15_ADD_SELF (cst))
2192 if ((cst & 0xff) != ((cst >> 8) & 0xff))
2196 if (EASY_VECTOR_15 (cst)
2197 || EASY_VECTOR_15_ADD_SELF (cst))
2206 /* Return nonzero if all elements of a vector have the same value. */
2209 easy_vector_same (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2213 units = CONST_VECTOR_NUNITS (op);
2215 cst = INTVAL (CONST_VECTOR_ELT (op, 0));
2216 for (i = 1; i < units; ++i)
2217 if (INTVAL (CONST_VECTOR_ELT (op, i)) != cst)
2219 if (i == units && easy_vector_splat_const (cst, mode))
2224 /* Return 1 if the operand is a CONST_INT and can be put into a
2225 register without using memory. */
2228 easy_vector_constant (rtx op, enum machine_mode mode)
2232 if (GET_CODE (op) != CONST_VECTOR
2237 if (zero_constant (op, mode)
2238 && ((TARGET_ALTIVEC && ALTIVEC_VECTOR_MODE (mode))
2239 || (TARGET_SPE && SPE_VECTOR_MODE (mode))))
2242 if (GET_MODE_CLASS (mode) != MODE_VECTOR_INT)
2245 if (TARGET_SPE && mode == V1DImode)
2248 cst = INTVAL (CONST_VECTOR_ELT (op, 0));
2249 cst2 = INTVAL (CONST_VECTOR_ELT (op, 1));
2251 /* Limit SPE vectors to 15 bits signed. These we can generate with:
2253 evmergelo r0, r0, r0
2256 I don't know how efficient it would be to allow bigger constants,
2257 considering we'll have an extra 'ori' for every 'li'. I doubt 5
2258 instructions is better than a 64-bit memory load, but I don't
2259 have the e500 timing specs. */
2260 if (TARGET_SPE && mode == V2SImode
2261 && cst >= -0x7fff && cst <= 0x7fff
2262 && cst2 >= -0x7fff && cst2 <= 0x7fff)
2266 && easy_vector_same (op, mode))
2268 cst = easy_vector_splat_const (cst, mode);
2269 if (EASY_VECTOR_15_ADD_SELF (cst)
2270 || EASY_VECTOR_15 (cst))
2276 /* Same as easy_vector_constant but only for EASY_VECTOR_15_ADD_SELF. */
2279 easy_vector_constant_add_self (rtx op, enum machine_mode mode)
2283 && GET_CODE (op) == CONST_VECTOR
2284 && easy_vector_same (op, mode))
2286 cst = easy_vector_splat_const (INTVAL (CONST_VECTOR_ELT (op, 0)), mode);
2287 if (EASY_VECTOR_15_ADD_SELF (cst))
2293 /* Generate easy_vector_constant out of a easy_vector_constant_add_self. */
2296 gen_easy_vector_constant_add_self (rtx op)
2300 units = GET_MODE_NUNITS (GET_MODE (op));
2301 v = rtvec_alloc (units);
2303 for (i = 0; i < units; i++)
2305 GEN_INT (INTVAL (CONST_VECTOR_ELT (op, i)) >> 1);
2306 return gen_rtx_raw_CONST_VECTOR (GET_MODE (op), v);
2310 output_vec_const_move (rtx *operands)
2313 enum machine_mode mode;
2319 cst = INTVAL (CONST_VECTOR_ELT (vec, 0));
2320 cst2 = INTVAL (CONST_VECTOR_ELT (vec, 1));
2321 mode = GET_MODE (dest);
2325 if (zero_constant (vec, mode))
2326 return "vxor %0,%0,%0";
2327 else if (easy_vector_constant (vec, mode))
2329 operands[1] = GEN_INT (cst);
2333 if (EASY_VECTOR_15 (cst))
2335 operands[1] = GEN_INT (cst);
2336 return "vspltisw %0,%1";
2338 else if (EASY_VECTOR_15_ADD_SELF (cst))
2342 if (EASY_VECTOR_15 (cst))
2344 operands[1] = GEN_INT (cst);
2345 return "vspltish %0,%1";
2347 else if (EASY_VECTOR_15_ADD_SELF (cst))
2351 if (EASY_VECTOR_15 (cst))
2353 operands[1] = GEN_INT (cst);
2354 return "vspltisb %0,%1";
2356 else if (EASY_VECTOR_15_ADD_SELF (cst))
2368 /* Vector constant 0 is handled as a splitter of V2SI, and in the
2369 pattern of V1DI, V4HI, and V2SF.
2371 FIXME: We should probably return # and add post reload
2372 splitters for these, but this way is so easy ;-).
2374 operands[1] = GEN_INT (cst);
2375 operands[2] = GEN_INT (cst2);
2377 return "li %0,%1\n\tevmergelo %0,%0,%0";
2379 return "li %0,%1\n\tevmergelo %0,%0,%0\n\tli %0,%2";
2385 /* Return 1 if the operand is the constant 0. This works for scalars
2386 as well as vectors. */
2388 zero_constant (rtx op, enum machine_mode mode)
2390 return op == CONST0_RTX (mode);
2393 /* Return 1 if the operand is 0.0. */
2395 zero_fp_constant (rtx op, enum machine_mode mode)
2397 return GET_MODE_CLASS (mode) == MODE_FLOAT && op == CONST0_RTX (mode);
2400 /* Return 1 if the operand is in volatile memory. Note that during
2401 the RTL generation phase, memory_operand does not return TRUE for
2402 volatile memory references. So this function allows us to
2403 recognize volatile references where its safe. */
2406 volatile_mem_operand (rtx op, enum machine_mode mode)
2408 if (GET_CODE (op) != MEM)
2411 if (!MEM_VOLATILE_P (op))
2414 if (mode != GET_MODE (op))
2417 if (reload_completed)
2418 return memory_operand (op, mode);
2420 if (reload_in_progress)
2421 return strict_memory_address_p (mode, XEXP (op, 0));
2423 return memory_address_p (mode, XEXP (op, 0));
2426 /* Return 1 if the operand is an offsettable memory operand. */
2429 offsettable_mem_operand (rtx op, enum machine_mode mode)
2431 return ((GET_CODE (op) == MEM)
2432 && offsettable_address_p (reload_completed || reload_in_progress,
2433 mode, XEXP (op, 0)));
2436 /* Return 1 if the operand is either an easy FP constant (see above) or
2440 mem_or_easy_const_operand (rtx op, enum machine_mode mode)
2442 return memory_operand (op, mode) || easy_fp_constant (op, mode);
2445 /* Return 1 if the operand is either a non-special register or an item
2446 that can be used as the operand of a `mode' add insn. */
2449 add_operand (rtx op, enum machine_mode mode)
2451 if (GET_CODE (op) == CONST_INT)
2452 return (CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
2453 || CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
2455 return gpc_reg_operand (op, mode);
2458 /* Return 1 if OP is a constant but not a valid add_operand. */
2461 non_add_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2463 return (GET_CODE (op) == CONST_INT
2464 && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
2465 && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
2468 /* Return 1 if the operand is a non-special register or a constant that
2469 can be used as the operand of an OR or XOR insn on the RS/6000. */
2472 logical_operand (rtx op, enum machine_mode mode)
2474 HOST_WIDE_INT opl, oph;
2476 if (gpc_reg_operand (op, mode))
2479 if (GET_CODE (op) == CONST_INT)
2481 opl = INTVAL (op) & GET_MODE_MASK (mode);
2483 #if HOST_BITS_PER_WIDE_INT <= 32
2484 if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT && opl < 0)
2488 else if (GET_CODE (op) == CONST_DOUBLE)
2490 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
2493 opl = CONST_DOUBLE_LOW (op);
2494 oph = CONST_DOUBLE_HIGH (op);
2501 return ((opl & ~ (unsigned HOST_WIDE_INT) 0xffff) == 0
2502 || (opl & ~ (unsigned HOST_WIDE_INT) 0xffff0000) == 0);
2505 /* Return 1 if C is a constant that is not a logical operand (as
2506 above), but could be split into one. */
2509 non_logical_cint_operand (rtx op, enum machine_mode mode)
2511 return ((GET_CODE (op) == CONST_INT || GET_CODE (op) == CONST_DOUBLE)
2512 && ! logical_operand (op, mode)
2513 && reg_or_logical_cint_operand (op, mode));
2516 /* Return 1 if C is a constant that can be encoded in a 32-bit mask on the
2517 RS/6000. It is if there are no more than two 1->0 or 0->1 transitions.
2518 Reject all ones and all zeros, since these should have been optimized
2519 away and confuse the making of MB and ME. */
2522 mask_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2524 HOST_WIDE_INT c, lsb;
2526 if (GET_CODE (op) != CONST_INT)
2531 /* Fail in 64-bit mode if the mask wraps around because the upper
2532 32-bits of the mask will all be 1s, contrary to GCC's internal view. */
2533 if (TARGET_POWERPC64 && (c & 0x80000001) == 0x80000001)
2536 /* We don't change the number of transitions by inverting,
2537 so make sure we start with the LS bit zero. */
2541 /* Reject all zeros or all ones. */
2545 /* Find the first transition. */
2548 /* Invert to look for a second transition. */
2551 /* Erase first transition. */
2554 /* Find the second transition (if any). */
2557 /* Match if all the bits above are 1's (or c is zero). */
2561 /* Return 1 for the PowerPC64 rlwinm corner case. */
2564 mask_operand_wrap (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2566 HOST_WIDE_INT c, lsb;
2568 if (GET_CODE (op) != CONST_INT)
2573 if ((c & 0x80000001) != 0x80000001)
2587 /* Return 1 if the operand is a constant that is a PowerPC64 mask.
2588 It is if there are no more than one 1->0 or 0->1 transitions.
2589 Reject all zeros, since zero should have been optimized away and
2590 confuses the making of MB and ME. */
2593 mask64_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2595 if (GET_CODE (op) == CONST_INT)
2597 HOST_WIDE_INT c, lsb;
2601 /* Reject all zeros. */
2605 /* We don't change the number of transitions by inverting,
2606 so make sure we start with the LS bit zero. */
2610 /* Find the transition, and check that all bits above are 1's. */
2613 /* Match if all the bits above are 1's (or c is zero). */
2619 /* Like mask64_operand, but allow up to three transitions. This
2620 predicate is used by insn patterns that generate two rldicl or
2621 rldicr machine insns. */
2624 mask64_2_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2626 if (GET_CODE (op) == CONST_INT)
2628 HOST_WIDE_INT c, lsb;
2632 /* Disallow all zeros. */
2636 /* We don't change the number of transitions by inverting,
2637 so make sure we start with the LS bit zero. */
2641 /* Find the first transition. */
2644 /* Invert to look for a second transition. */
2647 /* Erase first transition. */
2650 /* Find the second transition. */
2653 /* Invert to look for a third transition. */
2656 /* Erase second transition. */
2659 /* Find the third transition (if any). */
2662 /* Match if all the bits above are 1's (or c is zero). */
2668 /* Generates shifts and masks for a pair of rldicl or rldicr insns to
2669 implement ANDing by the mask IN. */
2671 build_mask64_2_operands (rtx in, rtx *out)
2673 #if HOST_BITS_PER_WIDE_INT >= 64
2674 unsigned HOST_WIDE_INT c, lsb, m1, m2;
2677 if (GET_CODE (in) != CONST_INT)
2683 /* Assume c initially something like 0x00fff000000fffff. The idea
2684 is to rotate the word so that the middle ^^^^^^ group of zeros
2685 is at the MS end and can be cleared with an rldicl mask. We then
2686 rotate back and clear off the MS ^^ group of zeros with a
2688 c = ~c; /* c == 0xff000ffffff00000 */
2689 lsb = c & -c; /* lsb == 0x0000000000100000 */
2690 m1 = -lsb; /* m1 == 0xfffffffffff00000 */
2691 c = ~c; /* c == 0x00fff000000fffff */
2692 c &= -lsb; /* c == 0x00fff00000000000 */
2693 lsb = c & -c; /* lsb == 0x0000100000000000 */
2694 c = ~c; /* c == 0xff000fffffffffff */
2695 c &= -lsb; /* c == 0xff00000000000000 */
2697 while ((lsb >>= 1) != 0)
2698 shift++; /* shift == 44 on exit from loop */
2699 m1 <<= 64 - shift; /* m1 == 0xffffff0000000000 */
2700 m1 = ~m1; /* m1 == 0x000000ffffffffff */
2701 m2 = ~c; /* m2 == 0x00ffffffffffffff */
2705 /* Assume c initially something like 0xff000f0000000000. The idea
2706 is to rotate the word so that the ^^^ middle group of zeros
2707 is at the LS end and can be cleared with an rldicr mask. We then
2708 rotate back and clear off the LS group of ^^^^^^^^^^ zeros with
2710 lsb = c & -c; /* lsb == 0x0000010000000000 */
2711 m2 = -lsb; /* m2 == 0xffffff0000000000 */
2712 c = ~c; /* c == 0x00fff0ffffffffff */
2713 c &= -lsb; /* c == 0x00fff00000000000 */
2714 lsb = c & -c; /* lsb == 0x0000100000000000 */
2715 c = ~c; /* c == 0xff000fffffffffff */
2716 c &= -lsb; /* c == 0xff00000000000000 */
2718 while ((lsb >>= 1) != 0)
2719 shift++; /* shift == 44 on exit from loop */
2720 m1 = ~c; /* m1 == 0x00ffffffffffffff */
2721 m1 >>= shift; /* m1 == 0x0000000000000fff */
2722 m1 = ~m1; /* m1 == 0xfffffffffffff000 */
2725 /* Note that when we only have two 0->1 and 1->0 transitions, one of the
2726 masks will be all 1's. We are guaranteed more than one transition. */
2727 out[0] = GEN_INT (64 - shift);
2728 out[1] = GEN_INT (m1);
2729 out[2] = GEN_INT (shift);
2730 out[3] = GEN_INT (m2);
2738 /* Return 1 if the operand is either a non-special register or a constant
2739 that can be used as the operand of a PowerPC64 logical AND insn. */
2742 and64_operand (rtx op, enum machine_mode mode)
2744 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
2745 return (gpc_reg_operand (op, mode) || mask64_operand (op, mode));
2747 return (logical_operand (op, mode) || mask64_operand (op, mode));
2750 /* Like the above, but also match constants that can be implemented
2751 with two rldicl or rldicr insns. */
2754 and64_2_operand (rtx op, enum machine_mode mode)
2756 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
2757 return gpc_reg_operand (op, mode) || mask64_2_operand (op, mode);
2759 return logical_operand (op, mode) || mask64_2_operand (op, mode);
2762 /* Return 1 if the operand is either a non-special register or a
2763 constant that can be used as the operand of an RS/6000 logical AND insn. */
2766 and_operand (rtx op, enum machine_mode mode)
2768 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
2769 return (gpc_reg_operand (op, mode) || mask_operand (op, mode));
2771 return (logical_operand (op, mode) || mask_operand (op, mode));
2774 /* Return 1 if the operand is a general register or memory operand. */
2777 reg_or_mem_operand (rtx op, enum machine_mode mode)
2779 return (gpc_reg_operand (op, mode)
2780 || memory_operand (op, mode)
2781 || macho_lo_sum_memory_operand (op, mode)
2782 || volatile_mem_operand (op, mode));
2785 /* Return 1 if the operand is a general register or memory operand without
2786 pre_inc or pre_dec which produces invalid form of PowerPC lwa
2790 lwa_operand (rtx op, enum machine_mode mode)
2794 if (reload_completed && GET_CODE (inner) == SUBREG)
2795 inner = SUBREG_REG (inner);
2797 return gpc_reg_operand (inner, mode)
2798 || (memory_operand (inner, mode)
2799 && GET_CODE (XEXP (inner, 0)) != PRE_INC
2800 && GET_CODE (XEXP (inner, 0)) != PRE_DEC
2801 && (GET_CODE (XEXP (inner, 0)) != PLUS
2802 || GET_CODE (XEXP (XEXP (inner, 0), 1)) != CONST_INT
2803 || INTVAL (XEXP (XEXP (inner, 0), 1)) % 4 == 0));
2806 /* Return 1 if the operand, used inside a MEM, is a SYMBOL_REF. */
2809 symbol_ref_operand (rtx op, enum machine_mode mode)
2811 if (mode != VOIDmode && GET_MODE (op) != mode)
2814 return (GET_CODE (op) == SYMBOL_REF
2815 && (DEFAULT_ABI != ABI_AIX || SYMBOL_REF_FUNCTION_P (op)));
2818 /* Return 1 if the operand, used inside a MEM, is a valid first argument
2819 to CALL. This is a SYMBOL_REF, a pseudo-register, LR or CTR. */
2822 call_operand (rtx op, enum machine_mode mode)
2824 if (mode != VOIDmode && GET_MODE (op) != mode)
2827 return (GET_CODE (op) == SYMBOL_REF
2828 || (GET_CODE (op) == REG
2829 && (REGNO (op) == LINK_REGISTER_REGNUM
2830 || REGNO (op) == COUNT_REGISTER_REGNUM
2831 || REGNO (op) >= FIRST_PSEUDO_REGISTER)));
2834 /* Return 1 if the operand is a SYMBOL_REF for a function known to be in
2838 current_file_function_operand (rtx op,
2839 enum machine_mode mode ATTRIBUTE_UNUSED)
2841 return (GET_CODE (op) == SYMBOL_REF
2842 && (DEFAULT_ABI != ABI_AIX || SYMBOL_REF_FUNCTION_P (op))
2843 && (SYMBOL_REF_LOCAL_P (op)
2844 || (op == XEXP (DECL_RTL (current_function_decl), 0))));
2847 /* Return 1 if this operand is a valid input for a move insn. */
2850 input_operand (rtx op, enum machine_mode mode)
2852 /* Memory is always valid. */
2853 if (memory_operand (op, mode))
2856 /* For floating-point, easy constants are valid. */
2857 if (GET_MODE_CLASS (mode) == MODE_FLOAT
2859 && easy_fp_constant (op, mode))
2862 /* Allow any integer constant. */
2863 if (GET_MODE_CLASS (mode) == MODE_INT
2864 && (GET_CODE (op) == CONST_INT
2865 || GET_CODE (op) == CONST_DOUBLE))
2868 /* Allow easy vector constants. */
2869 if (GET_CODE (op) == CONST_VECTOR
2870 && easy_vector_constant (op, mode))
2873 /* For floating-point or multi-word mode, the only remaining valid type
2875 if (GET_MODE_CLASS (mode) == MODE_FLOAT
2876 || GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2877 return register_operand (op, mode);
2879 /* The only cases left are integral modes one word or smaller (we
2880 do not get called for MODE_CC values). These can be in any
2882 if (register_operand (op, mode))
2885 /* A SYMBOL_REF referring to the TOC is valid. */
2886 if (legitimate_constant_pool_address_p (op))
2889 /* A constant pool expression (relative to the TOC) is valid */
2890 if (toc_relative_expr_p (op))
2893 /* V.4 allows SYMBOL_REFs and CONSTs that are in the small data region
2895 if (DEFAULT_ABI == ABI_V4
2896 && (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == CONST)
2897 && small_data_operand (op, Pmode))
2904 /* Darwin, AIX increases natural record alignment to doubleword if the first
2905 field is an FP double while the FP fields remain word aligned. */
2908 rs6000_special_round_type_align (tree type, int computed, int specified)
2910 tree field = TYPE_FIELDS (type);
2912 /* Skip all the static variables only if ABI is greater than
2914 while (field != NULL && TREE_CODE (field) == VAR_DECL)
2915 field = TREE_CHAIN (field);
2917 if (field == NULL || field == type || DECL_MODE (field) != DFmode)
2918 return MAX (computed, specified);
2920 return MAX (MAX (computed, specified), 64);
2923 /* Return 1 for an operand in small memory on V.4/eabi. */
2926 small_data_operand (rtx op ATTRIBUTE_UNUSED,
2927 enum machine_mode mode ATTRIBUTE_UNUSED)
2932 if (rs6000_sdata == SDATA_NONE || rs6000_sdata == SDATA_DATA)
2935 if (DEFAULT_ABI != ABI_V4)
2938 if (GET_CODE (op) == SYMBOL_REF)
2941 else if (GET_CODE (op) != CONST
2942 || GET_CODE (XEXP (op, 0)) != PLUS
2943 || GET_CODE (XEXP (XEXP (op, 0), 0)) != SYMBOL_REF
2944 || GET_CODE (XEXP (XEXP (op, 0), 1)) != CONST_INT)
2949 rtx sum = XEXP (op, 0);
2950 HOST_WIDE_INT summand;
2952 /* We have to be careful here, because it is the referenced address
2953 that must be 32k from _SDA_BASE_, not just the symbol. */
2954 summand = INTVAL (XEXP (sum, 1));
2955 if (summand < 0 || (unsigned HOST_WIDE_INT) summand > g_switch_value)
2958 sym_ref = XEXP (sum, 0);
2961 return SYMBOL_REF_SMALL_P (sym_ref);
2967 /* Return true, if operand is a memory operand and has a
2968 displacement divisible by 4. */
2971 word_offset_memref_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2976 if (!memory_operand (op, mode))
2979 addr = XEXP (op, 0);
2980 if (GET_CODE (addr) == PLUS
2981 && GET_CODE (XEXP (addr, 0)) == REG
2982 && GET_CODE (XEXP (addr, 1)) == CONST_INT)
2983 off = INTVAL (XEXP (addr, 1));
2985 return (off % 4) == 0;
2988 /* Return true if either operand is a general purpose register. */
2991 gpr_or_gpr_p (rtx op0, rtx op1)
2993 return ((REG_P (op0) && INT_REGNO_P (REGNO (op0)))
2994 || (REG_P (op1) && INT_REGNO_P (REGNO (op1))));
2998 /* Subroutines of rs6000_legitimize_address and rs6000_legitimate_address. */
3001 constant_pool_expr_1 (rtx op, int *have_sym, int *have_toc)
3003 switch (GET_CODE(op))
3006 if (RS6000_SYMBOL_REF_TLS_P (op))
3008 else if (CONSTANT_POOL_ADDRESS_P (op))
3010 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (op), Pmode))
3018 else if (! strcmp (XSTR (op, 0), toc_label_name))
3027 return (constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc)
3028 && constant_pool_expr_1 (XEXP (op, 1), have_sym, have_toc));
3030 return constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc);
3039 constant_pool_expr_p (rtx op)
3043 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_sym;
3047 toc_relative_expr_p (rtx op)
3051 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_toc;
3055 legitimate_constant_pool_address_p (rtx x)
3058 && GET_CODE (x) == PLUS
3059 && GET_CODE (XEXP (x, 0)) == REG
3060 && (TARGET_MINIMAL_TOC || REGNO (XEXP (x, 0)) == TOC_REGISTER)
3061 && constant_pool_expr_p (XEXP (x, 1)));
3065 legitimate_small_data_p (enum machine_mode mode, rtx x)
3067 return (DEFAULT_ABI == ABI_V4
3068 && !flag_pic && !TARGET_TOC
3069 && (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST)
3070 && small_data_operand (x, mode));
3073 /* SPE offset addressing is limited to 5-bits worth of double words. */
3074 #define SPE_CONST_OFFSET_OK(x) (((x) & ~0xf8) == 0)
3077 rs6000_legitimate_offset_address_p (enum machine_mode mode, rtx x, int strict)
3079 unsigned HOST_WIDE_INT offset, extra;
3081 if (GET_CODE (x) != PLUS)
3083 if (GET_CODE (XEXP (x, 0)) != REG)
3085 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
3087 if (legitimate_constant_pool_address_p (x))
3089 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
3092 offset = INTVAL (XEXP (x, 1));
3100 /* AltiVec vector modes. Only reg+reg addressing is valid here,
3101 which leaves the only valid constant offset of zero, which by
3102 canonicalization rules is also invalid. */
3109 /* SPE vector modes. */
3110 return SPE_CONST_OFFSET_OK (offset);
3114 if (mode == DFmode || !TARGET_POWERPC64)
3116 else if (offset & 3)
3122 if (mode == TFmode || !TARGET_POWERPC64)
3124 else if (offset & 3)
3135 return (offset < 0x10000) && (offset + extra < 0x10000);
3139 legitimate_indexed_address_p (rtx x, int strict)
3143 if (GET_CODE (x) != PLUS)
3148 if (!REG_P (op0) || !REG_P (op1))
3151 return ((INT_REG_OK_FOR_BASE_P (op0, strict)
3152 && INT_REG_OK_FOR_INDEX_P (op1, strict))
3153 || (INT_REG_OK_FOR_BASE_P (op1, strict)
3154 && INT_REG_OK_FOR_INDEX_P (op0, strict)));
3158 legitimate_indirect_address_p (rtx x, int strict)
3160 return GET_CODE (x) == REG && INT_REG_OK_FOR_BASE_P (x, strict);
3164 macho_lo_sum_memory_operand (rtx x, enum machine_mode mode)
3166 if (!TARGET_MACHO || !flag_pic
3167 || mode != SImode || GET_CODE(x) != MEM)
3171 if (GET_CODE (x) != LO_SUM)
3173 if (GET_CODE (XEXP (x, 0)) != REG)
3175 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), 0))
3179 return CONSTANT_P (x);
3183 legitimate_lo_sum_address_p (enum machine_mode mode, rtx x, int strict)
3185 if (GET_CODE (x) != LO_SUM)
3187 if (GET_CODE (XEXP (x, 0)) != REG)
3189 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
3193 if (TARGET_ELF || TARGET_MACHO)
3195 if (DEFAULT_ABI != ABI_AIX && DEFAULT_ABI != ABI_DARWIN && flag_pic)
3199 if (GET_MODE_NUNITS (mode) != 1)
3201 if (GET_MODE_BITSIZE (mode) > 64)
3204 return CONSTANT_P (x);
3211 /* Try machine-dependent ways of modifying an illegitimate address
3212 to be legitimate. If we find one, return the new, valid address.
3213 This is used from only one place: `memory_address' in explow.c.
3215 OLDX is the address as it was before break_out_memory_refs was
3216 called. In some cases it is useful to look at this to decide what
3219 MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.
3221 It is always safe for this function to do nothing. It exists to
3222 recognize opportunities to optimize the output.
3224 On RS/6000, first check for the sum of a register with a constant
3225 integer that is out of range. If so, generate code to add the
3226 constant with the low-order 16 bits masked to the register and force
3227 this result into another register (this can be done with `cau').
3228 Then generate an address of REG+(CONST&0xffff), allowing for the
3229 possibility of bit 16 being a one.
3231 Then check for the sum of a register and something not constant, try to
3232 load the other things into a register and return the sum. */
3235 rs6000_legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
3236 enum machine_mode mode)
3238 if (GET_CODE (x) == SYMBOL_REF)
3240 enum tls_model model = SYMBOL_REF_TLS_MODEL (x);
3242 return rs6000_legitimize_tls_address (x, model);
3245 if (GET_CODE (x) == PLUS
3246 && GET_CODE (XEXP (x, 0)) == REG
3247 && GET_CODE (XEXP (x, 1)) == CONST_INT
3248 && (unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1)) + 0x8000) >= 0x10000)
3250 HOST_WIDE_INT high_int, low_int;
3252 low_int = ((INTVAL (XEXP (x, 1)) & 0xffff) ^ 0x8000) - 0x8000;
3253 high_int = INTVAL (XEXP (x, 1)) - low_int;
3254 sum = force_operand (gen_rtx_PLUS (Pmode, XEXP (x, 0),
3255 GEN_INT (high_int)), 0);
3256 return gen_rtx_PLUS (Pmode, sum, GEN_INT (low_int));
3258 else if (GET_CODE (x) == PLUS
3259 && GET_CODE (XEXP (x, 0)) == REG
3260 && GET_CODE (XEXP (x, 1)) != CONST_INT
3261 && GET_MODE_NUNITS (mode) == 1
3262 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
3264 || (mode != DFmode && mode != TFmode))
3265 && (TARGET_POWERPC64 || mode != DImode)
3268 return gen_rtx_PLUS (Pmode, XEXP (x, 0),
3269 force_reg (Pmode, force_operand (XEXP (x, 1), 0)));
3271 else if (ALTIVEC_VECTOR_MODE (mode))
3275 /* Make sure both operands are registers. */
3276 if (GET_CODE (x) == PLUS)
3277 return gen_rtx_PLUS (Pmode, force_reg (Pmode, XEXP (x, 0)),
3278 force_reg (Pmode, XEXP (x, 1)));
3280 reg = force_reg (Pmode, x);
3283 else if (SPE_VECTOR_MODE (mode))
3285 /* We accept [reg + reg] and [reg + OFFSET]. */
3287 if (GET_CODE (x) == PLUS)
3289 rtx op1 = XEXP (x, 0);
3290 rtx op2 = XEXP (x, 1);
3292 op1 = force_reg (Pmode, op1);
3294 if (GET_CODE (op2) != REG
3295 && (GET_CODE (op2) != CONST_INT
3296 || !SPE_CONST_OFFSET_OK (INTVAL (op2))))
3297 op2 = force_reg (Pmode, op2);
3299 return gen_rtx_PLUS (Pmode, op1, op2);
3302 return force_reg (Pmode, x);
3308 && GET_CODE (x) != CONST_INT
3309 && GET_CODE (x) != CONST_DOUBLE
3311 && GET_MODE_NUNITS (mode) == 1
3312 && (GET_MODE_BITSIZE (mode) <= 32
3313 || ((TARGET_HARD_FLOAT && TARGET_FPRS) && mode == DFmode)))
3315 rtx reg = gen_reg_rtx (Pmode);
3316 emit_insn (gen_elf_high (reg, x));
3317 return gen_rtx_LO_SUM (Pmode, reg, x);
3319 else if (TARGET_MACHO && TARGET_32BIT && TARGET_NO_TOC
3322 && ! MACHO_DYNAMIC_NO_PIC_P
3324 && GET_CODE (x) != CONST_INT
3325 && GET_CODE (x) != CONST_DOUBLE
3327 && ((TARGET_HARD_FLOAT && TARGET_FPRS) || mode != DFmode)
3331 rtx reg = gen_reg_rtx (Pmode);
3332 emit_insn (gen_macho_high (reg, x));
3333 return gen_rtx_LO_SUM (Pmode, reg, x);
3336 && constant_pool_expr_p (x)
3337 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), Pmode))
3339 return create_TOC_reference (x);
3345 /* This is called from dwarf2out.c via ASM_OUTPUT_DWARF_DTPREL.
3346 We need to emit DTP-relative relocations. */
3349 rs6000_output_dwarf_dtprel (FILE *file, int size, rtx x)
3354 fputs ("\t.long\t", file);
3357 fputs (DOUBLE_INT_ASM_OP, file);
3362 output_addr_const (file, x);
3363 fputs ("@dtprel+0x8000", file);
3366 /* Construct the SYMBOL_REF for the tls_get_addr function. */
3368 static GTY(()) rtx rs6000_tls_symbol;
3370 rs6000_tls_get_addr (void)
3372 if (!rs6000_tls_symbol)
3373 rs6000_tls_symbol = init_one_libfunc ("__tls_get_addr");
3375 return rs6000_tls_symbol;
3378 /* Construct the SYMBOL_REF for TLS GOT references. */
3380 static GTY(()) rtx rs6000_got_symbol;
3382 rs6000_got_sym (void)
3384 if (!rs6000_got_symbol)
3386 rs6000_got_symbol = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
3387 SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_LOCAL;
3388 SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_EXTERNAL;
3391 return rs6000_got_symbol;
3394 /* ADDR contains a thread-local SYMBOL_REF. Generate code to compute
3395 this (thread-local) address. */
3398 rs6000_legitimize_tls_address (rtx addr, enum tls_model model)
3402 dest = gen_reg_rtx (Pmode);
3403 if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 16)
3409 tlsreg = gen_rtx_REG (Pmode, 13);
3410 insn = gen_tls_tprel_64 (dest, tlsreg, addr);
3414 tlsreg = gen_rtx_REG (Pmode, 2);
3415 insn = gen_tls_tprel_32 (dest, tlsreg, addr);
3419 else if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 32)
3423 tmp = gen_reg_rtx (Pmode);
3426 tlsreg = gen_rtx_REG (Pmode, 13);
3427 insn = gen_tls_tprel_ha_64 (tmp, tlsreg, addr);
3431 tlsreg = gen_rtx_REG (Pmode, 2);
3432 insn = gen_tls_tprel_ha_32 (tmp, tlsreg, addr);
3436 insn = gen_tls_tprel_lo_64 (dest, tmp, addr);
3438 insn = gen_tls_tprel_lo_32 (dest, tmp, addr);
3443 rtx r3, got, tga, tmp1, tmp2, eqv;
3446 got = gen_rtx_REG (Pmode, TOC_REGISTER);
3450 got = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
3453 rtx gsym = rs6000_got_sym ();
3454 got = gen_reg_rtx (Pmode);
3456 rs6000_emit_move (got, gsym, Pmode);
3460 static int tls_got_labelno = 0;
3461 rtx tempLR, lab, tmp3, mem;
3464 ASM_GENERATE_INTERNAL_LABEL (buf, "LTLS", tls_got_labelno++);
3465 lab = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
3466 tempLR = gen_reg_rtx (Pmode);
3467 tmp1 = gen_reg_rtx (Pmode);
3468 tmp2 = gen_reg_rtx (Pmode);
3469 tmp3 = gen_reg_rtx (Pmode);
3470 mem = gen_const_mem (Pmode, tmp1);
3472 first = emit_insn (gen_load_toc_v4_PIC_1b (tempLR, lab,
3474 emit_move_insn (tmp1, tempLR);
3475 emit_move_insn (tmp2, mem);
3476 emit_insn (gen_addsi3 (tmp3, tmp1, tmp2));
3477 last = emit_move_insn (got, tmp3);
3478 REG_NOTES (last) = gen_rtx_EXPR_LIST (REG_EQUAL, gsym,
3480 REG_NOTES (first) = gen_rtx_INSN_LIST (REG_LIBCALL, last,
3482 REG_NOTES (last) = gen_rtx_INSN_LIST (REG_RETVAL, first,
3488 if (model == TLS_MODEL_GLOBAL_DYNAMIC)
3490 r3 = gen_rtx_REG (Pmode, 3);
3492 insn = gen_tls_gd_64 (r3, got, addr);
3494 insn = gen_tls_gd_32 (r3, got, addr);
3497 tga = gen_rtx_MEM (Pmode, rs6000_tls_get_addr ());
3498 insn = gen_call_value (r3, tga, const0_rtx, const0_rtx);
3499 insn = emit_call_insn (insn);
3500 CONST_OR_PURE_CALL_P (insn) = 1;
3501 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
3502 insn = get_insns ();
3504 emit_libcall_block (insn, dest, r3, addr);
3506 else if (model == TLS_MODEL_LOCAL_DYNAMIC)
3508 r3 = gen_rtx_REG (Pmode, 3);
3510 insn = gen_tls_ld_64 (r3, got);
3512 insn = gen_tls_ld_32 (r3, got);
3515 tga = gen_rtx_MEM (Pmode, rs6000_tls_get_addr ());
3516 insn = gen_call_value (r3, tga, const0_rtx, const0_rtx);
3517 insn = emit_call_insn (insn);
3518 CONST_OR_PURE_CALL_P (insn) = 1;
3519 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
3520 insn = get_insns ();
3522 tmp1 = gen_reg_rtx (Pmode);
3523 eqv = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx),
3525 emit_libcall_block (insn, tmp1, r3, eqv);
3526 if (rs6000_tls_size == 16)
3529 insn = gen_tls_dtprel_64 (dest, tmp1, addr);
3531 insn = gen_tls_dtprel_32 (dest, tmp1, addr);
3533 else if (rs6000_tls_size == 32)
3535 tmp2 = gen_reg_rtx (Pmode);
3537 insn = gen_tls_dtprel_ha_64 (tmp2, tmp1, addr);
3539 insn = gen_tls_dtprel_ha_32 (tmp2, tmp1, addr);
3542 insn = gen_tls_dtprel_lo_64 (dest, tmp2, addr);
3544 insn = gen_tls_dtprel_lo_32 (dest, tmp2, addr);
3548 tmp2 = gen_reg_rtx (Pmode);
3550 insn = gen_tls_got_dtprel_64 (tmp2, got, addr);
3552 insn = gen_tls_got_dtprel_32 (tmp2, got, addr);
3554 insn = gen_rtx_SET (Pmode, dest,
3555 gen_rtx_PLUS (Pmode, tmp2, tmp1));
3561 /* IE, or 64 bit offset LE. */
3562 tmp2 = gen_reg_rtx (Pmode);
3564 insn = gen_tls_got_tprel_64 (tmp2, got, addr);
3566 insn = gen_tls_got_tprel_32 (tmp2, got, addr);
3569 insn = gen_tls_tls_64 (dest, tmp2, addr);
3571 insn = gen_tls_tls_32 (dest, tmp2, addr);
3579 /* Return 1 if X is a SYMBOL_REF for a TLS symbol. This is used in
3580 instruction definitions. */
3583 rs6000_tls_symbol_ref (rtx x, enum machine_mode mode ATTRIBUTE_UNUSED)
3585 return RS6000_SYMBOL_REF_TLS_P (x);
3588 /* Return 1 if X contains a thread-local symbol. */
3591 rs6000_tls_referenced_p (rtx x)
3593 if (! TARGET_HAVE_TLS)
3596 return for_each_rtx (&x, &rs6000_tls_symbol_ref_1, 0);
3599 /* Return 1 if *X is a thread-local symbol. This is the same as
3600 rs6000_tls_symbol_ref except for the type of the unused argument. */
3603 rs6000_tls_symbol_ref_1 (rtx *x, void *data ATTRIBUTE_UNUSED)
3605 return RS6000_SYMBOL_REF_TLS_P (*x);
3608 /* The convention appears to be to define this wherever it is used.
3609 With legitimize_reload_address now defined here, REG_MODE_OK_FOR_BASE_P
3610 is now used here. */
3611 #ifndef REG_MODE_OK_FOR_BASE_P
3612 #define REG_MODE_OK_FOR_BASE_P(REGNO, MODE) REG_OK_FOR_BASE_P (REGNO)
3615 /* Our implementation of LEGITIMIZE_RELOAD_ADDRESS. Returns a value to
3616 replace the input X, or the original X if no replacement is called for.
3617 The output parameter *WIN is 1 if the calling macro should goto WIN,
3620 For RS/6000, we wish to handle large displacements off a base
3621 register by splitting the addend across an addiu/addis and the mem insn.
3622 This cuts number of extra insns needed from 3 to 1.
3624 On Darwin, we use this to generate code for floating point constants.
3625 A movsf_low is generated so we wind up with 2 instructions rather than 3.
3626 The Darwin code is inside #if TARGET_MACHO because only then is
3627 machopic_function_base_name() defined. */
3629 rs6000_legitimize_reload_address (rtx x, enum machine_mode mode,
3630 int opnum, int type, int ind_levels ATTRIBUTE_UNUSED, int *win)
3632 /* We must recognize output that we have already generated ourselves. */
3633 if (GET_CODE (x) == PLUS
3634 && GET_CODE (XEXP (x, 0)) == PLUS
3635 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
3636 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3637 && GET_CODE (XEXP (x, 1)) == CONST_INT)
3639 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
3640 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
3641 opnum, (enum reload_type)type);
3647 if (DEFAULT_ABI == ABI_DARWIN && flag_pic
3648 && GET_CODE (x) == LO_SUM
3649 && GET_CODE (XEXP (x, 0)) == PLUS
3650 && XEXP (XEXP (x, 0), 0) == pic_offset_table_rtx
3651 && GET_CODE (XEXP (XEXP (x, 0), 1)) == HIGH
3652 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 1), 0)) == CONST
3653 && XEXP (XEXP (XEXP (x, 0), 1), 0) == XEXP (x, 1)
3654 && GET_CODE (XEXP (XEXP (x, 1), 0)) == MINUS
3655 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == SYMBOL_REF
3656 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == SYMBOL_REF)
3658 /* Result of previous invocation of this function on Darwin
3659 floating point constant. */
3660 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
3661 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
3662 opnum, (enum reload_type)type);
3667 if (GET_CODE (x) == PLUS
3668 && GET_CODE (XEXP (x, 0)) == REG
3669 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
3670 && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
3671 && GET_CODE (XEXP (x, 1)) == CONST_INT
3672 && !SPE_VECTOR_MODE (mode)
3673 && !ALTIVEC_VECTOR_MODE (mode))
3675 HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
3676 HOST_WIDE_INT low = ((val & 0xffff) ^ 0x8000) - 0x8000;
3678 = (((val - low) & 0xffffffff) ^ 0x80000000) - 0x80000000;
3680 /* Check for 32-bit overflow. */
3681 if (high + low != val)
3687 /* Reload the high part into a base reg; leave the low part
3688 in the mem directly. */
3690 x = gen_rtx_PLUS (GET_MODE (x),
3691 gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0),
3695 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
3696 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
3697 opnum, (enum reload_type)type);
3702 if (GET_CODE (x) == SYMBOL_REF
3703 && DEFAULT_ABI == ABI_DARWIN
3704 && !ALTIVEC_VECTOR_MODE (mode)
3705 && (flag_pic || MACHO_DYNAMIC_NO_PIC_P)
3706 /* Don't do this for TFmode, since the result isn't offsettable. */
3711 rtx offset = gen_rtx_CONST (Pmode,
3712 gen_rtx_MINUS (Pmode, x,
3713 machopic_function_base_sym ()));
3714 x = gen_rtx_LO_SUM (GET_MODE (x),
3715 gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
3716 gen_rtx_HIGH (Pmode, offset)), offset);
3719 x = gen_rtx_LO_SUM (GET_MODE (x),
3720 gen_rtx_HIGH (Pmode, x), x);
3722 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
3723 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
3724 opnum, (enum reload_type)type);
3730 && constant_pool_expr_p (x)
3731 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), mode))
3733 (x) = create_TOC_reference (x);
3741 /* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
3742 that is a valid memory address for an instruction.
3743 The MODE argument is the machine mode for the MEM expression
3744 that wants to use this address.
3746 On the RS/6000, there are four valid address: a SYMBOL_REF that
3747 refers to a constant pool entry of an address (or the sum of it
3748 plus a constant), a short (16-bit signed) constant plus a register,
3749 the sum of two registers, or a register indirect, possibly with an
3750 auto-increment. For DFmode and DImode with a constant plus register,
3751 we must ensure that both words are addressable or PowerPC64 with offset
3754 For modes spanning multiple registers (DFmode in 32-bit GPRs,
3755 32-bit DImode, TImode, TFmode), indexed addressing cannot be used because
3756 adjacent memory cells are accessed by adding word-sized offsets
3757 during assembly output. */
3759 rs6000_legitimate_address (enum machine_mode mode, rtx x, int reg_ok_strict)
3761 if (RS6000_SYMBOL_REF_TLS_P (x))
3763 if (legitimate_indirect_address_p (x, reg_ok_strict))
3765 if ((GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
3766 && !ALTIVEC_VECTOR_MODE (mode)
3767 && !SPE_VECTOR_MODE (mode)
3769 && legitimate_indirect_address_p (XEXP (x, 0), reg_ok_strict))
3771 if (legitimate_small_data_p (mode, x))
3773 if (legitimate_constant_pool_address_p (x))
3775 /* If not REG_OK_STRICT (before reload) let pass any stack offset. */
3777 && GET_CODE (x) == PLUS
3778 && GET_CODE (XEXP (x, 0)) == REG
3779 && (XEXP (x, 0) == virtual_stack_vars_rtx
3780 || XEXP (x, 0) == arg_pointer_rtx)
3781 && GET_CODE (XEXP (x, 1)) == CONST_INT)
3783 if (rs6000_legitimate_offset_address_p (mode, x, reg_ok_strict))
3787 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
3789 || (mode != DFmode && mode != TFmode))
3790 && (TARGET_POWERPC64 || mode != DImode)
3791 && legitimate_indexed_address_p (x, reg_ok_strict))
3793 if (legitimate_lo_sum_address_p (mode, x, reg_ok_strict))
3798 /* Go to LABEL if ADDR (a legitimate address expression)
3799 has an effect that depends on the machine mode it is used for.
3801 On the RS/6000 this is true of all integral offsets (since AltiVec
3802 modes don't allow them) or is a pre-increment or decrement.
3804 ??? Except that due to conceptual problems in offsettable_address_p
3805 we can't really report the problems of integral offsets. So leave
3806 this assuming that the adjustable offset must be valid for the
3807 sub-words of a TFmode operand, which is what we had before. */
3810 rs6000_mode_dependent_address (rtx addr)
3812 switch (GET_CODE (addr))
3815 if (GET_CODE (XEXP (addr, 1)) == CONST_INT)
3817 unsigned HOST_WIDE_INT val = INTVAL (XEXP (addr, 1));
3818 return val + 12 + 0x8000 >= 0x10000;
3827 return TARGET_UPDATE;
3836 /* Return number of consecutive hard regs needed starting at reg REGNO
3837 to hold something of mode MODE.
3838 This is ordinarily the length in words of a value of mode MODE
3839 but can be less for certain modes in special long registers.
3841 For the SPE, GPRs are 64 bits but only 32 bits are visible in
3842 scalar instructions. The upper 32 bits are only available to the
3845 POWER and PowerPC GPRs hold 32 bits worth;
3846 PowerPC64 GPRs and FPRs point register holds 64 bits worth. */
3849 rs6000_hard_regno_nregs (int regno, enum machine_mode mode)
3851 if (FP_REGNO_P (regno))
3852 return (GET_MODE_SIZE (mode) + UNITS_PER_FP_WORD - 1) / UNITS_PER_FP_WORD;
3854 if (SPE_SIMD_REGNO_P (regno) && TARGET_SPE && SPE_VECTOR_MODE (mode))
3855 return (GET_MODE_SIZE (mode) + UNITS_PER_SPE_WORD - 1) / UNITS_PER_SPE_WORD;
3857 if (ALTIVEC_REGNO_P (regno))
3859 (GET_MODE_SIZE (mode) + UNITS_PER_ALTIVEC_WORD - 1) / UNITS_PER_ALTIVEC_WORD;
3861 return (GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
3864 /* Change register usage conditional on target flags. */
3866 rs6000_conditional_register_usage (void)
3870 /* Set MQ register fixed (already call_used) if not POWER
3871 architecture (RIOS1, RIOS2, RSC, and PPC601) so that it will not
3876 /* 64-bit AIX reserves GPR13 for thread-private data. */
3878 fixed_regs[13] = call_used_regs[13]
3879 = call_really_used_regs[13] = 1;
3881 /* Conditionally disable FPRs. */
3882 if (TARGET_SOFT_FLOAT || !TARGET_FPRS)
3883 for (i = 32; i < 64; i++)
3884 fixed_regs[i] = call_used_regs[i]
3885 = call_really_used_regs[i] = 1;
3887 if (DEFAULT_ABI == ABI_V4
3888 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
3890 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
3892 if (DEFAULT_ABI == ABI_V4
3893 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
3895 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
3896 = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
3897 = call_really_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
3899 if (DEFAULT_ABI == ABI_DARWIN
3900 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
3901 global_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
3902 = fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
3903 = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
3904 = call_really_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
3906 if (TARGET_TOC && TARGET_MINIMAL_TOC)
3907 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
3908 = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
3911 global_regs[VSCR_REGNO] = 1;
3915 global_regs[SPEFSCR_REGNO] = 1;
3916 fixed_regs[FIXED_SCRATCH]
3917 = call_used_regs[FIXED_SCRATCH]
3918 = call_really_used_regs[FIXED_SCRATCH] = 1;
3921 if (! TARGET_ALTIVEC)
3923 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
3924 fixed_regs[i] = call_used_regs[i] = call_really_used_regs[i] = 1;
3925 call_really_used_regs[VRSAVE_REGNO] = 1;
3928 if (TARGET_ALTIVEC_ABI)
3929 for (i = FIRST_ALTIVEC_REGNO; i < FIRST_ALTIVEC_REGNO + 20; ++i)
3930 call_used_regs[i] = call_really_used_regs[i] = 1;
3933 /* Try to output insns to set TARGET equal to the constant C if it can
3934 be done in less than N insns. Do all computations in MODE.
3935 Returns the place where the output has been placed if it can be
3936 done and the insns have been emitted. If it would take more than N
3937 insns, zero is returned and no insns and emitted. */
3940 rs6000_emit_set_const (rtx dest, enum machine_mode mode,
3941 rtx source, int n ATTRIBUTE_UNUSED)
3943 rtx result, insn, set;
3944 HOST_WIDE_INT c0, c1;
3946 if (mode == QImode || mode == HImode)
3949 dest = gen_reg_rtx (mode);
3950 emit_insn (gen_rtx_SET (VOIDmode, dest, source));
3953 else if (mode == SImode)
3955 result = no_new_pseudos ? dest : gen_reg_rtx (SImode);
3957 emit_insn (gen_rtx_SET (VOIDmode, result,
3958 GEN_INT (INTVAL (source)
3959 & (~ (HOST_WIDE_INT) 0xffff))));
3960 emit_insn (gen_rtx_SET (VOIDmode, dest,
3961 gen_rtx_IOR (SImode, result,
3962 GEN_INT (INTVAL (source) & 0xffff))));
3965 else if (mode == DImode)
3967 if (GET_CODE (source) == CONST_INT)
3969 c0 = INTVAL (source);
3972 else if (GET_CODE (source) == CONST_DOUBLE)
3974 #if HOST_BITS_PER_WIDE_INT >= 64
3975 c0 = CONST_DOUBLE_LOW (source);
3978 c0 = CONST_DOUBLE_LOW (source);
3979 c1 = CONST_DOUBLE_HIGH (source);
3985 result = rs6000_emit_set_long_const (dest, c0, c1);
3990 insn = get_last_insn ();
3991 set = single_set (insn);
3992 if (! CONSTANT_P (SET_SRC (set)))
3993 set_unique_reg_note (insn, REG_EQUAL, source);
3998 /* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
3999 fall back to a straight forward decomposition. We do this to avoid
4000 exponential run times encountered when looking for longer sequences
4001 with rs6000_emit_set_const. */
4003 rs6000_emit_set_long_const (rtx dest, HOST_WIDE_INT c1, HOST_WIDE_INT c2)
4005 if (!TARGET_POWERPC64)
4007 rtx operand1, operand2;
4009 operand1 = operand_subword_force (dest, WORDS_BIG_ENDIAN == 0,
4011 operand2 = operand_subword_force (dest, WORDS_BIG_ENDIAN != 0,
4013 emit_move_insn (operand1, GEN_INT (c1));
4014 emit_move_insn (operand2, GEN_INT (c2));
4018 HOST_WIDE_INT ud1, ud2, ud3, ud4;
4021 ud2 = (c1 & 0xffff0000) >> 16;
4022 #if HOST_BITS_PER_WIDE_INT >= 64
4026 ud4 = (c2 & 0xffff0000) >> 16;
4028 if ((ud4 == 0xffff && ud3 == 0xffff && ud2 == 0xffff && (ud1 & 0x8000))
4029 || (ud4 == 0 && ud3 == 0 && ud2 == 0 && ! (ud1 & 0x8000)))
4032 emit_move_insn (dest, GEN_INT (((ud1 ^ 0x8000) - 0x8000)));
4034 emit_move_insn (dest, GEN_INT (ud1));
4037 else if ((ud4 == 0xffff && ud3 == 0xffff && (ud2 & 0x8000))
4038 || (ud4 == 0 && ud3 == 0 && ! (ud2 & 0x8000)))
4041 emit_move_insn (dest, GEN_INT (((ud2 << 16) ^ 0x80000000)
4044 emit_move_insn (dest, GEN_INT (ud2 << 16));
4046 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
4048 else if ((ud4 == 0xffff && (ud3 & 0x8000))
4049 || (ud4 == 0 && ! (ud3 & 0x8000)))
4052 emit_move_insn (dest, GEN_INT (((ud3 << 16) ^ 0x80000000)
4055 emit_move_insn (dest, GEN_INT (ud3 << 16));
4058 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud2)));
4059 emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (16)));
4061 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
4066 emit_move_insn (dest, GEN_INT (((ud4 << 16) ^ 0x80000000)
4069 emit_move_insn (dest, GEN_INT (ud4 << 16));
4072 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud3)));
4074 emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (32)));
4076 emit_move_insn (dest, gen_rtx_IOR (DImode, dest,
4077 GEN_INT (ud2 << 16)));
4079 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
4085 /* Helper for the following. Get rid of [r+r] memory refs
4086 in cases where it won't work (TImode, TFmode). */
4089 rs6000_eliminate_indexed_memrefs (rtx operands[2])
4091 if (GET_CODE (operands[0]) == MEM
4092 && GET_CODE (XEXP (operands[0], 0)) != REG
4093 && ! reload_in_progress)
4095 = replace_equiv_address (operands[0],
4096 copy_addr_to_reg (XEXP (operands[0], 0)));
4098 if (GET_CODE (operands[1]) == MEM
4099 && GET_CODE (XEXP (operands[1], 0)) != REG
4100 && ! reload_in_progress)
4102 = replace_equiv_address (operands[1],
4103 copy_addr_to_reg (XEXP (operands[1], 0)));
4106 /* Emit a move from SOURCE to DEST in mode MODE. */
4108 rs6000_emit_move (rtx dest, rtx source, enum machine_mode mode)
4112 operands[1] = source;
4114 /* Sanity checks. Check that we get CONST_DOUBLE only when we should. */
4115 if (GET_CODE (operands[1]) == CONST_DOUBLE
4116 && ! FLOAT_MODE_P (mode)
4117 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
4119 /* FIXME. This should never happen. */
4120 /* Since it seems that it does, do the safe thing and convert
4122 operands[1] = gen_int_mode (CONST_DOUBLE_LOW (operands[1]), mode);
4124 if (GET_CODE (operands[1]) == CONST_DOUBLE
4125 && ! FLOAT_MODE_P (mode)
4126 && ((CONST_DOUBLE_HIGH (operands[1]) == 0
4127 && CONST_DOUBLE_LOW (operands[1]) >= 0)
4128 || (CONST_DOUBLE_HIGH (operands[1]) == -1
4129 && CONST_DOUBLE_LOW (operands[1]) < 0)))
4132 /* Check if GCC is setting up a block move that will end up using FP
4133 registers as temporaries. We must make sure this is acceptable. */
4134 if (GET_CODE (operands[0]) == MEM
4135 && GET_CODE (operands[1]) == MEM
4137 && (SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[0]))
4138 || SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[1])))
4139 && ! (SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[0]) > 32
4140 ? 32 : MEM_ALIGN (operands[0])))
4141 || SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[1]) > 32
4143 : MEM_ALIGN (operands[1]))))
4144 && ! MEM_VOLATILE_P (operands [0])
4145 && ! MEM_VOLATILE_P (operands [1]))
4147 emit_move_insn (adjust_address (operands[0], SImode, 0),
4148 adjust_address (operands[1], SImode, 0));
4149 emit_move_insn (adjust_address (operands[0], SImode, 4),
4150 adjust_address (operands[1], SImode, 4));
4154 if (!no_new_pseudos)
4156 if (GET_CODE (operands[1]) == MEM && optimize > 0
4157 && (mode == QImode || mode == HImode || mode == SImode)
4158 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode))
4160 rtx reg = gen_reg_rtx (word_mode);
4162 emit_insn (gen_rtx_SET (word_mode, reg,
4163 gen_rtx_ZERO_EXTEND (word_mode,
4165 operands[1] = gen_lowpart (mode, reg);
4167 if (GET_CODE (operands[0]) != REG)
4168 operands[1] = force_reg (mode, operands[1]);
4171 if (mode == SFmode && ! TARGET_POWERPC
4172 && TARGET_HARD_FLOAT && TARGET_FPRS
4173 && GET_CODE (operands[0]) == MEM)
4177 if (reload_in_progress || reload_completed)
4178 regnum = true_regnum (operands[1]);
4179 else if (GET_CODE (operands[1]) == REG)
4180 regnum = REGNO (operands[1]);
4184 /* If operands[1] is a register, on POWER it may have
4185 double-precision data in it, so truncate it to single
4187 if (FP_REGNO_P (regnum) || regnum >= FIRST_PSEUDO_REGISTER)
4190 newreg = (no_new_pseudos ? operands[1] : gen_reg_rtx (mode));
4191 emit_insn (gen_aux_truncdfsf2 (newreg, operands[1]));
4192 operands[1] = newreg;
4196 /* Recognize the case where operand[1] is a reference to thread-local
4197 data and load its address to a register. */
4198 if (GET_CODE (operands[1]) == SYMBOL_REF)
4200 enum tls_model model = SYMBOL_REF_TLS_MODEL (operands[1]);
4202 operands[1] = rs6000_legitimize_tls_address (operands[1], model);
4205 /* Handle the case where reload calls us with an invalid address. */
4206 if (reload_in_progress && mode == Pmode
4207 && (! general_operand (operands[1], mode)
4208 || ! nonimmediate_operand (operands[0], mode)))
4211 /* 128-bit constant floating-point values on Darwin should really be
4212 loaded as two parts. */
4213 if ((DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_DARWIN)
4214 && TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_LONG_DOUBLE_128
4215 && mode == TFmode && GET_CODE (operands[1]) == CONST_DOUBLE)
4217 /* DImode is used, not DFmode, because simplify_gen_subreg doesn't
4218 know how to get a DFmode SUBREG of a TFmode. */
4219 rs6000_emit_move (simplify_gen_subreg (DImode, operands[0], mode, 0),
4220 simplify_gen_subreg (DImode, operands[1], mode, 0),
4222 rs6000_emit_move (simplify_gen_subreg (DImode, operands[0], mode,
4223 GET_MODE_SIZE (DImode)),
4224 simplify_gen_subreg (DImode, operands[1], mode,
4225 GET_MODE_SIZE (DImode)),
4230 /* FIXME: In the long term, this switch statement should go away
4231 and be replaced by a sequence of tests based on things like
4237 if (CONSTANT_P (operands[1])
4238 && GET_CODE (operands[1]) != CONST_INT)
4239 operands[1] = force_const_mem (mode, operands[1]);
4243 rs6000_eliminate_indexed_memrefs (operands);
4248 if (CONSTANT_P (operands[1])
4249 && ! easy_fp_constant (operands[1], mode))
4250 operands[1] = force_const_mem (mode, operands[1]);
4261 if (CONSTANT_P (operands[1])
4262 && !easy_vector_constant (operands[1], mode))
4263 operands[1] = force_const_mem (mode, operands[1]);
4268 /* Use default pattern for address of ELF small data */
4271 && DEFAULT_ABI == ABI_V4
4272 && (GET_CODE (operands[1]) == SYMBOL_REF
4273 || GET_CODE (operands[1]) == CONST)
4274 && small_data_operand (operands[1], mode))
4276 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
4280 if (DEFAULT_ABI == ABI_V4
4281 && mode == Pmode && mode == SImode
4282 && flag_pic == 1 && got_operand (operands[1], mode))
4284 emit_insn (gen_movsi_got (operands[0], operands[1]));
4288 if ((TARGET_ELF || DEFAULT_ABI == ABI_DARWIN)
4292 && CONSTANT_P (operands[1])
4293 && GET_CODE (operands[1]) != HIGH
4294 && GET_CODE (operands[1]) != CONST_INT)
4296 rtx target = (no_new_pseudos ? operands[0] : gen_reg_rtx (mode));
4298 /* If this is a function address on -mcall-aixdesc,
4299 convert it to the address of the descriptor. */
4300 if (DEFAULT_ABI == ABI_AIX
4301 && GET_CODE (operands[1]) == SYMBOL_REF
4302 && XSTR (operands[1], 0)[0] == '.')
4304 const char *name = XSTR (operands[1], 0);
4306 while (*name == '.')
4308 new_ref = gen_rtx_SYMBOL_REF (Pmode, name);
4309 CONSTANT_POOL_ADDRESS_P (new_ref)
4310 = CONSTANT_POOL_ADDRESS_P (operands[1]);
4311 SYMBOL_REF_FLAGS (new_ref) = SYMBOL_REF_FLAGS (operands[1]);
4312 SYMBOL_REF_USED (new_ref) = SYMBOL_REF_USED (operands[1]);
4313 SYMBOL_REF_DECL (new_ref) = SYMBOL_REF_DECL (operands[1]);
4314 operands[1] = new_ref;
4317 if (DEFAULT_ABI == ABI_DARWIN)
4320 if (MACHO_DYNAMIC_NO_PIC_P)
4322 /* Take care of any required data indirection. */
4323 operands[1] = rs6000_machopic_legitimize_pic_address (
4324 operands[1], mode, operands[0]);
4325 if (operands[0] != operands[1])
4326 emit_insn (gen_rtx_SET (VOIDmode,
4327 operands[0], operands[1]));
4333 emit_insn (gen_macho_high_di (target, operands[1]));
4334 emit_insn (gen_macho_low_di (operands[0], target, operands[1]));
4338 emit_insn (gen_macho_high (target, operands[1]));
4339 emit_insn (gen_macho_low (operands[0], target, operands[1]));
4344 emit_insn (gen_elf_high (target, operands[1]));
4345 emit_insn (gen_elf_low (operands[0], target, operands[1]));
4349 /* If this is a SYMBOL_REF that refers to a constant pool entry,
4350 and we have put it in the TOC, we just need to make a TOC-relative
4353 && GET_CODE (operands[1]) == SYMBOL_REF
4354 && constant_pool_expr_p (operands[1])
4355 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands[1]),
4356 get_pool_mode (operands[1])))
4358 operands[1] = create_TOC_reference (operands[1]);
4360 else if (mode == Pmode
4361 && CONSTANT_P (operands[1])
4362 && ((GET_CODE (operands[1]) != CONST_INT
4363 && ! easy_fp_constant (operands[1], mode))
4364 || (GET_CODE (operands[1]) == CONST_INT
4365 && num_insns_constant (operands[1], mode) > 2)
4366 || (GET_CODE (operands[0]) == REG
4367 && FP_REGNO_P (REGNO (operands[0]))))
4368 && GET_CODE (operands[1]) != HIGH
4369 && ! legitimate_constant_pool_address_p (operands[1])
4370 && ! toc_relative_expr_p (operands[1]))
4372 /* Emit a USE operation so that the constant isn't deleted if
4373 expensive optimizations are turned on because nobody
4374 references it. This should only be done for operands that
4375 contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
4376 This should not be done for operands that contain LABEL_REFs.
4377 For now, we just handle the obvious case. */
4378 if (GET_CODE (operands[1]) != LABEL_REF)
4379 emit_insn (gen_rtx_USE (VOIDmode, operands[1]));
4382 /* Darwin uses a special PIC legitimizer. */
4383 if (DEFAULT_ABI == ABI_DARWIN && MACHOPIC_INDIRECT)
4386 rs6000_machopic_legitimize_pic_address (operands[1], mode,
4388 if (operands[0] != operands[1])
4389 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
4394 /* If we are to limit the number of things we put in the TOC and
4395 this is a symbol plus a constant we can add in one insn,
4396 just put the symbol in the TOC and add the constant. Don't do
4397 this if reload is in progress. */
4398 if (GET_CODE (operands[1]) == CONST
4399 && TARGET_NO_SUM_IN_TOC && ! reload_in_progress
4400 && GET_CODE (XEXP (operands[1], 0)) == PLUS
4401 && add_operand (XEXP (XEXP (operands[1], 0), 1), mode)
4402 && (GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == LABEL_REF
4403 || GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF)
4404 && ! side_effects_p (operands[0]))
4407 force_const_mem (mode, XEXP (XEXP (operands[1], 0), 0));
4408 rtx other = XEXP (XEXP (operands[1], 0), 1);
4410 sym = force_reg (mode, sym);
4412 emit_insn (gen_addsi3 (operands[0], sym, other));
4414 emit_insn (gen_adddi3 (operands[0], sym, other));
4418 operands[1] = force_const_mem (mode, operands[1]);
4421 && constant_pool_expr_p (XEXP (operands[1], 0))
4422 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
4423 get_pool_constant (XEXP (operands[1], 0)),
4424 get_pool_mode (XEXP (operands[1], 0))))
4427 = gen_const_mem (mode,
4428 create_TOC_reference (XEXP (operands[1], 0)));
4429 set_mem_alias_set (operands[1], get_TOC_alias_set ());
4435 rs6000_eliminate_indexed_memrefs (operands);
4439 emit_insn (gen_rtx_PARALLEL (VOIDmode,
4441 gen_rtx_SET (VOIDmode,
4442 operands[0], operands[1]),
4443 gen_rtx_CLOBBER (VOIDmode,
4444 gen_rtx_SCRATCH (SImode)))));
4453 /* Above, we may have called force_const_mem which may have returned
4454 an invalid address. If we can, fix this up; otherwise, reload will
4455 have to deal with it. */
4456 if (GET_CODE (operands[1]) == MEM && ! reload_in_progress)
4457 operands[1] = validize_mem (operands[1]);
4460 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
4463 /* Nonzero if we can use a floating-point register to pass this arg. */
4464 #define USE_FP_FOR_ARG_P(CUM,MODE,TYPE) \
4465 (GET_MODE_CLASS (MODE) == MODE_FLOAT \
4466 && (CUM)->fregno <= FP_ARG_MAX_REG \
4467 && TARGET_HARD_FLOAT && TARGET_FPRS)
4469 /* Nonzero if we can use an AltiVec register to pass this arg. */
4470 #define USE_ALTIVEC_FOR_ARG_P(CUM,MODE,TYPE,NAMED) \
4471 (ALTIVEC_VECTOR_MODE (MODE) \
4472 && (CUM)->vregno <= ALTIVEC_ARG_MAX_REG \
4473 && TARGET_ALTIVEC_ABI \
4476 /* Return a nonzero value to say to return the function value in
4477 memory, just as large structures are always returned. TYPE will be
4478 the data type of the value, and FNTYPE will be the type of the
4479 function doing the returning, or @code{NULL} for libcalls.
4481 The AIX ABI for the RS/6000 specifies that all structures are
4482 returned in memory. The Darwin ABI does the same. The SVR4 ABI
4483 specifies that structures <= 8 bytes are returned in r3/r4, but a
4484 draft put them in memory, and GCC used to implement the draft
4485 instead of the final standard. Therefore, TARGET_AIX_STRUCT_RET
4486 controls this instead of DEFAULT_ABI; V.4 targets needing backward
4487 compatibility can change DRAFT_V4_STRUCT_RET to override the
4488 default, and -m switches get the final word. See
4489 rs6000_override_options for more details.
4491 The PPC32 SVR4 ABI uses IEEE double extended for long double, if 128-bit
4492 long double support is enabled. These values are returned in memory.
4494 int_size_in_bytes returns -1 for variable size objects, which go in
4495 memory always. The cast to unsigned makes -1 > 8. */
4498 rs6000_return_in_memory (tree type, tree fntype ATTRIBUTE_UNUSED)
4500 if (AGGREGATE_TYPE_P (type)
4501 && (TARGET_AIX_STRUCT_RET
4502 || (unsigned HOST_WIDE_INT) int_size_in_bytes (type) > 8))
4504 if (DEFAULT_ABI == ABI_V4 && TYPE_MODE (type) == TFmode)
4509 /* Initialize a variable CUM of type CUMULATIVE_ARGS
4510 for a call to a function whose data type is FNTYPE.
4511 For a library call, FNTYPE is 0.
4513 For incoming args we set the number of arguments in the prototype large
4514 so we never return a PARALLEL. */
4517 init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype,
4518 rtx libname ATTRIBUTE_UNUSED, int incoming,
4519 int libcall, int n_named_args)
4521 static CUMULATIVE_ARGS zero_cumulative;
4523 *cum = zero_cumulative;
4525 cum->fregno = FP_ARG_MIN_REG;
4526 cum->vregno = ALTIVEC_ARG_MIN_REG;
4527 cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
4528 cum->call_cookie = ((DEFAULT_ABI == ABI_V4 && libcall)
4529 ? CALL_LIBCALL : CALL_NORMAL);
4530 cum->sysv_gregno = GP_ARG_MIN_REG;
4531 cum->stdarg = fntype
4532 && (TYPE_ARG_TYPES (fntype) != 0
4533 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
4534 != void_type_node));
4536 cum->nargs_prototype = 0;
4537 if (incoming || cum->prototype)
4538 cum->nargs_prototype = n_named_args;
4540 /* Check for a longcall attribute. */
4542 && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype))
4543 && !lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype)))
4544 cum->call_cookie = CALL_LONG;
4546 if (TARGET_DEBUG_ARG)
4548 fprintf (stderr, "\ninit_cumulative_args:");
4551 tree ret_type = TREE_TYPE (fntype);
4552 fprintf (stderr, " ret code = %s,",
4553 tree_code_name[ (int)TREE_CODE (ret_type) ]);
4556 if (cum->call_cookie & CALL_LONG)
4557 fprintf (stderr, " longcall,");
4559 fprintf (stderr, " proto = %d, nargs = %d\n",
4560 cum->prototype, cum->nargs_prototype);
4565 && TARGET_ALTIVEC_ABI
4566 && ALTIVEC_VECTOR_MODE (TYPE_MODE (TREE_TYPE (fntype))))
4568 error ("Cannot return value in vector register because"
4569 " altivec instructions are disabled, use -maltivec"
4570 " to enable them.");
4574 /* Return true if TYPE must be passed on the stack and not in registers. */
4577 rs6000_must_pass_in_stack (enum machine_mode mode, tree type)
4579 if (DEFAULT_ABI == ABI_AIX || TARGET_64BIT)
4580 return must_pass_in_stack_var_size (mode, type);
4582 return must_pass_in_stack_var_size_or_pad (mode, type);
4585 /* If defined, a C expression which determines whether, and in which
4586 direction, to pad out an argument with extra space. The value
4587 should be of type `enum direction': either `upward' to pad above
4588 the argument, `downward' to pad below, or `none' to inhibit
4591 For the AIX ABI structs are always stored left shifted in their
4595 function_arg_padding (enum machine_mode mode, tree type)
4597 #ifndef AGGREGATE_PADDING_FIXED
4598 #define AGGREGATE_PADDING_FIXED 0
4600 #ifndef AGGREGATES_PAD_UPWARD_ALWAYS
4601 #define AGGREGATES_PAD_UPWARD_ALWAYS 0
4604 if (!AGGREGATE_PADDING_FIXED)
4606 /* GCC used to pass structures of the same size as integer types as
4607 if they were in fact integers, ignoring FUNCTION_ARG_PADDING.
4608 ie. Structures of size 1 or 2 (or 4 when TARGET_64BIT) were
4609 passed padded downward, except that -mstrict-align further
4610 muddied the water in that multi-component structures of 2 and 4
4611 bytes in size were passed padded upward.
4613 The following arranges for best compatibility with previous
4614 versions of gcc, but removes the -mstrict-align dependency. */
4615 if (BYTES_BIG_ENDIAN)
4617 HOST_WIDE_INT size = 0;
4619 if (mode == BLKmode)
4621 if (type && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
4622 size = int_size_in_bytes (type);
4625 size = GET_MODE_SIZE (mode);
4627 if (size == 1 || size == 2 || size == 4)
4633 if (AGGREGATES_PAD_UPWARD_ALWAYS)
4635 if (type != 0 && AGGREGATE_TYPE_P (type))
4639 /* Fall back to the default. */
4640 return DEFAULT_FUNCTION_ARG_PADDING (mode, type);
4643 /* If defined, a C expression that gives the alignment boundary, in bits,
4644 of an argument with the specified mode and type. If it is not defined,
4645 PARM_BOUNDARY is used for all arguments.
4647 V.4 wants long longs to be double word aligned. */
4650 function_arg_boundary (enum machine_mode mode, tree type ATTRIBUTE_UNUSED)
4652 if (DEFAULT_ABI == ABI_V4 && GET_MODE_SIZE (mode) == 8)
4654 else if (SPE_VECTOR_MODE (mode))
4656 else if (ALTIVEC_VECTOR_MODE (mode))
4659 return PARM_BOUNDARY;
4662 /* Compute the size (in words) of a function argument. */
4664 static unsigned long
4665 rs6000_arg_size (enum machine_mode mode, tree type)
4669 if (mode != BLKmode)
4670 size = GET_MODE_SIZE (mode);
4672 size = int_size_in_bytes (type);
4675 return (size + 3) >> 2;
4677 return (size + 7) >> 3;
4680 /* Update the data in CUM to advance over an argument
4681 of mode MODE and data type TYPE.
4682 (TYPE is null for libcalls where that information may not be available.)
4684 Note that for args passed by reference, function_arg will be called
4685 with MODE and TYPE set to that of the pointer to the arg, not the arg
4689 function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
4690 tree type, int named)
4692 cum->nargs_prototype--;
4694 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
4698 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named))
4701 if (!TARGET_ALTIVEC)
4702 error ("Cannot pass argument in vector register because"
4703 " altivec instructions are disabled, use -maltivec"
4704 " to enable them.");
4706 /* PowerPC64 Linux and AIX allocate GPRs for a vector argument
4707 even if it is going to be passed in a vector register.
4708 Darwin does the same for variable-argument functions. */
4709 if ((DEFAULT_ABI == ABI_AIX && TARGET_64BIT)
4710 || (cum->stdarg && DEFAULT_ABI != ABI_V4))
4720 /* Vector parameters must be 16-byte aligned. This places
4721 them at 2 mod 4 in terms of words in 32-bit mode, since
4722 the parameter save area starts at offset 24 from the
4723 stack. In 64-bit mode, they just have to start on an
4724 even word, since the parameter save area is 16-byte
4725 aligned. Space for GPRs is reserved even if the argument
4726 will be passed in memory. */
4728 align = (2 - cum->words) & 3;
4730 align = cum->words & 1;
4731 cum->words += align + rs6000_arg_size (mode, type);
4733 if (TARGET_DEBUG_ARG)
4735 fprintf (stderr, "function_adv: words = %2d, align=%d, ",
4737 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s\n",
4738 cum->nargs_prototype, cum->prototype,
4739 GET_MODE_NAME (mode));
4743 else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode)
4745 && cum->sysv_gregno <= GP_ARG_MAX_REG)
4747 else if (DEFAULT_ABI == ABI_V4)
4749 if (TARGET_HARD_FLOAT && TARGET_FPRS
4750 && (mode == SFmode || mode == DFmode))
4752 if (cum->fregno <= FP_ARG_V4_MAX_REG)
4757 cum->words += cum->words & 1;
4758 cum->words += rs6000_arg_size (mode, type);
4763 int n_words = rs6000_arg_size (mode, type);
4764 int gregno = cum->sysv_gregno;
4766 /* Long long and SPE vectors are put in (r3,r4), (r5,r6),
4767 (r7,r8) or (r9,r10). As does any other 2 word item such
4768 as complex int due to a historical mistake. */
4770 gregno += (1 - gregno) & 1;
4772 /* Multi-reg args are not split between registers and stack. */
4773 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
4775 /* Long long and SPE vectors are aligned on the stack.
4776 So are other 2 word items such as complex int due to
4777 a historical mistake. */
4779 cum->words += cum->words & 1;
4780 cum->words += n_words;
4783 /* Note: continuing to accumulate gregno past when we've started
4784 spilling to the stack indicates the fact that we've started
4785 spilling to the stack to expand_builtin_saveregs. */
4786 cum->sysv_gregno = gregno + n_words;
4789 if (TARGET_DEBUG_ARG)
4791 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
4792 cum->words, cum->fregno);
4793 fprintf (stderr, "gregno = %2d, nargs = %4d, proto = %d, ",
4794 cum->sysv_gregno, cum->nargs_prototype, cum->prototype);
4795 fprintf (stderr, "mode = %4s, named = %d\n",
4796 GET_MODE_NAME (mode), named);
4801 int n_words = rs6000_arg_size (mode, type);
4802 int align = function_arg_boundary (mode, type) / PARM_BOUNDARY - 1;
4804 /* The simple alignment calculation here works because
4805 function_arg_boundary / PARM_BOUNDARY will only be 1 or 2.
4806 If we ever want to handle alignments larger than 8 bytes for
4807 32-bit or 16 bytes for 64-bit, then we'll need to take into
4808 account the offset to the start of the parm save area. */
4809 align &= cum->words;
4810 cum->words += align + n_words;
4812 if (GET_MODE_CLASS (mode) == MODE_FLOAT
4813 && TARGET_HARD_FLOAT && TARGET_FPRS)
4814 cum->fregno += (GET_MODE_SIZE (mode) + 7) >> 3;
4816 if (TARGET_DEBUG_ARG)
4818 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
4819 cum->words, cum->fregno);
4820 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s, ",
4821 cum->nargs_prototype, cum->prototype, GET_MODE_NAME (mode));
4822 fprintf (stderr, "named = %d, align = %d\n", named, align);
4827 /* Determine where to put a SIMD argument on the SPE. */
4830 rs6000_spe_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
4835 int gregno = cum->sysv_gregno;
4836 int n_words = rs6000_arg_size (mode, type);
4838 /* SPE vectors are put in odd registers. */
4839 if (n_words == 2 && (gregno & 1) == 0)
4842 if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
4845 enum machine_mode m = SImode;
4847 r1 = gen_rtx_REG (m, gregno);
4848 r1 = gen_rtx_EXPR_LIST (m, r1, const0_rtx);
4849 r2 = gen_rtx_REG (m, gregno + 1);
4850 r2 = gen_rtx_EXPR_LIST (m, r2, GEN_INT (4));
4851 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
4858 if (cum->sysv_gregno <= GP_ARG_MAX_REG)
4859 return gen_rtx_REG (mode, cum->sysv_gregno);
4865 /* Determine where to place an argument in 64-bit mode with 32-bit ABI. */
4868 rs6000_mixed_function_arg (enum machine_mode mode, tree type, int align_words)
4872 rtx rvec[GP_ARG_NUM_REG + 1];
4874 if (align_words >= GP_ARG_NUM_REG)
4877 n_units = rs6000_arg_size (mode, type);
4879 /* Optimize the simple case where the arg fits in one gpr, except in
4880 the case of BLKmode due to assign_parms assuming that registers are
4881 BITS_PER_WORD wide. */
4883 || (n_units == 1 && mode != BLKmode))
4884 return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
4887 if (align_words + n_units > GP_ARG_NUM_REG)
4888 /* Not all of the arg fits in gprs. Say that it goes in memory too,
4889 using a magic NULL_RTX component.
4890 FIXME: This is not strictly correct. Only some of the arg
4891 belongs in memory, not all of it. However, there isn't any way
4892 to do this currently, apart from building rtx descriptions for
4893 the pieces of memory we want stored. Due to bugs in the generic
4894 code we can't use the normal function_arg_partial_nregs scheme
4895 with the PARALLEL arg description we emit here.
4896 In any case, the code to store the whole arg to memory is often
4897 more efficient than code to store pieces, and we know that space
4898 is available in the right place for the whole arg. */
4899 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
4904 rtx r = gen_rtx_REG (SImode, GP_ARG_MIN_REG + align_words);
4905 rtx off = GEN_INT (i++ * 4);
4906 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, off);
4908 while (++align_words < GP_ARG_NUM_REG && --n_units != 0);
4910 return gen_rtx_PARALLEL (mode, gen_rtvec_v (k, rvec));
4913 /* Determine where to put an argument to a function.
4914 Value is zero to push the argument on the stack,
4915 or a hard register in which to store the argument.
4917 MODE is the argument's machine mode.
4918 TYPE is the data type of the argument (as a tree).
4919 This is null for libcalls where that information may
4921 CUM is a variable of type CUMULATIVE_ARGS which gives info about
4922 the preceding args and about the function being called.
4923 NAMED is nonzero if this argument is a named parameter
4924 (otherwise it is an extra parameter matching an ellipsis).
4926 On RS/6000 the first eight words of non-FP are normally in registers
4927 and the rest are pushed. Under AIX, the first 13 FP args are in registers.
4928 Under V.4, the first 8 FP args are in registers.
4930 If this is floating-point and no prototype is specified, we use
4931 both an FP and integer register (or possibly FP reg and stack). Library
4932 functions (when CALL_LIBCALL is set) always have the proper types for args,
4933 so we can pass the FP value just in one register. emit_library_function
4934 doesn't support PARALLEL anyway.
4936 Note that for args passed by reference, function_arg will be called
4937 with MODE and TYPE set to that of the pointer to the arg, not the arg
4941 function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
4942 tree type, int named)
4944 enum rs6000_abi abi = DEFAULT_ABI;
4946 /* Return a marker to indicate whether CR1 needs to set or clear the
4947 bit that V.4 uses to say fp args were passed in registers.
4948 Assume that we don't need the marker for software floating point,
4949 or compiler generated library calls. */
4950 if (mode == VOIDmode)
4953 && cum->nargs_prototype < 0
4954 && (cum->call_cookie & CALL_LIBCALL) == 0
4955 && (cum->prototype || TARGET_NO_PROTOTYPE))
4957 /* For the SPE, we need to crxor CR6 always. */
4959 return GEN_INT (cum->call_cookie | CALL_V4_SET_FP_ARGS);
4960 else if (TARGET_HARD_FLOAT && TARGET_FPRS)
4961 return GEN_INT (cum->call_cookie
4962 | ((cum->fregno == FP_ARG_MIN_REG)
4963 ? CALL_V4_SET_FP_ARGS
4964 : CALL_V4_CLEAR_FP_ARGS));
4967 return GEN_INT (cum->call_cookie);
4970 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named))
4971 if (TARGET_64BIT && ! cum->prototype)
4973 /* Vector parameters get passed in vector register
4974 and also in GPRs or memory, in absence of prototype. */
4977 align_words = (cum->words + 1) & ~1;
4979 if (align_words >= GP_ARG_NUM_REG)
4985 slot = gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
4987 return gen_rtx_PARALLEL (mode,
4989 gen_rtx_EXPR_LIST (VOIDmode,
4991 gen_rtx_EXPR_LIST (VOIDmode,
4992 gen_rtx_REG (mode, cum->vregno),
4996 return gen_rtx_REG (mode, cum->vregno);
4997 else if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
4999 if (named || abi == ABI_V4)
5003 /* Vector parameters to varargs functions under AIX or Darwin
5004 get passed in memory and possibly also in GPRs. */
5005 int align, align_words, n_words;
5006 enum machine_mode part_mode;
5008 /* Vector parameters must be 16-byte aligned. This places them at
5009 2 mod 4 in terms of words in 32-bit mode, since the parameter
5010 save area starts at offset 24 from the stack. In 64-bit mode,
5011 they just have to start on an even word, since the parameter
5012 save area is 16-byte aligned. */
5014 align = (2 - cum->words) & 3;
5016 align = cum->words & 1;
5017 align_words = cum->words + align;
5019 /* Out of registers? Memory, then. */
5020 if (align_words >= GP_ARG_NUM_REG)
5023 if (TARGET_32BIT && TARGET_POWERPC64)
5024 return rs6000_mixed_function_arg (mode, type, align_words);
5026 /* The vector value goes in GPRs. Only the part of the
5027 value in GPRs is reported here. */
5029 n_words = rs6000_arg_size (mode, type);
5030 if (align_words + n_words > GP_ARG_NUM_REG)
5031 /* Fortunately, there are only two possibilities, the value
5032 is either wholly in GPRs or half in GPRs and half not. */
5035 return gen_rtx_REG (part_mode, GP_ARG_MIN_REG + align_words);
5038 else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode))
5039 return rs6000_spe_function_arg (cum, mode, type);
5040 else if (abi == ABI_V4)
5042 if (TARGET_HARD_FLOAT && TARGET_FPRS
5043 && (mode == SFmode || mode == DFmode))
5045 if (cum->fregno <= FP_ARG_V4_MAX_REG)
5046 return gen_rtx_REG (mode, cum->fregno);
5052 int n_words = rs6000_arg_size (mode, type);
5053 int gregno = cum->sysv_gregno;
5055 /* Long long and SPE vectors are put in (r3,r4), (r5,r6),
5056 (r7,r8) or (r9,r10). As does any other 2 word item such
5057 as complex int due to a historical mistake. */
5059 gregno += (1 - gregno) & 1;
5061 /* Multi-reg args are not split between registers and stack. */
5062 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
5065 if (TARGET_32BIT && TARGET_POWERPC64)
5066 return rs6000_mixed_function_arg (mode, type,
5067 gregno - GP_ARG_MIN_REG);
5068 return gen_rtx_REG (mode, gregno);
5073 int align = function_arg_boundary (mode, type) / PARM_BOUNDARY - 1;
5074 int align_words = cum->words + (cum->words & align);
5076 if (USE_FP_FOR_ARG_P (cum, mode, type))
5078 rtx rvec[GP_ARG_NUM_REG + 1];
5082 enum machine_mode fmode = mode;
5083 unsigned long n_fpreg = (GET_MODE_SIZE (mode) + 7) >> 3;
5085 if (cum->fregno + n_fpreg > FP_ARG_MAX_REG + 1)
5087 /* Currently, we only ever need one reg here because complex
5088 doubles are split. */
5089 if (cum->fregno != FP_ARG_MAX_REG || fmode != TFmode)
5092 /* Long double split over regs and memory. */
5096 /* Do we also need to pass this arg in the parameter save
5099 && (cum->nargs_prototype <= 0
5100 || (DEFAULT_ABI == ABI_AIX
5102 && align_words >= GP_ARG_NUM_REG)));
5104 if (!needs_psave && mode == fmode)
5105 return gen_rtx_REG (fmode, cum->fregno);
5110 /* Describe the part that goes in gprs or the stack.
5111 This piece must come first, before the fprs. */
5112 if (align_words < GP_ARG_NUM_REG)
5114 unsigned long n_words = rs6000_arg_size (mode, type);
5116 if (align_words + n_words > GP_ARG_NUM_REG
5117 || (TARGET_32BIT && TARGET_POWERPC64))
5119 /* If this is partially on the stack, then we only
5120 include the portion actually in registers here. */
5121 enum machine_mode rmode = TARGET_32BIT ? SImode : DImode;
5125 r = gen_rtx_REG (rmode,
5126 GP_ARG_MIN_REG + align_words);
5127 off = GEN_INT (k * GET_MODE_SIZE (rmode));
5128 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, off);
5130 while (++align_words < GP_ARG_NUM_REG && --n_words != 0);
5134 /* The whole arg fits in gprs. */
5135 r = gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
5136 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, const0_rtx);
5140 /* It's entirely in memory. */
5141 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
5144 /* Describe where this piece goes in the fprs. */
5145 r = gen_rtx_REG (fmode, cum->fregno);
5146 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, const0_rtx);
5148 return gen_rtx_PARALLEL (mode, gen_rtvec_v (k, rvec));
5150 else if (align_words < GP_ARG_NUM_REG)
5152 if (TARGET_32BIT && TARGET_POWERPC64)
5153 return rs6000_mixed_function_arg (mode, type, align_words);
5155 return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
5162 /* For an arg passed partly in registers and partly in memory, this is
5163 the number of registers used. For args passed entirely in registers
5164 or entirely in memory, zero. When an arg is described by a PARALLEL,
5165 perhaps using more than one register type, this function returns the
5166 number of registers used by the first element of the PARALLEL. */
5169 function_arg_partial_nregs (CUMULATIVE_ARGS *cum, enum machine_mode mode,
5170 tree type, int named)
5177 if (DEFAULT_ABI == ABI_V4)
5180 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named)
5181 && cum->nargs_prototype >= 0)
5184 align = function_arg_boundary (mode, type) / PARM_BOUNDARY - 1;
5185 parm_offset = TARGET_32BIT ? 2 : 0;
5186 align_words = cum->words + ((parm_offset - cum->words) & align);
5188 if (USE_FP_FOR_ARG_P (cum, mode, type)
5189 /* If we are passing this arg in gprs as well, then this function
5190 should return the number of gprs (or memory) partially passed,
5191 *not* the number of fprs. */
5193 && (cum->nargs_prototype <= 0
5194 || (DEFAULT_ABI == ABI_AIX
5196 && align_words >= GP_ARG_NUM_REG))))
5198 if (cum->fregno + ((GET_MODE_SIZE (mode) + 7) >> 3) > FP_ARG_MAX_REG + 1)
5199 ret = FP_ARG_MAX_REG + 1 - cum->fregno;
5200 else if (cum->nargs_prototype >= 0)
5204 if (align_words < GP_ARG_NUM_REG
5205 && GP_ARG_NUM_REG < align_words + rs6000_arg_size (mode, type))
5206 ret = GP_ARG_NUM_REG - align_words;
5208 if (ret != 0 && TARGET_DEBUG_ARG)
5209 fprintf (stderr, "function_arg_partial_nregs: %d\n", ret);
5214 /* A C expression that indicates when an argument must be passed by
5215 reference. If nonzero for an argument, a copy of that argument is
5216 made in memory and a pointer to the argument is passed instead of
5217 the argument itself. The pointer is passed in whatever way is
5218 appropriate for passing a pointer to that type.
5220 Under V.4, aggregates and long double are passed by reference.
5222 As an extension to all 32-bit ABIs, AltiVec vectors are passed by
5223 reference unless the AltiVec vector extension ABI is in force.
5225 As an extension to all ABIs, variable sized types are passed by
5229 rs6000_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
5230 enum machine_mode mode ATTRIBUTE_UNUSED,
5231 tree type, bool named ATTRIBUTE_UNUSED)
5233 if ((DEFAULT_ABI == ABI_V4
5234 && ((type && AGGREGATE_TYPE_P (type))
5236 || (TARGET_32BIT && !TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
5237 || (type && int_size_in_bytes (type) < 0))
5239 if (TARGET_DEBUG_ARG)
5240 fprintf (stderr, "function_arg_pass_by_reference\n");
5248 rs6000_move_block_from_reg (int regno, rtx x, int nregs)
5251 enum machine_mode reg_mode = TARGET_32BIT ? SImode : DImode;
5256 for (i = 0; i < nregs; i++)
5258 rtx tem = adjust_address_nv (x, reg_mode, i*GET_MODE_SIZE(reg_mode));
5259 if (reload_completed)
5261 if (! strict_memory_address_p (reg_mode, XEXP (tem, 0)))
5264 tem = simplify_gen_subreg (reg_mode, x, BLKmode,
5265 i * GET_MODE_SIZE(reg_mode));
5268 tem = replace_equiv_address (tem, XEXP (tem, 0));
5270 if (tem == NULL_RTX)
5273 emit_move_insn (tem, gen_rtx_REG (reg_mode, regno + i));
5278 /* Perform any needed actions needed for a function that is receiving a
5279 variable number of arguments.
5283 MODE and TYPE are the mode and type of the current parameter.
5285 PRETEND_SIZE is a variable that should be set to the amount of stack
5286 that must be pushed by the prolog to pretend that our caller pushed
5289 Normally, this macro will push all remaining incoming registers on the
5290 stack and set PRETEND_SIZE to the length of the registers pushed. */
5293 setup_incoming_varargs (CUMULATIVE_ARGS *cum, enum machine_mode mode,
5294 tree type, int *pretend_size ATTRIBUTE_UNUSED, int no_rtl)
5296 CUMULATIVE_ARGS next_cum;
5297 int reg_size = TARGET_32BIT ? 4 : 8;
5298 rtx save_area = NULL_RTX, mem;
5299 int first_reg_offset, set;
5301 /* Skip the last named argument. */
5303 function_arg_advance (&next_cum, mode, type, 1);
5305 if (DEFAULT_ABI == ABI_V4)
5308 save_area = plus_constant (virtual_stack_vars_rtx,
5309 - RS6000_VARARGS_SIZE);
5311 first_reg_offset = next_cum.sysv_gregno - GP_ARG_MIN_REG;
5315 first_reg_offset = next_cum.words;
5316 save_area = virtual_incoming_args_rtx;
5318 if (targetm.calls.must_pass_in_stack (mode, type))
5319 first_reg_offset += rs6000_arg_size (TYPE_MODE (type), type);
5322 set = get_varargs_alias_set ();
5323 if (! no_rtl && first_reg_offset < GP_ARG_NUM_REG)
5325 mem = gen_rtx_MEM (BLKmode,
5326 plus_constant (save_area,
5327 first_reg_offset * reg_size)),
5328 set_mem_alias_set (mem, set);
5329 set_mem_align (mem, BITS_PER_WORD);
5331 rs6000_move_block_from_reg (GP_ARG_MIN_REG + first_reg_offset, mem,
5332 GP_ARG_NUM_REG - first_reg_offset);
5335 /* Save FP registers if needed. */
5336 if (DEFAULT_ABI == ABI_V4
5337 && TARGET_HARD_FLOAT && TARGET_FPRS
5339 && next_cum.fregno <= FP_ARG_V4_MAX_REG)
5341 int fregno = next_cum.fregno;
5342 rtx cr1 = gen_rtx_REG (CCmode, CR1_REGNO);
5343 rtx lab = gen_label_rtx ();
5344 int off = (GP_ARG_NUM_REG * reg_size) + ((fregno - FP_ARG_MIN_REG) * 8);
5346 emit_jump_insn (gen_rtx_SET (VOIDmode,
5348 gen_rtx_IF_THEN_ELSE (VOIDmode,
5349 gen_rtx_NE (VOIDmode, cr1,
5351 gen_rtx_LABEL_REF (VOIDmode, lab),
5354 while (fregno <= FP_ARG_V4_MAX_REG)
5356 mem = gen_rtx_MEM (DFmode, plus_constant (save_area, off));
5357 set_mem_alias_set (mem, set);
5358 emit_move_insn (mem, gen_rtx_REG (DFmode, fregno));
5367 /* Create the va_list data type. */
5370 rs6000_build_builtin_va_list (void)
5372 tree f_gpr, f_fpr, f_res, f_ovf, f_sav, record, type_decl;
5374 /* For AIX, prefer 'char *' because that's what the system
5375 header files like. */
5376 if (DEFAULT_ABI != ABI_V4)
5377 return build_pointer_type (char_type_node);
5379 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
5380 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
5382 f_gpr = build_decl (FIELD_DECL, get_identifier ("gpr"),
5383 unsigned_char_type_node);
5384 f_fpr = build_decl (FIELD_DECL, get_identifier ("fpr"),
5385 unsigned_char_type_node);
5386 /* Give the two bytes of padding a name, so that -Wpadded won't warn on
5388 f_res = build_decl (FIELD_DECL, get_identifier ("reserved"),
5389 short_unsigned_type_node);
5390 f_ovf = build_decl (FIELD_DECL, get_identifier ("overflow_arg_area"),
5392 f_sav = build_decl (FIELD_DECL, get_identifier ("reg_save_area"),
5395 DECL_FIELD_CONTEXT (f_gpr) = record;
5396 DECL_FIELD_CONTEXT (f_fpr) = record;
5397 DECL_FIELD_CONTEXT (f_res) = record;
5398 DECL_FIELD_CONTEXT (f_ovf) = record;
5399 DECL_FIELD_CONTEXT (f_sav) = record;
5401 TREE_CHAIN (record) = type_decl;
5402 TYPE_NAME (record) = type_decl;
5403 TYPE_FIELDS (record) = f_gpr;
5404 TREE_CHAIN (f_gpr) = f_fpr;
5405 TREE_CHAIN (f_fpr) = f_res;
5406 TREE_CHAIN (f_res) = f_ovf;
5407 TREE_CHAIN (f_ovf) = f_sav;
5409 layout_type (record);
5411 /* The correct type is an array type of one element. */
5412 return build_array_type (record, build_index_type (size_zero_node));
5415 /* Implement va_start. */
5418 rs6000_va_start (tree valist, rtx nextarg)
5420 HOST_WIDE_INT words, n_gpr, n_fpr;
5421 tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
5422 tree gpr, fpr, ovf, sav, t;
5424 /* Only SVR4 needs something special. */
5425 if (DEFAULT_ABI != ABI_V4)
5427 std_expand_builtin_va_start (valist, nextarg);
5431 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
5432 f_fpr = TREE_CHAIN (f_gpr);
5433 f_res = TREE_CHAIN (f_fpr);
5434 f_ovf = TREE_CHAIN (f_res);
5435 f_sav = TREE_CHAIN (f_ovf);
5437 valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
5438 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
5439 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr, NULL_TREE);
5440 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf, NULL_TREE);
5441 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav, NULL_TREE);
5443 /* Count number of gp and fp argument registers used. */
5444 words = current_function_args_info.words;
5445 n_gpr = current_function_args_info.sysv_gregno - GP_ARG_MIN_REG;
5446 n_fpr = current_function_args_info.fregno - FP_ARG_MIN_REG;
5448 if (TARGET_DEBUG_ARG)
5449 fprintf (stderr, "va_start: words = "HOST_WIDE_INT_PRINT_DEC", n_gpr = "
5450 HOST_WIDE_INT_PRINT_DEC", n_fpr = "HOST_WIDE_INT_PRINT_DEC"\n",
5451 words, n_gpr, n_fpr);
5453 t = build (MODIFY_EXPR, TREE_TYPE (gpr), gpr,
5454 build_int_cst (NULL_TREE, n_gpr));
5455 TREE_SIDE_EFFECTS (t) = 1;
5456 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5458 t = build (MODIFY_EXPR, TREE_TYPE (fpr), fpr,
5459 build_int_cst (NULL_TREE, n_fpr));
5460 TREE_SIDE_EFFECTS (t) = 1;
5461 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5463 /* Find the overflow area. */
5464 t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
5466 t = build (PLUS_EXPR, TREE_TYPE (ovf), t,
5467 build_int_cst (NULL_TREE, words * UNITS_PER_WORD));
5468 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
5469 TREE_SIDE_EFFECTS (t) = 1;
5470 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5472 /* Find the register save area. */
5473 t = make_tree (TREE_TYPE (sav), virtual_stack_vars_rtx);
5474 t = build (PLUS_EXPR, TREE_TYPE (sav), t,
5475 build_int_cst (NULL_TREE, -RS6000_VARARGS_SIZE));
5476 t = build (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
5477 TREE_SIDE_EFFECTS (t) = 1;
5478 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5481 /* Implement va_arg. */
5484 rs6000_gimplify_va_arg (tree valist, tree type, tree *pre_p, tree *post_p)
5486 tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
5487 tree gpr, fpr, ovf, sav, reg, t, u;
5488 int size, rsize, n_reg, sav_ofs, sav_scale;
5489 tree lab_false, lab_over, addr;
5491 tree ptrtype = build_pointer_type (type);
5493 if (pass_by_reference (NULL, TYPE_MODE (type), type, false))
5495 t = rs6000_gimplify_va_arg (valist, ptrtype, pre_p, post_p);
5496 return build_fold_indirect_ref (t);
5499 if (DEFAULT_ABI != ABI_V4)
5501 if (targetm.calls.split_complex_arg && TREE_CODE (type) == COMPLEX_TYPE)
5503 tree elem_type = TREE_TYPE (type);
5504 enum machine_mode elem_mode = TYPE_MODE (elem_type);
5505 int elem_size = GET_MODE_SIZE (elem_mode);
5507 if (elem_size < UNITS_PER_WORD)
5509 tree real_part, imag_part;
5510 tree post = NULL_TREE;
5512 real_part = rs6000_gimplify_va_arg (valist, elem_type, pre_p,
5514 /* Copy the value into a temporary, lest the formal temporary
5515 be reused out from under us. */
5516 real_part = get_initialized_tmp_var (real_part, pre_p, &post);
5517 append_to_statement_list (post, pre_p);
5519 imag_part = rs6000_gimplify_va_arg (valist, elem_type, pre_p,
5522 return build (COMPLEX_EXPR, type, real_part, imag_part);
5526 return std_gimplify_va_arg_expr (valist, type, pre_p, post_p);
5529 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
5530 f_fpr = TREE_CHAIN (f_gpr);
5531 f_res = TREE_CHAIN (f_fpr);
5532 f_ovf = TREE_CHAIN (f_res);
5533 f_sav = TREE_CHAIN (f_ovf);
5535 valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
5536 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
5537 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr, NULL_TREE);
5538 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf, NULL_TREE);
5539 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav, NULL_TREE);
5541 size = int_size_in_bytes (type);
5542 rsize = (size + 3) / 4;
5545 if (TARGET_HARD_FLOAT && TARGET_FPRS
5546 && (TYPE_MODE (type) == SFmode || TYPE_MODE (type) == DFmode))
5548 /* FP args go in FP registers, if present. */
5553 if (TYPE_MODE (type) == DFmode)
5558 /* Otherwise into GP registers. */
5567 /* Pull the value out of the saved registers.... */
5570 addr = create_tmp_var (ptr_type_node, "addr");
5571 DECL_POINTER_ALIAS_SET (addr) = get_varargs_alias_set ();
5573 /* AltiVec vectors never go in registers when -mabi=altivec. */
5574 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (TYPE_MODE (type)))
5578 lab_false = create_artificial_label ();
5579 lab_over = create_artificial_label ();
5581 /* Long long and SPE vectors are aligned in the registers.
5582 As are any other 2 gpr item such as complex int due to a
5583 historical mistake. */
5587 u = build2 (BIT_AND_EXPR, TREE_TYPE (reg), reg,
5588 size_int (n_reg - 1));
5589 u = build2 (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg, u);
5592 t = fold_convert (TREE_TYPE (reg), size_int (8 - n_reg + 1));
5593 t = build2 (GE_EXPR, boolean_type_node, u, t);
5594 u = build1 (GOTO_EXPR, void_type_node, lab_false);
5595 t = build3 (COND_EXPR, void_type_node, t, u, NULL_TREE);
5596 gimplify_and_add (t, pre_p);
5600 t = build2 (PLUS_EXPR, ptr_type_node, sav, size_int (sav_ofs));
5602 u = build2 (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg, size_int (n_reg));
5603 u = build1 (CONVERT_EXPR, integer_type_node, u);
5604 u = build2 (MULT_EXPR, integer_type_node, u, size_int (sav_scale));
5605 t = build2 (PLUS_EXPR, ptr_type_node, t, u);
5607 t = build2 (MODIFY_EXPR, void_type_node, addr, t);
5608 gimplify_and_add (t, pre_p);
5610 t = build1 (GOTO_EXPR, void_type_node, lab_over);
5611 gimplify_and_add (t, pre_p);
5613 t = build1 (LABEL_EXPR, void_type_node, lab_false);
5614 append_to_statement_list (t, pre_p);
5618 /* Ensure that we don't find any more args in regs.
5619 Alignment has taken care of the n_reg == 2 case. */
5620 t = build (MODIFY_EXPR, TREE_TYPE (reg), reg, size_int (8));
5621 gimplify_and_add (t, pre_p);
5625 /* ... otherwise out of the overflow area. */
5627 /* Care for on-stack alignment if needed. */
5631 t = build2 (PLUS_EXPR, TREE_TYPE (t), t, size_int (align - 1));
5632 t = build2 (BIT_AND_EXPR, TREE_TYPE (t), t,
5633 build_int_cst (NULL_TREE, -align));
5635 gimplify_expr (&t, pre_p, NULL, is_gimple_val, fb_rvalue);
5637 u = build2 (MODIFY_EXPR, void_type_node, addr, t);
5638 gimplify_and_add (u, pre_p);
5640 t = build2 (PLUS_EXPR, TREE_TYPE (t), t, size_int (size));
5641 t = build2 (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
5642 gimplify_and_add (t, pre_p);
5646 t = build1 (LABEL_EXPR, void_type_node, lab_over);
5647 append_to_statement_list (t, pre_p);
5650 addr = fold_convert (ptrtype, addr);
5651 return build_fold_indirect_ref (addr);
5656 #define def_builtin(MASK, NAME, TYPE, CODE) \
5658 if ((MASK) & target_flags) \
5659 lang_hooks.builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, \
5663 /* Simple ternary operations: VECd = foo (VECa, VECb, VECc). */
5665 static const struct builtin_description bdesc_3arg[] =
5667 { MASK_ALTIVEC, CODE_FOR_altivec_vmaddfp, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP },
5668 { MASK_ALTIVEC, CODE_FOR_altivec_vmhaddshs, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS },
5669 { MASK_ALTIVEC, CODE_FOR_altivec_vmhraddshs, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS },
5670 { MASK_ALTIVEC, CODE_FOR_altivec_vmladduhm, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM},
5671 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumubm, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM },
5672 { MASK_ALTIVEC, CODE_FOR_altivec_vmsummbm, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM },
5673 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhm, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM },
5674 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshm, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM },
5675 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhs, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS },
5676 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshs, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS },
5677 { MASK_ALTIVEC, CODE_FOR_altivec_vnmsubfp, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP },
5678 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4sf, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF },
5679 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4si, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI },
5680 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_8hi, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI },
5681 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_16qi, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI },
5682 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4sf, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF },
5683 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4si, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI },
5684 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_8hi, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI },
5685 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_16qi, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI },
5686 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_16qi, "__builtin_altivec_vsldoi_16qi", ALTIVEC_BUILTIN_VSLDOI_16QI },
5687 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_8hi, "__builtin_altivec_vsldoi_8hi", ALTIVEC_BUILTIN_VSLDOI_8HI },
5688 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4si, "__builtin_altivec_vsldoi_4si", ALTIVEC_BUILTIN_VSLDOI_4SI },
5689 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4sf, "__builtin_altivec_vsldoi_4sf", ALTIVEC_BUILTIN_VSLDOI_4SF },
5692 /* DST operations: void foo (void *, const int, const char). */
5694 static const struct builtin_description bdesc_dst[] =
5696 { MASK_ALTIVEC, CODE_FOR_altivec_dst, "__builtin_altivec_dst", ALTIVEC_BUILTIN_DST },
5697 { MASK_ALTIVEC, CODE_FOR_altivec_dstt, "__builtin_altivec_dstt", ALTIVEC_BUILTIN_DSTT },
5698 { MASK_ALTIVEC, CODE_FOR_altivec_dstst, "__builtin_altivec_dstst", ALTIVEC_BUILTIN_DSTST },
5699 { MASK_ALTIVEC, CODE_FOR_altivec_dststt, "__builtin_altivec_dststt", ALTIVEC_BUILTIN_DSTSTT }
5702 /* Simple binary operations: VECc = foo (VECa, VECb). */
5704 static struct builtin_description bdesc_2arg[] =
5706 { MASK_ALTIVEC, CODE_FOR_addv16qi3, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM },
5707 { MASK_ALTIVEC, CODE_FOR_addv8hi3, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM },
5708 { MASK_ALTIVEC, CODE_FOR_addv4si3, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM },
5709 { MASK_ALTIVEC, CODE_FOR_addv4sf3, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP },
5710 { MASK_ALTIVEC, CODE_FOR_altivec_vaddcuw, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW },
5711 { MASK_ALTIVEC, CODE_FOR_altivec_vaddubs, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS },
5712 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsbs, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS },
5713 { MASK_ALTIVEC, CODE_FOR_altivec_vadduhs, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS },
5714 { MASK_ALTIVEC, CODE_FOR_altivec_vaddshs, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS },
5715 { MASK_ALTIVEC, CODE_FOR_altivec_vadduws, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS },
5716 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsws, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS },
5717 { MASK_ALTIVEC, CODE_FOR_andv4si3, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND },
5718 { MASK_ALTIVEC, CODE_FOR_altivec_vandc, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC },
5719 { MASK_ALTIVEC, CODE_FOR_altivec_vavgub, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB },
5720 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsb, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB },
5721 { MASK_ALTIVEC, CODE_FOR_altivec_vavguh, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH },
5722 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsh, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH },
5723 { MASK_ALTIVEC, CODE_FOR_altivec_vavguw, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW },
5724 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsw, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW },
5725 { MASK_ALTIVEC, CODE_FOR_altivec_vcfux, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX },
5726 { MASK_ALTIVEC, CODE_FOR_altivec_vcfsx, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX },
5727 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpbfp, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP },
5728 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequb, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB },
5729 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequh, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH },
5730 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequw, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW },
5731 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpeqfp, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP },
5732 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgefp, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP },
5733 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtub, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB },
5734 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsb, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB },
5735 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuh, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH },
5736 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsh, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH },
5737 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuw, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW },
5738 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsw, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW },
5739 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtfp, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP },
5740 { MASK_ALTIVEC, CODE_FOR_altivec_vctsxs, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS },
5741 { MASK_ALTIVEC, CODE_FOR_altivec_vctuxs, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS },
5742 { MASK_ALTIVEC, CODE_FOR_umaxv16qi3, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB },
5743 { MASK_ALTIVEC, CODE_FOR_smaxv16qi3, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB },
5744 { MASK_ALTIVEC, CODE_FOR_umaxv8hi3, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH },
5745 { MASK_ALTIVEC, CODE_FOR_smaxv8hi3, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH },
5746 { MASK_ALTIVEC, CODE_FOR_umaxv4si3, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW },
5747 { MASK_ALTIVEC, CODE_FOR_smaxv4si3, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW },
5748 { MASK_ALTIVEC, CODE_FOR_smaxv4sf3, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP },
5749 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghb, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB },
5750 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghh, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH },
5751 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghw, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW },
5752 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglb, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB },
5753 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglh, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH },
5754 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglw, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW },
5755 { MASK_ALTIVEC, CODE_FOR_uminv16qi3, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB },
5756 { MASK_ALTIVEC, CODE_FOR_sminv16qi3, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB },
5757 { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH },
5758 { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH },
5759 { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW },
5760 { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW },
5761 { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP },
5762 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleub, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB },
5763 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesb, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB },
5764 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleuh, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH },
5765 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesh, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH },
5766 { MASK_ALTIVEC, CODE_FOR_altivec_vmuloub, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB },
5767 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosb, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB },
5768 { MASK_ALTIVEC, CODE_FOR_altivec_vmulouh, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH },
5769 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosh, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH },
5770 { MASK_ALTIVEC, CODE_FOR_altivec_vnor, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR },
5771 { MASK_ALTIVEC, CODE_FOR_iorv4si3, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR },
5772 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhum, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM },
5773 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwum, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM },
5774 { MASK_ALTIVEC, CODE_FOR_altivec_vpkpx, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX },
5775 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhss, "__builtin_altivec_vpkuhss", ALTIVEC_BUILTIN_VPKUHSS },
5776 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshss, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS },
5777 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwss, "__builtin_altivec_vpkuwss", ALTIVEC_BUILTIN_VPKUWSS },
5778 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswss, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS },
5779 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhus, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS },
5780 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshus, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS },
5781 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwus, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS },
5782 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswus, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS },
5783 { MASK_ALTIVEC, CODE_FOR_altivec_vrlb, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB },
5784 { MASK_ALTIVEC, CODE_FOR_altivec_vrlh, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH },
5785 { MASK_ALTIVEC, CODE_FOR_altivec_vrlw, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW },
5786 { MASK_ALTIVEC, CODE_FOR_altivec_vslb, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB },
5787 { MASK_ALTIVEC, CODE_FOR_altivec_vslh, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH },
5788 { MASK_ALTIVEC, CODE_FOR_altivec_vslw, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW },
5789 { MASK_ALTIVEC, CODE_FOR_altivec_vsl, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL },
5790 { MASK_ALTIVEC, CODE_FOR_altivec_vslo, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO },
5791 { MASK_ALTIVEC, CODE_FOR_altivec_vspltb, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB },
5792 { MASK_ALTIVEC, CODE_FOR_altivec_vsplth, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH },
5793 { MASK_ALTIVEC, CODE_FOR_altivec_vspltw, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW },
5794 { MASK_ALTIVEC, CODE_FOR_altivec_vsrb, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB },
5795 { MASK_ALTIVEC, CODE_FOR_altivec_vsrh, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH },
5796 { MASK_ALTIVEC, CODE_FOR_altivec_vsrw, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW },
5797 { MASK_ALTIVEC, CODE_FOR_altivec_vsrab, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB },
5798 { MASK_ALTIVEC, CODE_FOR_altivec_vsrah, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH },
5799 { MASK_ALTIVEC, CODE_FOR_altivec_vsraw, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW },
5800 { MASK_ALTIVEC, CODE_FOR_altivec_vsr, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR },
5801 { MASK_ALTIVEC, CODE_FOR_altivec_vsro, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO },
5802 { MASK_ALTIVEC, CODE_FOR_subv16qi3, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM },
5803 { MASK_ALTIVEC, CODE_FOR_subv8hi3, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM },
5804 { MASK_ALTIVEC, CODE_FOR_subv4si3, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM },
5805 { MASK_ALTIVEC, CODE_FOR_subv4sf3, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP },
5806 { MASK_ALTIVEC, CODE_FOR_altivec_vsubcuw, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW },
5807 { MASK_ALTIVEC, CODE_FOR_altivec_vsububs, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS },
5808 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsbs, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS },
5809 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuhs, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS },
5810 { MASK_ALTIVEC, CODE_FOR_altivec_vsubshs, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS },
5811 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuws, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS },
5812 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsws, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS },
5813 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4ubs, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS },
5814 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4sbs, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS },
5815 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4shs, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS },
5816 { MASK_ALTIVEC, CODE_FOR_altivec_vsum2sws, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS },
5817 { MASK_ALTIVEC, CODE_FOR_altivec_vsumsws, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS },
5818 { MASK_ALTIVEC, CODE_FOR_xorv4si3, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR },
5820 /* Place holder, leave as first spe builtin. */
5821 { 0, CODE_FOR_spe_evaddw, "__builtin_spe_evaddw", SPE_BUILTIN_EVADDW },
5822 { 0, CODE_FOR_spe_evand, "__builtin_spe_evand", SPE_BUILTIN_EVAND },
5823 { 0, CODE_FOR_spe_evandc, "__builtin_spe_evandc", SPE_BUILTIN_EVANDC },
5824 { 0, CODE_FOR_spe_evdivws, "__builtin_spe_evdivws", SPE_BUILTIN_EVDIVWS },
5825 { 0, CODE_FOR_spe_evdivwu, "__builtin_spe_evdivwu", SPE_BUILTIN_EVDIVWU },
5826 { 0, CODE_FOR_spe_eveqv, "__builtin_spe_eveqv", SPE_BUILTIN_EVEQV },
5827 { 0, CODE_FOR_spe_evfsadd, "__builtin_spe_evfsadd", SPE_BUILTIN_EVFSADD },
5828 { 0, CODE_FOR_spe_evfsdiv, "__builtin_spe_evfsdiv", SPE_BUILTIN_EVFSDIV },
5829 { 0, CODE_FOR_spe_evfsmul, "__builtin_spe_evfsmul", SPE_BUILTIN_EVFSMUL },
5830 { 0, CODE_FOR_spe_evfssub, "__builtin_spe_evfssub", SPE_BUILTIN_EVFSSUB },
5831 { 0, CODE_FOR_spe_evmergehi, "__builtin_spe_evmergehi", SPE_BUILTIN_EVMERGEHI },
5832 { 0, CODE_FOR_spe_evmergehilo, "__builtin_spe_evmergehilo", SPE_BUILTIN_EVMERGEHILO },
5833 { 0, CODE_FOR_spe_evmergelo, "__builtin_spe_evmergelo", SPE_BUILTIN_EVMERGELO },
5834 { 0, CODE_FOR_spe_evmergelohi, "__builtin_spe_evmergelohi", SPE_BUILTIN_EVMERGELOHI },
5835 { 0, CODE_FOR_spe_evmhegsmfaa, "__builtin_spe_evmhegsmfaa", SPE_BUILTIN_EVMHEGSMFAA },
5836 { 0, CODE_FOR_spe_evmhegsmfan, "__builtin_spe_evmhegsmfan", SPE_BUILTIN_EVMHEGSMFAN },
5837 { 0, CODE_FOR_spe_evmhegsmiaa, "__builtin_spe_evmhegsmiaa", SPE_BUILTIN_EVMHEGSMIAA },
5838 { 0, CODE_FOR_spe_evmhegsmian, "__builtin_spe_evmhegsmian", SPE_BUILTIN_EVMHEGSMIAN },
5839 { 0, CODE_FOR_spe_evmhegumiaa, "__builtin_spe_evmhegumiaa", SPE_BUILTIN_EVMHEGUMIAA },
5840 { 0, CODE_FOR_spe_evmhegumian, "__builtin_spe_evmhegumian", SPE_BUILTIN_EVMHEGUMIAN },
5841 { 0, CODE_FOR_spe_evmhesmf, "__builtin_spe_evmhesmf", SPE_BUILTIN_EVMHESMF },
5842 { 0, CODE_FOR_spe_evmhesmfa, "__builtin_spe_evmhesmfa", SPE_BUILTIN_EVMHESMFA },
5843 { 0, CODE_FOR_spe_evmhesmfaaw, "__builtin_spe_evmhesmfaaw", SPE_BUILTIN_EVMHESMFAAW },
5844 { 0, CODE_FOR_spe_evmhesmfanw, "__builtin_spe_evmhesmfanw", SPE_BUILTIN_EVMHESMFANW },
5845 { 0, CODE_FOR_spe_evmhesmi, "__builtin_spe_evmhesmi", SPE_BUILTIN_EVMHESMI },
5846 { 0, CODE_FOR_spe_evmhesmia, "__builtin_spe_evmhesmia", SPE_BUILTIN_EVMHESMIA },
5847 { 0, CODE_FOR_spe_evmhesmiaaw, "__builtin_spe_evmhesmiaaw", SPE_BUILTIN_EVMHESMIAAW },
5848 { 0, CODE_FOR_spe_evmhesmianw, "__builtin_spe_evmhesmianw", SPE_BUILTIN_EVMHESMIANW },
5849 { 0, CODE_FOR_spe_evmhessf, "__builtin_spe_evmhessf", SPE_BUILTIN_EVMHESSF },
5850 { 0, CODE_FOR_spe_evmhessfa, "__builtin_spe_evmhessfa", SPE_BUILTIN_EVMHESSFA },
5851 { 0, CODE_FOR_spe_evmhessfaaw, "__builtin_spe_evmhessfaaw", SPE_BUILTIN_EVMHESSFAAW },
5852 { 0, CODE_FOR_spe_evmhessfanw, "__builtin_spe_evmhessfanw", SPE_BUILTIN_EVMHESSFANW },
5853 { 0, CODE_FOR_spe_evmhessiaaw, "__builtin_spe_evmhessiaaw", SPE_BUILTIN_EVMHESSIAAW },
5854 { 0, CODE_FOR_spe_evmhessianw, "__builtin_spe_evmhessianw", SPE_BUILTIN_EVMHESSIANW },
5855 { 0, CODE_FOR_spe_evmheumi, "__builtin_spe_evmheumi", SPE_BUILTIN_EVMHEUMI },
5856 { 0, CODE_FOR_spe_evmheumia, "__builtin_spe_evmheumia", SPE_BUILTIN_EVMHEUMIA },
5857 { 0, CODE_FOR_spe_evmheumiaaw, "__builtin_spe_evmheumiaaw", SPE_BUILTIN_EVMHEUMIAAW },
5858 { 0, CODE_FOR_spe_evmheumianw, "__builtin_spe_evmheumianw", SPE_BUILTIN_EVMHEUMIANW },
5859 { 0, CODE_FOR_spe_evmheusiaaw, "__builtin_spe_evmheusiaaw", SPE_BUILTIN_EVMHEUSIAAW },
5860 { 0, CODE_FOR_spe_evmheusianw, "__builtin_spe_evmheusianw", SPE_BUILTIN_EVMHEUSIANW },
5861 { 0, CODE_FOR_spe_evmhogsmfaa, "__builtin_spe_evmhogsmfaa", SPE_BUILTIN_EVMHOGSMFAA },
5862 { 0, CODE_FOR_spe_evmhogsmfan, "__builtin_spe_evmhogsmfan", SPE_BUILTIN_EVMHOGSMFAN },
5863 { 0, CODE_FOR_spe_evmhogsmiaa, "__builtin_spe_evmhogsmiaa", SPE_BUILTIN_EVMHOGSMIAA },
5864 { 0, CODE_FOR_spe_evmhogsmian, "__builtin_spe_evmhogsmian", SPE_BUILTIN_EVMHOGSMIAN },
5865 { 0, CODE_FOR_spe_evmhogumiaa, "__builtin_spe_evmhogumiaa", SPE_BUILTIN_EVMHOGUMIAA },
5866 { 0, CODE_FOR_spe_evmhogumian, "__builtin_spe_evmhogumian", SPE_BUILTIN_EVMHOGUMIAN },
5867 { 0, CODE_FOR_spe_evmhosmf, "__builtin_spe_evmhosmf", SPE_BUILTIN_EVMHOSMF },
5868 { 0, CODE_FOR_spe_evmhosmfa, "__builtin_spe_evmhosmfa", SPE_BUILTIN_EVMHOSMFA },
5869 { 0, CODE_FOR_spe_evmhosmfaaw, "__builtin_spe_evmhosmfaaw", SPE_BUILTIN_EVMHOSMFAAW },
5870 { 0, CODE_FOR_spe_evmhosmfanw, "__builtin_spe_evmhosmfanw", SPE_BUILTIN_EVMHOSMFANW },
5871 { 0, CODE_FOR_spe_evmhosmi, "__builtin_spe_evmhosmi", SPE_BUILTIN_EVMHOSMI },
5872 { 0, CODE_FOR_spe_evmhosmia, "__builtin_spe_evmhosmia", SPE_BUILTIN_EVMHOSMIA },
5873 { 0, CODE_FOR_spe_evmhosmiaaw, "__builtin_spe_evmhosmiaaw", SPE_BUILTIN_EVMHOSMIAAW },
5874 { 0, CODE_FOR_spe_evmhosmianw, "__builtin_spe_evmhosmianw", SPE_BUILTIN_EVMHOSMIANW },
5875 { 0, CODE_FOR_spe_evmhossf, "__builtin_spe_evmhossf", SPE_BUILTIN_EVMHOSSF },
5876 { 0, CODE_FOR_spe_evmhossfa, "__builtin_spe_evmhossfa", SPE_BUILTIN_EVMHOSSFA },
5877 { 0, CODE_FOR_spe_evmhossfaaw, "__builtin_spe_evmhossfaaw", SPE_BUILTIN_EVMHOSSFAAW },
5878 { 0, CODE_FOR_spe_evmhossfanw, "__builtin_spe_evmhossfanw", SPE_BUILTIN_EVMHOSSFANW },
5879 { 0, CODE_FOR_spe_evmhossiaaw, "__builtin_spe_evmhossiaaw", SPE_BUILTIN_EVMHOSSIAAW },
5880 { 0, CODE_FOR_spe_evmhossianw, "__builtin_spe_evmhossianw", SPE_BUILTIN_EVMHOSSIANW },
5881 { 0, CODE_FOR_spe_evmhoumi, "__builtin_spe_evmhoumi", SPE_BUILTIN_EVMHOUMI },
5882 { 0, CODE_FOR_spe_evmhoumia, "__builtin_spe_evmhoumia", SPE_BUILTIN_EVMHOUMIA },
5883 { 0, CODE_FOR_spe_evmhoumiaaw, "__builtin_spe_evmhoumiaaw", SPE_BUILTIN_EVMHOUMIAAW },
5884 { 0, CODE_FOR_spe_evmhoumianw, "__builtin_spe_evmhoumianw", SPE_BUILTIN_EVMHOUMIANW },
5885 { 0, CODE_FOR_spe_evmhousiaaw, "__builtin_spe_evmhousiaaw", SPE_BUILTIN_EVMHOUSIAAW },
5886 { 0, CODE_FOR_spe_evmhousianw, "__builtin_spe_evmhousianw", SPE_BUILTIN_EVMHOUSIANW },
5887 { 0, CODE_FOR_spe_evmwhsmf, "__builtin_spe_evmwhsmf", SPE_BUILTIN_EVMWHSMF },
5888 { 0, CODE_FOR_spe_evmwhsmfa, "__builtin_spe_evmwhsmfa", SPE_BUILTIN_EVMWHSMFA },
5889 { 0, CODE_FOR_spe_evmwhsmi, "__builtin_spe_evmwhsmi", SPE_BUILTIN_EVMWHSMI },
5890 { 0, CODE_FOR_spe_evmwhsmia, "__builtin_spe_evmwhsmia", SPE_BUILTIN_EVMWHSMIA },
5891 { 0, CODE_FOR_spe_evmwhssf, "__builtin_spe_evmwhssf", SPE_BUILTIN_EVMWHSSF },
5892 { 0, CODE_FOR_spe_evmwhssfa, "__builtin_spe_evmwhssfa", SPE_BUILTIN_EVMWHSSFA },
5893 { 0, CODE_FOR_spe_evmwhumi, "__builtin_spe_evmwhumi", SPE_BUILTIN_EVMWHUMI },
5894 { 0, CODE_FOR_spe_evmwhumia, "__builtin_spe_evmwhumia", SPE_BUILTIN_EVMWHUMIA },
5895 { 0, CODE_FOR_spe_evmwlsmiaaw, "__builtin_spe_evmwlsmiaaw", SPE_BUILTIN_EVMWLSMIAAW },
5896 { 0, CODE_FOR_spe_evmwlsmianw, "__builtin_spe_evmwlsmianw", SPE_BUILTIN_EVMWLSMIANW },
5897 { 0, CODE_FOR_spe_evmwlssiaaw, "__builtin_spe_evmwlssiaaw", SPE_BUILTIN_EVMWLSSIAAW },
5898 { 0, CODE_FOR_spe_evmwlssianw, "__builtin_spe_evmwlssianw", SPE_BUILTIN_EVMWLSSIANW },
5899 { 0, CODE_FOR_spe_evmwlumi, "__builtin_spe_evmwlumi", SPE_BUILTIN_EVMWLUMI },
5900 { 0, CODE_FOR_spe_evmwlumia, "__builtin_spe_evmwlumia", SPE_BUILTIN_EVMWLUMIA },
5901 { 0, CODE_FOR_spe_evmwlumiaaw, "__builtin_spe_evmwlumiaaw", SPE_BUILTIN_EVMWLUMIAAW },
5902 { 0, CODE_FOR_spe_evmwlumianw, "__builtin_spe_evmwlumianw", SPE_BUILTIN_EVMWLUMIANW },
5903 { 0, CODE_FOR_spe_evmwlusiaaw, "__builtin_spe_evmwlusiaaw", SPE_BUILTIN_EVMWLUSIAAW },
5904 { 0, CODE_FOR_spe_evmwlusianw, "__builtin_spe_evmwlusianw", SPE_BUILTIN_EVMWLUSIANW },
5905 { 0, CODE_FOR_spe_evmwsmf, "__builtin_spe_evmwsmf", SPE_BUILTIN_EVMWSMF },
5906 { 0, CODE_FOR_spe_evmwsmfa, "__builtin_spe_evmwsmfa", SPE_BUILTIN_EVMWSMFA },
5907 { 0, CODE_FOR_spe_evmwsmfaa, "__builtin_spe_evmwsmfaa", SPE_BUILTIN_EVMWSMFAA },
5908 { 0, CODE_FOR_spe_evmwsmfan, "__builtin_spe_evmwsmfan", SPE_BUILTIN_EVMWSMFAN },
5909 { 0, CODE_FOR_spe_evmwsmi, "__builtin_spe_evmwsmi", SPE_BUILTIN_EVMWSMI },
5910 { 0, CODE_FOR_spe_evmwsmia, "__builtin_spe_evmwsmia", SPE_BUILTIN_EVMWSMIA },
5911 { 0, CODE_FOR_spe_evmwsmiaa, "__builtin_spe_evmwsmiaa", SPE_BUILTIN_EVMWSMIAA },
5912 { 0, CODE_FOR_spe_evmwsmian, "__builtin_spe_evmwsmian", SPE_BUILTIN_EVMWSMIAN },
5913 { 0, CODE_FOR_spe_evmwssf, "__builtin_spe_evmwssf", SPE_BUILTIN_EVMWSSF },
5914 { 0, CODE_FOR_spe_evmwssfa, "__builtin_spe_evmwssfa", SPE_BUILTIN_EVMWSSFA },
5915 { 0, CODE_FOR_spe_evmwssfaa, "__builtin_spe_evmwssfaa", SPE_BUILTIN_EVMWSSFAA },
5916 { 0, CODE_FOR_spe_evmwssfan, "__builtin_spe_evmwssfan", SPE_BUILTIN_EVMWSSFAN },
5917 { 0, CODE_FOR_spe_evmwumi, "__builtin_spe_evmwumi", SPE_BUILTIN_EVMWUMI },
5918 { 0, CODE_FOR_spe_evmwumia, "__builtin_spe_evmwumia", SPE_BUILTIN_EVMWUMIA },
5919 { 0, CODE_FOR_spe_evmwumiaa, "__builtin_spe_evmwumiaa", SPE_BUILTIN_EVMWUMIAA },
5920 { 0, CODE_FOR_spe_evmwumian, "__builtin_spe_evmwumian", SPE_BUILTIN_EVMWUMIAN },
5921 { 0, CODE_FOR_spe_evnand, "__builtin_spe_evnand", SPE_BUILTIN_EVNAND },
5922 { 0, CODE_FOR_spe_evnor, "__builtin_spe_evnor", SPE_BUILTIN_EVNOR },
5923 { 0, CODE_FOR_spe_evor, "__builtin_spe_evor", SPE_BUILTIN_EVOR },
5924 { 0, CODE_FOR_spe_evorc, "__builtin_spe_evorc", SPE_BUILTIN_EVORC },
5925 { 0, CODE_FOR_spe_evrlw, "__builtin_spe_evrlw", SPE_BUILTIN_EVRLW },
5926 { 0, CODE_FOR_spe_evslw, "__builtin_spe_evslw", SPE_BUILTIN_EVSLW },
5927 { 0, CODE_FOR_spe_evsrws, "__builtin_spe_evsrws", SPE_BUILTIN_EVSRWS },
5928 { 0, CODE_FOR_spe_evsrwu, "__builtin_spe_evsrwu", SPE_BUILTIN_EVSRWU },
5929 { 0, CODE_FOR_spe_evsubfw, "__builtin_spe_evsubfw", SPE_BUILTIN_EVSUBFW },
5931 /* SPE binary operations expecting a 5-bit unsigned literal. */
5932 { 0, CODE_FOR_spe_evaddiw, "__builtin_spe_evaddiw", SPE_BUILTIN_EVADDIW },
5934 { 0, CODE_FOR_spe_evrlwi, "__builtin_spe_evrlwi", SPE_BUILTIN_EVRLWI },
5935 { 0, CODE_FOR_spe_evslwi, "__builtin_spe_evslwi", SPE_BUILTIN_EVSLWI },
5936 { 0, CODE_FOR_spe_evsrwis, "__builtin_spe_evsrwis", SPE_BUILTIN_EVSRWIS },
5937 { 0, CODE_FOR_spe_evsrwiu, "__builtin_spe_evsrwiu", SPE_BUILTIN_EVSRWIU },
5938 { 0, CODE_FOR_spe_evsubifw, "__builtin_spe_evsubifw", SPE_BUILTIN_EVSUBIFW },
5939 { 0, CODE_FOR_spe_evmwhssfaa, "__builtin_spe_evmwhssfaa", SPE_BUILTIN_EVMWHSSFAA },
5940 { 0, CODE_FOR_spe_evmwhssmaa, "__builtin_spe_evmwhssmaa", SPE_BUILTIN_EVMWHSSMAA },
5941 { 0, CODE_FOR_spe_evmwhsmfaa, "__builtin_spe_evmwhsmfaa", SPE_BUILTIN_EVMWHSMFAA },
5942 { 0, CODE_FOR_spe_evmwhsmiaa, "__builtin_spe_evmwhsmiaa", SPE_BUILTIN_EVMWHSMIAA },
5943 { 0, CODE_FOR_spe_evmwhusiaa, "__builtin_spe_evmwhusiaa", SPE_BUILTIN_EVMWHUSIAA },
5944 { 0, CODE_FOR_spe_evmwhumiaa, "__builtin_spe_evmwhumiaa", SPE_BUILTIN_EVMWHUMIAA },
5945 { 0, CODE_FOR_spe_evmwhssfan, "__builtin_spe_evmwhssfan", SPE_BUILTIN_EVMWHSSFAN },
5946 { 0, CODE_FOR_spe_evmwhssian, "__builtin_spe_evmwhssian", SPE_BUILTIN_EVMWHSSIAN },
5947 { 0, CODE_FOR_spe_evmwhsmfan, "__builtin_spe_evmwhsmfan", SPE_BUILTIN_EVMWHSMFAN },
5948 { 0, CODE_FOR_spe_evmwhsmian, "__builtin_spe_evmwhsmian", SPE_BUILTIN_EVMWHSMIAN },
5949 { 0, CODE_FOR_spe_evmwhusian, "__builtin_spe_evmwhusian", SPE_BUILTIN_EVMWHUSIAN },
5950 { 0, CODE_FOR_spe_evmwhumian, "__builtin_spe_evmwhumian", SPE_BUILTIN_EVMWHUMIAN },
5951 { 0, CODE_FOR_spe_evmwhgssfaa, "__builtin_spe_evmwhgssfaa", SPE_BUILTIN_EVMWHGSSFAA },
5952 { 0, CODE_FOR_spe_evmwhgsmfaa, "__builtin_spe_evmwhgsmfaa", SPE_BUILTIN_EVMWHGSMFAA },
5953 { 0, CODE_FOR_spe_evmwhgsmiaa, "__builtin_spe_evmwhgsmiaa", SPE_BUILTIN_EVMWHGSMIAA },
5954 { 0, CODE_FOR_spe_evmwhgumiaa, "__builtin_spe_evmwhgumiaa", SPE_BUILTIN_EVMWHGUMIAA },
5955 { 0, CODE_FOR_spe_evmwhgssfan, "__builtin_spe_evmwhgssfan", SPE_BUILTIN_EVMWHGSSFAN },
5956 { 0, CODE_FOR_spe_evmwhgsmfan, "__builtin_spe_evmwhgsmfan", SPE_BUILTIN_EVMWHGSMFAN },
5957 { 0, CODE_FOR_spe_evmwhgsmian, "__builtin_spe_evmwhgsmian", SPE_BUILTIN_EVMWHGSMIAN },
5958 { 0, CODE_FOR_spe_evmwhgumian, "__builtin_spe_evmwhgumian", SPE_BUILTIN_EVMWHGUMIAN },
5959 { 0, CODE_FOR_spe_brinc, "__builtin_spe_brinc", SPE_BUILTIN_BRINC },
5961 /* Place-holder. Leave as last binary SPE builtin. */
5962 { 0, CODE_FOR_xorv2si3, "__builtin_spe_evxor", SPE_BUILTIN_EVXOR },
5965 /* AltiVec predicates. */
5967 struct builtin_description_predicates
5969 const unsigned int mask;
5970 const enum insn_code icode;
5972 const char *const name;
5973 const enum rs6000_builtins code;
5976 static const struct builtin_description_predicates bdesc_altivec_preds[] =
5978 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpbfp.", "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P },
5979 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpeqfp.", "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P },
5980 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgefp.", "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P },
5981 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgtfp.", "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P },
5982 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpequw.", "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P },
5983 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtsw.", "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P },
5984 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtuw.", "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P },
5985 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtuh.", "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P },
5986 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtsh.", "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P },
5987 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpequh.", "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P },
5988 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpequb.", "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P },
5989 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtsb.", "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P },
5990 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P }
5993 /* SPE predicates. */
5994 static struct builtin_description bdesc_spe_predicates[] =
5996 /* Place-holder. Leave as first. */
5997 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evcmpeq", SPE_BUILTIN_EVCMPEQ },
5998 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evcmpgts", SPE_BUILTIN_EVCMPGTS },
5999 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evcmpgtu", SPE_BUILTIN_EVCMPGTU },
6000 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evcmplts", SPE_BUILTIN_EVCMPLTS },
6001 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evcmpltu", SPE_BUILTIN_EVCMPLTU },
6002 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evfscmpeq", SPE_BUILTIN_EVFSCMPEQ },
6003 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evfscmpgt", SPE_BUILTIN_EVFSCMPGT },
6004 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evfscmplt", SPE_BUILTIN_EVFSCMPLT },
6005 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evfststeq", SPE_BUILTIN_EVFSTSTEQ },
6006 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evfststgt", SPE_BUILTIN_EVFSTSTGT },
6007 /* Place-holder. Leave as last. */
6008 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evfststlt", SPE_BUILTIN_EVFSTSTLT },
6011 /* SPE evsel predicates. */
6012 static struct builtin_description bdesc_spe_evsel[] =
6014 /* Place-holder. Leave as first. */
6015 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evsel_gts", SPE_BUILTIN_EVSEL_CMPGTS },
6016 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evsel_gtu", SPE_BUILTIN_EVSEL_CMPGTU },
6017 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evsel_lts", SPE_BUILTIN_EVSEL_CMPLTS },
6018 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evsel_ltu", SPE_BUILTIN_EVSEL_CMPLTU },
6019 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evsel_eq", SPE_BUILTIN_EVSEL_CMPEQ },
6020 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evsel_fsgt", SPE_BUILTIN_EVSEL_FSCMPGT },
6021 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evsel_fslt", SPE_BUILTIN_EVSEL_FSCMPLT },
6022 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evsel_fseq", SPE_BUILTIN_EVSEL_FSCMPEQ },
6023 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evsel_fststgt", SPE_BUILTIN_EVSEL_FSTSTGT },
6024 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evsel_fststlt", SPE_BUILTIN_EVSEL_FSTSTLT },
6025 /* Place-holder. Leave as last. */
6026 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evsel_fststeq", SPE_BUILTIN_EVSEL_FSTSTEQ },
6029 /* ABS* operations. */
6031 static const struct builtin_description bdesc_abs[] =
6033 { MASK_ALTIVEC, CODE_FOR_absv4si2, "__builtin_altivec_abs_v4si", ALTIVEC_BUILTIN_ABS_V4SI },
6034 { MASK_ALTIVEC, CODE_FOR_absv8hi2, "__builtin_altivec_abs_v8hi", ALTIVEC_BUILTIN_ABS_V8HI },
6035 { MASK_ALTIVEC, CODE_FOR_absv4sf2, "__builtin_altivec_abs_v4sf", ALTIVEC_BUILTIN_ABS_V4SF },
6036 { MASK_ALTIVEC, CODE_FOR_absv16qi2, "__builtin_altivec_abs_v16qi", ALTIVEC_BUILTIN_ABS_V16QI },
6037 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v4si, "__builtin_altivec_abss_v4si", ALTIVEC_BUILTIN_ABSS_V4SI },
6038 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v8hi, "__builtin_altivec_abss_v8hi", ALTIVEC_BUILTIN_ABSS_V8HI },
6039 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v16qi, "__builtin_altivec_abss_v16qi", ALTIVEC_BUILTIN_ABSS_V16QI }
6042 /* Simple unary operations: VECb = foo (unsigned literal) or VECb =
6045 static struct builtin_description bdesc_1arg[] =
6047 { MASK_ALTIVEC, CODE_FOR_altivec_vexptefp, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP },
6048 { MASK_ALTIVEC, CODE_FOR_altivec_vlogefp, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP },
6049 { MASK_ALTIVEC, CODE_FOR_altivec_vrefp, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP },
6050 { MASK_ALTIVEC, CODE_FOR_altivec_vrfim, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM },
6051 { MASK_ALTIVEC, CODE_FOR_altivec_vrfin, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN },
6052 { MASK_ALTIVEC, CODE_FOR_altivec_vrfip, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP },
6053 { MASK_ALTIVEC, CODE_FOR_ftruncv4sf2, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ },
6054 { MASK_ALTIVEC, CODE_FOR_altivec_vrsqrtefp, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP },
6055 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisb, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB },
6056 { MASK_ALTIVEC, CODE_FOR_altivec_vspltish, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH },
6057 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisw, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW },
6058 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsb, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB },
6059 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhpx, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX },
6060 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsh, "__builtin_altivec_vupkhsh", ALTIVEC_BUILTIN_VUPKHSH },
6061 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsb, "__builtin_altivec_vupklsb", ALTIVEC_BUILTIN_VUPKLSB },
6062 { MASK_ALTIVEC, CODE_FOR_altivec_vupklpx, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX },
6063 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsh, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH },
6065 /* The SPE unary builtins must start with SPE_BUILTIN_EVABS and
6066 end with SPE_BUILTIN_EVSUBFUSIAAW. */
6067 { 0, CODE_FOR_spe_evabs, "__builtin_spe_evabs", SPE_BUILTIN_EVABS },
6068 { 0, CODE_FOR_spe_evaddsmiaaw, "__builtin_spe_evaddsmiaaw", SPE_BUILTIN_EVADDSMIAAW },
6069 { 0, CODE_FOR_spe_evaddssiaaw, "__builtin_spe_evaddssiaaw", SPE_BUILTIN_EVADDSSIAAW },
6070 { 0, CODE_FOR_spe_evaddumiaaw, "__builtin_spe_evaddumiaaw", SPE_BUILTIN_EVADDUMIAAW },
6071 { 0, CODE_FOR_spe_evaddusiaaw, "__builtin_spe_evaddusiaaw", SPE_BUILTIN_EVADDUSIAAW },
6072 { 0, CODE_FOR_spe_evcntlsw, "__builtin_spe_evcntlsw", SPE_BUILTIN_EVCNTLSW },
6073 { 0, CODE_FOR_spe_evcntlzw, "__builtin_spe_evcntlzw", SPE_BUILTIN_EVCNTLZW },
6074 { 0, CODE_FOR_spe_evextsb, "__builtin_spe_evextsb", SPE_BUILTIN_EVEXTSB },
6075 { 0, CODE_FOR_spe_evextsh, "__builtin_spe_evextsh", SPE_BUILTIN_EVEXTSH },
6076 { 0, CODE_FOR_spe_evfsabs, "__builtin_spe_evfsabs", SPE_BUILTIN_EVFSABS },
6077 { 0, CODE_FOR_spe_evfscfsf, "__builtin_spe_evfscfsf", SPE_BUILTIN_EVFSCFSF },
6078 { 0, CODE_FOR_spe_evfscfsi, "__builtin_spe_evfscfsi", SPE_BUILTIN_EVFSCFSI },
6079 { 0, CODE_FOR_spe_evfscfuf, "__builtin_spe_evfscfuf", SPE_BUILTIN_EVFSCFUF },
6080 { 0, CODE_FOR_spe_evfscfui, "__builtin_spe_evfscfui", SPE_BUILTIN_EVFSCFUI },
6081 { 0, CODE_FOR_spe_evfsctsf, "__builtin_spe_evfsctsf", SPE_BUILTIN_EVFSCTSF },
6082 { 0, CODE_FOR_spe_evfsctsi, "__builtin_spe_evfsctsi", SPE_BUILTIN_EVFSCTSI },
6083 { 0, CODE_FOR_spe_evfsctsiz, "__builtin_spe_evfsctsiz", SPE_BUILTIN_EVFSCTSIZ },
6084 { 0, CODE_FOR_spe_evfsctuf, "__builtin_spe_evfsctuf", SPE_BUILTIN_EVFSCTUF },
6085 { 0, CODE_FOR_spe_evfsctui, "__builtin_spe_evfsctui", SPE_BUILTIN_EVFSCTUI },
6086 { 0, CODE_FOR_spe_evfsctuiz, "__builtin_spe_evfsctuiz", SPE_BUILTIN_EVFSCTUIZ },
6087 { 0, CODE_FOR_spe_evfsnabs, "__builtin_spe_evfsnabs", SPE_BUILTIN_EVFSNABS },
6088 { 0, CODE_FOR_spe_evfsneg, "__builtin_spe_evfsneg", SPE_BUILTIN_EVFSNEG },
6089 { 0, CODE_FOR_spe_evmra, "__builtin_spe_evmra", SPE_BUILTIN_EVMRA },
6090 { 0, CODE_FOR_negv2si2, "__builtin_spe_evneg", SPE_BUILTIN_EVNEG },
6091 { 0, CODE_FOR_spe_evrndw, "__builtin_spe_evrndw", SPE_BUILTIN_EVRNDW },
6092 { 0, CODE_FOR_spe_evsubfsmiaaw, "__builtin_spe_evsubfsmiaaw", SPE_BUILTIN_EVSUBFSMIAAW },
6093 { 0, CODE_FOR_spe_evsubfssiaaw, "__builtin_spe_evsubfssiaaw", SPE_BUILTIN_EVSUBFSSIAAW },
6094 { 0, CODE_FOR_spe_evsubfumiaaw, "__builtin_spe_evsubfumiaaw", SPE_BUILTIN_EVSUBFUMIAAW },
6096 /* Place-holder. Leave as last unary SPE builtin. */
6097 { 0, CODE_FOR_spe_evsubfusiaaw, "__builtin_spe_evsubfusiaaw", SPE_BUILTIN_EVSUBFUSIAAW },
6101 rs6000_expand_unop_builtin (enum insn_code icode, tree arglist, rtx target)
6104 tree arg0 = TREE_VALUE (arglist);
6105 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6106 enum machine_mode tmode = insn_data[icode].operand[0].mode;
6107 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
6109 if (icode == CODE_FOR_nothing)
6110 /* Builtin not supported on this processor. */
6113 /* If we got invalid arguments bail out before generating bad rtl. */
6114 if (arg0 == error_mark_node)
6117 if (icode == CODE_FOR_altivec_vspltisb
6118 || icode == CODE_FOR_altivec_vspltish
6119 || icode == CODE_FOR_altivec_vspltisw
6120 || icode == CODE_FOR_spe_evsplatfi
6121 || icode == CODE_FOR_spe_evsplati)
6123 /* Only allow 5-bit *signed* literals. */
6124 if (GET_CODE (op0) != CONST_INT
6125 || INTVAL (op0) > 0x1f
6126 || INTVAL (op0) < -0x1f)
6128 error ("argument 1 must be a 5-bit signed literal");
6134 || GET_MODE (target) != tmode
6135 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6136 target = gen_reg_rtx (tmode);
6138 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6139 op0 = copy_to_mode_reg (mode0, op0);
6141 pat = GEN_FCN (icode) (target, op0);
6150 altivec_expand_abs_builtin (enum insn_code icode, tree arglist, rtx target)
6152 rtx pat, scratch1, scratch2;
6153 tree arg0 = TREE_VALUE (arglist);
6154 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6155 enum machine_mode tmode = insn_data[icode].operand[0].mode;
6156 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
6158 /* If we have invalid arguments, bail out before generating bad rtl. */
6159 if (arg0 == error_mark_node)
6163 || GET_MODE (target) != tmode
6164 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6165 target = gen_reg_rtx (tmode);
6167 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6168 op0 = copy_to_mode_reg (mode0, op0);
6170 scratch1 = gen_reg_rtx (mode0);
6171 scratch2 = gen_reg_rtx (mode0);
6173 pat = GEN_FCN (icode) (target, op0, scratch1, scratch2);
6182 rs6000_expand_binop_builtin (enum insn_code icode, tree arglist, rtx target)
6185 tree arg0 = TREE_VALUE (arglist);
6186 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
6187 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6188 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6189 enum machine_mode tmode = insn_data[icode].operand[0].mode;
6190 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
6191 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
6193 if (icode == CODE_FOR_nothing)
6194 /* Builtin not supported on this processor. */
6197 /* If we got invalid arguments bail out before generating bad rtl. */
6198 if (arg0 == error_mark_node || arg1 == error_mark_node)
6201 if (icode == CODE_FOR_altivec_vcfux
6202 || icode == CODE_FOR_altivec_vcfsx
6203 || icode == CODE_FOR_altivec_vctsxs
6204 || icode == CODE_FOR_altivec_vctuxs
6205 || icode == CODE_FOR_altivec_vspltb
6206 || icode == CODE_FOR_altivec_vsplth
6207 || icode == CODE_FOR_altivec_vspltw
6208 || icode == CODE_FOR_spe_evaddiw
6209 || icode == CODE_FOR_spe_evldd
6210 || icode == CODE_FOR_spe_evldh
6211 || icode == CODE_FOR_spe_evldw
6212 || icode == CODE_FOR_spe_evlhhesplat
6213 || icode == CODE_FOR_spe_evlhhossplat
6214 || icode == CODE_FOR_spe_evlhhousplat
6215 || icode == CODE_FOR_spe_evlwhe
6216 || icode == CODE_FOR_spe_evlwhos
6217 || icode == CODE_FOR_spe_evlwhou
6218 || icode == CODE_FOR_spe_evlwhsplat
6219 || icode == CODE_FOR_spe_evlwwsplat
6220 || icode == CODE_FOR_spe_evrlwi
6221 || icode == CODE_FOR_spe_evslwi
6222 || icode == CODE_FOR_spe_evsrwis
6223 || icode == CODE_FOR_spe_evsubifw
6224 || icode == CODE_FOR_spe_evsrwiu)
6226 /* Only allow 5-bit unsigned literals. */
6228 if (TREE_CODE (arg1) != INTEGER_CST
6229 || TREE_INT_CST_LOW (arg1) & ~0x1f)
6231 error ("argument 2 must be a 5-bit unsigned literal");
6237 || GET_MODE (target) != tmode
6238 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6239 target = gen_reg_rtx (tmode);
6241 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6242 op0 = copy_to_mode_reg (mode0, op0);
6243 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
6244 op1 = copy_to_mode_reg (mode1, op1);
6246 pat = GEN_FCN (icode) (target, op0, op1);
6255 altivec_expand_predicate_builtin (enum insn_code icode, const char *opcode,
6256 tree arglist, rtx target)
6259 tree cr6_form = TREE_VALUE (arglist);
6260 tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
6261 tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6262 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6263 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6264 enum machine_mode tmode = SImode;
6265 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
6266 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
6269 if (TREE_CODE (cr6_form) != INTEGER_CST)
6271 error ("argument 1 of __builtin_altivec_predicate must be a constant");
6275 cr6_form_int = TREE_INT_CST_LOW (cr6_form);
6280 /* If we have invalid arguments, bail out before generating bad rtl. */
6281 if (arg0 == error_mark_node || arg1 == error_mark_node)
6285 || GET_MODE (target) != tmode
6286 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6287 target = gen_reg_rtx (tmode);
6289 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6290 op0 = copy_to_mode_reg (mode0, op0);
6291 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
6292 op1 = copy_to_mode_reg (mode1, op1);
6294 scratch = gen_reg_rtx (mode0);
6296 pat = GEN_FCN (icode) (scratch, op0, op1,
6297 gen_rtx_SYMBOL_REF (Pmode, opcode));
6302 /* The vec_any* and vec_all* predicates use the same opcodes for two
6303 different operations, but the bits in CR6 will be different
6304 depending on what information we want. So we have to play tricks
6305 with CR6 to get the right bits out.
6307 If you think this is disgusting, look at the specs for the
6308 AltiVec predicates. */
6310 switch (cr6_form_int)
6313 emit_insn (gen_cr6_test_for_zero (target));
6316 emit_insn (gen_cr6_test_for_zero_reverse (target));
6319 emit_insn (gen_cr6_test_for_lt (target));
6322 emit_insn (gen_cr6_test_for_lt_reverse (target));
6325 error ("argument 1 of __builtin_altivec_predicate is out of range");
6333 altivec_expand_lv_builtin (enum insn_code icode, tree arglist, rtx target)
6336 tree arg0 = TREE_VALUE (arglist);
6337 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
6338 enum machine_mode tmode = insn_data[icode].operand[0].mode;
6339 enum machine_mode mode0 = Pmode;
6340 enum machine_mode mode1 = Pmode;
6341 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6342 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6344 if (icode == CODE_FOR_nothing)
6345 /* Builtin not supported on this processor. */
6348 /* If we got invalid arguments bail out before generating bad rtl. */
6349 if (arg0 == error_mark_node || arg1 == error_mark_node)
6353 || GET_MODE (target) != tmode
6354 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6355 target = gen_reg_rtx (tmode);
6357 op1 = copy_to_mode_reg (mode1, op1);
6359 if (op0 == const0_rtx)
6361 addr = gen_rtx_MEM (tmode, op1);
6365 op0 = copy_to_mode_reg (mode0, op0);
6366 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op0, op1));
6369 pat = GEN_FCN (icode) (target, addr);
6379 spe_expand_stv_builtin (enum insn_code icode, tree arglist)
6381 tree arg0 = TREE_VALUE (arglist);
6382 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
6383 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6384 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6385 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6386 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
6388 enum machine_mode mode0 = insn_data[icode].operand[0].mode;
6389 enum machine_mode mode1 = insn_data[icode].operand[1].mode;
6390 enum machine_mode mode2 = insn_data[icode].operand[2].mode;
6392 /* Invalid arguments. Bail before doing anything stoopid! */
6393 if (arg0 == error_mark_node
6394 || arg1 == error_mark_node
6395 || arg2 == error_mark_node)
6398 if (! (*insn_data[icode].operand[2].predicate) (op0, mode2))
6399 op0 = copy_to_mode_reg (mode2, op0);
6400 if (! (*insn_data[icode].operand[0].predicate) (op1, mode0))
6401 op1 = copy_to_mode_reg (mode0, op1);
6402 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
6403 op2 = copy_to_mode_reg (mode1, op2);
6405 pat = GEN_FCN (icode) (op1, op2, op0);
6412 altivec_expand_stv_builtin (enum insn_code icode, tree arglist)
6414 tree arg0 = TREE_VALUE (arglist);
6415 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
6416 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6417 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6418 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6419 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
6421 enum machine_mode tmode = insn_data[icode].operand[0].mode;
6422 enum machine_mode mode1 = Pmode;
6423 enum machine_mode mode2 = Pmode;
6425 /* Invalid arguments. Bail before doing anything stoopid! */
6426 if (arg0 == error_mark_node
6427 || arg1 == error_mark_node
6428 || arg2 == error_mark_node)
6431 if (! (*insn_data[icode].operand[1].predicate) (op0, tmode))
6432 op0 = copy_to_mode_reg (tmode, op0);
6434 op2 = copy_to_mode_reg (mode2, op2);
6436 if (op1 == const0_rtx)
6438 addr = gen_rtx_MEM (tmode, op2);
6442 op1 = copy_to_mode_reg (mode1, op1);
6443 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op1, op2));
6446 pat = GEN_FCN (icode) (addr, op0);
6453 rs6000_expand_ternop_builtin (enum insn_code icode, tree arglist, rtx target)
6456 tree arg0 = TREE_VALUE (arglist);
6457 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
6458 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6459 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6460 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6461 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
6462 enum machine_mode tmode = insn_data[icode].operand[0].mode;
6463 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
6464 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
6465 enum machine_mode mode2 = insn_data[icode].operand[3].mode;
6467 if (icode == CODE_FOR_nothing)
6468 /* Builtin not supported on this processor. */
6471 /* If we got invalid arguments bail out before generating bad rtl. */
6472 if (arg0 == error_mark_node
6473 || arg1 == error_mark_node
6474 || arg2 == error_mark_node)
6477 if (icode == CODE_FOR_altivec_vsldoi_4sf
6478 || icode == CODE_FOR_altivec_vsldoi_4si
6479 || icode == CODE_FOR_altivec_vsldoi_8hi
6480 || icode == CODE_FOR_altivec_vsldoi_16qi)
6482 /* Only allow 4-bit unsigned literals. */
6484 if (TREE_CODE (arg2) != INTEGER_CST
6485 || TREE_INT_CST_LOW (arg2) & ~0xf)
6487 error ("argument 3 must be a 4-bit unsigned literal");
6493 || GET_MODE (target) != tmode
6494 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6495 target = gen_reg_rtx (tmode);
6497 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6498 op0 = copy_to_mode_reg (mode0, op0);
6499 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
6500 op1 = copy_to_mode_reg (mode1, op1);
6501 if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
6502 op2 = copy_to_mode_reg (mode2, op2);
6504 pat = GEN_FCN (icode) (target, op0, op1, op2);
6512 /* Expand the lvx builtins. */
6514 altivec_expand_ld_builtin (tree exp, rtx target, bool *expandedp)
6516 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6517 tree arglist = TREE_OPERAND (exp, 1);
6518 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
6520 enum machine_mode tmode, mode0;
6522 enum insn_code icode;
6526 case ALTIVEC_BUILTIN_LD_INTERNAL_16qi:
6527 icode = CODE_FOR_altivec_lvx_16qi;
6529 case ALTIVEC_BUILTIN_LD_INTERNAL_8hi:
6530 icode = CODE_FOR_altivec_lvx_8hi;
6532 case ALTIVEC_BUILTIN_LD_INTERNAL_4si:
6533 icode = CODE_FOR_altivec_lvx_4si;
6535 case ALTIVEC_BUILTIN_LD_INTERNAL_4sf:
6536 icode = CODE_FOR_altivec_lvx_4sf;
6545 arg0 = TREE_VALUE (arglist);
6546 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6547 tmode = insn_data[icode].operand[0].mode;
6548 mode0 = insn_data[icode].operand[1].mode;
6551 || GET_MODE (target) != tmode
6552 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6553 target = gen_reg_rtx (tmode);
6555 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6556 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
6558 pat = GEN_FCN (icode) (target, op0);
6565 /* Expand the stvx builtins. */
6567 altivec_expand_st_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
6570 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6571 tree arglist = TREE_OPERAND (exp, 1);
6572 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
6574 enum machine_mode mode0, mode1;
6576 enum insn_code icode;
6580 case ALTIVEC_BUILTIN_ST_INTERNAL_16qi:
6581 icode = CODE_FOR_altivec_stvx_16qi;
6583 case ALTIVEC_BUILTIN_ST_INTERNAL_8hi:
6584 icode = CODE_FOR_altivec_stvx_8hi;
6586 case ALTIVEC_BUILTIN_ST_INTERNAL_4si:
6587 icode = CODE_FOR_altivec_stvx_4si;
6589 case ALTIVEC_BUILTIN_ST_INTERNAL_4sf:
6590 icode = CODE_FOR_altivec_stvx_4sf;
6597 arg0 = TREE_VALUE (arglist);
6598 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
6599 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6600 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6601 mode0 = insn_data[icode].operand[0].mode;
6602 mode1 = insn_data[icode].operand[1].mode;
6604 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
6605 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
6606 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
6607 op1 = copy_to_mode_reg (mode1, op1);
6609 pat = GEN_FCN (icode) (op0, op1);
6617 /* Expand the dst builtins. */
6619 altivec_expand_dst_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
6622 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6623 tree arglist = TREE_OPERAND (exp, 1);
6624 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
6625 tree arg0, arg1, arg2;
6626 enum machine_mode mode0, mode1, mode2;
6627 rtx pat, op0, op1, op2;
6628 struct builtin_description *d;
6633 /* Handle DST variants. */
6634 d = (struct builtin_description *) bdesc_dst;
6635 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
6636 if (d->code == fcode)
6638 arg0 = TREE_VALUE (arglist);
6639 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
6640 arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6641 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6642 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6643 op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
6644 mode0 = insn_data[d->icode].operand[0].mode;
6645 mode1 = insn_data[d->icode].operand[1].mode;
6646 mode2 = insn_data[d->icode].operand[2].mode;
6648 /* Invalid arguments, bail out before generating bad rtl. */
6649 if (arg0 == error_mark_node
6650 || arg1 == error_mark_node
6651 || arg2 == error_mark_node)
6656 if (TREE_CODE (arg2) != INTEGER_CST
6657 || TREE_INT_CST_LOW (arg2) & ~0x3)
6659 error ("argument to `%s' must be a 2-bit unsigned literal", d->name);
6663 if (! (*insn_data[d->icode].operand[0].predicate) (op0, mode0))
6664 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
6665 if (! (*insn_data[d->icode].operand[1].predicate) (op1, mode1))
6666 op1 = copy_to_mode_reg (mode1, op1);
6668 pat = GEN_FCN (d->icode) (op0, op1, op2);
6678 /* Expand the builtin in EXP and store the result in TARGET. Store
6679 true in *EXPANDEDP if we found a builtin to expand. */
6681 altivec_expand_builtin (tree exp, rtx target, bool *expandedp)
6683 struct builtin_description *d;
6684 struct builtin_description_predicates *dp;
6686 enum insn_code icode;
6687 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6688 tree arglist = TREE_OPERAND (exp, 1);
6691 enum machine_mode tmode, mode0;
6692 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
6694 target = altivec_expand_ld_builtin (exp, target, expandedp);
6698 target = altivec_expand_st_builtin (exp, target, expandedp);
6702 target = altivec_expand_dst_builtin (exp, target, expandedp);
6710 case ALTIVEC_BUILTIN_STVX:
6711 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx, arglist);
6712 case ALTIVEC_BUILTIN_STVEBX:
6713 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx, arglist);
6714 case ALTIVEC_BUILTIN_STVEHX:
6715 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx, arglist);
6716 case ALTIVEC_BUILTIN_STVEWX:
6717 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvewx, arglist);
6718 case ALTIVEC_BUILTIN_STVXL:
6719 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvxl, arglist);
6721 case ALTIVEC_BUILTIN_MFVSCR:
6722 icode = CODE_FOR_altivec_mfvscr;
6723 tmode = insn_data[icode].operand[0].mode;
6726 || GET_MODE (target) != tmode
6727 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6728 target = gen_reg_rtx (tmode);
6730 pat = GEN_FCN (icode) (target);
6736 case ALTIVEC_BUILTIN_MTVSCR:
6737 icode = CODE_FOR_altivec_mtvscr;
6738 arg0 = TREE_VALUE (arglist);
6739 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6740 mode0 = insn_data[icode].operand[0].mode;
6742 /* If we got invalid arguments bail out before generating bad rtl. */
6743 if (arg0 == error_mark_node)
6746 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
6747 op0 = copy_to_mode_reg (mode0, op0);
6749 pat = GEN_FCN (icode) (op0);
6754 case ALTIVEC_BUILTIN_DSSALL:
6755 emit_insn (gen_altivec_dssall ());
6758 case ALTIVEC_BUILTIN_DSS:
6759 icode = CODE_FOR_altivec_dss;
6760 arg0 = TREE_VALUE (arglist);
6762 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6763 mode0 = insn_data[icode].operand[0].mode;
6765 /* If we got invalid arguments bail out before generating bad rtl. */
6766 if (arg0 == error_mark_node)
6769 if (TREE_CODE (arg0) != INTEGER_CST
6770 || TREE_INT_CST_LOW (arg0) & ~0x3)
6772 error ("argument to dss must be a 2-bit unsigned literal");
6776 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
6777 op0 = copy_to_mode_reg (mode0, op0);
6779 emit_insn (gen_altivec_dss (op0));
6782 case ALTIVEC_BUILTIN_COMPILETIME_ERROR:
6783 arg0 = TREE_VALUE (arglist);
6784 while (TREE_CODE (arg0) == NOP_EXPR || TREE_CODE (arg0) == ADDR_EXPR
6785 || TREE_CODE (arg0) == ARRAY_REF)
6786 arg0 = TREE_OPERAND (arg0, 0);
6787 error ("invalid parameter combination for `%s' AltiVec intrinsic",
6788 TREE_STRING_POINTER (arg0));
6793 /* Expand abs* operations. */
6794 d = (struct builtin_description *) bdesc_abs;
6795 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
6796 if (d->code == fcode)
6797 return altivec_expand_abs_builtin (d->icode, arglist, target);
6799 /* Expand the AltiVec predicates. */
6800 dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
6801 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
6802 if (dp->code == fcode)
6803 return altivec_expand_predicate_builtin (dp->icode, dp->opcode, arglist, target);
6805 /* LV* are funky. We initialized them differently. */
6808 case ALTIVEC_BUILTIN_LVSL:
6809 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvsl,
6811 case ALTIVEC_BUILTIN_LVSR:
6812 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvsr,
6814 case ALTIVEC_BUILTIN_LVEBX:
6815 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvebx,
6817 case ALTIVEC_BUILTIN_LVEHX:
6818 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvehx,
6820 case ALTIVEC_BUILTIN_LVEWX:
6821 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvewx,
6823 case ALTIVEC_BUILTIN_LVXL:
6824 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvxl,
6826 case ALTIVEC_BUILTIN_LVX:
6827 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvx,
6838 /* Binops that need to be initialized manually, but can be expanded
6839 automagically by rs6000_expand_binop_builtin. */
6840 static struct builtin_description bdesc_2arg_spe[] =
6842 { 0, CODE_FOR_spe_evlddx, "__builtin_spe_evlddx", SPE_BUILTIN_EVLDDX },
6843 { 0, CODE_FOR_spe_evldwx, "__builtin_spe_evldwx", SPE_BUILTIN_EVLDWX },
6844 { 0, CODE_FOR_spe_evldhx, "__builtin_spe_evldhx", SPE_BUILTIN_EVLDHX },
6845 { 0, CODE_FOR_spe_evlwhex, "__builtin_spe_evlwhex", SPE_BUILTIN_EVLWHEX },
6846 { 0, CODE_FOR_spe_evlwhoux, "__builtin_spe_evlwhoux", SPE_BUILTIN_EVLWHOUX },
6847 { 0, CODE_FOR_spe_evlwhosx, "__builtin_spe_evlwhosx", SPE_BUILTIN_EVLWHOSX },
6848 { 0, CODE_FOR_spe_evlwwsplatx, "__builtin_spe_evlwwsplatx", SPE_BUILTIN_EVLWWSPLATX },
6849 { 0, CODE_FOR_spe_evlwhsplatx, "__builtin_spe_evlwhsplatx", SPE_BUILTIN_EVLWHSPLATX },
6850 { 0, CODE_FOR_spe_evlhhesplatx, "__builtin_spe_evlhhesplatx", SPE_BUILTIN_EVLHHESPLATX },
6851 { 0, CODE_FOR_spe_evlhhousplatx, "__builtin_spe_evlhhousplatx", SPE_BUILTIN_EVLHHOUSPLATX },
6852 { 0, CODE_FOR_spe_evlhhossplatx, "__builtin_spe_evlhhossplatx", SPE_BUILTIN_EVLHHOSSPLATX },
6853 { 0, CODE_FOR_spe_evldd, "__builtin_spe_evldd", SPE_BUILTIN_EVLDD },
6854 { 0, CODE_FOR_spe_evldw, "__builtin_spe_evldw", SPE_BUILTIN_EVLDW },
6855 { 0, CODE_FOR_spe_evldh, "__builtin_spe_evldh", SPE_BUILTIN_EVLDH },
6856 { 0, CODE_FOR_spe_evlwhe, "__builtin_spe_evlwhe", SPE_BUILTIN_EVLWHE },
6857 { 0, CODE_FOR_spe_evlwhou, "__builtin_spe_evlwhou", SPE_BUILTIN_EVLWHOU },
6858 { 0, CODE_FOR_spe_evlwhos, "__builtin_spe_evlwhos", SPE_BUILTIN_EVLWHOS },
6859 { 0, CODE_FOR_spe_evlwwsplat, "__builtin_spe_evlwwsplat", SPE_BUILTIN_EVLWWSPLAT },
6860 { 0, CODE_FOR_spe_evlwhsplat, "__builtin_spe_evlwhsplat", SPE_BUILTIN_EVLWHSPLAT },
6861 { 0, CODE_FOR_spe_evlhhesplat, "__builtin_spe_evlhhesplat", SPE_BUILTIN_EVLHHESPLAT },
6862 { 0, CODE_FOR_spe_evlhhousplat, "__builtin_spe_evlhhousplat", SPE_BUILTIN_EVLHHOUSPLAT },
6863 { 0, CODE_FOR_spe_evlhhossplat, "__builtin_spe_evlhhossplat", SPE_BUILTIN_EVLHHOSSPLAT }
6866 /* Expand the builtin in EXP and store the result in TARGET. Store
6867 true in *EXPANDEDP if we found a builtin to expand.
6869 This expands the SPE builtins that are not simple unary and binary
6872 spe_expand_builtin (tree exp, rtx target, bool *expandedp)
6874 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6875 tree arglist = TREE_OPERAND (exp, 1);
6877 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
6878 enum insn_code icode;
6879 enum machine_mode tmode, mode0;
6881 struct builtin_description *d;
6886 /* Syntax check for a 5-bit unsigned immediate. */
6889 case SPE_BUILTIN_EVSTDD:
6890 case SPE_BUILTIN_EVSTDH:
6891 case SPE_BUILTIN_EVSTDW:
6892 case SPE_BUILTIN_EVSTWHE:
6893 case SPE_BUILTIN_EVSTWHO:
6894 case SPE_BUILTIN_EVSTWWE:
6895 case SPE_BUILTIN_EVSTWWO:
6896 arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6897 if (TREE_CODE (arg1) != INTEGER_CST
6898 || TREE_INT_CST_LOW (arg1) & ~0x1f)
6900 error ("argument 2 must be a 5-bit unsigned literal");
6908 /* The evsplat*i instructions are not quite generic. */
6911 case SPE_BUILTIN_EVSPLATFI:
6912 return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplatfi,
6914 case SPE_BUILTIN_EVSPLATI:
6915 return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplati,
6921 d = (struct builtin_description *) bdesc_2arg_spe;
6922 for (i = 0; i < ARRAY_SIZE (bdesc_2arg_spe); ++i, ++d)
6923 if (d->code == fcode)
6924 return rs6000_expand_binop_builtin (d->icode, arglist, target);
6926 d = (struct builtin_description *) bdesc_spe_predicates;
6927 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, ++d)
6928 if (d->code == fcode)
6929 return spe_expand_predicate_builtin (d->icode, arglist, target);
6931 d = (struct builtin_description *) bdesc_spe_evsel;
6932 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, ++d)
6933 if (d->code == fcode)
6934 return spe_expand_evsel_builtin (d->icode, arglist, target);
6938 case SPE_BUILTIN_EVSTDDX:
6939 return spe_expand_stv_builtin (CODE_FOR_spe_evstddx, arglist);
6940 case SPE_BUILTIN_EVSTDHX:
6941 return spe_expand_stv_builtin (CODE_FOR_spe_evstdhx, arglist);
6942 case SPE_BUILTIN_EVSTDWX:
6943 return spe_expand_stv_builtin (CODE_FOR_spe_evstdwx, arglist);
6944 case SPE_BUILTIN_EVSTWHEX:
6945 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhex, arglist);
6946 case SPE_BUILTIN_EVSTWHOX:
6947 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhox, arglist);
6948 case SPE_BUILTIN_EVSTWWEX:
6949 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwex, arglist);
6950 case SPE_BUILTIN_EVSTWWOX:
6951 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwox, arglist);
6952 case SPE_BUILTIN_EVSTDD:
6953 return spe_expand_stv_builtin (CODE_FOR_spe_evstdd, arglist);
6954 case SPE_BUILTIN_EVSTDH:
6955 return spe_expand_stv_builtin (CODE_FOR_spe_evstdh, arglist);
6956 case SPE_BUILTIN_EVSTDW:
6957 return spe_expand_stv_builtin (CODE_FOR_spe_evstdw, arglist);
6958 case SPE_BUILTIN_EVSTWHE:
6959 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhe, arglist);
6960 case SPE_BUILTIN_EVSTWHO:
6961 return spe_expand_stv_builtin (CODE_FOR_spe_evstwho, arglist);
6962 case SPE_BUILTIN_EVSTWWE:
6963 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwe, arglist);
6964 case SPE_BUILTIN_EVSTWWO:
6965 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwo, arglist);
6966 case SPE_BUILTIN_MFSPEFSCR:
6967 icode = CODE_FOR_spe_mfspefscr;
6968 tmode = insn_data[icode].operand[0].mode;
6971 || GET_MODE (target) != tmode
6972 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6973 target = gen_reg_rtx (tmode);
6975 pat = GEN_FCN (icode) (target);
6980 case SPE_BUILTIN_MTSPEFSCR:
6981 icode = CODE_FOR_spe_mtspefscr;
6982 arg0 = TREE_VALUE (arglist);
6983 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6984 mode0 = insn_data[icode].operand[0].mode;
6986 if (arg0 == error_mark_node)
6989 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
6990 op0 = copy_to_mode_reg (mode0, op0);
6992 pat = GEN_FCN (icode) (op0);
7005 spe_expand_predicate_builtin (enum insn_code icode, tree arglist, rtx target)
7007 rtx pat, scratch, tmp;
7008 tree form = TREE_VALUE (arglist);
7009 tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
7010 tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
7011 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
7012 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
7013 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7014 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
7018 if (TREE_CODE (form) != INTEGER_CST)
7020 error ("argument 1 of __builtin_spe_predicate must be a constant");
7024 form_int = TREE_INT_CST_LOW (form);
7029 if (arg0 == error_mark_node || arg1 == error_mark_node)
7033 || GET_MODE (target) != SImode
7034 || ! (*insn_data[icode].operand[0].predicate) (target, SImode))
7035 target = gen_reg_rtx (SImode);
7037 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7038 op0 = copy_to_mode_reg (mode0, op0);
7039 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
7040 op1 = copy_to_mode_reg (mode1, op1);
7042 scratch = gen_reg_rtx (CCmode);
7044 pat = GEN_FCN (icode) (scratch, op0, op1);
7049 /* There are 4 variants for each predicate: _any_, _all_, _upper_,
7050 _lower_. We use one compare, but look in different bits of the
7051 CR for each variant.
7053 There are 2 elements in each SPE simd type (upper/lower). The CR
7054 bits are set as follows:
7056 BIT0 | BIT 1 | BIT 2 | BIT 3
7057 U | L | (U | L) | (U & L)
7059 So, for an "all" relationship, BIT 3 would be set.
7060 For an "any" relationship, BIT 2 would be set. Etc.
7062 Following traditional nomenclature, these bits map to:
7064 BIT0 | BIT 1 | BIT 2 | BIT 3
7067 Later, we will generate rtl to look in the LT/EQ/EQ/OV bits.
7072 /* All variant. OV bit. */
7074 /* We need to get to the OV bit, which is the ORDERED bit. We
7075 could generate (ordered:SI (reg:CC xx) (const_int 0)), but
7076 that's ugly and will trigger a validate_condition_mode abort.
7077 So let's just use another pattern. */
7078 emit_insn (gen_move_from_CR_ov_bit (target, scratch));
7080 /* Any variant. EQ bit. */
7084 /* Upper variant. LT bit. */
7088 /* Lower variant. GT bit. */
7093 error ("argument 1 of __builtin_spe_predicate is out of range");
7097 tmp = gen_rtx_fmt_ee (code, SImode, scratch, const0_rtx);
7098 emit_move_insn (target, tmp);
7103 /* The evsel builtins look like this:
7105 e = __builtin_spe_evsel_OP (a, b, c, d);
7109 e[upper] = a[upper] *OP* b[upper] ? c[upper] : d[upper];
7110 e[lower] = a[lower] *OP* b[lower] ? c[lower] : d[lower];
7114 spe_expand_evsel_builtin (enum insn_code icode, tree arglist, rtx target)
7117 tree arg0 = TREE_VALUE (arglist);
7118 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
7119 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
7120 tree arg3 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arglist))));
7121 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
7122 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
7123 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
7124 rtx op3 = expand_expr (arg3, NULL_RTX, VOIDmode, 0);
7125 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7126 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
7131 if (arg0 == error_mark_node || arg1 == error_mark_node
7132 || arg2 == error_mark_node || arg3 == error_mark_node)
7136 || GET_MODE (target) != mode0
7137 || ! (*insn_data[icode].operand[0].predicate) (target, mode0))
7138 target = gen_reg_rtx (mode0);
7140 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7141 op0 = copy_to_mode_reg (mode0, op0);
7142 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
7143 op1 = copy_to_mode_reg (mode0, op1);
7144 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
7145 op2 = copy_to_mode_reg (mode0, op2);
7146 if (! (*insn_data[icode].operand[1].predicate) (op3, mode1))
7147 op3 = copy_to_mode_reg (mode0, op3);
7149 /* Generate the compare. */
7150 scratch = gen_reg_rtx (CCmode);
7151 pat = GEN_FCN (icode) (scratch, op0, op1);
7156 if (mode0 == V2SImode)
7157 emit_insn (gen_spe_evsel (target, op2, op3, scratch));
7159 emit_insn (gen_spe_evsel_fs (target, op2, op3, scratch));
7164 /* Expand an expression EXP that calls a built-in function,
7165 with result going to TARGET if that's convenient
7166 (and in mode MODE if that's convenient).
7167 SUBTARGET may be used as the target for computing one of EXP's operands.
7168 IGNORE is nonzero if the value is to be ignored. */
7171 rs6000_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
7172 enum machine_mode mode ATTRIBUTE_UNUSED,
7173 int ignore ATTRIBUTE_UNUSED)
7175 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7176 tree arglist = TREE_OPERAND (exp, 1);
7177 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
7178 struct builtin_description *d;
7185 ret = altivec_expand_builtin (exp, target, &success);
7192 ret = spe_expand_builtin (exp, target, &success);
7198 if (TARGET_ALTIVEC || TARGET_SPE)
7200 /* Handle simple unary operations. */
7201 d = (struct builtin_description *) bdesc_1arg;
7202 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
7203 if (d->code == fcode)
7204 return rs6000_expand_unop_builtin (d->icode, arglist, target);
7206 /* Handle simple binary operations. */
7207 d = (struct builtin_description *) bdesc_2arg;
7208 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
7209 if (d->code == fcode)
7210 return rs6000_expand_binop_builtin (d->icode, arglist, target);
7212 /* Handle simple ternary operations. */
7213 d = (struct builtin_description *) bdesc_3arg;
7214 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
7215 if (d->code == fcode)
7216 return rs6000_expand_ternop_builtin (d->icode, arglist, target);
7224 build_opaque_vector_type (tree node, int nunits)
7226 node = copy_node (node);
7227 TYPE_MAIN_VARIANT (node) = node;
7228 return build_vector_type (node, nunits);
7232 rs6000_init_builtins (void)
7234 V2SI_type_node = build_vector_type (intSI_type_node, 2);
7235 V2SF_type_node = build_vector_type (float_type_node, 2);
7236 V4HI_type_node = build_vector_type (intHI_type_node, 4);
7237 V4SI_type_node = build_vector_type (intSI_type_node, 4);
7238 V4SF_type_node = build_vector_type (float_type_node, 4);
7239 V8HI_type_node = build_vector_type (intHI_type_node, 8);
7240 V16QI_type_node = build_vector_type (intQI_type_node, 16);
7242 unsigned_V16QI_type_node = build_vector_type (unsigned_intQI_type_node, 16);
7243 unsigned_V8HI_type_node = build_vector_type (unsigned_intHI_type_node, 8);
7244 unsigned_V4SI_type_node = build_vector_type (unsigned_intSI_type_node, 4);
7246 opaque_V2SF_type_node = build_opaque_vector_type (float_type_node, 2);
7247 opaque_V2SI_type_node = build_opaque_vector_type (intSI_type_node, 2);
7248 opaque_p_V2SI_type_node = build_pointer_type (opaque_V2SI_type_node);
7250 /* The 'vector bool ...' types must be kept distinct from 'vector unsigned ...'
7251 types, especially in C++ land. Similarly, 'vector pixel' is distinct from
7252 'vector unsigned short'. */
7254 bool_char_type_node = build_distinct_type_copy (unsigned_intQI_type_node);
7255 bool_short_type_node = build_distinct_type_copy (unsigned_intHI_type_node);
7256 bool_int_type_node = build_distinct_type_copy (unsigned_intSI_type_node);
7257 pixel_type_node = build_distinct_type_copy (unsigned_intHI_type_node);
7259 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7260 get_identifier ("__bool char"),
7261 bool_char_type_node));
7262 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7263 get_identifier ("__bool short"),
7264 bool_short_type_node));
7265 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7266 get_identifier ("__bool int"),
7267 bool_int_type_node));
7268 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7269 get_identifier ("__pixel"),
7272 bool_V16QI_type_node = build_vector_type (bool_char_type_node, 16);
7273 bool_V8HI_type_node = build_vector_type (bool_short_type_node, 8);
7274 bool_V4SI_type_node = build_vector_type (bool_int_type_node, 4);
7275 pixel_V8HI_type_node = build_vector_type (pixel_type_node, 8);
7277 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7278 get_identifier ("__vector unsigned char"),
7279 unsigned_V16QI_type_node));
7280 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7281 get_identifier ("__vector signed char"),
7283 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7284 get_identifier ("__vector __bool char"),
7285 bool_V16QI_type_node));
7287 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7288 get_identifier ("__vector unsigned short"),
7289 unsigned_V8HI_type_node));
7290 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7291 get_identifier ("__vector signed short"),
7293 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7294 get_identifier ("__vector __bool short"),
7295 bool_V8HI_type_node));
7297 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7298 get_identifier ("__vector unsigned int"),
7299 unsigned_V4SI_type_node));
7300 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7301 get_identifier ("__vector signed int"),
7303 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7304 get_identifier ("__vector __bool int"),
7305 bool_V4SI_type_node));
7307 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7308 get_identifier ("__vector float"),
7310 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7311 get_identifier ("__vector __pixel"),
7312 pixel_V8HI_type_node));
7315 spe_init_builtins ();
7317 altivec_init_builtins ();
7318 if (TARGET_ALTIVEC || TARGET_SPE)
7319 rs6000_common_init_builtins ();
7322 /* Search through a set of builtins and enable the mask bits.
7323 DESC is an array of builtins.
7324 SIZE is the total number of builtins.
7325 START is the builtin enum at which to start.
7326 END is the builtin enum at which to end. */
7328 enable_mask_for_builtins (struct builtin_description *desc, int size,
7329 enum rs6000_builtins start,
7330 enum rs6000_builtins end)
7334 for (i = 0; i < size; ++i)
7335 if (desc[i].code == start)
7341 for (; i < size; ++i)
7343 /* Flip all the bits on. */
7344 desc[i].mask = target_flags;
7345 if (desc[i].code == end)
7351 spe_init_builtins (void)
7353 tree endlink = void_list_node;
7354 tree puint_type_node = build_pointer_type (unsigned_type_node);
7355 tree pushort_type_node = build_pointer_type (short_unsigned_type_node);
7356 struct builtin_description *d;
7359 tree v2si_ftype_4_v2si
7360 = build_function_type
7361 (opaque_V2SI_type_node,
7362 tree_cons (NULL_TREE, opaque_V2SI_type_node,
7363 tree_cons (NULL_TREE, opaque_V2SI_type_node,
7364 tree_cons (NULL_TREE, opaque_V2SI_type_node,
7365 tree_cons (NULL_TREE, opaque_V2SI_type_node,
7368 tree v2sf_ftype_4_v2sf
7369 = build_function_type
7370 (opaque_V2SF_type_node,
7371 tree_cons (NULL_TREE, opaque_V2SF_type_node,
7372 tree_cons (NULL_TREE, opaque_V2SF_type_node,
7373 tree_cons (NULL_TREE, opaque_V2SF_type_node,
7374 tree_cons (NULL_TREE, opaque_V2SF_type_node,
7377 tree int_ftype_int_v2si_v2si
7378 = build_function_type
7380 tree_cons (NULL_TREE, integer_type_node,
7381 tree_cons (NULL_TREE, opaque_V2SI_type_node,
7382 tree_cons (NULL_TREE, opaque_V2SI_type_node,
7385 tree int_ftype_int_v2sf_v2sf
7386 = build_function_type
7388 tree_cons (NULL_TREE, integer_type_node,
7389 tree_cons (NULL_TREE, opaque_V2SF_type_node,
7390 tree_cons (NULL_TREE, opaque_V2SF_type_node,
7393 tree void_ftype_v2si_puint_int
7394 = build_function_type (void_type_node,
7395 tree_cons (NULL_TREE, opaque_V2SI_type_node,
7396 tree_cons (NULL_TREE, puint_type_node,
7397 tree_cons (NULL_TREE,
7401 tree void_ftype_v2si_puint_char
7402 = build_function_type (void_type_node,
7403 tree_cons (NULL_TREE, opaque_V2SI_type_node,
7404 tree_cons (NULL_TREE, puint_type_node,
7405 tree_cons (NULL_TREE,
7409 tree void_ftype_v2si_pv2si_int
7410 = build_function_type (void_type_node,
7411 tree_cons (NULL_TREE, opaque_V2SI_type_node,
7412 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
7413 tree_cons (NULL_TREE,
7417 tree void_ftype_v2si_pv2si_char
7418 = build_function_type (void_type_node,
7419 tree_cons (NULL_TREE, opaque_V2SI_type_node,
7420 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
7421 tree_cons (NULL_TREE,
7426 = build_function_type (void_type_node,
7427 tree_cons (NULL_TREE, integer_type_node, endlink));
7430 = build_function_type (integer_type_node, endlink);
7432 tree v2si_ftype_pv2si_int
7433 = build_function_type (opaque_V2SI_type_node,
7434 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
7435 tree_cons (NULL_TREE, integer_type_node,
7438 tree v2si_ftype_puint_int
7439 = build_function_type (opaque_V2SI_type_node,
7440 tree_cons (NULL_TREE, puint_type_node,
7441 tree_cons (NULL_TREE, integer_type_node,
7444 tree v2si_ftype_pushort_int
7445 = build_function_type (opaque_V2SI_type_node,
7446 tree_cons (NULL_TREE, pushort_type_node,
7447 tree_cons (NULL_TREE, integer_type_node,
7450 tree v2si_ftype_signed_char
7451 = build_function_type (opaque_V2SI_type_node,
7452 tree_cons (NULL_TREE, signed_char_type_node,
7455 /* The initialization of the simple binary and unary builtins is
7456 done in rs6000_common_init_builtins, but we have to enable the
7457 mask bits here manually because we have run out of `target_flags'
7458 bits. We really need to redesign this mask business. */
7460 enable_mask_for_builtins ((struct builtin_description *) bdesc_2arg,
7461 ARRAY_SIZE (bdesc_2arg),
7464 enable_mask_for_builtins ((struct builtin_description *) bdesc_1arg,
7465 ARRAY_SIZE (bdesc_1arg),
7467 SPE_BUILTIN_EVSUBFUSIAAW);
7468 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_predicates,
7469 ARRAY_SIZE (bdesc_spe_predicates),
7470 SPE_BUILTIN_EVCMPEQ,
7471 SPE_BUILTIN_EVFSTSTLT);
7472 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_evsel,
7473 ARRAY_SIZE (bdesc_spe_evsel),
7474 SPE_BUILTIN_EVSEL_CMPGTS,
7475 SPE_BUILTIN_EVSEL_FSTSTEQ);
7477 (*lang_hooks.decls.pushdecl)
7478 (build_decl (TYPE_DECL, get_identifier ("__ev64_opaque__"),
7479 opaque_V2SI_type_node));
7481 /* Initialize irregular SPE builtins. */
7483 def_builtin (target_flags, "__builtin_spe_mtspefscr", void_ftype_int, SPE_BUILTIN_MTSPEFSCR);
7484 def_builtin (target_flags, "__builtin_spe_mfspefscr", int_ftype_void, SPE_BUILTIN_MFSPEFSCR);
7485 def_builtin (target_flags, "__builtin_spe_evstddx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDDX);
7486 def_builtin (target_flags, "__builtin_spe_evstdhx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDHX);
7487 def_builtin (target_flags, "__builtin_spe_evstdwx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDWX);
7488 def_builtin (target_flags, "__builtin_spe_evstwhex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHEX);
7489 def_builtin (target_flags, "__builtin_spe_evstwhox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHOX);
7490 def_builtin (target_flags, "__builtin_spe_evstwwex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWEX);
7491 def_builtin (target_flags, "__builtin_spe_evstwwox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWOX);
7492 def_builtin (target_flags, "__builtin_spe_evstdd", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDD);
7493 def_builtin (target_flags, "__builtin_spe_evstdh", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDH);
7494 def_builtin (target_flags, "__builtin_spe_evstdw", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDW);
7495 def_builtin (target_flags, "__builtin_spe_evstwhe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHE);
7496 def_builtin (target_flags, "__builtin_spe_evstwho", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHO);
7497 def_builtin (target_flags, "__builtin_spe_evstwwe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWE);
7498 def_builtin (target_flags, "__builtin_spe_evstwwo", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWO);
7499 def_builtin (target_flags, "__builtin_spe_evsplatfi", v2si_ftype_signed_char, SPE_BUILTIN_EVSPLATFI);
7500 def_builtin (target_flags, "__builtin_spe_evsplati", v2si_ftype_signed_char, SPE_BUILTIN_EVSPLATI);
7503 def_builtin (target_flags, "__builtin_spe_evlddx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDDX);
7504 def_builtin (target_flags, "__builtin_spe_evldwx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDWX);
7505 def_builtin (target_flags, "__builtin_spe_evldhx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDHX);
7506 def_builtin (target_flags, "__builtin_spe_evlwhex", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHEX);
7507 def_builtin (target_flags, "__builtin_spe_evlwhoux", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOUX);
7508 def_builtin (target_flags, "__builtin_spe_evlwhosx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOSX);
7509 def_builtin (target_flags, "__builtin_spe_evlwwsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLATX);
7510 def_builtin (target_flags, "__builtin_spe_evlwhsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLATX);
7511 def_builtin (target_flags, "__builtin_spe_evlhhesplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLATX);
7512 def_builtin (target_flags, "__builtin_spe_evlhhousplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLATX);
7513 def_builtin (target_flags, "__builtin_spe_evlhhossplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLATX);
7514 def_builtin (target_flags, "__builtin_spe_evldd", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDD);
7515 def_builtin (target_flags, "__builtin_spe_evldw", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDW);
7516 def_builtin (target_flags, "__builtin_spe_evldh", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDH);
7517 def_builtin (target_flags, "__builtin_spe_evlhhesplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLAT);
7518 def_builtin (target_flags, "__builtin_spe_evlhhossplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLAT);
7519 def_builtin (target_flags, "__builtin_spe_evlhhousplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLAT);
7520 def_builtin (target_flags, "__builtin_spe_evlwhe", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHE);
7521 def_builtin (target_flags, "__builtin_spe_evlwhos", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOS);
7522 def_builtin (target_flags, "__builtin_spe_evlwhou", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOU);
7523 def_builtin (target_flags, "__builtin_spe_evlwhsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLAT);
7524 def_builtin (target_flags, "__builtin_spe_evlwwsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLAT);
7527 d = (struct builtin_description *) bdesc_spe_predicates;
7528 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, d++)
7532 switch (insn_data[d->icode].operand[1].mode)
7535 type = int_ftype_int_v2si_v2si;
7538 type = int_ftype_int_v2sf_v2sf;
7544 def_builtin (d->mask, d->name, type, d->code);
7547 /* Evsel predicates. */
7548 d = (struct builtin_description *) bdesc_spe_evsel;
7549 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, d++)
7553 switch (insn_data[d->icode].operand[1].mode)
7556 type = v2si_ftype_4_v2si;
7559 type = v2sf_ftype_4_v2sf;
7565 def_builtin (d->mask, d->name, type, d->code);
7570 altivec_init_builtins (void)
7572 struct builtin_description *d;
7573 struct builtin_description_predicates *dp;
7575 tree pfloat_type_node = build_pointer_type (float_type_node);
7576 tree pint_type_node = build_pointer_type (integer_type_node);
7577 tree pshort_type_node = build_pointer_type (short_integer_type_node);
7578 tree pchar_type_node = build_pointer_type (char_type_node);
7580 tree pvoid_type_node = build_pointer_type (void_type_node);
7582 tree pcfloat_type_node = build_pointer_type (build_qualified_type (float_type_node, TYPE_QUAL_CONST));
7583 tree pcint_type_node = build_pointer_type (build_qualified_type (integer_type_node, TYPE_QUAL_CONST));
7584 tree pcshort_type_node = build_pointer_type (build_qualified_type (short_integer_type_node, TYPE_QUAL_CONST));
7585 tree pcchar_type_node = build_pointer_type (build_qualified_type (char_type_node, TYPE_QUAL_CONST));
7587 tree pcvoid_type_node = build_pointer_type (build_qualified_type (void_type_node, TYPE_QUAL_CONST));
7589 tree int_ftype_int_v4si_v4si
7590 = build_function_type_list (integer_type_node,
7591 integer_type_node, V4SI_type_node,
7592 V4SI_type_node, NULL_TREE);
7593 tree v4sf_ftype_pcfloat
7594 = build_function_type_list (V4SF_type_node, pcfloat_type_node, NULL_TREE);
7595 tree void_ftype_pfloat_v4sf
7596 = build_function_type_list (void_type_node,
7597 pfloat_type_node, V4SF_type_node, NULL_TREE);
7598 tree v4si_ftype_pcint
7599 = build_function_type_list (V4SI_type_node, pcint_type_node, NULL_TREE);
7600 tree void_ftype_pint_v4si
7601 = build_function_type_list (void_type_node,
7602 pint_type_node, V4SI_type_node, NULL_TREE);
7603 tree v8hi_ftype_pcshort
7604 = build_function_type_list (V8HI_type_node, pcshort_type_node, NULL_TREE);
7605 tree void_ftype_pshort_v8hi
7606 = build_function_type_list (void_type_node,
7607 pshort_type_node, V8HI_type_node, NULL_TREE);
7608 tree v16qi_ftype_pcchar
7609 = build_function_type_list (V16QI_type_node, pcchar_type_node, NULL_TREE);
7610 tree void_ftype_pchar_v16qi
7611 = build_function_type_list (void_type_node,
7612 pchar_type_node, V16QI_type_node, NULL_TREE);
7613 tree void_ftype_v4si
7614 = build_function_type_list (void_type_node, V4SI_type_node, NULL_TREE);
7615 tree v8hi_ftype_void
7616 = build_function_type (V8HI_type_node, void_list_node);
7617 tree void_ftype_void
7618 = build_function_type (void_type_node, void_list_node);
7620 = build_function_type_list (void_type_node, integer_type_node, NULL_TREE);
7622 tree v16qi_ftype_long_pcvoid
7623 = build_function_type_list (V16QI_type_node,
7624 long_integer_type_node, pcvoid_type_node, NULL_TREE);
7625 tree v8hi_ftype_long_pcvoid
7626 = build_function_type_list (V8HI_type_node,
7627 long_integer_type_node, pcvoid_type_node, NULL_TREE);
7628 tree v4si_ftype_long_pcvoid
7629 = build_function_type_list (V4SI_type_node,
7630 long_integer_type_node, pcvoid_type_node, NULL_TREE);
7632 tree void_ftype_v4si_long_pvoid
7633 = build_function_type_list (void_type_node,
7634 V4SI_type_node, long_integer_type_node,
7635 pvoid_type_node, NULL_TREE);
7636 tree void_ftype_v16qi_long_pvoid
7637 = build_function_type_list (void_type_node,
7638 V16QI_type_node, long_integer_type_node,
7639 pvoid_type_node, NULL_TREE);
7640 tree void_ftype_v8hi_long_pvoid
7641 = build_function_type_list (void_type_node,
7642 V8HI_type_node, long_integer_type_node,
7643 pvoid_type_node, NULL_TREE);
7644 tree int_ftype_int_v8hi_v8hi
7645 = build_function_type_list (integer_type_node,
7646 integer_type_node, V8HI_type_node,
7647 V8HI_type_node, NULL_TREE);
7648 tree int_ftype_int_v16qi_v16qi
7649 = build_function_type_list (integer_type_node,
7650 integer_type_node, V16QI_type_node,
7651 V16QI_type_node, NULL_TREE);
7652 tree int_ftype_int_v4sf_v4sf
7653 = build_function_type_list (integer_type_node,
7654 integer_type_node, V4SF_type_node,
7655 V4SF_type_node, NULL_TREE);
7656 tree v4si_ftype_v4si
7657 = build_function_type_list (V4SI_type_node, V4SI_type_node, NULL_TREE);
7658 tree v8hi_ftype_v8hi
7659 = build_function_type_list (V8HI_type_node, V8HI_type_node, NULL_TREE);
7660 tree v16qi_ftype_v16qi
7661 = build_function_type_list (V16QI_type_node, V16QI_type_node, NULL_TREE);
7662 tree v4sf_ftype_v4sf
7663 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
7664 tree void_ftype_pcvoid_int_int
7665 = build_function_type_list (void_type_node,
7666 pcvoid_type_node, integer_type_node,
7667 integer_type_node, NULL_TREE);
7668 tree int_ftype_pcchar
7669 = build_function_type_list (integer_type_node,
7670 pcchar_type_node, NULL_TREE);
7672 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pcfloat,
7673 ALTIVEC_BUILTIN_LD_INTERNAL_4sf);
7674 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf,
7675 ALTIVEC_BUILTIN_ST_INTERNAL_4sf);
7676 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4si", v4si_ftype_pcint,
7677 ALTIVEC_BUILTIN_LD_INTERNAL_4si);
7678 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si,
7679 ALTIVEC_BUILTIN_ST_INTERNAL_4si);
7680 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pcshort,
7681 ALTIVEC_BUILTIN_LD_INTERNAL_8hi);
7682 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi,
7683 ALTIVEC_BUILTIN_ST_INTERNAL_8hi);
7684 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pcchar,
7685 ALTIVEC_BUILTIN_LD_INTERNAL_16qi);
7686 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi,
7687 ALTIVEC_BUILTIN_ST_INTERNAL_16qi);
7688 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mtvscr", void_ftype_v4si, ALTIVEC_BUILTIN_MTVSCR);
7689 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mfvscr", v8hi_ftype_void, ALTIVEC_BUILTIN_MFVSCR);
7690 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dssall", void_ftype_void, ALTIVEC_BUILTIN_DSSALL);
7691 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dss", void_ftype_int, ALTIVEC_BUILTIN_DSS);
7692 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVSL);
7693 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsr", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVSR);
7694 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvebx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEBX);
7695 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvehx", v8hi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEHX);
7696 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvewx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEWX);
7697 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvxl", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVXL);
7698 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVX);
7699 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvx", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVX);
7700 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvewx", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVEWX);
7701 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvxl", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVXL);
7702 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvebx", void_ftype_v16qi_long_pvoid, ALTIVEC_BUILTIN_STVEBX);
7703 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvehx", void_ftype_v8hi_long_pvoid, ALTIVEC_BUILTIN_STVEHX);
7705 /* See altivec.h for usage of "__builtin_altivec_compiletime_error". */
7706 def_builtin (MASK_ALTIVEC, "__builtin_altivec_compiletime_error", int_ftype_pcchar,
7707 ALTIVEC_BUILTIN_COMPILETIME_ERROR);
7709 /* Add the DST variants. */
7710 d = (struct builtin_description *) bdesc_dst;
7711 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
7712 def_builtin (d->mask, d->name, void_ftype_pcvoid_int_int, d->code);
7714 /* Initialize the predicates. */
7715 dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
7716 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
7718 enum machine_mode mode1;
7721 mode1 = insn_data[dp->icode].operand[1].mode;
7726 type = int_ftype_int_v4si_v4si;
7729 type = int_ftype_int_v8hi_v8hi;
7732 type = int_ftype_int_v16qi_v16qi;
7735 type = int_ftype_int_v4sf_v4sf;
7741 def_builtin (dp->mask, dp->name, type, dp->code);
7744 /* Initialize the abs* operators. */
7745 d = (struct builtin_description *) bdesc_abs;
7746 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
7748 enum machine_mode mode0;
7751 mode0 = insn_data[d->icode].operand[0].mode;
7756 type = v4si_ftype_v4si;
7759 type = v8hi_ftype_v8hi;
7762 type = v16qi_ftype_v16qi;
7765 type = v4sf_ftype_v4sf;
7771 def_builtin (d->mask, d->name, type, d->code);
7776 rs6000_common_init_builtins (void)
7778 struct builtin_description *d;
7781 tree v4sf_ftype_v4sf_v4sf_v16qi
7782 = build_function_type_list (V4SF_type_node,
7783 V4SF_type_node, V4SF_type_node,
7784 V16QI_type_node, NULL_TREE);
7785 tree v4si_ftype_v4si_v4si_v16qi
7786 = build_function_type_list (V4SI_type_node,
7787 V4SI_type_node, V4SI_type_node,
7788 V16QI_type_node, NULL_TREE);
7789 tree v8hi_ftype_v8hi_v8hi_v16qi
7790 = build_function_type_list (V8HI_type_node,
7791 V8HI_type_node, V8HI_type_node,
7792 V16QI_type_node, NULL_TREE);
7793 tree v16qi_ftype_v16qi_v16qi_v16qi
7794 = build_function_type_list (V16QI_type_node,
7795 V16QI_type_node, V16QI_type_node,
7796 V16QI_type_node, NULL_TREE);
7798 = build_function_type_list (V4SI_type_node, integer_type_node, NULL_TREE);
7800 = build_function_type_list (V8HI_type_node, integer_type_node, NULL_TREE);
7801 tree v16qi_ftype_int
7802 = build_function_type_list (V16QI_type_node, integer_type_node, NULL_TREE);
7803 tree v8hi_ftype_v16qi
7804 = build_function_type_list (V8HI_type_node, V16QI_type_node, NULL_TREE);
7805 tree v4sf_ftype_v4sf
7806 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
7808 tree v2si_ftype_v2si_v2si
7809 = build_function_type_list (opaque_V2SI_type_node,
7810 opaque_V2SI_type_node,
7811 opaque_V2SI_type_node, NULL_TREE);
7813 tree v2sf_ftype_v2sf_v2sf
7814 = build_function_type_list (opaque_V2SF_type_node,
7815 opaque_V2SF_type_node,
7816 opaque_V2SF_type_node, NULL_TREE);
7818 tree v2si_ftype_int_int
7819 = build_function_type_list (opaque_V2SI_type_node,
7820 integer_type_node, integer_type_node,
7823 tree v2si_ftype_v2si
7824 = build_function_type_list (opaque_V2SI_type_node,
7825 opaque_V2SI_type_node, NULL_TREE);
7827 tree v2sf_ftype_v2sf
7828 = build_function_type_list (opaque_V2SF_type_node,
7829 opaque_V2SF_type_node, NULL_TREE);
7831 tree v2sf_ftype_v2si
7832 = build_function_type_list (opaque_V2SF_type_node,
7833 opaque_V2SI_type_node, NULL_TREE);
7835 tree v2si_ftype_v2sf
7836 = build_function_type_list (opaque_V2SI_type_node,
7837 opaque_V2SF_type_node, NULL_TREE);
7839 tree v2si_ftype_v2si_char
7840 = build_function_type_list (opaque_V2SI_type_node,
7841 opaque_V2SI_type_node,
7842 char_type_node, NULL_TREE);
7844 tree v2si_ftype_int_char
7845 = build_function_type_list (opaque_V2SI_type_node,
7846 integer_type_node, char_type_node, NULL_TREE);
7848 tree v2si_ftype_char
7849 = build_function_type_list (opaque_V2SI_type_node,
7850 char_type_node, NULL_TREE);
7852 tree int_ftype_int_int
7853 = build_function_type_list (integer_type_node,
7854 integer_type_node, integer_type_node,
7857 tree v4si_ftype_v4si_v4si
7858 = build_function_type_list (V4SI_type_node,
7859 V4SI_type_node, V4SI_type_node, NULL_TREE);
7860 tree v4sf_ftype_v4si_int
7861 = build_function_type_list (V4SF_type_node,
7862 V4SI_type_node, integer_type_node, NULL_TREE);
7863 tree v4si_ftype_v4sf_int
7864 = build_function_type_list (V4SI_type_node,
7865 V4SF_type_node, integer_type_node, NULL_TREE);
7866 tree v4si_ftype_v4si_int
7867 = build_function_type_list (V4SI_type_node,
7868 V4SI_type_node, integer_type_node, NULL_TREE);
7869 tree v8hi_ftype_v8hi_int
7870 = build_function_type_list (V8HI_type_node,
7871 V8HI_type_node, integer_type_node, NULL_TREE);
7872 tree v16qi_ftype_v16qi_int
7873 = build_function_type_list (V16QI_type_node,
7874 V16QI_type_node, integer_type_node, NULL_TREE);
7875 tree v16qi_ftype_v16qi_v16qi_int
7876 = build_function_type_list (V16QI_type_node,
7877 V16QI_type_node, V16QI_type_node,
7878 integer_type_node, NULL_TREE);
7879 tree v8hi_ftype_v8hi_v8hi_int
7880 = build_function_type_list (V8HI_type_node,
7881 V8HI_type_node, V8HI_type_node,
7882 integer_type_node, NULL_TREE);
7883 tree v4si_ftype_v4si_v4si_int
7884 = build_function_type_list (V4SI_type_node,
7885 V4SI_type_node, V4SI_type_node,
7886 integer_type_node, NULL_TREE);
7887 tree v4sf_ftype_v4sf_v4sf_int
7888 = build_function_type_list (V4SF_type_node,
7889 V4SF_type_node, V4SF_type_node,
7890 integer_type_node, NULL_TREE);
7891 tree v4sf_ftype_v4sf_v4sf
7892 = build_function_type_list (V4SF_type_node,
7893 V4SF_type_node, V4SF_type_node, NULL_TREE);
7894 tree v4sf_ftype_v4sf_v4sf_v4si
7895 = build_function_type_list (V4SF_type_node,
7896 V4SF_type_node, V4SF_type_node,
7897 V4SI_type_node, NULL_TREE);
7898 tree v4sf_ftype_v4sf_v4sf_v4sf
7899 = build_function_type_list (V4SF_type_node,
7900 V4SF_type_node, V4SF_type_node,
7901 V4SF_type_node, NULL_TREE);
7902 tree v4si_ftype_v4si_v4si_v4si
7903 = build_function_type_list (V4SI_type_node,
7904 V4SI_type_node, V4SI_type_node,
7905 V4SI_type_node, NULL_TREE);
7906 tree v8hi_ftype_v8hi_v8hi
7907 = build_function_type_list (V8HI_type_node,
7908 V8HI_type_node, V8HI_type_node, NULL_TREE);
7909 tree v8hi_ftype_v8hi_v8hi_v8hi
7910 = build_function_type_list (V8HI_type_node,
7911 V8HI_type_node, V8HI_type_node,
7912 V8HI_type_node, NULL_TREE);
7913 tree v4si_ftype_v8hi_v8hi_v4si
7914 = build_function_type_list (V4SI_type_node,
7915 V8HI_type_node, V8HI_type_node,
7916 V4SI_type_node, NULL_TREE);
7917 tree v4si_ftype_v16qi_v16qi_v4si
7918 = build_function_type_list (V4SI_type_node,
7919 V16QI_type_node, V16QI_type_node,
7920 V4SI_type_node, NULL_TREE);
7921 tree v16qi_ftype_v16qi_v16qi
7922 = build_function_type_list (V16QI_type_node,
7923 V16QI_type_node, V16QI_type_node, NULL_TREE);
7924 tree v4si_ftype_v4sf_v4sf
7925 = build_function_type_list (V4SI_type_node,
7926 V4SF_type_node, V4SF_type_node, NULL_TREE);
7927 tree v8hi_ftype_v16qi_v16qi
7928 = build_function_type_list (V8HI_type_node,
7929 V16QI_type_node, V16QI_type_node, NULL_TREE);
7930 tree v4si_ftype_v8hi_v8hi
7931 = build_function_type_list (V4SI_type_node,
7932 V8HI_type_node, V8HI_type_node, NULL_TREE);
7933 tree v8hi_ftype_v4si_v4si
7934 = build_function_type_list (V8HI_type_node,
7935 V4SI_type_node, V4SI_type_node, NULL_TREE);
7936 tree v16qi_ftype_v8hi_v8hi
7937 = build_function_type_list (V16QI_type_node,
7938 V8HI_type_node, V8HI_type_node, NULL_TREE);
7939 tree v4si_ftype_v16qi_v4si
7940 = build_function_type_list (V4SI_type_node,
7941 V16QI_type_node, V4SI_type_node, NULL_TREE);
7942 tree v4si_ftype_v16qi_v16qi
7943 = build_function_type_list (V4SI_type_node,
7944 V16QI_type_node, V16QI_type_node, NULL_TREE);
7945 tree v4si_ftype_v8hi_v4si
7946 = build_function_type_list (V4SI_type_node,
7947 V8HI_type_node, V4SI_type_node, NULL_TREE);
7948 tree v4si_ftype_v8hi
7949 = build_function_type_list (V4SI_type_node, V8HI_type_node, NULL_TREE);
7950 tree int_ftype_v4si_v4si
7951 = build_function_type_list (integer_type_node,
7952 V4SI_type_node, V4SI_type_node, NULL_TREE);
7953 tree int_ftype_v4sf_v4sf
7954 = build_function_type_list (integer_type_node,
7955 V4SF_type_node, V4SF_type_node, NULL_TREE);
7956 tree int_ftype_v16qi_v16qi
7957 = build_function_type_list (integer_type_node,
7958 V16QI_type_node, V16QI_type_node, NULL_TREE);
7959 tree int_ftype_v8hi_v8hi
7960 = build_function_type_list (integer_type_node,
7961 V8HI_type_node, V8HI_type_node, NULL_TREE);
7963 /* Add the simple ternary operators. */
7964 d = (struct builtin_description *) bdesc_3arg;
7965 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
7968 enum machine_mode mode0, mode1, mode2, mode3;
7971 if (d->name == 0 || d->icode == CODE_FOR_nothing)
7974 mode0 = insn_data[d->icode].operand[0].mode;
7975 mode1 = insn_data[d->icode].operand[1].mode;
7976 mode2 = insn_data[d->icode].operand[2].mode;
7977 mode3 = insn_data[d->icode].operand[3].mode;
7979 /* When all four are of the same mode. */
7980 if (mode0 == mode1 && mode1 == mode2 && mode2 == mode3)
7985 type = v4si_ftype_v4si_v4si_v4si;
7988 type = v4sf_ftype_v4sf_v4sf_v4sf;
7991 type = v8hi_ftype_v8hi_v8hi_v8hi;
7994 type = v16qi_ftype_v16qi_v16qi_v16qi;
8000 else if (mode0 == mode1 && mode1 == mode2 && mode3 == V16QImode)
8005 type = v4si_ftype_v4si_v4si_v16qi;
8008 type = v4sf_ftype_v4sf_v4sf_v16qi;
8011 type = v8hi_ftype_v8hi_v8hi_v16qi;
8014 type = v16qi_ftype_v16qi_v16qi_v16qi;
8020 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode
8021 && mode3 == V4SImode)
8022 type = v4si_ftype_v16qi_v16qi_v4si;
8023 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode
8024 && mode3 == V4SImode)
8025 type = v4si_ftype_v8hi_v8hi_v4si;
8026 else if (mode0 == V4SFmode && mode1 == V4SFmode && mode2 == V4SFmode
8027 && mode3 == V4SImode)
8028 type = v4sf_ftype_v4sf_v4sf_v4si;
8030 /* vchar, vchar, vchar, 4 bit literal. */
8031 else if (mode0 == V16QImode && mode1 == mode0 && mode2 == mode0
8033 type = v16qi_ftype_v16qi_v16qi_int;
8035 /* vshort, vshort, vshort, 4 bit literal. */
8036 else if (mode0 == V8HImode && mode1 == mode0 && mode2 == mode0
8038 type = v8hi_ftype_v8hi_v8hi_int;
8040 /* vint, vint, vint, 4 bit literal. */
8041 else if (mode0 == V4SImode && mode1 == mode0 && mode2 == mode0
8043 type = v4si_ftype_v4si_v4si_int;
8045 /* vfloat, vfloat, vfloat, 4 bit literal. */
8046 else if (mode0 == V4SFmode && mode1 == mode0 && mode2 == mode0
8048 type = v4sf_ftype_v4sf_v4sf_int;
8053 def_builtin (d->mask, d->name, type, d->code);
8056 /* Add the simple binary operators. */
8057 d = (struct builtin_description *) bdesc_2arg;
8058 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
8060 enum machine_mode mode0, mode1, mode2;
8063 if (d->name == 0 || d->icode == CODE_FOR_nothing)
8066 mode0 = insn_data[d->icode].operand[0].mode;
8067 mode1 = insn_data[d->icode].operand[1].mode;
8068 mode2 = insn_data[d->icode].operand[2].mode;
8070 /* When all three operands are of the same mode. */
8071 if (mode0 == mode1 && mode1 == mode2)
8076 type = v4sf_ftype_v4sf_v4sf;
8079 type = v4si_ftype_v4si_v4si;
8082 type = v16qi_ftype_v16qi_v16qi;
8085 type = v8hi_ftype_v8hi_v8hi;
8088 type = v2si_ftype_v2si_v2si;
8091 type = v2sf_ftype_v2sf_v2sf;
8094 type = int_ftype_int_int;
8101 /* A few other combos we really don't want to do manually. */
8103 /* vint, vfloat, vfloat. */
8104 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == V4SFmode)
8105 type = v4si_ftype_v4sf_v4sf;
8107 /* vshort, vchar, vchar. */
8108 else if (mode0 == V8HImode && mode1 == V16QImode && mode2 == V16QImode)
8109 type = v8hi_ftype_v16qi_v16qi;
8111 /* vint, vshort, vshort. */
8112 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode)
8113 type = v4si_ftype_v8hi_v8hi;
8115 /* vshort, vint, vint. */
8116 else if (mode0 == V8HImode && mode1 == V4SImode && mode2 == V4SImode)
8117 type = v8hi_ftype_v4si_v4si;
8119 /* vchar, vshort, vshort. */
8120 else if (mode0 == V16QImode && mode1 == V8HImode && mode2 == V8HImode)
8121 type = v16qi_ftype_v8hi_v8hi;
8123 /* vint, vchar, vint. */
8124 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V4SImode)
8125 type = v4si_ftype_v16qi_v4si;
8127 /* vint, vchar, vchar. */
8128 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode)
8129 type = v4si_ftype_v16qi_v16qi;
8131 /* vint, vshort, vint. */
8132 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V4SImode)
8133 type = v4si_ftype_v8hi_v4si;
8135 /* vint, vint, 5 bit literal. */
8136 else if (mode0 == V4SImode && mode1 == V4SImode && mode2 == QImode)
8137 type = v4si_ftype_v4si_int;
8139 /* vshort, vshort, 5 bit literal. */
8140 else if (mode0 == V8HImode && mode1 == V8HImode && mode2 == QImode)
8141 type = v8hi_ftype_v8hi_int;
8143 /* vchar, vchar, 5 bit literal. */
8144 else if (mode0 == V16QImode && mode1 == V16QImode && mode2 == QImode)
8145 type = v16qi_ftype_v16qi_int;
8147 /* vfloat, vint, 5 bit literal. */
8148 else if (mode0 == V4SFmode && mode1 == V4SImode && mode2 == QImode)
8149 type = v4sf_ftype_v4si_int;
8151 /* vint, vfloat, 5 bit literal. */
8152 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == QImode)
8153 type = v4si_ftype_v4sf_int;
8155 else if (mode0 == V2SImode && mode1 == SImode && mode2 == SImode)
8156 type = v2si_ftype_int_int;
8158 else if (mode0 == V2SImode && mode1 == V2SImode && mode2 == QImode)
8159 type = v2si_ftype_v2si_char;
8161 else if (mode0 == V2SImode && mode1 == SImode && mode2 == QImode)
8162 type = v2si_ftype_int_char;
8165 else if (mode0 == SImode)
8170 type = int_ftype_v4si_v4si;
8173 type = int_ftype_v4sf_v4sf;
8176 type = int_ftype_v16qi_v16qi;
8179 type = int_ftype_v8hi_v8hi;
8189 def_builtin (d->mask, d->name, type, d->code);
8192 /* Add the simple unary operators. */
8193 d = (struct builtin_description *) bdesc_1arg;
8194 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
8196 enum machine_mode mode0, mode1;
8199 if (d->name == 0 || d->icode == CODE_FOR_nothing)
8202 mode0 = insn_data[d->icode].operand[0].mode;
8203 mode1 = insn_data[d->icode].operand[1].mode;
8205 if (mode0 == V4SImode && mode1 == QImode)
8206 type = v4si_ftype_int;
8207 else if (mode0 == V8HImode && mode1 == QImode)
8208 type = v8hi_ftype_int;
8209 else if (mode0 == V16QImode && mode1 == QImode)
8210 type = v16qi_ftype_int;
8211 else if (mode0 == V4SFmode && mode1 == V4SFmode)
8212 type = v4sf_ftype_v4sf;
8213 else if (mode0 == V8HImode && mode1 == V16QImode)
8214 type = v8hi_ftype_v16qi;
8215 else if (mode0 == V4SImode && mode1 == V8HImode)
8216 type = v4si_ftype_v8hi;
8217 else if (mode0 == V2SImode && mode1 == V2SImode)
8218 type = v2si_ftype_v2si;
8219 else if (mode0 == V2SFmode && mode1 == V2SFmode)
8220 type = v2sf_ftype_v2sf;
8221 else if (mode0 == V2SFmode && mode1 == V2SImode)
8222 type = v2sf_ftype_v2si;
8223 else if (mode0 == V2SImode && mode1 == V2SFmode)
8224 type = v2si_ftype_v2sf;
8225 else if (mode0 == V2SImode && mode1 == QImode)
8226 type = v2si_ftype_char;
8230 def_builtin (d->mask, d->name, type, d->code);
8235 rs6000_init_libfuncs (void)
8237 if (!TARGET_HARD_FLOAT)
8240 if (DEFAULT_ABI != ABI_V4)
8242 if (TARGET_XCOFF && ! TARGET_POWER2 && ! TARGET_POWERPC)
8244 /* AIX library routines for float->int conversion. */
8245 set_conv_libfunc (sfix_optab, SImode, DFmode, "__itrunc");
8246 set_conv_libfunc (ufix_optab, SImode, DFmode, "__uitrunc");
8247 set_conv_libfunc (sfix_optab, SImode, TFmode, "_qitrunc");
8248 set_conv_libfunc (ufix_optab, SImode, TFmode, "_quitrunc");
8251 /* Standard AIX/Darwin/64-bit SVR4 quad floating point routines. */
8252 set_optab_libfunc (add_optab, TFmode, "_xlqadd");
8253 set_optab_libfunc (sub_optab, TFmode, "_xlqsub");
8254 set_optab_libfunc (smul_optab, TFmode, "_xlqmul");
8255 set_optab_libfunc (sdiv_optab, TFmode, "_xlqdiv");
8259 /* 32-bit SVR4 quad floating point routines. */
8261 set_optab_libfunc (add_optab, TFmode, "_q_add");
8262 set_optab_libfunc (sub_optab, TFmode, "_q_sub");
8263 set_optab_libfunc (neg_optab, TFmode, "_q_neg");
8264 set_optab_libfunc (smul_optab, TFmode, "_q_mul");
8265 set_optab_libfunc (sdiv_optab, TFmode, "_q_div");
8266 if (TARGET_PPC_GPOPT || TARGET_POWER2)
8267 set_optab_libfunc (sqrt_optab, TFmode, "_q_sqrt");
8269 set_optab_libfunc (eq_optab, TFmode, "_q_feq");
8270 set_optab_libfunc (ne_optab, TFmode, "_q_fne");
8271 set_optab_libfunc (gt_optab, TFmode, "_q_fgt");
8272 set_optab_libfunc (ge_optab, TFmode, "_q_fge");
8273 set_optab_libfunc (lt_optab, TFmode, "_q_flt");
8274 set_optab_libfunc (le_optab, TFmode, "_q_fle");
8276 set_conv_libfunc (sext_optab, TFmode, SFmode, "_q_stoq");
8277 set_conv_libfunc (sext_optab, TFmode, DFmode, "_q_dtoq");
8278 set_conv_libfunc (trunc_optab, SFmode, TFmode, "_q_qtos");
8279 set_conv_libfunc (trunc_optab, DFmode, TFmode, "_q_qtod");
8280 set_conv_libfunc (sfix_optab, SImode, TFmode, "_q_qtoi");
8281 set_conv_libfunc (ufix_optab, SImode, TFmode, "_q_qtou");
8282 set_conv_libfunc (sfloat_optab, TFmode, SImode, "_q_itoq");
8287 /* Expand a block clear operation, and return 1 if successful. Return 0
8288 if we should let the compiler generate normal code.
8290 operands[0] is the destination
8291 operands[1] is the length
8292 operands[2] is the alignment */
8295 expand_block_clear (rtx operands[])
8297 rtx orig_dest = operands[0];
8298 rtx bytes_rtx = operands[1];
8299 rtx align_rtx = operands[2];
8300 int constp = (GET_CODE (bytes_rtx) == CONST_INT);
8306 /* If this is not a fixed size move, just call memcpy */
8310 /* If this is not a fixed size alignment, abort */
8311 if (GET_CODE (align_rtx) != CONST_INT)
8313 align = INTVAL (align_rtx) * BITS_PER_UNIT;
8315 /* Anything to clear? */
8316 bytes = INTVAL (bytes_rtx);
8320 if (bytes > (TARGET_POWERPC64 && align >= 32 ? 64 : 32))
8323 if (optimize_size && bytes > 16)
8326 for (offset = 0; bytes > 0; offset += clear_bytes, bytes -= clear_bytes)
8328 rtx (*mov) (rtx, rtx);
8329 enum machine_mode mode = BLKmode;
8332 if (bytes >= 8 && TARGET_POWERPC64
8333 /* 64-bit loads and stores require word-aligned
8335 && (align >= 64 || (!STRICT_ALIGNMENT && align >= 32)))
8341 else if (bytes >= 4 && !STRICT_ALIGNMENT)
8342 { /* move 4 bytes */
8347 else if (bytes == 2 && !STRICT_ALIGNMENT)
8348 { /* move 2 bytes */
8353 else /* move 1 byte at a time */
8360 dest = adjust_address (orig_dest, mode, offset);
8362 emit_insn ((*mov) (dest, const0_rtx));
8369 /* Expand a block move operation, and return 1 if successful. Return 0
8370 if we should let the compiler generate normal code.
8372 operands[0] is the destination
8373 operands[1] is the source
8374 operands[2] is the length
8375 operands[3] is the alignment */
8377 #define MAX_MOVE_REG 4
8380 expand_block_move (rtx operands[])
8382 rtx orig_dest = operands[0];
8383 rtx orig_src = operands[1];
8384 rtx bytes_rtx = operands[2];
8385 rtx align_rtx = operands[3];
8386 int constp = (GET_CODE (bytes_rtx) == CONST_INT);
8391 rtx stores[MAX_MOVE_REG];
8394 /* If this is not a fixed size move, just call memcpy */
8398 /* If this is not a fixed size alignment, abort */
8399 if (GET_CODE (align_rtx) != CONST_INT)
8401 align = INTVAL (align_rtx) * BITS_PER_UNIT;
8403 /* Anything to move? */
8404 bytes = INTVAL (bytes_rtx);
8408 /* store_one_arg depends on expand_block_move to handle at least the size of
8409 reg_parm_stack_space. */
8410 if (bytes > (TARGET_POWERPC64 ? 64 : 32))
8413 for (offset = 0; bytes > 0; offset += move_bytes, bytes -= move_bytes)
8416 rtx (*movmemsi) (rtx, rtx, rtx, rtx);
8417 rtx (*mov) (rtx, rtx);
8419 enum machine_mode mode = BLKmode;
8423 && bytes > 24 /* move up to 32 bytes at a time */
8431 && ! fixed_regs[12])
8433 move_bytes = (bytes > 32) ? 32 : bytes;
8434 gen_func.movmemsi = gen_movmemsi_8reg;
8436 else if (TARGET_STRING
8437 && bytes > 16 /* move up to 24 bytes at a time */
8443 && ! fixed_regs[10])
8445 move_bytes = (bytes > 24) ? 24 : bytes;
8446 gen_func.movmemsi = gen_movmemsi_6reg;
8448 else if (TARGET_STRING
8449 && bytes > 8 /* move up to 16 bytes at a time */
8455 move_bytes = (bytes > 16) ? 16 : bytes;
8456 gen_func.movmemsi = gen_movmemsi_4reg;
8458 else if (bytes >= 8 && TARGET_POWERPC64
8459 /* 64-bit loads and stores require word-aligned
8461 && (align >= 64 || (!STRICT_ALIGNMENT && align >= 32)))
8465 gen_func.mov = gen_movdi;
8467 else if (TARGET_STRING && bytes > 4 && !TARGET_POWERPC64)
8468 { /* move up to 8 bytes at a time */
8469 move_bytes = (bytes > 8) ? 8 : bytes;
8470 gen_func.movmemsi = gen_movmemsi_2reg;
8472 else if (bytes >= 4 && !STRICT_ALIGNMENT)
8473 { /* move 4 bytes */
8476 gen_func.mov = gen_movsi;
8478 else if (bytes == 2 && !STRICT_ALIGNMENT)
8479 { /* move 2 bytes */
8482 gen_func.mov = gen_movhi;
8484 else if (TARGET_STRING && bytes > 1)
8485 { /* move up to 4 bytes at a time */
8486 move_bytes = (bytes > 4) ? 4 : bytes;
8487 gen_func.movmemsi = gen_movmemsi_1reg;
8489 else /* move 1 byte at a time */
8493 gen_func.mov = gen_movqi;
8496 src = adjust_address (orig_src, mode, offset);
8497 dest = adjust_address (orig_dest, mode, offset);
8499 if (mode != BLKmode)
8501 rtx tmp_reg = gen_reg_rtx (mode);
8503 emit_insn ((*gen_func.mov) (tmp_reg, src));
8504 stores[num_reg++] = (*gen_func.mov) (dest, tmp_reg);
8507 if (mode == BLKmode || num_reg >= MAX_MOVE_REG || bytes == move_bytes)
8510 for (i = 0; i < num_reg; i++)
8511 emit_insn (stores[i]);
8515 if (mode == BLKmode)
8517 /* Move the address into scratch registers. The movmemsi
8518 patterns require zero offset. */
8519 if (!REG_P (XEXP (src, 0)))
8521 rtx src_reg = copy_addr_to_reg (XEXP (src, 0));
8522 src = replace_equiv_address (src, src_reg);
8524 set_mem_size (src, GEN_INT (move_bytes));
8526 if (!REG_P (XEXP (dest, 0)))
8528 rtx dest_reg = copy_addr_to_reg (XEXP (dest, 0));
8529 dest = replace_equiv_address (dest, dest_reg);
8531 set_mem_size (dest, GEN_INT (move_bytes));
8533 emit_insn ((*gen_func.movmemsi) (dest, src,
8534 GEN_INT (move_bytes & 31),
8543 /* Return 1 if OP is suitable for a save_world call in prologue. It is
8544 known to be a PARALLEL. */
8546 save_world_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8551 int count = XVECLEN (op, 0);
8557 if (GET_CODE (XVECEXP (op, 0, index++)) != CLOBBER
8558 || GET_CODE (XVECEXP (op, 0, index++)) != USE)
8561 for (i=1; i <= 18; i++)
8563 elt = XVECEXP (op, 0, index++);
8564 if (GET_CODE (elt) != SET
8565 || GET_CODE (SET_DEST (elt)) != MEM
8566 || ! memory_operand (SET_DEST (elt), DFmode)
8567 || GET_CODE (SET_SRC (elt)) != REG
8568 || GET_MODE (SET_SRC (elt)) != DFmode)
8572 for (i=1; i <= 12; i++)
8574 elt = XVECEXP (op, 0, index++);
8575 if (GET_CODE (elt) != SET
8576 || GET_CODE (SET_DEST (elt)) != MEM
8577 || GET_CODE (SET_SRC (elt)) != REG
8578 || GET_MODE (SET_SRC (elt)) != V4SImode)
8582 for (i=1; i <= 19; i++)
8584 elt = XVECEXP (op, 0, index++);
8585 if (GET_CODE (elt) != SET
8586 || GET_CODE (SET_DEST (elt)) != MEM
8587 || ! memory_operand (SET_DEST (elt), Pmode)
8588 || GET_CODE (SET_SRC (elt)) != REG
8589 || GET_MODE (SET_SRC (elt)) != Pmode)
8593 elt = XVECEXP (op, 0, index++);
8594 if (GET_CODE (elt) != SET
8595 || GET_CODE (SET_DEST (elt)) != MEM
8596 || ! memory_operand (SET_DEST (elt), Pmode)
8597 || GET_CODE (SET_SRC (elt)) != REG
8598 || REGNO (SET_SRC (elt)) != CR2_REGNO
8599 || GET_MODE (SET_SRC (elt)) != Pmode)
8602 if (GET_CODE (XVECEXP (op, 0, index++)) != USE
8603 || GET_CODE (XVECEXP (op, 0, index++)) != USE
8604 || GET_CODE (XVECEXP (op, 0, index++)) != CLOBBER)
8609 /* Return 1 if OP is suitable for a save_world call in prologue. It is
8610 known to be a PARALLEL. */
8612 restore_world_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8617 int count = XVECLEN (op, 0);
8623 if (GET_CODE (XVECEXP (op, 0, index++)) != RETURN
8624 || GET_CODE (XVECEXP (op, 0, index++)) != USE
8625 || GET_CODE (XVECEXP (op, 0, index++)) != USE
8626 || GET_CODE (XVECEXP (op, 0, index++)) != CLOBBER)
8629 elt = XVECEXP (op, 0, index++);
8630 if (GET_CODE (elt) != SET
8631 || GET_CODE (SET_SRC (elt)) != MEM
8632 || ! memory_operand (SET_SRC (elt), Pmode)
8633 || GET_CODE (SET_DEST (elt)) != REG
8634 || REGNO (SET_DEST (elt)) != CR2_REGNO
8635 || GET_MODE (SET_DEST (elt)) != Pmode)
8638 for (i=1; i <= 19; i++)
8640 elt = XVECEXP (op, 0, index++);
8641 if (GET_CODE (elt) != SET
8642 || GET_CODE (SET_SRC (elt)) != MEM
8643 || ! memory_operand (SET_SRC (elt), Pmode)
8644 || GET_CODE (SET_DEST (elt)) != REG
8645 || GET_MODE (SET_DEST (elt)) != Pmode)
8649 for (i=1; i <= 12; i++)
8651 elt = XVECEXP (op, 0, index++);
8652 if (GET_CODE (elt) != SET
8653 || GET_CODE (SET_SRC (elt)) != MEM
8654 || GET_CODE (SET_DEST (elt)) != REG
8655 || GET_MODE (SET_DEST (elt)) != V4SImode)
8659 for (i=1; i <= 18; i++)
8661 elt = XVECEXP (op, 0, index++);
8662 if (GET_CODE (elt) != SET
8663 || GET_CODE (SET_SRC (elt)) != MEM
8664 || ! memory_operand (SET_SRC (elt), DFmode)
8665 || GET_CODE (SET_DEST (elt)) != REG
8666 || GET_MODE (SET_DEST (elt)) != DFmode)
8670 if (GET_CODE (XVECEXP (op, 0, index++)) != CLOBBER
8671 || GET_CODE (XVECEXP (op, 0, index++)) != CLOBBER
8672 || GET_CODE (XVECEXP (op, 0, index++)) != CLOBBER
8673 || GET_CODE (XVECEXP (op, 0, index++)) != CLOBBER
8674 || GET_CODE (XVECEXP (op, 0, index++)) != USE)
8680 /* Return 1 if OP is a load multiple operation. It is known to be a
8681 PARALLEL and the first section will be tested. */
8684 load_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8686 int count = XVECLEN (op, 0);
8687 unsigned int dest_regno;
8691 /* Perform a quick check so we don't blow up below. */
8693 || GET_CODE (XVECEXP (op, 0, 0)) != SET
8694 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
8695 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
8698 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
8699 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
8701 for (i = 1; i < count; i++)
8703 rtx elt = XVECEXP (op, 0, i);
8705 if (GET_CODE (elt) != SET
8706 || GET_CODE (SET_DEST (elt)) != REG
8707 || GET_MODE (SET_DEST (elt)) != SImode
8708 || REGNO (SET_DEST (elt)) != dest_regno + i
8709 || GET_CODE (SET_SRC (elt)) != MEM
8710 || GET_MODE (SET_SRC (elt)) != SImode
8711 || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
8712 || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
8713 || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
8714 || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != i * 4)
8721 /* Similar, but tests for store multiple. Here, the second vector element
8722 is a CLOBBER. It will be tested later. */
8725 store_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8727 int count = XVECLEN (op, 0) - 1;
8728 unsigned int src_regno;
8732 /* Perform a quick check so we don't blow up below. */
8734 || GET_CODE (XVECEXP (op, 0, 0)) != SET
8735 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
8736 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
8739 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
8740 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
8742 for (i = 1; i < count; i++)
8744 rtx elt = XVECEXP (op, 0, i + 1);
8746 if (GET_CODE (elt) != SET
8747 || GET_CODE (SET_SRC (elt)) != REG
8748 || GET_MODE (SET_SRC (elt)) != SImode
8749 || REGNO (SET_SRC (elt)) != src_regno + i
8750 || GET_CODE (SET_DEST (elt)) != MEM
8751 || GET_MODE (SET_DEST (elt)) != SImode
8752 || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
8753 || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
8754 || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
8755 || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != i * 4)
8762 /* Return a string to perform a load_multiple operation.
8763 operands[0] is the vector.
8764 operands[1] is the source address.
8765 operands[2] is the first destination register. */
8768 rs6000_output_load_multiple (rtx operands[3])
8770 /* We have to handle the case where the pseudo used to contain the address
8771 is assigned to one of the output registers. */
8773 int words = XVECLEN (operands[0], 0);
8776 if (XVECLEN (operands[0], 0) == 1)
8777 return "{l|lwz} %2,0(%1)";
8779 for (i = 0; i < words; i++)
8780 if (refers_to_regno_p (REGNO (operands[2]) + i,
8781 REGNO (operands[2]) + i + 1, operands[1], 0))
8785 xop[0] = GEN_INT (4 * (words-1));
8786 xop[1] = operands[1];
8787 xop[2] = operands[2];
8788 output_asm_insn ("{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,%0(%1)", xop);
8793 xop[0] = GEN_INT (4 * (words-1));
8794 xop[1] = operands[1];
8795 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
8796 output_asm_insn ("{cal %1,4(%1)|addi %1,%1,4}\n\t{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,-4(%1)", xop);
8801 for (j = 0; j < words; j++)
8804 xop[0] = GEN_INT (j * 4);
8805 xop[1] = operands[1];
8806 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + j);
8807 output_asm_insn ("{l|lwz} %2,%0(%1)", xop);
8809 xop[0] = GEN_INT (i * 4);
8810 xop[1] = operands[1];
8811 output_asm_insn ("{l|lwz} %1,%0(%1)", xop);
8816 return "{lsi|lswi} %2,%1,%N0";
8819 /* Return 1 for a parallel vrsave operation. */
8822 vrsave_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8824 int count = XVECLEN (op, 0);
8825 unsigned int dest_regno, src_regno;
8829 || GET_CODE (XVECEXP (op, 0, 0)) != SET
8830 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
8831 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC_VOLATILE)
8834 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
8835 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
8837 if (dest_regno != VRSAVE_REGNO
8838 && src_regno != VRSAVE_REGNO)
8841 for (i = 1; i < count; i++)
8843 rtx elt = XVECEXP (op, 0, i);
8845 if (GET_CODE (elt) != CLOBBER
8846 && GET_CODE (elt) != SET)
8853 /* Return 1 for an PARALLEL suitable for mfcr. */
8856 mfcr_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8858 int count = XVECLEN (op, 0);
8861 /* Perform a quick check so we don't blow up below. */
8863 || GET_CODE (XVECEXP (op, 0, 0)) != SET
8864 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC
8865 || XVECLEN (SET_SRC (XVECEXP (op, 0, 0)), 0) != 2)
8868 for (i = 0; i < count; i++)
8870 rtx exp = XVECEXP (op, 0, i);
8875 src_reg = XVECEXP (SET_SRC (exp), 0, 0);
8877 if (GET_CODE (src_reg) != REG
8878 || GET_MODE (src_reg) != CCmode
8879 || ! CR_REGNO_P (REGNO (src_reg)))
8882 if (GET_CODE (exp) != SET
8883 || GET_CODE (SET_DEST (exp)) != REG
8884 || GET_MODE (SET_DEST (exp)) != SImode
8885 || ! INT_REGNO_P (REGNO (SET_DEST (exp))))
8887 unspec = SET_SRC (exp);
8888 maskval = 1 << (MAX_CR_REGNO - REGNO (src_reg));
8890 if (GET_CODE (unspec) != UNSPEC
8891 || XINT (unspec, 1) != UNSPEC_MOVESI_FROM_CR
8892 || XVECLEN (unspec, 0) != 2
8893 || XVECEXP (unspec, 0, 0) != src_reg
8894 || GET_CODE (XVECEXP (unspec, 0, 1)) != CONST_INT
8895 || INTVAL (XVECEXP (unspec, 0, 1)) != maskval)
8901 /* Return 1 for an PARALLEL suitable for mtcrf. */
8904 mtcrf_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8906 int count = XVECLEN (op, 0);
8910 /* Perform a quick check so we don't blow up below. */
8912 || GET_CODE (XVECEXP (op, 0, 0)) != SET
8913 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC
8914 || XVECLEN (SET_SRC (XVECEXP (op, 0, 0)), 0) != 2)
8916 src_reg = XVECEXP (SET_SRC (XVECEXP (op, 0, 0)), 0, 0);
8918 if (GET_CODE (src_reg) != REG
8919 || GET_MODE (src_reg) != SImode
8920 || ! INT_REGNO_P (REGNO (src_reg)))
8923 for (i = 0; i < count; i++)
8925 rtx exp = XVECEXP (op, 0, i);
8929 if (GET_CODE (exp) != SET
8930 || GET_CODE (SET_DEST (exp)) != REG
8931 || GET_MODE (SET_DEST (exp)) != CCmode
8932 || ! CR_REGNO_P (REGNO (SET_DEST (exp))))
8934 unspec = SET_SRC (exp);
8935 maskval = 1 << (MAX_CR_REGNO - REGNO (SET_DEST (exp)));
8937 if (GET_CODE (unspec) != UNSPEC
8938 || XINT (unspec, 1) != UNSPEC_MOVESI_TO_CR
8939 || XVECLEN (unspec, 0) != 2
8940 || XVECEXP (unspec, 0, 0) != src_reg
8941 || GET_CODE (XVECEXP (unspec, 0, 1)) != CONST_INT
8942 || INTVAL (XVECEXP (unspec, 0, 1)) != maskval)
8948 /* Return 1 for an PARALLEL suitable for lmw. */
8951 lmw_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8953 int count = XVECLEN (op, 0);
8954 unsigned int dest_regno;
8956 unsigned int base_regno;
8957 HOST_WIDE_INT offset;
8960 /* Perform a quick check so we don't blow up below. */
8962 || GET_CODE (XVECEXP (op, 0, 0)) != SET
8963 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
8964 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
8967 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
8968 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
8971 || count != 32 - (int) dest_regno)
8974 if (legitimate_indirect_address_p (src_addr, 0))
8977 base_regno = REGNO (src_addr);
8978 if (base_regno == 0)
8981 else if (rs6000_legitimate_offset_address_p (SImode, src_addr, 0))
8983 offset = INTVAL (XEXP (src_addr, 1));
8984 base_regno = REGNO (XEXP (src_addr, 0));
8989 for (i = 0; i < count; i++)
8991 rtx elt = XVECEXP (op, 0, i);
8994 HOST_WIDE_INT newoffset;
8996 if (GET_CODE (elt) != SET
8997 || GET_CODE (SET_DEST (elt)) != REG
8998 || GET_MODE (SET_DEST (elt)) != SImode
8999 || REGNO (SET_DEST (elt)) != dest_regno + i
9000 || GET_CODE (SET_SRC (elt)) != MEM
9001 || GET_MODE (SET_SRC (elt)) != SImode)
9003 newaddr = XEXP (SET_SRC (elt), 0);
9004 if (legitimate_indirect_address_p (newaddr, 0))
9009 else if (rs6000_legitimate_offset_address_p (SImode, newaddr, 0))
9011 addr_reg = XEXP (newaddr, 0);
9012 newoffset = INTVAL (XEXP (newaddr, 1));
9016 if (REGNO (addr_reg) != base_regno
9017 || newoffset != offset + 4 * i)
9024 /* Return 1 for an PARALLEL suitable for stmw. */
9027 stmw_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
9029 int count = XVECLEN (op, 0);
9030 unsigned int src_regno;
9032 unsigned int base_regno;
9033 HOST_WIDE_INT offset;
9036 /* Perform a quick check so we don't blow up below. */
9038 || GET_CODE (XVECEXP (op, 0, 0)) != SET
9039 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
9040 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
9043 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
9044 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
9047 || count != 32 - (int) src_regno)
9050 if (legitimate_indirect_address_p (dest_addr, 0))
9053 base_regno = REGNO (dest_addr);
9054 if (base_regno == 0)
9057 else if (rs6000_legitimate_offset_address_p (SImode, dest_addr, 0))
9059 offset = INTVAL (XEXP (dest_addr, 1));
9060 base_regno = REGNO (XEXP (dest_addr, 0));
9065 for (i = 0; i < count; i++)
9067 rtx elt = XVECEXP (op, 0, i);
9070 HOST_WIDE_INT newoffset;
9072 if (GET_CODE (elt) != SET
9073 || GET_CODE (SET_SRC (elt)) != REG
9074 || GET_MODE (SET_SRC (elt)) != SImode
9075 || REGNO (SET_SRC (elt)) != src_regno + i
9076 || GET_CODE (SET_DEST (elt)) != MEM
9077 || GET_MODE (SET_DEST (elt)) != SImode)
9079 newaddr = XEXP (SET_DEST (elt), 0);
9080 if (legitimate_indirect_address_p (newaddr, 0))
9085 else if (rs6000_legitimate_offset_address_p (SImode, newaddr, 0))
9087 addr_reg = XEXP (newaddr, 0);
9088 newoffset = INTVAL (XEXP (newaddr, 1));
9092 if (REGNO (addr_reg) != base_regno
9093 || newoffset != offset + 4 * i)
9100 /* A validation routine: say whether CODE, a condition code, and MODE
9101 match. The other alternatives either don't make sense or should
9102 never be generated. */
9105 validate_condition_mode (enum rtx_code code, enum machine_mode mode)
9107 if ((GET_RTX_CLASS (code) != RTX_COMPARE
9108 && GET_RTX_CLASS (code) != RTX_COMM_COMPARE)
9109 || GET_MODE_CLASS (mode) != MODE_CC)
9112 /* These don't make sense. */
9113 if ((code == GT || code == LT || code == GE || code == LE)
9114 && mode == CCUNSmode)
9117 if ((code == GTU || code == LTU || code == GEU || code == LEU)
9118 && mode != CCUNSmode)
9121 if (mode != CCFPmode
9122 && (code == ORDERED || code == UNORDERED
9123 || code == UNEQ || code == LTGT
9124 || code == UNGT || code == UNLT
9125 || code == UNGE || code == UNLE))
9128 /* These should never be generated except for
9129 flag_finite_math_only. */
9130 if (mode == CCFPmode
9131 && ! flag_finite_math_only
9132 && (code == LE || code == GE
9133 || code == UNEQ || code == LTGT
9134 || code == UNGT || code == UNLT))
9137 /* These are invalid; the information is not there. */
9138 if (mode == CCEQmode
9139 && code != EQ && code != NE)
9143 /* Return 1 if OP is a comparison operation that is valid for a branch insn.
9144 We only check the opcode against the mode of the CC value here. */
9147 branch_comparison_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
9149 enum rtx_code code = GET_CODE (op);
9150 enum machine_mode cc_mode;
9152 if (!COMPARISON_P (op))
9155 cc_mode = GET_MODE (XEXP (op, 0));
9156 if (GET_MODE_CLASS (cc_mode) != MODE_CC)
9159 validate_condition_mode (code, cc_mode);
9164 /* Return 1 if OP is a comparison operation that is valid for a branch
9165 insn and which is true if the corresponding bit in the CC register
9169 branch_positive_comparison_operator (rtx op, enum machine_mode mode)
9173 if (! branch_comparison_operator (op, mode))
9176 code = GET_CODE (op);
9177 return (code == EQ || code == LT || code == GT
9178 || code == LTU || code == GTU
9179 || code == UNORDERED);
9182 /* Return 1 if OP is a comparison operation that is valid for an scc
9183 insn: it must be a positive comparison. */
9186 scc_comparison_operator (rtx op, enum machine_mode mode)
9188 return branch_positive_comparison_operator (op, mode);
9192 trap_comparison_operator (rtx op, enum machine_mode mode)
9194 if (mode != VOIDmode && mode != GET_MODE (op))
9196 return COMPARISON_P (op);
9200 boolean_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
9202 enum rtx_code code = GET_CODE (op);
9203 return (code == AND || code == IOR || code == XOR);
9207 boolean_or_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
9209 enum rtx_code code = GET_CODE (op);
9210 return (code == IOR || code == XOR);
9214 min_max_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
9216 enum rtx_code code = GET_CODE (op);
9217 return (code == SMIN || code == SMAX || code == UMIN || code == UMAX);
9220 /* Return 1 if ANDOP is a mask that has no bits on that are not in the
9221 mask required to convert the result of a rotate insn into a shift
9222 left insn of SHIFTOP bits. Both are known to be SImode CONST_INT. */
9225 includes_lshift_p (rtx shiftop, rtx andop)
9227 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
9229 shift_mask <<= INTVAL (shiftop);
9231 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
9234 /* Similar, but for right shift. */
9237 includes_rshift_p (rtx shiftop, rtx andop)
9239 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
9241 shift_mask >>= INTVAL (shiftop);
9243 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
9246 /* Return 1 if ANDOP is a mask suitable for use with an rldic insn
9247 to perform a left shift. It must have exactly SHIFTOP least
9248 significant 0's, then one or more 1's, then zero or more 0's. */
9251 includes_rldic_lshift_p (rtx shiftop, rtx andop)
9253 if (GET_CODE (andop) == CONST_INT)
9255 HOST_WIDE_INT c, lsb, shift_mask;
9258 if (c == 0 || c == ~0)
9262 shift_mask <<= INTVAL (shiftop);
9264 /* Find the least significant one bit. */
9267 /* It must coincide with the LSB of the shift mask. */
9268 if (-lsb != shift_mask)
9271 /* Invert to look for the next transition (if any). */
9274 /* Remove the low group of ones (originally low group of zeros). */
9277 /* Again find the lsb, and check we have all 1's above. */
9281 else if (GET_CODE (andop) == CONST_DOUBLE
9282 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
9284 HOST_WIDE_INT low, high, lsb;
9285 HOST_WIDE_INT shift_mask_low, shift_mask_high;
9287 low = CONST_DOUBLE_LOW (andop);
9288 if (HOST_BITS_PER_WIDE_INT < 64)
9289 high = CONST_DOUBLE_HIGH (andop);
9291 if ((low == 0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == 0))
9292 || (low == ~0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0)))
9295 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
9297 shift_mask_high = ~0;
9298 if (INTVAL (shiftop) > 32)
9299 shift_mask_high <<= INTVAL (shiftop) - 32;
9303 if (-lsb != shift_mask_high || INTVAL (shiftop) < 32)
9310 return high == -lsb;
9313 shift_mask_low = ~0;
9314 shift_mask_low <<= INTVAL (shiftop);
9318 if (-lsb != shift_mask_low)
9321 if (HOST_BITS_PER_WIDE_INT < 64)
9326 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
9329 return high == -lsb;
9333 return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
9339 /* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
9340 to perform a left shift. It must have SHIFTOP or more least
9341 significant 0's, with the remainder of the word 1's. */
9344 includes_rldicr_lshift_p (rtx shiftop, rtx andop)
9346 if (GET_CODE (andop) == CONST_INT)
9348 HOST_WIDE_INT c, lsb, shift_mask;
9351 shift_mask <<= INTVAL (shiftop);
9354 /* Find the least significant one bit. */
9357 /* It must be covered by the shift mask.
9358 This test also rejects c == 0. */
9359 if ((lsb & shift_mask) == 0)
9362 /* Check we have all 1's above the transition, and reject all 1's. */
9363 return c == -lsb && lsb != 1;
9365 else if (GET_CODE (andop) == CONST_DOUBLE
9366 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
9368 HOST_WIDE_INT low, lsb, shift_mask_low;
9370 low = CONST_DOUBLE_LOW (andop);
9372 if (HOST_BITS_PER_WIDE_INT < 64)
9374 HOST_WIDE_INT high, shift_mask_high;
9376 high = CONST_DOUBLE_HIGH (andop);
9380 shift_mask_high = ~0;
9381 if (INTVAL (shiftop) > 32)
9382 shift_mask_high <<= INTVAL (shiftop) - 32;
9386 if ((lsb & shift_mask_high) == 0)
9389 return high == -lsb;
9395 shift_mask_low = ~0;
9396 shift_mask_low <<= INTVAL (shiftop);
9400 if ((lsb & shift_mask_low) == 0)
9403 return low == -lsb && lsb != 1;
9409 /* Return 1 if operands will generate a valid arguments to rlwimi
9410 instruction for insert with right shift in 64-bit mode. The mask may
9411 not start on the first bit or stop on the last bit because wrap-around
9412 effects of instruction do not correspond to semantics of RTL insn. */
9415 insvdi_rshift_rlwimi_p (rtx sizeop, rtx startop, rtx shiftop)
9417 if (INTVAL (startop) < 64
9418 && INTVAL (startop) > 32
9419 && (INTVAL (sizeop) + INTVAL (startop) < 64)
9420 && (INTVAL (sizeop) + INTVAL (startop) > 33)
9421 && (INTVAL (sizeop) + INTVAL (startop) + INTVAL (shiftop) < 96)
9422 && (INTVAL (sizeop) + INTVAL (startop) + INTVAL (shiftop) >= 64)
9423 && (64 - (INTVAL (shiftop) & 63)) >= INTVAL (sizeop))
9429 /* Return 1 if REGNO (reg1) == REGNO (reg2) - 1 making them candidates
9430 for lfq and stfq insns iff the registers are hard registers. */
9433 registers_ok_for_quad_peep (rtx reg1, rtx reg2)
9435 /* We might have been passed a SUBREG. */
9436 if (GET_CODE (reg1) != REG || GET_CODE (reg2) != REG)
9439 /* We might have been passed non floating point registers. */
9440 if (!FP_REGNO_P (REGNO (reg1))
9441 || !FP_REGNO_P (REGNO (reg2)))
9444 return (REGNO (reg1) == REGNO (reg2) - 1);
9447 /* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
9448 addr1 and addr2 must be in consecutive memory locations
9449 (addr2 == addr1 + 8). */
9452 mems_ok_for_quad_peep (rtx mem1, rtx mem2)
9458 /* The mems cannot be volatile. */
9459 if (MEM_VOLATILE_P (mem1) || MEM_VOLATILE_P (mem2))
9462 addr1 = XEXP (mem1, 0);
9463 addr2 = XEXP (mem2, 0);
9465 /* Extract an offset (if used) from the first addr. */
9466 if (GET_CODE (addr1) == PLUS)
9468 /* If not a REG, return zero. */
9469 if (GET_CODE (XEXP (addr1, 0)) != REG)
9473 reg1 = REGNO (XEXP (addr1, 0));
9474 /* The offset must be constant! */
9475 if (GET_CODE (XEXP (addr1, 1)) != CONST_INT)
9477 offset1 = INTVAL (XEXP (addr1, 1));
9480 else if (GET_CODE (addr1) != REG)
9484 reg1 = REGNO (addr1);
9485 /* This was a simple (mem (reg)) expression. Offset is 0. */
9489 /* Make sure the second address is a (mem (plus (reg) (const_int)))
9490 or if it is (mem (reg)) then make sure that offset1 is -8 and the same
9491 register as addr1. */
9492 if (offset1 == -8 && GET_CODE (addr2) == REG && reg1 == REGNO (addr2))
9494 if (GET_CODE (addr2) != PLUS)
9497 if (GET_CODE (XEXP (addr2, 0)) != REG
9498 || GET_CODE (XEXP (addr2, 1)) != CONST_INT)
9501 if (reg1 != REGNO (XEXP (addr2, 0)))
9504 /* The offset for the second addr must be 8 more than the first addr. */
9505 if (INTVAL (XEXP (addr2, 1)) != offset1 + 8)
9508 /* All the tests passed. addr1 and addr2 are valid for lfq or stfq
9513 /* Return the register class of a scratch register needed to copy IN into
9514 or out of a register in CLASS in MODE. If it can be done directly,
9515 NO_REGS is returned. */
9518 secondary_reload_class (enum reg_class class,
9519 enum machine_mode mode ATTRIBUTE_UNUSED,
9524 if (TARGET_ELF || (DEFAULT_ABI == ABI_DARWIN
9526 && MACHOPIC_INDIRECT
9530 /* We cannot copy a symbolic operand directly into anything
9531 other than BASE_REGS for TARGET_ELF. So indicate that a
9532 register from BASE_REGS is needed as an intermediate
9535 On Darwin, pic addresses require a load from memory, which
9536 needs a base register. */
9537 if (class != BASE_REGS
9538 && (GET_CODE (in) == SYMBOL_REF
9539 || GET_CODE (in) == HIGH
9540 || GET_CODE (in) == LABEL_REF
9541 || GET_CODE (in) == CONST))
9545 if (GET_CODE (in) == REG)
9548 if (regno >= FIRST_PSEUDO_REGISTER)
9550 regno = true_regnum (in);
9551 if (regno >= FIRST_PSEUDO_REGISTER)
9555 else if (GET_CODE (in) == SUBREG)
9557 regno = true_regnum (in);
9558 if (regno >= FIRST_PSEUDO_REGISTER)
9564 /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
9566 if (class == GENERAL_REGS || class == BASE_REGS
9567 || (regno >= 0 && INT_REGNO_P (regno)))
9570 /* Constants, memory, and FP registers can go into FP registers. */
9571 if ((regno == -1 || FP_REGNO_P (regno))
9572 && (class == FLOAT_REGS || class == NON_SPECIAL_REGS))
9575 /* Memory, and AltiVec registers can go into AltiVec registers. */
9576 if ((regno == -1 || ALTIVEC_REGNO_P (regno))
9577 && class == ALTIVEC_REGS)
9580 /* We can copy among the CR registers. */
9581 if ((class == CR_REGS || class == CR0_REGS)
9582 && regno >= 0 && CR_REGNO_P (regno))
9585 /* Otherwise, we need GENERAL_REGS. */
9586 return GENERAL_REGS;
9589 /* Given a comparison operation, return the bit number in CCR to test. We
9590 know this is a valid comparison.
9592 SCC_P is 1 if this is for an scc. That means that %D will have been
9593 used instead of %C, so the bits will be in different places.
9595 Return -1 if OP isn't a valid comparison for some reason. */
9598 ccr_bit (rtx op, int scc_p)
9600 enum rtx_code code = GET_CODE (op);
9601 enum machine_mode cc_mode;
9606 if (!COMPARISON_P (op))
9611 if (GET_CODE (reg) != REG
9612 || ! CR_REGNO_P (REGNO (reg)))
9615 cc_mode = GET_MODE (reg);
9616 cc_regnum = REGNO (reg);
9617 base_bit = 4 * (cc_regnum - CR0_REGNO);
9619 validate_condition_mode (code, cc_mode);
9621 /* When generating a sCOND operation, only positive conditions are
9623 if (scc_p && code != EQ && code != GT && code != LT && code != UNORDERED
9624 && code != GTU && code != LTU)
9630 return scc_p ? base_bit + 3 : base_bit + 2;
9632 return base_bit + 2;
9633 case GT: case GTU: case UNLE:
9634 return base_bit + 1;
9635 case LT: case LTU: case UNGE:
9637 case ORDERED: case UNORDERED:
9638 return base_bit + 3;
9641 /* If scc, we will have done a cror to put the bit in the
9642 unordered position. So test that bit. For integer, this is ! LT
9643 unless this is an scc insn. */
9644 return scc_p ? base_bit + 3 : base_bit;
9647 return scc_p ? base_bit + 3 : base_bit + 1;
9654 /* Return the GOT register. */
9657 rs6000_got_register (rtx value ATTRIBUTE_UNUSED)
9659 /* The second flow pass currently (June 1999) can't update
9660 regs_ever_live without disturbing other parts of the compiler, so
9661 update it here to make the prolog/epilogue code happy. */
9662 if (no_new_pseudos && ! regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM])
9663 regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
9665 current_function_uses_pic_offset_table = 1;
9667 return pic_offset_table_rtx;
9670 /* Function to init struct machine_function.
9671 This will be called, via a pointer variable,
9672 from push_function_context. */
9674 static struct machine_function *
9675 rs6000_init_machine_status (void)
9677 return ggc_alloc_cleared (sizeof (machine_function));
9680 /* These macros test for integers and extract the low-order bits. */
9682 ((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE) \
9683 && GET_MODE (X) == VOIDmode)
9685 #define INT_LOWPART(X) \
9686 (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
9692 unsigned long val = INT_LOWPART (op);
9694 /* If the high bit is zero, the value is the first 1 bit we find
9696 if ((val & 0x80000000) == 0)
9698 if ((val & 0xffffffff) == 0)
9702 while (((val <<= 1) & 0x80000000) == 0)
9707 /* If the high bit is set and the low bit is not, or the mask is all
9708 1's, the value is zero. */
9709 if ((val & 1) == 0 || (val & 0xffffffff) == 0xffffffff)
9712 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
9715 while (((val >>= 1) & 1) != 0)
9725 unsigned long val = INT_LOWPART (op);
9727 /* If the low bit is zero, the value is the first 1 bit we find from
9731 if ((val & 0xffffffff) == 0)
9735 while (((val >>= 1) & 1) == 0)
9741 /* If the low bit is set and the high bit is not, or the mask is all
9742 1's, the value is 31. */
9743 if ((val & 0x80000000) == 0 || (val & 0xffffffff) == 0xffffffff)
9746 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
9749 while (((val <<= 1) & 0x80000000) != 0)
9755 /* Locate some local-dynamic symbol still in use by this function
9756 so that we can print its name in some tls_ld pattern. */
9759 rs6000_get_some_local_dynamic_name (void)
9763 if (cfun->machine->some_ld_name)
9764 return cfun->machine->some_ld_name;
9766 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
9768 && for_each_rtx (&PATTERN (insn),
9769 rs6000_get_some_local_dynamic_name_1, 0))
9770 return cfun->machine->some_ld_name;
9775 /* Helper function for rs6000_get_some_local_dynamic_name. */
9778 rs6000_get_some_local_dynamic_name_1 (rtx *px, void *data ATTRIBUTE_UNUSED)
9782 if (GET_CODE (x) == SYMBOL_REF)
9784 const char *str = XSTR (x, 0);
9785 if (SYMBOL_REF_TLS_MODEL (x) == TLS_MODEL_LOCAL_DYNAMIC)
9787 cfun->machine->some_ld_name = str;
9795 /* Write out a function code label. */
9798 rs6000_output_function_entry (FILE *file, const char *fname)
9800 if (fname[0] != '.')
9802 switch (DEFAULT_ABI)
9811 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "L.");
9820 RS6000_OUTPUT_BASENAME (file, fname);
9822 assemble_name (file, fname);
9825 /* Print an operand. Recognize special options, documented below. */
9828 #define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
9829 #define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
9831 #define SMALL_DATA_RELOC "sda21"
9832 #define SMALL_DATA_REG 0
9836 print_operand (FILE *file, rtx x, int code)
9840 unsigned HOST_WIDE_INT uval;
9845 /* Write out an instruction after the call which may be replaced
9846 with glue code by the loader. This depends on the AIX version. */
9847 asm_fprintf (file, RS6000_CALL_GLUE);
9850 /* %a is output_address. */
9853 /* If X is a constant integer whose low-order 5 bits are zero,
9854 write 'l'. Otherwise, write 'r'. This is a kludge to fix a bug
9855 in the AIX assembler where "sri" with a zero shift count
9856 writes a trash instruction. */
9857 if (GET_CODE (x) == CONST_INT && (INTVAL (x) & 31) == 0)
9864 /* If constant, low-order 16 bits of constant, unsigned.
9865 Otherwise, write normally. */
9867 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 0xffff);
9869 print_operand (file, x, 0);
9873 /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
9874 for 64-bit mask direction. */
9875 putc (((INT_LOWPART(x) & 1) == 0 ? 'r' : 'l'), file);
9878 /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
9882 /* X is a CR register. Print the number of the GT bit of the CR. */
9883 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
9884 output_operand_lossage ("invalid %%E value");
9886 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 1);
9890 /* Like 'J' but get to the EQ bit. */
9891 if (GET_CODE (x) != REG)
9894 /* Bit 1 is EQ bit. */
9895 i = 4 * (REGNO (x) - CR0_REGNO) + 2;
9897 /* If we want bit 31, write a shift count of zero, not 32. */
9898 fprintf (file, "%d", i == 31 ? 0 : i + 1);
9902 /* X is a CR register. Print the number of the EQ bit of the CR */
9903 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
9904 output_operand_lossage ("invalid %%E value");
9906 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 2);
9910 /* X is a CR register. Print the shift count needed to move it
9911 to the high-order four bits. */
9912 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
9913 output_operand_lossage ("invalid %%f value");
9915 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO));
9919 /* Similar, but print the count for the rotate in the opposite
9921 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
9922 output_operand_lossage ("invalid %%F value");
9924 fprintf (file, "%d", 32 - 4 * (REGNO (x) - CR0_REGNO));
9928 /* X is a constant integer. If it is negative, print "m",
9929 otherwise print "z". This is to make an aze or ame insn. */
9930 if (GET_CODE (x) != CONST_INT)
9931 output_operand_lossage ("invalid %%G value");
9932 else if (INTVAL (x) >= 0)
9939 /* If constant, output low-order five bits. Otherwise, write
9942 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 31);
9944 print_operand (file, x, 0);
9948 /* If constant, output low-order six bits. Otherwise, write
9951 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 63);
9953 print_operand (file, x, 0);
9957 /* Print `i' if this is a constant, else nothing. */
9963 /* Write the bit number in CCR for jump. */
9966 output_operand_lossage ("invalid %%j code");
9968 fprintf (file, "%d", i);
9972 /* Similar, but add one for shift count in rlinm for scc and pass
9973 scc flag to `ccr_bit'. */
9976 output_operand_lossage ("invalid %%J code");
9978 /* If we want bit 31, write a shift count of zero, not 32. */
9979 fprintf (file, "%d", i == 31 ? 0 : i + 1);
9983 /* X must be a constant. Write the 1's complement of the
9986 output_operand_lossage ("invalid %%k value");
9988 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~ INT_LOWPART (x));
9992 /* X must be a symbolic constant on ELF. Write an
9993 expression suitable for an 'addi' that adds in the low 16
9995 if (GET_CODE (x) != CONST)
9997 print_operand_address (file, x);
10002 if (GET_CODE (XEXP (x, 0)) != PLUS
10003 || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
10004 && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
10005 || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
10006 output_operand_lossage ("invalid %%K value");
10007 print_operand_address (file, XEXP (XEXP (x, 0), 0));
10008 fputs ("@l", file);
10009 /* For GNU as, there must be a non-alphanumeric character
10010 between 'l' and the number. The '-' is added by
10011 print_operand() already. */
10012 if (INTVAL (XEXP (XEXP (x, 0), 1)) >= 0)
10014 print_operand (file, XEXP (XEXP (x, 0), 1), 0);
10018 /* %l is output_asm_label. */
10021 /* Write second word of DImode or DFmode reference. Works on register
10022 or non-indexed memory only. */
10023 if (GET_CODE (x) == REG)
10024 fprintf (file, "%s", reg_names[REGNO (x) + 1]);
10025 else if (GET_CODE (x) == MEM)
10027 /* Handle possible auto-increment. Since it is pre-increment and
10028 we have already done it, we can just use an offset of word. */
10029 if (GET_CODE (XEXP (x, 0)) == PRE_INC
10030 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
10031 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
10034 output_address (XEXP (adjust_address_nv (x, SImode,
10038 if (small_data_operand (x, GET_MODE (x)))
10039 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
10040 reg_names[SMALL_DATA_REG]);
10045 /* MB value for a mask operand. */
10046 if (! mask_operand (x, SImode))
10047 output_operand_lossage ("invalid %%m value");
10049 fprintf (file, "%d", extract_MB (x));
10053 /* ME value for a mask operand. */
10054 if (! mask_operand (x, SImode))
10055 output_operand_lossage ("invalid %%M value");
10057 fprintf (file, "%d", extract_ME (x));
10060 /* %n outputs the negative of its operand. */
10063 /* Write the number of elements in the vector times 4. */
10064 if (GET_CODE (x) != PARALLEL)
10065 output_operand_lossage ("invalid %%N value");
10067 fprintf (file, "%d", XVECLEN (x, 0) * 4);
10071 /* Similar, but subtract 1 first. */
10072 if (GET_CODE (x) != PARALLEL)
10073 output_operand_lossage ("invalid %%O value");
10075 fprintf (file, "%d", (XVECLEN (x, 0) - 1) * 4);
10079 /* X is a CONST_INT that is a power of two. Output the logarithm. */
10081 || INT_LOWPART (x) < 0
10082 || (i = exact_log2 (INT_LOWPART (x))) < 0)
10083 output_operand_lossage ("invalid %%p value");
10085 fprintf (file, "%d", i);
10089 /* The operand must be an indirect memory reference. The result
10090 is the register name. */
10091 if (GET_CODE (x) != MEM || GET_CODE (XEXP (x, 0)) != REG
10092 || REGNO (XEXP (x, 0)) >= 32)
10093 output_operand_lossage ("invalid %%P value");
10095 fprintf (file, "%s", reg_names[REGNO (XEXP (x, 0))]);
10099 /* This outputs the logical code corresponding to a boolean
10100 expression. The expression may have one or both operands
10101 negated (if one, only the first one). For condition register
10102 logical operations, it will also treat the negated
10103 CR codes as NOTs, but not handle NOTs of them. */
10105 const char *const *t = 0;
10107 enum rtx_code code = GET_CODE (x);
10108 static const char * const tbl[3][3] = {
10109 { "and", "andc", "nor" },
10110 { "or", "orc", "nand" },
10111 { "xor", "eqv", "xor" } };
10115 else if (code == IOR)
10117 else if (code == XOR)
10120 output_operand_lossage ("invalid %%q value");
10122 if (GET_CODE (XEXP (x, 0)) != NOT)
10126 if (GET_CODE (XEXP (x, 1)) == NOT)
10144 /* X is a CR register. Print the mask for `mtcrf'. */
10145 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
10146 output_operand_lossage ("invalid %%R value");
10148 fprintf (file, "%d", 128 >> (REGNO (x) - CR0_REGNO));
10152 /* Low 5 bits of 32 - value */
10154 output_operand_lossage ("invalid %%s value");
10156 fprintf (file, HOST_WIDE_INT_PRINT_DEC, (32 - INT_LOWPART (x)) & 31);
10160 /* PowerPC64 mask position. All 0's is excluded.
10161 CONST_INT 32-bit mask is considered sign-extended so any
10162 transition must occur within the CONST_INT, not on the boundary. */
10163 if (! mask64_operand (x, DImode))
10164 output_operand_lossage ("invalid %%S value");
10166 uval = INT_LOWPART (x);
10168 if (uval & 1) /* Clear Left */
10170 #if HOST_BITS_PER_WIDE_INT > 64
10171 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
10175 else /* Clear Right */
10178 #if HOST_BITS_PER_WIDE_INT > 64
10179 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
10187 fprintf (file, "%d", i);
10191 /* Like 'J' but get to the OVERFLOW/UNORDERED bit. */
10192 if (GET_CODE (x) != REG || GET_MODE (x) != CCmode)
10195 /* Bit 3 is OV bit. */
10196 i = 4 * (REGNO (x) - CR0_REGNO) + 3;
10198 /* If we want bit 31, write a shift count of zero, not 32. */
10199 fprintf (file, "%d", i == 31 ? 0 : i + 1);
10203 /* Print the symbolic name of a branch target register. */
10204 if (GET_CODE (x) != REG || (REGNO (x) != LINK_REGISTER_REGNUM
10205 && REGNO (x) != COUNT_REGISTER_REGNUM))
10206 output_operand_lossage ("invalid %%T value");
10207 else if (REGNO (x) == LINK_REGISTER_REGNUM)
10208 fputs (TARGET_NEW_MNEMONICS ? "lr" : "r", file);
10210 fputs ("ctr", file);
10214 /* High-order 16 bits of constant for use in unsigned operand. */
10216 output_operand_lossage ("invalid %%u value");
10218 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
10219 (INT_LOWPART (x) >> 16) & 0xffff);
10223 /* High-order 16 bits of constant for use in signed operand. */
10225 output_operand_lossage ("invalid %%v value");
10227 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
10228 (INT_LOWPART (x) >> 16) & 0xffff);
10232 /* Print `u' if this has an auto-increment or auto-decrement. */
10233 if (GET_CODE (x) == MEM
10234 && (GET_CODE (XEXP (x, 0)) == PRE_INC
10235 || GET_CODE (XEXP (x, 0)) == PRE_DEC))
10240 /* Print the trap code for this operand. */
10241 switch (GET_CODE (x))
10244 fputs ("eq", file); /* 4 */
10247 fputs ("ne", file); /* 24 */
10250 fputs ("lt", file); /* 16 */
10253 fputs ("le", file); /* 20 */
10256 fputs ("gt", file); /* 8 */
10259 fputs ("ge", file); /* 12 */
10262 fputs ("llt", file); /* 2 */
10265 fputs ("lle", file); /* 6 */
10268 fputs ("lgt", file); /* 1 */
10271 fputs ("lge", file); /* 5 */
10279 /* If constant, low-order 16 bits of constant, signed. Otherwise, write
10282 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
10283 ((INT_LOWPART (x) & 0xffff) ^ 0x8000) - 0x8000);
10285 print_operand (file, x, 0);
10289 /* MB value for a PowerPC64 rldic operand. */
10290 val = (GET_CODE (x) == CONST_INT
10291 ? INTVAL (x) : CONST_DOUBLE_HIGH (x));
10296 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
10297 if ((val <<= 1) < 0)
10300 #if HOST_BITS_PER_WIDE_INT == 32
10301 if (GET_CODE (x) == CONST_INT && i >= 0)
10302 i += 32; /* zero-extend high-part was all 0's */
10303 else if (GET_CODE (x) == CONST_DOUBLE && i == 32)
10305 val = CONST_DOUBLE_LOW (x);
10312 for ( ; i < 64; i++)
10313 if ((val <<= 1) < 0)
10318 fprintf (file, "%d", i + 1);
10322 if (GET_CODE (x) == MEM
10323 && legitimate_indexed_address_p (XEXP (x, 0), 0))
10328 /* Like 'L', for third word of TImode */
10329 if (GET_CODE (x) == REG)
10330 fprintf (file, "%s", reg_names[REGNO (x) + 2]);
10331 else if (GET_CODE (x) == MEM)
10333 if (GET_CODE (XEXP (x, 0)) == PRE_INC
10334 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
10335 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
10337 output_address (XEXP (adjust_address_nv (x, SImode, 8), 0));
10338 if (small_data_operand (x, GET_MODE (x)))
10339 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
10340 reg_names[SMALL_DATA_REG]);
10345 /* X is a SYMBOL_REF. Write out the name preceded by a
10346 period and without any trailing data in brackets. Used for function
10347 names. If we are configured for System V (or the embedded ABI) on
10348 the PowerPC, do not emit the period, since those systems do not use
10349 TOCs and the like. */
10350 if (GET_CODE (x) != SYMBOL_REF)
10353 /* Mark the decl as referenced so that cgraph will output the function. */
10354 if (SYMBOL_REF_DECL (x))
10355 mark_decl_referenced (SYMBOL_REF_DECL (x));
10357 /* For macho, check to see if we need a stub. */
10360 const char *name = XSTR (x, 0);
10362 if (MACHOPIC_INDIRECT
10363 && machopic_classify_symbol (x) == MACHOPIC_UNDEFINED_FUNCTION)
10364 name = machopic_indirection_name (x, /*stub_p=*/true);
10366 assemble_name (file, name);
10368 else if (!DOT_SYMBOLS)
10369 assemble_name (file, XSTR (x, 0));
10371 rs6000_output_function_entry (file, XSTR (x, 0));
10375 /* Like 'L', for last word of TImode. */
10376 if (GET_CODE (x) == REG)
10377 fprintf (file, "%s", reg_names[REGNO (x) + 3]);
10378 else if (GET_CODE (x) == MEM)
10380 if (GET_CODE (XEXP (x, 0)) == PRE_INC
10381 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
10382 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
10384 output_address (XEXP (adjust_address_nv (x, SImode, 12), 0));
10385 if (small_data_operand (x, GET_MODE (x)))
10386 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
10387 reg_names[SMALL_DATA_REG]);
10391 /* Print AltiVec or SPE memory operand. */
10396 if (GET_CODE (x) != MEM)
10403 /* Handle [reg]. */
10404 if (GET_CODE (tmp) == REG)
10406 fprintf (file, "0(%s)", reg_names[REGNO (tmp)]);
10409 /* Handle [reg+UIMM]. */
10410 else if (GET_CODE (tmp) == PLUS &&
10411 GET_CODE (XEXP (tmp, 1)) == CONST_INT)
10415 if (GET_CODE (XEXP (tmp, 0)) != REG)
10418 x = INTVAL (XEXP (tmp, 1));
10419 fprintf (file, "%d(%s)", x, reg_names[REGNO (XEXP (tmp, 0))]);
10423 /* Fall through. Must be [reg+reg]. */
10425 if (GET_CODE (tmp) == REG)
10426 fprintf (file, "0,%s", reg_names[REGNO (tmp)]);
10427 else if (GET_CODE (tmp) == PLUS && GET_CODE (XEXP (tmp, 1)) == REG)
10429 if (REGNO (XEXP (tmp, 0)) == 0)
10430 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 1)) ],
10431 reg_names[ REGNO (XEXP (tmp, 0)) ]);
10433 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 0)) ],
10434 reg_names[ REGNO (XEXP (tmp, 1)) ]);
10442 if (GET_CODE (x) == REG)
10443 fprintf (file, "%s", reg_names[REGNO (x)]);
10444 else if (GET_CODE (x) == MEM)
10446 /* We need to handle PRE_INC and PRE_DEC here, since we need to
10447 know the width from the mode. */
10448 if (GET_CODE (XEXP (x, 0)) == PRE_INC)
10449 fprintf (file, "%d(%s)", GET_MODE_SIZE (GET_MODE (x)),
10450 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
10451 else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
10452 fprintf (file, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x)),
10453 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
10455 output_address (XEXP (x, 0));
10458 output_addr_const (file, x);
10462 assemble_name (file, rs6000_get_some_local_dynamic_name ());
10466 output_operand_lossage ("invalid %%xn code");
10470 /* Print the address of an operand. */
10473 print_operand_address (FILE *file, rtx x)
10475 if (GET_CODE (x) == REG)
10476 fprintf (file, "0(%s)", reg_names[ REGNO (x) ]);
10477 else if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST
10478 || GET_CODE (x) == LABEL_REF)
10480 output_addr_const (file, x);
10481 if (small_data_operand (x, GET_MODE (x)))
10482 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
10483 reg_names[SMALL_DATA_REG]);
10484 else if (TARGET_TOC)
10487 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG)
10489 if (REGNO (XEXP (x, 0)) == 0)
10490 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 1)) ],
10491 reg_names[ REGNO (XEXP (x, 0)) ]);
10493 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 0)) ],
10494 reg_names[ REGNO (XEXP (x, 1)) ]);
10496 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
10497 fprintf (file, HOST_WIDE_INT_PRINT_DEC "(%s)",
10498 INTVAL (XEXP (x, 1)), reg_names[ REGNO (XEXP (x, 0)) ]);
10500 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
10501 && CONSTANT_P (XEXP (x, 1)))
10503 output_addr_const (file, XEXP (x, 1));
10504 fprintf (file, "@l(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
10508 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
10509 && CONSTANT_P (XEXP (x, 1)))
10511 fprintf (file, "lo16(");
10512 output_addr_const (file, XEXP (x, 1));
10513 fprintf (file, ")(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
10516 else if (legitimate_constant_pool_address_p (x))
10518 if (TARGET_AIX && (!TARGET_ELF || !TARGET_MINIMAL_TOC))
10520 rtx contains_minus = XEXP (x, 1);
10524 /* Find the (minus (sym) (toc)) buried in X, and temporarily
10525 turn it into (sym) for output_addr_const. */
10526 while (GET_CODE (XEXP (contains_minus, 0)) != MINUS)
10527 contains_minus = XEXP (contains_minus, 0);
10529 minus = XEXP (contains_minus, 0);
10530 symref = XEXP (minus, 0);
10531 XEXP (contains_minus, 0) = symref;
10536 name = XSTR (symref, 0);
10537 newname = alloca (strlen (name) + sizeof ("@toc"));
10538 strcpy (newname, name);
10539 strcat (newname, "@toc");
10540 XSTR (symref, 0) = newname;
10542 output_addr_const (file, XEXP (x, 1));
10544 XSTR (symref, 0) = name;
10545 XEXP (contains_minus, 0) = minus;
10548 output_addr_const (file, XEXP (x, 1));
10550 fprintf (file, "(%s)", reg_names[REGNO (XEXP (x, 0))]);
10556 /* Target hook for assembling integer objects. The PowerPC version has
10557 to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
10558 is defined. It also needs to handle DI-mode objects on 64-bit
10562 rs6000_assemble_integer (rtx x, unsigned int size, int aligned_p)
10564 #ifdef RELOCATABLE_NEEDS_FIXUP
10565 /* Special handling for SI values. */
10566 if (size == 4 && aligned_p)
10568 extern int in_toc_section (void);
10569 static int recurse = 0;
10571 /* For -mrelocatable, we mark all addresses that need to be fixed up
10572 in the .fixup section. */
10573 if (TARGET_RELOCATABLE
10574 && !in_toc_section ()
10575 && !in_text_section ()
10576 && !in_unlikely_text_section ()
10578 && GET_CODE (x) != CONST_INT
10579 && GET_CODE (x) != CONST_DOUBLE
10585 ASM_GENERATE_INTERNAL_LABEL (buf, "LCP", fixuplabelno);
10587 ASM_OUTPUT_LABEL (asm_out_file, buf);
10588 fprintf (asm_out_file, "\t.long\t(");
10589 output_addr_const (asm_out_file, x);
10590 fprintf (asm_out_file, ")@fixup\n");
10591 fprintf (asm_out_file, "\t.section\t\".fixup\",\"aw\"\n");
10592 ASM_OUTPUT_ALIGN (asm_out_file, 2);
10593 fprintf (asm_out_file, "\t.long\t");
10594 assemble_name (asm_out_file, buf);
10595 fprintf (asm_out_file, "\n\t.previous\n");
10599 /* Remove initial .'s to turn a -mcall-aixdesc function
10600 address into the address of the descriptor, not the function
10602 else if (GET_CODE (x) == SYMBOL_REF
10603 && XSTR (x, 0)[0] == '.'
10604 && DEFAULT_ABI == ABI_AIX)
10606 const char *name = XSTR (x, 0);
10607 while (*name == '.')
10610 fprintf (asm_out_file, "\t.long\t%s\n", name);
10614 #endif /* RELOCATABLE_NEEDS_FIXUP */
10615 return default_assemble_integer (x, size, aligned_p);
10618 #ifdef HAVE_GAS_HIDDEN
10619 /* Emit an assembler directive to set symbol visibility for DECL to
10620 VISIBILITY_TYPE. */
10623 rs6000_assemble_visibility (tree decl, int vis)
10625 /* Functions need to have their entry point symbol visibility set as
10626 well as their descriptor symbol visibility. */
10627 if (DEFAULT_ABI == ABI_AIX
10629 && TREE_CODE (decl) == FUNCTION_DECL)
10631 static const char * const visibility_types[] = {
10632 NULL, "internal", "hidden", "protected"
10635 const char *name, *type;
10637 name = ((* targetm.strip_name_encoding)
10638 (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl))));
10639 type = visibility_types[vis];
10641 fprintf (asm_out_file, "\t.%s\t%s\n", type, name);
10642 fprintf (asm_out_file, "\t.%s\t.%s\n", type, name);
10645 default_assemble_visibility (decl, vis);
10650 rs6000_reverse_condition (enum machine_mode mode, enum rtx_code code)
10652 /* Reversal of FP compares takes care -- an ordered compare
10653 becomes an unordered compare and vice versa. */
10654 if (mode == CCFPmode
10655 && (!flag_finite_math_only
10656 || code == UNLT || code == UNLE || code == UNGT || code == UNGE
10657 || code == UNEQ || code == LTGT))
10658 return reverse_condition_maybe_unordered (code);
10660 return reverse_condition (code);
10663 /* Generate a compare for CODE. Return a brand-new rtx that
10664 represents the result of the compare. */
10667 rs6000_generate_compare (enum rtx_code code)
10669 enum machine_mode comp_mode;
10670 rtx compare_result;
10672 if (rs6000_compare_fp_p)
10673 comp_mode = CCFPmode;
10674 else if (code == GTU || code == LTU
10675 || code == GEU || code == LEU)
10676 comp_mode = CCUNSmode;
10678 comp_mode = CCmode;
10680 /* First, the compare. */
10681 compare_result = gen_reg_rtx (comp_mode);
10683 /* SPE FP compare instructions on the GPRs. Yuck! */
10684 if ((TARGET_E500 && !TARGET_FPRS && TARGET_HARD_FLOAT)
10685 && rs6000_compare_fp_p)
10687 rtx cmp, or1, or2, or_result, compare_result2;
10689 /* Note: The E500 comparison instructions set the GT bit (x +
10690 1), on success. This explains the mess. */
10694 case EQ: case UNEQ: case NE: case LTGT:
10695 cmp = flag_finite_math_only
10696 ? gen_tstsfeq_gpr (compare_result, rs6000_compare_op0,
10697 rs6000_compare_op1)
10698 : gen_cmpsfeq_gpr (compare_result, rs6000_compare_op0,
10699 rs6000_compare_op1);
10701 case GT: case GTU: case UNGT: case UNGE: case GE: case GEU:
10702 cmp = flag_finite_math_only
10703 ? gen_tstsfgt_gpr (compare_result, rs6000_compare_op0,
10704 rs6000_compare_op1)
10705 : gen_cmpsfgt_gpr (compare_result, rs6000_compare_op0,
10706 rs6000_compare_op1);
10708 case LT: case LTU: case UNLT: case UNLE: case LE: case LEU:
10709 cmp = flag_finite_math_only
10710 ? gen_tstsflt_gpr (compare_result, rs6000_compare_op0,
10711 rs6000_compare_op1)
10712 : gen_cmpsflt_gpr (compare_result, rs6000_compare_op0,
10713 rs6000_compare_op1);
10719 /* Synthesize LE and GE from LT/GT || EQ. */
10720 if (code == LE || code == GE || code == LEU || code == GEU)
10726 case LE: code = LT; break;
10727 case GE: code = GT; break;
10728 case LEU: code = LT; break;
10729 case GEU: code = GT; break;
10733 or1 = gen_reg_rtx (SImode);
10734 or2 = gen_reg_rtx (SImode);
10735 or_result = gen_reg_rtx (CCEQmode);
10736 compare_result2 = gen_reg_rtx (CCFPmode);
10739 cmp = flag_finite_math_only
10740 ? gen_tstsfeq_gpr (compare_result2, rs6000_compare_op0,
10741 rs6000_compare_op1)
10742 : gen_cmpsfeq_gpr (compare_result2, rs6000_compare_op0,
10743 rs6000_compare_op1);
10746 or1 = gen_rtx_GT (SImode, compare_result, const0_rtx);
10747 or2 = gen_rtx_GT (SImode, compare_result2, const0_rtx);
10749 /* OR them together. */
10750 cmp = gen_rtx_SET (VOIDmode, or_result,
10751 gen_rtx_COMPARE (CCEQmode,
10752 gen_rtx_IOR (SImode, or1, or2),
10754 compare_result = or_result;
10759 if (code == NE || code == LTGT)
10768 emit_insn (gen_rtx_SET (VOIDmode, compare_result,
10769 gen_rtx_COMPARE (comp_mode,
10770 rs6000_compare_op0,
10771 rs6000_compare_op1)));
10773 /* Some kinds of FP comparisons need an OR operation;
10774 under flag_finite_math_only we don't bother. */
10775 if (rs6000_compare_fp_p
10776 && ! flag_finite_math_only
10777 && ! (TARGET_HARD_FLOAT && TARGET_E500 && !TARGET_FPRS)
10778 && (code == LE || code == GE
10779 || code == UNEQ || code == LTGT
10780 || code == UNGT || code == UNLT))
10782 enum rtx_code or1, or2;
10783 rtx or1_rtx, or2_rtx, compare2_rtx;
10784 rtx or_result = gen_reg_rtx (CCEQmode);
10788 case LE: or1 = LT; or2 = EQ; break;
10789 case GE: or1 = GT; or2 = EQ; break;
10790 case UNEQ: or1 = UNORDERED; or2 = EQ; break;
10791 case LTGT: or1 = LT; or2 = GT; break;
10792 case UNGT: or1 = UNORDERED; or2 = GT; break;
10793 case UNLT: or1 = UNORDERED; or2 = LT; break;
10796 validate_condition_mode (or1, comp_mode);
10797 validate_condition_mode (or2, comp_mode);
10798 or1_rtx = gen_rtx_fmt_ee (or1, SImode, compare_result, const0_rtx);
10799 or2_rtx = gen_rtx_fmt_ee (or2, SImode, compare_result, const0_rtx);
10800 compare2_rtx = gen_rtx_COMPARE (CCEQmode,
10801 gen_rtx_IOR (SImode, or1_rtx, or2_rtx),
10803 emit_insn (gen_rtx_SET (VOIDmode, or_result, compare2_rtx));
10805 compare_result = or_result;
10809 validate_condition_mode (code, GET_MODE (compare_result));
10811 return gen_rtx_fmt_ee (code, VOIDmode, compare_result, const0_rtx);
10815 /* Emit the RTL for an sCOND pattern. */
10818 rs6000_emit_sCOND (enum rtx_code code, rtx result)
10821 enum machine_mode op_mode;
10822 enum rtx_code cond_code;
10824 condition_rtx = rs6000_generate_compare (code);
10825 cond_code = GET_CODE (condition_rtx);
10827 if (TARGET_E500 && rs6000_compare_fp_p
10828 && !TARGET_FPRS && TARGET_HARD_FLOAT)
10832 PUT_MODE (condition_rtx, SImode);
10833 t = XEXP (condition_rtx, 0);
10835 if (cond_code != NE && cond_code != EQ)
10838 if (cond_code == NE)
10839 emit_insn (gen_e500_flip_eq_bit (t, t));
10841 emit_insn (gen_move_from_CR_eq_bit (result, t));
10845 if (cond_code == NE
10846 || cond_code == GE || cond_code == LE
10847 || cond_code == GEU || cond_code == LEU
10848 || cond_code == ORDERED || cond_code == UNGE || cond_code == UNLE)
10850 rtx not_result = gen_reg_rtx (CCEQmode);
10851 rtx not_op, rev_cond_rtx;
10852 enum machine_mode cc_mode;
10854 cc_mode = GET_MODE (XEXP (condition_rtx, 0));
10856 rev_cond_rtx = gen_rtx_fmt_ee (rs6000_reverse_condition (cc_mode, cond_code),
10857 SImode, XEXP (condition_rtx, 0), const0_rtx);
10858 not_op = gen_rtx_COMPARE (CCEQmode, rev_cond_rtx, const0_rtx);
10859 emit_insn (gen_rtx_SET (VOIDmode, not_result, not_op));
10860 condition_rtx = gen_rtx_EQ (VOIDmode, not_result, const0_rtx);
10863 op_mode = GET_MODE (rs6000_compare_op0);
10864 if (op_mode == VOIDmode)
10865 op_mode = GET_MODE (rs6000_compare_op1);
10867 if (TARGET_POWERPC64 && (op_mode == DImode || rs6000_compare_fp_p))
10869 PUT_MODE (condition_rtx, DImode);
10870 convert_move (result, condition_rtx, 0);
10874 PUT_MODE (condition_rtx, SImode);
10875 emit_insn (gen_rtx_SET (VOIDmode, result, condition_rtx));
10879 /* Emit a branch of kind CODE to location LOC. */
10882 rs6000_emit_cbranch (enum rtx_code code, rtx loc)
10884 rtx condition_rtx, loc_ref;
10886 condition_rtx = rs6000_generate_compare (code);
10887 loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
10888 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
10889 gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
10890 loc_ref, pc_rtx)));
10893 /* Return the string to output a conditional branch to LABEL, which is
10894 the operand number of the label, or -1 if the branch is really a
10895 conditional return.
10897 OP is the conditional expression. XEXP (OP, 0) is assumed to be a
10898 condition code register and its mode specifies what kind of
10899 comparison we made.
10901 REVERSED is nonzero if we should reverse the sense of the comparison.
10903 INSN is the insn. */
10906 output_cbranch (rtx op, const char *label, int reversed, rtx insn)
10908 static char string[64];
10909 enum rtx_code code = GET_CODE (op);
10910 rtx cc_reg = XEXP (op, 0);
10911 enum machine_mode mode = GET_MODE (cc_reg);
10912 int cc_regno = REGNO (cc_reg) - CR0_REGNO;
10913 int need_longbranch = label != NULL && get_attr_length (insn) == 8;
10914 int really_reversed = reversed ^ need_longbranch;
10920 validate_condition_mode (code, mode);
10922 /* Work out which way this really branches. We could use
10923 reverse_condition_maybe_unordered here always but this
10924 makes the resulting assembler clearer. */
10925 if (really_reversed)
10927 /* Reversal of FP compares takes care -- an ordered compare
10928 becomes an unordered compare and vice versa. */
10929 if (mode == CCFPmode)
10930 code = reverse_condition_maybe_unordered (code);
10932 code = reverse_condition (code);
10935 if ((TARGET_E500 && !TARGET_FPRS && TARGET_HARD_FLOAT) && mode == CCFPmode)
10937 /* The efscmp/tst* instructions twiddle bit 2, which maps nicely
10940 /* Opposite of GT. */
10942 else if (code == NE)
10950 /* Not all of these are actually distinct opcodes, but
10951 we distinguish them for clarity of the resulting assembler. */
10952 case NE: case LTGT:
10953 ccode = "ne"; break;
10954 case EQ: case UNEQ:
10955 ccode = "eq"; break;
10957 ccode = "ge"; break;
10958 case GT: case GTU: case UNGT:
10959 ccode = "gt"; break;
10961 ccode = "le"; break;
10962 case LT: case LTU: case UNLT:
10963 ccode = "lt"; break;
10964 case UNORDERED: ccode = "un"; break;
10965 case ORDERED: ccode = "nu"; break;
10966 case UNGE: ccode = "nl"; break;
10967 case UNLE: ccode = "ng"; break;
10972 /* Maybe we have a guess as to how likely the branch is.
10973 The old mnemonics don't have a way to specify this information. */
10975 note = find_reg_note (insn, REG_BR_PROB, NULL_RTX);
10976 if (note != NULL_RTX)
10978 /* PROB is the difference from 50%. */
10979 int prob = INTVAL (XEXP (note, 0)) - REG_BR_PROB_BASE / 2;
10981 /* Only hint for highly probable/improbable branches on newer
10982 cpus as static prediction overrides processor dynamic
10983 prediction. For older cpus we may as well always hint, but
10984 assume not taken for branches that are very close to 50% as a
10985 mispredicted taken branch is more expensive than a
10986 mispredicted not-taken branch. */
10987 if (rs6000_always_hint
10988 || abs (prob) > REG_BR_PROB_BASE / 100 * 48)
10990 if (abs (prob) > REG_BR_PROB_BASE / 20
10991 && ((prob > 0) ^ need_longbranch))
10999 s += sprintf (s, "{b%sr|b%slr%s} ", ccode, ccode, pred);
11001 s += sprintf (s, "{b%s|b%s%s} ", ccode, ccode, pred);
11003 /* We need to escape any '%' characters in the reg_names string.
11004 Assume they'd only be the first character.... */
11005 if (reg_names[cc_regno + CR0_REGNO][0] == '%')
11007 s += sprintf (s, "%s", reg_names[cc_regno + CR0_REGNO]);
11011 /* If the branch distance was too far, we may have to use an
11012 unconditional branch to go the distance. */
11013 if (need_longbranch)
11014 s += sprintf (s, ",$+8\n\tb %s", label);
11016 s += sprintf (s, ",%s", label);
11022 /* Return the string to flip the EQ bit on a CR. */
11024 output_e500_flip_eq_bit (rtx dst, rtx src)
11026 static char string[64];
11029 if (GET_CODE (dst) != REG || ! CR_REGNO_P (REGNO (dst))
11030 || GET_CODE (src) != REG || ! CR_REGNO_P (REGNO (src)))
11034 a = 4 * (REGNO (dst) - CR0_REGNO) + 2;
11035 b = 4 * (REGNO (src) - CR0_REGNO) + 2;
11037 sprintf (string, "crnot %d,%d", a, b);
11041 /* Emit a conditional move: move TRUE_COND to DEST if OP of the
11042 operands of the last comparison is nonzero/true, FALSE_COND if it
11043 is zero/false. Return 0 if the hardware has no such operation. */
11046 rs6000_emit_cmove (rtx dest, rtx op, rtx true_cond, rtx false_cond)
11048 enum rtx_code code = GET_CODE (op);
11049 rtx op0 = rs6000_compare_op0;
11050 rtx op1 = rs6000_compare_op1;
11051 REAL_VALUE_TYPE c1;
11052 enum machine_mode compare_mode = GET_MODE (op0);
11053 enum machine_mode result_mode = GET_MODE (dest);
11056 /* These modes should always match. */
11057 if (GET_MODE (op1) != compare_mode
11058 /* In the isel case however, we can use a compare immediate, so
11059 op1 may be a small constant. */
11060 && (!TARGET_ISEL || !short_cint_operand (op1, VOIDmode)))
11062 if (GET_MODE (true_cond) != result_mode)
11064 if (GET_MODE (false_cond) != result_mode)
11067 /* First, work out if the hardware can do this at all, or
11068 if it's too slow.... */
11069 if (! rs6000_compare_fp_p)
11072 return rs6000_emit_int_cmove (dest, op, true_cond, false_cond);
11075 else if (TARGET_E500 && TARGET_HARD_FLOAT && !TARGET_FPRS
11076 && GET_MODE_CLASS (compare_mode) == MODE_FLOAT)
11079 /* Eliminate half of the comparisons by switching operands, this
11080 makes the remaining code simpler. */
11081 if (code == UNLT || code == UNGT || code == UNORDERED || code == NE
11082 || code == LTGT || code == LT || code == UNLE)
11084 code = reverse_condition_maybe_unordered (code);
11086 true_cond = false_cond;
11090 /* UNEQ and LTGT take four instructions for a comparison with zero,
11091 it'll probably be faster to use a branch here too. */
11092 if (code == UNEQ && HONOR_NANS (compare_mode))
11095 if (GET_CODE (op1) == CONST_DOUBLE)
11096 REAL_VALUE_FROM_CONST_DOUBLE (c1, op1);
11098 /* We're going to try to implement comparisons by performing
11099 a subtract, then comparing against zero. Unfortunately,
11100 Inf - Inf is NaN which is not zero, and so if we don't
11101 know that the operand is finite and the comparison
11102 would treat EQ different to UNORDERED, we can't do it. */
11103 if (HONOR_INFINITIES (compare_mode)
11104 && code != GT && code != UNGE
11105 && (GET_CODE (op1) != CONST_DOUBLE || real_isinf (&c1))
11106 /* Constructs of the form (a OP b ? a : b) are safe. */
11107 && ((! rtx_equal_p (op0, false_cond) && ! rtx_equal_p (op1, false_cond))
11108 || (! rtx_equal_p (op0, true_cond)
11109 && ! rtx_equal_p (op1, true_cond))))
11111 /* At this point we know we can use fsel. */
11113 /* Reduce the comparison to a comparison against zero. */
11114 temp = gen_reg_rtx (compare_mode);
11115 emit_insn (gen_rtx_SET (VOIDmode, temp,
11116 gen_rtx_MINUS (compare_mode, op0, op1)));
11118 op1 = CONST0_RTX (compare_mode);
11120 /* If we don't care about NaNs we can reduce some of the comparisons
11121 down to faster ones. */
11122 if (! HONOR_NANS (compare_mode))
11128 true_cond = false_cond;
11141 /* Now, reduce everything down to a GE. */
11148 temp = gen_reg_rtx (compare_mode);
11149 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
11154 temp = gen_reg_rtx (compare_mode);
11155 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_ABS (compare_mode, op0)));
11160 temp = gen_reg_rtx (compare_mode);
11161 emit_insn (gen_rtx_SET (VOIDmode, temp,
11162 gen_rtx_NEG (compare_mode,
11163 gen_rtx_ABS (compare_mode, op0))));
11168 /* a UNGE 0 <-> (a GE 0 || -a UNLT 0) */
11169 temp = gen_reg_rtx (result_mode);
11170 emit_insn (gen_rtx_SET (VOIDmode, temp,
11171 gen_rtx_IF_THEN_ELSE (result_mode,
11172 gen_rtx_GE (VOIDmode,
11174 true_cond, false_cond)));
11175 false_cond = true_cond;
11178 temp = gen_reg_rtx (compare_mode);
11179 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
11184 /* a GT 0 <-> (a GE 0 && -a UNLT 0) */
11185 temp = gen_reg_rtx (result_mode);
11186 emit_insn (gen_rtx_SET (VOIDmode, temp,
11187 gen_rtx_IF_THEN_ELSE (result_mode,
11188 gen_rtx_GE (VOIDmode,
11190 true_cond, false_cond)));
11191 true_cond = false_cond;
11194 temp = gen_reg_rtx (compare_mode);
11195 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
11203 emit_insn (gen_rtx_SET (VOIDmode, dest,
11204 gen_rtx_IF_THEN_ELSE (result_mode,
11205 gen_rtx_GE (VOIDmode,
11207 true_cond, false_cond)));
11211 /* Same as above, but for ints (isel). */
11214 rs6000_emit_int_cmove (rtx dest, rtx op, rtx true_cond, rtx false_cond)
11216 rtx condition_rtx, cr;
11218 /* All isel implementations thus far are 32-bits. */
11219 if (GET_MODE (rs6000_compare_op0) != SImode)
11222 /* We still have to do the compare, because isel doesn't do a
11223 compare, it just looks at the CRx bits set by a previous compare
11225 condition_rtx = rs6000_generate_compare (GET_CODE (op));
11226 cr = XEXP (condition_rtx, 0);
11228 if (GET_MODE (cr) == CCmode)
11229 emit_insn (gen_isel_signed (dest, condition_rtx,
11230 true_cond, false_cond, cr));
11232 emit_insn (gen_isel_unsigned (dest, condition_rtx,
11233 true_cond, false_cond, cr));
11239 output_isel (rtx *operands)
11241 enum rtx_code code;
11243 code = GET_CODE (operands[1]);
11244 if (code == GE || code == GEU || code == LE || code == LEU || code == NE)
11246 PUT_CODE (operands[1], reverse_condition (code));
11247 return "isel %0,%3,%2,%j1";
11250 return "isel %0,%2,%3,%j1";
11254 rs6000_emit_minmax (rtx dest, enum rtx_code code, rtx op0, rtx op1)
11256 enum machine_mode mode = GET_MODE (op0);
11260 if (code == SMAX || code == SMIN)
11265 if (code == SMAX || code == UMAX)
11266 target = emit_conditional_move (dest, c, op0, op1, mode,
11267 op0, op1, mode, 0);
11269 target = emit_conditional_move (dest, c, op0, op1, mode,
11270 op1, op0, mode, 0);
11271 if (target == NULL_RTX)
11273 if (target != dest)
11274 emit_move_insn (dest, target);
11277 /* Emit instructions to move SRC to DST. Called by splitters for
11278 multi-register moves. It will emit at most one instruction for
11279 each register that is accessed; that is, it won't emit li/lis pairs
11280 (or equivalent for 64-bit code). One of SRC or DST must be a hard
11284 rs6000_split_multireg_move (rtx dst, rtx src)
11286 /* The register number of the first register being moved. */
11288 /* The mode that is to be moved. */
11289 enum machine_mode mode;
11290 /* The mode that the move is being done in, and its size. */
11291 enum machine_mode reg_mode;
11293 /* The number of registers that will be moved. */
11296 reg = REG_P (dst) ? REGNO (dst) : REGNO (src);
11297 mode = GET_MODE (dst);
11298 nregs = HARD_REGNO_NREGS (reg, mode);
11299 if (FP_REGNO_P (reg))
11301 else if (ALTIVEC_REGNO_P (reg))
11302 reg_mode = V16QImode;
11304 reg_mode = word_mode;
11305 reg_mode_size = GET_MODE_SIZE (reg_mode);
11307 if (reg_mode_size * nregs != GET_MODE_SIZE (mode))
11310 if (REG_P (src) && REG_P (dst) && (REGNO (src) < REGNO (dst)))
11312 /* Move register range backwards, if we might have destructive
11315 for (i = nregs - 1; i >= 0; i--)
11316 emit_insn (gen_rtx_SET (VOIDmode,
11317 simplify_gen_subreg (reg_mode, dst, mode,
11318 i * reg_mode_size),
11319 simplify_gen_subreg (reg_mode, src, mode,
11320 i * reg_mode_size)));
11326 bool used_update = false;
11328 if (MEM_P (src) && INT_REGNO_P (reg))
11332 if (GET_CODE (XEXP (src, 0)) == PRE_INC
11333 || GET_CODE (XEXP (src, 0)) == PRE_DEC)
11336 breg = XEXP (XEXP (src, 0), 0);
11337 delta_rtx = GET_CODE (XEXP (src, 0)) == PRE_INC
11338 ? GEN_INT (GET_MODE_SIZE (GET_MODE (src)))
11339 : GEN_INT (-GET_MODE_SIZE (GET_MODE (src)));
11340 emit_insn (TARGET_32BIT
11341 ? gen_addsi3 (breg, breg, delta_rtx)
11342 : gen_adddi3 (breg, breg, delta_rtx));
11343 src = gen_rtx_MEM (mode, breg);
11345 else if (! offsettable_memref_p (src))
11347 rtx newsrc, basereg;
11348 basereg = gen_rtx_REG (Pmode, reg);
11349 emit_insn (gen_rtx_SET (VOIDmode, basereg, XEXP (src, 0)));
11350 newsrc = gen_rtx_MEM (GET_MODE (src), basereg);
11351 MEM_COPY_ATTRIBUTES (newsrc, src);
11355 /* We have now address involving an base register only.
11356 If we use one of the registers to address memory,
11357 we have change that register last. */
11359 breg = (GET_CODE (XEXP (src, 0)) == PLUS
11360 ? XEXP (XEXP (src, 0), 0)
11366 if (REGNO (breg) >= REGNO (dst)
11367 && REGNO (breg) < REGNO (dst) + nregs)
11368 j = REGNO (breg) - REGNO (dst);
11371 if (GET_CODE (dst) == MEM && INT_REGNO_P (reg))
11375 if (GET_CODE (XEXP (dst, 0)) == PRE_INC
11376 || GET_CODE (XEXP (dst, 0)) == PRE_DEC)
11379 breg = XEXP (XEXP (dst, 0), 0);
11380 delta_rtx = GET_CODE (XEXP (dst, 0)) == PRE_INC
11381 ? GEN_INT (GET_MODE_SIZE (GET_MODE (dst)))
11382 : GEN_INT (-GET_MODE_SIZE (GET_MODE (dst)));
11384 /* We have to update the breg before doing the store.
11385 Use store with update, if available. */
11389 rtx nsrc = simplify_gen_subreg (reg_mode, src, mode, 0);
11390 emit_insn (TARGET_32BIT
11391 ? gen_movsi_update (breg, breg, delta_rtx, nsrc)
11392 : gen_movdi_update (breg, breg, delta_rtx, nsrc));
11393 used_update = true;
11396 emit_insn (TARGET_32BIT
11397 ? gen_addsi3 (breg, breg, delta_rtx)
11398 : gen_adddi3 (breg, breg, delta_rtx));
11399 dst = gen_rtx_MEM (mode, breg);
11401 else if (! offsettable_memref_p (dst))
11405 for (i = 0; i < nregs; i++)
11407 /* Calculate index to next subword. */
11412 /* If compiler already emited move of first word by
11413 store with update, no need to do anything. */
11414 if (j == 0 && used_update)
11417 emit_insn (gen_rtx_SET (VOIDmode,
11418 simplify_gen_subreg (reg_mode, dst, mode,
11419 j * reg_mode_size),
11420 simplify_gen_subreg (reg_mode, src, mode,
11421 j * reg_mode_size)));
11427 /* This page contains routines that are used to determine what the
11428 function prologue and epilogue code will do and write them out. */
11430 /* Return the first fixed-point register that is required to be
11431 saved. 32 if none. */
11434 first_reg_to_save (void)
11438 /* Find lowest numbered live register. */
11439 for (first_reg = 13; first_reg <= 31; first_reg++)
11440 if (regs_ever_live[first_reg]
11441 && (! call_used_regs[first_reg]
11442 || (first_reg == RS6000_PIC_OFFSET_TABLE_REGNUM
11443 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
11444 || (DEFAULT_ABI == ABI_DARWIN && flag_pic)
11445 || (TARGET_TOC && TARGET_MINIMAL_TOC)))))
11450 && current_function_uses_pic_offset_table
11451 && first_reg > RS6000_PIC_OFFSET_TABLE_REGNUM)
11452 return RS6000_PIC_OFFSET_TABLE_REGNUM;
11458 /* Similar, for FP regs. */
11461 first_fp_reg_to_save (void)
11465 /* Find lowest numbered live register. */
11466 for (first_reg = 14 + 32; first_reg <= 63; first_reg++)
11467 if (regs_ever_live[first_reg])
11473 /* Similar, for AltiVec regs. */
11476 first_altivec_reg_to_save (void)
11480 /* Stack frame remains as is unless we are in AltiVec ABI. */
11481 if (! TARGET_ALTIVEC_ABI)
11482 return LAST_ALTIVEC_REGNO + 1;
11484 /* Find lowest numbered live register. */
11485 for (i = FIRST_ALTIVEC_REGNO + 20; i <= LAST_ALTIVEC_REGNO; ++i)
11486 if (regs_ever_live[i])
11492 /* Return a 32-bit mask of the AltiVec registers we need to set in
11493 VRSAVE. Bit n of the return value is 1 if Vn is live. The MSB in
11494 the 32-bit word is 0. */
11496 static unsigned int
11497 compute_vrsave_mask (void)
11499 unsigned int i, mask = 0;
11501 /* First, find out if we use _any_ altivec registers. */
11502 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
11503 if (regs_ever_live[i])
11504 mask |= ALTIVEC_REG_BIT (i);
11509 /* Next, remove the argument registers from the set. These must
11510 be in the VRSAVE mask set by the caller, so we don't need to add
11511 them in again. More importantly, the mask we compute here is
11512 used to generate CLOBBERs in the set_vrsave insn, and we do not
11513 wish the argument registers to die. */
11514 for (i = cfun->args_info.vregno - 1; i >= ALTIVEC_ARG_MIN_REG; --i)
11515 mask &= ~ALTIVEC_REG_BIT (i);
11517 /* Similarly, remove the return value from the set. */
11520 diddle_return_value (is_altivec_return_reg, &yes);
11522 mask &= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN);
11528 /* For a very restricted set of circumstances, we can cut down the
11529 size of prologs/epilogs by calling our own save/restore-the-world
11533 compute_save_world_info(rs6000_stack_t *info_ptr)
11535 info_ptr->world_save_p =
11536 (DEFAULT_ABI == ABI_DARWIN)
11537 && ! (current_function_calls_setjmp && flag_exceptions)
11538 && info_ptr->first_fp_reg_save == FIRST_SAVED_FP_REGNO
11539 && info_ptr->first_gp_reg_save == FIRST_SAVED_GP_REGNO
11540 && info_ptr->first_altivec_reg_save == FIRST_SAVED_ALTIVEC_REGNO
11541 && info_ptr->cr_save_p;
11543 /* This will not work in conjunction with sibcalls. Make sure there
11544 are none. (This check is expensive, but seldom executed.) */
11545 if ( info_ptr->world_save_p )
11548 for ( insn = get_last_insn_anywhere (); insn; insn = PREV_INSN (insn))
11549 if ( GET_CODE (insn) == CALL_INSN
11550 && SIBLING_CALL_P (insn))
11552 info_ptr->world_save_p = 0;
11557 if (info_ptr->world_save_p)
11559 /* Even if we're not touching VRsave, make sure there's room on the
11560 stack for it, if it looks like we're calling SAVE_WORLD, which
11561 will attempt to save it. */
11562 info_ptr->vrsave_size = 4;
11564 /* "Save" the VRsave register too if we're saving the world. */
11565 if (info_ptr->vrsave_mask == 0)
11566 info_ptr->vrsave_mask = compute_vrsave_mask ();
11568 /* Because the Darwin register save/restore routines only handle
11569 F14 .. F31 and V20 .. V31 as per the ABI, perform a consistancy
11570 check and abort if there's something worng. */
11571 if (info_ptr->first_fp_reg_save < FIRST_SAVED_FP_REGNO
11572 || info_ptr->first_altivec_reg_save < FIRST_SAVED_ALTIVEC_REGNO)
11580 is_altivec_return_reg (rtx reg, void *xyes)
11582 bool *yes = (bool *) xyes;
11583 if (REGNO (reg) == ALTIVEC_ARG_RETURN)
11588 /* Calculate the stack information for the current function. This is
11589 complicated by having two separate calling sequences, the AIX calling
11590 sequence and the V.4 calling sequence.
11592 AIX (and Darwin/Mac OS X) stack frames look like:
11594 SP----> +---------------------------------------+
11595 | back chain to caller | 0 0
11596 +---------------------------------------+
11597 | saved CR | 4 8 (8-11)
11598 +---------------------------------------+
11600 +---------------------------------------+
11601 | reserved for compilers | 12 24
11602 +---------------------------------------+
11603 | reserved for binders | 16 32
11604 +---------------------------------------+
11605 | saved TOC pointer | 20 40
11606 +---------------------------------------+
11607 | Parameter save area (P) | 24 48
11608 +---------------------------------------+
11609 | Alloca space (A) | 24+P etc.
11610 +---------------------------------------+
11611 | Local variable space (L) | 24+P+A
11612 +---------------------------------------+
11613 | Float/int conversion temporary (X) | 24+P+A+L
11614 +---------------------------------------+
11615 | Save area for AltiVec registers (W) | 24+P+A+L+X
11616 +---------------------------------------+
11617 | AltiVec alignment padding (Y) | 24+P+A+L+X+W
11618 +---------------------------------------+
11619 | Save area for VRSAVE register (Z) | 24+P+A+L+X+W+Y
11620 +---------------------------------------+
11621 | Save area for GP registers (G) | 24+P+A+X+L+X+W+Y+Z
11622 +---------------------------------------+
11623 | Save area for FP registers (F) | 24+P+A+X+L+X+W+Y+Z+G
11624 +---------------------------------------+
11625 old SP->| back chain to caller's caller |
11626 +---------------------------------------+
11628 The required alignment for AIX configurations is two words (i.e., 8
11632 V.4 stack frames look like:
11634 SP----> +---------------------------------------+
11635 | back chain to caller | 0
11636 +---------------------------------------+
11637 | caller's saved LR | 4
11638 +---------------------------------------+
11639 | Parameter save area (P) | 8
11640 +---------------------------------------+
11641 | Alloca space (A) | 8+P
11642 +---------------------------------------+
11643 | Varargs save area (V) | 8+P+A
11644 +---------------------------------------+
11645 | Local variable space (L) | 8+P+A+V
11646 +---------------------------------------+
11647 | Float/int conversion temporary (X) | 8+P+A+V+L
11648 +---------------------------------------+
11649 | Save area for AltiVec registers (W) | 8+P+A+V+L+X
11650 +---------------------------------------+
11651 | AltiVec alignment padding (Y) | 8+P+A+V+L+X+W
11652 +---------------------------------------+
11653 | Save area for VRSAVE register (Z) | 8+P+A+V+L+X+W+Y
11654 +---------------------------------------+
11655 | SPE: area for 64-bit GP registers |
11656 +---------------------------------------+
11657 | SPE alignment padding |
11658 +---------------------------------------+
11659 | saved CR (C) | 8+P+A+V+L+X+W+Y+Z
11660 +---------------------------------------+
11661 | Save area for GP registers (G) | 8+P+A+V+L+X+W+Y+Z+C
11662 +---------------------------------------+
11663 | Save area for FP registers (F) | 8+P+A+V+L+X+W+Y+Z+C+G
11664 +---------------------------------------+
11665 old SP->| back chain to caller's caller |
11666 +---------------------------------------+
11668 The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
11669 given. (But note below and in sysv4.h that we require only 8 and
11670 may round up the size of our stack frame anyways. The historical
11671 reason is early versions of powerpc-linux which didn't properly
11672 align the stack at program startup. A happy side-effect is that
11673 -mno-eabi libraries can be used with -meabi programs.)
11675 The EABI configuration defaults to the V.4 layout. However,
11676 the stack alignment requirements may differ. If -mno-eabi is not
11677 given, the required stack alignment is 8 bytes; if -mno-eabi is
11678 given, the required alignment is 16 bytes. (But see V.4 comment
11681 #ifndef ABI_STACK_BOUNDARY
11682 #define ABI_STACK_BOUNDARY STACK_BOUNDARY
11685 static rs6000_stack_t *
11686 rs6000_stack_info (void)
11688 static rs6000_stack_t info, zero_info;
11689 rs6000_stack_t *info_ptr = &info;
11690 int reg_size = TARGET_32BIT ? 4 : 8;
11692 HOST_WIDE_INT non_fixed_size;
11694 /* Zero all fields portably. */
11699 /* Cache value so we don't rescan instruction chain over and over. */
11700 if (cfun->machine->insn_chain_scanned_p == 0)
11702 cfun->machine->insn_chain_scanned_p = 1;
11703 info_ptr->spe_64bit_regs_used = (int) spe_func_has_64bit_regs_p ();
11707 /* Select which calling sequence. */
11708 info_ptr->abi = DEFAULT_ABI;
11710 /* Calculate which registers need to be saved & save area size. */
11711 info_ptr->first_gp_reg_save = first_reg_to_save ();
11712 /* Assume that we will have to save RS6000_PIC_OFFSET_TABLE_REGNUM,
11713 even if it currently looks like we won't. */
11714 if (((TARGET_TOC && TARGET_MINIMAL_TOC)
11715 || (flag_pic == 1 && DEFAULT_ABI == ABI_V4)
11716 || (flag_pic && DEFAULT_ABI == ABI_DARWIN))
11717 && info_ptr->first_gp_reg_save > RS6000_PIC_OFFSET_TABLE_REGNUM)
11718 info_ptr->gp_size = reg_size * (32 - RS6000_PIC_OFFSET_TABLE_REGNUM);
11720 info_ptr->gp_size = reg_size * (32 - info_ptr->first_gp_reg_save);
11722 /* For the SPE, we have an additional upper 32-bits on each GPR.
11723 Ideally we should save the entire 64-bits only when the upper
11724 half is used in SIMD instructions. Since we only record
11725 registers live (not the size they are used in), this proves
11726 difficult because we'd have to traverse the instruction chain at
11727 the right time, taking reload into account. This is a real pain,
11728 so we opt to save the GPRs in 64-bits always if but one register
11729 gets used in 64-bits. Otherwise, all the registers in the frame
11730 get saved in 32-bits.
11732 So... since when we save all GPRs (except the SP) in 64-bits, the
11733 traditional GP save area will be empty. */
11734 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
11735 info_ptr->gp_size = 0;
11737 info_ptr->first_fp_reg_save = first_fp_reg_to_save ();
11738 info_ptr->fp_size = 8 * (64 - info_ptr->first_fp_reg_save);
11740 info_ptr->first_altivec_reg_save = first_altivec_reg_to_save ();
11741 info_ptr->altivec_size = 16 * (LAST_ALTIVEC_REGNO + 1
11742 - info_ptr->first_altivec_reg_save);
11744 /* Does this function call anything? */
11745 info_ptr->calls_p = (! current_function_is_leaf
11746 || cfun->machine->ra_needs_full_frame);
11748 /* Determine if we need to save the link register. */
11749 if (rs6000_ra_ever_killed ()
11750 || (DEFAULT_ABI == ABI_AIX
11751 && current_function_profile
11752 && !TARGET_PROFILE_KERNEL)
11753 #ifdef TARGET_RELOCATABLE
11754 || (TARGET_RELOCATABLE && (get_pool_size () != 0))
11756 || (info_ptr->first_fp_reg_save != 64
11757 && !FP_SAVE_INLINE (info_ptr->first_fp_reg_save))
11758 || info_ptr->first_altivec_reg_save <= LAST_ALTIVEC_REGNO
11759 || (DEFAULT_ABI == ABI_V4 && current_function_calls_alloca)
11760 || (DEFAULT_ABI == ABI_DARWIN
11762 && current_function_uses_pic_offset_table)
11763 || info_ptr->calls_p)
11765 info_ptr->lr_save_p = 1;
11766 regs_ever_live[LINK_REGISTER_REGNUM] = 1;
11769 /* Determine if we need to save the condition code registers. */
11770 if (regs_ever_live[CR2_REGNO]
11771 || regs_ever_live[CR3_REGNO]
11772 || regs_ever_live[CR4_REGNO])
11774 info_ptr->cr_save_p = 1;
11775 if (DEFAULT_ABI == ABI_V4)
11776 info_ptr->cr_size = reg_size;
11779 /* If the current function calls __builtin_eh_return, then we need
11780 to allocate stack space for registers that will hold data for
11781 the exception handler. */
11782 if (current_function_calls_eh_return)
11785 for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM; ++i)
11788 /* SPE saves EH registers in 64-bits. */
11789 ehrd_size = i * (TARGET_SPE_ABI
11790 && info_ptr->spe_64bit_regs_used != 0
11791 ? UNITS_PER_SPE_WORD : UNITS_PER_WORD);
11796 /* Determine various sizes. */
11797 info_ptr->reg_size = reg_size;
11798 info_ptr->fixed_size = RS6000_SAVE_AREA;
11799 info_ptr->varargs_size = RS6000_VARARGS_AREA;
11800 info_ptr->vars_size = RS6000_ALIGN (get_frame_size (), 8);
11801 info_ptr->parm_size = RS6000_ALIGN (current_function_outgoing_args_size,
11802 TARGET_ALTIVEC ? 16 : 8);
11804 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
11805 info_ptr->spe_gp_size = 8 * (32 - info_ptr->first_gp_reg_save);
11807 info_ptr->spe_gp_size = 0;
11809 if (TARGET_ALTIVEC_ABI)
11810 info_ptr->vrsave_mask = compute_vrsave_mask ();
11812 info_ptr->vrsave_mask = 0;
11814 if (TARGET_ALTIVEC_VRSAVE && info_ptr->vrsave_mask)
11815 info_ptr->vrsave_size = 4;
11817 info_ptr->vrsave_size = 0;
11819 compute_save_world_info (info_ptr);
11821 /* Calculate the offsets. */
11822 switch (DEFAULT_ABI)
11830 info_ptr->fp_save_offset = - info_ptr->fp_size;
11831 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
11833 if (TARGET_ALTIVEC_ABI)
11835 info_ptr->vrsave_save_offset
11836 = info_ptr->gp_save_offset - info_ptr->vrsave_size;
11838 /* Align stack so vector save area is on a quadword boundary. */
11839 if (info_ptr->altivec_size != 0)
11840 info_ptr->altivec_padding_size
11841 = 16 - (-info_ptr->vrsave_save_offset % 16);
11843 info_ptr->altivec_padding_size = 0;
11845 info_ptr->altivec_save_offset
11846 = info_ptr->vrsave_save_offset
11847 - info_ptr->altivec_padding_size
11848 - info_ptr->altivec_size;
11850 /* Adjust for AltiVec case. */
11851 info_ptr->ehrd_offset = info_ptr->altivec_save_offset - ehrd_size;
11854 info_ptr->ehrd_offset = info_ptr->gp_save_offset - ehrd_size;
11855 info_ptr->cr_save_offset = reg_size; /* first word when 64-bit. */
11856 info_ptr->lr_save_offset = 2*reg_size;
11860 info_ptr->fp_save_offset = - info_ptr->fp_size;
11861 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
11862 info_ptr->cr_save_offset = info_ptr->gp_save_offset - info_ptr->cr_size;
11864 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
11866 /* Align stack so SPE GPR save area is aligned on a
11867 double-word boundary. */
11868 if (info_ptr->spe_gp_size != 0)
11869 info_ptr->spe_padding_size
11870 = 8 - (-info_ptr->cr_save_offset % 8);
11872 info_ptr->spe_padding_size = 0;
11874 info_ptr->spe_gp_save_offset
11875 = info_ptr->cr_save_offset
11876 - info_ptr->spe_padding_size
11877 - info_ptr->spe_gp_size;
11879 /* Adjust for SPE case. */
11880 info_ptr->toc_save_offset
11881 = info_ptr->spe_gp_save_offset - info_ptr->toc_size;
11883 else if (TARGET_ALTIVEC_ABI)
11885 info_ptr->vrsave_save_offset
11886 = info_ptr->cr_save_offset - info_ptr->vrsave_size;
11888 /* Align stack so vector save area is on a quadword boundary. */
11889 if (info_ptr->altivec_size != 0)
11890 info_ptr->altivec_padding_size
11891 = 16 - (-info_ptr->vrsave_save_offset % 16);
11893 info_ptr->altivec_padding_size = 0;
11895 info_ptr->altivec_save_offset
11896 = info_ptr->vrsave_save_offset
11897 - info_ptr->altivec_padding_size
11898 - info_ptr->altivec_size;
11900 /* Adjust for AltiVec case. */
11901 info_ptr->toc_save_offset
11902 = info_ptr->altivec_save_offset - info_ptr->toc_size;
11905 info_ptr->toc_save_offset = info_ptr->cr_save_offset - info_ptr->toc_size;
11906 info_ptr->ehrd_offset = info_ptr->toc_save_offset - ehrd_size;
11907 info_ptr->lr_save_offset = reg_size;
11911 info_ptr->save_size = RS6000_ALIGN (info_ptr->fp_size
11912 + info_ptr->gp_size
11913 + info_ptr->altivec_size
11914 + info_ptr->altivec_padding_size
11915 + info_ptr->spe_gp_size
11916 + info_ptr->spe_padding_size
11918 + info_ptr->cr_size
11919 + info_ptr->lr_size
11920 + info_ptr->vrsave_size
11921 + info_ptr->toc_size,
11922 (TARGET_ALTIVEC_ABI || ABI_DARWIN)
11925 non_fixed_size = (info_ptr->vars_size
11926 + info_ptr->parm_size
11927 + info_ptr->save_size
11928 + info_ptr->varargs_size);
11930 info_ptr->total_size = RS6000_ALIGN (non_fixed_size + info_ptr->fixed_size,
11931 ABI_STACK_BOUNDARY / BITS_PER_UNIT);
11933 /* Determine if we need to allocate any stack frame:
11935 For AIX we need to push the stack if a frame pointer is needed
11936 (because the stack might be dynamically adjusted), if we are
11937 debugging, if we make calls, or if the sum of fp_save, gp_save,
11938 and local variables are more than the space needed to save all
11939 non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
11940 + 18*8 = 288 (GPR13 reserved).
11942 For V.4 we don't have the stack cushion that AIX uses, but assume
11943 that the debugger can handle stackless frames. */
11945 if (info_ptr->calls_p)
11946 info_ptr->push_p = 1;
11948 else if (DEFAULT_ABI == ABI_V4)
11949 info_ptr->push_p = non_fixed_size != 0;
11951 else if (frame_pointer_needed)
11952 info_ptr->push_p = 1;
11954 else if (TARGET_XCOFF && write_symbols != NO_DEBUG)
11955 info_ptr->push_p = 1;
11958 info_ptr->push_p = non_fixed_size > (TARGET_32BIT ? 220 : 288);
11960 /* Zero offsets if we're not saving those registers. */
11961 if (info_ptr->fp_size == 0)
11962 info_ptr->fp_save_offset = 0;
11964 if (info_ptr->gp_size == 0)
11965 info_ptr->gp_save_offset = 0;
11967 if (! TARGET_ALTIVEC_ABI || info_ptr->altivec_size == 0)
11968 info_ptr->altivec_save_offset = 0;
11970 if (! TARGET_ALTIVEC_ABI || info_ptr->vrsave_mask == 0)
11971 info_ptr->vrsave_save_offset = 0;
11973 if (! TARGET_SPE_ABI
11974 || info_ptr->spe_64bit_regs_used == 0
11975 || info_ptr->spe_gp_size == 0)
11976 info_ptr->spe_gp_save_offset = 0;
11978 if (! info_ptr->lr_save_p)
11979 info_ptr->lr_save_offset = 0;
11981 if (! info_ptr->cr_save_p)
11982 info_ptr->cr_save_offset = 0;
11984 if (! info_ptr->toc_save_p)
11985 info_ptr->toc_save_offset = 0;
11990 /* Return true if the current function uses any GPRs in 64-bit SIMD
11994 spe_func_has_64bit_regs_p (void)
11998 /* Functions that save and restore all the call-saved registers will
11999 need to save/restore the registers in 64-bits. */
12000 if (current_function_calls_eh_return
12001 || current_function_calls_setjmp
12002 || current_function_has_nonlocal_goto)
12005 insns = get_insns ();
12007 for (insn = NEXT_INSN (insns); insn != NULL_RTX; insn = NEXT_INSN (insn))
12013 i = PATTERN (insn);
12014 if (GET_CODE (i) == SET
12015 && SPE_VECTOR_MODE (GET_MODE (SET_SRC (i))))
12024 debug_stack_info (rs6000_stack_t *info)
12026 const char *abi_string;
12029 info = rs6000_stack_info ();
12031 fprintf (stderr, "\nStack information for function %s:\n",
12032 ((current_function_decl && DECL_NAME (current_function_decl))
12033 ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
12038 default: abi_string = "Unknown"; break;
12039 case ABI_NONE: abi_string = "NONE"; break;
12040 case ABI_AIX: abi_string = "AIX"; break;
12041 case ABI_DARWIN: abi_string = "Darwin"; break;
12042 case ABI_V4: abi_string = "V.4"; break;
12045 fprintf (stderr, "\tABI = %5s\n", abi_string);
12047 if (TARGET_ALTIVEC_ABI)
12048 fprintf (stderr, "\tALTIVEC ABI extensions enabled.\n");
12050 if (TARGET_SPE_ABI)
12051 fprintf (stderr, "\tSPE ABI extensions enabled.\n");
12053 if (info->first_gp_reg_save != 32)
12054 fprintf (stderr, "\tfirst_gp_reg_save = %5d\n", info->first_gp_reg_save);
12056 if (info->first_fp_reg_save != 64)
12057 fprintf (stderr, "\tfirst_fp_reg_save = %5d\n", info->first_fp_reg_save);
12059 if (info->first_altivec_reg_save <= LAST_ALTIVEC_REGNO)
12060 fprintf (stderr, "\tfirst_altivec_reg_save = %5d\n",
12061 info->first_altivec_reg_save);
12063 if (info->lr_save_p)
12064 fprintf (stderr, "\tlr_save_p = %5d\n", info->lr_save_p);
12066 if (info->cr_save_p)
12067 fprintf (stderr, "\tcr_save_p = %5d\n", info->cr_save_p);
12069 if (info->toc_save_p)
12070 fprintf (stderr, "\ttoc_save_p = %5d\n", info->toc_save_p);
12072 if (info->vrsave_mask)
12073 fprintf (stderr, "\tvrsave_mask = 0x%x\n", info->vrsave_mask);
12076 fprintf (stderr, "\tpush_p = %5d\n", info->push_p);
12079 fprintf (stderr, "\tcalls_p = %5d\n", info->calls_p);
12081 if (info->gp_save_offset)
12082 fprintf (stderr, "\tgp_save_offset = %5d\n", info->gp_save_offset);
12084 if (info->fp_save_offset)
12085 fprintf (stderr, "\tfp_save_offset = %5d\n", info->fp_save_offset);
12087 if (info->altivec_save_offset)
12088 fprintf (stderr, "\taltivec_save_offset = %5d\n",
12089 info->altivec_save_offset);
12091 if (info->spe_gp_save_offset)
12092 fprintf (stderr, "\tspe_gp_save_offset = %5d\n",
12093 info->spe_gp_save_offset);
12095 if (info->vrsave_save_offset)
12096 fprintf (stderr, "\tvrsave_save_offset = %5d\n",
12097 info->vrsave_save_offset);
12099 if (info->lr_save_offset)
12100 fprintf (stderr, "\tlr_save_offset = %5d\n", info->lr_save_offset);
12102 if (info->cr_save_offset)
12103 fprintf (stderr, "\tcr_save_offset = %5d\n", info->cr_save_offset);
12105 if (info->toc_save_offset)
12106 fprintf (stderr, "\ttoc_save_offset = %5d\n", info->toc_save_offset);
12108 if (info->varargs_save_offset)
12109 fprintf (stderr, "\tvarargs_save_offset = %5d\n", info->varargs_save_offset);
12111 if (info->total_size)
12112 fprintf (stderr, "\ttotal_size = "HOST_WIDE_INT_PRINT_DEC"\n",
12115 if (info->varargs_size)
12116 fprintf (stderr, "\tvarargs_size = %5d\n", info->varargs_size);
12118 if (info->vars_size)
12119 fprintf (stderr, "\tvars_size = "HOST_WIDE_INT_PRINT_DEC"\n",
12122 if (info->parm_size)
12123 fprintf (stderr, "\tparm_size = %5d\n", info->parm_size);
12125 if (info->fixed_size)
12126 fprintf (stderr, "\tfixed_size = %5d\n", info->fixed_size);
12129 fprintf (stderr, "\tgp_size = %5d\n", info->gp_size);
12131 if (info->spe_gp_size)
12132 fprintf (stderr, "\tspe_gp_size = %5d\n", info->spe_gp_size);
12135 fprintf (stderr, "\tfp_size = %5d\n", info->fp_size);
12137 if (info->altivec_size)
12138 fprintf (stderr, "\taltivec_size = %5d\n", info->altivec_size);
12140 if (info->vrsave_size)
12141 fprintf (stderr, "\tvrsave_size = %5d\n", info->vrsave_size);
12143 if (info->altivec_padding_size)
12144 fprintf (stderr, "\taltivec_padding_size= %5d\n",
12145 info->altivec_padding_size);
12147 if (info->spe_padding_size)
12148 fprintf (stderr, "\tspe_padding_size = %5d\n",
12149 info->spe_padding_size);
12152 fprintf (stderr, "\tlr_size = %5d\n", info->lr_size);
12155 fprintf (stderr, "\tcr_size = %5d\n", info->cr_size);
12157 if (info->toc_size)
12158 fprintf (stderr, "\ttoc_size = %5d\n", info->toc_size);
12160 if (info->save_size)
12161 fprintf (stderr, "\tsave_size = %5d\n", info->save_size);
12163 if (info->reg_size != 4)
12164 fprintf (stderr, "\treg_size = %5d\n", info->reg_size);
12166 fprintf (stderr, "\n");
12170 rs6000_return_addr (int count, rtx frame)
12172 /* Currently we don't optimize very well between prolog and body
12173 code and for PIC code the code can be actually quite bad, so
12174 don't try to be too clever here. */
12175 if (count != 0 || (DEFAULT_ABI != ABI_AIX && flag_pic))
12177 cfun->machine->ra_needs_full_frame = 1;
12184 plus_constant (copy_to_reg
12185 (gen_rtx_MEM (Pmode,
12186 memory_address (Pmode, frame))),
12187 RETURN_ADDRESS_OFFSET)));
12190 cfun->machine->ra_need_lr = 1;
12191 return get_hard_reg_initial_val (Pmode, LINK_REGISTER_REGNUM);
12194 /* Say whether a function is a candidate for sibcall handling or not.
12195 We do not allow indirect calls to be optimized into sibling calls.
12196 Also, we can't do it if there are any vector parameters; there's
12197 nowhere to put the VRsave code so it works; note that functions with
12198 vector parameters are required to have a prototype, so the argument
12199 type info must be available here. (The tail recursion case can work
12200 with vector parameters, but there's no way to distinguish here.) */
12202 rs6000_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
12207 if (TARGET_ALTIVEC_VRSAVE)
12209 for (type = TYPE_ARG_TYPES (TREE_TYPE (decl));
12210 type; type = TREE_CHAIN (type))
12212 if (TREE_CODE (TREE_VALUE (type)) == VECTOR_TYPE)
12216 if (DEFAULT_ABI == ABI_DARWIN
12217 || (*targetm.binds_local_p) (decl))
12219 tree attr_list = TYPE_ATTRIBUTES (TREE_TYPE (decl));
12221 if (!lookup_attribute ("longcall", attr_list)
12222 || lookup_attribute ("shortcall", attr_list))
12230 rs6000_ra_ever_killed (void)
12236 if (current_function_is_thunk)
12239 /* regs_ever_live has LR marked as used if any sibcalls are present,
12240 but this should not force saving and restoring in the
12241 pro/epilogue. Likewise, reg_set_between_p thinks a sibcall
12242 clobbers LR, so that is inappropriate. */
12244 /* Also, the prologue can generate a store into LR that
12245 doesn't really count, like this:
12248 bcl to set PIC register
12252 When we're called from the epilogue, we need to avoid counting
12253 this as a store. */
12255 push_topmost_sequence ();
12256 top = get_insns ();
12257 pop_topmost_sequence ();
12258 reg = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
12260 for (insn = NEXT_INSN (top); insn != NULL_RTX; insn = NEXT_INSN (insn))
12264 if (FIND_REG_INC_NOTE (insn, reg))
12266 else if (GET_CODE (insn) == CALL_INSN
12267 && !SIBLING_CALL_P (insn))
12269 else if (set_of (reg, insn) != NULL_RTX
12270 && !prologue_epilogue_contains (insn))
12277 /* Add a REG_MAYBE_DEAD note to the insn. */
12279 rs6000_maybe_dead (rtx insn)
12281 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD,
12286 /* Emit instructions needed to load the TOC register.
12287 This is only needed when TARGET_TOC, TARGET_MINIMAL_TOC, and there is
12288 a constant pool; or for SVR4 -fpic. */
12291 rs6000_emit_load_toc_table (int fromprolog)
12294 dest = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
12296 if (TARGET_ELF && DEFAULT_ABI == ABI_V4 && flag_pic == 1)
12298 rtx temp = (fromprolog
12299 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
12300 : gen_reg_rtx (Pmode));
12301 insn = emit_insn (gen_load_toc_v4_pic_si (temp));
12303 rs6000_maybe_dead (insn);
12304 insn = emit_move_insn (dest, temp);
12306 rs6000_maybe_dead (insn);
12308 else if (TARGET_ELF && DEFAULT_ABI != ABI_AIX && flag_pic == 2)
12311 rtx tempLR = (fromprolog
12312 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
12313 : gen_reg_rtx (Pmode));
12314 rtx temp0 = (fromprolog
12315 ? gen_rtx_REG (Pmode, 0)
12316 : gen_reg_rtx (Pmode));
12319 /* possibly create the toc section */
12320 if (! toc_initialized)
12323 function_section (current_function_decl);
12330 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
12331 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
12333 ASM_GENERATE_INTERNAL_LABEL (buf, "LCL", rs6000_pic_labelno);
12334 symL = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
12336 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1 (tempLR,
12338 rs6000_maybe_dead (emit_move_insn (dest, tempLR));
12339 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_2 (temp0, dest,
12346 static int reload_toc_labelno = 0;
12348 tocsym = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
12350 ASM_GENERATE_INTERNAL_LABEL (buf, "LCG", reload_toc_labelno++);
12351 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
12353 emit_insn (gen_load_toc_v4_PIC_1b (tempLR, symF, tocsym));
12354 emit_move_insn (dest, tempLR);
12355 emit_move_insn (temp0, gen_rtx_MEM (Pmode, dest));
12357 insn = emit_insn (gen_addsi3 (dest, temp0, dest));
12359 rs6000_maybe_dead (insn);
12361 else if (TARGET_ELF && !TARGET_AIX && flag_pic == 0 && TARGET_MINIMAL_TOC)
12363 /* This is for AIX code running in non-PIC ELF32. */
12366 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
12367 realsym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
12369 insn = emit_insn (gen_elf_high (dest, realsym));
12371 rs6000_maybe_dead (insn);
12372 insn = emit_insn (gen_elf_low (dest, dest, realsym));
12374 rs6000_maybe_dead (insn);
12376 else if (DEFAULT_ABI == ABI_AIX)
12379 insn = emit_insn (gen_load_toc_aix_si (dest));
12381 insn = emit_insn (gen_load_toc_aix_di (dest));
12383 rs6000_maybe_dead (insn);
12389 /* Emit instructions to restore the link register after determining where
12390 its value has been stored. */
12393 rs6000_emit_eh_reg_restore (rtx source, rtx scratch)
12395 rs6000_stack_t *info = rs6000_stack_info ();
12398 operands[0] = source;
12399 operands[1] = scratch;
12401 if (info->lr_save_p)
12403 rtx frame_rtx = stack_pointer_rtx;
12404 HOST_WIDE_INT sp_offset = 0;
12407 if (frame_pointer_needed
12408 || current_function_calls_alloca
12409 || info->total_size > 32767)
12411 emit_move_insn (operands[1], gen_rtx_MEM (Pmode, frame_rtx));
12412 frame_rtx = operands[1];
12414 else if (info->push_p)
12415 sp_offset = info->total_size;
12417 tmp = plus_constant (frame_rtx, info->lr_save_offset + sp_offset);
12418 tmp = gen_rtx_MEM (Pmode, tmp);
12419 emit_move_insn (tmp, operands[0]);
12422 emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM), operands[0]);
12425 static GTY(()) int set = -1;
12428 get_TOC_alias_set (void)
12431 set = new_alias_set ();
12435 /* This returns nonzero if the current function uses the TOC. This is
12436 determined by the presence of (use (unspec ... UNSPEC_TOC)), which
12437 is generated by the ABI_V4 load_toc_* patterns. */
12444 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
12447 rtx pat = PATTERN (insn);
12450 if (GET_CODE (pat) == PARALLEL)
12451 for (i = 0; i < XVECLEN (pat, 0); i++)
12453 rtx sub = XVECEXP (pat, 0, i);
12454 if (GET_CODE (sub) == USE)
12456 sub = XEXP (sub, 0);
12457 if (GET_CODE (sub) == UNSPEC
12458 && XINT (sub, 1) == UNSPEC_TOC)
12468 create_TOC_reference (rtx symbol)
12470 return gen_rtx_PLUS (Pmode,
12471 gen_rtx_REG (Pmode, TOC_REGISTER),
12472 gen_rtx_CONST (Pmode,
12473 gen_rtx_MINUS (Pmode, symbol,
12474 gen_rtx_SYMBOL_REF (Pmode, toc_label_name))));
12477 /* If _Unwind_* has been called from within the same module,
12478 toc register is not guaranteed to be saved to 40(1) on function
12479 entry. Save it there in that case. */
12482 rs6000_aix_emit_builtin_unwind_init (void)
12485 rtx stack_top = gen_reg_rtx (Pmode);
12486 rtx opcode_addr = gen_reg_rtx (Pmode);
12487 rtx opcode = gen_reg_rtx (SImode);
12488 rtx tocompare = gen_reg_rtx (SImode);
12489 rtx no_toc_save_needed = gen_label_rtx ();
12491 mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
12492 emit_move_insn (stack_top, mem);
12494 mem = gen_rtx_MEM (Pmode,
12495 gen_rtx_PLUS (Pmode, stack_top,
12496 GEN_INT (2 * GET_MODE_SIZE (Pmode))));
12497 emit_move_insn (opcode_addr, mem);
12498 emit_move_insn (opcode, gen_rtx_MEM (SImode, opcode_addr));
12499 emit_move_insn (tocompare, gen_int_mode (TARGET_32BIT ? 0x80410014
12500 : 0xE8410028, SImode));
12502 do_compare_rtx_and_jump (opcode, tocompare, EQ, 1,
12503 SImode, NULL_RTX, NULL_RTX,
12504 no_toc_save_needed);
12506 mem = gen_rtx_MEM (Pmode,
12507 gen_rtx_PLUS (Pmode, stack_top,
12508 GEN_INT (5 * GET_MODE_SIZE (Pmode))));
12509 emit_move_insn (mem, gen_rtx_REG (Pmode, 2));
12510 emit_label (no_toc_save_needed);
12513 /* This ties together stack memory (MEM with an alias set of
12514 rs6000_sr_alias_set) and the change to the stack pointer. */
12517 rs6000_emit_stack_tie (void)
12519 rtx mem = gen_rtx_MEM (BLKmode, gen_rtx_REG (Pmode, STACK_POINTER_REGNUM));
12521 set_mem_alias_set (mem, rs6000_sr_alias_set);
12522 emit_insn (gen_stack_tie (mem));
12525 /* Emit the correct code for allocating stack space, as insns.
12526 If COPY_R12, make sure a copy of the old frame is left in r12.
12527 The generated code may use hard register 0 as a temporary. */
12530 rs6000_emit_allocate_stack (HOST_WIDE_INT size, int copy_r12)
12533 rtx stack_reg = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
12534 rtx tmp_reg = gen_rtx_REG (Pmode, 0);
12535 rtx todec = GEN_INT (-size);
12537 if (current_function_limit_stack)
12539 if (REG_P (stack_limit_rtx)
12540 && REGNO (stack_limit_rtx) > 1
12541 && REGNO (stack_limit_rtx) <= 31)
12543 emit_insn (TARGET_32BIT
12544 ? gen_addsi3 (tmp_reg,
12547 : gen_adddi3 (tmp_reg,
12551 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
12554 else if (GET_CODE (stack_limit_rtx) == SYMBOL_REF
12556 && DEFAULT_ABI == ABI_V4)
12558 rtx toload = gen_rtx_CONST (VOIDmode,
12559 gen_rtx_PLUS (Pmode,
12563 emit_insn (gen_elf_high (tmp_reg, toload));
12564 emit_insn (gen_elf_low (tmp_reg, tmp_reg, toload));
12565 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
12569 warning ("stack limit expression is not supported");
12572 if (copy_r12 || ! TARGET_UPDATE)
12573 emit_move_insn (gen_rtx_REG (Pmode, 12), stack_reg);
12579 /* Need a note here so that try_split doesn't get confused. */
12580 if (get_last_insn() == NULL_RTX)
12581 emit_note (NOTE_INSN_DELETED);
12582 insn = emit_move_insn (tmp_reg, todec);
12583 try_split (PATTERN (insn), insn, 0);
12587 insn = emit_insn (TARGET_32BIT
12588 ? gen_movsi_update (stack_reg, stack_reg,
12590 : gen_movdi_update (stack_reg, stack_reg,
12591 todec, stack_reg));
12595 insn = emit_insn (TARGET_32BIT
12596 ? gen_addsi3 (stack_reg, stack_reg, todec)
12597 : gen_adddi3 (stack_reg, stack_reg, todec));
12598 emit_move_insn (gen_rtx_MEM (Pmode, stack_reg),
12599 gen_rtx_REG (Pmode, 12));
12602 RTX_FRAME_RELATED_P (insn) = 1;
12604 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
12605 gen_rtx_SET (VOIDmode, stack_reg,
12606 gen_rtx_PLUS (Pmode, stack_reg,
12611 /* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
12612 with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
12613 is not NULL. It would be nice if dwarf2out_frame_debug_expr could
12614 deduce these equivalences by itself so it wasn't necessary to hold
12615 its hand so much. */
12618 rs6000_frame_related (rtx insn, rtx reg, HOST_WIDE_INT val,
12619 rtx reg2, rtx rreg)
12623 /* copy_rtx will not make unique copies of registers, so we need to
12624 ensure we don't have unwanted sharing here. */
12626 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
12629 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
12631 real = copy_rtx (PATTERN (insn));
12633 if (reg2 != NULL_RTX)
12634 real = replace_rtx (real, reg2, rreg);
12636 real = replace_rtx (real, reg,
12637 gen_rtx_PLUS (Pmode, gen_rtx_REG (Pmode,
12638 STACK_POINTER_REGNUM),
12641 /* We expect that 'real' is either a SET or a PARALLEL containing
12642 SETs (and possibly other stuff). In a PARALLEL, all the SETs
12643 are important so they all have to be marked RTX_FRAME_RELATED_P. */
12645 if (GET_CODE (real) == SET)
12649 temp = simplify_rtx (SET_SRC (set));
12651 SET_SRC (set) = temp;
12652 temp = simplify_rtx (SET_DEST (set));
12654 SET_DEST (set) = temp;
12655 if (GET_CODE (SET_DEST (set)) == MEM)
12657 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
12659 XEXP (SET_DEST (set), 0) = temp;
12662 else if (GET_CODE (real) == PARALLEL)
12665 for (i = 0; i < XVECLEN (real, 0); i++)
12666 if (GET_CODE (XVECEXP (real, 0, i)) == SET)
12668 rtx set = XVECEXP (real, 0, i);
12670 temp = simplify_rtx (SET_SRC (set));
12672 SET_SRC (set) = temp;
12673 temp = simplify_rtx (SET_DEST (set));
12675 SET_DEST (set) = temp;
12676 if (GET_CODE (SET_DEST (set)) == MEM)
12678 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
12680 XEXP (SET_DEST (set), 0) = temp;
12682 RTX_FRAME_RELATED_P (set) = 1;
12689 real = spe_synthesize_frame_save (real);
12691 RTX_FRAME_RELATED_P (insn) = 1;
12692 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
12697 /* Given an SPE frame note, return a PARALLEL of SETs with the
12698 original note, plus a synthetic register save. */
12701 spe_synthesize_frame_save (rtx real)
12703 rtx synth, offset, reg, real2;
12705 if (GET_CODE (real) != SET
12706 || GET_MODE (SET_SRC (real)) != V2SImode)
12709 /* For the SPE, registers saved in 64-bits, get a PARALLEL for their
12710 frame related note. The parallel contains a set of the register
12711 being saved, and another set to a synthetic register (n+1200).
12712 This is so we can differentiate between 64-bit and 32-bit saves.
12713 Words cannot describe this nastiness. */
12715 if (GET_CODE (SET_DEST (real)) != MEM
12716 || GET_CODE (XEXP (SET_DEST (real), 0)) != PLUS
12717 || GET_CODE (SET_SRC (real)) != REG)
12721 (set (mem (plus (reg x) (const y)))
12724 (set (mem (plus (reg x) (const y+4)))
12728 real2 = copy_rtx (real);
12729 PUT_MODE (SET_DEST (real2), SImode);
12730 reg = SET_SRC (real2);
12731 real2 = replace_rtx (real2, reg, gen_rtx_REG (SImode, REGNO (reg)));
12732 synth = copy_rtx (real2);
12734 if (BYTES_BIG_ENDIAN)
12736 offset = XEXP (XEXP (SET_DEST (real2), 0), 1);
12737 real2 = replace_rtx (real2, offset, GEN_INT (INTVAL (offset) + 4));
12740 reg = SET_SRC (synth);
12742 synth = replace_rtx (synth, reg,
12743 gen_rtx_REG (SImode, REGNO (reg) + 1200));
12745 offset = XEXP (XEXP (SET_DEST (synth), 0), 1);
12746 synth = replace_rtx (synth, offset,
12747 GEN_INT (INTVAL (offset)
12748 + (BYTES_BIG_ENDIAN ? 0 : 4)));
12750 RTX_FRAME_RELATED_P (synth) = 1;
12751 RTX_FRAME_RELATED_P (real2) = 1;
12752 if (BYTES_BIG_ENDIAN)
12753 real = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, synth, real2));
12755 real = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, real2, synth));
12760 /* Returns an insn that has a vrsave set operation with the
12761 appropriate CLOBBERs. */
12764 generate_set_vrsave (rtx reg, rs6000_stack_t *info, int epiloguep)
12767 rtx insn, clobs[TOTAL_ALTIVEC_REGS + 1];
12768 rtx vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
12771 = gen_rtx_SET (VOIDmode,
12773 gen_rtx_UNSPEC_VOLATILE (SImode,
12774 gen_rtvec (2, reg, vrsave),
12779 /* We need to clobber the registers in the mask so the scheduler
12780 does not move sets to VRSAVE before sets of AltiVec registers.
12782 However, if the function receives nonlocal gotos, reload will set
12783 all call saved registers live. We will end up with:
12785 (set (reg 999) (mem))
12786 (parallel [ (set (reg vrsave) (unspec blah))
12787 (clobber (reg 999))])
12789 The clobber will cause the store into reg 999 to be dead, and
12790 flow will attempt to delete an epilogue insn. In this case, we
12791 need an unspec use/set of the register. */
12793 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
12794 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
12796 if (!epiloguep || call_used_regs [i])
12797 clobs[nclobs++] = gen_rtx_CLOBBER (VOIDmode,
12798 gen_rtx_REG (V4SImode, i));
12801 rtx reg = gen_rtx_REG (V4SImode, i);
12804 = gen_rtx_SET (VOIDmode,
12806 gen_rtx_UNSPEC (V4SImode,
12807 gen_rtvec (1, reg), 27));
12811 insn = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nclobs));
12813 for (i = 0; i < nclobs; ++i)
12814 XVECEXP (insn, 0, i) = clobs[i];
12819 /* Save a register into the frame, and emit RTX_FRAME_RELATED_P notes.
12820 Save REGNO into [FRAME_REG + OFFSET] in mode MODE. */
12823 emit_frame_save (rtx frame_reg, rtx frame_ptr, enum machine_mode mode,
12824 unsigned int regno, int offset, HOST_WIDE_INT total_size)
12826 rtx reg, offset_rtx, insn, mem, addr, int_rtx;
12827 rtx replacea, replaceb;
12829 int_rtx = GEN_INT (offset);
12831 /* Some cases that need register indexed addressing. */
12832 if ((TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
12834 && SPE_VECTOR_MODE (mode)
12835 && !SPE_CONST_OFFSET_OK (offset)))
12837 /* Whomever calls us must make sure r11 is available in the
12838 flow path of instructions in the prologue. */
12839 offset_rtx = gen_rtx_REG (Pmode, 11);
12840 emit_move_insn (offset_rtx, int_rtx);
12842 replacea = offset_rtx;
12843 replaceb = int_rtx;
12847 offset_rtx = int_rtx;
12848 replacea = NULL_RTX;
12849 replaceb = NULL_RTX;
12852 reg = gen_rtx_REG (mode, regno);
12853 addr = gen_rtx_PLUS (Pmode, frame_reg, offset_rtx);
12854 mem = gen_rtx_MEM (mode, addr);
12855 set_mem_alias_set (mem, rs6000_sr_alias_set);
12857 insn = emit_move_insn (mem, reg);
12859 rs6000_frame_related (insn, frame_ptr, total_size, replacea, replaceb);
12862 /* Emit an offset memory reference suitable for a frame store, while
12863 converting to a valid addressing mode. */
12866 gen_frame_mem_offset (enum machine_mode mode, rtx reg, int offset)
12868 rtx int_rtx, offset_rtx;
12870 int_rtx = GEN_INT (offset);
12872 if (TARGET_SPE_ABI && SPE_VECTOR_MODE (mode))
12874 offset_rtx = gen_rtx_REG (Pmode, FIXED_SCRATCH);
12875 emit_move_insn (offset_rtx, int_rtx);
12878 offset_rtx = int_rtx;
12880 return gen_rtx_MEM (mode, gen_rtx_PLUS (Pmode, reg, offset_rtx));
12883 #ifndef TARGET_FIX_AND_CONTINUE
12884 #define TARGET_FIX_AND_CONTINUE 0
12887 /* Emit function prologue as insns. */
12890 rs6000_emit_prologue (void)
12892 rs6000_stack_t *info = rs6000_stack_info ();
12893 enum machine_mode reg_mode = Pmode;
12894 int reg_size = TARGET_32BIT ? 4 : 8;
12895 rtx sp_reg_rtx = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
12896 rtx frame_ptr_rtx = gen_rtx_REG (Pmode, 12);
12897 rtx frame_reg_rtx = sp_reg_rtx;
12898 rtx cr_save_rtx = NULL_RTX;
12900 int saving_FPRs_inline;
12901 int using_store_multiple;
12902 HOST_WIDE_INT sp_offset = 0;
12904 if (TARGET_FIX_AND_CONTINUE)
12906 /* gdb on darwin arranges to forward a function from the old
12907 address by modifying the first 4 instructions of the function
12908 to branch to the overriding function. This is necessary to
12909 permit function pointers that point to the old function to
12910 actually forward to the new function. */
12911 emit_insn (gen_nop ());
12912 emit_insn (gen_nop ());
12913 emit_insn (gen_nop ());
12914 emit_insn (gen_nop ());
12917 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
12919 reg_mode = V2SImode;
12923 using_store_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
12924 && (!TARGET_SPE_ABI
12925 || info->spe_64bit_regs_used == 0)
12926 && info->first_gp_reg_save < 31);
12927 saving_FPRs_inline = (info->first_fp_reg_save == 64
12928 || FP_SAVE_INLINE (info->first_fp_reg_save)
12929 || current_function_calls_eh_return
12930 || cfun->machine->ra_need_lr);
12932 /* For V.4, update stack before we do any saving and set back pointer. */
12934 && (DEFAULT_ABI == ABI_V4
12935 || current_function_calls_eh_return))
12937 if (info->total_size < 32767)
12938 sp_offset = info->total_size;
12940 frame_reg_rtx = frame_ptr_rtx;
12941 rs6000_emit_allocate_stack (info->total_size,
12942 (frame_reg_rtx != sp_reg_rtx
12943 && (info->cr_save_p
12945 || info->first_fp_reg_save < 64
12946 || info->first_gp_reg_save < 32
12948 if (frame_reg_rtx != sp_reg_rtx)
12949 rs6000_emit_stack_tie ();
12952 /* Handle world saves specially here. */
12953 if (info->world_save_p)
12959 /* save_world expects lr in r0. */
12960 if (info->lr_save_p)
12962 insn = emit_move_insn (gen_rtx_REG (Pmode, 0),
12963 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
12964 RTX_FRAME_RELATED_P (insn) = 1;
12967 /* The SAVE_WORLD and RESTORE_WORLD routines make a number of
12968 assumptions about the offsets of various bits of the stack
12969 frame. Abort if things aren't what they should be. */
12970 if (info->gp_save_offset != -220
12971 || info->fp_save_offset != -144
12972 || info->lr_save_offset != 8
12973 || info->cr_save_offset != 4
12975 || !info->lr_save_p
12976 || (current_function_calls_eh_return && info->ehrd_offset != -432)
12977 || (info->vrsave_save_offset != -224
12978 || info->altivec_save_offset != (-224 -16 -192)))
12981 treg = gen_rtx_REG (SImode, 11);
12982 emit_move_insn (treg, GEN_INT (-info->total_size));
12984 /* SAVE_WORLD takes the caller's LR in R0 and the frame size
12985 in R11. It also clobbers R12, so beware! */
12987 /* Preserve CR2 for save_world prologues */
12989 sz += 32 - info->first_gp_reg_save;
12990 sz += 64 - info->first_fp_reg_save;
12991 sz += LAST_ALTIVEC_REGNO - info->first_altivec_reg_save + 1;
12992 p = rtvec_alloc (sz);
12994 RTVEC_ELT (p, j++) = gen_rtx_CLOBBER (VOIDmode,
12995 gen_rtx_REG (Pmode,
12996 LINK_REGISTER_REGNUM));
12997 RTVEC_ELT (p, j++) = gen_rtx_USE (VOIDmode,
12998 gen_rtx_SYMBOL_REF (Pmode,
13000 /* We do floats first so that the instruction pattern matches
13002 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
13004 rtx reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
13005 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
13006 GEN_INT (info->fp_save_offset
13007 + sp_offset + 8 * i));
13008 rtx mem = gen_rtx_MEM (DFmode, addr);
13009 set_mem_alias_set (mem, rs6000_sr_alias_set);
13011 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
13013 for (i = 0; info->first_altivec_reg_save + i <= LAST_ALTIVEC_REGNO; i++)
13015 rtx reg = gen_rtx_REG (V4SImode, info->first_altivec_reg_save + i);
13016 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
13017 GEN_INT (info->altivec_save_offset
13018 + sp_offset + 16 * i));
13019 rtx mem = gen_rtx_MEM (V4SImode, addr);
13020 set_mem_alias_set (mem, rs6000_sr_alias_set);
13022 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
13024 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
13026 rtx reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
13027 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
13028 GEN_INT (info->gp_save_offset
13029 + sp_offset + reg_size * i));
13030 rtx mem = gen_rtx_MEM (reg_mode, addr);
13031 set_mem_alias_set (mem, rs6000_sr_alias_set);
13033 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
13037 /* CR register traditionally saved as CR2. */
13038 rtx reg = gen_rtx_REG (reg_mode, CR2_REGNO);
13039 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
13040 GEN_INT (info->cr_save_offset
13042 rtx mem = gen_rtx_MEM (reg_mode, addr);
13043 set_mem_alias_set (mem, rs6000_sr_alias_set);
13045 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
13047 /* Prevent any attempt to delete the setting of r0 and treg! */
13048 RTVEC_ELT (p, j++) = gen_rtx_USE (VOIDmode, gen_rtx_REG (Pmode, 0));
13049 RTVEC_ELT (p, j++) = gen_rtx_USE (VOIDmode, treg);
13050 RTVEC_ELT (p, j++) = gen_rtx_CLOBBER (VOIDmode, sp_reg_rtx);
13052 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
13053 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
13054 NULL_RTX, NULL_RTX);
13056 if (current_function_calls_eh_return)
13061 unsigned int regno = EH_RETURN_DATA_REGNO (i);
13062 if (regno == INVALID_REGNUM)
13064 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, reg_mode, regno,
13065 info->ehrd_offset + sp_offset
13066 + reg_size * (int) i,
13072 /* Save AltiVec registers if needed. */
13073 if (! info->world_save_p && TARGET_ALTIVEC_ABI && info->altivec_size != 0)
13077 /* There should be a non inline version of this, for when we
13078 are saving lots of vector registers. */
13079 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
13080 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
13082 rtx areg, savereg, mem;
13085 offset = info->altivec_save_offset + sp_offset
13086 + 16 * (i - info->first_altivec_reg_save);
13088 savereg = gen_rtx_REG (V4SImode, i);
13090 areg = gen_rtx_REG (Pmode, 0);
13091 emit_move_insn (areg, GEN_INT (offset));
13093 /* AltiVec addressing mode is [reg+reg]. */
13094 mem = gen_rtx_MEM (V4SImode,
13095 gen_rtx_PLUS (Pmode, frame_reg_rtx, areg));
13097 set_mem_alias_set (mem, rs6000_sr_alias_set);
13099 insn = emit_move_insn (mem, savereg);
13101 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
13102 areg, GEN_INT (offset));
13106 /* VRSAVE is a bit vector representing which AltiVec registers
13107 are used. The OS uses this to determine which vector
13108 registers to save on a context switch. We need to save
13109 VRSAVE on the stack frame, add whatever AltiVec registers we
13110 used in this function, and do the corresponding magic in the
13113 if (TARGET_ALTIVEC && TARGET_ALTIVEC_VRSAVE
13114 && ! info->world_save_p && info->vrsave_mask != 0)
13116 rtx reg, mem, vrsave;
13119 /* Get VRSAVE onto a GPR. */
13120 reg = gen_rtx_REG (SImode, 12);
13121 vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
13123 emit_insn (gen_get_vrsave_internal (reg));
13125 emit_insn (gen_rtx_SET (VOIDmode, reg, vrsave));
13128 offset = info->vrsave_save_offset + sp_offset;
13130 = gen_rtx_MEM (SImode,
13131 gen_rtx_PLUS (Pmode, frame_reg_rtx, GEN_INT (offset)));
13132 set_mem_alias_set (mem, rs6000_sr_alias_set);
13133 insn = emit_move_insn (mem, reg);
13135 /* Include the registers in the mask. */
13136 emit_insn (gen_iorsi3 (reg, reg, GEN_INT ((int) info->vrsave_mask)));
13138 insn = emit_insn (generate_set_vrsave (reg, info, 0));
13141 /* If we use the link register, get it into r0. */
13142 if (! info->world_save_p && info->lr_save_p)
13144 insn = emit_move_insn (gen_rtx_REG (Pmode, 0),
13145 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
13146 RTX_FRAME_RELATED_P (insn) = 1;
13149 /* If we need to save CR, put it into r12. */
13150 if (! info->world_save_p && info->cr_save_p && frame_reg_rtx != frame_ptr_rtx)
13154 cr_save_rtx = gen_rtx_REG (SImode, 12);
13155 insn = emit_insn (gen_movesi_from_cr (cr_save_rtx));
13156 RTX_FRAME_RELATED_P (insn) = 1;
13157 /* Now, there's no way that dwarf2out_frame_debug_expr is going
13158 to understand '(unspec:SI [(reg:CC 68) ...] UNSPEC_MOVESI_FROM_CR)'.
13159 But that's OK. All we have to do is specify that _one_ condition
13160 code register is saved in this stack slot. The thrower's epilogue
13161 will then restore all the call-saved registers.
13162 We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux. */
13163 set = gen_rtx_SET (VOIDmode, cr_save_rtx,
13164 gen_rtx_REG (SImode, CR2_REGNO));
13165 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
13170 /* Do any required saving of fpr's. If only one or two to save, do
13171 it ourselves. Otherwise, call function. */
13172 if (! info->world_save_p && saving_FPRs_inline)
13175 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
13176 if ((regs_ever_live[info->first_fp_reg_save+i]
13177 && ! call_used_regs[info->first_fp_reg_save+i]))
13178 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, DFmode,
13179 info->first_fp_reg_save + i,
13180 info->fp_save_offset + sp_offset + 8 * i,
13183 else if (! info->world_save_p && info->first_fp_reg_save != 64)
13187 const char *alloc_rname;
13189 p = rtvec_alloc (2 + 64 - info->first_fp_reg_save);
13191 RTVEC_ELT (p, 0) = gen_rtx_CLOBBER (VOIDmode,
13192 gen_rtx_REG (Pmode,
13193 LINK_REGISTER_REGNUM));
13194 sprintf (rname, "%s%d%s", SAVE_FP_PREFIX,
13195 info->first_fp_reg_save - 32, SAVE_FP_SUFFIX);
13196 alloc_rname = ggc_strdup (rname);
13197 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
13198 gen_rtx_SYMBOL_REF (Pmode,
13200 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
13202 rtx addr, reg, mem;
13203 reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
13204 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
13205 GEN_INT (info->fp_save_offset
13206 + sp_offset + 8*i));
13207 mem = gen_rtx_MEM (DFmode, addr);
13208 set_mem_alias_set (mem, rs6000_sr_alias_set);
13210 RTVEC_ELT (p, i + 2) = gen_rtx_SET (VOIDmode, mem, reg);
13212 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
13213 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
13214 NULL_RTX, NULL_RTX);
13217 /* Save GPRs. This is done as a PARALLEL if we are using
13218 the store-multiple instructions. */
13219 if (! info->world_save_p && using_store_multiple)
13223 p = rtvec_alloc (32 - info->first_gp_reg_save);
13224 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
13226 rtx addr, reg, mem;
13227 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
13228 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
13229 GEN_INT (info->gp_save_offset
13232 mem = gen_rtx_MEM (reg_mode, addr);
13233 set_mem_alias_set (mem, rs6000_sr_alias_set);
13235 RTVEC_ELT (p, i) = gen_rtx_SET (VOIDmode, mem, reg);
13237 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
13238 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
13239 NULL_RTX, NULL_RTX);
13241 else if (! info->world_save_p)
13244 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
13245 if ((regs_ever_live[info->first_gp_reg_save+i]
13246 && (! call_used_regs[info->first_gp_reg_save+i]
13247 || (i+info->first_gp_reg_save
13248 == RS6000_PIC_OFFSET_TABLE_REGNUM
13249 && TARGET_TOC && TARGET_MINIMAL_TOC)))
13250 || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
13251 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
13252 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
13254 rtx addr, reg, mem;
13255 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
13257 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
13259 int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
13262 if (!SPE_CONST_OFFSET_OK (offset))
13264 b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
13265 emit_move_insn (b, GEN_INT (offset));
13268 b = GEN_INT (offset);
13270 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
13271 mem = gen_rtx_MEM (V2SImode, addr);
13272 set_mem_alias_set (mem, rs6000_sr_alias_set);
13273 insn = emit_move_insn (mem, reg);
13275 if (GET_CODE (b) == CONST_INT)
13276 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
13277 NULL_RTX, NULL_RTX);
13279 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
13280 b, GEN_INT (offset));
13284 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
13285 GEN_INT (info->gp_save_offset
13288 mem = gen_rtx_MEM (reg_mode, addr);
13289 set_mem_alias_set (mem, rs6000_sr_alias_set);
13291 insn = emit_move_insn (mem, reg);
13292 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
13293 NULL_RTX, NULL_RTX);
13298 /* ??? There's no need to emit actual instructions here, but it's the
13299 easiest way to get the frame unwind information emitted. */
13300 if (! info->world_save_p && current_function_calls_eh_return)
13302 unsigned int i, regno;
13304 /* In AIX ABI we need to pretend we save r2 here. */
13307 rtx addr, reg, mem;
13309 reg = gen_rtx_REG (reg_mode, 2);
13310 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
13311 GEN_INT (sp_offset + 5 * reg_size));
13312 mem = gen_rtx_MEM (reg_mode, addr);
13313 set_mem_alias_set (mem, rs6000_sr_alias_set);
13315 insn = emit_move_insn (mem, reg);
13316 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
13317 NULL_RTX, NULL_RTX);
13318 PATTERN (insn) = gen_blockage ();
13323 regno = EH_RETURN_DATA_REGNO (i);
13324 if (regno == INVALID_REGNUM)
13327 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, reg_mode, regno,
13328 info->ehrd_offset + sp_offset
13329 + reg_size * (int) i,
13334 /* Save lr if we used it. */
13335 if (! info->world_save_p && info->lr_save_p)
13337 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
13338 GEN_INT (info->lr_save_offset + sp_offset));
13339 rtx reg = gen_rtx_REG (Pmode, 0);
13340 rtx mem = gen_rtx_MEM (Pmode, addr);
13341 /* This should not be of rs6000_sr_alias_set, because of
13342 __builtin_return_address. */
13344 insn = emit_move_insn (mem, reg);
13345 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
13346 NULL_RTX, NULL_RTX);
13349 /* Save CR if we use any that must be preserved. */
13350 if (! info->world_save_p && info->cr_save_p)
13352 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
13353 GEN_INT (info->cr_save_offset + sp_offset));
13354 rtx mem = gen_rtx_MEM (SImode, addr);
13355 /* See the large comment above about why CR2_REGNO is used. */
13356 rtx magic_eh_cr_reg = gen_rtx_REG (SImode, CR2_REGNO);
13358 set_mem_alias_set (mem, rs6000_sr_alias_set);
13360 /* If r12 was used to hold the original sp, copy cr into r0 now
13362 if (REGNO (frame_reg_rtx) == 12)
13366 cr_save_rtx = gen_rtx_REG (SImode, 0);
13367 insn = emit_insn (gen_movesi_from_cr (cr_save_rtx));
13368 RTX_FRAME_RELATED_P (insn) = 1;
13369 set = gen_rtx_SET (VOIDmode, cr_save_rtx, magic_eh_cr_reg);
13370 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
13375 insn = emit_move_insn (mem, cr_save_rtx);
13377 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
13378 NULL_RTX, NULL_RTX);
13381 /* Update stack and set back pointer unless this is V.4,
13382 for which it was done previously. */
13383 if (! info->world_save_p && info->push_p
13384 && !(DEFAULT_ABI == ABI_V4 || current_function_calls_eh_return))
13385 rs6000_emit_allocate_stack (info->total_size, FALSE);
13387 /* Set frame pointer, if needed. */
13388 if (frame_pointer_needed)
13390 insn = emit_move_insn (gen_rtx_REG (Pmode, FRAME_POINTER_REGNUM),
13392 RTX_FRAME_RELATED_P (insn) = 1;
13395 /* If we are using RS6000_PIC_OFFSET_TABLE_REGNUM, we need to set it up. */
13396 if ((TARGET_TOC && TARGET_MINIMAL_TOC && get_pool_size () != 0)
13397 || (DEFAULT_ABI == ABI_V4 && flag_pic == 1
13398 && regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM]))
13400 /* If emit_load_toc_table will use the link register, we need to save
13401 it. We use R12 for this purpose because emit_load_toc_table
13402 can use register 0. This allows us to use a plain 'blr' to return
13403 from the procedure more often. */
13404 int save_LR_around_toc_setup = (TARGET_ELF
13405 && DEFAULT_ABI != ABI_AIX
13407 && ! info->lr_save_p
13408 && EXIT_BLOCK_PTR->pred != NULL);
13409 if (save_LR_around_toc_setup)
13411 rtx lr = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
13413 insn = emit_move_insn (frame_ptr_rtx, lr);
13414 rs6000_maybe_dead (insn);
13415 RTX_FRAME_RELATED_P (insn) = 1;
13417 rs6000_emit_load_toc_table (TRUE);
13419 insn = emit_move_insn (lr, frame_ptr_rtx);
13420 rs6000_maybe_dead (insn);
13421 RTX_FRAME_RELATED_P (insn) = 1;
13424 rs6000_emit_load_toc_table (TRUE);
13428 if (DEFAULT_ABI == ABI_DARWIN
13429 && flag_pic && current_function_uses_pic_offset_table)
13431 rtx lr = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
13432 rtx src = machopic_function_base_sym ();
13434 rs6000_maybe_dead (emit_insn (gen_load_macho_picbase (lr, src)));
13436 insn = emit_move_insn (gen_rtx_REG (Pmode,
13437 RS6000_PIC_OFFSET_TABLE_REGNUM),
13439 rs6000_maybe_dead (insn);
13444 /* Write function prologue. */
13447 rs6000_output_function_prologue (FILE *file,
13448 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
13450 rs6000_stack_t *info = rs6000_stack_info ();
13452 if (TARGET_DEBUG_STACK)
13453 debug_stack_info (info);
13455 /* Write .extern for any function we will call to save and restore
13457 if (info->first_fp_reg_save < 64
13458 && !FP_SAVE_INLINE (info->first_fp_reg_save))
13459 fprintf (file, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
13460 SAVE_FP_PREFIX, info->first_fp_reg_save - 32, SAVE_FP_SUFFIX,
13461 RESTORE_FP_PREFIX, info->first_fp_reg_save - 32,
13462 RESTORE_FP_SUFFIX);
13464 /* Write .extern for AIX common mode routines, if needed. */
13465 if (! TARGET_POWER && ! TARGET_POWERPC && ! common_mode_defined)
13467 fputs ("\t.extern __mulh\n", file);
13468 fputs ("\t.extern __mull\n", file);
13469 fputs ("\t.extern __divss\n", file);
13470 fputs ("\t.extern __divus\n", file);
13471 fputs ("\t.extern __quoss\n", file);
13472 fputs ("\t.extern __quous\n", file);
13473 common_mode_defined = 1;
13476 if (! HAVE_prologue)
13480 /* A NOTE_INSN_DELETED is supposed to be at the start and end of
13481 the "toplevel" insn chain. */
13482 emit_note (NOTE_INSN_DELETED);
13483 rs6000_emit_prologue ();
13484 emit_note (NOTE_INSN_DELETED);
13486 /* Expand INSN_ADDRESSES so final() doesn't crash. */
13490 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
13492 INSN_ADDRESSES_NEW (insn, addr);
13497 if (TARGET_DEBUG_STACK)
13498 debug_rtx_list (get_insns (), 100);
13499 final (get_insns (), file, FALSE, FALSE);
13503 rs6000_pic_labelno++;
13506 /* Emit function epilogue as insns.
13508 At present, dwarf2out_frame_debug_expr doesn't understand
13509 register restores, so we don't bother setting RTX_FRAME_RELATED_P
13510 anywhere in the epilogue. Most of the insns below would in any case
13511 need special notes to explain where r11 is in relation to the stack. */
13514 rs6000_emit_epilogue (int sibcall)
13516 rs6000_stack_t *info;
13517 int restoring_FPRs_inline;
13518 int using_load_multiple;
13519 int using_mfcr_multiple;
13520 int use_backchain_to_restore_sp;
13522 rtx sp_reg_rtx = gen_rtx_REG (Pmode, 1);
13523 rtx frame_reg_rtx = sp_reg_rtx;
13524 enum machine_mode reg_mode = Pmode;
13525 int reg_size = TARGET_32BIT ? 4 : 8;
13528 info = rs6000_stack_info ();
13530 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
13532 reg_mode = V2SImode;
13536 using_load_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
13537 && (!TARGET_SPE_ABI
13538 || info->spe_64bit_regs_used == 0)
13539 && info->first_gp_reg_save < 31);
13540 restoring_FPRs_inline = (sibcall
13541 || current_function_calls_eh_return
13542 || info->first_fp_reg_save == 64
13543 || FP_SAVE_INLINE (info->first_fp_reg_save));
13544 use_backchain_to_restore_sp = (frame_pointer_needed
13545 || current_function_calls_alloca
13546 || info->total_size > 32767);
13547 using_mfcr_multiple = (rs6000_cpu == PROCESSOR_PPC601
13548 || rs6000_cpu == PROCESSOR_PPC603
13549 || rs6000_cpu == PROCESSOR_PPC750
13552 if (info->world_save_p)
13556 const char *alloc_rname;
13559 /* eh_rest_world_r10 will return to the location saved in the LR
13560 stack slot (which is not likely to be our caller.)
13561 Input: R10 -- stack adjustment. Clobbers R0, R11, R12, R7, R8.
13562 rest_world is similar, except any R10 parameter is ignored.
13563 The exception-handling stuff that was here in 2.95 is no
13564 longer necessary. */
13568 + 32 - info->first_gp_reg_save
13569 + LAST_ALTIVEC_REGNO + 1 - info->first_altivec_reg_save
13570 + 63 + 1 - info->first_fp_reg_save);
13572 strcpy (rname, (current_function_calls_eh_return) ?
13573 "*eh_rest_world_r10" : "*rest_world");
13574 alloc_rname = ggc_strdup (rname);
13577 RTVEC_ELT (p, j++) = gen_rtx_RETURN (VOIDmode);
13578 RTVEC_ELT (p, j++) = gen_rtx_USE (VOIDmode,
13579 gen_rtx_REG (Pmode,
13580 LINK_REGISTER_REGNUM));
13582 = gen_rtx_USE (VOIDmode, gen_rtx_SYMBOL_REF (Pmode, alloc_rname));
13583 /* The instruction pattern requires a clobber here;
13584 it is shared with the restVEC helper. */
13586 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 11));
13589 /* CR register traditionally saved as CR2. */
13590 rtx reg = gen_rtx_REG (reg_mode, CR2_REGNO);
13591 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
13592 GEN_INT (info->cr_save_offset));
13593 rtx mem = gen_rtx_MEM (reg_mode, addr);
13594 set_mem_alias_set (mem, rs6000_sr_alias_set);
13596 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
13599 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
13601 rtx reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
13602 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
13603 GEN_INT (info->gp_save_offset
13605 rtx mem = gen_rtx_MEM (reg_mode, addr);
13606 set_mem_alias_set (mem, rs6000_sr_alias_set);
13608 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
13610 for (i = 0; info->first_altivec_reg_save + i <= LAST_ALTIVEC_REGNO; i++)
13612 rtx reg = gen_rtx_REG (V4SImode, info->first_altivec_reg_save + i);
13613 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
13614 GEN_INT (info->altivec_save_offset
13616 rtx mem = gen_rtx_MEM (V4SImode, addr);
13617 set_mem_alias_set (mem, rs6000_sr_alias_set);
13619 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
13621 for (i = 0; info->first_fp_reg_save + i <= 63; i++)
13623 rtx reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
13624 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
13625 GEN_INT (info->fp_save_offset
13627 rtx mem = gen_rtx_MEM (DFmode, addr);
13628 set_mem_alias_set (mem, rs6000_sr_alias_set);
13630 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
13633 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 0));
13635 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, 12));
13637 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, 7));
13639 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, 8));
13641 = gen_rtx_USE (VOIDmode, gen_rtx_REG (SImode, 10));
13642 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
13647 /* If we have a frame pointer, a call to alloca, or a large stack
13648 frame, restore the old stack pointer using the backchain. Otherwise,
13649 we know what size to update it with. */
13650 if (use_backchain_to_restore_sp)
13652 /* Under V.4, don't reset the stack pointer until after we're done
13653 loading the saved registers. */
13654 if (DEFAULT_ABI == ABI_V4)
13655 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
13657 emit_move_insn (frame_reg_rtx,
13658 gen_rtx_MEM (Pmode, sp_reg_rtx));
13661 else if (info->push_p)
13663 if (DEFAULT_ABI == ABI_V4
13664 || current_function_calls_eh_return)
13665 sp_offset = info->total_size;
13668 emit_insn (TARGET_32BIT
13669 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
13670 GEN_INT (info->total_size))
13671 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
13672 GEN_INT (info->total_size)));
13676 /* Restore AltiVec registers if needed. */
13677 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
13681 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
13682 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
13684 rtx addr, areg, mem;
13686 areg = gen_rtx_REG (Pmode, 0);
13688 (areg, GEN_INT (info->altivec_save_offset
13690 + 16 * (i - info->first_altivec_reg_save)));
13692 /* AltiVec addressing mode is [reg+reg]. */
13693 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
13694 mem = gen_rtx_MEM (V4SImode, addr);
13695 set_mem_alias_set (mem, rs6000_sr_alias_set);
13697 emit_move_insn (gen_rtx_REG (V4SImode, i), mem);
13701 /* Restore VRSAVE if needed. */
13702 if (TARGET_ALTIVEC && TARGET_ALTIVEC_VRSAVE
13703 && info->vrsave_mask != 0)
13705 rtx addr, mem, reg;
13707 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
13708 GEN_INT (info->vrsave_save_offset + sp_offset));
13709 mem = gen_rtx_MEM (SImode, addr);
13710 set_mem_alias_set (mem, rs6000_sr_alias_set);
13711 reg = gen_rtx_REG (SImode, 12);
13712 emit_move_insn (reg, mem);
13714 emit_insn (generate_set_vrsave (reg, info, 1));
13717 /* Get the old lr if we saved it. */
13718 if (info->lr_save_p)
13720 rtx mem = gen_frame_mem_offset (Pmode, frame_reg_rtx,
13721 info->lr_save_offset + sp_offset);
13723 set_mem_alias_set (mem, rs6000_sr_alias_set);
13725 emit_move_insn (gen_rtx_REG (Pmode, 0), mem);
13728 /* Get the old cr if we saved it. */
13729 if (info->cr_save_p)
13731 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
13732 GEN_INT (info->cr_save_offset + sp_offset));
13733 rtx mem = gen_rtx_MEM (SImode, addr);
13735 set_mem_alias_set (mem, rs6000_sr_alias_set);
13737 emit_move_insn (gen_rtx_REG (SImode, 12), mem);
13740 /* Set LR here to try to overlap restores below. */
13741 if (info->lr_save_p)
13742 emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
13743 gen_rtx_REG (Pmode, 0));
13745 /* Load exception handler data registers, if needed. */
13746 if (current_function_calls_eh_return)
13748 unsigned int i, regno;
13752 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
13753 GEN_INT (sp_offset + 5 * reg_size));
13754 rtx mem = gen_rtx_MEM (reg_mode, addr);
13756 set_mem_alias_set (mem, rs6000_sr_alias_set);
13758 emit_move_insn (gen_rtx_REG (reg_mode, 2), mem);
13765 regno = EH_RETURN_DATA_REGNO (i);
13766 if (regno == INVALID_REGNUM)
13769 mem = gen_frame_mem_offset (reg_mode, frame_reg_rtx,
13770 info->ehrd_offset + sp_offset
13771 + reg_size * (int) i);
13772 set_mem_alias_set (mem, rs6000_sr_alias_set);
13774 emit_move_insn (gen_rtx_REG (reg_mode, regno), mem);
13778 /* Restore GPRs. This is done as a PARALLEL if we are using
13779 the load-multiple instructions. */
13780 if (using_load_multiple)
13783 p = rtvec_alloc (32 - info->first_gp_reg_save);
13784 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
13786 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
13787 GEN_INT (info->gp_save_offset
13790 rtx mem = gen_rtx_MEM (reg_mode, addr);
13792 set_mem_alias_set (mem, rs6000_sr_alias_set);
13795 gen_rtx_SET (VOIDmode,
13796 gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
13799 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
13802 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
13803 if ((regs_ever_live[info->first_gp_reg_save+i]
13804 && (! call_used_regs[info->first_gp_reg_save+i]
13805 || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
13806 && TARGET_TOC && TARGET_MINIMAL_TOC)))
13807 || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
13808 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
13809 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
13811 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
13812 GEN_INT (info->gp_save_offset
13815 rtx mem = gen_rtx_MEM (reg_mode, addr);
13817 /* Restore 64-bit quantities for SPE. */
13818 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
13820 int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
13823 if (!SPE_CONST_OFFSET_OK (offset))
13825 b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
13826 emit_move_insn (b, GEN_INT (offset));
13829 b = GEN_INT (offset);
13831 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
13832 mem = gen_rtx_MEM (V2SImode, addr);
13835 set_mem_alias_set (mem, rs6000_sr_alias_set);
13837 emit_move_insn (gen_rtx_REG (reg_mode,
13838 info->first_gp_reg_save + i), mem);
13841 /* Restore fpr's if we need to do it without calling a function. */
13842 if (restoring_FPRs_inline)
13843 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
13844 if ((regs_ever_live[info->first_fp_reg_save+i]
13845 && ! call_used_regs[info->first_fp_reg_save+i]))
13848 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
13849 GEN_INT (info->fp_save_offset
13852 mem = gen_rtx_MEM (DFmode, addr);
13853 set_mem_alias_set (mem, rs6000_sr_alias_set);
13855 emit_move_insn (gen_rtx_REG (DFmode,
13856 info->first_fp_reg_save + i),
13860 /* If we saved cr, restore it here. Just those that were used. */
13861 if (info->cr_save_p)
13863 rtx r12_rtx = gen_rtx_REG (SImode, 12);
13866 if (using_mfcr_multiple)
13868 for (i = 0; i < 8; i++)
13869 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
13875 if (using_mfcr_multiple && count > 1)
13880 p = rtvec_alloc (count);
13883 for (i = 0; i < 8; i++)
13884 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
13886 rtvec r = rtvec_alloc (2);
13887 RTVEC_ELT (r, 0) = r12_rtx;
13888 RTVEC_ELT (r, 1) = GEN_INT (1 << (7-i));
13889 RTVEC_ELT (p, ndx) =
13890 gen_rtx_SET (VOIDmode, gen_rtx_REG (CCmode, CR0_REGNO+i),
13891 gen_rtx_UNSPEC (CCmode, r, UNSPEC_MOVESI_TO_CR));
13894 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
13899 for (i = 0; i < 8; i++)
13900 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
13902 emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode,
13908 /* If this is V.4, unwind the stack pointer after all of the loads
13909 have been done. We need to emit a block here so that sched
13910 doesn't decide to move the sp change before the register restores
13911 (which may not have any obvious dependency on the stack). This
13912 doesn't hurt performance, because there is no scheduling that can
13913 be done after this point. */
13914 if (DEFAULT_ABI == ABI_V4
13915 || current_function_calls_eh_return)
13917 if (frame_reg_rtx != sp_reg_rtx)
13918 rs6000_emit_stack_tie ();
13920 if (use_backchain_to_restore_sp)
13922 emit_move_insn (sp_reg_rtx, frame_reg_rtx);
13924 else if (sp_offset != 0)
13926 emit_insn (TARGET_32BIT
13927 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
13928 GEN_INT (sp_offset))
13929 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
13930 GEN_INT (sp_offset)));
13934 if (current_function_calls_eh_return)
13936 rtx sa = EH_RETURN_STACKADJ_RTX;
13937 emit_insn (TARGET_32BIT
13938 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx, sa)
13939 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx, sa));
13945 if (! restoring_FPRs_inline)
13946 p = rtvec_alloc (3 + 64 - info->first_fp_reg_save);
13948 p = rtvec_alloc (2);
13950 RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
13951 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
13952 gen_rtx_REG (Pmode,
13953 LINK_REGISTER_REGNUM));
13955 /* If we have to restore more than two FP registers, branch to the
13956 restore function. It will return to our caller. */
13957 if (! restoring_FPRs_inline)
13961 const char *alloc_rname;
13963 sprintf (rname, "%s%d%s", RESTORE_FP_PREFIX,
13964 info->first_fp_reg_save - 32, RESTORE_FP_SUFFIX);
13965 alloc_rname = ggc_strdup (rname);
13966 RTVEC_ELT (p, 2) = gen_rtx_USE (VOIDmode,
13967 gen_rtx_SYMBOL_REF (Pmode,
13970 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
13973 addr = gen_rtx_PLUS (Pmode, sp_reg_rtx,
13974 GEN_INT (info->fp_save_offset + 8*i));
13975 mem = gen_rtx_MEM (DFmode, addr);
13976 set_mem_alias_set (mem, rs6000_sr_alias_set);
13978 RTVEC_ELT (p, i+3) =
13979 gen_rtx_SET (VOIDmode,
13980 gen_rtx_REG (DFmode, info->first_fp_reg_save + i),
13985 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
13989 /* Write function epilogue. */
13992 rs6000_output_function_epilogue (FILE *file,
13993 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
13995 rs6000_stack_t *info = rs6000_stack_info ();
13997 if (! HAVE_epilogue)
13999 rtx insn = get_last_insn ();
14000 /* If the last insn was a BARRIER, we don't have to write anything except
14001 the trace table. */
14002 if (GET_CODE (insn) == NOTE)
14003 insn = prev_nonnote_insn (insn);
14004 if (insn == 0 || GET_CODE (insn) != BARRIER)
14006 /* This is slightly ugly, but at least we don't have two
14007 copies of the epilogue-emitting code. */
14010 /* A NOTE_INSN_DELETED is supposed to be at the start
14011 and end of the "toplevel" insn chain. */
14012 emit_note (NOTE_INSN_DELETED);
14013 rs6000_emit_epilogue (FALSE);
14014 emit_note (NOTE_INSN_DELETED);
14016 /* Expand INSN_ADDRESSES so final() doesn't crash. */
14020 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
14022 INSN_ADDRESSES_NEW (insn, addr);
14027 if (TARGET_DEBUG_STACK)
14028 debug_rtx_list (get_insns (), 100);
14029 final (get_insns (), file, FALSE, FALSE);
14035 macho_branch_islands ();
14036 /* Mach-O doesn't support labels at the end of objects, so if
14037 it looks like we might want one, insert a NOP. */
14039 rtx insn = get_last_insn ();
14042 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_DELETED_LABEL)
14043 insn = PREV_INSN (insn);
14047 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_DELETED_LABEL)))
14048 fputs ("\tnop\n", file);
14052 /* Output a traceback table here. See /usr/include/sys/debug.h for info
14055 We don't output a traceback table if -finhibit-size-directive was
14056 used. The documentation for -finhibit-size-directive reads
14057 ``don't output a @code{.size} assembler directive, or anything
14058 else that would cause trouble if the function is split in the
14059 middle, and the two halves are placed at locations far apart in
14060 memory.'' The traceback table has this property, since it
14061 includes the offset from the start of the function to the
14062 traceback table itself.
14064 System V.4 Powerpc's (and the embedded ABI derived from it) use a
14065 different traceback table. */
14066 if (DEFAULT_ABI == ABI_AIX && ! flag_inhibit_size_directive
14067 && rs6000_traceback != traceback_none)
14069 const char *fname = NULL;
14070 const char *language_string = lang_hooks.name;
14071 int fixed_parms = 0, float_parms = 0, parm_info = 0;
14073 int optional_tbtab;
14075 if (rs6000_traceback == traceback_full)
14076 optional_tbtab = 1;
14077 else if (rs6000_traceback == traceback_part)
14078 optional_tbtab = 0;
14080 optional_tbtab = !optimize_size && !TARGET_ELF;
14082 if (optional_tbtab)
14084 fname = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
14085 while (*fname == '.') /* V.4 encodes . in the name */
14088 /* Need label immediately before tbtab, so we can compute
14089 its offset from the function start. */
14090 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
14091 ASM_OUTPUT_LABEL (file, fname);
14094 /* The .tbtab pseudo-op can only be used for the first eight
14095 expressions, since it can't handle the possibly variable
14096 length fields that follow. However, if you omit the optional
14097 fields, the assembler outputs zeros for all optional fields
14098 anyways, giving each variable length field is minimum length
14099 (as defined in sys/debug.h). Thus we can not use the .tbtab
14100 pseudo-op at all. */
14102 /* An all-zero word flags the start of the tbtab, for debuggers
14103 that have to find it by searching forward from the entry
14104 point or from the current pc. */
14105 fputs ("\t.long 0\n", file);
14107 /* Tbtab format type. Use format type 0. */
14108 fputs ("\t.byte 0,", file);
14110 /* Language type. Unfortunately, there does not seem to be any
14111 official way to discover the language being compiled, so we
14112 use language_string.
14113 C is 0. Fortran is 1. Pascal is 2. Ada is 3. C++ is 9.
14114 Java is 13. Objective-C is 14. */
14115 if (! strcmp (language_string, "GNU C"))
14117 else if (! strcmp (language_string, "GNU F77")
14118 || ! strcmp (language_string, "GNU F95"))
14120 else if (! strcmp (language_string, "GNU Pascal"))
14122 else if (! strcmp (language_string, "GNU Ada"))
14124 else if (! strcmp (language_string, "GNU C++"))
14126 else if (! strcmp (language_string, "GNU Java"))
14128 else if (! strcmp (language_string, "GNU Objective-C"))
14132 fprintf (file, "%d,", i);
14134 /* 8 single bit fields: global linkage (not set for C extern linkage,
14135 apparently a PL/I convention?), out-of-line epilogue/prologue, offset
14136 from start of procedure stored in tbtab, internal function, function
14137 has controlled storage, function has no toc, function uses fp,
14138 function logs/aborts fp operations. */
14139 /* Assume that fp operations are used if any fp reg must be saved. */
14140 fprintf (file, "%d,",
14141 (optional_tbtab << 5) | ((info->first_fp_reg_save != 64) << 1));
14143 /* 6 bitfields: function is interrupt handler, name present in
14144 proc table, function calls alloca, on condition directives
14145 (controls stack walks, 3 bits), saves condition reg, saves
14147 /* The `function calls alloca' bit seems to be set whenever reg 31 is
14148 set up as a frame pointer, even when there is no alloca call. */
14149 fprintf (file, "%d,",
14150 ((optional_tbtab << 6)
14151 | ((optional_tbtab & frame_pointer_needed) << 5)
14152 | (info->cr_save_p << 1)
14153 | (info->lr_save_p)));
14155 /* 3 bitfields: saves backchain, fixup code, number of fpr saved
14157 fprintf (file, "%d,",
14158 (info->push_p << 7) | (64 - info->first_fp_reg_save));
14160 /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits). */
14161 fprintf (file, "%d,", (32 - first_reg_to_save ()));
14163 if (optional_tbtab)
14165 /* Compute the parameter info from the function decl argument
14168 int next_parm_info_bit = 31;
14170 for (decl = DECL_ARGUMENTS (current_function_decl);
14171 decl; decl = TREE_CHAIN (decl))
14173 rtx parameter = DECL_INCOMING_RTL (decl);
14174 enum machine_mode mode = GET_MODE (parameter);
14176 if (GET_CODE (parameter) == REG)
14178 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
14184 if (mode == SFmode)
14186 else if (mode == DFmode || mode == TFmode)
14191 /* If only one bit will fit, don't or in this entry. */
14192 if (next_parm_info_bit > 0)
14193 parm_info |= (bits << (next_parm_info_bit - 1));
14194 next_parm_info_bit -= 2;
14198 fixed_parms += ((GET_MODE_SIZE (mode)
14199 + (UNITS_PER_WORD - 1))
14201 next_parm_info_bit -= 1;
14207 /* Number of fixed point parameters. */
14208 /* This is actually the number of words of fixed point parameters; thus
14209 an 8 byte struct counts as 2; and thus the maximum value is 8. */
14210 fprintf (file, "%d,", fixed_parms);
14212 /* 2 bitfields: number of floating point parameters (7 bits), parameters
14214 /* This is actually the number of fp registers that hold parameters;
14215 and thus the maximum value is 13. */
14216 /* Set parameters on stack bit if parameters are not in their original
14217 registers, regardless of whether they are on the stack? Xlc
14218 seems to set the bit when not optimizing. */
14219 fprintf (file, "%d\n", ((float_parms << 1) | (! optimize)));
14221 if (! optional_tbtab)
14224 /* Optional fields follow. Some are variable length. */
14226 /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
14227 11 double float. */
14228 /* There is an entry for each parameter in a register, in the order that
14229 they occur in the parameter list. Any intervening arguments on the
14230 stack are ignored. If the list overflows a long (max possible length
14231 34 bits) then completely leave off all elements that don't fit. */
14232 /* Only emit this long if there was at least one parameter. */
14233 if (fixed_parms || float_parms)
14234 fprintf (file, "\t.long %d\n", parm_info);
14236 /* Offset from start of code to tb table. */
14237 fputs ("\t.long ", file);
14238 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
14240 RS6000_OUTPUT_BASENAME (file, fname);
14242 assemble_name (file, fname);
14244 rs6000_output_function_entry (file, fname);
14247 /* Interrupt handler mask. */
14248 /* Omit this long, since we never set the interrupt handler bit
14251 /* Number of CTL (controlled storage) anchors. */
14252 /* Omit this long, since the has_ctl bit is never set above. */
14254 /* Displacement into stack of each CTL anchor. */
14255 /* Omit this list of longs, because there are no CTL anchors. */
14257 /* Length of function name. */
14260 fprintf (file, "\t.short %d\n", (int) strlen (fname));
14262 /* Function name. */
14263 assemble_string (fname, strlen (fname));
14265 /* Register for alloca automatic storage; this is always reg 31.
14266 Only emit this if the alloca bit was set above. */
14267 if (frame_pointer_needed)
14268 fputs ("\t.byte 31\n", file);
14270 fputs ("\t.align 2\n", file);
14274 /* A C compound statement that outputs the assembler code for a thunk
14275 function, used to implement C++ virtual function calls with
14276 multiple inheritance. The thunk acts as a wrapper around a virtual
14277 function, adjusting the implicit object parameter before handing
14278 control off to the real function.
14280 First, emit code to add the integer DELTA to the location that
14281 contains the incoming first argument. Assume that this argument
14282 contains a pointer, and is the one used to pass the `this' pointer
14283 in C++. This is the incoming argument *before* the function
14284 prologue, e.g. `%o0' on a sparc. The addition must preserve the
14285 values of all other incoming arguments.
14287 After the addition, emit code to jump to FUNCTION, which is a
14288 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does
14289 not touch the return address. Hence returning from FUNCTION will
14290 return to whoever called the current `thunk'.
14292 The effect must be as if FUNCTION had been called directly with the
14293 adjusted first argument. This macro is responsible for emitting
14294 all of the code for a thunk function; output_function_prologue()
14295 and output_function_epilogue() are not invoked.
14297 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already
14298 been extracted from it.) It might possibly be useful on some
14299 targets, but probably not.
14301 If you do not define this macro, the target-independent code in the
14302 C++ frontend will generate a less efficient heavyweight thunk that
14303 calls FUNCTION instead of jumping to it. The generic approach does
14304 not support varargs. */
14307 rs6000_output_mi_thunk (FILE *file, tree thunk_fndecl ATTRIBUTE_UNUSED,
14308 HOST_WIDE_INT delta, HOST_WIDE_INT vcall_offset,
14311 rtx this, insn, funexp;
14313 reload_completed = 1;
14314 epilogue_completed = 1;
14315 no_new_pseudos = 1;
14316 reset_block_changes ();
14318 /* Mark the end of the (empty) prologue. */
14319 emit_note (NOTE_INSN_PROLOGUE_END);
14321 /* Find the "this" pointer. If the function returns a structure,
14322 the structure return pointer is in r3. */
14323 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
14324 this = gen_rtx_REG (Pmode, 4);
14326 this = gen_rtx_REG (Pmode, 3);
14328 /* Apply the constant offset, if required. */
14331 rtx delta_rtx = GEN_INT (delta);
14332 emit_insn (TARGET_32BIT
14333 ? gen_addsi3 (this, this, delta_rtx)
14334 : gen_adddi3 (this, this, delta_rtx));
14337 /* Apply the offset from the vtable, if required. */
14340 rtx vcall_offset_rtx = GEN_INT (vcall_offset);
14341 rtx tmp = gen_rtx_REG (Pmode, 12);
14343 emit_move_insn (tmp, gen_rtx_MEM (Pmode, this));
14344 if (((unsigned HOST_WIDE_INT) vcall_offset) + 0x8000 >= 0x10000)
14346 emit_insn (TARGET_32BIT
14347 ? gen_addsi3 (tmp, tmp, vcall_offset_rtx)
14348 : gen_adddi3 (tmp, tmp, vcall_offset_rtx));
14349 emit_move_insn (tmp, gen_rtx_MEM (Pmode, tmp));
14353 rtx loc = gen_rtx_PLUS (Pmode, tmp, vcall_offset_rtx);
14355 emit_move_insn (tmp, gen_rtx_MEM (Pmode, loc));
14357 emit_insn (TARGET_32BIT
14358 ? gen_addsi3 (this, this, tmp)
14359 : gen_adddi3 (this, this, tmp));
14362 /* Generate a tail call to the target function. */
14363 if (!TREE_USED (function))
14365 assemble_external (function);
14366 TREE_USED (function) = 1;
14368 funexp = XEXP (DECL_RTL (function), 0);
14369 funexp = gen_rtx_MEM (FUNCTION_MODE, funexp);
14372 if (MACHOPIC_INDIRECT)
14373 funexp = machopic_indirect_call_target (funexp);
14376 /* gen_sibcall expects reload to convert scratch pseudo to LR so we must
14377 generate sibcall RTL explicitly to avoid constraint abort. */
14378 insn = emit_call_insn (
14379 gen_rtx_PARALLEL (VOIDmode,
14381 gen_rtx_CALL (VOIDmode,
14382 funexp, const0_rtx),
14383 gen_rtx_USE (VOIDmode, const0_rtx),
14384 gen_rtx_USE (VOIDmode,
14385 gen_rtx_REG (SImode,
14386 LINK_REGISTER_REGNUM)),
14387 gen_rtx_RETURN (VOIDmode))));
14388 SIBLING_CALL_P (insn) = 1;
14391 /* Run just enough of rest_of_compilation to get the insns emitted.
14392 There's not really enough bulk here to make other passes such as
14393 instruction scheduling worth while. Note that use_thunk calls
14394 assemble_start_function and assemble_end_function. */
14395 insn = get_insns ();
14396 insn_locators_initialize ();
14397 shorten_branches (insn);
14398 final_start_function (insn, file, 1);
14399 final (insn, file, 1, 0);
14400 final_end_function ();
14402 reload_completed = 0;
14403 epilogue_completed = 0;
14404 no_new_pseudos = 0;
14407 /* A quick summary of the various types of 'constant-pool tables'
14410 Target Flags Name One table per
14411 AIX (none) AIX TOC object file
14412 AIX -mfull-toc AIX TOC object file
14413 AIX -mminimal-toc AIX minimal TOC translation unit
14414 SVR4/EABI (none) SVR4 SDATA object file
14415 SVR4/EABI -fpic SVR4 pic object file
14416 SVR4/EABI -fPIC SVR4 PIC translation unit
14417 SVR4/EABI -mrelocatable EABI TOC function
14418 SVR4/EABI -maix AIX TOC object file
14419 SVR4/EABI -maix -mminimal-toc
14420 AIX minimal TOC translation unit
14422 Name Reg. Set by entries contains:
14423 made by addrs? fp? sum?
14425 AIX TOC 2 crt0 as Y option option
14426 AIX minimal TOC 30 prolog gcc Y Y option
14427 SVR4 SDATA 13 crt0 gcc N Y N
14428 SVR4 pic 30 prolog ld Y not yet N
14429 SVR4 PIC 30 prolog gcc Y option option
14430 EABI TOC 30 prolog gcc Y option option
14434 /* Hash functions for the hash table. */
14437 rs6000_hash_constant (rtx k)
14439 enum rtx_code code = GET_CODE (k);
14440 enum machine_mode mode = GET_MODE (k);
14441 unsigned result = (code << 3) ^ mode;
14442 const char *format;
14445 format = GET_RTX_FORMAT (code);
14446 flen = strlen (format);
14452 return result * 1231 + (unsigned) INSN_UID (XEXP (k, 0));
14455 if (mode != VOIDmode)
14456 return real_hash (CONST_DOUBLE_REAL_VALUE (k)) * result;
14468 for (; fidx < flen; fidx++)
14469 switch (format[fidx])
14474 const char *str = XSTR (k, fidx);
14475 len = strlen (str);
14476 result = result * 613 + len;
14477 for (i = 0; i < len; i++)
14478 result = result * 613 + (unsigned) str[i];
14483 result = result * 1231 + rs6000_hash_constant (XEXP (k, fidx));
14487 result = result * 613 + (unsigned) XINT (k, fidx);
14490 if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT))
14491 result = result * 613 + (unsigned) XWINT (k, fidx);
14495 for (i = 0; i < sizeof(HOST_WIDE_INT)/sizeof(unsigned); i++)
14496 result = result * 613 + (unsigned) (XWINT (k, fidx)
14510 toc_hash_function (const void *hash_entry)
14512 const struct toc_hash_struct *thc =
14513 (const struct toc_hash_struct *) hash_entry;
14514 return rs6000_hash_constant (thc->key) ^ thc->key_mode;
14517 /* Compare H1 and H2 for equivalence. */
14520 toc_hash_eq (const void *h1, const void *h2)
14522 rtx r1 = ((const struct toc_hash_struct *) h1)->key;
14523 rtx r2 = ((const struct toc_hash_struct *) h2)->key;
14525 if (((const struct toc_hash_struct *) h1)->key_mode
14526 != ((const struct toc_hash_struct *) h2)->key_mode)
14529 return rtx_equal_p (r1, r2);
14532 /* These are the names given by the C++ front-end to vtables, and
14533 vtable-like objects. Ideally, this logic should not be here;
14534 instead, there should be some programmatic way of inquiring as
14535 to whether or not an object is a vtable. */
14537 #define VTABLE_NAME_P(NAME) \
14538 (strncmp ("_vt.", name, strlen("_vt.")) == 0 \
14539 || strncmp ("_ZTV", name, strlen ("_ZTV")) == 0 \
14540 || strncmp ("_ZTT", name, strlen ("_ZTT")) == 0 \
14541 || strncmp ("_ZTI", name, strlen ("_ZTI")) == 0 \
14542 || strncmp ("_ZTC", name, strlen ("_ZTC")) == 0)
14545 rs6000_output_symbol_ref (FILE *file, rtx x)
14547 /* Currently C++ toc references to vtables can be emitted before it
14548 is decided whether the vtable is public or private. If this is
14549 the case, then the linker will eventually complain that there is
14550 a reference to an unknown section. Thus, for vtables only,
14551 we emit the TOC reference to reference the symbol and not the
14553 const char *name = XSTR (x, 0);
14555 if (VTABLE_NAME_P (name))
14557 RS6000_OUTPUT_BASENAME (file, name);
14560 assemble_name (file, name);
14563 /* Output a TOC entry. We derive the entry name from what is being
14567 output_toc (FILE *file, rtx x, int labelno, enum machine_mode mode)
14570 const char *name = buf;
14571 const char *real_name;
14578 /* When the linker won't eliminate them, don't output duplicate
14579 TOC entries (this happens on AIX if there is any kind of TOC,
14580 and on SVR4 under -fPIC or -mrelocatable). Don't do this for
14582 if (TARGET_TOC && GET_CODE (x) != LABEL_REF)
14584 struct toc_hash_struct *h;
14587 /* Create toc_hash_table. This can't be done at OVERRIDE_OPTIONS
14588 time because GGC is not initialized at that point. */
14589 if (toc_hash_table == NULL)
14590 toc_hash_table = htab_create_ggc (1021, toc_hash_function,
14591 toc_hash_eq, NULL);
14593 h = ggc_alloc (sizeof (*h));
14595 h->key_mode = mode;
14596 h->labelno = labelno;
14598 found = htab_find_slot (toc_hash_table, h, 1);
14599 if (*found == NULL)
14601 else /* This is indeed a duplicate.
14602 Set this label equal to that label. */
14604 fputs ("\t.set ", file);
14605 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
14606 fprintf (file, "%d,", labelno);
14607 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
14608 fprintf (file, "%d\n", ((*(const struct toc_hash_struct **)
14614 /* If we're going to put a double constant in the TOC, make sure it's
14615 aligned properly when strict alignment is on. */
14616 if (GET_CODE (x) == CONST_DOUBLE
14617 && STRICT_ALIGNMENT
14618 && GET_MODE_BITSIZE (mode) >= 64
14619 && ! (TARGET_NO_FP_IN_TOC && ! TARGET_MINIMAL_TOC)) {
14620 ASM_OUTPUT_ALIGN (file, 3);
14623 (*targetm.asm_out.internal_label) (file, "LC", labelno);
14625 /* Handle FP constants specially. Note that if we have a minimal
14626 TOC, things we put here aren't actually in the TOC, so we can allow
14628 if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == TFmode)
14630 REAL_VALUE_TYPE rv;
14633 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
14634 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);
14638 if (TARGET_MINIMAL_TOC)
14639 fputs (DOUBLE_INT_ASM_OP, file);
14641 fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
14642 k[0] & 0xffffffff, k[1] & 0xffffffff,
14643 k[2] & 0xffffffff, k[3] & 0xffffffff);
14644 fprintf (file, "0x%lx%08lx,0x%lx%08lx\n",
14645 k[0] & 0xffffffff, k[1] & 0xffffffff,
14646 k[2] & 0xffffffff, k[3] & 0xffffffff);
14651 if (TARGET_MINIMAL_TOC)
14652 fputs ("\t.long ", file);
14654 fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
14655 k[0] & 0xffffffff, k[1] & 0xffffffff,
14656 k[2] & 0xffffffff, k[3] & 0xffffffff);
14657 fprintf (file, "0x%lx,0x%lx,0x%lx,0x%lx\n",
14658 k[0] & 0xffffffff, k[1] & 0xffffffff,
14659 k[2] & 0xffffffff, k[3] & 0xffffffff);
14663 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
14665 REAL_VALUE_TYPE rv;
14668 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
14669 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
14673 if (TARGET_MINIMAL_TOC)
14674 fputs (DOUBLE_INT_ASM_OP, file);
14676 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
14677 k[0] & 0xffffffff, k[1] & 0xffffffff);
14678 fprintf (file, "0x%lx%08lx\n",
14679 k[0] & 0xffffffff, k[1] & 0xffffffff);
14684 if (TARGET_MINIMAL_TOC)
14685 fputs ("\t.long ", file);
14687 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
14688 k[0] & 0xffffffff, k[1] & 0xffffffff);
14689 fprintf (file, "0x%lx,0x%lx\n",
14690 k[0] & 0xffffffff, k[1] & 0xffffffff);
14694 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
14696 REAL_VALUE_TYPE rv;
14699 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
14700 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
14704 if (TARGET_MINIMAL_TOC)
14705 fputs (DOUBLE_INT_ASM_OP, file);
14707 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
14708 fprintf (file, "0x%lx00000000\n", l & 0xffffffff);
14713 if (TARGET_MINIMAL_TOC)
14714 fputs ("\t.long ", file);
14716 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
14717 fprintf (file, "0x%lx\n", l & 0xffffffff);
14721 else if (GET_MODE (x) == VOIDmode
14722 && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
14724 unsigned HOST_WIDE_INT low;
14725 HOST_WIDE_INT high;
14727 if (GET_CODE (x) == CONST_DOUBLE)
14729 low = CONST_DOUBLE_LOW (x);
14730 high = CONST_DOUBLE_HIGH (x);
14733 #if HOST_BITS_PER_WIDE_INT == 32
14736 high = (low & 0x80000000) ? ~0 : 0;
14740 low = INTVAL (x) & 0xffffffff;
14741 high = (HOST_WIDE_INT) INTVAL (x) >> 32;
14745 /* TOC entries are always Pmode-sized, but since this
14746 is a bigendian machine then if we're putting smaller
14747 integer constants in the TOC we have to pad them.
14748 (This is still a win over putting the constants in
14749 a separate constant pool, because then we'd have
14750 to have both a TOC entry _and_ the actual constant.)
14752 For a 32-bit target, CONST_INT values are loaded and shifted
14753 entirely within `low' and can be stored in one TOC entry. */
14755 if (TARGET_64BIT && POINTER_SIZE < GET_MODE_BITSIZE (mode))
14756 abort ();/* It would be easy to make this work, but it doesn't now. */
14758 if (POINTER_SIZE > GET_MODE_BITSIZE (mode))
14760 #if HOST_BITS_PER_WIDE_INT == 32
14761 lshift_double (low, high, POINTER_SIZE - GET_MODE_BITSIZE (mode),
14762 POINTER_SIZE, &low, &high, 0);
14765 low <<= POINTER_SIZE - GET_MODE_BITSIZE (mode);
14766 high = (HOST_WIDE_INT) low >> 32;
14773 if (TARGET_MINIMAL_TOC)
14774 fputs (DOUBLE_INT_ASM_OP, file);
14776 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
14777 (long) high & 0xffffffff, (long) low & 0xffffffff);
14778 fprintf (file, "0x%lx%08lx\n",
14779 (long) high & 0xffffffff, (long) low & 0xffffffff);
14784 if (POINTER_SIZE < GET_MODE_BITSIZE (mode))
14786 if (TARGET_MINIMAL_TOC)
14787 fputs ("\t.long ", file);
14789 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
14790 (long) high & 0xffffffff, (long) low & 0xffffffff);
14791 fprintf (file, "0x%lx,0x%lx\n",
14792 (long) high & 0xffffffff, (long) low & 0xffffffff);
14796 if (TARGET_MINIMAL_TOC)
14797 fputs ("\t.long ", file);
14799 fprintf (file, "\t.tc IS_%lx[TC],", (long) low & 0xffffffff);
14800 fprintf (file, "0x%lx\n", (long) low & 0xffffffff);
14806 if (GET_CODE (x) == CONST)
14808 if (GET_CODE (XEXP (x, 0)) != PLUS)
14811 base = XEXP (XEXP (x, 0), 0);
14812 offset = INTVAL (XEXP (XEXP (x, 0), 1));
14815 if (GET_CODE (base) == SYMBOL_REF)
14816 name = XSTR (base, 0);
14817 else if (GET_CODE (base) == LABEL_REF)
14818 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (XEXP (base, 0)));
14819 else if (GET_CODE (base) == CODE_LABEL)
14820 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (base));
14824 real_name = (*targetm.strip_name_encoding) (name);
14825 if (TARGET_MINIMAL_TOC)
14826 fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
14829 fprintf (file, "\t.tc %s", real_name);
14832 fprintf (file, ".N%d", - offset);
14834 fprintf (file, ".P%d", offset);
14836 fputs ("[TC],", file);
14839 /* Currently C++ toc references to vtables can be emitted before it
14840 is decided whether the vtable is public or private. If this is
14841 the case, then the linker will eventually complain that there is
14842 a TOC reference to an unknown section. Thus, for vtables only,
14843 we emit the TOC reference to reference the symbol and not the
14845 if (VTABLE_NAME_P (name))
14847 RS6000_OUTPUT_BASENAME (file, name);
14849 fprintf (file, "%d", offset);
14850 else if (offset > 0)
14851 fprintf (file, "+%d", offset);
14854 output_addr_const (file, x);
14858 /* Output an assembler pseudo-op to write an ASCII string of N characters
14859 starting at P to FILE.
14861 On the RS/6000, we have to do this using the .byte operation and
14862 write out special characters outside the quoted string.
14863 Also, the assembler is broken; very long strings are truncated,
14864 so we must artificially break them up early. */
14867 output_ascii (FILE *file, const char *p, int n)
14870 int i, count_string;
14871 const char *for_string = "\t.byte \"";
14872 const char *for_decimal = "\t.byte ";
14873 const char *to_close = NULL;
14876 for (i = 0; i < n; i++)
14879 if (c >= ' ' && c < 0177)
14882 fputs (for_string, file);
14885 /* Write two quotes to get one. */
14893 for_decimal = "\"\n\t.byte ";
14897 if (count_string >= 512)
14899 fputs (to_close, file);
14901 for_string = "\t.byte \"";
14902 for_decimal = "\t.byte ";
14910 fputs (for_decimal, file);
14911 fprintf (file, "%d", c);
14913 for_string = "\n\t.byte \"";
14914 for_decimal = ", ";
14920 /* Now close the string if we have written one. Then end the line. */
14922 fputs (to_close, file);
14925 /* Generate a unique section name for FILENAME for a section type
14926 represented by SECTION_DESC. Output goes into BUF.
14928 SECTION_DESC can be any string, as long as it is different for each
14929 possible section type.
14931 We name the section in the same manner as xlc. The name begins with an
14932 underscore followed by the filename (after stripping any leading directory
14933 names) with the last period replaced by the string SECTION_DESC. If
14934 FILENAME does not contain a period, SECTION_DESC is appended to the end of
14938 rs6000_gen_section_name (char **buf, const char *filename,
14939 const char *section_desc)
14941 const char *q, *after_last_slash, *last_period = 0;
14945 after_last_slash = filename;
14946 for (q = filename; *q; q++)
14949 after_last_slash = q + 1;
14950 else if (*q == '.')
14954 len = strlen (after_last_slash) + strlen (section_desc) + 2;
14955 *buf = (char *) xmalloc (len);
14960 for (q = after_last_slash; *q; q++)
14962 if (q == last_period)
14964 strcpy (p, section_desc);
14965 p += strlen (section_desc);
14969 else if (ISALNUM (*q))
14973 if (last_period == 0)
14974 strcpy (p, section_desc);
14979 /* Emit profile function. */
14982 output_profile_hook (int labelno ATTRIBUTE_UNUSED)
14984 if (TARGET_PROFILE_KERNEL)
14987 if (DEFAULT_ABI == ABI_AIX)
14989 #ifndef NO_PROFILE_COUNTERS
14990 # define NO_PROFILE_COUNTERS 0
14992 if (NO_PROFILE_COUNTERS)
14993 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 0);
14997 const char *label_name;
15000 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
15001 label_name = (*targetm.strip_name_encoding) (ggc_strdup (buf));
15002 fun = gen_rtx_SYMBOL_REF (Pmode, label_name);
15004 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 1,
15008 else if (DEFAULT_ABI == ABI_DARWIN)
15010 const char *mcount_name = RS6000_MCOUNT;
15011 int caller_addr_regno = LINK_REGISTER_REGNUM;
15013 /* Be conservative and always set this, at least for now. */
15014 current_function_uses_pic_offset_table = 1;
15017 /* For PIC code, set up a stub and collect the caller's address
15018 from r0, which is where the prologue puts it. */
15019 if (MACHOPIC_INDIRECT
15020 && current_function_uses_pic_offset_table)
15021 caller_addr_regno = 0;
15023 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, mcount_name),
15025 gen_rtx_REG (Pmode, caller_addr_regno), Pmode);
15029 /* Write function profiler code. */
15032 output_function_profiler (FILE *file, int labelno)
15037 switch (DEFAULT_ABI)
15046 warning ("no profiling of 64-bit code for this ABI");
15049 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
15050 fprintf (file, "\tmflr %s\n", reg_names[0]);
15053 fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file);
15054 asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
15055 reg_names[0], save_lr, reg_names[1]);
15056 asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
15057 asm_fprintf (file, "\t{l|lwz} %s,", reg_names[0]);
15058 assemble_name (file, buf);
15059 asm_fprintf (file, "@got(%s)\n", reg_names[12]);
15061 else if (flag_pic > 1)
15063 asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
15064 reg_names[0], save_lr, reg_names[1]);
15065 /* Now, we need to get the address of the label. */
15066 fputs ("\tbl 1f\n\t.long ", file);
15067 assemble_name (file, buf);
15068 fputs ("-.\n1:", file);
15069 asm_fprintf (file, "\tmflr %s\n", reg_names[11]);
15070 asm_fprintf (file, "\t{l|lwz} %s,0(%s)\n",
15071 reg_names[0], reg_names[11]);
15072 asm_fprintf (file, "\t{cax|add} %s,%s,%s\n",
15073 reg_names[0], reg_names[0], reg_names[11]);
15077 asm_fprintf (file, "\t{liu|lis} %s,", reg_names[12]);
15078 assemble_name (file, buf);
15079 fputs ("@ha\n", file);
15080 asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
15081 reg_names[0], save_lr, reg_names[1]);
15082 asm_fprintf (file, "\t{cal|la} %s,", reg_names[0]);
15083 assemble_name (file, buf);
15084 asm_fprintf (file, "@l(%s)\n", reg_names[12]);
15087 /* ABI_V4 saves the static chain reg with ASM_OUTPUT_REG_PUSH. */
15088 fprintf (file, "\tbl %s%s\n",
15089 RS6000_MCOUNT, flag_pic ? "@plt" : "");
15094 if (!TARGET_PROFILE_KERNEL)
15096 /* Don't do anything, done in output_profile_hook (). */
15103 asm_fprintf (file, "\tmflr %s\n", reg_names[0]);
15104 asm_fprintf (file, "\tstd %s,16(%s)\n", reg_names[0], reg_names[1]);
15106 if (cfun->static_chain_decl != NULL)
15108 asm_fprintf (file, "\tstd %s,24(%s)\n",
15109 reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
15110 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
15111 asm_fprintf (file, "\tld %s,24(%s)\n",
15112 reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
15115 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
15122 /* Power4 load update and store update instructions are cracked into a
15123 load or store and an integer insn which are executed in the same cycle.
15124 Branches have their own dispatch slot which does not count against the
15125 GCC issue rate, but it changes the program flow so there are no other
15126 instructions to issue in this cycle. */
15129 rs6000_variable_issue (FILE *stream ATTRIBUTE_UNUSED,
15130 int verbose ATTRIBUTE_UNUSED,
15131 rtx insn, int more)
15133 if (GET_CODE (PATTERN (insn)) == USE
15134 || GET_CODE (PATTERN (insn)) == CLOBBER)
15137 if (rs6000_sched_groups)
15139 if (is_microcoded_insn (insn))
15141 else if (is_cracked_insn (insn))
15142 return more > 2 ? more - 2 : 0;
15148 /* Adjust the cost of a scheduling dependency. Return the new cost of
15149 a dependency LINK or INSN on DEP_INSN. COST is the current cost. */
15152 rs6000_adjust_cost (rtx insn, rtx link, rtx dep_insn ATTRIBUTE_UNUSED,
15155 if (! recog_memoized (insn))
15158 if (REG_NOTE_KIND (link) != 0)
15161 if (REG_NOTE_KIND (link) == 0)
15163 /* Data dependency; DEP_INSN writes a register that INSN reads
15164 some cycles later. */
15165 switch (get_attr_type (insn))
15168 /* Tell the first scheduling pass about the latency between
15169 a mtctr and bctr (and mtlr and br/blr). The first
15170 scheduling pass will not know about this latency since
15171 the mtctr instruction, which has the latency associated
15172 to it, will be generated by reload. */
15173 return TARGET_POWER ? 5 : 4;
15175 /* Leave some extra cycles between a compare and its
15176 dependent branch, to inhibit expensive mispredicts. */
15177 if ((rs6000_cpu_attr == CPU_PPC603
15178 || rs6000_cpu_attr == CPU_PPC604
15179 || rs6000_cpu_attr == CPU_PPC604E
15180 || rs6000_cpu_attr == CPU_PPC620
15181 || rs6000_cpu_attr == CPU_PPC630
15182 || rs6000_cpu_attr == CPU_PPC750
15183 || rs6000_cpu_attr == CPU_PPC7400
15184 || rs6000_cpu_attr == CPU_PPC7450
15185 || rs6000_cpu_attr == CPU_POWER4
15186 || rs6000_cpu_attr == CPU_POWER5)
15187 && recog_memoized (dep_insn)
15188 && (INSN_CODE (dep_insn) >= 0)
15189 && (get_attr_type (dep_insn) == TYPE_CMP
15190 || get_attr_type (dep_insn) == TYPE_COMPARE
15191 || get_attr_type (dep_insn) == TYPE_DELAYED_COMPARE
15192 || get_attr_type (dep_insn) == TYPE_IMUL_COMPARE
15193 || get_attr_type (dep_insn) == TYPE_LMUL_COMPARE
15194 || get_attr_type (dep_insn) == TYPE_FPCOMPARE
15195 || get_attr_type (dep_insn) == TYPE_CR_LOGICAL
15196 || get_attr_type (dep_insn) == TYPE_DELAYED_CR))
15201 /* Fall out to return default cost. */
15207 /* The function returns a true if INSN is microcoded.
15208 Return false otherwise. */
15211 is_microcoded_insn (rtx insn)
15213 if (!insn || !INSN_P (insn)
15214 || GET_CODE (PATTERN (insn)) == USE
15215 || GET_CODE (PATTERN (insn)) == CLOBBER)
15218 if (rs6000_sched_groups)
15220 enum attr_type type = get_attr_type (insn);
15221 if (type == TYPE_LOAD_EXT_U
15222 || type == TYPE_LOAD_EXT_UX
15223 || type == TYPE_LOAD_UX
15224 || type == TYPE_STORE_UX
15225 || type == TYPE_MFCR)
15232 /* The function returns a nonzero value if INSN can be scheduled only
15233 as the first insn in a dispatch group ("dispatch-slot restricted").
15234 In this case, the returned value indicates how many dispatch slots
15235 the insn occupies (at the beginning of the group).
15236 Return 0 otherwise. */
15239 is_dispatch_slot_restricted (rtx insn)
15241 enum attr_type type;
15243 if (!rs6000_sched_groups)
15247 || insn == NULL_RTX
15248 || GET_CODE (insn) == NOTE
15249 || GET_CODE (PATTERN (insn)) == USE
15250 || GET_CODE (PATTERN (insn)) == CLOBBER)
15253 type = get_attr_type (insn);
15260 case TYPE_DELAYED_CR:
15261 case TYPE_CR_LOGICAL:
15269 if (rs6000_cpu == PROCESSOR_POWER5
15270 && is_cracked_insn (insn))
15276 /* The function returns true if INSN is cracked into 2 instructions
15277 by the processor (and therefore occupies 2 issue slots). */
15280 is_cracked_insn (rtx insn)
15282 if (!insn || !INSN_P (insn)
15283 || GET_CODE (PATTERN (insn)) == USE
15284 || GET_CODE (PATTERN (insn)) == CLOBBER)
15287 if (rs6000_sched_groups)
15289 enum attr_type type = get_attr_type (insn);
15290 if (type == TYPE_LOAD_U || type == TYPE_STORE_U
15291 || type == TYPE_FPLOAD_U || type == TYPE_FPSTORE_U
15292 || type == TYPE_FPLOAD_UX || type == TYPE_FPSTORE_UX
15293 || type == TYPE_LOAD_EXT || type == TYPE_DELAYED_CR
15294 || type == TYPE_COMPARE || type == TYPE_DELAYED_COMPARE
15295 || type == TYPE_IMUL_COMPARE || type == TYPE_LMUL_COMPARE
15296 || type == TYPE_IDIV || type == TYPE_LDIV
15297 || type == TYPE_INSERT_WORD)
15304 /* The function returns true if INSN can be issued only from
15305 the branch slot. */
15308 is_branch_slot_insn (rtx insn)
15310 if (!insn || !INSN_P (insn)
15311 || GET_CODE (PATTERN (insn)) == USE
15312 || GET_CODE (PATTERN (insn)) == CLOBBER)
15315 if (rs6000_sched_groups)
15317 enum attr_type type = get_attr_type (insn);
15318 if (type == TYPE_BRANCH || type == TYPE_JMPREG)
15326 /* A C statement (sans semicolon) to update the integer scheduling
15327 priority INSN_PRIORITY (INSN). Increase the priority to execute the
15328 INSN earlier, reduce the priority to execute INSN later. Do not
15329 define this macro if you do not need to adjust the scheduling
15330 priorities of insns. */
15333 rs6000_adjust_priority (rtx insn ATTRIBUTE_UNUSED, int priority)
15335 /* On machines (like the 750) which have asymmetric integer units,
15336 where one integer unit can do multiply and divides and the other
15337 can't, reduce the priority of multiply/divide so it is scheduled
15338 before other integer operations. */
15341 if (! INSN_P (insn))
15344 if (GET_CODE (PATTERN (insn)) == USE)
15347 switch (rs6000_cpu_attr) {
15349 switch (get_attr_type (insn))
15356 fprintf (stderr, "priority was %#x (%d) before adjustment\n",
15357 priority, priority);
15358 if (priority >= 0 && priority < 0x01000000)
15365 if (is_dispatch_slot_restricted (insn)
15366 && reload_completed
15367 && current_sched_info->sched_max_insns_priority
15368 && rs6000_sched_restricted_insns_priority)
15371 /* Prioritize insns that can be dispatched only in the first dispatch slot. */
15372 if (rs6000_sched_restricted_insns_priority == 1)
15373 /* Attach highest priority to insn. This means that in
15374 haifa-sched.c:ready_sort(), dispatch-slot restriction considerations
15375 precede 'priority' (critical path) considerations. */
15376 return current_sched_info->sched_max_insns_priority;
15377 else if (rs6000_sched_restricted_insns_priority == 2)
15378 /* Increase priority of insn by a minimal amount. This means that in
15379 haifa-sched.c:ready_sort(), only 'priority' (critical path) considerations
15380 precede dispatch-slot restriction considerations. */
15381 return (priority + 1);
15387 /* Return how many instructions the machine can issue per cycle. */
15390 rs6000_issue_rate (void)
15392 /* Use issue rate of 1 for first scheduling pass to decrease degradation. */
15393 if (!reload_completed)
15396 switch (rs6000_cpu_attr) {
15397 case CPU_RIOS1: /* ? */
15399 case CPU_PPC601: /* ? */
15422 /* Return how many instructions to look ahead for better insn
15426 rs6000_use_sched_lookahead (void)
15428 if (rs6000_cpu_attr == CPU_PPC8540)
15433 /* Determine is PAT refers to memory. */
15436 is_mem_ref (rtx pat)
15442 if (GET_CODE (pat) == MEM)
15445 /* Recursively process the pattern. */
15446 fmt = GET_RTX_FORMAT (GET_CODE (pat));
15448 for (i = GET_RTX_LENGTH (GET_CODE (pat)) - 1; i >= 0 && !ret; i--)
15451 ret |= is_mem_ref (XEXP (pat, i));
15452 else if (fmt[i] == 'E')
15453 for (j = XVECLEN (pat, i) - 1; j >= 0; j--)
15454 ret |= is_mem_ref (XVECEXP (pat, i, j));
15460 /* Determine if PAT is a PATTERN of a load insn. */
15463 is_load_insn1 (rtx pat)
15465 if (!pat || pat == NULL_RTX)
15468 if (GET_CODE (pat) == SET)
15469 return is_mem_ref (SET_SRC (pat));
15471 if (GET_CODE (pat) == PARALLEL)
15475 for (i = 0; i < XVECLEN (pat, 0); i++)
15476 if (is_load_insn1 (XVECEXP (pat, 0, i)))
15483 /* Determine if INSN loads from memory. */
15486 is_load_insn (rtx insn)
15488 if (!insn || !INSN_P (insn))
15491 if (GET_CODE (insn) == CALL_INSN)
15494 return is_load_insn1 (PATTERN (insn));
15497 /* Determine if PAT is a PATTERN of a store insn. */
15500 is_store_insn1 (rtx pat)
15502 if (!pat || pat == NULL_RTX)
15505 if (GET_CODE (pat) == SET)
15506 return is_mem_ref (SET_DEST (pat));
15508 if (GET_CODE (pat) == PARALLEL)
15512 for (i = 0; i < XVECLEN (pat, 0); i++)
15513 if (is_store_insn1 (XVECEXP (pat, 0, i)))
15520 /* Determine if INSN stores to memory. */
15523 is_store_insn (rtx insn)
15525 if (!insn || !INSN_P (insn))
15528 return is_store_insn1 (PATTERN (insn));
15531 /* Returns whether the dependence between INSN and NEXT is considered
15532 costly by the given target. */
15535 rs6000_is_costly_dependence (rtx insn, rtx next, rtx link, int cost, int distance)
15537 /* If the flag is not enbled - no dependence is considered costly;
15538 allow all dependent insns in the same group.
15539 This is the most aggressive option. */
15540 if (rs6000_sched_costly_dep == no_dep_costly)
15543 /* If the flag is set to 1 - a dependence is always considered costly;
15544 do not allow dependent instructions in the same group.
15545 This is the most conservative option. */
15546 if (rs6000_sched_costly_dep == all_deps_costly)
15549 if (rs6000_sched_costly_dep == store_to_load_dep_costly
15550 && is_load_insn (next)
15551 && is_store_insn (insn))
15552 /* Prevent load after store in the same group. */
15555 if (rs6000_sched_costly_dep == true_store_to_load_dep_costly
15556 && is_load_insn (next)
15557 && is_store_insn (insn)
15558 && (!link || (int) REG_NOTE_KIND (link) == 0))
15559 /* Prevent load after store in the same group if it is a true dependence. */
15562 /* The flag is set to X; dependences with latency >= X are considered costly,
15563 and will not be scheduled in the same group. */
15564 if (rs6000_sched_costly_dep <= max_dep_latency
15565 && ((cost - distance) >= (int)rs6000_sched_costly_dep))
15571 /* Return the next insn after INSN that is found before TAIL is reached,
15572 skipping any "non-active" insns - insns that will not actually occupy
15573 an issue slot. Return NULL_RTX if such an insn is not found. */
15576 get_next_active_insn (rtx insn, rtx tail)
15580 if (!insn || insn == tail)
15583 next_insn = NEXT_INSN (insn);
15586 && next_insn != tail
15587 && (GET_CODE(next_insn) == NOTE
15588 || GET_CODE (PATTERN (next_insn)) == USE
15589 || GET_CODE (PATTERN (next_insn)) == CLOBBER))
15591 next_insn = NEXT_INSN (next_insn);
15594 if (!next_insn || next_insn == tail)
15600 /* Return whether the presence of INSN causes a dispatch group termination
15601 of group WHICH_GROUP.
15603 If WHICH_GROUP == current_group, this function will return true if INSN
15604 causes the termination of the current group (i.e, the dispatch group to
15605 which INSN belongs). This means that INSN will be the last insn in the
15606 group it belongs to.
15608 If WHICH_GROUP == previous_group, this function will return true if INSN
15609 causes the termination of the previous group (i.e, the dispatch group that
15610 precedes the group to which INSN belongs). This means that INSN will be
15611 the first insn in the group it belongs to). */
15614 insn_terminates_group_p (rtx insn, enum group_termination which_group)
15616 enum attr_type type;
15621 type = get_attr_type (insn);
15623 if (is_microcoded_insn (insn))
15626 if (which_group == current_group)
15628 if (is_branch_slot_insn (insn))
15632 else if (which_group == previous_group)
15634 if (is_dispatch_slot_restricted (insn))
15642 /* Return true if it is recommended to keep NEXT_INSN "far" (in a separate
15643 dispatch group) from the insns in GROUP_INSNS. Return false otherwise. */
15646 is_costly_group (rtx *group_insns, rtx next_insn)
15651 int issue_rate = rs6000_issue_rate ();
15653 for (i = 0; i < issue_rate; i++)
15655 rtx insn = group_insns[i];
15658 for (link = INSN_DEPEND (insn); link != 0; link = XEXP (link, 1))
15660 rtx next = XEXP (link, 0);
15661 if (next == next_insn)
15663 cost = insn_cost (insn, link, next_insn);
15664 if (rs6000_is_costly_dependence (insn, next_insn, link, cost, 0))
15673 /* Utility of the function redefine_groups.
15674 Check if it is too costly to schedule NEXT_INSN together with GROUP_INSNS
15675 in the same dispatch group. If so, insert nops before NEXT_INSN, in order
15676 to keep it "far" (in a separate group) from GROUP_INSNS, following
15677 one of the following schemes, depending on the value of the flag
15678 -minsert_sched_nops = X:
15679 (1) X == sched_finish_regroup_exact: insert exactly as many nops as needed
15680 in order to force NEXT_INSN into a separate group.
15681 (2) X < sched_finish_regroup_exact: insert exactly X nops.
15682 GROUP_END, CAN_ISSUE_MORE and GROUP_COUNT record the state after nop
15683 insertion (has a group just ended, how many vacant issue slots remain in the
15684 last group, and how many dispatch groups were encountered so far). */
15687 force_new_group (int sched_verbose, FILE *dump, rtx *group_insns, rtx next_insn,
15688 bool *group_end, int can_issue_more, int *group_count)
15692 int issue_rate = rs6000_issue_rate ();
15693 bool end = *group_end;
15696 if (next_insn == NULL_RTX)
15697 return can_issue_more;
15699 if (rs6000_sched_insert_nops > sched_finish_regroup_exact)
15700 return can_issue_more;
15702 force = is_costly_group (group_insns, next_insn);
15704 return can_issue_more;
15706 if (sched_verbose > 6)
15707 fprintf (dump,"force: group count = %d, can_issue_more = %d\n",
15708 *group_count ,can_issue_more);
15710 if (rs6000_sched_insert_nops == sched_finish_regroup_exact)
15713 can_issue_more = 0;
15715 /* Since only a branch can be issued in the last issue_slot, it is
15716 sufficient to insert 'can_issue_more - 1' nops if next_insn is not
15717 a branch. If next_insn is a branch, we insert 'can_issue_more' nops;
15718 in this case the last nop will start a new group and the branch will be
15719 forced to the new group. */
15720 if (can_issue_more && !is_branch_slot_insn (next_insn))
15723 while (can_issue_more > 0)
15726 emit_insn_before (nop, next_insn);
15734 if (rs6000_sched_insert_nops < sched_finish_regroup_exact)
15736 int n_nops = rs6000_sched_insert_nops;
15738 /* Nops can't be issued from the branch slot, so the effective
15739 issue_rate for nops is 'issue_rate - 1'. */
15740 if (can_issue_more == 0)
15741 can_issue_more = issue_rate;
15743 if (can_issue_more == 0)
15745 can_issue_more = issue_rate - 1;
15748 for (i = 0; i < issue_rate; i++)
15750 group_insns[i] = 0;
15757 emit_insn_before (nop, next_insn);
15758 if (can_issue_more == issue_rate - 1) /* new group begins */
15761 if (can_issue_more == 0)
15763 can_issue_more = issue_rate - 1;
15766 for (i = 0; i < issue_rate; i++)
15768 group_insns[i] = 0;
15774 /* Scale back relative to 'issue_rate' (instead of 'issue_rate - 1'). */
15777 *group_end = /* Is next_insn going to start a new group? */
15779 || (can_issue_more == 1 && !is_branch_slot_insn (next_insn))
15780 || (can_issue_more <= 2 && is_cracked_insn (next_insn))
15781 || (can_issue_more < issue_rate &&
15782 insn_terminates_group_p (next_insn, previous_group)));
15783 if (*group_end && end)
15786 if (sched_verbose > 6)
15787 fprintf (dump, "done force: group count = %d, can_issue_more = %d\n",
15788 *group_count, can_issue_more);
15789 return can_issue_more;
15792 return can_issue_more;
15795 /* This function tries to synch the dispatch groups that the compiler "sees"
15796 with the dispatch groups that the processor dispatcher is expected to
15797 form in practice. It tries to achieve this synchronization by forcing the
15798 estimated processor grouping on the compiler (as opposed to the function
15799 'pad_goups' which tries to force the scheduler's grouping on the processor).
15801 The function scans the insn sequence between PREV_HEAD_INSN and TAIL and
15802 examines the (estimated) dispatch groups that will be formed by the processor
15803 dispatcher. It marks these group boundaries to reflect the estimated
15804 processor grouping, overriding the grouping that the scheduler had marked.
15805 Depending on the value of the flag '-minsert-sched-nops' this function can
15806 force certain insns into separate groups or force a certain distance between
15807 them by inserting nops, for example, if there exists a "costly dependence"
15810 The function estimates the group boundaries that the processor will form as
15811 folllows: It keeps track of how many vacant issue slots are available after
15812 each insn. A subsequent insn will start a new group if one of the following
15814 - no more vacant issue slots remain in the current dispatch group.
15815 - only the last issue slot, which is the branch slot, is vacant, but the next
15816 insn is not a branch.
15817 - only the last 2 or less issue slots, including the branch slot, are vacant,
15818 which means that a cracked insn (which occupies two issue slots) can't be
15819 issued in this group.
15820 - less than 'issue_rate' slots are vacant, and the next insn always needs to
15821 start a new group. */
15824 redefine_groups (FILE *dump, int sched_verbose, rtx prev_head_insn, rtx tail)
15826 rtx insn, next_insn;
15828 int can_issue_more;
15831 int group_count = 0;
15835 issue_rate = rs6000_issue_rate ();
15836 group_insns = alloca (issue_rate * sizeof (rtx));
15837 for (i = 0; i < issue_rate; i++)
15839 group_insns[i] = 0;
15841 can_issue_more = issue_rate;
15843 insn = get_next_active_insn (prev_head_insn, tail);
15846 while (insn != NULL_RTX)
15848 slot = (issue_rate - can_issue_more);
15849 group_insns[slot] = insn;
15851 rs6000_variable_issue (dump, sched_verbose, insn, can_issue_more);
15852 if (insn_terminates_group_p (insn, current_group))
15853 can_issue_more = 0;
15855 next_insn = get_next_active_insn (insn, tail);
15856 if (next_insn == NULL_RTX)
15857 return group_count + 1;
15859 group_end = /* Is next_insn going to start a new group? */
15860 (can_issue_more == 0
15861 || (can_issue_more == 1 && !is_branch_slot_insn (next_insn))
15862 || (can_issue_more <= 2 && is_cracked_insn (next_insn))
15863 || (can_issue_more < issue_rate &&
15864 insn_terminates_group_p (next_insn, previous_group)));
15866 can_issue_more = force_new_group (sched_verbose, dump, group_insns,
15867 next_insn, &group_end, can_issue_more, &group_count);
15872 can_issue_more = 0;
15873 for (i = 0; i < issue_rate; i++)
15875 group_insns[i] = 0;
15879 if (GET_MODE (next_insn) == TImode && can_issue_more)
15880 PUT_MODE(next_insn, VOIDmode);
15881 else if (!can_issue_more && GET_MODE (next_insn) != TImode)
15882 PUT_MODE (next_insn, TImode);
15885 if (can_issue_more == 0)
15886 can_issue_more = issue_rate;
15889 return group_count;
15892 /* Scan the insn sequence between PREV_HEAD_INSN and TAIL and examine the
15893 dispatch group boundaries that the scheduler had marked. Pad with nops
15894 any dispatch groups which have vacant issue slots, in order to force the
15895 scheduler's grouping on the processor dispatcher. The function
15896 returns the number of dispatch groups found. */
15899 pad_groups (FILE *dump, int sched_verbose, rtx prev_head_insn, rtx tail)
15901 rtx insn, next_insn;
15904 int can_issue_more;
15906 int group_count = 0;
15908 /* Initialize issue_rate. */
15909 issue_rate = rs6000_issue_rate ();
15910 can_issue_more = issue_rate;
15912 insn = get_next_active_insn (prev_head_insn, tail);
15913 next_insn = get_next_active_insn (insn, tail);
15915 while (insn != NULL_RTX)
15918 rs6000_variable_issue (dump, sched_verbose, insn, can_issue_more);
15920 group_end = (next_insn == NULL_RTX || GET_MODE (next_insn) == TImode);
15922 if (next_insn == NULL_RTX)
15927 /* If the scheduler had marked group termination at this location
15928 (between insn and next_indn), and neither insn nor next_insn will
15929 force group termination, pad the group with nops to force group
15932 && (rs6000_sched_insert_nops == sched_finish_pad_groups)
15933 && !insn_terminates_group_p (insn, current_group)
15934 && !insn_terminates_group_p (next_insn, previous_group))
15936 if (!is_branch_slot_insn(next_insn))
15939 while (can_issue_more)
15942 emit_insn_before (nop, next_insn);
15947 can_issue_more = issue_rate;
15952 next_insn = get_next_active_insn (insn, tail);
15955 return group_count;
15958 /* The following function is called at the end of scheduling BB.
15959 After reload, it inserts nops at insn group bundling. */
15962 rs6000_sched_finish (FILE *dump, int sched_verbose)
15967 fprintf (dump, "=== Finishing schedule.\n");
15969 if (reload_completed && rs6000_sched_groups)
15971 if (rs6000_sched_insert_nops == sched_finish_none)
15974 if (rs6000_sched_insert_nops == sched_finish_pad_groups)
15975 n_groups = pad_groups (dump, sched_verbose,
15976 current_sched_info->prev_head,
15977 current_sched_info->next_tail);
15979 n_groups = redefine_groups (dump, sched_verbose,
15980 current_sched_info->prev_head,
15981 current_sched_info->next_tail);
15983 if (sched_verbose >= 6)
15985 fprintf (dump, "ngroups = %d\n", n_groups);
15986 print_rtl (dump, current_sched_info->prev_head);
15987 fprintf (dump, "Done finish_sched\n");
15992 /* Length in units of the trampoline for entering a nested function. */
15995 rs6000_trampoline_size (void)
15999 switch (DEFAULT_ABI)
16005 ret = (TARGET_32BIT) ? 12 : 24;
16010 ret = (TARGET_32BIT) ? 40 : 48;
16017 /* Emit RTL insns to initialize the variable parts of a trampoline.
16018 FNADDR is an RTX for the address of the function's pure code.
16019 CXT is an RTX for the static chain value for the function. */
16022 rs6000_initialize_trampoline (rtx addr, rtx fnaddr, rtx cxt)
16024 enum machine_mode pmode = Pmode;
16025 int regsize = (TARGET_32BIT) ? 4 : 8;
16026 rtx ctx_reg = force_reg (pmode, cxt);
16028 switch (DEFAULT_ABI)
16033 /* Macros to shorten the code expansions below. */
16034 #define MEM_DEREF(addr) gen_rtx_MEM (pmode, memory_address (pmode, addr))
16035 #define MEM_PLUS(addr,offset) \
16036 gen_rtx_MEM (pmode, memory_address (pmode, plus_constant (addr, offset)))
16038 /* Under AIX, just build the 3 word function descriptor */
16041 rtx fn_reg = gen_reg_rtx (pmode);
16042 rtx toc_reg = gen_reg_rtx (pmode);
16043 emit_move_insn (fn_reg, MEM_DEREF (fnaddr));
16044 emit_move_insn (toc_reg, MEM_PLUS (fnaddr, regsize));
16045 emit_move_insn (MEM_DEREF (addr), fn_reg);
16046 emit_move_insn (MEM_PLUS (addr, regsize), toc_reg);
16047 emit_move_insn (MEM_PLUS (addr, 2*regsize), ctx_reg);
16051 /* Under V.4/eabi/darwin, __trampoline_setup does the real work. */
16054 emit_library_call (gen_rtx_SYMBOL_REF (SImode, "__trampoline_setup"),
16055 FALSE, VOIDmode, 4,
16057 GEN_INT (rs6000_trampoline_size ()), SImode,
16067 /* Table of valid machine attributes. */
16069 const struct attribute_spec rs6000_attribute_table[] =
16071 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
16072 { "altivec", 1, 1, false, true, false, rs6000_handle_altivec_attribute },
16073 { "longcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
16074 { "shortcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
16075 { NULL, 0, 0, false, false, false, NULL }
16078 /* Handle the "altivec" attribute. The attribute may have
16079 arguments as follows:
16081 __attribute__((altivec(vector__)))
16082 __attribute__((altivec(pixel__))) (always followed by 'unsigned short')
16083 __attribute__((altivec(bool__))) (always followed by 'unsigned')
16085 and may appear more than once (e.g., 'vector bool char') in a
16086 given declaration. */
16089 rs6000_handle_altivec_attribute (tree *node, tree name, tree args,
16090 int flags ATTRIBUTE_UNUSED,
16091 bool *no_add_attrs)
16093 tree type = *node, result = NULL_TREE;
16094 enum machine_mode mode;
16097 = ((args && TREE_CODE (args) == TREE_LIST && TREE_VALUE (args)
16098 && TREE_CODE (TREE_VALUE (args)) == IDENTIFIER_NODE)
16099 ? *IDENTIFIER_POINTER (TREE_VALUE (args))
16102 while (POINTER_TYPE_P (type)
16103 || TREE_CODE (type) == FUNCTION_TYPE
16104 || TREE_CODE (type) == METHOD_TYPE
16105 || TREE_CODE (type) == ARRAY_TYPE)
16106 type = TREE_TYPE (type);
16108 mode = TYPE_MODE (type);
16110 if (rs6000_warn_altivec_long
16111 && (type == long_unsigned_type_node || type == long_integer_type_node))
16112 warning ("use of 'long' in AltiVec types is deprecated; use 'int'");
16114 switch (altivec_type)
16117 unsigned_p = TYPE_UNSIGNED (type);
16121 result = (unsigned_p ? unsigned_V4SI_type_node : V4SI_type_node);
16124 result = (unsigned_p ? unsigned_V8HI_type_node : V8HI_type_node);
16127 result = (unsigned_p ? unsigned_V16QI_type_node : V16QI_type_node);
16129 case SFmode: result = V4SF_type_node; break;
16130 /* If the user says 'vector int bool', we may be handed the 'bool'
16131 attribute _before_ the 'vector' attribute, and so select the proper
16132 type in the 'b' case below. */
16133 case V4SImode: case V8HImode: case V16QImode: result = type;
16140 case SImode: case V4SImode: result = bool_V4SI_type_node; break;
16141 case HImode: case V8HImode: result = bool_V8HI_type_node; break;
16142 case QImode: case V16QImode: result = bool_V16QI_type_node;
16149 case V8HImode: result = pixel_V8HI_type_node;
16155 if (result && result != type && TYPE_READONLY (type))
16156 result = build_qualified_type (result, TYPE_QUAL_CONST);
16158 *no_add_attrs = true; /* No need to hang on to the attribute. */
16161 warning ("`%s' attribute ignored", IDENTIFIER_POINTER (name));
16163 *node = reconstruct_complex_type (*node, result);
16168 /* AltiVec defines four built-in scalar types that serve as vector
16169 elements; we must teach the compiler how to mangle them. */
16171 static const char *
16172 rs6000_mangle_fundamental_type (tree type)
16174 if (type == bool_char_type_node) return "U6__boolc";
16175 if (type == bool_short_type_node) return "U6__bools";
16176 if (type == pixel_type_node) return "u7__pixel";
16177 if (type == bool_int_type_node) return "U6__booli";
16179 /* For all other types, use normal C++ mangling. */
16183 /* Handle a "longcall" or "shortcall" attribute; arguments as in
16184 struct attribute_spec.handler. */
16187 rs6000_handle_longcall_attribute (tree *node, tree name,
16188 tree args ATTRIBUTE_UNUSED,
16189 int flags ATTRIBUTE_UNUSED,
16190 bool *no_add_attrs)
16192 if (TREE_CODE (*node) != FUNCTION_TYPE
16193 && TREE_CODE (*node) != FIELD_DECL
16194 && TREE_CODE (*node) != TYPE_DECL)
16196 warning ("`%s' attribute only applies to functions",
16197 IDENTIFIER_POINTER (name));
16198 *no_add_attrs = true;
16204 /* Set longcall attributes on all functions declared when
16205 rs6000_default_long_calls is true. */
16207 rs6000_set_default_type_attributes (tree type)
16209 if (rs6000_default_long_calls
16210 && (TREE_CODE (type) == FUNCTION_TYPE
16211 || TREE_CODE (type) == METHOD_TYPE))
16212 TYPE_ATTRIBUTES (type) = tree_cons (get_identifier ("longcall"),
16214 TYPE_ATTRIBUTES (type));
16217 /* Return a reference suitable for calling a function with the
16218 longcall attribute. */
16221 rs6000_longcall_ref (rtx call_ref)
16223 const char *call_name;
16226 if (GET_CODE (call_ref) != SYMBOL_REF)
16229 /* System V adds '.' to the internal name, so skip them. */
16230 call_name = XSTR (call_ref, 0);
16231 if (*call_name == '.')
16233 while (*call_name == '.')
16236 node = get_identifier (call_name);
16237 call_ref = gen_rtx_SYMBOL_REF (VOIDmode, IDENTIFIER_POINTER (node));
16240 return force_reg (Pmode, call_ref);
16243 #ifdef USING_ELFOS_H
16245 /* A C statement or statements to switch to the appropriate section
16246 for output of RTX in mode MODE. You can assume that RTX is some
16247 kind of constant in RTL. The argument MODE is redundant except in
16248 the case of a `const_int' rtx. Select the section by calling
16249 `text_section' or one of the alternatives for other sections.
16251 Do not define this macro if you put all constants in the read-only
16255 rs6000_elf_select_rtx_section (enum machine_mode mode, rtx x,
16256 unsigned HOST_WIDE_INT align)
16258 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
16261 default_elf_select_rtx_section (mode, x, align);
16264 /* A C statement or statements to switch to the appropriate
16265 section for output of DECL. DECL is either a `VAR_DECL' node
16266 or a constant of some sort. RELOC indicates whether forming
16267 the initial value of DECL requires link-time relocations. */
16270 rs6000_elf_select_section (tree decl, int reloc,
16271 unsigned HOST_WIDE_INT align)
16273 /* Pretend that we're always building for a shared library when
16274 ABI_AIX, because otherwise we end up with dynamic relocations
16275 in read-only sections. This happens for function pointers,
16276 references to vtables in typeinfo, and probably other cases. */
16277 default_elf_select_section_1 (decl, reloc, align,
16278 flag_pic || DEFAULT_ABI == ABI_AIX);
16281 /* A C statement to build up a unique section name, expressed as a
16282 STRING_CST node, and assign it to DECL_SECTION_NAME (decl).
16283 RELOC indicates whether the initial value of EXP requires
16284 link-time relocations. If you do not define this macro, GCC will use
16285 the symbol name prefixed by `.' as the section name. Note - this
16286 macro can now be called for uninitialized data items as well as
16287 initialized data and functions. */
16290 rs6000_elf_unique_section (tree decl, int reloc)
16292 /* As above, pretend that we're always building for a shared library
16293 when ABI_AIX, to avoid dynamic relocations in read-only sections. */
16294 default_unique_section_1 (decl, reloc,
16295 flag_pic || DEFAULT_ABI == ABI_AIX);
16298 /* For a SYMBOL_REF, set generic flags and then perform some
16299 target-specific processing.
16301 When the AIX ABI is requested on a non-AIX system, replace the
16302 function name with the real name (with a leading .) rather than the
16303 function descriptor name. This saves a lot of overriding code to
16304 read the prefixes. */
16307 rs6000_elf_encode_section_info (tree decl, rtx rtl, int first)
16309 default_encode_section_info (decl, rtl, first);
16312 && TREE_CODE (decl) == FUNCTION_DECL
16314 && DEFAULT_ABI == ABI_AIX)
16316 rtx sym_ref = XEXP (rtl, 0);
16317 size_t len = strlen (XSTR (sym_ref, 0));
16318 char *str = alloca (len + 2);
16320 memcpy (str + 1, XSTR (sym_ref, 0), len + 1);
16321 XSTR (sym_ref, 0) = ggc_alloc_string (str, len + 1);
16326 rs6000_elf_in_small_data_p (tree decl)
16328 if (rs6000_sdata == SDATA_NONE)
16331 if (TREE_CODE (decl) == VAR_DECL && DECL_SECTION_NAME (decl))
16333 const char *section = TREE_STRING_POINTER (DECL_SECTION_NAME (decl));
16334 if (strcmp (section, ".sdata") == 0
16335 || strcmp (section, ".sdata2") == 0
16336 || strcmp (section, ".sbss") == 0
16337 || strcmp (section, ".sbss2") == 0
16338 || strcmp (section, ".PPC.EMB.sdata0") == 0
16339 || strcmp (section, ".PPC.EMB.sbss0") == 0)
16344 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
16347 && (unsigned HOST_WIDE_INT) size <= g_switch_value
16348 /* If it's not public, and we're not going to reference it there,
16349 there's no need to put it in the small data section. */
16350 && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)))
16357 #endif /* USING_ELFOS_H */
16360 /* Return a REG that occurs in ADDR with coefficient 1.
16361 ADDR can be effectively incremented by incrementing REG.
16363 r0 is special and we must not select it as an address
16364 register by this routine since our caller will try to
16365 increment the returned register via an "la" instruction. */
16368 find_addr_reg (rtx addr)
16370 while (GET_CODE (addr) == PLUS)
16372 if (GET_CODE (XEXP (addr, 0)) == REG
16373 && REGNO (XEXP (addr, 0)) != 0)
16374 addr = XEXP (addr, 0);
16375 else if (GET_CODE (XEXP (addr, 1)) == REG
16376 && REGNO (XEXP (addr, 1)) != 0)
16377 addr = XEXP (addr, 1);
16378 else if (CONSTANT_P (XEXP (addr, 0)))
16379 addr = XEXP (addr, 1);
16380 else if (CONSTANT_P (XEXP (addr, 1)))
16381 addr = XEXP (addr, 0);
16385 if (GET_CODE (addr) == REG && REGNO (addr) != 0)
16391 rs6000_fatal_bad_address (rtx op)
16393 fatal_insn ("bad address", op);
16399 /* Returns 1 if OP is either a symbol reference or a sum of a symbol
16400 reference and a constant. */
16403 symbolic_operand (rtx op)
16405 switch (GET_CODE (op))
16412 return (GET_CODE (op) == SYMBOL_REF ||
16413 (GET_CODE (XEXP (op, 0)) == SYMBOL_REF
16414 || GET_CODE (XEXP (op, 0)) == LABEL_REF)
16415 && GET_CODE (XEXP (op, 1)) == CONST_INT);
16424 static tree branch_island_list = 0;
16426 /* Remember to generate a branch island for far calls to the given
16430 add_compiler_branch_island (tree label_name, tree function_name, int line_number)
16432 tree branch_island = build_tree_list (function_name, label_name);
16433 TREE_TYPE (branch_island) = build_int_cst (NULL_TREE, line_number);
16434 TREE_CHAIN (branch_island) = branch_island_list;
16435 branch_island_list = branch_island;
16438 #define BRANCH_ISLAND_LABEL_NAME(BRANCH_ISLAND) TREE_VALUE (BRANCH_ISLAND)
16439 #define BRANCH_ISLAND_FUNCTION_NAME(BRANCH_ISLAND) TREE_PURPOSE (BRANCH_ISLAND)
16440 #define BRANCH_ISLAND_LINE_NUMBER(BRANCH_ISLAND) \
16441 TREE_INT_CST_LOW (TREE_TYPE (BRANCH_ISLAND))
16443 /* Generate far-jump branch islands for everything on the
16444 branch_island_list. Invoked immediately after the last instruction
16445 of the epilogue has been emitted; the branch-islands must be
16446 appended to, and contiguous with, the function body. Mach-O stubs
16447 are generated in machopic_output_stub(). */
16450 macho_branch_islands (void)
16453 tree branch_island;
16455 for (branch_island = branch_island_list;
16457 branch_island = TREE_CHAIN (branch_island))
16459 const char *label =
16460 IDENTIFIER_POINTER (BRANCH_ISLAND_LABEL_NAME (branch_island));
16462 IDENTIFIER_POINTER (BRANCH_ISLAND_FUNCTION_NAME (branch_island));
16463 char name_buf[512];
16464 /* Cheap copy of the details from the Darwin ASM_OUTPUT_LABELREF(). */
16465 if (name[0] == '*' || name[0] == '&')
16466 strcpy (name_buf, name+1);
16470 strcpy (name_buf+1, name);
16472 strcpy (tmp_buf, "\n");
16473 strcat (tmp_buf, label);
16474 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
16475 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
16476 fprintf (asm_out_file, "\t.stabd 68,0," HOST_WIDE_INT_PRINT_UNSIGNED "\n",
16477 BRANCH_ISLAND_LINE_NUMBER(branch_island));
16478 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
16481 strcat (tmp_buf, ":\n\tmflr r0\n\tbcl 20,31,");
16482 strcat (tmp_buf, label);
16483 strcat (tmp_buf, "_pic\n");
16484 strcat (tmp_buf, label);
16485 strcat (tmp_buf, "_pic:\n\tmflr r11\n");
16487 strcat (tmp_buf, "\taddis r11,r11,ha16(");
16488 strcat (tmp_buf, name_buf);
16489 strcat (tmp_buf, " - ");
16490 strcat (tmp_buf, label);
16491 strcat (tmp_buf, "_pic)\n");
16493 strcat (tmp_buf, "\tmtlr r0\n");
16495 strcat (tmp_buf, "\taddi r12,r11,lo16(");
16496 strcat (tmp_buf, name_buf);
16497 strcat (tmp_buf, " - ");
16498 strcat (tmp_buf, label);
16499 strcat (tmp_buf, "_pic)\n");
16501 strcat (tmp_buf, "\tmtctr r12\n\tbctr\n");
16505 strcat (tmp_buf, ":\nlis r12,hi16(");
16506 strcat (tmp_buf, name_buf);
16507 strcat (tmp_buf, ")\n\tori r12,r12,lo16(");
16508 strcat (tmp_buf, name_buf);
16509 strcat (tmp_buf, ")\n\tmtctr r12\n\tbctr");
16511 output_asm_insn (tmp_buf, 0);
16512 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
16513 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
16514 fprintf(asm_out_file, "\t.stabd 68,0," HOST_WIDE_INT_PRINT_UNSIGNED "\n",
16515 BRANCH_ISLAND_LINE_NUMBER (branch_island));
16516 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
16519 branch_island_list = 0;
16522 /* NO_PREVIOUS_DEF checks in the link list whether the function name is
16523 already there or not. */
16526 no_previous_def (tree function_name)
16528 tree branch_island;
16529 for (branch_island = branch_island_list;
16531 branch_island = TREE_CHAIN (branch_island))
16532 if (function_name == BRANCH_ISLAND_FUNCTION_NAME (branch_island))
16537 /* GET_PREV_LABEL gets the label name from the previous definition of
16541 get_prev_label (tree function_name)
16543 tree branch_island;
16544 for (branch_island = branch_island_list;
16546 branch_island = TREE_CHAIN (branch_island))
16547 if (function_name == BRANCH_ISLAND_FUNCTION_NAME (branch_island))
16548 return BRANCH_ISLAND_LABEL_NAME (branch_island);
16552 /* INSN is either a function call or a millicode call. It may have an
16553 unconditional jump in its delay slot.
16555 CALL_DEST is the routine we are calling. */
16558 output_call (rtx insn, rtx *operands, int dest_operand_number, int cookie_operand_number)
16560 static char buf[256];
16561 if (GET_CODE (operands[dest_operand_number]) == SYMBOL_REF
16562 && (INTVAL (operands[cookie_operand_number]) & CALL_LONG))
16565 tree funname = get_identifier (XSTR (operands[dest_operand_number], 0));
16567 if (no_previous_def (funname))
16569 int line_number = 0;
16570 rtx label_rtx = gen_label_rtx ();
16571 char *label_buf, temp_buf[256];
16572 ASM_GENERATE_INTERNAL_LABEL (temp_buf, "L",
16573 CODE_LABEL_NUMBER (label_rtx));
16574 label_buf = temp_buf[0] == '*' ? temp_buf + 1 : temp_buf;
16575 labelname = get_identifier (label_buf);
16576 for (; insn && GET_CODE (insn) != NOTE; insn = PREV_INSN (insn));
16578 line_number = NOTE_LINE_NUMBER (insn);
16579 add_compiler_branch_island (labelname, funname, line_number);
16582 labelname = get_prev_label (funname);
16584 /* "jbsr foo, L42" is Mach-O for "Link as 'bl foo' if a 'bl'
16585 instruction will reach 'foo', otherwise link as 'bl L42'".
16586 "L42" should be a 'branch island', that will do a far jump to
16587 'foo'. Branch islands are generated in
16588 macho_branch_islands(). */
16589 sprintf (buf, "jbsr %%z%d,%.246s",
16590 dest_operand_number, IDENTIFIER_POINTER (labelname));
16593 sprintf (buf, "bl %%z%d", dest_operand_number);
16597 #endif /* TARGET_MACHO */
16599 /* Generate PIC and indirect symbol stubs. */
16602 machopic_output_stub (FILE *file, const char *symb, const char *stub)
16604 unsigned int length;
16605 char *symbol_name, *lazy_ptr_name;
16606 char *local_label_0;
16607 static int label = 0;
16609 /* Lose our funky encoding stuff so it doesn't contaminate the stub. */
16610 symb = (*targetm.strip_name_encoding) (symb);
16613 length = strlen (symb);
16614 symbol_name = alloca (length + 32);
16615 GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name, symb, length);
16617 lazy_ptr_name = alloca (length + 32);
16618 GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name, symb, length);
16621 machopic_picsymbol_stub1_section ();
16623 machopic_symbol_stub1_section ();
16627 fprintf (file, "\t.align 5\n");
16629 fprintf (file, "%s:\n", stub);
16630 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
16633 local_label_0 = alloca (sizeof("\"L0000000000$spb\""));
16634 sprintf (local_label_0, "\"L%011d$spb\"", label);
16636 fprintf (file, "\tmflr r0\n");
16637 fprintf (file, "\tbcl 20,31,%s\n", local_label_0);
16638 fprintf (file, "%s:\n\tmflr r11\n", local_label_0);
16639 fprintf (file, "\taddis r11,r11,ha16(%s-%s)\n",
16640 lazy_ptr_name, local_label_0);
16641 fprintf (file, "\tmtlr r0\n");
16642 fprintf (file, "\tlwzu r12,lo16(%s-%s)(r11)\n",
16643 lazy_ptr_name, local_label_0);
16644 fprintf (file, "\tmtctr r12\n");
16645 fprintf (file, "\tbctr\n");
16649 fprintf (file, "\t.align 4\n");
16651 fprintf (file, "%s:\n", stub);
16652 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
16654 fprintf (file, "\tlis r11,ha16(%s)\n", lazy_ptr_name);
16655 fprintf (file, "\tlwzu r12,lo16(%s)(r11)\n", lazy_ptr_name);
16656 fprintf (file, "\tmtctr r12\n");
16657 fprintf (file, "\tbctr\n");
16660 machopic_lazy_symbol_ptr_section ();
16661 fprintf (file, "%s:\n", lazy_ptr_name);
16662 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
16663 fprintf (file, "%sdyld_stub_binding_helper\n",
16664 (TARGET_64BIT ? DOUBLE_INT_ASM_OP : "\t.long\t"));
16667 /* Legitimize PIC addresses. If the address is already
16668 position-independent, we return ORIG. Newly generated
16669 position-independent addresses go into a reg. This is REG if non
16670 zero, otherwise we allocate register(s) as necessary. */
16672 #define SMALL_INT(X) ((unsigned) (INTVAL(X) + 0x8000) < 0x10000)
16675 rs6000_machopic_legitimize_pic_address (rtx orig, enum machine_mode mode,
16680 if (reg == NULL && ! reload_in_progress && ! reload_completed)
16681 reg = gen_reg_rtx (Pmode);
16683 if (GET_CODE (orig) == CONST)
16685 if (GET_CODE (XEXP (orig, 0)) == PLUS
16686 && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
16689 if (GET_CODE (XEXP (orig, 0)) == PLUS)
16691 /* Use a different reg for the intermediate value, as
16692 it will be marked UNCHANGING. */
16693 rtx reg_temp = no_new_pseudos ? reg : gen_reg_rtx (Pmode);
16696 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 0),
16699 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 1),
16705 if (GET_CODE (offset) == CONST_INT)
16707 if (SMALL_INT (offset))
16708 return plus_constant (base, INTVAL (offset));
16709 else if (! reload_in_progress && ! reload_completed)
16710 offset = force_reg (Pmode, offset);
16713 rtx mem = force_const_mem (Pmode, orig);
16714 return machopic_legitimize_pic_address (mem, Pmode, reg);
16717 return gen_rtx_PLUS (Pmode, base, offset);
16720 /* Fall back on generic machopic code. */
16721 return machopic_legitimize_pic_address (orig, mode, reg);
16724 /* This is just a placeholder to make linking work without having to
16725 add this to the generic Darwin EXTRA_SECTIONS. If -mcall-aix is
16726 ever needed for Darwin (not too likely!) this would have to get a
16727 real definition. */
16734 #endif /* TARGET_MACHO */
16737 static unsigned int
16738 rs6000_elf_section_type_flags (tree decl, const char *name, int reloc)
16740 return default_section_type_flags_1 (decl, name, reloc,
16741 flag_pic || DEFAULT_ABI == ABI_AIX);
16744 /* Record an element in the table of global constructors. SYMBOL is
16745 a SYMBOL_REF of the function to be called; PRIORITY is a number
16746 between 0 and MAX_INIT_PRIORITY.
16748 This differs from default_named_section_asm_out_constructor in
16749 that we have special handling for -mrelocatable. */
16752 rs6000_elf_asm_out_constructor (rtx symbol, int priority)
16754 const char *section = ".ctors";
16757 if (priority != DEFAULT_INIT_PRIORITY)
16759 sprintf (buf, ".ctors.%.5u",
16760 /* Invert the numbering so the linker puts us in the proper
16761 order; constructors are run from right to left, and the
16762 linker sorts in increasing order. */
16763 MAX_INIT_PRIORITY - priority);
16767 named_section_flags (section, SECTION_WRITE);
16768 assemble_align (POINTER_SIZE);
16770 if (TARGET_RELOCATABLE)
16772 fputs ("\t.long (", asm_out_file);
16773 output_addr_const (asm_out_file, symbol);
16774 fputs (")@fixup\n", asm_out_file);
16777 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
16781 rs6000_elf_asm_out_destructor (rtx symbol, int priority)
16783 const char *section = ".dtors";
16786 if (priority != DEFAULT_INIT_PRIORITY)
16788 sprintf (buf, ".dtors.%.5u",
16789 /* Invert the numbering so the linker puts us in the proper
16790 order; constructors are run from right to left, and the
16791 linker sorts in increasing order. */
16792 MAX_INIT_PRIORITY - priority);
16796 named_section_flags (section, SECTION_WRITE);
16797 assemble_align (POINTER_SIZE);
16799 if (TARGET_RELOCATABLE)
16801 fputs ("\t.long (", asm_out_file);
16802 output_addr_const (asm_out_file, symbol);
16803 fputs (")@fixup\n", asm_out_file);
16806 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
16810 rs6000_elf_declare_function_name (FILE *file, const char *name, tree decl)
16814 fputs ("\t.section\t\".opd\",\"aw\"\n\t.align 3\n", file);
16815 ASM_OUTPUT_LABEL (file, name);
16816 fputs (DOUBLE_INT_ASM_OP, file);
16817 rs6000_output_function_entry (file, name);
16818 fputs (",.TOC.@tocbase,0\n\t.previous\n", file);
16821 fputs ("\t.size\t", file);
16822 assemble_name (file, name);
16823 fputs (",24\n\t.type\t.", file);
16824 assemble_name (file, name);
16825 fputs (",@function\n", file);
16826 if (TREE_PUBLIC (decl) && ! DECL_WEAK (decl))
16828 fputs ("\t.globl\t.", file);
16829 assemble_name (file, name);
16834 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
16835 ASM_DECLARE_RESULT (file, DECL_RESULT (decl));
16836 rs6000_output_function_entry (file, name);
16837 fputs (":\n", file);
16841 if (TARGET_RELOCATABLE
16842 && (get_pool_size () != 0 || current_function_profile)
16847 (*targetm.asm_out.internal_label) (file, "LCL", rs6000_pic_labelno);
16849 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
16850 fprintf (file, "\t.long ");
16851 assemble_name (file, buf);
16853 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
16854 assemble_name (file, buf);
16858 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
16859 ASM_DECLARE_RESULT (file, DECL_RESULT (decl));
16861 if (DEFAULT_ABI == ABI_AIX)
16863 const char *desc_name, *orig_name;
16865 orig_name = (*targetm.strip_name_encoding) (name);
16866 desc_name = orig_name;
16867 while (*desc_name == '.')
16870 if (TREE_PUBLIC (decl))
16871 fprintf (file, "\t.globl %s\n", desc_name);
16873 fprintf (file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
16874 fprintf (file, "%s:\n", desc_name);
16875 fprintf (file, "\t.long %s\n", orig_name);
16876 fputs ("\t.long _GLOBAL_OFFSET_TABLE_\n", file);
16877 if (DEFAULT_ABI == ABI_AIX)
16878 fputs ("\t.long 0\n", file);
16879 fprintf (file, "\t.previous\n");
16881 ASM_OUTPUT_LABEL (file, name);
16887 rs6000_xcoff_asm_globalize_label (FILE *stream, const char *name)
16889 fputs (GLOBAL_ASM_OP, stream);
16890 RS6000_OUTPUT_BASENAME (stream, name);
16891 putc ('\n', stream);
16895 rs6000_xcoff_asm_named_section (const char *name, unsigned int flags)
16898 static const char * const suffix[3] = { "PR", "RO", "RW" };
16900 if (flags & SECTION_CODE)
16902 else if (flags & SECTION_WRITE)
16907 fprintf (asm_out_file, "\t.csect %s%s[%s],%u\n",
16908 (flags & SECTION_CODE) ? "." : "",
16909 name, suffix[smclass], flags & SECTION_ENTSIZE);
16913 rs6000_xcoff_select_section (tree decl, int reloc,
16914 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
16916 if (decl_readonly_section_1 (decl, reloc, 1))
16918 if (TREE_PUBLIC (decl))
16919 read_only_data_section ();
16921 read_only_private_data_section ();
16925 if (TREE_PUBLIC (decl))
16928 private_data_section ();
16933 rs6000_xcoff_unique_section (tree decl, int reloc ATTRIBUTE_UNUSED)
16937 /* Use select_section for private and uninitialized data. */
16938 if (!TREE_PUBLIC (decl)
16939 || DECL_COMMON (decl)
16940 || DECL_INITIAL (decl) == NULL_TREE
16941 || DECL_INITIAL (decl) == error_mark_node
16942 || (flag_zero_initialized_in_bss
16943 && initializer_zerop (DECL_INITIAL (decl))))
16946 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
16947 name = (*targetm.strip_name_encoding) (name);
16948 DECL_SECTION_NAME (decl) = build_string (strlen (name), name);
16951 /* Select section for constant in constant pool.
16953 On RS/6000, all constants are in the private read-only data area.
16954 However, if this is being placed in the TOC it must be output as a
16958 rs6000_xcoff_select_rtx_section (enum machine_mode mode, rtx x,
16959 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
16961 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
16964 read_only_private_data_section ();
16967 /* Remove any trailing [DS] or the like from the symbol name. */
16969 static const char *
16970 rs6000_xcoff_strip_name_encoding (const char *name)
16975 len = strlen (name);
16976 if (name[len - 1] == ']')
16977 return ggc_alloc_string (name, len - 4);
16982 /* Section attributes. AIX is always PIC. */
16984 static unsigned int
16985 rs6000_xcoff_section_type_flags (tree decl, const char *name, int reloc)
16987 unsigned int align;
16988 unsigned int flags = default_section_type_flags_1 (decl, name, reloc, 1);
16990 /* Align to at least UNIT size. */
16991 if (flags & SECTION_CODE)
16992 align = MIN_UNITS_PER_WORD;
16994 /* Increase alignment of large objects if not already stricter. */
16995 align = MAX ((DECL_ALIGN (decl) / BITS_PER_UNIT),
16996 int_size_in_bytes (TREE_TYPE (decl)) > MIN_UNITS_PER_WORD
16997 ? UNITS_PER_FP_WORD : MIN_UNITS_PER_WORD);
16999 return flags | (exact_log2 (align) & SECTION_ENTSIZE);
17002 /* Output at beginning of assembler file.
17004 Initialize the section names for the RS/6000 at this point.
17006 Specify filename, including full path, to assembler.
17008 We want to go into the TOC section so at least one .toc will be emitted.
17009 Also, in order to output proper .bs/.es pairs, we need at least one static
17010 [RW] section emitted.
17012 Finally, declare mcount when profiling to make the assembler happy. */
17015 rs6000_xcoff_file_start (void)
17017 rs6000_gen_section_name (&xcoff_bss_section_name,
17018 main_input_filename, ".bss_");
17019 rs6000_gen_section_name (&xcoff_private_data_section_name,
17020 main_input_filename, ".rw_");
17021 rs6000_gen_section_name (&xcoff_read_only_section_name,
17022 main_input_filename, ".ro_");
17024 fputs ("\t.file\t", asm_out_file);
17025 output_quoted_string (asm_out_file, main_input_filename);
17026 fputc ('\n', asm_out_file);
17028 if (write_symbols != NO_DEBUG)
17029 private_data_section ();
17032 fprintf (asm_out_file, "\t.extern %s\n", RS6000_MCOUNT);
17033 rs6000_file_start ();
17036 /* Output at end of assembler file.
17037 On the RS/6000, referencing data should automatically pull in text. */
17040 rs6000_xcoff_file_end (void)
17043 fputs ("_section_.text:\n", asm_out_file);
17045 fputs (TARGET_32BIT
17046 ? "\t.long _section_.text\n" : "\t.llong _section_.text\n",
17049 #endif /* TARGET_XCOFF */
17052 /* Cross-module name binding. Darwin does not support overriding
17053 functions at dynamic-link time. */
17056 rs6000_binds_local_p (tree decl)
17058 return default_binds_local_p_1 (decl, 0);
17062 /* Compute a (partial) cost for rtx X. Return true if the complete
17063 cost has been computed, and false if subexpressions should be
17064 scanned. In either case, *TOTAL contains the cost result. */
17067 rs6000_rtx_costs (rtx x, int code, int outer_code ATTRIBUTE_UNUSED,
17070 enum machine_mode mode = GET_MODE (x);
17074 /* On the RS/6000, if it is valid in the insn, it is free. */
17076 if (((outer_code == SET
17077 || outer_code == PLUS
17078 || outer_code == MINUS)
17079 && (CONST_OK_FOR_LETTER_P (INTVAL (x), 'I')
17080 || CONST_OK_FOR_LETTER_P (INTVAL (x), 'L')))
17081 || ((outer_code == IOR || outer_code == XOR)
17082 && (CONST_OK_FOR_LETTER_P (INTVAL (x), 'K')
17083 || CONST_OK_FOR_LETTER_P (INTVAL (x), 'L')))
17084 || ((outer_code == DIV || outer_code == UDIV
17085 || outer_code == MOD || outer_code == UMOD)
17086 && exact_log2 (INTVAL (x)) >= 0)
17087 || (outer_code == AND
17088 && (CONST_OK_FOR_LETTER_P (INTVAL (x), 'K')
17089 || CONST_OK_FOR_LETTER_P (INTVAL (x), 'L')
17090 || mask_operand (x, VOIDmode)))
17091 || outer_code == ASHIFT
17092 || outer_code == ASHIFTRT
17093 || outer_code == LSHIFTRT
17094 || outer_code == ROTATE
17095 || outer_code == ROTATERT
17096 || outer_code == ZERO_EXTRACT
17097 || (outer_code == MULT
17098 && CONST_OK_FOR_LETTER_P (INTVAL (x), 'I'))
17099 || (outer_code == COMPARE
17100 && (CONST_OK_FOR_LETTER_P (INTVAL (x), 'I')
17101 || CONST_OK_FOR_LETTER_P (INTVAL (x), 'K'))))
17106 else if ((outer_code == PLUS
17107 && reg_or_add_cint64_operand (x, VOIDmode))
17108 || (outer_code == MINUS
17109 && reg_or_sub_cint64_operand (x, VOIDmode))
17110 || ((outer_code == SET
17111 || outer_code == IOR
17112 || outer_code == XOR)
17114 & ~ (unsigned HOST_WIDE_INT) 0xffffffff) == 0))
17116 *total = COSTS_N_INSNS (1);
17123 && ((outer_code == AND
17124 && (CONST_OK_FOR_LETTER_P (INTVAL (x), 'K')
17125 || CONST_OK_FOR_LETTER_P (INTVAL (x), 'L')
17126 || mask64_operand (x, DImode)))
17127 || ((outer_code == IOR || outer_code == XOR)
17128 && CONST_DOUBLE_HIGH (x) == 0
17129 && (CONST_DOUBLE_LOW (x)
17130 & ~ (unsigned HOST_WIDE_INT) 0xffff) == 0)))
17135 else if (mode == DImode
17136 && (outer_code == SET
17137 || outer_code == IOR
17138 || outer_code == XOR)
17139 && CONST_DOUBLE_HIGH (x) == 0)
17141 *total = COSTS_N_INSNS (1);
17150 /* When optimizing for size, MEM should be slightly more expensive
17151 than generating address, e.g., (plus (reg) (const)).
17152 L1 cache latecy is about two instructions. */
17153 *total = optimize_size ? COSTS_N_INSNS (1) + 1 : COSTS_N_INSNS (2);
17161 if (mode == DFmode)
17163 if (GET_CODE (XEXP (x, 0)) == MULT)
17165 /* FNMA accounted in outer NEG. */
17166 if (outer_code == NEG)
17167 *total = rs6000_cost->dmul - rs6000_cost->fp;
17169 *total = rs6000_cost->dmul;
17172 *total = rs6000_cost->fp;
17174 else if (mode == SFmode)
17176 /* FNMA accounted in outer NEG. */
17177 if (outer_code == NEG && GET_CODE (XEXP (x, 0)) == MULT)
17180 *total = rs6000_cost->fp;
17182 else if (GET_CODE (XEXP (x, 0)) == MULT)
17184 /* The rs6000 doesn't have shift-and-add instructions. */
17185 rs6000_rtx_costs (XEXP (x, 0), MULT, PLUS, total);
17186 *total += COSTS_N_INSNS (1);
17189 *total = COSTS_N_INSNS (1);
17193 if (mode == DFmode)
17195 if (GET_CODE (XEXP (x, 0)) == MULT)
17197 /* FNMA accounted in outer NEG. */
17198 if (outer_code == NEG)
17201 *total = rs6000_cost->dmul;
17204 *total = rs6000_cost->fp;
17206 else if (mode == SFmode)
17208 /* FNMA accounted in outer NEG. */
17209 if (outer_code == NEG && GET_CODE (XEXP (x, 0)) == MULT)
17212 *total = rs6000_cost->fp;
17214 else if (GET_CODE (XEXP (x, 0)) == MULT)
17216 /* The rs6000 doesn't have shift-and-sub instructions. */
17217 rs6000_rtx_costs (XEXP (x, 0), MULT, MINUS, total);
17218 *total += COSTS_N_INSNS (1);
17221 *total = COSTS_N_INSNS (1);
17225 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
17227 if (INTVAL (XEXP (x, 1)) >= -256
17228 && INTVAL (XEXP (x, 1)) <= 255)
17229 *total = rs6000_cost->mulsi_const9;
17231 *total = rs6000_cost->mulsi_const;
17233 /* FMA accounted in outer PLUS/MINUS. */
17234 else if ((mode == DFmode || mode == SFmode)
17235 && (outer_code == PLUS || outer_code == MINUS))
17237 else if (mode == DFmode)
17238 *total = rs6000_cost->dmul;
17239 else if (mode == SFmode)
17240 *total = rs6000_cost->fp;
17241 else if (mode == DImode)
17242 *total = rs6000_cost->muldi;
17244 *total = rs6000_cost->mulsi;
17249 if (FLOAT_MODE_P (mode))
17251 *total = mode == DFmode ? rs6000_cost->ddiv
17252 : rs6000_cost->sdiv;
17259 if (GET_CODE (XEXP (x, 1)) == CONST_INT
17260 && exact_log2 (INTVAL (XEXP (x, 1))) >= 0)
17262 if (code == DIV || code == MOD)
17264 *total = COSTS_N_INSNS (2);
17267 *total = COSTS_N_INSNS (1);
17271 if (GET_MODE (XEXP (x, 1)) == DImode)
17272 *total = rs6000_cost->divdi;
17274 *total = rs6000_cost->divsi;
17276 /* Add in shift and subtract for MOD. */
17277 if (code == MOD || code == UMOD)
17278 *total += COSTS_N_INSNS (2);
17282 *total = COSTS_N_INSNS (4);
17286 if (outer_code == AND || outer_code == IOR || outer_code == XOR)
17297 *total = COSTS_N_INSNS (1);
17305 /* Handle mul_highpart. */
17306 if (outer_code == TRUNCATE
17307 && GET_CODE (XEXP (x, 0)) == MULT)
17309 if (mode == DImode)
17310 *total = rs6000_cost->muldi;
17312 *total = rs6000_cost->mulsi;
17315 else if (outer_code == AND)
17318 *total = COSTS_N_INSNS (1);
17323 if (GET_CODE (XEXP (x, 0)) == MEM)
17326 *total = COSTS_N_INSNS (1);
17332 if (!FLOAT_MODE_P (mode))
17334 *total = COSTS_N_INSNS (1);
17340 case UNSIGNED_FLOAT:
17344 case FLOAT_TRUNCATE:
17345 *total = rs6000_cost->fp;
17349 switch (XINT (x, 1))
17352 *total = rs6000_cost->fp;
17364 *total = COSTS_N_INSNS (1);
17367 else if (FLOAT_MODE_P (mode)
17368 && TARGET_PPC_GFXOPT && TARGET_HARD_FLOAT && TARGET_FPRS)
17370 *total = rs6000_cost->fp;
17383 /* A C expression returning the cost of moving data from a register of class
17384 CLASS1 to one of CLASS2. */
17387 rs6000_register_move_cost (enum machine_mode mode,
17388 enum reg_class from, enum reg_class to)
17390 /* Moves from/to GENERAL_REGS. */
17391 if (reg_classes_intersect_p (to, GENERAL_REGS)
17392 || reg_classes_intersect_p (from, GENERAL_REGS))
17394 if (! reg_classes_intersect_p (to, GENERAL_REGS))
17397 if (from == FLOAT_REGS || from == ALTIVEC_REGS)
17398 return (rs6000_memory_move_cost (mode, from, 0)
17399 + rs6000_memory_move_cost (mode, GENERAL_REGS, 0));
17401 /* It's more expensive to move CR_REGS than CR0_REGS because of the shift.... */
17402 else if (from == CR_REGS)
17406 /* A move will cost one instruction per GPR moved. */
17407 return 2 * HARD_REGNO_NREGS (0, mode);
17410 /* Moving between two similar registers is just one instruction. */
17411 else if (reg_classes_intersect_p (to, from))
17412 return mode == TFmode ? 4 : 2;
17414 /* Everything else has to go through GENERAL_REGS. */
17416 return (rs6000_register_move_cost (mode, GENERAL_REGS, to)
17417 + rs6000_register_move_cost (mode, from, GENERAL_REGS));
17420 /* A C expressions returning the cost of moving data of MODE from a register to
17424 rs6000_memory_move_cost (enum machine_mode mode, enum reg_class class,
17425 int in ATTRIBUTE_UNUSED)
17427 if (reg_classes_intersect_p (class, GENERAL_REGS))
17428 return 4 * HARD_REGNO_NREGS (0, mode);
17429 else if (reg_classes_intersect_p (class, FLOAT_REGS))
17430 return 4 * HARD_REGNO_NREGS (32, mode);
17431 else if (reg_classes_intersect_p (class, ALTIVEC_REGS))
17432 return 4 * HARD_REGNO_NREGS (FIRST_ALTIVEC_REGNO, mode);
17434 return 4 + rs6000_register_move_cost (mode, class, GENERAL_REGS);
17437 /* Return an RTX representing where to find the function value of a
17438 function returning MODE. */
17440 rs6000_complex_function_value (enum machine_mode mode)
17442 unsigned int regno;
17444 enum machine_mode inner = GET_MODE_INNER (mode);
17445 unsigned int inner_bytes = GET_MODE_SIZE (inner);
17447 if (FLOAT_MODE_P (mode) && TARGET_HARD_FLOAT && TARGET_FPRS)
17448 regno = FP_ARG_RETURN;
17451 regno = GP_ARG_RETURN;
17453 /* 32-bit is OK since it'll go in r3/r4. */
17454 if (TARGET_32BIT && inner_bytes >= 4)
17455 return gen_rtx_REG (mode, regno);
17458 if (inner_bytes >= 8)
17459 return gen_rtx_REG (mode, regno);
17461 r1 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno),
17463 r2 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno + 1),
17464 GEN_INT (inner_bytes));
17465 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
17468 /* Define how to find the value returned by a function.
17469 VALTYPE is the data type of the value (as a tree).
17470 If the precise function being called is known, FUNC is its FUNCTION_DECL;
17471 otherwise, FUNC is 0.
17473 On the SPE, both FPs and vectors are returned in r3.
17475 On RS/6000 an integer value is in r3 and a floating-point value is in
17476 fp1, unless -msoft-float. */
17479 rs6000_function_value (tree valtype, tree func ATTRIBUTE_UNUSED)
17481 enum machine_mode mode;
17482 unsigned int regno;
17484 if (TARGET_32BIT && TARGET_POWERPC64 && TYPE_MODE (valtype) == DImode)
17486 /* Long long return value need be split in -mpowerpc64, 32bit ABI. */
17487 return gen_rtx_PARALLEL (DImode,
17489 gen_rtx_EXPR_LIST (VOIDmode,
17490 gen_rtx_REG (SImode, GP_ARG_RETURN),
17492 gen_rtx_EXPR_LIST (VOIDmode,
17493 gen_rtx_REG (SImode,
17494 GP_ARG_RETURN + 1),
17498 if ((INTEGRAL_TYPE_P (valtype)
17499 && TYPE_PRECISION (valtype) < BITS_PER_WORD)
17500 || POINTER_TYPE_P (valtype))
17501 mode = TARGET_32BIT ? SImode : DImode;
17503 mode = TYPE_MODE (valtype);
17505 if (SCALAR_FLOAT_TYPE_P (valtype) && TARGET_HARD_FLOAT && TARGET_FPRS)
17506 regno = FP_ARG_RETURN;
17507 else if (TREE_CODE (valtype) == COMPLEX_TYPE
17508 && targetm.calls.split_complex_arg)
17509 return rs6000_complex_function_value (mode);
17510 else if (TREE_CODE (valtype) == VECTOR_TYPE
17511 && TARGET_ALTIVEC && TARGET_ALTIVEC_ABI
17512 && ALTIVEC_VECTOR_MODE(mode))
17513 regno = ALTIVEC_ARG_RETURN;
17515 regno = GP_ARG_RETURN;
17517 return gen_rtx_REG (mode, regno);
17520 /* Define how to find the value returned by a library function
17521 assuming the value has mode MODE. */
17523 rs6000_libcall_value (enum machine_mode mode)
17525 unsigned int regno;
17527 if (GET_MODE_CLASS (mode) == MODE_FLOAT
17528 && TARGET_HARD_FLOAT && TARGET_FPRS)
17529 regno = FP_ARG_RETURN;
17530 else if (ALTIVEC_VECTOR_MODE (mode)
17531 && TARGET_ALTIVEC && TARGET_ALTIVEC_ABI)
17532 regno = ALTIVEC_ARG_RETURN;
17533 else if (COMPLEX_MODE_P (mode) && targetm.calls.split_complex_arg)
17534 return rs6000_complex_function_value (mode);
17536 regno = GP_ARG_RETURN;
17538 return gen_rtx_REG (mode, regno);
17541 /* Define the offset between two registers, FROM to be eliminated and its
17542 replacement TO, at the start of a routine. */
17544 rs6000_initial_elimination_offset (int from, int to)
17546 rs6000_stack_t *info = rs6000_stack_info ();
17547 HOST_WIDE_INT offset;
17549 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
17550 offset = info->push_p ? 0 : -info->total_size;
17551 else if (from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
17552 offset = info->total_size;
17553 else if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
17554 offset = info->push_p ? info->total_size : 0;
17555 else if (from == RS6000_PIC_OFFSET_TABLE_REGNUM)
17563 /* Return true if TYPE is of type __ev64_opaque__. */
17566 is_ev64_opaque_type (tree type)
17569 && (type == opaque_V2SI_type_node
17570 || type == opaque_V2SF_type_node
17571 || type == opaque_p_V2SI_type_node));
17575 rs6000_dwarf_register_span (rtx reg)
17579 if (!TARGET_SPE || !SPE_VECTOR_MODE (GET_MODE (reg)))
17582 regno = REGNO (reg);
17584 /* The duality of the SPE register size wreaks all kinds of havoc.
17585 This is a way of distinguishing r0 in 32-bits from r0 in
17588 gen_rtx_PARALLEL (VOIDmode,
17591 gen_rtx_REG (SImode, regno + 1200),
17592 gen_rtx_REG (SImode, regno))
17594 gen_rtx_REG (SImode, regno),
17595 gen_rtx_REG (SImode, regno + 1200)));
17598 /* Map internal gcc register numbers to DWARF2 register numbers. */
17601 rs6000_dbx_register_number (unsigned int regno)
17603 if (regno <= 63 || write_symbols != DWARF2_DEBUG)
17605 if (regno == MQ_REGNO)
17607 if (regno == LINK_REGISTER_REGNUM)
17609 if (regno == COUNT_REGISTER_REGNUM)
17611 if (CR_REGNO_P (regno))
17612 return regno - CR0_REGNO + 86;
17613 if (regno == XER_REGNO)
17615 if (ALTIVEC_REGNO_P (regno))
17616 return regno - FIRST_ALTIVEC_REGNO + 1124;
17617 if (regno == VRSAVE_REGNO)
17619 if (regno == VSCR_REGNO)
17621 if (regno == SPE_ACC_REGNO)
17623 if (regno == SPEFSCR_REGNO)
17625 /* SPE high reg number. We get these values of regno from
17626 rs6000_dwarf_register_span. */
17627 if (regno >= 1200 && regno < 1232)
17633 /* target hook eh_return_filter_mode */
17634 static enum machine_mode
17635 rs6000_eh_return_filter_mode (void)
17637 return TARGET_32BIT ? SImode : word_mode;
17640 /* Target hook for vector_mode_supported_p. */
17642 rs6000_vector_mode_supported_p (enum machine_mode mode)
17645 if (TARGET_SPE && SPE_VECTOR_MODE (mode))
17648 else if (TARGET_ALTIVEC && ALTIVEC_VECTOR_MODE (mode))
17655 #include "gt-rs6000.h"