1 /* Subroutines used for code generation on IBM RS/6000.
2 Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
4 Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it
9 under the terms of the GNU General Public License as published
10 by the Free Software Foundation; either version 2, or (at your
11 option) any later version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT
14 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
15 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
16 License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the
20 Free Software Foundation, 51 Franklin Street, Fifth Floor, Boston,
21 MA 02110-1301, USA. */
25 #include "coretypes.h"
29 #include "hard-reg-set.h"
31 #include "insn-config.h"
32 #include "conditions.h"
33 #include "insn-attr.h"
43 #include "basic-block.h"
44 #include "integrate.h"
50 #include "target-def.h"
51 #include "langhooks.h"
53 #include "cfglayout.h"
54 #include "sched-int.h"
55 #include "tree-gimple.h"
59 #include "xcoffout.h" /* get declarations of xcoff_*_section_name */
62 #include "gstab.h" /* for N_SLINE */
65 #ifndef TARGET_NO_PROTOTYPE
66 #define TARGET_NO_PROTOTYPE 0
69 #define min(A,B) ((A) < (B) ? (A) : (B))
70 #define max(A,B) ((A) > (B) ? (A) : (B))
72 /* Structure used to define the rs6000 stack */
73 typedef struct rs6000_stack {
74 int first_gp_reg_save; /* first callee saved GP register used */
75 int first_fp_reg_save; /* first callee saved FP register used */
76 int first_altivec_reg_save; /* first callee saved AltiVec register used */
77 int lr_save_p; /* true if the link reg needs to be saved */
78 int cr_save_p; /* true if the CR reg needs to be saved */
79 unsigned int vrsave_mask; /* mask of vec registers to save */
80 int toc_save_p; /* true if the TOC needs to be saved */
81 int push_p; /* true if we need to allocate stack space */
82 int calls_p; /* true if the function makes any calls */
83 int world_save_p; /* true if we're saving *everything*:
84 r13-r31, cr, f14-f31, vrsave, v20-v31 */
85 enum rs6000_abi abi; /* which ABI to use */
86 int gp_save_offset; /* offset to save GP regs from initial SP */
87 int fp_save_offset; /* offset to save FP regs from initial SP */
88 int altivec_save_offset; /* offset to save AltiVec regs from initial SP */
89 int lr_save_offset; /* offset to save LR from initial SP */
90 int cr_save_offset; /* offset to save CR from initial SP */
91 int vrsave_save_offset; /* offset to save VRSAVE from initial SP */
92 int spe_gp_save_offset; /* offset to save spe 64-bit gprs */
93 int toc_save_offset; /* offset to save the TOC pointer */
94 int varargs_save_offset; /* offset to save the varargs registers */
95 int ehrd_offset; /* offset to EH return data */
96 int reg_size; /* register size (4 or 8) */
97 HOST_WIDE_INT vars_size; /* variable save area size */
98 int parm_size; /* outgoing parameter size */
99 int save_size; /* save area size */
100 int fixed_size; /* fixed size of stack frame */
101 int gp_size; /* size of saved GP registers */
102 int fp_size; /* size of saved FP registers */
103 int altivec_size; /* size of saved AltiVec registers */
104 int cr_size; /* size to hold CR if not in save_size */
105 int lr_size; /* size to hold LR if not in save_size */
106 int vrsave_size; /* size to hold VRSAVE if not in save_size */
107 int altivec_padding_size; /* size of altivec alignment padding if
109 int spe_gp_size; /* size of 64-bit GPR save size for SPE */
110 int spe_padding_size;
111 int toc_size; /* size to hold TOC if not in save_size */
112 HOST_WIDE_INT total_size; /* total bytes allocated for stack */
113 int spe_64bit_regs_used;
116 /* A C structure for machine-specific, per-function data.
117 This is added to the cfun structure. */
118 typedef struct machine_function GTY(())
120 /* Flags if __builtin_return_address (n) with n >= 1 was used. */
121 int ra_needs_full_frame;
122 /* Some local-dynamic symbol. */
123 const char *some_ld_name;
124 /* Whether the instruction chain has been scanned already. */
125 int insn_chain_scanned_p;
126 /* Flags if __builtin_return_address (0) was used. */
128 /* Offset from virtual_stack_vars_rtx to the start of the ABI_V4
129 varargs save area. */
130 HOST_WIDE_INT varargs_save_offset;
133 /* Target cpu type */
135 enum processor_type rs6000_cpu;
136 struct rs6000_cpu_select rs6000_select[3] =
138 /* switch name, tune arch */
139 { (const char *)0, "--with-cpu=", 1, 1 },
140 { (const char *)0, "-mcpu=", 1, 1 },
141 { (const char *)0, "-mtune=", 1, 0 },
144 /* Always emit branch hint bits. */
145 static GTY(()) bool rs6000_always_hint;
147 /* Schedule instructions for group formation. */
148 static GTY(()) bool rs6000_sched_groups;
150 /* Support for -msched-costly-dep option. */
151 const char *rs6000_sched_costly_dep_str;
152 enum rs6000_dependence_cost rs6000_sched_costly_dep;
154 /* Support for -minsert-sched-nops option. */
155 const char *rs6000_sched_insert_nops_str;
156 enum rs6000_nop_insertion rs6000_sched_insert_nops;
158 /* Support targetm.vectorize.builtin_mask_for_load. */
159 static GTY(()) tree altivec_builtin_mask_for_load;
161 /* Size of long double */
162 int rs6000_long_double_type_size;
164 /* Whether -mabi=altivec has appeared */
165 int rs6000_altivec_abi;
167 /* Nonzero if we want SPE ABI extensions. */
170 /* Nonzero if floating point operations are done in the GPRs. */
171 int rs6000_float_gprs = 0;
173 /* Nonzero if we want Darwin's struct-by-value-in-regs ABI. */
174 int rs6000_darwin64_abi;
176 /* Set to nonzero once AIX common-mode calls have been defined. */
177 static GTY(()) int common_mode_defined;
179 /* Save information from a "cmpxx" operation until the branch or scc is
181 rtx rs6000_compare_op0, rs6000_compare_op1;
182 int rs6000_compare_fp_p;
184 /* Label number of label created for -mrelocatable, to call to so we can
185 get the address of the GOT section */
186 int rs6000_pic_labelno;
189 /* Which abi to adhere to */
190 const char *rs6000_abi_name;
192 /* Semantics of the small data area */
193 enum rs6000_sdata_type rs6000_sdata = SDATA_DATA;
195 /* Which small data model to use */
196 const char *rs6000_sdata_name = (char *)0;
198 /* Counter for labels which are to be placed in .fixup. */
199 int fixuplabelno = 0;
202 /* Bit size of immediate TLS offsets and string from which it is decoded. */
203 int rs6000_tls_size = 32;
204 const char *rs6000_tls_size_string;
206 /* ABI enumeration available for subtarget to use. */
207 enum rs6000_abi rs6000_current_abi;
209 /* Whether to use variant of AIX ABI for PowerPC64 Linux. */
213 const char *rs6000_debug_name;
214 int rs6000_debug_stack; /* debug stack applications */
215 int rs6000_debug_arg; /* debug argument handling */
217 /* Value is TRUE if register/mode pair is acceptable. */
218 bool rs6000_hard_regno_mode_ok_p[NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
220 /* Built in types. */
222 tree rs6000_builtin_types[RS6000_BTI_MAX];
223 tree rs6000_builtin_decls[RS6000_BUILTIN_COUNT];
225 const char *rs6000_traceback_name;
227 traceback_default = 0,
233 /* Flag to say the TOC is initialized */
235 char toc_label_name[10];
237 /* Control alignment for fields within structures. */
238 /* String from -malign-XXXXX. */
239 int rs6000_alignment_flags;
241 /* True for any options that were explicitly set. */
243 bool aix_struct_ret; /* True if -maix-struct-ret was used. */
244 bool alignment; /* True if -malign- was used. */
245 bool abi; /* True if -mabi= was used. */
246 bool spe; /* True if -mspe= was used. */
247 bool float_gprs; /* True if -mfloat-gprs= was used. */
248 bool isel; /* True if -misel was used. */
249 bool long_double; /* True if -mlong-double- was used. */
250 } rs6000_explicit_options;
252 struct builtin_description
254 /* mask is not const because we're going to alter it below. This
255 nonsense will go away when we rewrite the -march infrastructure
256 to give us more target flag bits. */
258 const enum insn_code icode;
259 const char *const name;
260 const enum rs6000_builtins code;
263 /* Target cpu costs. */
265 struct processor_costs {
266 const int mulsi; /* cost of SImode multiplication. */
267 const int mulsi_const; /* cost of SImode multiplication by constant. */
268 const int mulsi_const9; /* cost of SImode mult by short constant. */
269 const int muldi; /* cost of DImode multiplication. */
270 const int divsi; /* cost of SImode division. */
271 const int divdi; /* cost of DImode division. */
272 const int fp; /* cost of simple SFmode and DFmode insns. */
273 const int dmul; /* cost of DFmode multiplication (and fmadd). */
274 const int sdiv; /* cost of SFmode division (fdivs). */
275 const int ddiv; /* cost of DFmode division (fdiv). */
278 const struct processor_costs *rs6000_cost;
280 /* Processor costs (relative to an add) */
282 /* Instruction size costs on 32bit processors. */
284 struct processor_costs size32_cost = {
285 COSTS_N_INSNS (1), /* mulsi */
286 COSTS_N_INSNS (1), /* mulsi_const */
287 COSTS_N_INSNS (1), /* mulsi_const9 */
288 COSTS_N_INSNS (1), /* muldi */
289 COSTS_N_INSNS (1), /* divsi */
290 COSTS_N_INSNS (1), /* divdi */
291 COSTS_N_INSNS (1), /* fp */
292 COSTS_N_INSNS (1), /* dmul */
293 COSTS_N_INSNS (1), /* sdiv */
294 COSTS_N_INSNS (1), /* ddiv */
297 /* Instruction size costs on 64bit processors. */
299 struct processor_costs size64_cost = {
300 COSTS_N_INSNS (1), /* mulsi */
301 COSTS_N_INSNS (1), /* mulsi_const */
302 COSTS_N_INSNS (1), /* mulsi_const9 */
303 COSTS_N_INSNS (1), /* muldi */
304 COSTS_N_INSNS (1), /* divsi */
305 COSTS_N_INSNS (1), /* divdi */
306 COSTS_N_INSNS (1), /* fp */
307 COSTS_N_INSNS (1), /* dmul */
308 COSTS_N_INSNS (1), /* sdiv */
309 COSTS_N_INSNS (1), /* ddiv */
312 /* Instruction costs on RIOS1 processors. */
314 struct processor_costs rios1_cost = {
315 COSTS_N_INSNS (5), /* mulsi */
316 COSTS_N_INSNS (4), /* mulsi_const */
317 COSTS_N_INSNS (3), /* mulsi_const9 */
318 COSTS_N_INSNS (5), /* muldi */
319 COSTS_N_INSNS (19), /* divsi */
320 COSTS_N_INSNS (19), /* divdi */
321 COSTS_N_INSNS (2), /* fp */
322 COSTS_N_INSNS (2), /* dmul */
323 COSTS_N_INSNS (19), /* sdiv */
324 COSTS_N_INSNS (19), /* ddiv */
327 /* Instruction costs on RIOS2 processors. */
329 struct processor_costs rios2_cost = {
330 COSTS_N_INSNS (2), /* mulsi */
331 COSTS_N_INSNS (2), /* mulsi_const */
332 COSTS_N_INSNS (2), /* mulsi_const9 */
333 COSTS_N_INSNS (2), /* muldi */
334 COSTS_N_INSNS (13), /* divsi */
335 COSTS_N_INSNS (13), /* divdi */
336 COSTS_N_INSNS (2), /* fp */
337 COSTS_N_INSNS (2), /* dmul */
338 COSTS_N_INSNS (17), /* sdiv */
339 COSTS_N_INSNS (17), /* ddiv */
342 /* Instruction costs on RS64A processors. */
344 struct processor_costs rs64a_cost = {
345 COSTS_N_INSNS (20), /* mulsi */
346 COSTS_N_INSNS (12), /* mulsi_const */
347 COSTS_N_INSNS (8), /* mulsi_const9 */
348 COSTS_N_INSNS (34), /* muldi */
349 COSTS_N_INSNS (65), /* divsi */
350 COSTS_N_INSNS (67), /* divdi */
351 COSTS_N_INSNS (4), /* fp */
352 COSTS_N_INSNS (4), /* dmul */
353 COSTS_N_INSNS (31), /* sdiv */
354 COSTS_N_INSNS (31), /* ddiv */
357 /* Instruction costs on MPCCORE processors. */
359 struct processor_costs mpccore_cost = {
360 COSTS_N_INSNS (2), /* mulsi */
361 COSTS_N_INSNS (2), /* mulsi_const */
362 COSTS_N_INSNS (2), /* mulsi_const9 */
363 COSTS_N_INSNS (2), /* muldi */
364 COSTS_N_INSNS (6), /* divsi */
365 COSTS_N_INSNS (6), /* divdi */
366 COSTS_N_INSNS (4), /* fp */
367 COSTS_N_INSNS (5), /* dmul */
368 COSTS_N_INSNS (10), /* sdiv */
369 COSTS_N_INSNS (17), /* ddiv */
372 /* Instruction costs on PPC403 processors. */
374 struct processor_costs ppc403_cost = {
375 COSTS_N_INSNS (4), /* mulsi */
376 COSTS_N_INSNS (4), /* mulsi_const */
377 COSTS_N_INSNS (4), /* mulsi_const9 */
378 COSTS_N_INSNS (4), /* muldi */
379 COSTS_N_INSNS (33), /* divsi */
380 COSTS_N_INSNS (33), /* divdi */
381 COSTS_N_INSNS (11), /* fp */
382 COSTS_N_INSNS (11), /* dmul */
383 COSTS_N_INSNS (11), /* sdiv */
384 COSTS_N_INSNS (11), /* ddiv */
387 /* Instruction costs on PPC405 processors. */
389 struct processor_costs ppc405_cost = {
390 COSTS_N_INSNS (5), /* mulsi */
391 COSTS_N_INSNS (4), /* mulsi_const */
392 COSTS_N_INSNS (3), /* mulsi_const9 */
393 COSTS_N_INSNS (5), /* muldi */
394 COSTS_N_INSNS (35), /* divsi */
395 COSTS_N_INSNS (35), /* divdi */
396 COSTS_N_INSNS (11), /* fp */
397 COSTS_N_INSNS (11), /* dmul */
398 COSTS_N_INSNS (11), /* sdiv */
399 COSTS_N_INSNS (11), /* ddiv */
402 /* Instruction costs on PPC440 processors. */
404 struct processor_costs ppc440_cost = {
405 COSTS_N_INSNS (3), /* mulsi */
406 COSTS_N_INSNS (2), /* mulsi_const */
407 COSTS_N_INSNS (2), /* mulsi_const9 */
408 COSTS_N_INSNS (3), /* muldi */
409 COSTS_N_INSNS (34), /* divsi */
410 COSTS_N_INSNS (34), /* divdi */
411 COSTS_N_INSNS (5), /* fp */
412 COSTS_N_INSNS (5), /* dmul */
413 COSTS_N_INSNS (19), /* sdiv */
414 COSTS_N_INSNS (33), /* ddiv */
417 /* Instruction costs on PPC601 processors. */
419 struct processor_costs ppc601_cost = {
420 COSTS_N_INSNS (5), /* mulsi */
421 COSTS_N_INSNS (5), /* mulsi_const */
422 COSTS_N_INSNS (5), /* mulsi_const9 */
423 COSTS_N_INSNS (5), /* muldi */
424 COSTS_N_INSNS (36), /* divsi */
425 COSTS_N_INSNS (36), /* divdi */
426 COSTS_N_INSNS (4), /* fp */
427 COSTS_N_INSNS (5), /* dmul */
428 COSTS_N_INSNS (17), /* sdiv */
429 COSTS_N_INSNS (31), /* ddiv */
432 /* Instruction costs on PPC603 processors. */
434 struct processor_costs ppc603_cost = {
435 COSTS_N_INSNS (5), /* mulsi */
436 COSTS_N_INSNS (3), /* mulsi_const */
437 COSTS_N_INSNS (2), /* mulsi_const9 */
438 COSTS_N_INSNS (5), /* muldi */
439 COSTS_N_INSNS (37), /* divsi */
440 COSTS_N_INSNS (37), /* divdi */
441 COSTS_N_INSNS (3), /* fp */
442 COSTS_N_INSNS (4), /* dmul */
443 COSTS_N_INSNS (18), /* sdiv */
444 COSTS_N_INSNS (33), /* ddiv */
447 /* Instruction costs on PPC604 processors. */
449 struct processor_costs ppc604_cost = {
450 COSTS_N_INSNS (4), /* mulsi */
451 COSTS_N_INSNS (4), /* mulsi_const */
452 COSTS_N_INSNS (4), /* mulsi_const9 */
453 COSTS_N_INSNS (4), /* muldi */
454 COSTS_N_INSNS (20), /* divsi */
455 COSTS_N_INSNS (20), /* divdi */
456 COSTS_N_INSNS (3), /* fp */
457 COSTS_N_INSNS (3), /* dmul */
458 COSTS_N_INSNS (18), /* sdiv */
459 COSTS_N_INSNS (32), /* ddiv */
462 /* Instruction costs on PPC604e processors. */
464 struct processor_costs ppc604e_cost = {
465 COSTS_N_INSNS (2), /* mulsi */
466 COSTS_N_INSNS (2), /* mulsi_const */
467 COSTS_N_INSNS (2), /* mulsi_const9 */
468 COSTS_N_INSNS (2), /* muldi */
469 COSTS_N_INSNS (20), /* divsi */
470 COSTS_N_INSNS (20), /* divdi */
471 COSTS_N_INSNS (3), /* fp */
472 COSTS_N_INSNS (3), /* dmul */
473 COSTS_N_INSNS (18), /* sdiv */
474 COSTS_N_INSNS (32), /* ddiv */
477 /* Instruction costs on PPC620 processors. */
479 struct processor_costs ppc620_cost = {
480 COSTS_N_INSNS (5), /* mulsi */
481 COSTS_N_INSNS (4), /* mulsi_const */
482 COSTS_N_INSNS (3), /* mulsi_const9 */
483 COSTS_N_INSNS (7), /* muldi */
484 COSTS_N_INSNS (21), /* divsi */
485 COSTS_N_INSNS (37), /* divdi */
486 COSTS_N_INSNS (3), /* fp */
487 COSTS_N_INSNS (3), /* dmul */
488 COSTS_N_INSNS (18), /* sdiv */
489 COSTS_N_INSNS (32), /* ddiv */
492 /* Instruction costs on PPC630 processors. */
494 struct processor_costs ppc630_cost = {
495 COSTS_N_INSNS (5), /* mulsi */
496 COSTS_N_INSNS (4), /* mulsi_const */
497 COSTS_N_INSNS (3), /* mulsi_const9 */
498 COSTS_N_INSNS (7), /* muldi */
499 COSTS_N_INSNS (21), /* divsi */
500 COSTS_N_INSNS (37), /* divdi */
501 COSTS_N_INSNS (3), /* fp */
502 COSTS_N_INSNS (3), /* dmul */
503 COSTS_N_INSNS (17), /* sdiv */
504 COSTS_N_INSNS (21), /* ddiv */
507 /* Instruction costs on PPC750 and PPC7400 processors. */
509 struct processor_costs ppc750_cost = {
510 COSTS_N_INSNS (5), /* mulsi */
511 COSTS_N_INSNS (3), /* mulsi_const */
512 COSTS_N_INSNS (2), /* mulsi_const9 */
513 COSTS_N_INSNS (5), /* muldi */
514 COSTS_N_INSNS (17), /* divsi */
515 COSTS_N_INSNS (17), /* divdi */
516 COSTS_N_INSNS (3), /* fp */
517 COSTS_N_INSNS (3), /* dmul */
518 COSTS_N_INSNS (17), /* sdiv */
519 COSTS_N_INSNS (31), /* ddiv */
522 /* Instruction costs on PPC7450 processors. */
524 struct processor_costs ppc7450_cost = {
525 COSTS_N_INSNS (4), /* mulsi */
526 COSTS_N_INSNS (3), /* mulsi_const */
527 COSTS_N_INSNS (3), /* mulsi_const9 */
528 COSTS_N_INSNS (4), /* muldi */
529 COSTS_N_INSNS (23), /* divsi */
530 COSTS_N_INSNS (23), /* divdi */
531 COSTS_N_INSNS (5), /* fp */
532 COSTS_N_INSNS (5), /* dmul */
533 COSTS_N_INSNS (21), /* sdiv */
534 COSTS_N_INSNS (35), /* ddiv */
537 /* Instruction costs on PPC8540 processors. */
539 struct processor_costs ppc8540_cost = {
540 COSTS_N_INSNS (4), /* mulsi */
541 COSTS_N_INSNS (4), /* mulsi_const */
542 COSTS_N_INSNS (4), /* mulsi_const9 */
543 COSTS_N_INSNS (4), /* muldi */
544 COSTS_N_INSNS (19), /* divsi */
545 COSTS_N_INSNS (19), /* divdi */
546 COSTS_N_INSNS (4), /* fp */
547 COSTS_N_INSNS (4), /* dmul */
548 COSTS_N_INSNS (29), /* sdiv */
549 COSTS_N_INSNS (29), /* ddiv */
552 /* Instruction costs on POWER4 and POWER5 processors. */
554 struct processor_costs power4_cost = {
555 COSTS_N_INSNS (3), /* mulsi */
556 COSTS_N_INSNS (2), /* mulsi_const */
557 COSTS_N_INSNS (2), /* mulsi_const9 */
558 COSTS_N_INSNS (4), /* muldi */
559 COSTS_N_INSNS (18), /* divsi */
560 COSTS_N_INSNS (34), /* divdi */
561 COSTS_N_INSNS (3), /* fp */
562 COSTS_N_INSNS (3), /* dmul */
563 COSTS_N_INSNS (17), /* sdiv */
564 COSTS_N_INSNS (17), /* ddiv */
568 static bool rs6000_function_ok_for_sibcall (tree, tree);
569 static const char *rs6000_invalid_within_doloop (rtx);
570 static rtx rs6000_generate_compare (enum rtx_code);
571 static void rs6000_maybe_dead (rtx);
572 static void rs6000_emit_stack_tie (void);
573 static void rs6000_frame_related (rtx, rtx, HOST_WIDE_INT, rtx, rtx);
574 static rtx spe_synthesize_frame_save (rtx);
575 static bool spe_func_has_64bit_regs_p (void);
576 static void emit_frame_save (rtx, rtx, enum machine_mode, unsigned int,
578 static rtx gen_frame_mem_offset (enum machine_mode, rtx, int);
579 static void rs6000_emit_allocate_stack (HOST_WIDE_INT, int);
580 static unsigned rs6000_hash_constant (rtx);
581 static unsigned toc_hash_function (const void *);
582 static int toc_hash_eq (const void *, const void *);
583 static int constant_pool_expr_1 (rtx, int *, int *);
584 static bool constant_pool_expr_p (rtx);
585 static bool legitimate_indexed_address_p (rtx, int);
586 static bool legitimate_lo_sum_address_p (enum machine_mode, rtx, int);
587 static struct machine_function * rs6000_init_machine_status (void);
588 static bool rs6000_assemble_integer (rtx, unsigned int, int);
589 static bool no_global_regs_above (int);
590 #ifdef HAVE_GAS_HIDDEN
591 static void rs6000_assemble_visibility (tree, int);
593 static int rs6000_ra_ever_killed (void);
594 static tree rs6000_handle_longcall_attribute (tree *, tree, tree, int, bool *);
595 static tree rs6000_handle_altivec_attribute (tree *, tree, tree, int, bool *);
596 static void rs6000_eliminate_indexed_memrefs (rtx operands[2]);
597 static const char *rs6000_mangle_fundamental_type (tree);
598 extern const struct attribute_spec rs6000_attribute_table[];
599 static void rs6000_set_default_type_attributes (tree);
600 static void rs6000_output_function_prologue (FILE *, HOST_WIDE_INT);
601 static void rs6000_output_function_epilogue (FILE *, HOST_WIDE_INT);
602 static void rs6000_output_mi_thunk (FILE *, tree, HOST_WIDE_INT, HOST_WIDE_INT,
604 static rtx rs6000_emit_set_long_const (rtx, HOST_WIDE_INT, HOST_WIDE_INT);
605 static bool rs6000_return_in_memory (tree, tree);
606 static void rs6000_file_start (void);
608 static unsigned int rs6000_elf_section_type_flags (tree, const char *, int);
609 static void rs6000_elf_asm_out_constructor (rtx, int);
610 static void rs6000_elf_asm_out_destructor (rtx, int);
611 static void rs6000_elf_end_indicate_exec_stack (void) ATTRIBUTE_UNUSED;
612 static void rs6000_elf_select_section (tree, int, unsigned HOST_WIDE_INT);
613 static void rs6000_elf_unique_section (tree, int);
614 static void rs6000_elf_select_rtx_section (enum machine_mode, rtx,
615 unsigned HOST_WIDE_INT);
616 static void rs6000_elf_encode_section_info (tree, rtx, int)
620 static void rs6000_xcoff_asm_globalize_label (FILE *, const char *);
621 static void rs6000_xcoff_asm_named_section (const char *, unsigned int, tree);
622 static void rs6000_xcoff_select_section (tree, int, unsigned HOST_WIDE_INT);
623 static void rs6000_xcoff_unique_section (tree, int);
624 static void rs6000_xcoff_select_rtx_section (enum machine_mode, rtx,
625 unsigned HOST_WIDE_INT);
626 static const char * rs6000_xcoff_strip_name_encoding (const char *);
627 static unsigned int rs6000_xcoff_section_type_flags (tree, const char *, int);
628 static void rs6000_xcoff_file_start (void);
629 static void rs6000_xcoff_file_end (void);
631 static int rs6000_variable_issue (FILE *, int, rtx, int);
632 static bool rs6000_rtx_costs (rtx, int, int, int *);
633 static int rs6000_adjust_cost (rtx, rtx, rtx, int);
634 static bool is_microcoded_insn (rtx);
635 static int is_dispatch_slot_restricted (rtx);
636 static bool is_cracked_insn (rtx);
637 static bool is_branch_slot_insn (rtx);
638 static int rs6000_adjust_priority (rtx, int);
639 static int rs6000_issue_rate (void);
640 static bool rs6000_is_costly_dependence (rtx, rtx, rtx, int, int);
641 static rtx get_next_active_insn (rtx, rtx);
642 static bool insn_terminates_group_p (rtx , enum group_termination);
643 static bool is_costly_group (rtx *, rtx);
644 static int force_new_group (int, FILE *, rtx *, rtx, bool *, int, int *);
645 static int redefine_groups (FILE *, int, rtx, rtx);
646 static int pad_groups (FILE *, int, rtx, rtx);
647 static void rs6000_sched_finish (FILE *, int);
648 static int rs6000_use_sched_lookahead (void);
649 static tree rs6000_builtin_mask_for_load (void);
651 static void def_builtin (int, const char *, tree, int);
652 static void rs6000_init_builtins (void);
653 static rtx rs6000_expand_unop_builtin (enum insn_code, tree, rtx);
654 static rtx rs6000_expand_binop_builtin (enum insn_code, tree, rtx);
655 static rtx rs6000_expand_ternop_builtin (enum insn_code, tree, rtx);
656 static rtx rs6000_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
657 static void altivec_init_builtins (void);
658 static void rs6000_common_init_builtins (void);
659 static void rs6000_init_libfuncs (void);
661 static void enable_mask_for_builtins (struct builtin_description *, int,
662 enum rs6000_builtins,
663 enum rs6000_builtins);
664 static tree build_opaque_vector_type (tree, int);
665 static void spe_init_builtins (void);
666 static rtx spe_expand_builtin (tree, rtx, bool *);
667 static rtx spe_expand_stv_builtin (enum insn_code, tree);
668 static rtx spe_expand_predicate_builtin (enum insn_code, tree, rtx);
669 static rtx spe_expand_evsel_builtin (enum insn_code, tree, rtx);
670 static int rs6000_emit_int_cmove (rtx, rtx, rtx, rtx);
671 static rs6000_stack_t *rs6000_stack_info (void);
672 static void debug_stack_info (rs6000_stack_t *);
674 static rtx altivec_expand_builtin (tree, rtx, bool *);
675 static rtx altivec_expand_ld_builtin (tree, rtx, bool *);
676 static rtx altivec_expand_st_builtin (tree, rtx, bool *);
677 static rtx altivec_expand_dst_builtin (tree, rtx, bool *);
678 static rtx altivec_expand_abs_builtin (enum insn_code, tree, rtx);
679 static rtx altivec_expand_predicate_builtin (enum insn_code,
680 const char *, tree, rtx);
681 static rtx altivec_expand_lv_builtin (enum insn_code, tree, rtx);
682 static rtx altivec_expand_stv_builtin (enum insn_code, tree);
683 static rtx altivec_expand_vec_init_builtin (tree, tree, rtx);
684 static rtx altivec_expand_vec_set_builtin (tree);
685 static rtx altivec_expand_vec_ext_builtin (tree, rtx);
686 static int get_element_number (tree, tree);
687 static bool rs6000_handle_option (size_t, const char *, int);
688 static void rs6000_parse_tls_size_option (void);
689 static void rs6000_parse_yes_no_option (const char *, const char *, int *);
690 static int first_altivec_reg_to_save (void);
691 static unsigned int compute_vrsave_mask (void);
692 static void compute_save_world_info (rs6000_stack_t *info_ptr);
693 static void is_altivec_return_reg (rtx, void *);
694 static rtx generate_set_vrsave (rtx, rs6000_stack_t *, int);
695 int easy_vector_constant (rtx, enum machine_mode);
696 static bool rs6000_is_opaque_type (tree);
697 static rtx rs6000_dwarf_register_span (rtx);
698 static rtx rs6000_legitimize_tls_address (rtx, enum tls_model);
699 static void rs6000_output_dwarf_dtprel (FILE *, int, rtx) ATTRIBUTE_UNUSED;
700 static rtx rs6000_tls_get_addr (void);
701 static rtx rs6000_got_sym (void);
702 static int rs6000_tls_symbol_ref_1 (rtx *, void *);
703 static const char *rs6000_get_some_local_dynamic_name (void);
704 static int rs6000_get_some_local_dynamic_name_1 (rtx *, void *);
705 static rtx rs6000_complex_function_value (enum machine_mode);
706 static rtx rs6000_spe_function_arg (CUMULATIVE_ARGS *,
707 enum machine_mode, tree);
708 static void rs6000_darwin64_record_arg_advance_flush (CUMULATIVE_ARGS *,
710 static void rs6000_darwin64_record_arg_advance_recurse (CUMULATIVE_ARGS *,
711 tree, HOST_WIDE_INT);
712 static void rs6000_darwin64_record_arg_flush (CUMULATIVE_ARGS *,
715 static void rs6000_darwin64_record_arg_recurse (CUMULATIVE_ARGS *,
718 static rtx rs6000_darwin64_record_arg (CUMULATIVE_ARGS *, tree, int, bool);
719 static rtx rs6000_mixed_function_arg (enum machine_mode, tree, int);
720 static void rs6000_move_block_from_reg (int regno, rtx x, int nregs);
721 static void setup_incoming_varargs (CUMULATIVE_ARGS *,
722 enum machine_mode, tree,
724 static bool rs6000_pass_by_reference (CUMULATIVE_ARGS *, enum machine_mode,
726 static int rs6000_arg_partial_bytes (CUMULATIVE_ARGS *, enum machine_mode,
728 static const char *invalid_arg_for_unprototyped_fn (tree, tree, tree);
730 static void macho_branch_islands (void);
731 static void add_compiler_branch_island (tree, tree, int);
732 static int no_previous_def (tree function_name);
733 static tree get_prev_label (tree function_name);
734 static void rs6000_darwin_file_start (void);
737 static tree rs6000_build_builtin_va_list (void);
738 static tree rs6000_gimplify_va_arg (tree, tree, tree *, tree *);
739 static bool rs6000_must_pass_in_stack (enum machine_mode, tree);
740 static bool rs6000_vector_mode_supported_p (enum machine_mode);
741 static int get_vec_cmp_insn (enum rtx_code, enum machine_mode,
743 static rtx rs6000_emit_vector_compare (enum rtx_code, rtx, rtx,
745 static int get_vsel_insn (enum machine_mode);
746 static void rs6000_emit_vector_select (rtx, rtx, rtx, rtx);
747 static tree rs6000_stack_protect_fail (void);
749 const int INSN_NOT_AVAILABLE = -1;
750 static enum machine_mode rs6000_eh_return_filter_mode (void);
752 /* Hash table stuff for keeping track of TOC entries. */
754 struct toc_hash_struct GTY(())
756 /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
757 ASM_OUTPUT_SPECIAL_POOL_ENTRY_P. */
759 enum machine_mode key_mode;
763 static GTY ((param_is (struct toc_hash_struct))) htab_t toc_hash_table;
765 /* Default register names. */
766 char rs6000_reg_names[][8] =
768 "0", "1", "2", "3", "4", "5", "6", "7",
769 "8", "9", "10", "11", "12", "13", "14", "15",
770 "16", "17", "18", "19", "20", "21", "22", "23",
771 "24", "25", "26", "27", "28", "29", "30", "31",
772 "0", "1", "2", "3", "4", "5", "6", "7",
773 "8", "9", "10", "11", "12", "13", "14", "15",
774 "16", "17", "18", "19", "20", "21", "22", "23",
775 "24", "25", "26", "27", "28", "29", "30", "31",
776 "mq", "lr", "ctr","ap",
777 "0", "1", "2", "3", "4", "5", "6", "7",
779 /* AltiVec registers. */
780 "0", "1", "2", "3", "4", "5", "6", "7",
781 "8", "9", "10", "11", "12", "13", "14", "15",
782 "16", "17", "18", "19", "20", "21", "22", "23",
783 "24", "25", "26", "27", "28", "29", "30", "31",
786 "spe_acc", "spefscr",
787 /* Soft frame pointer. */
791 #ifdef TARGET_REGNAMES
792 static const char alt_reg_names[][8] =
794 "%r0", "%r1", "%r2", "%r3", "%r4", "%r5", "%r6", "%r7",
795 "%r8", "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
796 "%r16", "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
797 "%r24", "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
798 "%f0", "%f1", "%f2", "%f3", "%f4", "%f5", "%f6", "%f7",
799 "%f8", "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
800 "%f16", "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
801 "%f24", "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
802 "mq", "lr", "ctr", "ap",
803 "%cr0", "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
805 /* AltiVec registers. */
806 "%v0", "%v1", "%v2", "%v3", "%v4", "%v5", "%v6", "%v7",
807 "%v8", "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
808 "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
809 "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
812 "spe_acc", "spefscr",
813 /* Soft frame pointer. */
818 #ifndef MASK_STRICT_ALIGN
819 #define MASK_STRICT_ALIGN 0
821 #ifndef TARGET_PROFILE_KERNEL
822 #define TARGET_PROFILE_KERNEL 0
825 /* The VRSAVE bitmask puts bit %v0 as the most significant bit. */
826 #define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))
828 /* Initialize the GCC target structure. */
829 #undef TARGET_ATTRIBUTE_TABLE
830 #define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
831 #undef TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
832 #define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES rs6000_set_default_type_attributes
834 #undef TARGET_ASM_ALIGNED_DI_OP
835 #define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP
837 /* Default unaligned ops are only provided for ELF. Find the ops needed
838 for non-ELF systems. */
839 #ifndef OBJECT_FORMAT_ELF
841 /* For XCOFF. rs6000_assemble_integer will handle unaligned DIs on
843 #undef TARGET_ASM_UNALIGNED_HI_OP
844 #define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
845 #undef TARGET_ASM_UNALIGNED_SI_OP
846 #define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
847 #undef TARGET_ASM_UNALIGNED_DI_OP
848 #define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
851 #undef TARGET_ASM_UNALIGNED_HI_OP
852 #define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
853 #undef TARGET_ASM_UNALIGNED_SI_OP
854 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
855 #undef TARGET_ASM_UNALIGNED_DI_OP
856 #define TARGET_ASM_UNALIGNED_DI_OP "\t.quad\t"
857 #undef TARGET_ASM_ALIGNED_DI_OP
858 #define TARGET_ASM_ALIGNED_DI_OP "\t.quad\t"
862 /* This hook deals with fixups for relocatable code and DI-mode objects
864 #undef TARGET_ASM_INTEGER
865 #define TARGET_ASM_INTEGER rs6000_assemble_integer
867 #ifdef HAVE_GAS_HIDDEN
868 #undef TARGET_ASM_ASSEMBLE_VISIBILITY
869 #define TARGET_ASM_ASSEMBLE_VISIBILITY rs6000_assemble_visibility
872 #undef TARGET_HAVE_TLS
873 #define TARGET_HAVE_TLS HAVE_AS_TLS
875 #undef TARGET_CANNOT_FORCE_CONST_MEM
876 #define TARGET_CANNOT_FORCE_CONST_MEM rs6000_tls_referenced_p
878 #undef TARGET_ASM_FUNCTION_PROLOGUE
879 #define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
880 #undef TARGET_ASM_FUNCTION_EPILOGUE
881 #define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue
883 #undef TARGET_SCHED_VARIABLE_ISSUE
884 #define TARGET_SCHED_VARIABLE_ISSUE rs6000_variable_issue
886 #undef TARGET_SCHED_ISSUE_RATE
887 #define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
888 #undef TARGET_SCHED_ADJUST_COST
889 #define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
890 #undef TARGET_SCHED_ADJUST_PRIORITY
891 #define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority
892 #undef TARGET_SCHED_IS_COSTLY_DEPENDENCE
893 #define TARGET_SCHED_IS_COSTLY_DEPENDENCE rs6000_is_costly_dependence
894 #undef TARGET_SCHED_FINISH
895 #define TARGET_SCHED_FINISH rs6000_sched_finish
897 #undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
898 #define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD rs6000_use_sched_lookahead
900 #undef TARGET_VECTORIZE_BUILTIN_MASK_FOR_LOAD
901 #define TARGET_VECTORIZE_BUILTIN_MASK_FOR_LOAD rs6000_builtin_mask_for_load
903 #undef TARGET_INIT_BUILTINS
904 #define TARGET_INIT_BUILTINS rs6000_init_builtins
906 #undef TARGET_EXPAND_BUILTIN
907 #define TARGET_EXPAND_BUILTIN rs6000_expand_builtin
909 #undef TARGET_MANGLE_FUNDAMENTAL_TYPE
910 #define TARGET_MANGLE_FUNDAMENTAL_TYPE rs6000_mangle_fundamental_type
912 #undef TARGET_INIT_LIBFUNCS
913 #define TARGET_INIT_LIBFUNCS rs6000_init_libfuncs
916 #undef TARGET_BINDS_LOCAL_P
917 #define TARGET_BINDS_LOCAL_P darwin_binds_local_p
920 #undef TARGET_ASM_OUTPUT_MI_THUNK
921 #define TARGET_ASM_OUTPUT_MI_THUNK rs6000_output_mi_thunk
923 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
924 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_tree_hwi_hwi_tree_true
926 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
927 #define TARGET_FUNCTION_OK_FOR_SIBCALL rs6000_function_ok_for_sibcall
929 #undef TARGET_INVALID_WITHIN_DOLOOP
930 #define TARGET_INVALID_WITHIN_DOLOOP rs6000_invalid_within_doloop
932 #undef TARGET_RTX_COSTS
933 #define TARGET_RTX_COSTS rs6000_rtx_costs
934 #undef TARGET_ADDRESS_COST
935 #define TARGET_ADDRESS_COST hook_int_rtx_0
937 #undef TARGET_VECTOR_OPAQUE_P
938 #define TARGET_VECTOR_OPAQUE_P rs6000_is_opaque_type
940 #undef TARGET_DWARF_REGISTER_SPAN
941 #define TARGET_DWARF_REGISTER_SPAN rs6000_dwarf_register_span
943 /* On rs6000, function arguments are promoted, as are function return
945 #undef TARGET_PROMOTE_FUNCTION_ARGS
946 #define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_tree_true
947 #undef TARGET_PROMOTE_FUNCTION_RETURN
948 #define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_tree_true
950 #undef TARGET_RETURN_IN_MEMORY
951 #define TARGET_RETURN_IN_MEMORY rs6000_return_in_memory
953 #undef TARGET_SETUP_INCOMING_VARARGS
954 #define TARGET_SETUP_INCOMING_VARARGS setup_incoming_varargs
956 /* Always strict argument naming on rs6000. */
957 #undef TARGET_STRICT_ARGUMENT_NAMING
958 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
959 #undef TARGET_PRETEND_OUTGOING_VARARGS_NAMED
960 #define TARGET_PRETEND_OUTGOING_VARARGS_NAMED hook_bool_CUMULATIVE_ARGS_true
961 #undef TARGET_SPLIT_COMPLEX_ARG
962 #define TARGET_SPLIT_COMPLEX_ARG hook_bool_tree_true
963 #undef TARGET_MUST_PASS_IN_STACK
964 #define TARGET_MUST_PASS_IN_STACK rs6000_must_pass_in_stack
965 #undef TARGET_PASS_BY_REFERENCE
966 #define TARGET_PASS_BY_REFERENCE rs6000_pass_by_reference
967 #undef TARGET_ARG_PARTIAL_BYTES
968 #define TARGET_ARG_PARTIAL_BYTES rs6000_arg_partial_bytes
970 #undef TARGET_BUILD_BUILTIN_VA_LIST
971 #define TARGET_BUILD_BUILTIN_VA_LIST rs6000_build_builtin_va_list
973 #undef TARGET_GIMPLIFY_VA_ARG_EXPR
974 #define TARGET_GIMPLIFY_VA_ARG_EXPR rs6000_gimplify_va_arg
976 #undef TARGET_EH_RETURN_FILTER_MODE
977 #define TARGET_EH_RETURN_FILTER_MODE rs6000_eh_return_filter_mode
979 #undef TARGET_VECTOR_MODE_SUPPORTED_P
980 #define TARGET_VECTOR_MODE_SUPPORTED_P rs6000_vector_mode_supported_p
982 #undef TARGET_INVALID_ARG_FOR_UNPROTOTYPED_FN
983 #define TARGET_INVALID_ARG_FOR_UNPROTOTYPED_FN invalid_arg_for_unprototyped_fn
985 #undef TARGET_HANDLE_OPTION
986 #define TARGET_HANDLE_OPTION rs6000_handle_option
988 #undef TARGET_DEFAULT_TARGET_FLAGS
989 #define TARGET_DEFAULT_TARGET_FLAGS \
990 (TARGET_DEFAULT | MASK_SCHED_PROLOG)
992 #undef TARGET_STACK_PROTECT_FAIL
993 #define TARGET_STACK_PROTECT_FAIL rs6000_stack_protect_fail
995 /* MPC604EUM 3.5.2 Weak Consistency between Multiple Processors
996 The PowerPC architecture requires only weak consistency among
997 processors--that is, memory accesses between processors need not be
998 sequentially consistent and memory accesses among processors can occur
999 in any order. The ability to order memory accesses weakly provides
1000 opportunities for more efficient use of the system bus. Unless a
1001 dependency exists, the 604e allows read operations to precede store
1003 #undef TARGET_RELAXED_ORDERING
1004 #define TARGET_RELAXED_ORDERING true
1007 #undef TARGET_ASM_OUTPUT_DWARF_DTPREL
1008 #define TARGET_ASM_OUTPUT_DWARF_DTPREL rs6000_output_dwarf_dtprel
1011 struct gcc_target targetm = TARGET_INITIALIZER;
1014 /* Value is 1 if hard register REGNO can hold a value of machine-mode
1017 rs6000_hard_regno_mode_ok (int regno, enum machine_mode mode)
1019 /* The GPRs can hold any mode, but values bigger than one register
1020 cannot go past R31. */
1021 if (INT_REGNO_P (regno))
1022 return INT_REGNO_P (regno + HARD_REGNO_NREGS (regno, mode) - 1);
1024 /* The float registers can only hold floating modes and DImode. */
1025 if (FP_REGNO_P (regno))
1027 (SCALAR_FLOAT_MODE_P (mode)
1028 && FP_REGNO_P (regno + HARD_REGNO_NREGS (regno, mode) - 1))
1029 || (GET_MODE_CLASS (mode) == MODE_INT
1030 && GET_MODE_SIZE (mode) == UNITS_PER_FP_WORD);
1032 /* The CR register can only hold CC modes. */
1033 if (CR_REGNO_P (regno))
1034 return GET_MODE_CLASS (mode) == MODE_CC;
1036 if (XER_REGNO_P (regno))
1037 return mode == PSImode;
1039 /* AltiVec only in AldyVec registers. */
1040 if (ALTIVEC_REGNO_P (regno))
1041 return ALTIVEC_VECTOR_MODE (mode);
1043 /* ...but GPRs can hold SIMD data on the SPE in one register. */
1044 if (SPE_SIMD_REGNO_P (regno) && TARGET_SPE && SPE_VECTOR_MODE (mode))
1047 /* We cannot put TImode anywhere except general register and it must be
1048 able to fit within the register set. */
1050 return GET_MODE_SIZE (mode) <= UNITS_PER_WORD;
1053 /* Initialize rs6000_hard_regno_mode_ok_p table. */
1055 rs6000_init_hard_regno_mode_ok (void)
1059 for (r = 0; r < FIRST_PSEUDO_REGISTER; ++r)
1060 for (m = 0; m < NUM_MACHINE_MODES; ++m)
1061 if (rs6000_hard_regno_mode_ok (r, m))
1062 rs6000_hard_regno_mode_ok_p[m][r] = true;
1065 /* If not otherwise specified by a target, make 'long double' equivalent to
1068 #ifndef RS6000_DEFAULT_LONG_DOUBLE_SIZE
1069 #define RS6000_DEFAULT_LONG_DOUBLE_SIZE 64
1072 /* Override command line options. Mostly we process the processor
1073 type and sometimes adjust other TARGET_ options. */
1076 rs6000_override_options (const char *default_cpu)
1079 struct rs6000_cpu_select *ptr;
1082 /* Simplifications for entries below. */
1085 POWERPC_BASE_MASK = MASK_POWERPC | MASK_NEW_MNEMONICS,
1086 POWERPC_7400_MASK = POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_ALTIVEC
1089 /* This table occasionally claims that a processor does not support
1090 a particular feature even though it does, but the feature is slower
1091 than the alternative. Thus, it shouldn't be relied on as a
1092 complete description of the processor's support.
1094 Please keep this list in order, and don't forget to update the
1095 documentation in invoke.texi when adding a new processor or
1099 const char *const name; /* Canonical processor name. */
1100 const enum processor_type processor; /* Processor type enum value. */
1101 const int target_enable; /* Target flags to enable. */
1102 } const processor_target_table[]
1103 = {{"401", PROCESSOR_PPC403, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1104 {"403", PROCESSOR_PPC403,
1105 POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_STRICT_ALIGN},
1106 {"405", PROCESSOR_PPC405,
1107 POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_MULHW},
1108 {"405fp", PROCESSOR_PPC405, POWERPC_BASE_MASK | MASK_MULHW},
1109 {"440", PROCESSOR_PPC440,
1110 POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_MULHW},
1111 {"440fp", PROCESSOR_PPC440, POWERPC_BASE_MASK | MASK_MULHW},
1112 {"505", PROCESSOR_MPCCORE, POWERPC_BASE_MASK},
1113 {"601", PROCESSOR_PPC601,
1114 MASK_POWER | POWERPC_BASE_MASK | MASK_MULTIPLE | MASK_STRING},
1115 {"602", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1116 {"603", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1117 {"603e", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1118 {"604", PROCESSOR_PPC604, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1119 {"604e", PROCESSOR_PPC604e, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1120 {"620", PROCESSOR_PPC620,
1121 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
1122 {"630", PROCESSOR_PPC630,
1123 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
1124 {"740", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1125 {"7400", PROCESSOR_PPC7400, POWERPC_7400_MASK},
1126 {"7450", PROCESSOR_PPC7450, POWERPC_7400_MASK},
1127 {"750", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1128 {"801", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1129 {"821", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1130 {"823", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1131 {"8540", PROCESSOR_PPC8540, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1132 /* 8548 has a dummy entry for now. */
1133 {"8548", PROCESSOR_PPC8540, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1134 {"860", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1135 {"970", PROCESSOR_POWER4,
1136 POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
1137 {"common", PROCESSOR_COMMON, MASK_NEW_MNEMONICS},
1138 {"ec603e", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1139 {"G3", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1140 {"G4", PROCESSOR_PPC7450, POWERPC_7400_MASK},
1141 {"G5", PROCESSOR_POWER4,
1142 POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
1143 {"power", PROCESSOR_POWER, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1144 {"power2", PROCESSOR_POWER,
1145 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING},
1146 {"power3", PROCESSOR_PPC630,
1147 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
1148 {"power4", PROCESSOR_POWER4,
1149 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_MFCRF | MASK_POWERPC64},
1150 {"power5", PROCESSOR_POWER5,
1151 POWERPC_BASE_MASK | MASK_POWERPC64 | MASK_PPC_GFXOPT
1152 | MASK_MFCRF | MASK_POPCNTB},
1153 {"power5+", PROCESSOR_POWER5,
1154 POWERPC_BASE_MASK | MASK_POWERPC64 | MASK_PPC_GFXOPT
1155 | MASK_MFCRF | MASK_POPCNTB | MASK_FPRND},
1156 {"powerpc", PROCESSOR_POWERPC, POWERPC_BASE_MASK},
1157 {"powerpc64", PROCESSOR_POWERPC64,
1158 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
1159 {"rios", PROCESSOR_RIOS1, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1160 {"rios1", PROCESSOR_RIOS1, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1161 {"rios2", PROCESSOR_RIOS2,
1162 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING},
1163 {"rsc", PROCESSOR_PPC601, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1164 {"rsc1", PROCESSOR_PPC601, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1165 {"rs64", PROCESSOR_RS64A,
1166 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64}
1169 const size_t ptt_size = ARRAY_SIZE (processor_target_table);
1171 /* Some OSs don't support saving the high part of 64-bit registers on
1172 context switch. Other OSs don't support saving Altivec registers.
1173 On those OSs, we don't touch the MASK_POWERPC64 or MASK_ALTIVEC
1174 settings; if the user wants either, the user must explicitly specify
1175 them and we won't interfere with the user's specification. */
1178 POWER_MASKS = MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING,
1179 POWERPC_MASKS = (POWERPC_BASE_MASK | MASK_PPC_GPOPT
1180 | MASK_PPC_GFXOPT | MASK_POWERPC64 | MASK_ALTIVEC
1181 | MASK_MFCRF | MASK_POPCNTB | MASK_FPRND | MASK_MULHW)
1184 rs6000_init_hard_regno_mode_ok ();
1186 set_masks = POWER_MASKS | POWERPC_MASKS | MASK_SOFT_FLOAT;
1187 #ifdef OS_MISSING_POWERPC64
1188 if (OS_MISSING_POWERPC64)
1189 set_masks &= ~MASK_POWERPC64;
1191 #ifdef OS_MISSING_ALTIVEC
1192 if (OS_MISSING_ALTIVEC)
1193 set_masks &= ~MASK_ALTIVEC;
1196 /* Don't override by the processor default if given explicitly. */
1197 set_masks &= ~target_flags_explicit;
1199 /* Identify the processor type. */
1200 rs6000_select[0].string = default_cpu;
1201 rs6000_cpu = TARGET_POWERPC64 ? PROCESSOR_DEFAULT64 : PROCESSOR_DEFAULT;
1203 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
1205 ptr = &rs6000_select[i];
1206 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
1208 for (j = 0; j < ptt_size; j++)
1209 if (! strcmp (ptr->string, processor_target_table[j].name))
1211 if (ptr->set_tune_p)
1212 rs6000_cpu = processor_target_table[j].processor;
1214 if (ptr->set_arch_p)
1216 target_flags &= ~set_masks;
1217 target_flags |= (processor_target_table[j].target_enable
1224 error ("bad value (%s) for %s switch", ptr->string, ptr->name);
1231 /* If we are optimizing big endian systems for space, use the load/store
1232 multiple and string instructions. */
1233 if (BYTES_BIG_ENDIAN && optimize_size)
1234 target_flags |= ~target_flags_explicit & (MASK_MULTIPLE | MASK_STRING);
1236 /* Don't allow -mmultiple or -mstring on little endian systems
1237 unless the cpu is a 750, because the hardware doesn't support the
1238 instructions used in little endian mode, and causes an alignment
1239 trap. The 750 does not cause an alignment trap (except when the
1240 target is unaligned). */
1242 if (!BYTES_BIG_ENDIAN && rs6000_cpu != PROCESSOR_PPC750)
1244 if (TARGET_MULTIPLE)
1246 target_flags &= ~MASK_MULTIPLE;
1247 if ((target_flags_explicit & MASK_MULTIPLE) != 0)
1248 warning (0, "-mmultiple is not supported on little endian systems");
1253 target_flags &= ~MASK_STRING;
1254 if ((target_flags_explicit & MASK_STRING) != 0)
1255 warning (0, "-mstring is not supported on little endian systems");
1259 /* Set debug flags */
1260 if (rs6000_debug_name)
1262 if (! strcmp (rs6000_debug_name, "all"))
1263 rs6000_debug_stack = rs6000_debug_arg = 1;
1264 else if (! strcmp (rs6000_debug_name, "stack"))
1265 rs6000_debug_stack = 1;
1266 else if (! strcmp (rs6000_debug_name, "arg"))
1267 rs6000_debug_arg = 1;
1269 error ("unknown -mdebug-%s switch", rs6000_debug_name);
1272 if (rs6000_traceback_name)
1274 if (! strncmp (rs6000_traceback_name, "full", 4))
1275 rs6000_traceback = traceback_full;
1276 else if (! strncmp (rs6000_traceback_name, "part", 4))
1277 rs6000_traceback = traceback_part;
1278 else if (! strncmp (rs6000_traceback_name, "no", 2))
1279 rs6000_traceback = traceback_none;
1281 error ("unknown -mtraceback arg %qs; expecting %<full%>, %<partial%> or %<none%>",
1282 rs6000_traceback_name);
1285 if (!rs6000_explicit_options.long_double)
1286 rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
1288 /* Set Altivec ABI as default for powerpc64 linux. */
1289 if (TARGET_ELF && TARGET_64BIT)
1291 rs6000_altivec_abi = 1;
1292 TARGET_ALTIVEC_VRSAVE = 1;
1295 /* Set the Darwin64 ABI as default for 64-bit Darwin. */
1296 if (DEFAULT_ABI == ABI_DARWIN && TARGET_64BIT)
1298 rs6000_darwin64_abi = 1;
1300 darwin_one_byte_bool = 1;
1302 /* Default to natural alignment, for better performance. */
1303 rs6000_alignment_flags = MASK_ALIGN_NATURAL;
1306 /* Handle -mtls-size option. */
1307 rs6000_parse_tls_size_option ();
1309 #ifdef SUBTARGET_OVERRIDE_OPTIONS
1310 SUBTARGET_OVERRIDE_OPTIONS;
1312 #ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
1313 SUBSUBTARGET_OVERRIDE_OPTIONS;
1315 #ifdef SUB3TARGET_OVERRIDE_OPTIONS
1316 SUB3TARGET_OVERRIDE_OPTIONS;
1322 error ("AltiVec and E500 instructions cannot coexist");
1324 /* The e500 does not have string instructions, and we set
1325 MASK_STRING above when optimizing for size. */
1326 if ((target_flags & MASK_STRING) != 0)
1327 target_flags = target_flags & ~MASK_STRING;
1329 else if (rs6000_select[1].string != NULL)
1331 /* For the powerpc-eabispe configuration, we set all these by
1332 default, so let's unset them if we manually set another
1333 CPU that is not the E500. */
1334 if (!rs6000_explicit_options.abi)
1336 if (!rs6000_explicit_options.spe)
1338 if (!rs6000_explicit_options.float_gprs)
1339 rs6000_float_gprs = 0;
1340 if (!rs6000_explicit_options.isel)
1342 if (!rs6000_explicit_options.long_double)
1343 rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
1346 rs6000_always_hint = (rs6000_cpu != PROCESSOR_POWER4
1347 && rs6000_cpu != PROCESSOR_POWER5);
1348 rs6000_sched_groups = (rs6000_cpu == PROCESSOR_POWER4
1349 || rs6000_cpu == PROCESSOR_POWER5);
1351 rs6000_sched_restricted_insns_priority
1352 = (rs6000_sched_groups ? 1 : 0);
1354 /* Handle -msched-costly-dep option. */
1355 rs6000_sched_costly_dep
1356 = (rs6000_sched_groups ? store_to_load_dep_costly : no_dep_costly);
1358 if (rs6000_sched_costly_dep_str)
1360 if (! strcmp (rs6000_sched_costly_dep_str, "no"))
1361 rs6000_sched_costly_dep = no_dep_costly;
1362 else if (! strcmp (rs6000_sched_costly_dep_str, "all"))
1363 rs6000_sched_costly_dep = all_deps_costly;
1364 else if (! strcmp (rs6000_sched_costly_dep_str, "true_store_to_load"))
1365 rs6000_sched_costly_dep = true_store_to_load_dep_costly;
1366 else if (! strcmp (rs6000_sched_costly_dep_str, "store_to_load"))
1367 rs6000_sched_costly_dep = store_to_load_dep_costly;
1369 rs6000_sched_costly_dep = atoi (rs6000_sched_costly_dep_str);
1372 /* Handle -minsert-sched-nops option. */
1373 rs6000_sched_insert_nops
1374 = (rs6000_sched_groups ? sched_finish_regroup_exact : sched_finish_none);
1376 if (rs6000_sched_insert_nops_str)
1378 if (! strcmp (rs6000_sched_insert_nops_str, "no"))
1379 rs6000_sched_insert_nops = sched_finish_none;
1380 else if (! strcmp (rs6000_sched_insert_nops_str, "pad"))
1381 rs6000_sched_insert_nops = sched_finish_pad_groups;
1382 else if (! strcmp (rs6000_sched_insert_nops_str, "regroup_exact"))
1383 rs6000_sched_insert_nops = sched_finish_regroup_exact;
1385 rs6000_sched_insert_nops = atoi (rs6000_sched_insert_nops_str);
1388 #ifdef TARGET_REGNAMES
1389 /* If the user desires alternate register names, copy in the
1390 alternate names now. */
1391 if (TARGET_REGNAMES)
1392 memcpy (rs6000_reg_names, alt_reg_names, sizeof (rs6000_reg_names));
1395 /* Set aix_struct_return last, after the ABI is determined.
1396 If -maix-struct-return or -msvr4-struct-return was explicitly
1397 used, don't override with the ABI default. */
1398 if (!rs6000_explicit_options.aix_struct_ret)
1399 aix_struct_return = (DEFAULT_ABI != ABI_V4 || DRAFT_V4_STRUCT_RET);
1401 if (TARGET_LONG_DOUBLE_128
1402 && (DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_DARWIN))
1403 REAL_MODE_FORMAT (TFmode) = &ibm_extended_format;
1406 ASM_GENERATE_INTERNAL_LABEL (toc_label_name, "LCTOC", 1);
1408 /* We can only guarantee the availability of DI pseudo-ops when
1409 assembling for 64-bit targets. */
1412 targetm.asm_out.aligned_op.di = NULL;
1413 targetm.asm_out.unaligned_op.di = NULL;
1416 /* Set branch target alignment, if not optimizing for size. */
1419 if (rs6000_sched_groups)
1421 if (align_functions <= 0)
1422 align_functions = 16;
1423 if (align_jumps <= 0)
1425 if (align_loops <= 0)
1428 if (align_jumps_max_skip <= 0)
1429 align_jumps_max_skip = 15;
1430 if (align_loops_max_skip <= 0)
1431 align_loops_max_skip = 15;
1434 /* Arrange to save and restore machine status around nested functions. */
1435 init_machine_status = rs6000_init_machine_status;
1437 /* We should always be splitting complex arguments, but we can't break
1438 Linux and Darwin ABIs at the moment. For now, only AIX is fixed. */
1439 if (DEFAULT_ABI != ABI_AIX)
1440 targetm.calls.split_complex_arg = NULL;
1442 /* Initialize rs6000_cost with the appropriate target costs. */
1444 rs6000_cost = TARGET_POWERPC64 ? &size64_cost : &size32_cost;
1448 case PROCESSOR_RIOS1:
1449 rs6000_cost = &rios1_cost;
1452 case PROCESSOR_RIOS2:
1453 rs6000_cost = &rios2_cost;
1456 case PROCESSOR_RS64A:
1457 rs6000_cost = &rs64a_cost;
1460 case PROCESSOR_MPCCORE:
1461 rs6000_cost = &mpccore_cost;
1464 case PROCESSOR_PPC403:
1465 rs6000_cost = &ppc403_cost;
1468 case PROCESSOR_PPC405:
1469 rs6000_cost = &ppc405_cost;
1472 case PROCESSOR_PPC440:
1473 rs6000_cost = &ppc440_cost;
1476 case PROCESSOR_PPC601:
1477 rs6000_cost = &ppc601_cost;
1480 case PROCESSOR_PPC603:
1481 rs6000_cost = &ppc603_cost;
1484 case PROCESSOR_PPC604:
1485 rs6000_cost = &ppc604_cost;
1488 case PROCESSOR_PPC604e:
1489 rs6000_cost = &ppc604e_cost;
1492 case PROCESSOR_PPC620:
1493 rs6000_cost = &ppc620_cost;
1496 case PROCESSOR_PPC630:
1497 rs6000_cost = &ppc630_cost;
1500 case PROCESSOR_PPC750:
1501 case PROCESSOR_PPC7400:
1502 rs6000_cost = &ppc750_cost;
1505 case PROCESSOR_PPC7450:
1506 rs6000_cost = &ppc7450_cost;
1509 case PROCESSOR_PPC8540:
1510 rs6000_cost = &ppc8540_cost;
1513 case PROCESSOR_POWER4:
1514 case PROCESSOR_POWER5:
1515 rs6000_cost = &power4_cost;
1523 /* Implement targetm.vectorize.builtin_mask_for_load. */
1525 rs6000_builtin_mask_for_load (void)
1528 return altivec_builtin_mask_for_load;
1533 /* Handle generic options of the form -mfoo=yes/no.
1534 NAME is the option name.
1535 VALUE is the option value.
1536 FLAG is the pointer to the flag where to store a 1 or 0, depending on
1537 whether the option value is 'yes' or 'no' respectively. */
1539 rs6000_parse_yes_no_option (const char *name, const char *value, int *flag)
1543 else if (!strcmp (value, "yes"))
1545 else if (!strcmp (value, "no"))
1548 error ("unknown -m%s= option specified: '%s'", name, value);
1551 /* Validate and record the size specified with the -mtls-size option. */
1554 rs6000_parse_tls_size_option (void)
1556 if (rs6000_tls_size_string == 0)
1558 else if (strcmp (rs6000_tls_size_string, "16") == 0)
1559 rs6000_tls_size = 16;
1560 else if (strcmp (rs6000_tls_size_string, "32") == 0)
1561 rs6000_tls_size = 32;
1562 else if (strcmp (rs6000_tls_size_string, "64") == 0)
1563 rs6000_tls_size = 64;
1565 error ("bad value %qs for -mtls-size switch", rs6000_tls_size_string);
1569 optimization_options (int level ATTRIBUTE_UNUSED, int size ATTRIBUTE_UNUSED)
1571 if (DEFAULT_ABI == ABI_DARWIN)
1572 /* The Darwin libraries never set errno, so we might as well
1573 avoid calling them when that's the only reason we would. */
1574 flag_errno_math = 0;
1576 /* Double growth factor to counter reduced min jump length. */
1577 set_param_value ("max-grow-copy-bb-insns", 16);
1580 /* Implement TARGET_HANDLE_OPTION. */
1583 rs6000_handle_option (size_t code, const char *arg, int value)
1588 target_flags &= ~(MASK_POWER | MASK_POWER2
1589 | MASK_MULTIPLE | MASK_STRING);
1590 target_flags_explicit |= (MASK_POWER | MASK_POWER2
1591 | MASK_MULTIPLE | MASK_STRING);
1593 case OPT_mno_powerpc:
1594 target_flags &= ~(MASK_POWERPC | MASK_PPC_GPOPT
1595 | MASK_PPC_GFXOPT | MASK_POWERPC64);
1596 target_flags_explicit |= (MASK_POWERPC | MASK_PPC_GPOPT
1597 | MASK_PPC_GFXOPT | MASK_POWERPC64);
1600 target_flags &= ~(MASK_MINIMAL_TOC | MASK_NO_FP_IN_TOC
1601 | MASK_NO_SUM_IN_TOC);
1602 target_flags_explicit |= (MASK_MINIMAL_TOC | MASK_NO_FP_IN_TOC
1603 | MASK_NO_SUM_IN_TOC);
1604 #ifdef TARGET_USES_SYSV4_OPT
1605 /* Note, V.4 no longer uses a normal TOC, so make -mfull-toc, be
1606 just the same as -mminimal-toc. */
1607 target_flags |= MASK_MINIMAL_TOC;
1608 target_flags_explicit |= MASK_MINIMAL_TOC;
1612 #ifdef TARGET_USES_SYSV4_OPT
1614 /* Make -mtoc behave like -mminimal-toc. */
1615 target_flags |= MASK_MINIMAL_TOC;
1616 target_flags_explicit |= MASK_MINIMAL_TOC;
1620 #ifdef TARGET_USES_AIX64_OPT
1625 target_flags |= MASK_POWERPC64 | MASK_POWERPC;
1626 target_flags |= ~target_flags_explicit & MASK_PPC_GFXOPT;
1627 target_flags_explicit |= MASK_POWERPC64 | MASK_POWERPC;
1630 #ifdef TARGET_USES_AIX64_OPT
1635 target_flags &= ~MASK_POWERPC64;
1636 target_flags_explicit |= MASK_POWERPC64;
1639 case OPT_minsert_sched_nops_:
1640 rs6000_sched_insert_nops_str = arg;
1643 case OPT_mminimal_toc:
1646 target_flags &= ~(MASK_NO_FP_IN_TOC | MASK_NO_SUM_IN_TOC);
1647 target_flags_explicit |= (MASK_NO_FP_IN_TOC | MASK_NO_SUM_IN_TOC);
1654 target_flags |= (MASK_MULTIPLE | MASK_STRING);
1655 target_flags_explicit |= (MASK_MULTIPLE | MASK_STRING);
1662 target_flags |= (MASK_POWER | MASK_MULTIPLE | MASK_STRING);
1663 target_flags_explicit |= (MASK_POWER | MASK_MULTIPLE | MASK_STRING);
1667 case OPT_mpowerpc_gpopt:
1668 case OPT_mpowerpc_gfxopt:
1671 target_flags |= MASK_POWERPC;
1672 target_flags_explicit |= MASK_POWERPC;
1676 case OPT_maix_struct_return:
1677 case OPT_msvr4_struct_return:
1678 rs6000_explicit_options.aix_struct_ret = true;
1682 rs6000_parse_yes_no_option ("vrsave", arg, &(TARGET_ALTIVEC_VRSAVE));
1686 rs6000_explicit_options.isel = true;
1687 rs6000_parse_yes_no_option ("isel", arg, &(rs6000_isel));
1691 rs6000_explicit_options.spe = true;
1692 rs6000_parse_yes_no_option ("spe", arg, &(rs6000_spe));
1693 /* No SPE means 64-bit long doubles, even if an E500. */
1695 rs6000_long_double_type_size = 64;
1699 rs6000_debug_name = arg;
1702 #ifdef TARGET_USES_SYSV4_OPT
1704 rs6000_abi_name = arg;
1708 rs6000_sdata_name = arg;
1711 case OPT_mtls_size_:
1712 rs6000_tls_size_string = arg;
1715 case OPT_mrelocatable:
1718 target_flags |= MASK_MINIMAL_TOC | MASK_NO_FP_IN_TOC;
1719 target_flags_explicit |= MASK_MINIMAL_TOC | MASK_NO_FP_IN_TOC;
1723 case OPT_mrelocatable_lib:
1726 target_flags |= MASK_RELOCATABLE | MASK_MINIMAL_TOC
1727 | MASK_NO_FP_IN_TOC;
1728 target_flags_explicit |= MASK_RELOCATABLE | MASK_MINIMAL_TOC
1729 | MASK_NO_FP_IN_TOC;
1733 target_flags &= ~MASK_RELOCATABLE;
1734 target_flags_explicit |= MASK_RELOCATABLE;
1740 rs6000_explicit_options.abi = true;
1741 if (!strcmp (arg, "altivec"))
1743 rs6000_altivec_abi = 1;
1746 else if (! strcmp (arg, "no-altivec"))
1747 rs6000_altivec_abi = 0;
1748 else if (! strcmp (arg, "spe"))
1751 rs6000_altivec_abi = 0;
1752 if (!TARGET_SPE_ABI)
1753 error ("not configured for ABI: '%s'", arg);
1755 else if (! strcmp (arg, "no-spe"))
1758 /* These are here for testing during development only, do not
1759 document in the manual please. */
1760 else if (! strcmp (arg, "d64"))
1762 rs6000_darwin64_abi = 1;
1763 warning (0, "Using darwin64 ABI");
1765 else if (! strcmp (arg, "d32"))
1767 rs6000_darwin64_abi = 0;
1768 warning (0, "Using old darwin ABI");
1773 error ("unknown ABI specified: '%s'", arg);
1779 rs6000_select[1].string = arg;
1783 rs6000_select[2].string = arg;
1786 case OPT_mtraceback_:
1787 rs6000_traceback_name = arg;
1790 case OPT_mfloat_gprs_:
1791 rs6000_explicit_options.float_gprs = true;
1792 if (! strcmp (arg, "yes") || ! strcmp (arg, "single"))
1793 rs6000_float_gprs = 1;
1794 else if (! strcmp (arg, "double"))
1795 rs6000_float_gprs = 2;
1796 else if (! strcmp (arg, "no"))
1797 rs6000_float_gprs = 0;
1800 error ("invalid option for -mfloat-gprs: '%s'", arg);
1805 case OPT_mlong_double_:
1806 rs6000_explicit_options.long_double = true;
1807 rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
1808 if (value != 64 && value != 128)
1810 error ("Unknown switch -mlong-double-%s", arg);
1811 rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
1815 rs6000_long_double_type_size = value;
1818 case OPT_msched_costly_dep_:
1819 rs6000_sched_costly_dep_str = arg;
1823 rs6000_explicit_options.alignment = true;
1824 if (! strcmp (arg, "power"))
1826 /* On 64-bit Darwin, power alignment is ABI-incompatible with
1827 some C library functions, so warn about it. The flag may be
1828 useful for performance studies from time to time though, so
1829 don't disable it entirely. */
1830 if (DEFAULT_ABI == ABI_DARWIN && TARGET_64BIT)
1831 warning (0, "-malign-power is not supported for 64-bit Darwin;"
1832 " it is incompatible with the installed C and C++ libraries");
1833 rs6000_alignment_flags = MASK_ALIGN_POWER;
1835 else if (! strcmp (arg, "natural"))
1836 rs6000_alignment_flags = MASK_ALIGN_NATURAL;
1839 error ("unknown -malign-XXXXX option specified: '%s'", arg);
1847 /* Do anything needed at the start of the asm file. */
1850 rs6000_file_start (void)
1854 const char *start = buffer;
1855 struct rs6000_cpu_select *ptr;
1856 const char *default_cpu = TARGET_CPU_DEFAULT;
1857 FILE *file = asm_out_file;
1859 default_file_start ();
1861 #ifdef TARGET_BI_ARCH
1862 if ((TARGET_DEFAULT ^ target_flags) & MASK_64BIT)
1866 if (flag_verbose_asm)
1868 sprintf (buffer, "\n%s rs6000/powerpc options:", ASM_COMMENT_START);
1869 rs6000_select[0].string = default_cpu;
1871 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
1873 ptr = &rs6000_select[i];
1874 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
1876 fprintf (file, "%s %s%s", start, ptr->name, ptr->string);
1881 if (PPC405_ERRATUM77)
1883 fprintf (file, "%s PPC405CR_ERRATUM77", start);
1887 #ifdef USING_ELFOS_H
1888 switch (rs6000_sdata)
1890 case SDATA_NONE: fprintf (file, "%s -msdata=none", start); start = ""; break;
1891 case SDATA_DATA: fprintf (file, "%s -msdata=data", start); start = ""; break;
1892 case SDATA_SYSV: fprintf (file, "%s -msdata=sysv", start); start = ""; break;
1893 case SDATA_EABI: fprintf (file, "%s -msdata=eabi", start); start = ""; break;
1896 if (rs6000_sdata && g_switch_value)
1898 fprintf (file, "%s -G " HOST_WIDE_INT_PRINT_UNSIGNED, start,
1908 if (DEFAULT_ABI == ABI_AIX || (TARGET_ELF && flag_pic == 2))
1916 /* Return nonzero if this function is known to have a null epilogue. */
1919 direct_return (void)
1921 if (reload_completed)
1923 rs6000_stack_t *info = rs6000_stack_info ();
1925 if (info->first_gp_reg_save == 32
1926 && info->first_fp_reg_save == 64
1927 && info->first_altivec_reg_save == LAST_ALTIVEC_REGNO + 1
1928 && ! info->lr_save_p
1929 && ! info->cr_save_p
1930 && info->vrsave_mask == 0
1938 /* Return the number of instructions it takes to form a constant in an
1939 integer register. */
1942 num_insns_constant_wide (HOST_WIDE_INT value)
1944 /* signed constant loadable with {cal|addi} */
1945 if (CONST_OK_FOR_LETTER_P (value, 'I'))
1948 /* constant loadable with {cau|addis} */
1949 else if (CONST_OK_FOR_LETTER_P (value, 'L'))
1952 #if HOST_BITS_PER_WIDE_INT == 64
1953 else if (TARGET_POWERPC64)
1955 HOST_WIDE_INT low = ((value & 0xffffffff) ^ 0x80000000) - 0x80000000;
1956 HOST_WIDE_INT high = value >> 31;
1958 if (high == 0 || high == -1)
1964 return num_insns_constant_wide (high) + 1;
1966 return (num_insns_constant_wide (high)
1967 + num_insns_constant_wide (low) + 1);
1976 num_insns_constant (rtx op, enum machine_mode mode)
1978 HOST_WIDE_INT low, high;
1980 switch (GET_CODE (op))
1983 #if HOST_BITS_PER_WIDE_INT == 64
1984 if ((INTVAL (op) >> 31) != 0 && (INTVAL (op) >> 31) != -1
1985 && mask64_operand (op, mode))
1989 return num_insns_constant_wide (INTVAL (op));
1997 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1998 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
1999 return num_insns_constant_wide ((HOST_WIDE_INT) l);
2002 if (mode == VOIDmode || mode == DImode)
2004 high = CONST_DOUBLE_HIGH (op);
2005 low = CONST_DOUBLE_LOW (op);
2012 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
2013 REAL_VALUE_TO_TARGET_DOUBLE (rv, l);
2014 high = l[WORDS_BIG_ENDIAN == 0];
2015 low = l[WORDS_BIG_ENDIAN != 0];
2019 return (num_insns_constant_wide (low)
2020 + num_insns_constant_wide (high));
2023 if ((high == 0 && low >= 0)
2024 || (high == -1 && low < 0))
2025 return num_insns_constant_wide (low);
2027 else if (mask64_operand (op, mode))
2031 return num_insns_constant_wide (high) + 1;
2034 return (num_insns_constant_wide (high)
2035 + num_insns_constant_wide (low) + 1);
2044 /* Return true if OP can be synthesized with a particular vspltisb, vspltish
2045 or vspltisw instruction. OP is a CONST_VECTOR. Which instruction is used
2046 depends on STEP and COPIES, one of which will be 1. If COPIES > 1,
2047 all items are set to the same value and contain COPIES replicas of the
2048 vsplt's operand; if STEP > 1, one in STEP elements is set to the vsplt's
2049 operand and the others are set to the value of the operand's msb. */
2052 vspltis_constant (rtx op, unsigned step, unsigned copies)
2054 enum machine_mode mode = GET_MODE (op);
2055 enum machine_mode inner = GET_MODE_INNER (mode);
2058 unsigned nunits = GET_MODE_NUNITS (mode);
2059 unsigned bitsize = GET_MODE_BITSIZE (inner);
2060 unsigned mask = GET_MODE_MASK (inner);
2062 rtx last = CONST_VECTOR_ELT (op, nunits - 1);
2063 HOST_WIDE_INT val = INTVAL (last);
2064 HOST_WIDE_INT splat_val = val;
2065 HOST_WIDE_INT msb_val = val > 0 ? 0 : -1;
2067 /* Construct the value to be splatted, if possible. If not, return 0. */
2068 for (i = 2; i <= copies; i *= 2)
2070 HOST_WIDE_INT small_val;
2072 small_val = splat_val >> bitsize;
2074 if (splat_val != ((small_val << bitsize) | (small_val & mask)))
2076 splat_val = small_val;
2079 /* Check if SPLAT_VAL can really be the operand of a vspltis[bhw]. */
2080 if (EASY_VECTOR_15 (splat_val))
2083 /* Also check if we can splat, and then add the result to itself. Do so if
2084 the value is positive, of if the splat instruction is using OP's mode;
2085 for splat_val < 0, the splat and the add should use the same mode. */
2086 else if (EASY_VECTOR_15_ADD_SELF (splat_val)
2087 && (splat_val >= 0 || (step == 1 && copies == 1)))
2093 /* Check if VAL is present in every STEP-th element, and the
2094 other elements are filled with its most significant bit. */
2095 for (i = 0; i < nunits - 1; ++i)
2097 HOST_WIDE_INT desired_val;
2098 if (((i + 1) & (step - 1)) == 0)
2101 desired_val = msb_val;
2103 if (desired_val != INTVAL (CONST_VECTOR_ELT (op, i)))
2111 /* Return true if OP is of the given MODE and can be synthesized
2112 with a vspltisb, vspltish or vspltisw. */
2115 easy_altivec_constant (rtx op, enum machine_mode mode)
2117 unsigned step, copies;
2119 if (mode == VOIDmode)
2120 mode = GET_MODE (op);
2121 else if (mode != GET_MODE (op))
2124 /* Start with a vspltisw. */
2125 step = GET_MODE_NUNITS (mode) / 4;
2128 if (vspltis_constant (op, step, copies))
2131 /* Then try with a vspltish. */
2137 if (vspltis_constant (op, step, copies))
2140 /* And finally a vspltisb. */
2146 if (vspltis_constant (op, step, copies))
2152 /* Generate a VEC_DUPLICATE representing a vspltis[bhw] instruction whose
2153 result is OP. Abort if it is not possible. */
2156 gen_easy_altivec_constant (rtx op)
2158 enum machine_mode mode = GET_MODE (op);
2159 int nunits = GET_MODE_NUNITS (mode);
2160 rtx last = CONST_VECTOR_ELT (op, nunits - 1);
2161 unsigned step = nunits / 4;
2162 unsigned copies = 1;
2164 /* Start with a vspltisw. */
2165 if (vspltis_constant (op, step, copies))
2166 return gen_rtx_VEC_DUPLICATE (V4SImode, gen_lowpart (SImode, last));
2168 /* Then try with a vspltish. */
2174 if (vspltis_constant (op, step, copies))
2175 return gen_rtx_VEC_DUPLICATE (V8HImode, gen_lowpart (HImode, last));
2177 /* And finally a vspltisb. */
2183 if (vspltis_constant (op, step, copies))
2184 return gen_rtx_VEC_DUPLICATE (V16QImode, gen_lowpart (QImode, last));
2190 output_vec_const_move (rtx *operands)
2193 enum machine_mode mode;
2198 mode = GET_MODE (dest);
2203 if (zero_constant (vec, mode))
2204 return "vxor %0,%0,%0";
2206 splat_vec = gen_easy_altivec_constant (vec);
2207 gcc_assert (GET_CODE (splat_vec) == VEC_DUPLICATE);
2208 operands[1] = XEXP (splat_vec, 0);
2209 if (!EASY_VECTOR_15 (INTVAL (operands[1])))
2212 switch (GET_MODE (splat_vec))
2215 return "vspltisw %0,%1";
2218 return "vspltish %0,%1";
2221 return "vspltisb %0,%1";
2228 gcc_assert (TARGET_SPE);
2230 /* Vector constant 0 is handled as a splitter of V2SI, and in the
2231 pattern of V1DI, V4HI, and V2SF.
2233 FIXME: We should probably return # and add post reload
2234 splitters for these, but this way is so easy ;-). */
2235 cst = INTVAL (CONST_VECTOR_ELT (vec, 0));
2236 cst2 = INTVAL (CONST_VECTOR_ELT (vec, 1));
2237 operands[1] = CONST_VECTOR_ELT (vec, 0);
2238 operands[2] = CONST_VECTOR_ELT (vec, 1);
2240 return "li %0,%1\n\tevmergelo %0,%0,%0";
2242 return "li %0,%1\n\tevmergelo %0,%0,%0\n\tli %0,%2";
2245 /* Initialize vector TARGET to VALS. */
2248 rs6000_expand_vector_init (rtx target, rtx vals)
2250 enum machine_mode mode = GET_MODE (target);
2251 enum machine_mode inner_mode = GET_MODE_INNER (mode);
2252 int n_elts = GET_MODE_NUNITS (mode);
2253 int n_var = 0, one_var = -1;
2254 bool all_same = true, all_const_zero = true;
2258 for (i = 0; i < n_elts; ++i)
2260 x = XVECEXP (vals, 0, i);
2261 if (!CONSTANT_P (x))
2262 ++n_var, one_var = i;
2263 else if (x != CONST0_RTX (inner_mode))
2264 all_const_zero = false;
2266 if (i > 0 && !rtx_equal_p (x, XVECEXP (vals, 0, 0)))
2272 if (mode != V4SFmode && all_const_zero)
2274 /* Zero register. */
2275 emit_insn (gen_rtx_SET (VOIDmode, target,
2276 gen_rtx_XOR (mode, target, target)));
2279 else if (mode != V4SFmode && easy_vector_constant (vals, mode))
2281 /* Splat immediate. */
2282 emit_insn (gen_rtx_SET (VOIDmode, target, vals));
2286 ; /* Splat vector element. */
2289 /* Load from constant pool. */
2290 emit_move_insn (target, gen_rtx_CONST_VECTOR (mode, XVEC (vals, 0)));
2295 /* Store value to stack temp. Load vector element. Splat. */
2298 mem = assign_stack_temp (mode, GET_MODE_SIZE (inner_mode), 0);
2299 emit_move_insn (adjust_address_nv (mem, inner_mode, 0),
2300 XVECEXP (vals, 0, 0));
2301 x = gen_rtx_UNSPEC (VOIDmode,
2302 gen_rtvec (1, const0_rtx), UNSPEC_LVE);
2303 emit_insn (gen_rtx_PARALLEL (VOIDmode,
2305 gen_rtx_SET (VOIDmode,
2308 x = gen_rtx_VEC_SELECT (inner_mode, target,
2309 gen_rtx_PARALLEL (VOIDmode,
2310 gen_rtvec (1, const0_rtx)));
2311 emit_insn (gen_rtx_SET (VOIDmode, target,
2312 gen_rtx_VEC_DUPLICATE (mode, x)));
2316 /* One field is non-constant. Load constant then overwrite
2320 rtx copy = copy_rtx (vals);
2322 /* Load constant part of vector, substitute neighboring value for
2324 XVECEXP (copy, 0, one_var) = XVECEXP (vals, 0, (one_var + 1) % n_elts);
2325 rs6000_expand_vector_init (target, copy);
2327 /* Insert variable. */
2328 rs6000_expand_vector_set (target, XVECEXP (vals, 0, one_var), one_var);
2332 /* Construct the vector in memory one field at a time
2333 and load the whole vector. */
2334 mem = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
2335 for (i = 0; i < n_elts; i++)
2336 emit_move_insn (adjust_address_nv (mem, inner_mode,
2337 i * GET_MODE_SIZE (inner_mode)),
2338 XVECEXP (vals, 0, i));
2339 emit_move_insn (target, mem);
2342 /* Set field ELT of TARGET to VAL. */
2345 rs6000_expand_vector_set (rtx target, rtx val, int elt)
2347 enum machine_mode mode = GET_MODE (target);
2348 enum machine_mode inner_mode = GET_MODE_INNER (mode);
2349 rtx reg = gen_reg_rtx (mode);
2351 int width = GET_MODE_SIZE (inner_mode);
2354 /* Load single variable value. */
2355 mem = assign_stack_temp (mode, GET_MODE_SIZE (inner_mode), 0);
2356 emit_move_insn (adjust_address_nv (mem, inner_mode, 0), val);
2357 x = gen_rtx_UNSPEC (VOIDmode,
2358 gen_rtvec (1, const0_rtx), UNSPEC_LVE);
2359 emit_insn (gen_rtx_PARALLEL (VOIDmode,
2361 gen_rtx_SET (VOIDmode,
2365 /* Linear sequence. */
2366 mask = gen_rtx_PARALLEL (V16QImode, rtvec_alloc (16));
2367 for (i = 0; i < 16; ++i)
2368 XVECEXP (mask, 0, i) = GEN_INT (i);
2370 /* Set permute mask to insert element into target. */
2371 for (i = 0; i < width; ++i)
2372 XVECEXP (mask, 0, elt*width + i)
2373 = GEN_INT (i + 0x10);
2374 x = gen_rtx_CONST_VECTOR (V16QImode, XVEC (mask, 0));
2375 x = gen_rtx_UNSPEC (mode,
2376 gen_rtvec (3, target, reg,
2377 force_reg (V16QImode, x)),
2379 emit_insn (gen_rtx_SET (VOIDmode, target, x));
2382 /* Extract field ELT from VEC into TARGET. */
2385 rs6000_expand_vector_extract (rtx target, rtx vec, int elt)
2387 enum machine_mode mode = GET_MODE (vec);
2388 enum machine_mode inner_mode = GET_MODE_INNER (mode);
2391 /* Allocate mode-sized buffer. */
2392 mem = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
2394 /* Add offset to field within buffer matching vector element. */
2395 mem = adjust_address_nv (mem, mode, elt * GET_MODE_SIZE (inner_mode));
2397 /* Store single field into mode-sized buffer. */
2398 x = gen_rtx_UNSPEC (VOIDmode,
2399 gen_rtvec (1, const0_rtx), UNSPEC_STVE);
2400 emit_insn (gen_rtx_PARALLEL (VOIDmode,
2402 gen_rtx_SET (VOIDmode,
2405 emit_move_insn (target, adjust_address_nv (mem, inner_mode, 0));
2408 /* Generates shifts and masks for a pair of rldicl or rldicr insns to
2409 implement ANDing by the mask IN. */
2411 build_mask64_2_operands (rtx in, rtx *out)
2413 #if HOST_BITS_PER_WIDE_INT >= 64
2414 unsigned HOST_WIDE_INT c, lsb, m1, m2;
2417 gcc_assert (GET_CODE (in) == CONST_INT);
2422 /* Assume c initially something like 0x00fff000000fffff. The idea
2423 is to rotate the word so that the middle ^^^^^^ group of zeros
2424 is at the MS end and can be cleared with an rldicl mask. We then
2425 rotate back and clear off the MS ^^ group of zeros with a
2427 c = ~c; /* c == 0xff000ffffff00000 */
2428 lsb = c & -c; /* lsb == 0x0000000000100000 */
2429 m1 = -lsb; /* m1 == 0xfffffffffff00000 */
2430 c = ~c; /* c == 0x00fff000000fffff */
2431 c &= -lsb; /* c == 0x00fff00000000000 */
2432 lsb = c & -c; /* lsb == 0x0000100000000000 */
2433 c = ~c; /* c == 0xff000fffffffffff */
2434 c &= -lsb; /* c == 0xff00000000000000 */
2436 while ((lsb >>= 1) != 0)
2437 shift++; /* shift == 44 on exit from loop */
2438 m1 <<= 64 - shift; /* m1 == 0xffffff0000000000 */
2439 m1 = ~m1; /* m1 == 0x000000ffffffffff */
2440 m2 = ~c; /* m2 == 0x00ffffffffffffff */
2444 /* Assume c initially something like 0xff000f0000000000. The idea
2445 is to rotate the word so that the ^^^ middle group of zeros
2446 is at the LS end and can be cleared with an rldicr mask. We then
2447 rotate back and clear off the LS group of ^^^^^^^^^^ zeros with
2449 lsb = c & -c; /* lsb == 0x0000010000000000 */
2450 m2 = -lsb; /* m2 == 0xffffff0000000000 */
2451 c = ~c; /* c == 0x00fff0ffffffffff */
2452 c &= -lsb; /* c == 0x00fff00000000000 */
2453 lsb = c & -c; /* lsb == 0x0000100000000000 */
2454 c = ~c; /* c == 0xff000fffffffffff */
2455 c &= -lsb; /* c == 0xff00000000000000 */
2457 while ((lsb >>= 1) != 0)
2458 shift++; /* shift == 44 on exit from loop */
2459 m1 = ~c; /* m1 == 0x00ffffffffffffff */
2460 m1 >>= shift; /* m1 == 0x0000000000000fff */
2461 m1 = ~m1; /* m1 == 0xfffffffffffff000 */
2464 /* Note that when we only have two 0->1 and 1->0 transitions, one of the
2465 masks will be all 1's. We are guaranteed more than one transition. */
2466 out[0] = GEN_INT (64 - shift);
2467 out[1] = GEN_INT (m1);
2468 out[2] = GEN_INT (shift);
2469 out[3] = GEN_INT (m2);
2477 /* Return TRUE if OP is an invalid SUBREG operation on the e500. */
2480 invalid_e500_subreg (rtx op, enum machine_mode mode)
2482 /* Reject (subreg:SI (reg:DF)). */
2483 if (GET_CODE (op) == SUBREG
2485 && REG_P (SUBREG_REG (op))
2486 && GET_MODE (SUBREG_REG (op)) == DFmode)
2489 /* Reject (subreg:DF (reg:DI)). */
2490 if (GET_CODE (op) == SUBREG
2492 && REG_P (SUBREG_REG (op))
2493 && GET_MODE (SUBREG_REG (op)) == DImode)
2499 /* Darwin, AIX increases natural record alignment to doubleword if the first
2500 field is an FP double while the FP fields remain word aligned. */
2503 rs6000_special_round_type_align (tree type, int computed, int specified)
2505 tree field = TYPE_FIELDS (type);
2507 /* Skip all non field decls */
2508 while (field != NULL && TREE_CODE (field) != FIELD_DECL)
2509 field = TREE_CHAIN (field);
2511 if (field == NULL || field == type || DECL_MODE (field) != DFmode)
2512 return MAX (computed, specified);
2514 return MAX (MAX (computed, specified), 64);
2517 /* Return 1 for an operand in small memory on V.4/eabi. */
2520 small_data_operand (rtx op ATTRIBUTE_UNUSED,
2521 enum machine_mode mode ATTRIBUTE_UNUSED)
2526 if (rs6000_sdata == SDATA_NONE || rs6000_sdata == SDATA_DATA)
2529 if (DEFAULT_ABI != ABI_V4)
2532 if (GET_CODE (op) == SYMBOL_REF)
2535 else if (GET_CODE (op) != CONST
2536 || GET_CODE (XEXP (op, 0)) != PLUS
2537 || GET_CODE (XEXP (XEXP (op, 0), 0)) != SYMBOL_REF
2538 || GET_CODE (XEXP (XEXP (op, 0), 1)) != CONST_INT)
2543 rtx sum = XEXP (op, 0);
2544 HOST_WIDE_INT summand;
2546 /* We have to be careful here, because it is the referenced address
2547 that must be 32k from _SDA_BASE_, not just the symbol. */
2548 summand = INTVAL (XEXP (sum, 1));
2549 if (summand < 0 || (unsigned HOST_WIDE_INT) summand > g_switch_value)
2552 sym_ref = XEXP (sum, 0);
2555 return SYMBOL_REF_SMALL_P (sym_ref);
2561 /* Return true if either operand is a general purpose register. */
2564 gpr_or_gpr_p (rtx op0, rtx op1)
2566 return ((REG_P (op0) && INT_REGNO_P (REGNO (op0)))
2567 || (REG_P (op1) && INT_REGNO_P (REGNO (op1))));
2571 /* Subroutines of rs6000_legitimize_address and rs6000_legitimate_address. */
2574 constant_pool_expr_1 (rtx op, int *have_sym, int *have_toc)
2576 switch (GET_CODE (op))
2579 if (RS6000_SYMBOL_REF_TLS_P (op))
2581 else if (CONSTANT_POOL_ADDRESS_P (op))
2583 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (op), Pmode))
2591 else if (! strcmp (XSTR (op, 0), toc_label_name))
2600 return (constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc)
2601 && constant_pool_expr_1 (XEXP (op, 1), have_sym, have_toc));
2603 return constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc);
2612 constant_pool_expr_p (rtx op)
2616 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_sym;
2620 toc_relative_expr_p (rtx op)
2624 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_toc;
2628 legitimate_constant_pool_address_p (rtx x)
2631 && GET_CODE (x) == PLUS
2632 && GET_CODE (XEXP (x, 0)) == REG
2633 && (TARGET_MINIMAL_TOC || REGNO (XEXP (x, 0)) == TOC_REGISTER)
2634 && constant_pool_expr_p (XEXP (x, 1)));
2638 rs6000_legitimate_small_data_p (enum machine_mode mode, rtx x)
2640 return (DEFAULT_ABI == ABI_V4
2641 && !flag_pic && !TARGET_TOC
2642 && (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST)
2643 && small_data_operand (x, mode));
2646 /* SPE offset addressing is limited to 5-bits worth of double words. */
2647 #define SPE_CONST_OFFSET_OK(x) (((x) & ~0xf8) == 0)
2650 rs6000_legitimate_offset_address_p (enum machine_mode mode, rtx x, int strict)
2652 unsigned HOST_WIDE_INT offset, extra;
2654 if (GET_CODE (x) != PLUS)
2656 if (GET_CODE (XEXP (x, 0)) != REG)
2658 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
2660 if (legitimate_constant_pool_address_p (x))
2662 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
2665 offset = INTVAL (XEXP (x, 1));
2673 /* AltiVec vector modes. Only reg+reg addressing is valid and
2674 constant offset zero should not occur due to canonicalization.
2675 Allow any offset when not strict before reload. */
2682 /* SPE vector modes. */
2683 return SPE_CONST_OFFSET_OK (offset);
2686 if (TARGET_E500_DOUBLE)
2687 return SPE_CONST_OFFSET_OK (offset);
2690 /* On e500v2, we may have:
2692 (subreg:DF (mem:DI (plus (reg) (const_int))) 0).
2694 Which gets addressed with evldd instructions. */
2695 if (TARGET_E500_DOUBLE)
2696 return SPE_CONST_OFFSET_OK (offset);
2698 if (mode == DFmode || !TARGET_POWERPC64)
2700 else if (offset & 3)
2706 if (mode == TFmode || !TARGET_POWERPC64)
2708 else if (offset & 3)
2719 return (offset < 0x10000) && (offset + extra < 0x10000);
2723 legitimate_indexed_address_p (rtx x, int strict)
2727 if (GET_CODE (x) != PLUS)
2733 if (REG_P (op0) && REG_P (op1))
2734 return ((INT_REG_OK_FOR_BASE_P (op0, strict)
2735 && INT_REG_OK_FOR_INDEX_P (op1, strict))
2736 || (INT_REG_OK_FOR_BASE_P (op1, strict)
2737 && INT_REG_OK_FOR_INDEX_P (op0, strict)));
2739 /* Recognize the rtl generated by reload which we know will later be
2740 replaced by a base reg. We rely on nothing but reload generating
2741 this particular pattern, a reasonable assumption because it is not
2743 else if (reload_in_progress
2744 && GET_CODE (op0) == PLUS
2745 && REG_P (XEXP (op0, 0))
2746 && GET_CODE (XEXP (op0, 1)) == CONST_INT
2748 return INT_REG_OK_FOR_INDEX_P (op1, strict);
2753 legitimate_indirect_address_p (rtx x, int strict)
2755 return GET_CODE (x) == REG && INT_REG_OK_FOR_BASE_P (x, strict);
2759 macho_lo_sum_memory_operand (rtx x, enum machine_mode mode)
2761 if (!TARGET_MACHO || !flag_pic
2762 || mode != SImode || GET_CODE (x) != MEM)
2766 if (GET_CODE (x) != LO_SUM)
2768 if (GET_CODE (XEXP (x, 0)) != REG)
2770 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), 0))
2774 return CONSTANT_P (x);
2778 legitimate_lo_sum_address_p (enum machine_mode mode, rtx x, int strict)
2780 if (GET_CODE (x) != LO_SUM)
2782 if (GET_CODE (XEXP (x, 0)) != REG)
2784 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
2786 /* Restrict addressing for DI because of our SUBREG hackery. */
2787 if (TARGET_E500_DOUBLE && (mode == DFmode || mode == DImode))
2791 if (TARGET_ELF || TARGET_MACHO)
2793 if (DEFAULT_ABI != ABI_AIX && DEFAULT_ABI != ABI_DARWIN && flag_pic)
2797 if (GET_MODE_NUNITS (mode) != 1)
2799 if (GET_MODE_BITSIZE (mode) > 64
2800 || (GET_MODE_BITSIZE (mode) > 32 && !TARGET_POWERPC64
2801 && !(TARGET_HARD_FLOAT && TARGET_FPRS && mode == DFmode)))
2804 return CONSTANT_P (x);
2811 /* Try machine-dependent ways of modifying an illegitimate address
2812 to be legitimate. If we find one, return the new, valid address.
2813 This is used from only one place: `memory_address' in explow.c.
2815 OLDX is the address as it was before break_out_memory_refs was
2816 called. In some cases it is useful to look at this to decide what
2819 MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.
2821 It is always safe for this function to do nothing. It exists to
2822 recognize opportunities to optimize the output.
2824 On RS/6000, first check for the sum of a register with a constant
2825 integer that is out of range. If so, generate code to add the
2826 constant with the low-order 16 bits masked to the register and force
2827 this result into another register (this can be done with `cau').
2828 Then generate an address of REG+(CONST&0xffff), allowing for the
2829 possibility of bit 16 being a one.
2831 Then check for the sum of a register and something not constant, try to
2832 load the other things into a register and return the sum. */
2835 rs6000_legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
2836 enum machine_mode mode)
2838 if (GET_CODE (x) == SYMBOL_REF)
2840 enum tls_model model = SYMBOL_REF_TLS_MODEL (x);
2842 return rs6000_legitimize_tls_address (x, model);
2845 if (GET_CODE (x) == PLUS
2846 && GET_CODE (XEXP (x, 0)) == REG
2847 && GET_CODE (XEXP (x, 1)) == CONST_INT
2848 && (unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1)) + 0x8000) >= 0x10000)
2850 HOST_WIDE_INT high_int, low_int;
2852 low_int = ((INTVAL (XEXP (x, 1)) & 0xffff) ^ 0x8000) - 0x8000;
2853 high_int = INTVAL (XEXP (x, 1)) - low_int;
2854 sum = force_operand (gen_rtx_PLUS (Pmode, XEXP (x, 0),
2855 GEN_INT (high_int)), 0);
2856 return gen_rtx_PLUS (Pmode, sum, GEN_INT (low_int));
2858 else if (GET_CODE (x) == PLUS
2859 && GET_CODE (XEXP (x, 0)) == REG
2860 && GET_CODE (XEXP (x, 1)) != CONST_INT
2861 && GET_MODE_NUNITS (mode) == 1
2862 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
2864 || (((mode != DImode && mode != DFmode) || TARGET_E500_DOUBLE)
2866 && (TARGET_POWERPC64 || mode != DImode)
2869 return gen_rtx_PLUS (Pmode, XEXP (x, 0),
2870 force_reg (Pmode, force_operand (XEXP (x, 1), 0)));
2872 else if (ALTIVEC_VECTOR_MODE (mode))
2876 /* Make sure both operands are registers. */
2877 if (GET_CODE (x) == PLUS)
2878 return gen_rtx_PLUS (Pmode, force_reg (Pmode, XEXP (x, 0)),
2879 force_reg (Pmode, XEXP (x, 1)));
2881 reg = force_reg (Pmode, x);
2884 else if (SPE_VECTOR_MODE (mode)
2885 || (TARGET_E500_DOUBLE && (mode == DFmode
2886 || mode == DImode)))
2890 /* We accept [reg + reg] and [reg + OFFSET]. */
2892 if (GET_CODE (x) == PLUS)
2894 rtx op1 = XEXP (x, 0);
2895 rtx op2 = XEXP (x, 1);
2897 op1 = force_reg (Pmode, op1);
2899 if (GET_CODE (op2) != REG
2900 && (GET_CODE (op2) != CONST_INT
2901 || !SPE_CONST_OFFSET_OK (INTVAL (op2))))
2902 op2 = force_reg (Pmode, op2);
2904 return gen_rtx_PLUS (Pmode, op1, op2);
2907 return force_reg (Pmode, x);
2913 && GET_CODE (x) != CONST_INT
2914 && GET_CODE (x) != CONST_DOUBLE
2916 && GET_MODE_NUNITS (mode) == 1
2917 && (GET_MODE_BITSIZE (mode) <= 32
2918 || ((TARGET_HARD_FLOAT && TARGET_FPRS) && mode == DFmode)))
2920 rtx reg = gen_reg_rtx (Pmode);
2921 emit_insn (gen_elf_high (reg, x));
2922 return gen_rtx_LO_SUM (Pmode, reg, x);
2924 else if (TARGET_MACHO && TARGET_32BIT && TARGET_NO_TOC
2927 && ! MACHO_DYNAMIC_NO_PIC_P
2929 && GET_CODE (x) != CONST_INT
2930 && GET_CODE (x) != CONST_DOUBLE
2932 && ((TARGET_HARD_FLOAT && TARGET_FPRS) || mode != DFmode)
2936 rtx reg = gen_reg_rtx (Pmode);
2937 emit_insn (gen_macho_high (reg, x));
2938 return gen_rtx_LO_SUM (Pmode, reg, x);
2941 && constant_pool_expr_p (x)
2942 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), Pmode))
2944 return create_TOC_reference (x);
2950 /* This is called from dwarf2out.c via TARGET_ASM_OUTPUT_DWARF_DTPREL.
2951 We need to emit DTP-relative relocations. */
2954 rs6000_output_dwarf_dtprel (FILE *file, int size, rtx x)
2959 fputs ("\t.long\t", file);
2962 fputs (DOUBLE_INT_ASM_OP, file);
2967 output_addr_const (file, x);
2968 fputs ("@dtprel+0x8000", file);
2971 /* Construct the SYMBOL_REF for the tls_get_addr function. */
2973 static GTY(()) rtx rs6000_tls_symbol;
2975 rs6000_tls_get_addr (void)
2977 if (!rs6000_tls_symbol)
2978 rs6000_tls_symbol = init_one_libfunc ("__tls_get_addr");
2980 return rs6000_tls_symbol;
2983 /* Construct the SYMBOL_REF for TLS GOT references. */
2985 static GTY(()) rtx rs6000_got_symbol;
2987 rs6000_got_sym (void)
2989 if (!rs6000_got_symbol)
2991 rs6000_got_symbol = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
2992 SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_LOCAL;
2993 SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_EXTERNAL;
2996 return rs6000_got_symbol;
2999 /* ADDR contains a thread-local SYMBOL_REF. Generate code to compute
3000 this (thread-local) address. */
3003 rs6000_legitimize_tls_address (rtx addr, enum tls_model model)
3007 dest = gen_reg_rtx (Pmode);
3008 if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 16)
3014 tlsreg = gen_rtx_REG (Pmode, 13);
3015 insn = gen_tls_tprel_64 (dest, tlsreg, addr);
3019 tlsreg = gen_rtx_REG (Pmode, 2);
3020 insn = gen_tls_tprel_32 (dest, tlsreg, addr);
3024 else if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 32)
3028 tmp = gen_reg_rtx (Pmode);
3031 tlsreg = gen_rtx_REG (Pmode, 13);
3032 insn = gen_tls_tprel_ha_64 (tmp, tlsreg, addr);
3036 tlsreg = gen_rtx_REG (Pmode, 2);
3037 insn = gen_tls_tprel_ha_32 (tmp, tlsreg, addr);
3041 insn = gen_tls_tprel_lo_64 (dest, tmp, addr);
3043 insn = gen_tls_tprel_lo_32 (dest, tmp, addr);
3048 rtx r3, got, tga, tmp1, tmp2, eqv;
3050 /* We currently use relocations like @got@tlsgd for tls, which
3051 means the linker will handle allocation of tls entries, placing
3052 them in the .got section. So use a pointer to the .got section,
3053 not one to secondary TOC sections used by 64-bit -mminimal-toc,
3054 or to secondary GOT sections used by 32-bit -fPIC. */
3056 got = gen_rtx_REG (Pmode, 2);
3060 got = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
3063 rtx gsym = rs6000_got_sym ();
3064 got = gen_reg_rtx (Pmode);
3066 rs6000_emit_move (got, gsym, Pmode);
3069 rtx tempLR, tmp3, mem;
3072 tempLR = gen_reg_rtx (Pmode);
3073 tmp1 = gen_reg_rtx (Pmode);
3074 tmp2 = gen_reg_rtx (Pmode);
3075 tmp3 = gen_reg_rtx (Pmode);
3076 mem = gen_const_mem (Pmode, tmp1);
3078 first = emit_insn (gen_load_toc_v4_PIC_1b (tempLR, gsym));
3079 emit_move_insn (tmp1, tempLR);
3080 emit_move_insn (tmp2, mem);
3081 emit_insn (gen_addsi3 (tmp3, tmp1, tmp2));
3082 last = emit_move_insn (got, tmp3);
3083 REG_NOTES (last) = gen_rtx_EXPR_LIST (REG_EQUAL, gsym,
3085 REG_NOTES (first) = gen_rtx_INSN_LIST (REG_LIBCALL, last,
3087 REG_NOTES (last) = gen_rtx_INSN_LIST (REG_RETVAL, first,
3093 if (model == TLS_MODEL_GLOBAL_DYNAMIC)
3095 r3 = gen_rtx_REG (Pmode, 3);
3097 insn = gen_tls_gd_64 (r3, got, addr);
3099 insn = gen_tls_gd_32 (r3, got, addr);
3102 tga = gen_rtx_MEM (Pmode, rs6000_tls_get_addr ());
3103 insn = gen_call_value (r3, tga, const0_rtx, const0_rtx);
3104 insn = emit_call_insn (insn);
3105 CONST_OR_PURE_CALL_P (insn) = 1;
3106 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
3107 insn = get_insns ();
3109 emit_libcall_block (insn, dest, r3, addr);
3111 else if (model == TLS_MODEL_LOCAL_DYNAMIC)
3113 r3 = gen_rtx_REG (Pmode, 3);
3115 insn = gen_tls_ld_64 (r3, got);
3117 insn = gen_tls_ld_32 (r3, got);
3120 tga = gen_rtx_MEM (Pmode, rs6000_tls_get_addr ());
3121 insn = gen_call_value (r3, tga, const0_rtx, const0_rtx);
3122 insn = emit_call_insn (insn);
3123 CONST_OR_PURE_CALL_P (insn) = 1;
3124 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
3125 insn = get_insns ();
3127 tmp1 = gen_reg_rtx (Pmode);
3128 eqv = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx),
3130 emit_libcall_block (insn, tmp1, r3, eqv);
3131 if (rs6000_tls_size == 16)
3134 insn = gen_tls_dtprel_64 (dest, tmp1, addr);
3136 insn = gen_tls_dtprel_32 (dest, tmp1, addr);
3138 else if (rs6000_tls_size == 32)
3140 tmp2 = gen_reg_rtx (Pmode);
3142 insn = gen_tls_dtprel_ha_64 (tmp2, tmp1, addr);
3144 insn = gen_tls_dtprel_ha_32 (tmp2, tmp1, addr);
3147 insn = gen_tls_dtprel_lo_64 (dest, tmp2, addr);
3149 insn = gen_tls_dtprel_lo_32 (dest, tmp2, addr);
3153 tmp2 = gen_reg_rtx (Pmode);
3155 insn = gen_tls_got_dtprel_64 (tmp2, got, addr);
3157 insn = gen_tls_got_dtprel_32 (tmp2, got, addr);
3159 insn = gen_rtx_SET (Pmode, dest,
3160 gen_rtx_PLUS (Pmode, tmp2, tmp1));
3166 /* IE, or 64 bit offset LE. */
3167 tmp2 = gen_reg_rtx (Pmode);
3169 insn = gen_tls_got_tprel_64 (tmp2, got, addr);
3171 insn = gen_tls_got_tprel_32 (tmp2, got, addr);
3174 insn = gen_tls_tls_64 (dest, tmp2, addr);
3176 insn = gen_tls_tls_32 (dest, tmp2, addr);
3184 /* Return 1 if X contains a thread-local symbol. */
3187 rs6000_tls_referenced_p (rtx x)
3189 if (! TARGET_HAVE_TLS)
3192 return for_each_rtx (&x, &rs6000_tls_symbol_ref_1, 0);
3195 /* Return 1 if *X is a thread-local symbol. This is the same as
3196 rs6000_tls_symbol_ref except for the type of the unused argument. */
3199 rs6000_tls_symbol_ref_1 (rtx *x, void *data ATTRIBUTE_UNUSED)
3201 return RS6000_SYMBOL_REF_TLS_P (*x);
3204 /* The convention appears to be to define this wherever it is used.
3205 With legitimize_reload_address now defined here, REG_MODE_OK_FOR_BASE_P
3206 is now used here. */
3207 #ifndef REG_MODE_OK_FOR_BASE_P
3208 #define REG_MODE_OK_FOR_BASE_P(REGNO, MODE) REG_OK_FOR_BASE_P (REGNO)
3211 /* Our implementation of LEGITIMIZE_RELOAD_ADDRESS. Returns a value to
3212 replace the input X, or the original X if no replacement is called for.
3213 The output parameter *WIN is 1 if the calling macro should goto WIN,
3216 For RS/6000, we wish to handle large displacements off a base
3217 register by splitting the addend across an addiu/addis and the mem insn.
3218 This cuts number of extra insns needed from 3 to 1.
3220 On Darwin, we use this to generate code for floating point constants.
3221 A movsf_low is generated so we wind up with 2 instructions rather than 3.
3222 The Darwin code is inside #if TARGET_MACHO because only then is
3223 machopic_function_base_name() defined. */
3225 rs6000_legitimize_reload_address (rtx x, enum machine_mode mode,
3226 int opnum, int type,
3227 int ind_levels ATTRIBUTE_UNUSED, int *win)
3229 /* We must recognize output that we have already generated ourselves. */
3230 if (GET_CODE (x) == PLUS
3231 && GET_CODE (XEXP (x, 0)) == PLUS
3232 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
3233 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3234 && GET_CODE (XEXP (x, 1)) == CONST_INT)
3236 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
3237 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
3238 opnum, (enum reload_type)type);
3244 if (DEFAULT_ABI == ABI_DARWIN && flag_pic
3245 && GET_CODE (x) == LO_SUM
3246 && GET_CODE (XEXP (x, 0)) == PLUS
3247 && XEXP (XEXP (x, 0), 0) == pic_offset_table_rtx
3248 && GET_CODE (XEXP (XEXP (x, 0), 1)) == HIGH
3249 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 1), 0)) == CONST
3250 && XEXP (XEXP (XEXP (x, 0), 1), 0) == XEXP (x, 1)
3251 && GET_CODE (XEXP (XEXP (x, 1), 0)) == MINUS
3252 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == SYMBOL_REF
3253 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == SYMBOL_REF)
3255 /* Result of previous invocation of this function on Darwin
3256 floating point constant. */
3257 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
3258 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
3259 opnum, (enum reload_type)type);
3265 /* Force ld/std non-word aligned offset into base register by wrapping
3267 if (GET_CODE (x) == PLUS
3268 && GET_CODE (XEXP (x, 0)) == REG
3269 && REGNO (XEXP (x, 0)) < 32
3270 && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
3271 && GET_CODE (XEXP (x, 1)) == CONST_INT
3272 && (INTVAL (XEXP (x, 1)) & 3) != 0
3273 && !ALTIVEC_VECTOR_MODE (mode)
3274 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD
3275 && TARGET_POWERPC64)
3277 x = gen_rtx_PLUS (GET_MODE (x), x, GEN_INT (0));
3278 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
3279 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
3280 opnum, (enum reload_type) type);
3285 if (GET_CODE (x) == PLUS
3286 && GET_CODE (XEXP (x, 0)) == REG
3287 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
3288 && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
3289 && GET_CODE (XEXP (x, 1)) == CONST_INT
3290 && !SPE_VECTOR_MODE (mode)
3291 && !(TARGET_E500_DOUBLE && (mode == DFmode
3293 && !ALTIVEC_VECTOR_MODE (mode))
3295 HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
3296 HOST_WIDE_INT low = ((val & 0xffff) ^ 0x8000) - 0x8000;
3298 = (((val - low) & 0xffffffff) ^ 0x80000000) - 0x80000000;
3300 /* Check for 32-bit overflow. */
3301 if (high + low != val)
3307 /* Reload the high part into a base reg; leave the low part
3308 in the mem directly. */
3310 x = gen_rtx_PLUS (GET_MODE (x),
3311 gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0),
3315 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
3316 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
3317 opnum, (enum reload_type)type);
3322 if (GET_CODE (x) == SYMBOL_REF
3323 && !ALTIVEC_VECTOR_MODE (mode)
3325 && DEFAULT_ABI == ABI_DARWIN
3326 && (flag_pic || MACHO_DYNAMIC_NO_PIC_P)
3328 && DEFAULT_ABI == ABI_V4
3331 /* Don't do this for TFmode, since the result isn't offsettable.
3332 The same goes for DImode without 64-bit gprs. */
3334 && (mode != DImode || TARGET_POWERPC64))
3339 rtx offset = gen_rtx_CONST (Pmode,
3340 gen_rtx_MINUS (Pmode, x,
3341 machopic_function_base_sym ()));
3342 x = gen_rtx_LO_SUM (GET_MODE (x),
3343 gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
3344 gen_rtx_HIGH (Pmode, offset)), offset);
3348 x = gen_rtx_LO_SUM (GET_MODE (x),
3349 gen_rtx_HIGH (Pmode, x), x);
3351 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
3352 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
3353 opnum, (enum reload_type)type);
3358 /* Reload an offset address wrapped by an AND that represents the
3359 masking of the lower bits. Strip the outer AND and let reload
3360 convert the offset address into an indirect address. */
3362 && ALTIVEC_VECTOR_MODE (mode)
3363 && GET_CODE (x) == AND
3364 && GET_CODE (XEXP (x, 0)) == PLUS
3365 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
3366 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3367 && GET_CODE (XEXP (x, 1)) == CONST_INT
3368 && INTVAL (XEXP (x, 1)) == -16)
3376 && constant_pool_expr_p (x)
3377 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), mode))
3379 (x) = create_TOC_reference (x);
3387 /* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
3388 that is a valid memory address for an instruction.
3389 The MODE argument is the machine mode for the MEM expression
3390 that wants to use this address.
3392 On the RS/6000, there are four valid address: a SYMBOL_REF that
3393 refers to a constant pool entry of an address (or the sum of it
3394 plus a constant), a short (16-bit signed) constant plus a register,
3395 the sum of two registers, or a register indirect, possibly with an
3396 auto-increment. For DFmode and DImode with a constant plus register,
3397 we must ensure that both words are addressable or PowerPC64 with offset
3400 For modes spanning multiple registers (DFmode in 32-bit GPRs,
3401 32-bit DImode, TImode, TFmode), indexed addressing cannot be used because
3402 adjacent memory cells are accessed by adding word-sized offsets
3403 during assembly output. */
3405 rs6000_legitimate_address (enum machine_mode mode, rtx x, int reg_ok_strict)
3407 /* If this is an unaligned stvx/ldvx type address, discard the outer AND. */
3409 && ALTIVEC_VECTOR_MODE (mode)
3410 && GET_CODE (x) == AND
3411 && GET_CODE (XEXP (x, 1)) == CONST_INT
3412 && INTVAL (XEXP (x, 1)) == -16)
3415 if (RS6000_SYMBOL_REF_TLS_P (x))
3417 if (legitimate_indirect_address_p (x, reg_ok_strict))
3419 if ((GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
3420 && !ALTIVEC_VECTOR_MODE (mode)
3421 && !SPE_VECTOR_MODE (mode)
3422 /* Restrict addressing for DI because of our SUBREG hackery. */
3423 && !(TARGET_E500_DOUBLE && (mode == DFmode || mode == DImode))
3425 && legitimate_indirect_address_p (XEXP (x, 0), reg_ok_strict))
3427 if (rs6000_legitimate_small_data_p (mode, x))
3429 if (legitimate_constant_pool_address_p (x))
3431 /* If not REG_OK_STRICT (before reload) let pass any stack offset. */
3433 && GET_CODE (x) == PLUS
3434 && GET_CODE (XEXP (x, 0)) == REG
3435 && (XEXP (x, 0) == virtual_stack_vars_rtx
3436 || XEXP (x, 0) == arg_pointer_rtx)
3437 && GET_CODE (XEXP (x, 1)) == CONST_INT)
3439 if (rs6000_legitimate_offset_address_p (mode, x, reg_ok_strict))
3443 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
3445 || ((mode != DFmode || TARGET_E500_DOUBLE) && mode != TFmode))
3446 && (TARGET_POWERPC64 || mode != DImode)
3447 && legitimate_indexed_address_p (x, reg_ok_strict))
3449 if (legitimate_lo_sum_address_p (mode, x, reg_ok_strict))
3454 /* Go to LABEL if ADDR (a legitimate address expression)
3455 has an effect that depends on the machine mode it is used for.
3457 On the RS/6000 this is true of all integral offsets (since AltiVec
3458 modes don't allow them) or is a pre-increment or decrement.
3460 ??? Except that due to conceptual problems in offsettable_address_p
3461 we can't really report the problems of integral offsets. So leave
3462 this assuming that the adjustable offset must be valid for the
3463 sub-words of a TFmode operand, which is what we had before. */
3466 rs6000_mode_dependent_address (rtx addr)
3468 switch (GET_CODE (addr))
3471 if (GET_CODE (XEXP (addr, 1)) == CONST_INT)
3473 unsigned HOST_WIDE_INT val = INTVAL (XEXP (addr, 1));
3474 return val + 12 + 0x8000 >= 0x10000;
3483 return TARGET_UPDATE;
3492 /* Return number of consecutive hard regs needed starting at reg REGNO
3493 to hold something of mode MODE.
3494 This is ordinarily the length in words of a value of mode MODE
3495 but can be less for certain modes in special long registers.
3497 For the SPE, GPRs are 64 bits but only 32 bits are visible in
3498 scalar instructions. The upper 32 bits are only available to the
3501 POWER and PowerPC GPRs hold 32 bits worth;
3502 PowerPC64 GPRs and FPRs point register holds 64 bits worth. */
3505 rs6000_hard_regno_nregs (int regno, enum machine_mode mode)
3507 if (FP_REGNO_P (regno))
3508 return (GET_MODE_SIZE (mode) + UNITS_PER_FP_WORD - 1) / UNITS_PER_FP_WORD;
3510 if (TARGET_E500_DOUBLE && mode == DFmode)
3513 if (SPE_SIMD_REGNO_P (regno) && TARGET_SPE && SPE_VECTOR_MODE (mode))
3514 return (GET_MODE_SIZE (mode) + UNITS_PER_SPE_WORD - 1) / UNITS_PER_SPE_WORD;
3516 if (ALTIVEC_REGNO_P (regno))
3518 (GET_MODE_SIZE (mode) + UNITS_PER_ALTIVEC_WORD - 1) / UNITS_PER_ALTIVEC_WORD;
3520 return (GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
3523 /* Change register usage conditional on target flags. */
3525 rs6000_conditional_register_usage (void)
3529 /* Set MQ register fixed (already call_used) if not POWER
3530 architecture (RIOS1, RIOS2, RSC, and PPC601) so that it will not
3535 /* 64-bit AIX and Linux reserve GPR13 for thread-private data. */
3537 fixed_regs[13] = call_used_regs[13]
3538 = call_really_used_regs[13] = 1;
3540 /* Conditionally disable FPRs. */
3541 if (TARGET_SOFT_FLOAT || !TARGET_FPRS)
3542 for (i = 32; i < 64; i++)
3543 fixed_regs[i] = call_used_regs[i]
3544 = call_really_used_regs[i] = 1;
3546 /* The TOC register is not killed across calls in a way that is
3547 visible to the compiler. */
3548 if (DEFAULT_ABI == ABI_AIX)
3549 call_really_used_regs[2] = 0;
3551 if (DEFAULT_ABI == ABI_V4
3552 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
3554 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
3556 if (DEFAULT_ABI == ABI_V4
3557 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
3559 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
3560 = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
3561 = call_really_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
3563 if (DEFAULT_ABI == ABI_DARWIN
3564 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
3565 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
3566 = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
3567 = call_really_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
3569 if (TARGET_TOC && TARGET_MINIMAL_TOC)
3570 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
3571 = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
3574 global_regs[VSCR_REGNO] = 1;
3578 global_regs[SPEFSCR_REGNO] = 1;
3579 fixed_regs[FIXED_SCRATCH]
3580 = call_used_regs[FIXED_SCRATCH]
3581 = call_really_used_regs[FIXED_SCRATCH] = 1;
3584 if (! TARGET_ALTIVEC)
3586 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
3587 fixed_regs[i] = call_used_regs[i] = call_really_used_regs[i] = 1;
3588 call_really_used_regs[VRSAVE_REGNO] = 1;
3591 if (TARGET_ALTIVEC_ABI)
3592 for (i = FIRST_ALTIVEC_REGNO; i < FIRST_ALTIVEC_REGNO + 20; ++i)
3593 call_used_regs[i] = call_really_used_regs[i] = 1;
3596 /* Try to output insns to set TARGET equal to the constant C if it can
3597 be done in less than N insns. Do all computations in MODE.
3598 Returns the place where the output has been placed if it can be
3599 done and the insns have been emitted. If it would take more than N
3600 insns, zero is returned and no insns and emitted. */
3603 rs6000_emit_set_const (rtx dest, enum machine_mode mode,
3604 rtx source, int n ATTRIBUTE_UNUSED)
3606 rtx result, insn, set;
3607 HOST_WIDE_INT c0, c1;
3614 dest = gen_reg_rtx (mode);
3615 emit_insn (gen_rtx_SET (VOIDmode, dest, source));
3619 result = no_new_pseudos ? dest : gen_reg_rtx (SImode);
3621 emit_insn (gen_rtx_SET (VOIDmode, result,
3622 GEN_INT (INTVAL (source)
3623 & (~ (HOST_WIDE_INT) 0xffff))));
3624 emit_insn (gen_rtx_SET (VOIDmode, dest,
3625 gen_rtx_IOR (SImode, result,
3626 GEN_INT (INTVAL (source) & 0xffff))));
3631 switch (GET_CODE (source))
3634 c0 = INTVAL (source);
3639 #if HOST_BITS_PER_WIDE_INT >= 64
3640 c0 = CONST_DOUBLE_LOW (source);
3643 c0 = CONST_DOUBLE_LOW (source);
3644 c1 = CONST_DOUBLE_HIGH (source);
3652 result = rs6000_emit_set_long_const (dest, c0, c1);
3659 insn = get_last_insn ();
3660 set = single_set (insn);
3661 if (! CONSTANT_P (SET_SRC (set)))
3662 set_unique_reg_note (insn, REG_EQUAL, source);
3667 /* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
3668 fall back to a straight forward decomposition. We do this to avoid
3669 exponential run times encountered when looking for longer sequences
3670 with rs6000_emit_set_const. */
3672 rs6000_emit_set_long_const (rtx dest, HOST_WIDE_INT c1, HOST_WIDE_INT c2)
3674 if (!TARGET_POWERPC64)
3676 rtx operand1, operand2;
3678 operand1 = operand_subword_force (dest, WORDS_BIG_ENDIAN == 0,
3680 operand2 = operand_subword_force (dest, WORDS_BIG_ENDIAN != 0,
3682 emit_move_insn (operand1, GEN_INT (c1));
3683 emit_move_insn (operand2, GEN_INT (c2));
3687 HOST_WIDE_INT ud1, ud2, ud3, ud4;
3690 ud2 = (c1 & 0xffff0000) >> 16;
3691 #if HOST_BITS_PER_WIDE_INT >= 64
3695 ud4 = (c2 & 0xffff0000) >> 16;
3697 if ((ud4 == 0xffff && ud3 == 0xffff && ud2 == 0xffff && (ud1 & 0x8000))
3698 || (ud4 == 0 && ud3 == 0 && ud2 == 0 && ! (ud1 & 0x8000)))
3701 emit_move_insn (dest, GEN_INT (((ud1 ^ 0x8000) - 0x8000)));
3703 emit_move_insn (dest, GEN_INT (ud1));
3706 else if ((ud4 == 0xffff && ud3 == 0xffff && (ud2 & 0x8000))
3707 || (ud4 == 0 && ud3 == 0 && ! (ud2 & 0x8000)))
3710 emit_move_insn (dest, GEN_INT (((ud2 << 16) ^ 0x80000000)
3713 emit_move_insn (dest, GEN_INT (ud2 << 16));
3715 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
3717 else if ((ud4 == 0xffff && (ud3 & 0x8000))
3718 || (ud4 == 0 && ! (ud3 & 0x8000)))
3721 emit_move_insn (dest, GEN_INT (((ud3 << 16) ^ 0x80000000)
3724 emit_move_insn (dest, GEN_INT (ud3 << 16));
3727 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud2)));
3728 emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (16)));
3730 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
3735 emit_move_insn (dest, GEN_INT (((ud4 << 16) ^ 0x80000000)
3738 emit_move_insn (dest, GEN_INT (ud4 << 16));
3741 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud3)));
3743 emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (32)));
3745 emit_move_insn (dest, gen_rtx_IOR (DImode, dest,
3746 GEN_INT (ud2 << 16)));
3748 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
3754 /* Helper for the following. Get rid of [r+r] memory refs
3755 in cases where it won't work (TImode, TFmode). */
3758 rs6000_eliminate_indexed_memrefs (rtx operands[2])
3760 if (GET_CODE (operands[0]) == MEM
3761 && GET_CODE (XEXP (operands[0], 0)) != REG
3762 && ! legitimate_constant_pool_address_p (XEXP (operands[0], 0))
3763 && ! reload_in_progress)
3765 = replace_equiv_address (operands[0],
3766 copy_addr_to_reg (XEXP (operands[0], 0)));
3768 if (GET_CODE (operands[1]) == MEM
3769 && GET_CODE (XEXP (operands[1], 0)) != REG
3770 && ! legitimate_constant_pool_address_p (XEXP (operands[1], 0))
3771 && ! reload_in_progress)
3773 = replace_equiv_address (operands[1],
3774 copy_addr_to_reg (XEXP (operands[1], 0)));
3777 /* Emit a move from SOURCE to DEST in mode MODE. */
3779 rs6000_emit_move (rtx dest, rtx source, enum machine_mode mode)
3783 operands[1] = source;
3785 /* Sanity checks. Check that we get CONST_DOUBLE only when we should. */
3786 if (GET_CODE (operands[1]) == CONST_DOUBLE
3787 && ! FLOAT_MODE_P (mode)
3788 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3790 /* FIXME. This should never happen. */
3791 /* Since it seems that it does, do the safe thing and convert
3793 operands[1] = gen_int_mode (CONST_DOUBLE_LOW (operands[1]), mode);
3795 gcc_assert (GET_CODE (operands[1]) != CONST_DOUBLE
3796 || FLOAT_MODE_P (mode)
3797 || ((CONST_DOUBLE_HIGH (operands[1]) != 0
3798 || CONST_DOUBLE_LOW (operands[1]) < 0)
3799 && (CONST_DOUBLE_HIGH (operands[1]) != -1
3800 || CONST_DOUBLE_LOW (operands[1]) >= 0)));
3802 /* Check if GCC is setting up a block move that will end up using FP
3803 registers as temporaries. We must make sure this is acceptable. */
3804 if (GET_CODE (operands[0]) == MEM
3805 && GET_CODE (operands[1]) == MEM
3807 && (SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[0]))
3808 || SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[1])))
3809 && ! (SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[0]) > 32
3810 ? 32 : MEM_ALIGN (operands[0])))
3811 || SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[1]) > 32
3813 : MEM_ALIGN (operands[1]))))
3814 && ! MEM_VOLATILE_P (operands [0])
3815 && ! MEM_VOLATILE_P (operands [1]))
3817 emit_move_insn (adjust_address (operands[0], SImode, 0),
3818 adjust_address (operands[1], SImode, 0));
3819 emit_move_insn (adjust_address (operands[0], SImode, 4),
3820 adjust_address (operands[1], SImode, 4));
3824 if (!no_new_pseudos && GET_CODE (operands[0]) == MEM
3825 && !gpc_reg_operand (operands[1], mode))
3826 operands[1] = force_reg (mode, operands[1]);
3828 if (mode == SFmode && ! TARGET_POWERPC
3829 && TARGET_HARD_FLOAT && TARGET_FPRS
3830 && GET_CODE (operands[0]) == MEM)
3834 if (reload_in_progress || reload_completed)
3835 regnum = true_regnum (operands[1]);
3836 else if (GET_CODE (operands[1]) == REG)
3837 regnum = REGNO (operands[1]);
3841 /* If operands[1] is a register, on POWER it may have
3842 double-precision data in it, so truncate it to single
3844 if (FP_REGNO_P (regnum) || regnum >= FIRST_PSEUDO_REGISTER)
3847 newreg = (no_new_pseudos ? operands[1] : gen_reg_rtx (mode));
3848 emit_insn (gen_aux_truncdfsf2 (newreg, operands[1]));
3849 operands[1] = newreg;
3853 /* Recognize the case where operand[1] is a reference to thread-local
3854 data and load its address to a register. */
3855 if (rs6000_tls_referenced_p (operands[1]))
3857 enum tls_model model;
3858 rtx tmp = operands[1];
3861 if (GET_CODE (tmp) == CONST && GET_CODE (XEXP (tmp, 0)) == PLUS)
3863 addend = XEXP (XEXP (tmp, 0), 1);
3864 tmp = XEXP (XEXP (tmp, 0), 0);
3867 gcc_assert (GET_CODE (tmp) == SYMBOL_REF);
3868 model = SYMBOL_REF_TLS_MODEL (tmp);
3869 gcc_assert (model != 0);
3871 tmp = rs6000_legitimize_tls_address (tmp, model);
3874 tmp = gen_rtx_PLUS (mode, tmp, addend);
3875 tmp = force_operand (tmp, operands[0]);
3880 /* Handle the case where reload calls us with an invalid address. */
3881 if (reload_in_progress && mode == Pmode
3882 && (! general_operand (operands[1], mode)
3883 || ! nonimmediate_operand (operands[0], mode)))
3886 /* 128-bit constant floating-point values on Darwin should really be
3887 loaded as two parts. */
3888 if ((DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_DARWIN)
3889 && TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_LONG_DOUBLE_128
3890 && mode == TFmode && GET_CODE (operands[1]) == CONST_DOUBLE)
3892 /* DImode is used, not DFmode, because simplify_gen_subreg doesn't
3893 know how to get a DFmode SUBREG of a TFmode. */
3894 rs6000_emit_move (simplify_gen_subreg (DImode, operands[0], mode, 0),
3895 simplify_gen_subreg (DImode, operands[1], mode, 0),
3897 rs6000_emit_move (simplify_gen_subreg (DImode, operands[0], mode,
3898 GET_MODE_SIZE (DImode)),
3899 simplify_gen_subreg (DImode, operands[1], mode,
3900 GET_MODE_SIZE (DImode)),
3905 /* FIXME: In the long term, this switch statement should go away
3906 and be replaced by a sequence of tests based on things like
3912 if (CONSTANT_P (operands[1])
3913 && GET_CODE (operands[1]) != CONST_INT)
3914 operands[1] = force_const_mem (mode, operands[1]);
3918 rs6000_eliminate_indexed_memrefs (operands);
3923 if (CONSTANT_P (operands[1])
3924 && ! easy_fp_constant (operands[1], mode))
3925 operands[1] = force_const_mem (mode, operands[1]);
3936 if (CONSTANT_P (operands[1])
3937 && !easy_vector_constant (operands[1], mode))
3938 operands[1] = force_const_mem (mode, operands[1]);
3943 /* Use default pattern for address of ELF small data */
3946 && DEFAULT_ABI == ABI_V4
3947 && (GET_CODE (operands[1]) == SYMBOL_REF
3948 || GET_CODE (operands[1]) == CONST)
3949 && small_data_operand (operands[1], mode))
3951 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
3955 if (DEFAULT_ABI == ABI_V4
3956 && mode == Pmode && mode == SImode
3957 && flag_pic == 1 && got_operand (operands[1], mode))
3959 emit_insn (gen_movsi_got (operands[0], operands[1]));
3963 if ((TARGET_ELF || DEFAULT_ABI == ABI_DARWIN)
3967 && CONSTANT_P (operands[1])
3968 && GET_CODE (operands[1]) != HIGH
3969 && GET_CODE (operands[1]) != CONST_INT)
3971 rtx target = (no_new_pseudos ? operands[0] : gen_reg_rtx (mode));
3973 /* If this is a function address on -mcall-aixdesc,
3974 convert it to the address of the descriptor. */
3975 if (DEFAULT_ABI == ABI_AIX
3976 && GET_CODE (operands[1]) == SYMBOL_REF
3977 && XSTR (operands[1], 0)[0] == '.')
3979 const char *name = XSTR (operands[1], 0);
3981 while (*name == '.')
3983 new_ref = gen_rtx_SYMBOL_REF (Pmode, name);
3984 CONSTANT_POOL_ADDRESS_P (new_ref)
3985 = CONSTANT_POOL_ADDRESS_P (operands[1]);
3986 SYMBOL_REF_FLAGS (new_ref) = SYMBOL_REF_FLAGS (operands[1]);
3987 SYMBOL_REF_USED (new_ref) = SYMBOL_REF_USED (operands[1]);
3988 SYMBOL_REF_DECL (new_ref) = SYMBOL_REF_DECL (operands[1]);
3989 operands[1] = new_ref;
3992 if (DEFAULT_ABI == ABI_DARWIN)
3995 if (MACHO_DYNAMIC_NO_PIC_P)
3997 /* Take care of any required data indirection. */
3998 operands[1] = rs6000_machopic_legitimize_pic_address (
3999 operands[1], mode, operands[0]);
4000 if (operands[0] != operands[1])
4001 emit_insn (gen_rtx_SET (VOIDmode,
4002 operands[0], operands[1]));
4006 emit_insn (gen_macho_high (target, operands[1]));
4007 emit_insn (gen_macho_low (operands[0], target, operands[1]));
4011 emit_insn (gen_elf_high (target, operands[1]));
4012 emit_insn (gen_elf_low (operands[0], target, operands[1]));
4016 /* If this is a SYMBOL_REF that refers to a constant pool entry,
4017 and we have put it in the TOC, we just need to make a TOC-relative
4020 && GET_CODE (operands[1]) == SYMBOL_REF
4021 && constant_pool_expr_p (operands[1])
4022 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands[1]),
4023 get_pool_mode (operands[1])))
4025 operands[1] = create_TOC_reference (operands[1]);
4027 else if (mode == Pmode
4028 && CONSTANT_P (operands[1])
4029 && ((GET_CODE (operands[1]) != CONST_INT
4030 && ! easy_fp_constant (operands[1], mode))
4031 || (GET_CODE (operands[1]) == CONST_INT
4032 && num_insns_constant (operands[1], mode) > 2)
4033 || (GET_CODE (operands[0]) == REG
4034 && FP_REGNO_P (REGNO (operands[0]))))
4035 && GET_CODE (operands[1]) != HIGH
4036 && ! legitimate_constant_pool_address_p (operands[1])
4037 && ! toc_relative_expr_p (operands[1]))
4039 /* Emit a USE operation so that the constant isn't deleted if
4040 expensive optimizations are turned on because nobody
4041 references it. This should only be done for operands that
4042 contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
4043 This should not be done for operands that contain LABEL_REFs.
4044 For now, we just handle the obvious case. */
4045 if (GET_CODE (operands[1]) != LABEL_REF)
4046 emit_insn (gen_rtx_USE (VOIDmode, operands[1]));
4049 /* Darwin uses a special PIC legitimizer. */
4050 if (DEFAULT_ABI == ABI_DARWIN && MACHOPIC_INDIRECT)
4053 rs6000_machopic_legitimize_pic_address (operands[1], mode,
4055 if (operands[0] != operands[1])
4056 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
4061 /* If we are to limit the number of things we put in the TOC and
4062 this is a symbol plus a constant we can add in one insn,
4063 just put the symbol in the TOC and add the constant. Don't do
4064 this if reload is in progress. */
4065 if (GET_CODE (operands[1]) == CONST
4066 && TARGET_NO_SUM_IN_TOC && ! reload_in_progress
4067 && GET_CODE (XEXP (operands[1], 0)) == PLUS
4068 && add_operand (XEXP (XEXP (operands[1], 0), 1), mode)
4069 && (GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == LABEL_REF
4070 || GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF)
4071 && ! side_effects_p (operands[0]))
4074 force_const_mem (mode, XEXP (XEXP (operands[1], 0), 0));
4075 rtx other = XEXP (XEXP (operands[1], 0), 1);
4077 sym = force_reg (mode, sym);
4079 emit_insn (gen_addsi3 (operands[0], sym, other));
4081 emit_insn (gen_adddi3 (operands[0], sym, other));
4085 operands[1] = force_const_mem (mode, operands[1]);
4088 && constant_pool_expr_p (XEXP (operands[1], 0))
4089 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
4090 get_pool_constant (XEXP (operands[1], 0)),
4091 get_pool_mode (XEXP (operands[1], 0))))
4094 = gen_const_mem (mode,
4095 create_TOC_reference (XEXP (operands[1], 0)));
4096 set_mem_alias_set (operands[1], get_TOC_alias_set ());
4102 rs6000_eliminate_indexed_memrefs (operands);
4106 emit_insn (gen_rtx_PARALLEL (VOIDmode,
4108 gen_rtx_SET (VOIDmode,
4109 operands[0], operands[1]),
4110 gen_rtx_CLOBBER (VOIDmode,
4111 gen_rtx_SCRATCH (SImode)))));
4120 /* Above, we may have called force_const_mem which may have returned
4121 an invalid address. If we can, fix this up; otherwise, reload will
4122 have to deal with it. */
4123 if (GET_CODE (operands[1]) == MEM && ! reload_in_progress)
4124 operands[1] = validize_mem (operands[1]);
4127 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
4130 /* Nonzero if we can use a floating-point register to pass this arg. */
4131 #define USE_FP_FOR_ARG_P(CUM,MODE,TYPE) \
4132 (SCALAR_FLOAT_MODE_P (MODE) \
4133 && (CUM)->fregno <= FP_ARG_MAX_REG \
4134 && TARGET_HARD_FLOAT && TARGET_FPRS)
4136 /* Nonzero if we can use an AltiVec register to pass this arg. */
4137 #define USE_ALTIVEC_FOR_ARG_P(CUM,MODE,TYPE,NAMED) \
4138 (ALTIVEC_VECTOR_MODE (MODE) \
4139 && (CUM)->vregno <= ALTIVEC_ARG_MAX_REG \
4140 && TARGET_ALTIVEC_ABI \
4143 /* Return a nonzero value to say to return the function value in
4144 memory, just as large structures are always returned. TYPE will be
4145 the data type of the value, and FNTYPE will be the type of the
4146 function doing the returning, or @code{NULL} for libcalls.
4148 The AIX ABI for the RS/6000 specifies that all structures are
4149 returned in memory. The Darwin ABI does the same. The SVR4 ABI
4150 specifies that structures <= 8 bytes are returned in r3/r4, but a
4151 draft put them in memory, and GCC used to implement the draft
4152 instead of the final standard. Therefore, aix_struct_return
4153 controls this instead of DEFAULT_ABI; V.4 targets needing backward
4154 compatibility can change DRAFT_V4_STRUCT_RET to override the
4155 default, and -m switches get the final word. See
4156 rs6000_override_options for more details.
4158 The PPC32 SVR4 ABI uses IEEE double extended for long double, if 128-bit
4159 long double support is enabled. These values are returned in memory.
4161 int_size_in_bytes returns -1 for variable size objects, which go in
4162 memory always. The cast to unsigned makes -1 > 8. */
4165 rs6000_return_in_memory (tree type, tree fntype ATTRIBUTE_UNUSED)
4167 /* In the darwin64 abi, try to use registers for larger structs
4169 if (rs6000_darwin64_abi
4170 && TREE_CODE (type) == RECORD_TYPE
4171 && int_size_in_bytes (type) > 0)
4173 CUMULATIVE_ARGS valcum;
4177 valcum.fregno = FP_ARG_MIN_REG;
4178 valcum.vregno = ALTIVEC_ARG_MIN_REG;
4179 /* Do a trial code generation as if this were going to be passed
4180 as an argument; if any part goes in memory, we return NULL. */
4181 valret = rs6000_darwin64_record_arg (&valcum, type, 1, true);
4184 /* Otherwise fall through to more conventional ABI rules. */
4187 if (AGGREGATE_TYPE_P (type)
4188 && (aix_struct_return
4189 || (unsigned HOST_WIDE_INT) int_size_in_bytes (type) > 8))
4192 /* Allow -maltivec -mabi=no-altivec without warning. Altivec vector
4193 modes only exist for GCC vector types if -maltivec. */
4194 if (TARGET_32BIT && !TARGET_ALTIVEC_ABI
4195 && ALTIVEC_VECTOR_MODE (TYPE_MODE (type)))
4198 /* Return synthetic vectors in memory. */
4199 if (TREE_CODE (type) == VECTOR_TYPE
4200 && int_size_in_bytes (type) > (TARGET_ALTIVEC_ABI ? 16 : 8))
4202 static bool warned_for_return_big_vectors = false;
4203 if (!warned_for_return_big_vectors)
4205 warning (0, "GCC vector returned by reference: "
4206 "non-standard ABI extension with no compatibility guarantee");
4207 warned_for_return_big_vectors = true;
4212 if (DEFAULT_ABI == ABI_V4 && TYPE_MODE (type) == TFmode)
4218 /* Initialize a variable CUM of type CUMULATIVE_ARGS
4219 for a call to a function whose data type is FNTYPE.
4220 For a library call, FNTYPE is 0.
4222 For incoming args we set the number of arguments in the prototype large
4223 so we never return a PARALLEL. */
4226 init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype,
4227 rtx libname ATTRIBUTE_UNUSED, int incoming,
4228 int libcall, int n_named_args)
4230 static CUMULATIVE_ARGS zero_cumulative;
4232 *cum = zero_cumulative;
4234 cum->fregno = FP_ARG_MIN_REG;
4235 cum->vregno = ALTIVEC_ARG_MIN_REG;
4236 cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
4237 cum->call_cookie = ((DEFAULT_ABI == ABI_V4 && libcall)
4238 ? CALL_LIBCALL : CALL_NORMAL);
4239 cum->sysv_gregno = GP_ARG_MIN_REG;
4240 cum->stdarg = fntype
4241 && (TYPE_ARG_TYPES (fntype) != 0
4242 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
4243 != void_type_node));
4245 cum->nargs_prototype = 0;
4246 if (incoming || cum->prototype)
4247 cum->nargs_prototype = n_named_args;
4249 /* Check for a longcall attribute. */
4250 if ((!fntype && rs6000_default_long_calls)
4252 && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype))
4253 && !lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype))))
4254 cum->call_cookie |= CALL_LONG;
4256 if (TARGET_DEBUG_ARG)
4258 fprintf (stderr, "\ninit_cumulative_args:");
4261 tree ret_type = TREE_TYPE (fntype);
4262 fprintf (stderr, " ret code = %s,",
4263 tree_code_name[ (int)TREE_CODE (ret_type) ]);
4266 if (cum->call_cookie & CALL_LONG)
4267 fprintf (stderr, " longcall,");
4269 fprintf (stderr, " proto = %d, nargs = %d\n",
4270 cum->prototype, cum->nargs_prototype);
4275 && TARGET_ALTIVEC_ABI
4276 && ALTIVEC_VECTOR_MODE (TYPE_MODE (TREE_TYPE (fntype))))
4278 error ("cannot return value in vector register because"
4279 " altivec instructions are disabled, use -maltivec"
4284 /* Return true if TYPE must be passed on the stack and not in registers. */
4287 rs6000_must_pass_in_stack (enum machine_mode mode, tree type)
4289 if (DEFAULT_ABI == ABI_AIX || TARGET_64BIT)
4290 return must_pass_in_stack_var_size (mode, type);
4292 return must_pass_in_stack_var_size_or_pad (mode, type);
4295 /* If defined, a C expression which determines whether, and in which
4296 direction, to pad out an argument with extra space. The value
4297 should be of type `enum direction': either `upward' to pad above
4298 the argument, `downward' to pad below, or `none' to inhibit
4301 For the AIX ABI structs are always stored left shifted in their
4305 function_arg_padding (enum machine_mode mode, tree type)
4307 #ifndef AGGREGATE_PADDING_FIXED
4308 #define AGGREGATE_PADDING_FIXED 0
4310 #ifndef AGGREGATES_PAD_UPWARD_ALWAYS
4311 #define AGGREGATES_PAD_UPWARD_ALWAYS 0
4314 if (!AGGREGATE_PADDING_FIXED)
4316 /* GCC used to pass structures of the same size as integer types as
4317 if they were in fact integers, ignoring FUNCTION_ARG_PADDING.
4318 i.e. Structures of size 1 or 2 (or 4 when TARGET_64BIT) were
4319 passed padded downward, except that -mstrict-align further
4320 muddied the water in that multi-component structures of 2 and 4
4321 bytes in size were passed padded upward.
4323 The following arranges for best compatibility with previous
4324 versions of gcc, but removes the -mstrict-align dependency. */
4325 if (BYTES_BIG_ENDIAN)
4327 HOST_WIDE_INT size = 0;
4329 if (mode == BLKmode)
4331 if (type && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
4332 size = int_size_in_bytes (type);
4335 size = GET_MODE_SIZE (mode);
4337 if (size == 1 || size == 2 || size == 4)
4343 if (AGGREGATES_PAD_UPWARD_ALWAYS)
4345 if (type != 0 && AGGREGATE_TYPE_P (type))
4349 /* Fall back to the default. */
4350 return DEFAULT_FUNCTION_ARG_PADDING (mode, type);
4353 /* If defined, a C expression that gives the alignment boundary, in bits,
4354 of an argument with the specified mode and type. If it is not defined,
4355 PARM_BOUNDARY is used for all arguments.
4357 V.4 wants long longs to be double word aligned.
4358 Doubleword align SPE vectors.
4359 Quadword align Altivec vectors.
4360 Quadword align large synthetic vector types. */
4363 function_arg_boundary (enum machine_mode mode, tree type)
4365 if (DEFAULT_ABI == ABI_V4 && GET_MODE_SIZE (mode) == 8)
4367 else if (SPE_VECTOR_MODE (mode)
4368 || (type && TREE_CODE (type) == VECTOR_TYPE
4369 && int_size_in_bytes (type) >= 8
4370 && int_size_in_bytes (type) < 16))
4372 else if (ALTIVEC_VECTOR_MODE (mode)
4373 || (type && TREE_CODE (type) == VECTOR_TYPE
4374 && int_size_in_bytes (type) >= 16))
4376 else if (rs6000_darwin64_abi && mode == BLKmode
4377 && type && TYPE_ALIGN (type) > 64)
4380 return PARM_BOUNDARY;
4383 /* For a function parm of MODE and TYPE, return the starting word in
4384 the parameter area. NWORDS of the parameter area are already used. */
4387 rs6000_parm_start (enum machine_mode mode, tree type, unsigned int nwords)
4390 unsigned int parm_offset;
4392 align = function_arg_boundary (mode, type) / PARM_BOUNDARY - 1;
4393 parm_offset = DEFAULT_ABI == ABI_V4 ? 2 : 6;
4394 return nwords + (-(parm_offset + nwords) & align);
4397 /* Compute the size (in words) of a function argument. */
4399 static unsigned long
4400 rs6000_arg_size (enum machine_mode mode, tree type)
4404 if (mode != BLKmode)
4405 size = GET_MODE_SIZE (mode);
4407 size = int_size_in_bytes (type);
4410 return (size + 3) >> 2;
4412 return (size + 7) >> 3;
4415 /* Use this to flush pending int fields. */
4418 rs6000_darwin64_record_arg_advance_flush (CUMULATIVE_ARGS *cum,
4419 HOST_WIDE_INT bitpos)
4421 unsigned int startbit, endbit;
4422 int intregs, intoffset;
4423 enum machine_mode mode;
4425 if (cum->intoffset == -1)
4428 intoffset = cum->intoffset;
4429 cum->intoffset = -1;
4431 if (intoffset % BITS_PER_WORD != 0)
4433 mode = mode_for_size (BITS_PER_WORD - intoffset % BITS_PER_WORD,
4435 if (mode == BLKmode)
4437 /* We couldn't find an appropriate mode, which happens,
4438 e.g., in packed structs when there are 3 bytes to load.
4439 Back intoffset back to the beginning of the word in this
4441 intoffset = intoffset & -BITS_PER_WORD;
4445 startbit = intoffset & -BITS_PER_WORD;
4446 endbit = (bitpos + BITS_PER_WORD - 1) & -BITS_PER_WORD;
4447 intregs = (endbit - startbit) / BITS_PER_WORD;
4448 cum->words += intregs;
4451 /* The darwin64 ABI calls for us to recurse down through structs,
4452 looking for elements passed in registers. Unfortunately, we have
4453 to track int register count here also because of misalignments
4454 in powerpc alignment mode. */
4457 rs6000_darwin64_record_arg_advance_recurse (CUMULATIVE_ARGS *cum,
4459 HOST_WIDE_INT startbitpos)
4463 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
4464 if (TREE_CODE (f) == FIELD_DECL)
4466 HOST_WIDE_INT bitpos = startbitpos;
4467 tree ftype = TREE_TYPE (f);
4468 enum machine_mode mode = TYPE_MODE (ftype);
4470 if (DECL_SIZE (f) != 0
4471 && host_integerp (bit_position (f), 1))
4472 bitpos += int_bit_position (f);
4474 /* ??? FIXME: else assume zero offset. */
4476 if (TREE_CODE (ftype) == RECORD_TYPE)
4477 rs6000_darwin64_record_arg_advance_recurse (cum, ftype, bitpos);
4478 else if (USE_FP_FOR_ARG_P (cum, mode, ftype))
4480 rs6000_darwin64_record_arg_advance_flush (cum, bitpos);
4481 cum->fregno += (GET_MODE_SIZE (mode) + 7) >> 3;
4482 cum->words += (GET_MODE_SIZE (mode) + 7) >> 3;
4484 else if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, 1))
4486 rs6000_darwin64_record_arg_advance_flush (cum, bitpos);
4490 else if (cum->intoffset == -1)
4491 cum->intoffset = bitpos;
4495 /* Update the data in CUM to advance over an argument
4496 of mode MODE and data type TYPE.
4497 (TYPE is null for libcalls where that information may not be available.)
4499 Note that for args passed by reference, function_arg will be called
4500 with MODE and TYPE set to that of the pointer to the arg, not the arg
4504 function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
4505 tree type, int named, int depth)
4509 /* Only tick off an argument if we're not recursing. */
4511 cum->nargs_prototype--;
4513 if (TARGET_ALTIVEC_ABI
4514 && (ALTIVEC_VECTOR_MODE (mode)
4515 || (type && TREE_CODE (type) == VECTOR_TYPE
4516 && int_size_in_bytes (type) == 16)))
4520 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named))
4523 if (!TARGET_ALTIVEC)
4524 error ("cannot pass argument in vector register because"
4525 " altivec instructions are disabled, use -maltivec"
4528 /* PowerPC64 Linux and AIX allocate GPRs for a vector argument
4529 even if it is going to be passed in a vector register.
4530 Darwin does the same for variable-argument functions. */
4531 if ((DEFAULT_ABI == ABI_AIX && TARGET_64BIT)
4532 || (cum->stdarg && DEFAULT_ABI != ABI_V4))
4542 /* Vector parameters must be 16-byte aligned. This places
4543 them at 2 mod 4 in terms of words in 32-bit mode, since
4544 the parameter save area starts at offset 24 from the
4545 stack. In 64-bit mode, they just have to start on an
4546 even word, since the parameter save area is 16-byte
4547 aligned. Space for GPRs is reserved even if the argument
4548 will be passed in memory. */
4550 align = (2 - cum->words) & 3;
4552 align = cum->words & 1;
4553 cum->words += align + rs6000_arg_size (mode, type);
4555 if (TARGET_DEBUG_ARG)
4557 fprintf (stderr, "function_adv: words = %2d, align=%d, ",
4559 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s\n",
4560 cum->nargs_prototype, cum->prototype,
4561 GET_MODE_NAME (mode));
4565 else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode)
4567 && cum->sysv_gregno <= GP_ARG_MAX_REG)
4570 else if (rs6000_darwin64_abi
4572 && TREE_CODE (type) == RECORD_TYPE
4573 && (size = int_size_in_bytes (type)) > 0)
4575 /* Variable sized types have size == -1 and are
4576 treated as if consisting entirely of ints.
4577 Pad to 16 byte boundary if needed. */
4578 if (TYPE_ALIGN (type) >= 2 * BITS_PER_WORD
4579 && (cum->words % 2) != 0)
4581 /* For varargs, we can just go up by the size of the struct. */
4583 cum->words += (size + 7) / 8;
4586 /* It is tempting to say int register count just goes up by
4587 sizeof(type)/8, but this is wrong in a case such as
4588 { int; double; int; } [powerpc alignment]. We have to
4589 grovel through the fields for these too. */
4591 rs6000_darwin64_record_arg_advance_recurse (cum, type, 0);
4592 rs6000_darwin64_record_arg_advance_flush (cum,
4593 size * BITS_PER_UNIT);
4596 else if (DEFAULT_ABI == ABI_V4)
4598 if (TARGET_HARD_FLOAT && TARGET_FPRS
4599 && (mode == SFmode || mode == DFmode))
4601 if (cum->fregno <= FP_ARG_V4_MAX_REG)
4606 cum->words += cum->words & 1;
4607 cum->words += rs6000_arg_size (mode, type);
4612 int n_words = rs6000_arg_size (mode, type);
4613 int gregno = cum->sysv_gregno;
4615 /* Long long and SPE vectors are put in (r3,r4), (r5,r6),
4616 (r7,r8) or (r9,r10). As does any other 2 word item such
4617 as complex int due to a historical mistake. */
4619 gregno += (1 - gregno) & 1;
4621 /* Multi-reg args are not split between registers and stack. */
4622 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
4624 /* Long long and SPE vectors are aligned on the stack.
4625 So are other 2 word items such as complex int due to
4626 a historical mistake. */
4628 cum->words += cum->words & 1;
4629 cum->words += n_words;
4632 /* Note: continuing to accumulate gregno past when we've started
4633 spilling to the stack indicates the fact that we've started
4634 spilling to the stack to expand_builtin_saveregs. */
4635 cum->sysv_gregno = gregno + n_words;
4638 if (TARGET_DEBUG_ARG)
4640 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
4641 cum->words, cum->fregno);
4642 fprintf (stderr, "gregno = %2d, nargs = %4d, proto = %d, ",
4643 cum->sysv_gregno, cum->nargs_prototype, cum->prototype);
4644 fprintf (stderr, "mode = %4s, named = %d\n",
4645 GET_MODE_NAME (mode), named);
4650 int n_words = rs6000_arg_size (mode, type);
4651 int start_words = cum->words;
4652 int align_words = rs6000_parm_start (mode, type, start_words);
4654 cum->words = align_words + n_words;
4656 if (SCALAR_FLOAT_MODE_P (mode)
4657 && TARGET_HARD_FLOAT && TARGET_FPRS)
4658 cum->fregno += (GET_MODE_SIZE (mode) + 7) >> 3;
4660 if (TARGET_DEBUG_ARG)
4662 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
4663 cum->words, cum->fregno);
4664 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s, ",
4665 cum->nargs_prototype, cum->prototype, GET_MODE_NAME (mode));
4666 fprintf (stderr, "named = %d, align = %d, depth = %d\n",
4667 named, align_words - start_words, depth);
4673 spe_build_register_parallel (enum machine_mode mode, int gregno)
4680 r1 = gen_rtx_REG (DImode, gregno);
4681 r1 = gen_rtx_EXPR_LIST (VOIDmode, r1, const0_rtx);
4682 return gen_rtx_PARALLEL (mode, gen_rtvec (1, r1));
4685 r1 = gen_rtx_REG (DImode, gregno);
4686 r1 = gen_rtx_EXPR_LIST (VOIDmode, r1, const0_rtx);
4687 r3 = gen_rtx_REG (DImode, gregno + 2);
4688 r3 = gen_rtx_EXPR_LIST (VOIDmode, r3, GEN_INT (8));
4689 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r3));
4696 /* Determine where to put a SIMD argument on the SPE. */
4698 rs6000_spe_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
4701 int gregno = cum->sysv_gregno;
4703 /* On E500 v2, double arithmetic is done on the full 64-bit GPR, but
4704 are passed and returned in a pair of GPRs for ABI compatibility. */
4705 if (TARGET_E500_DOUBLE && (mode == DFmode || mode == DCmode))
4707 int n_words = rs6000_arg_size (mode, type);
4709 /* Doubles go in an odd/even register pair (r5/r6, etc). */
4711 gregno += (1 - gregno) & 1;
4713 /* Multi-reg args are not split between registers and stack. */
4714 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
4717 return spe_build_register_parallel (mode, gregno);
4721 int n_words = rs6000_arg_size (mode, type);
4723 /* SPE vectors are put in odd registers. */
4724 if (n_words == 2 && (gregno & 1) == 0)
4727 if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
4730 enum machine_mode m = SImode;
4732 r1 = gen_rtx_REG (m, gregno);
4733 r1 = gen_rtx_EXPR_LIST (m, r1, const0_rtx);
4734 r2 = gen_rtx_REG (m, gregno + 1);
4735 r2 = gen_rtx_EXPR_LIST (m, r2, GEN_INT (4));
4736 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
4743 if (gregno <= GP_ARG_MAX_REG)
4744 return gen_rtx_REG (mode, gregno);
4750 /* A subroutine of rs6000_darwin64_record_arg. Assign the bits of the
4751 structure between cum->intoffset and bitpos to integer registers. */
4754 rs6000_darwin64_record_arg_flush (CUMULATIVE_ARGS *cum,
4755 HOST_WIDE_INT bitpos, rtx rvec[], int *k)
4757 enum machine_mode mode;
4759 unsigned int startbit, endbit;
4760 int this_regno, intregs, intoffset;
4763 if (cum->intoffset == -1)
4766 intoffset = cum->intoffset;
4767 cum->intoffset = -1;
4769 /* If this is the trailing part of a word, try to only load that
4770 much into the register. Otherwise load the whole register. Note
4771 that in the latter case we may pick up unwanted bits. It's not a
4772 problem at the moment but may wish to revisit. */
4774 if (intoffset % BITS_PER_WORD != 0)
4776 mode = mode_for_size (BITS_PER_WORD - intoffset % BITS_PER_WORD,
4778 if (mode == BLKmode)
4780 /* We couldn't find an appropriate mode, which happens,
4781 e.g., in packed structs when there are 3 bytes to load.
4782 Back intoffset back to the beginning of the word in this
4784 intoffset = intoffset & -BITS_PER_WORD;
4791 startbit = intoffset & -BITS_PER_WORD;
4792 endbit = (bitpos + BITS_PER_WORD - 1) & -BITS_PER_WORD;
4793 intregs = (endbit - startbit) / BITS_PER_WORD;
4794 this_regno = cum->words + intoffset / BITS_PER_WORD;
4796 if (intregs > 0 && intregs > GP_ARG_NUM_REG - this_regno)
4799 intregs = MIN (intregs, GP_ARG_NUM_REG - this_regno);
4803 intoffset /= BITS_PER_UNIT;
4806 regno = GP_ARG_MIN_REG + this_regno;
4807 reg = gen_rtx_REG (mode, regno);
4809 gen_rtx_EXPR_LIST (VOIDmode, reg, GEN_INT (intoffset));
4812 intoffset = (intoffset | (UNITS_PER_WORD-1)) + 1;
4816 while (intregs > 0);
4819 /* Recursive workhorse for the following. */
4822 rs6000_darwin64_record_arg_recurse (CUMULATIVE_ARGS *cum, tree type,
4823 HOST_WIDE_INT startbitpos, rtx rvec[],
4828 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
4829 if (TREE_CODE (f) == FIELD_DECL)
4831 HOST_WIDE_INT bitpos = startbitpos;
4832 tree ftype = TREE_TYPE (f);
4833 enum machine_mode mode = TYPE_MODE (ftype);
4835 if (DECL_SIZE (f) != 0
4836 && host_integerp (bit_position (f), 1))
4837 bitpos += int_bit_position (f);
4839 /* ??? FIXME: else assume zero offset. */
4841 if (TREE_CODE (ftype) == RECORD_TYPE)
4842 rs6000_darwin64_record_arg_recurse (cum, ftype, bitpos, rvec, k);
4843 else if (cum->named && USE_FP_FOR_ARG_P (cum, mode, ftype))
4848 case SCmode: mode = SFmode; break;
4849 case DCmode: mode = DFmode; break;
4850 case TCmode: mode = TFmode; break;
4854 rs6000_darwin64_record_arg_flush (cum, bitpos, rvec, k);
4856 = gen_rtx_EXPR_LIST (VOIDmode,
4857 gen_rtx_REG (mode, cum->fregno++),
4858 GEN_INT (bitpos / BITS_PER_UNIT));
4862 else if (cum->named && USE_ALTIVEC_FOR_ARG_P (cum, mode, ftype, 1))
4864 rs6000_darwin64_record_arg_flush (cum, bitpos, rvec, k);
4866 = gen_rtx_EXPR_LIST (VOIDmode,
4867 gen_rtx_REG (mode, cum->vregno++),
4868 GEN_INT (bitpos / BITS_PER_UNIT));
4870 else if (cum->intoffset == -1)
4871 cum->intoffset = bitpos;
4875 /* For the darwin64 ABI, we want to construct a PARALLEL consisting of
4876 the register(s) to be used for each field and subfield of a struct
4877 being passed by value, along with the offset of where the
4878 register's value may be found in the block. FP fields go in FP
4879 register, vector fields go in vector registers, and everything
4880 else goes in int registers, packed as in memory.
4882 This code is also used for function return values. RETVAL indicates
4883 whether this is the case.
4885 Much of this is taken from the SPARC V9 port, which has a similar
4886 calling convention. */
4889 rs6000_darwin64_record_arg (CUMULATIVE_ARGS *orig_cum, tree type,
4890 int named, bool retval)
4892 rtx rvec[FIRST_PSEUDO_REGISTER];
4893 int k = 1, kbase = 1;
4894 HOST_WIDE_INT typesize = int_size_in_bytes (type);
4895 /* This is a copy; modifications are not visible to our caller. */
4896 CUMULATIVE_ARGS copy_cum = *orig_cum;
4897 CUMULATIVE_ARGS *cum = ©_cum;
4899 /* Pad to 16 byte boundary if needed. */
4900 if (!retval && TYPE_ALIGN (type) >= 2 * BITS_PER_WORD
4901 && (cum->words % 2) != 0)
4908 /* Put entries into rvec[] for individual FP and vector fields, and
4909 for the chunks of memory that go in int regs. Note we start at
4910 element 1; 0 is reserved for an indication of using memory, and
4911 may or may not be filled in below. */
4912 rs6000_darwin64_record_arg_recurse (cum, type, 0, rvec, &k);
4913 rs6000_darwin64_record_arg_flush (cum, typesize * BITS_PER_UNIT, rvec, &k);
4915 /* If any part of the struct went on the stack put all of it there.
4916 This hack is because the generic code for
4917 FUNCTION_ARG_PARTIAL_NREGS cannot handle cases where the register
4918 parts of the struct are not at the beginning. */
4922 return NULL_RTX; /* doesn't go in registers at all */
4924 rvec[0] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
4926 if (k > 1 || cum->use_stack)
4927 return gen_rtx_PARALLEL (BLKmode, gen_rtvec_v (k - kbase, &rvec[kbase]));
4932 /* Determine where to place an argument in 64-bit mode with 32-bit ABI. */
4935 rs6000_mixed_function_arg (enum machine_mode mode, tree type, int align_words)
4939 rtx rvec[GP_ARG_NUM_REG + 1];
4941 if (align_words >= GP_ARG_NUM_REG)
4944 n_units = rs6000_arg_size (mode, type);
4946 /* Optimize the simple case where the arg fits in one gpr, except in
4947 the case of BLKmode due to assign_parms assuming that registers are
4948 BITS_PER_WORD wide. */
4950 || (n_units == 1 && mode != BLKmode))
4951 return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
4954 if (align_words + n_units > GP_ARG_NUM_REG)
4955 /* Not all of the arg fits in gprs. Say that it goes in memory too,
4956 using a magic NULL_RTX component.
4957 FIXME: This is not strictly correct. Only some of the arg
4958 belongs in memory, not all of it. However, there isn't any way
4959 to do this currently, apart from building rtx descriptions for
4960 the pieces of memory we want stored. Due to bugs in the generic
4961 code we can't use the normal function_arg_partial_nregs scheme
4962 with the PARALLEL arg description we emit here.
4963 In any case, the code to store the whole arg to memory is often
4964 more efficient than code to store pieces, and we know that space
4965 is available in the right place for the whole arg. */
4966 /* FIXME: This should be fixed since the conversion to
4967 TARGET_ARG_PARTIAL_BYTES. */
4968 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
4973 rtx r = gen_rtx_REG (SImode, GP_ARG_MIN_REG + align_words);
4974 rtx off = GEN_INT (i++ * 4);
4975 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, off);
4977 while (++align_words < GP_ARG_NUM_REG && --n_units != 0);
4979 return gen_rtx_PARALLEL (mode, gen_rtvec_v (k, rvec));
4982 /* Determine where to put an argument to a function.
4983 Value is zero to push the argument on the stack,
4984 or a hard register in which to store the argument.
4986 MODE is the argument's machine mode.
4987 TYPE is the data type of the argument (as a tree).
4988 This is null for libcalls where that information may
4990 CUM is a variable of type CUMULATIVE_ARGS which gives info about
4991 the preceding args and about the function being called. It is
4992 not modified in this routine.
4993 NAMED is nonzero if this argument is a named parameter
4994 (otherwise it is an extra parameter matching an ellipsis).
4996 On RS/6000 the first eight words of non-FP are normally in registers
4997 and the rest are pushed. Under AIX, the first 13 FP args are in registers.
4998 Under V.4, the first 8 FP args are in registers.
5000 If this is floating-point and no prototype is specified, we use
5001 both an FP and integer register (or possibly FP reg and stack). Library
5002 functions (when CALL_LIBCALL is set) always have the proper types for args,
5003 so we can pass the FP value just in one register. emit_library_function
5004 doesn't support PARALLEL anyway.
5006 Note that for args passed by reference, function_arg will be called
5007 with MODE and TYPE set to that of the pointer to the arg, not the arg
5011 function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
5012 tree type, int named)
5014 enum rs6000_abi abi = DEFAULT_ABI;
5016 /* Return a marker to indicate whether CR1 needs to set or clear the
5017 bit that V.4 uses to say fp args were passed in registers.
5018 Assume that we don't need the marker for software floating point,
5019 or compiler generated library calls. */
5020 if (mode == VOIDmode)
5023 && (cum->call_cookie & CALL_LIBCALL) == 0
5025 || (cum->nargs_prototype < 0
5026 && (cum->prototype || TARGET_NO_PROTOTYPE))))
5028 /* For the SPE, we need to crxor CR6 always. */
5030 return GEN_INT (cum->call_cookie | CALL_V4_SET_FP_ARGS);
5031 else if (TARGET_HARD_FLOAT && TARGET_FPRS)
5032 return GEN_INT (cum->call_cookie
5033 | ((cum->fregno == FP_ARG_MIN_REG)
5034 ? CALL_V4_SET_FP_ARGS
5035 : CALL_V4_CLEAR_FP_ARGS));
5038 return GEN_INT (cum->call_cookie);
5041 if (rs6000_darwin64_abi && mode == BLKmode
5042 && TREE_CODE (type) == RECORD_TYPE)
5044 rtx rslt = rs6000_darwin64_record_arg (cum, type, named, false);
5045 if (rslt != NULL_RTX)
5047 /* Else fall through to usual handling. */
5050 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named))
5051 if (TARGET_64BIT && ! cum->prototype)
5053 /* Vector parameters get passed in vector register
5054 and also in GPRs or memory, in absence of prototype. */
5057 align_words = (cum->words + 1) & ~1;
5059 if (align_words >= GP_ARG_NUM_REG)
5065 slot = gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
5067 return gen_rtx_PARALLEL (mode,
5069 gen_rtx_EXPR_LIST (VOIDmode,
5071 gen_rtx_EXPR_LIST (VOIDmode,
5072 gen_rtx_REG (mode, cum->vregno),
5076 return gen_rtx_REG (mode, cum->vregno);
5077 else if (TARGET_ALTIVEC_ABI
5078 && (ALTIVEC_VECTOR_MODE (mode)
5079 || (type && TREE_CODE (type) == VECTOR_TYPE
5080 && int_size_in_bytes (type) == 16)))
5082 if (named || abi == ABI_V4)
5086 /* Vector parameters to varargs functions under AIX or Darwin
5087 get passed in memory and possibly also in GPRs. */
5088 int align, align_words, n_words;
5089 enum machine_mode part_mode;
5091 /* Vector parameters must be 16-byte aligned. This places them at
5092 2 mod 4 in terms of words in 32-bit mode, since the parameter
5093 save area starts at offset 24 from the stack. In 64-bit mode,
5094 they just have to start on an even word, since the parameter
5095 save area is 16-byte aligned. */
5097 align = (2 - cum->words) & 3;
5099 align = cum->words & 1;
5100 align_words = cum->words + align;
5102 /* Out of registers? Memory, then. */
5103 if (align_words >= GP_ARG_NUM_REG)
5106 if (TARGET_32BIT && TARGET_POWERPC64)
5107 return rs6000_mixed_function_arg (mode, type, align_words);
5109 /* The vector value goes in GPRs. Only the part of the
5110 value in GPRs is reported here. */
5112 n_words = rs6000_arg_size (mode, type);
5113 if (align_words + n_words > GP_ARG_NUM_REG)
5114 /* Fortunately, there are only two possibilities, the value
5115 is either wholly in GPRs or half in GPRs and half not. */
5118 return gen_rtx_REG (part_mode, GP_ARG_MIN_REG + align_words);
5121 else if (TARGET_SPE_ABI && TARGET_SPE
5122 && (SPE_VECTOR_MODE (mode)
5123 || (TARGET_E500_DOUBLE && (mode == DFmode
5124 || mode == DCmode))))
5125 return rs6000_spe_function_arg (cum, mode, type);
5127 else if (abi == ABI_V4)
5129 if (TARGET_HARD_FLOAT && TARGET_FPRS
5130 && (mode == SFmode || mode == DFmode))
5132 if (cum->fregno <= FP_ARG_V4_MAX_REG)
5133 return gen_rtx_REG (mode, cum->fregno);
5139 int n_words = rs6000_arg_size (mode, type);
5140 int gregno = cum->sysv_gregno;
5142 /* Long long and SPE vectors are put in (r3,r4), (r5,r6),
5143 (r7,r8) or (r9,r10). As does any other 2 word item such
5144 as complex int due to a historical mistake. */
5146 gregno += (1 - gregno) & 1;
5148 /* Multi-reg args are not split between registers and stack. */
5149 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
5152 if (TARGET_32BIT && TARGET_POWERPC64)
5153 return rs6000_mixed_function_arg (mode, type,
5154 gregno - GP_ARG_MIN_REG);
5155 return gen_rtx_REG (mode, gregno);
5160 int align_words = rs6000_parm_start (mode, type, cum->words);
5162 if (USE_FP_FOR_ARG_P (cum, mode, type))
5164 rtx rvec[GP_ARG_NUM_REG + 1];
5168 enum machine_mode fmode = mode;
5169 unsigned long n_fpreg = (GET_MODE_SIZE (mode) + 7) >> 3;
5171 if (cum->fregno + n_fpreg > FP_ARG_MAX_REG + 1)
5173 /* Currently, we only ever need one reg here because complex
5174 doubles are split. */
5175 gcc_assert (cum->fregno == FP_ARG_MAX_REG && fmode == TFmode);
5177 /* Long double split over regs and memory. */
5181 /* Do we also need to pass this arg in the parameter save
5184 && (cum->nargs_prototype <= 0
5185 || (DEFAULT_ABI == ABI_AIX
5187 && align_words >= GP_ARG_NUM_REG)));
5189 if (!needs_psave && mode == fmode)
5190 return gen_rtx_REG (fmode, cum->fregno);
5195 /* Describe the part that goes in gprs or the stack.
5196 This piece must come first, before the fprs. */
5197 if (align_words < GP_ARG_NUM_REG)
5199 unsigned long n_words = rs6000_arg_size (mode, type);
5201 if (align_words + n_words > GP_ARG_NUM_REG
5202 || (TARGET_32BIT && TARGET_POWERPC64))
5204 /* If this is partially on the stack, then we only
5205 include the portion actually in registers here. */
5206 enum machine_mode rmode = TARGET_32BIT ? SImode : DImode;
5209 if (align_words + n_words > GP_ARG_NUM_REG
5210 && (TARGET_32BIT && TARGET_POWERPC64))
5211 /* Not all of the arg fits in gprs. Say that it
5212 goes in memory too, using a magic NULL_RTX
5213 component. Also see comment in
5214 rs6000_mixed_function_arg for why the normal
5215 function_arg_partial_nregs scheme doesn't work
5217 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX,
5221 r = gen_rtx_REG (rmode,
5222 GP_ARG_MIN_REG + align_words);
5223 off = GEN_INT (i++ * GET_MODE_SIZE (rmode));
5224 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, off);
5226 while (++align_words < GP_ARG_NUM_REG && --n_words != 0);
5230 /* The whole arg fits in gprs. */
5231 r = gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
5232 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, const0_rtx);
5236 /* It's entirely in memory. */
5237 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
5240 /* Describe where this piece goes in the fprs. */
5241 r = gen_rtx_REG (fmode, cum->fregno);
5242 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, const0_rtx);
5244 return gen_rtx_PARALLEL (mode, gen_rtvec_v (k, rvec));
5246 else if (align_words < GP_ARG_NUM_REG)
5248 if (TARGET_32BIT && TARGET_POWERPC64)
5249 return rs6000_mixed_function_arg (mode, type, align_words);
5251 if (mode == BLKmode)
5254 return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
5261 /* For an arg passed partly in registers and partly in memory, this is
5262 the number of bytes passed in registers. For args passed entirely in
5263 registers or entirely in memory, zero. When an arg is described by a
5264 PARALLEL, perhaps using more than one register type, this function
5265 returns the number of bytes used by the first element of the PARALLEL. */
5268 rs6000_arg_partial_bytes (CUMULATIVE_ARGS *cum, enum machine_mode mode,
5269 tree type, bool named)
5274 if (DEFAULT_ABI == ABI_V4)
5277 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named)
5278 && cum->nargs_prototype >= 0)
5281 /* In this complicated case we just disable the partial_nregs code. */
5282 if (rs6000_darwin64_abi && mode == BLKmode
5283 && TREE_CODE (type) == RECORD_TYPE
5284 && int_size_in_bytes (type) > 0)
5287 align_words = rs6000_parm_start (mode, type, cum->words);
5289 if (USE_FP_FOR_ARG_P (cum, mode, type)
5290 /* If we are passing this arg in the fixed parameter save area
5291 (gprs or memory) as well as fprs, then this function should
5292 return the number of bytes passed in the parameter save area
5293 rather than bytes passed in fprs. */
5295 && (cum->nargs_prototype <= 0
5296 || (DEFAULT_ABI == ABI_AIX
5298 && align_words >= GP_ARG_NUM_REG))))
5300 if (cum->fregno + ((GET_MODE_SIZE (mode) + 7) >> 3) > FP_ARG_MAX_REG + 1)
5301 ret = (FP_ARG_MAX_REG + 1 - cum->fregno) * 8;
5302 else if (cum->nargs_prototype >= 0)
5306 if (align_words < GP_ARG_NUM_REG
5307 && GP_ARG_NUM_REG < align_words + rs6000_arg_size (mode, type))
5308 ret = (GP_ARG_NUM_REG - align_words) * (TARGET_32BIT ? 4 : 8);
5310 if (ret != 0 && TARGET_DEBUG_ARG)
5311 fprintf (stderr, "rs6000_arg_partial_bytes: %d\n", ret);
5316 /* A C expression that indicates when an argument must be passed by
5317 reference. If nonzero for an argument, a copy of that argument is
5318 made in memory and a pointer to the argument is passed instead of
5319 the argument itself. The pointer is passed in whatever way is
5320 appropriate for passing a pointer to that type.
5322 Under V.4, aggregates and long double are passed by reference.
5324 As an extension to all 32-bit ABIs, AltiVec vectors are passed by
5325 reference unless the AltiVec vector extension ABI is in force.
5327 As an extension to all ABIs, variable sized types are passed by
5331 rs6000_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
5332 enum machine_mode mode, tree type,
5333 bool named ATTRIBUTE_UNUSED)
5335 if (DEFAULT_ABI == ABI_V4 && mode == TFmode)
5337 if (TARGET_DEBUG_ARG)
5338 fprintf (stderr, "function_arg_pass_by_reference: V4 long double\n");
5345 if (DEFAULT_ABI == ABI_V4 && AGGREGATE_TYPE_P (type))
5347 if (TARGET_DEBUG_ARG)
5348 fprintf (stderr, "function_arg_pass_by_reference: V4 aggregate\n");
5352 if (int_size_in_bytes (type) < 0)
5354 if (TARGET_DEBUG_ARG)
5355 fprintf (stderr, "function_arg_pass_by_reference: variable size\n");
5359 /* Allow -maltivec -mabi=no-altivec without warning. Altivec vector
5360 modes only exist for GCC vector types if -maltivec. */
5361 if (TARGET_32BIT && !TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
5363 if (TARGET_DEBUG_ARG)
5364 fprintf (stderr, "function_arg_pass_by_reference: AltiVec\n");
5368 /* Pass synthetic vectors in memory. */
5369 if (TREE_CODE (type) == VECTOR_TYPE
5370 && int_size_in_bytes (type) > (TARGET_ALTIVEC_ABI ? 16 : 8))
5372 static bool warned_for_pass_big_vectors = false;
5373 if (TARGET_DEBUG_ARG)
5374 fprintf (stderr, "function_arg_pass_by_reference: synthetic vector\n");
5375 if (!warned_for_pass_big_vectors)
5377 warning (0, "GCC vector passed by reference: "
5378 "non-standard ABI extension with no compatibility guarantee");
5379 warned_for_pass_big_vectors = true;
5388 rs6000_move_block_from_reg (int regno, rtx x, int nregs)
5391 enum machine_mode reg_mode = TARGET_32BIT ? SImode : DImode;
5396 for (i = 0; i < nregs; i++)
5398 rtx tem = adjust_address_nv (x, reg_mode, i * GET_MODE_SIZE (reg_mode));
5399 if (reload_completed)
5401 if (! strict_memory_address_p (reg_mode, XEXP (tem, 0)))
5404 tem = simplify_gen_subreg (reg_mode, x, BLKmode,
5405 i * GET_MODE_SIZE (reg_mode));
5408 tem = replace_equiv_address (tem, XEXP (tem, 0));
5412 emit_move_insn (tem, gen_rtx_REG (reg_mode, regno + i));
5416 /* Perform any needed actions needed for a function that is receiving a
5417 variable number of arguments.
5421 MODE and TYPE are the mode and type of the current parameter.
5423 PRETEND_SIZE is a variable that should be set to the amount of stack
5424 that must be pushed by the prolog to pretend that our caller pushed
5427 Normally, this macro will push all remaining incoming registers on the
5428 stack and set PRETEND_SIZE to the length of the registers pushed. */
5431 setup_incoming_varargs (CUMULATIVE_ARGS *cum, enum machine_mode mode,
5432 tree type, int *pretend_size ATTRIBUTE_UNUSED,
5435 CUMULATIVE_ARGS next_cum;
5436 int reg_size = TARGET_32BIT ? 4 : 8;
5437 rtx save_area = NULL_RTX, mem;
5438 int first_reg_offset, set;
5440 /* Skip the last named argument. */
5442 function_arg_advance (&next_cum, mode, type, 1, 0);
5444 if (DEFAULT_ABI == ABI_V4)
5446 first_reg_offset = next_cum.sysv_gregno - GP_ARG_MIN_REG;
5450 int gpr_reg_num = 0, gpr_size = 0, fpr_size = 0;
5451 HOST_WIDE_INT offset = 0;
5453 /* Try to optimize the size of the varargs save area.
5454 The ABI requires that ap.reg_save_area is doubleword
5455 aligned, but we don't need to allocate space for all
5456 the bytes, only those to which we actually will save
5458 if (cfun->va_list_gpr_size && first_reg_offset < GP_ARG_NUM_REG)
5459 gpr_reg_num = GP_ARG_NUM_REG - first_reg_offset;
5460 if (TARGET_HARD_FLOAT && TARGET_FPRS
5461 && next_cum.fregno <= FP_ARG_V4_MAX_REG
5462 && cfun->va_list_fpr_size)
5465 fpr_size = (next_cum.fregno - FP_ARG_MIN_REG)
5466 * UNITS_PER_FP_WORD;
5467 if (cfun->va_list_fpr_size
5468 < FP_ARG_V4_MAX_REG + 1 - next_cum.fregno)
5469 fpr_size += cfun->va_list_fpr_size * UNITS_PER_FP_WORD;
5471 fpr_size += (FP_ARG_V4_MAX_REG + 1 - next_cum.fregno)
5472 * UNITS_PER_FP_WORD;
5476 offset = -((first_reg_offset * reg_size) & ~7);
5477 if (!fpr_size && gpr_reg_num > cfun->va_list_gpr_size)
5479 gpr_reg_num = cfun->va_list_gpr_size;
5480 if (reg_size == 4 && (first_reg_offset & 1))
5483 gpr_size = (gpr_reg_num * reg_size + 7) & ~7;
5486 offset = - (int) (next_cum.fregno - FP_ARG_MIN_REG)
5488 - (int) (GP_ARG_NUM_REG * reg_size);
5490 if (gpr_size + fpr_size)
5493 = assign_stack_local (BLKmode, gpr_size + fpr_size, 64);
5494 gcc_assert (GET_CODE (reg_save_area) == MEM);
5495 reg_save_area = XEXP (reg_save_area, 0);
5496 if (GET_CODE (reg_save_area) == PLUS)
5498 gcc_assert (XEXP (reg_save_area, 0)
5499 == virtual_stack_vars_rtx);
5500 gcc_assert (GET_CODE (XEXP (reg_save_area, 1)) == CONST_INT);
5501 offset += INTVAL (XEXP (reg_save_area, 1));
5504 gcc_assert (reg_save_area == virtual_stack_vars_rtx);
5507 cfun->machine->varargs_save_offset = offset;
5508 save_area = plus_constant (virtual_stack_vars_rtx, offset);
5513 first_reg_offset = next_cum.words;
5514 save_area = virtual_incoming_args_rtx;
5516 if (targetm.calls.must_pass_in_stack (mode, type))
5517 first_reg_offset += rs6000_arg_size (TYPE_MODE (type), type);
5520 set = get_varargs_alias_set ();
5521 if (! no_rtl && first_reg_offset < GP_ARG_NUM_REG
5522 && cfun->va_list_gpr_size)
5524 int nregs = GP_ARG_NUM_REG - first_reg_offset;
5526 if (va_list_gpr_counter_field)
5528 /* V4 va_list_gpr_size counts number of registers needed. */
5529 if (nregs > cfun->va_list_gpr_size)
5530 nregs = cfun->va_list_gpr_size;
5534 /* char * va_list instead counts number of bytes needed. */
5535 if (nregs > cfun->va_list_gpr_size / reg_size)
5536 nregs = cfun->va_list_gpr_size / reg_size;
5539 mem = gen_rtx_MEM (BLKmode,
5540 plus_constant (save_area,
5541 first_reg_offset * reg_size));
5542 MEM_NOTRAP_P (mem) = 1;
5543 set_mem_alias_set (mem, set);
5544 set_mem_align (mem, BITS_PER_WORD);
5546 rs6000_move_block_from_reg (GP_ARG_MIN_REG + first_reg_offset, mem,
5550 /* Save FP registers if needed. */
5551 if (DEFAULT_ABI == ABI_V4
5552 && TARGET_HARD_FLOAT && TARGET_FPRS
5554 && next_cum.fregno <= FP_ARG_V4_MAX_REG
5555 && cfun->va_list_fpr_size)
5557 int fregno = next_cum.fregno, nregs;
5558 rtx cr1 = gen_rtx_REG (CCmode, CR1_REGNO);
5559 rtx lab = gen_label_rtx ();
5560 int off = (GP_ARG_NUM_REG * reg_size) + ((fregno - FP_ARG_MIN_REG)
5561 * UNITS_PER_FP_WORD);
5564 (gen_rtx_SET (VOIDmode,
5566 gen_rtx_IF_THEN_ELSE (VOIDmode,
5567 gen_rtx_NE (VOIDmode, cr1,
5569 gen_rtx_LABEL_REF (VOIDmode, lab),
5573 fregno <= FP_ARG_V4_MAX_REG && nregs < cfun->va_list_fpr_size;
5574 fregno++, off += UNITS_PER_FP_WORD, nregs++)
5576 mem = gen_rtx_MEM (DFmode, plus_constant (save_area, off));
5577 MEM_NOTRAP_P (mem) = 1;
5578 set_mem_alias_set (mem, set);
5579 set_mem_align (mem, GET_MODE_ALIGNMENT (DFmode));
5580 emit_move_insn (mem, gen_rtx_REG (DFmode, fregno));
5587 /* Create the va_list data type. */
5590 rs6000_build_builtin_va_list (void)
5592 tree f_gpr, f_fpr, f_res, f_ovf, f_sav, record, type_decl;
5594 /* For AIX, prefer 'char *' because that's what the system
5595 header files like. */
5596 if (DEFAULT_ABI != ABI_V4)
5597 return build_pointer_type (char_type_node);
5599 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
5600 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
5602 f_gpr = build_decl (FIELD_DECL, get_identifier ("gpr"),
5603 unsigned_char_type_node);
5604 f_fpr = build_decl (FIELD_DECL, get_identifier ("fpr"),
5605 unsigned_char_type_node);
5606 /* Give the two bytes of padding a name, so that -Wpadded won't warn on
5608 f_res = build_decl (FIELD_DECL, get_identifier ("reserved"),
5609 short_unsigned_type_node);
5610 f_ovf = build_decl (FIELD_DECL, get_identifier ("overflow_arg_area"),
5612 f_sav = build_decl (FIELD_DECL, get_identifier ("reg_save_area"),
5615 va_list_gpr_counter_field = f_gpr;
5616 va_list_fpr_counter_field = f_fpr;
5618 DECL_FIELD_CONTEXT (f_gpr) = record;
5619 DECL_FIELD_CONTEXT (f_fpr) = record;
5620 DECL_FIELD_CONTEXT (f_res) = record;
5621 DECL_FIELD_CONTEXT (f_ovf) = record;
5622 DECL_FIELD_CONTEXT (f_sav) = record;
5624 TREE_CHAIN (record) = type_decl;
5625 TYPE_NAME (record) = type_decl;
5626 TYPE_FIELDS (record) = f_gpr;
5627 TREE_CHAIN (f_gpr) = f_fpr;
5628 TREE_CHAIN (f_fpr) = f_res;
5629 TREE_CHAIN (f_res) = f_ovf;
5630 TREE_CHAIN (f_ovf) = f_sav;
5632 layout_type (record);
5634 /* The correct type is an array type of one element. */
5635 return build_array_type (record, build_index_type (size_zero_node));
5638 /* Implement va_start. */
5641 rs6000_va_start (tree valist, rtx nextarg)
5643 HOST_WIDE_INT words, n_gpr, n_fpr;
5644 tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
5645 tree gpr, fpr, ovf, sav, t;
5647 /* Only SVR4 needs something special. */
5648 if (DEFAULT_ABI != ABI_V4)
5650 std_expand_builtin_va_start (valist, nextarg);
5654 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
5655 f_fpr = TREE_CHAIN (f_gpr);
5656 f_res = TREE_CHAIN (f_fpr);
5657 f_ovf = TREE_CHAIN (f_res);
5658 f_sav = TREE_CHAIN (f_ovf);
5660 valist = build_va_arg_indirect_ref (valist);
5661 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
5662 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr, NULL_TREE);
5663 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf, NULL_TREE);
5664 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav, NULL_TREE);
5666 /* Count number of gp and fp argument registers used. */
5667 words = current_function_args_info.words;
5668 n_gpr = MIN (current_function_args_info.sysv_gregno - GP_ARG_MIN_REG,
5670 n_fpr = MIN (current_function_args_info.fregno - FP_ARG_MIN_REG,
5673 if (TARGET_DEBUG_ARG)
5674 fprintf (stderr, "va_start: words = "HOST_WIDE_INT_PRINT_DEC", n_gpr = "
5675 HOST_WIDE_INT_PRINT_DEC", n_fpr = "HOST_WIDE_INT_PRINT_DEC"\n",
5676 words, n_gpr, n_fpr);
5678 if (cfun->va_list_gpr_size)
5680 t = build (MODIFY_EXPR, TREE_TYPE (gpr), gpr,
5681 build_int_cst (NULL_TREE, n_gpr));
5682 TREE_SIDE_EFFECTS (t) = 1;
5683 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5686 if (cfun->va_list_fpr_size)
5688 t = build (MODIFY_EXPR, TREE_TYPE (fpr), fpr,
5689 build_int_cst (NULL_TREE, n_fpr));
5690 TREE_SIDE_EFFECTS (t) = 1;
5691 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5694 /* Find the overflow area. */
5695 t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
5697 t = build (PLUS_EXPR, TREE_TYPE (ovf), t,
5698 build_int_cst (NULL_TREE, words * UNITS_PER_WORD));
5699 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
5700 TREE_SIDE_EFFECTS (t) = 1;
5701 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5703 /* If there were no va_arg invocations, don't set up the register
5705 if (!cfun->va_list_gpr_size
5706 && !cfun->va_list_fpr_size
5707 && n_gpr < GP_ARG_NUM_REG
5708 && n_fpr < FP_ARG_V4_MAX_REG)
5711 /* Find the register save area. */
5712 t = make_tree (TREE_TYPE (sav), virtual_stack_vars_rtx);
5713 if (cfun->machine->varargs_save_offset)
5714 t = build (PLUS_EXPR, TREE_TYPE (sav), t,
5715 build_int_cst (NULL_TREE, cfun->machine->varargs_save_offset));
5716 t = build (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
5717 TREE_SIDE_EFFECTS (t) = 1;
5718 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5721 /* Implement va_arg. */
5724 rs6000_gimplify_va_arg (tree valist, tree type, tree *pre_p, tree *post_p)
5726 tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
5727 tree gpr, fpr, ovf, sav, reg, t, u;
5728 int size, rsize, n_reg, sav_ofs, sav_scale;
5729 tree lab_false, lab_over, addr;
5731 tree ptrtype = build_pointer_type (type);
5733 if (pass_by_reference (NULL, TYPE_MODE (type), type, false))
5735 t = rs6000_gimplify_va_arg (valist, ptrtype, pre_p, post_p);
5736 return build_va_arg_indirect_ref (t);
5739 if (DEFAULT_ABI != ABI_V4)
5741 if (targetm.calls.split_complex_arg && TREE_CODE (type) == COMPLEX_TYPE)
5743 tree elem_type = TREE_TYPE (type);
5744 enum machine_mode elem_mode = TYPE_MODE (elem_type);
5745 int elem_size = GET_MODE_SIZE (elem_mode);
5747 if (elem_size < UNITS_PER_WORD)
5749 tree real_part, imag_part;
5750 tree post = NULL_TREE;
5752 real_part = rs6000_gimplify_va_arg (valist, elem_type, pre_p,
5754 /* Copy the value into a temporary, lest the formal temporary
5755 be reused out from under us. */
5756 real_part = get_initialized_tmp_var (real_part, pre_p, &post);
5757 append_to_statement_list (post, pre_p);
5759 imag_part = rs6000_gimplify_va_arg (valist, elem_type, pre_p,
5762 return build (COMPLEX_EXPR, type, real_part, imag_part);
5766 return std_gimplify_va_arg_expr (valist, type, pre_p, post_p);
5769 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
5770 f_fpr = TREE_CHAIN (f_gpr);
5771 f_res = TREE_CHAIN (f_fpr);
5772 f_ovf = TREE_CHAIN (f_res);
5773 f_sav = TREE_CHAIN (f_ovf);
5775 valist = build_va_arg_indirect_ref (valist);
5776 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
5777 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr, NULL_TREE);
5778 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf, NULL_TREE);
5779 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav, NULL_TREE);
5781 size = int_size_in_bytes (type);
5782 rsize = (size + 3) / 4;
5785 if (TARGET_HARD_FLOAT && TARGET_FPRS
5786 && (TYPE_MODE (type) == SFmode || TYPE_MODE (type) == DFmode))
5788 /* FP args go in FP registers, if present. */
5793 if (TYPE_MODE (type) == DFmode)
5798 /* Otherwise into GP registers. */
5807 /* Pull the value out of the saved registers.... */
5810 addr = create_tmp_var (ptr_type_node, "addr");
5811 DECL_POINTER_ALIAS_SET (addr) = get_varargs_alias_set ();
5813 /* AltiVec vectors never go in registers when -mabi=altivec. */
5814 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (TYPE_MODE (type)))
5818 lab_false = create_artificial_label ();
5819 lab_over = create_artificial_label ();
5821 /* Long long and SPE vectors are aligned in the registers.
5822 As are any other 2 gpr item such as complex int due to a
5823 historical mistake. */
5827 u = build2 (BIT_AND_EXPR, TREE_TYPE (reg), reg,
5828 size_int (n_reg - 1));
5829 u = build2 (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg, u);
5832 t = fold_convert (TREE_TYPE (reg), size_int (8 - n_reg + 1));
5833 t = build2 (GE_EXPR, boolean_type_node, u, t);
5834 u = build1 (GOTO_EXPR, void_type_node, lab_false);
5835 t = build3 (COND_EXPR, void_type_node, t, u, NULL_TREE);
5836 gimplify_and_add (t, pre_p);
5840 t = build2 (PLUS_EXPR, ptr_type_node, sav, size_int (sav_ofs));
5842 u = build2 (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg, size_int (n_reg));
5843 u = build1 (CONVERT_EXPR, integer_type_node, u);
5844 u = build2 (MULT_EXPR, integer_type_node, u, size_int (sav_scale));
5845 t = build2 (PLUS_EXPR, ptr_type_node, t, u);
5847 t = build2 (MODIFY_EXPR, void_type_node, addr, t);
5848 gimplify_and_add (t, pre_p);
5850 t = build1 (GOTO_EXPR, void_type_node, lab_over);
5851 gimplify_and_add (t, pre_p);
5853 t = build1 (LABEL_EXPR, void_type_node, lab_false);
5854 append_to_statement_list (t, pre_p);
5858 /* Ensure that we don't find any more args in regs.
5859 Alignment has taken care of the n_reg == 2 case. */
5860 t = build (MODIFY_EXPR, TREE_TYPE (reg), reg, size_int (8));
5861 gimplify_and_add (t, pre_p);
5865 /* ... otherwise out of the overflow area. */
5867 /* Care for on-stack alignment if needed. */
5871 t = build2 (PLUS_EXPR, TREE_TYPE (t), t, size_int (align - 1));
5872 t = build2 (BIT_AND_EXPR, TREE_TYPE (t), t,
5873 build_int_cst (NULL_TREE, -align));
5875 gimplify_expr (&t, pre_p, NULL, is_gimple_val, fb_rvalue);
5877 u = build2 (MODIFY_EXPR, void_type_node, addr, t);
5878 gimplify_and_add (u, pre_p);
5880 t = build2 (PLUS_EXPR, TREE_TYPE (t), t, size_int (size));
5881 t = build2 (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
5882 gimplify_and_add (t, pre_p);
5886 t = build1 (LABEL_EXPR, void_type_node, lab_over);
5887 append_to_statement_list (t, pre_p);
5890 addr = fold_convert (ptrtype, addr);
5891 return build_va_arg_indirect_ref (addr);
5897 def_builtin (int mask, const char *name, tree type, int code)
5899 if (mask & target_flags)
5901 if (rs6000_builtin_decls[code])
5904 rs6000_builtin_decls[code] =
5905 lang_hooks.builtin_function (name, type, code, BUILT_IN_MD,
5910 /* Simple ternary operations: VECd = foo (VECa, VECb, VECc). */
5912 static const struct builtin_description bdesc_3arg[] =
5914 { MASK_ALTIVEC, CODE_FOR_altivec_vmaddfp, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP },
5915 { MASK_ALTIVEC, CODE_FOR_altivec_vmhaddshs, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS },
5916 { MASK_ALTIVEC, CODE_FOR_altivec_vmhraddshs, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS },
5917 { MASK_ALTIVEC, CODE_FOR_altivec_vmladduhm, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM},
5918 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumubm, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM },
5919 { MASK_ALTIVEC, CODE_FOR_altivec_vmsummbm, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM },
5920 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhm, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM },
5921 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshm, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM },
5922 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhs, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS },
5923 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshs, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS },
5924 { MASK_ALTIVEC, CODE_FOR_altivec_vnmsubfp, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP },
5925 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v4sf, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF },
5926 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v4si, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI },
5927 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v8hi, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI },
5928 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v16qi, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI },
5929 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v4sf, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF },
5930 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v4si, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI },
5931 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v8hi, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI },
5932 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v16qi, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI },
5933 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v16qi, "__builtin_altivec_vsldoi_16qi", ALTIVEC_BUILTIN_VSLDOI_16QI },
5934 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v8hi, "__builtin_altivec_vsldoi_8hi", ALTIVEC_BUILTIN_VSLDOI_8HI },
5935 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v4si, "__builtin_altivec_vsldoi_4si", ALTIVEC_BUILTIN_VSLDOI_4SI },
5936 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v4sf, "__builtin_altivec_vsldoi_4sf", ALTIVEC_BUILTIN_VSLDOI_4SF },
5938 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_madd", ALTIVEC_BUILTIN_VEC_MADD },
5939 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_madds", ALTIVEC_BUILTIN_VEC_MADDS },
5940 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mladd", ALTIVEC_BUILTIN_VEC_MLADD },
5941 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mradds", ALTIVEC_BUILTIN_VEC_MRADDS },
5942 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_msum", ALTIVEC_BUILTIN_VEC_MSUM },
5943 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumshm", ALTIVEC_BUILTIN_VEC_VMSUMSHM },
5944 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumuhm", ALTIVEC_BUILTIN_VEC_VMSUMUHM },
5945 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsummbm", ALTIVEC_BUILTIN_VEC_VMSUMMBM },
5946 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumubm", ALTIVEC_BUILTIN_VEC_VMSUMUBM },
5947 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_msums", ALTIVEC_BUILTIN_VEC_MSUMS },
5948 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumshs", ALTIVEC_BUILTIN_VEC_VMSUMSHS },
5949 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumuhs", ALTIVEC_BUILTIN_VEC_VMSUMUHS },
5950 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_nmsub", ALTIVEC_BUILTIN_VEC_NMSUB },
5951 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_perm", ALTIVEC_BUILTIN_VEC_PERM },
5952 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sel", ALTIVEC_BUILTIN_VEC_SEL },
5955 /* DST operations: void foo (void *, const int, const char). */
5957 static const struct builtin_description bdesc_dst[] =
5959 { MASK_ALTIVEC, CODE_FOR_altivec_dst, "__builtin_altivec_dst", ALTIVEC_BUILTIN_DST },
5960 { MASK_ALTIVEC, CODE_FOR_altivec_dstt, "__builtin_altivec_dstt", ALTIVEC_BUILTIN_DSTT },
5961 { MASK_ALTIVEC, CODE_FOR_altivec_dstst, "__builtin_altivec_dstst", ALTIVEC_BUILTIN_DSTST },
5962 { MASK_ALTIVEC, CODE_FOR_altivec_dststt, "__builtin_altivec_dststt", ALTIVEC_BUILTIN_DSTSTT },
5964 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dst", ALTIVEC_BUILTIN_VEC_DST },
5965 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dstt", ALTIVEC_BUILTIN_VEC_DSTT },
5966 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dstst", ALTIVEC_BUILTIN_VEC_DSTST },
5967 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dststt", ALTIVEC_BUILTIN_VEC_DSTSTT }
5970 /* Simple binary operations: VECc = foo (VECa, VECb). */
5972 static struct builtin_description bdesc_2arg[] =
5974 { MASK_ALTIVEC, CODE_FOR_addv16qi3, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM },
5975 { MASK_ALTIVEC, CODE_FOR_addv8hi3, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM },
5976 { MASK_ALTIVEC, CODE_FOR_addv4si3, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM },
5977 { MASK_ALTIVEC, CODE_FOR_addv4sf3, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP },
5978 { MASK_ALTIVEC, CODE_FOR_altivec_vaddcuw, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW },
5979 { MASK_ALTIVEC, CODE_FOR_altivec_vaddubs, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS },
5980 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsbs, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS },
5981 { MASK_ALTIVEC, CODE_FOR_altivec_vadduhs, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS },
5982 { MASK_ALTIVEC, CODE_FOR_altivec_vaddshs, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS },
5983 { MASK_ALTIVEC, CODE_FOR_altivec_vadduws, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS },
5984 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsws, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS },
5985 { MASK_ALTIVEC, CODE_FOR_andv4si3, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND },
5986 { MASK_ALTIVEC, CODE_FOR_andcv4si3, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC },
5987 { MASK_ALTIVEC, CODE_FOR_altivec_vavgub, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB },
5988 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsb, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB },
5989 { MASK_ALTIVEC, CODE_FOR_altivec_vavguh, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH },
5990 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsh, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH },
5991 { MASK_ALTIVEC, CODE_FOR_altivec_vavguw, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW },
5992 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsw, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW },
5993 { MASK_ALTIVEC, CODE_FOR_altivec_vcfux, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX },
5994 { MASK_ALTIVEC, CODE_FOR_altivec_vcfsx, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX },
5995 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpbfp, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP },
5996 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequb, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB },
5997 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequh, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH },
5998 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequw, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW },
5999 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpeqfp, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP },
6000 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgefp, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP },
6001 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtub, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB },
6002 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsb, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB },
6003 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuh, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH },
6004 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsh, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH },
6005 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuw, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW },
6006 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsw, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW },
6007 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtfp, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP },
6008 { MASK_ALTIVEC, CODE_FOR_altivec_vctsxs, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS },
6009 { MASK_ALTIVEC, CODE_FOR_altivec_vctuxs, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS },
6010 { MASK_ALTIVEC, CODE_FOR_umaxv16qi3, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB },
6011 { MASK_ALTIVEC, CODE_FOR_smaxv16qi3, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB },
6012 { MASK_ALTIVEC, CODE_FOR_umaxv8hi3, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH },
6013 { MASK_ALTIVEC, CODE_FOR_smaxv8hi3, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH },
6014 { MASK_ALTIVEC, CODE_FOR_umaxv4si3, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW },
6015 { MASK_ALTIVEC, CODE_FOR_smaxv4si3, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW },
6016 { MASK_ALTIVEC, CODE_FOR_smaxv4sf3, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP },
6017 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghb, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB },
6018 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghh, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH },
6019 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghw, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW },
6020 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglb, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB },
6021 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglh, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH },
6022 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglw, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW },
6023 { MASK_ALTIVEC, CODE_FOR_uminv16qi3, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB },
6024 { MASK_ALTIVEC, CODE_FOR_sminv16qi3, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB },
6025 { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH },
6026 { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH },
6027 { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW },
6028 { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW },
6029 { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP },
6030 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleub, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB },
6031 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesb, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB },
6032 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleuh, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH },
6033 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesh, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH },
6034 { MASK_ALTIVEC, CODE_FOR_altivec_vmuloub, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB },
6035 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosb, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB },
6036 { MASK_ALTIVEC, CODE_FOR_altivec_vmulouh, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH },
6037 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosh, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH },
6038 { MASK_ALTIVEC, CODE_FOR_altivec_norv4si3, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR },
6039 { MASK_ALTIVEC, CODE_FOR_iorv4si3, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR },
6040 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhum, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM },
6041 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwum, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM },
6042 { MASK_ALTIVEC, CODE_FOR_altivec_vpkpx, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX },
6043 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshss, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS },
6044 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswss, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS },
6045 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhus, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS },
6046 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshus, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS },
6047 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwus, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS },
6048 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswus, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS },
6049 { MASK_ALTIVEC, CODE_FOR_altivec_vrlb, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB },
6050 { MASK_ALTIVEC, CODE_FOR_altivec_vrlh, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH },
6051 { MASK_ALTIVEC, CODE_FOR_altivec_vrlw, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW },
6052 { MASK_ALTIVEC, CODE_FOR_altivec_vslb, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB },
6053 { MASK_ALTIVEC, CODE_FOR_altivec_vslh, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH },
6054 { MASK_ALTIVEC, CODE_FOR_altivec_vslw, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW },
6055 { MASK_ALTIVEC, CODE_FOR_altivec_vsl, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL },
6056 { MASK_ALTIVEC, CODE_FOR_altivec_vslo, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO },
6057 { MASK_ALTIVEC, CODE_FOR_altivec_vspltb, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB },
6058 { MASK_ALTIVEC, CODE_FOR_altivec_vsplth, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH },
6059 { MASK_ALTIVEC, CODE_FOR_altivec_vspltw, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW },
6060 { MASK_ALTIVEC, CODE_FOR_lshrv16qi3, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB },
6061 { MASK_ALTIVEC, CODE_FOR_lshrv8hi3, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH },
6062 { MASK_ALTIVEC, CODE_FOR_lshrv4si3, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW },
6063 { MASK_ALTIVEC, CODE_FOR_ashrv16qi3, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB },
6064 { MASK_ALTIVEC, CODE_FOR_ashrv8hi3, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH },
6065 { MASK_ALTIVEC, CODE_FOR_ashrv4si3, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW },
6066 { MASK_ALTIVEC, CODE_FOR_altivec_vsr, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR },
6067 { MASK_ALTIVEC, CODE_FOR_altivec_vsro, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO },
6068 { MASK_ALTIVEC, CODE_FOR_subv16qi3, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM },
6069 { MASK_ALTIVEC, CODE_FOR_subv8hi3, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM },
6070 { MASK_ALTIVEC, CODE_FOR_subv4si3, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM },
6071 { MASK_ALTIVEC, CODE_FOR_subv4sf3, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP },
6072 { MASK_ALTIVEC, CODE_FOR_altivec_vsubcuw, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW },
6073 { MASK_ALTIVEC, CODE_FOR_altivec_vsububs, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS },
6074 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsbs, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS },
6075 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuhs, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS },
6076 { MASK_ALTIVEC, CODE_FOR_altivec_vsubshs, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS },
6077 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuws, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS },
6078 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsws, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS },
6079 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4ubs, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS },
6080 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4sbs, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS },
6081 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4shs, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS },
6082 { MASK_ALTIVEC, CODE_FOR_altivec_vsum2sws, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS },
6083 { MASK_ALTIVEC, CODE_FOR_altivec_vsumsws, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS },
6084 { MASK_ALTIVEC, CODE_FOR_xorv4si3, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR },
6086 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_add", ALTIVEC_BUILTIN_VEC_ADD },
6087 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddfp", ALTIVEC_BUILTIN_VEC_VADDFP },
6088 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduwm", ALTIVEC_BUILTIN_VEC_VADDUWM },
6089 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduhm", ALTIVEC_BUILTIN_VEC_VADDUHM },
6090 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddubm", ALTIVEC_BUILTIN_VEC_VADDUBM },
6091 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_addc", ALTIVEC_BUILTIN_VEC_ADDC },
6092 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_adds", ALTIVEC_BUILTIN_VEC_ADDS },
6093 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddsws", ALTIVEC_BUILTIN_VEC_VADDSWS },
6094 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduws", ALTIVEC_BUILTIN_VEC_VADDUWS },
6095 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddshs", ALTIVEC_BUILTIN_VEC_VADDSHS },
6096 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduhs", ALTIVEC_BUILTIN_VEC_VADDUHS },
6097 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddsbs", ALTIVEC_BUILTIN_VEC_VADDSBS },
6098 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddubs", ALTIVEC_BUILTIN_VEC_VADDUBS },
6099 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_and", ALTIVEC_BUILTIN_VEC_AND },
6100 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_andc", ALTIVEC_BUILTIN_VEC_ANDC },
6101 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_avg", ALTIVEC_BUILTIN_VEC_AVG },
6102 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgsw", ALTIVEC_BUILTIN_VEC_VAVGSW },
6103 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavguw", ALTIVEC_BUILTIN_VEC_VAVGUW },
6104 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgsh", ALTIVEC_BUILTIN_VEC_VAVGSH },
6105 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavguh", ALTIVEC_BUILTIN_VEC_VAVGUH },
6106 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgsb", ALTIVEC_BUILTIN_VEC_VAVGSB },
6107 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgub", ALTIVEC_BUILTIN_VEC_VAVGUB },
6108 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpb", ALTIVEC_BUILTIN_VEC_CMPB },
6109 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpeq", ALTIVEC_BUILTIN_VEC_CMPEQ },
6110 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpeqfp", ALTIVEC_BUILTIN_VEC_VCMPEQFP },
6111 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpequw", ALTIVEC_BUILTIN_VEC_VCMPEQUW },
6112 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpequh", ALTIVEC_BUILTIN_VEC_VCMPEQUH },
6113 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpequb", ALTIVEC_BUILTIN_VEC_VCMPEQUB },
6114 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpge", ALTIVEC_BUILTIN_VEC_CMPGE },
6115 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpgt", ALTIVEC_BUILTIN_VEC_CMPGT },
6116 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtfp", ALTIVEC_BUILTIN_VEC_VCMPGTFP },
6117 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtsw", ALTIVEC_BUILTIN_VEC_VCMPGTSW },
6118 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtuw", ALTIVEC_BUILTIN_VEC_VCMPGTUW },
6119 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtsh", ALTIVEC_BUILTIN_VEC_VCMPGTSH },
6120 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtuh", ALTIVEC_BUILTIN_VEC_VCMPGTUH },
6121 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtsb", ALTIVEC_BUILTIN_VEC_VCMPGTSB },
6122 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtub", ALTIVEC_BUILTIN_VEC_VCMPGTUB },
6123 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmple", ALTIVEC_BUILTIN_VEC_CMPLE },
6124 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmplt", ALTIVEC_BUILTIN_VEC_CMPLT },
6125 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_max", ALTIVEC_BUILTIN_VEC_MAX },
6126 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxfp", ALTIVEC_BUILTIN_VEC_VMAXFP },
6127 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxsw", ALTIVEC_BUILTIN_VEC_VMAXSW },
6128 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxuw", ALTIVEC_BUILTIN_VEC_VMAXUW },
6129 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxsh", ALTIVEC_BUILTIN_VEC_VMAXSH },
6130 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxuh", ALTIVEC_BUILTIN_VEC_VMAXUH },
6131 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxsb", ALTIVEC_BUILTIN_VEC_VMAXSB },
6132 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxub", ALTIVEC_BUILTIN_VEC_VMAXUB },
6133 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mergeh", ALTIVEC_BUILTIN_VEC_MERGEH },
6134 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrghw", ALTIVEC_BUILTIN_VEC_VMRGHW },
6135 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrghh", ALTIVEC_BUILTIN_VEC_VMRGHH },
6136 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrghb", ALTIVEC_BUILTIN_VEC_VMRGHB },
6137 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mergel", ALTIVEC_BUILTIN_VEC_MERGEL },
6138 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrglw", ALTIVEC_BUILTIN_VEC_VMRGLW },
6139 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrglh", ALTIVEC_BUILTIN_VEC_VMRGLH },
6140 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrglb", ALTIVEC_BUILTIN_VEC_VMRGLB },
6141 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_min", ALTIVEC_BUILTIN_VEC_MIN },
6142 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminfp", ALTIVEC_BUILTIN_VEC_VMINFP },
6143 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminsw", ALTIVEC_BUILTIN_VEC_VMINSW },
6144 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminuw", ALTIVEC_BUILTIN_VEC_VMINUW },
6145 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminsh", ALTIVEC_BUILTIN_VEC_VMINSH },
6146 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminuh", ALTIVEC_BUILTIN_VEC_VMINUH },
6147 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminsb", ALTIVEC_BUILTIN_VEC_VMINSB },
6148 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminub", ALTIVEC_BUILTIN_VEC_VMINUB },
6149 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mule", ALTIVEC_BUILTIN_VEC_MULE },
6150 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmuleub", ALTIVEC_BUILTIN_VEC_VMULEUB },
6151 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulesb", ALTIVEC_BUILTIN_VEC_VMULESB },
6152 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmuleuh", ALTIVEC_BUILTIN_VEC_VMULEUH },
6153 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulesh", ALTIVEC_BUILTIN_VEC_VMULESH },
6154 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mulo", ALTIVEC_BUILTIN_VEC_MULO },
6155 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulosh", ALTIVEC_BUILTIN_VEC_VMULOSH },
6156 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulouh", ALTIVEC_BUILTIN_VEC_VMULOUH },
6157 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulosb", ALTIVEC_BUILTIN_VEC_VMULOSB },
6158 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmuloub", ALTIVEC_BUILTIN_VEC_VMULOUB },
6159 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_nor", ALTIVEC_BUILTIN_VEC_NOR },
6160 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_or", ALTIVEC_BUILTIN_VEC_OR },
6161 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_pack", ALTIVEC_BUILTIN_VEC_PACK },
6162 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuwum", ALTIVEC_BUILTIN_VEC_VPKUWUM },
6163 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuhum", ALTIVEC_BUILTIN_VEC_VPKUHUM },
6164 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_packpx", ALTIVEC_BUILTIN_VEC_PACKPX },
6165 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_packs", ALTIVEC_BUILTIN_VEC_PACKS },
6166 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkswss", ALTIVEC_BUILTIN_VEC_VPKSWSS },
6167 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuwus", ALTIVEC_BUILTIN_VEC_VPKUWUS },
6168 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkshss", ALTIVEC_BUILTIN_VEC_VPKSHSS },
6169 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuhus", ALTIVEC_BUILTIN_VEC_VPKUHUS },
6170 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_packsu", ALTIVEC_BUILTIN_VEC_PACKSU },
6171 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkswus", ALTIVEC_BUILTIN_VEC_VPKSWUS },
6172 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkshus", ALTIVEC_BUILTIN_VEC_VPKSHUS },
6173 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_rl", ALTIVEC_BUILTIN_VEC_RL },
6174 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vrlw", ALTIVEC_BUILTIN_VEC_VRLW },
6175 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vrlh", ALTIVEC_BUILTIN_VEC_VRLH },
6176 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vrlb", ALTIVEC_BUILTIN_VEC_VRLB },
6177 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sl", ALTIVEC_BUILTIN_VEC_SL },
6178 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vslw", ALTIVEC_BUILTIN_VEC_VSLW },
6179 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vslh", ALTIVEC_BUILTIN_VEC_VSLH },
6180 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vslb", ALTIVEC_BUILTIN_VEC_VSLB },
6181 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sll", ALTIVEC_BUILTIN_VEC_SLL },
6182 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_slo", ALTIVEC_BUILTIN_VEC_SLO },
6183 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sr", ALTIVEC_BUILTIN_VEC_SR },
6184 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrw", ALTIVEC_BUILTIN_VEC_VSRW },
6185 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrh", ALTIVEC_BUILTIN_VEC_VSRH },
6186 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrb", ALTIVEC_BUILTIN_VEC_VSRB },
6187 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sra", ALTIVEC_BUILTIN_VEC_SRA },
6188 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsraw", ALTIVEC_BUILTIN_VEC_VSRAW },
6189 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrah", ALTIVEC_BUILTIN_VEC_VSRAH },
6190 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrab", ALTIVEC_BUILTIN_VEC_VSRAB },
6191 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_srl", ALTIVEC_BUILTIN_VEC_SRL },
6192 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sro", ALTIVEC_BUILTIN_VEC_SRO },
6193 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sub", ALTIVEC_BUILTIN_VEC_SUB },
6194 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubfp", ALTIVEC_BUILTIN_VEC_VSUBFP },
6195 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuwm", ALTIVEC_BUILTIN_VEC_VSUBUWM },
6196 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuhm", ALTIVEC_BUILTIN_VEC_VSUBUHM },
6197 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsububm", ALTIVEC_BUILTIN_VEC_VSUBUBM },
6198 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_subc", ALTIVEC_BUILTIN_VEC_SUBC },
6199 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_subs", ALTIVEC_BUILTIN_VEC_SUBS },
6200 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubsws", ALTIVEC_BUILTIN_VEC_VSUBSWS },
6201 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuws", ALTIVEC_BUILTIN_VEC_VSUBUWS },
6202 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubshs", ALTIVEC_BUILTIN_VEC_VSUBSHS },
6203 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuhs", ALTIVEC_BUILTIN_VEC_VSUBUHS },
6204 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubsbs", ALTIVEC_BUILTIN_VEC_VSUBSBS },
6205 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsububs", ALTIVEC_BUILTIN_VEC_VSUBUBS },
6206 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sum4s", ALTIVEC_BUILTIN_VEC_SUM4S },
6207 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsum4shs", ALTIVEC_BUILTIN_VEC_VSUM4SHS },
6208 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsum4sbs", ALTIVEC_BUILTIN_VEC_VSUM4SBS },
6209 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsum4ubs", ALTIVEC_BUILTIN_VEC_VSUM4UBS },
6210 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sum2s", ALTIVEC_BUILTIN_VEC_SUM2S },
6211 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sums", ALTIVEC_BUILTIN_VEC_SUMS },
6212 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_xor", ALTIVEC_BUILTIN_VEC_XOR },
6214 /* Place holder, leave as first spe builtin. */
6215 { 0, CODE_FOR_spe_evaddw, "__builtin_spe_evaddw", SPE_BUILTIN_EVADDW },
6216 { 0, CODE_FOR_spe_evand, "__builtin_spe_evand", SPE_BUILTIN_EVAND },
6217 { 0, CODE_FOR_spe_evandc, "__builtin_spe_evandc", SPE_BUILTIN_EVANDC },
6218 { 0, CODE_FOR_spe_evdivws, "__builtin_spe_evdivws", SPE_BUILTIN_EVDIVWS },
6219 { 0, CODE_FOR_spe_evdivwu, "__builtin_spe_evdivwu", SPE_BUILTIN_EVDIVWU },
6220 { 0, CODE_FOR_spe_eveqv, "__builtin_spe_eveqv", SPE_BUILTIN_EVEQV },
6221 { 0, CODE_FOR_spe_evfsadd, "__builtin_spe_evfsadd", SPE_BUILTIN_EVFSADD },
6222 { 0, CODE_FOR_spe_evfsdiv, "__builtin_spe_evfsdiv", SPE_BUILTIN_EVFSDIV },
6223 { 0, CODE_FOR_spe_evfsmul, "__builtin_spe_evfsmul", SPE_BUILTIN_EVFSMUL },
6224 { 0, CODE_FOR_spe_evfssub, "__builtin_spe_evfssub", SPE_BUILTIN_EVFSSUB },
6225 { 0, CODE_FOR_spe_evmergehi, "__builtin_spe_evmergehi", SPE_BUILTIN_EVMERGEHI },
6226 { 0, CODE_FOR_spe_evmergehilo, "__builtin_spe_evmergehilo", SPE_BUILTIN_EVMERGEHILO },
6227 { 0, CODE_FOR_spe_evmergelo, "__builtin_spe_evmergelo", SPE_BUILTIN_EVMERGELO },
6228 { 0, CODE_FOR_spe_evmergelohi, "__builtin_spe_evmergelohi", SPE_BUILTIN_EVMERGELOHI },
6229 { 0, CODE_FOR_spe_evmhegsmfaa, "__builtin_spe_evmhegsmfaa", SPE_BUILTIN_EVMHEGSMFAA },
6230 { 0, CODE_FOR_spe_evmhegsmfan, "__builtin_spe_evmhegsmfan", SPE_BUILTIN_EVMHEGSMFAN },
6231 { 0, CODE_FOR_spe_evmhegsmiaa, "__builtin_spe_evmhegsmiaa", SPE_BUILTIN_EVMHEGSMIAA },
6232 { 0, CODE_FOR_spe_evmhegsmian, "__builtin_spe_evmhegsmian", SPE_BUILTIN_EVMHEGSMIAN },
6233 { 0, CODE_FOR_spe_evmhegumiaa, "__builtin_spe_evmhegumiaa", SPE_BUILTIN_EVMHEGUMIAA },
6234 { 0, CODE_FOR_spe_evmhegumian, "__builtin_spe_evmhegumian", SPE_BUILTIN_EVMHEGUMIAN },
6235 { 0, CODE_FOR_spe_evmhesmf, "__builtin_spe_evmhesmf", SPE_BUILTIN_EVMHESMF },
6236 { 0, CODE_FOR_spe_evmhesmfa, "__builtin_spe_evmhesmfa", SPE_BUILTIN_EVMHESMFA },
6237 { 0, CODE_FOR_spe_evmhesmfaaw, "__builtin_spe_evmhesmfaaw", SPE_BUILTIN_EVMHESMFAAW },
6238 { 0, CODE_FOR_spe_evmhesmfanw, "__builtin_spe_evmhesmfanw", SPE_BUILTIN_EVMHESMFANW },
6239 { 0, CODE_FOR_spe_evmhesmi, "__builtin_spe_evmhesmi", SPE_BUILTIN_EVMHESMI },
6240 { 0, CODE_FOR_spe_evmhesmia, "__builtin_spe_evmhesmia", SPE_BUILTIN_EVMHESMIA },
6241 { 0, CODE_FOR_spe_evmhesmiaaw, "__builtin_spe_evmhesmiaaw", SPE_BUILTIN_EVMHESMIAAW },
6242 { 0, CODE_FOR_spe_evmhesmianw, "__builtin_spe_evmhesmianw", SPE_BUILTIN_EVMHESMIANW },
6243 { 0, CODE_FOR_spe_evmhessf, "__builtin_spe_evmhessf", SPE_BUILTIN_EVMHESSF },
6244 { 0, CODE_FOR_spe_evmhessfa, "__builtin_spe_evmhessfa", SPE_BUILTIN_EVMHESSFA },
6245 { 0, CODE_FOR_spe_evmhessfaaw, "__builtin_spe_evmhessfaaw", SPE_BUILTIN_EVMHESSFAAW },
6246 { 0, CODE_FOR_spe_evmhessfanw, "__builtin_spe_evmhessfanw", SPE_BUILTIN_EVMHESSFANW },
6247 { 0, CODE_FOR_spe_evmhessiaaw, "__builtin_spe_evmhessiaaw", SPE_BUILTIN_EVMHESSIAAW },
6248 { 0, CODE_FOR_spe_evmhessianw, "__builtin_spe_evmhessianw", SPE_BUILTIN_EVMHESSIANW },
6249 { 0, CODE_FOR_spe_evmheumi, "__builtin_spe_evmheumi", SPE_BUILTIN_EVMHEUMI },
6250 { 0, CODE_FOR_spe_evmheumia, "__builtin_spe_evmheumia", SPE_BUILTIN_EVMHEUMIA },
6251 { 0, CODE_FOR_spe_evmheumiaaw, "__builtin_spe_evmheumiaaw", SPE_BUILTIN_EVMHEUMIAAW },
6252 { 0, CODE_FOR_spe_evmheumianw, "__builtin_spe_evmheumianw", SPE_BUILTIN_EVMHEUMIANW },
6253 { 0, CODE_FOR_spe_evmheusiaaw, "__builtin_spe_evmheusiaaw", SPE_BUILTIN_EVMHEUSIAAW },
6254 { 0, CODE_FOR_spe_evmheusianw, "__builtin_spe_evmheusianw", SPE_BUILTIN_EVMHEUSIANW },
6255 { 0, CODE_FOR_spe_evmhogsmfaa, "__builtin_spe_evmhogsmfaa", SPE_BUILTIN_EVMHOGSMFAA },
6256 { 0, CODE_FOR_spe_evmhogsmfan, "__builtin_spe_evmhogsmfan", SPE_BUILTIN_EVMHOGSMFAN },
6257 { 0, CODE_FOR_spe_evmhogsmiaa, "__builtin_spe_evmhogsmiaa", SPE_BUILTIN_EVMHOGSMIAA },
6258 { 0, CODE_FOR_spe_evmhogsmian, "__builtin_spe_evmhogsmian", SPE_BUILTIN_EVMHOGSMIAN },
6259 { 0, CODE_FOR_spe_evmhogumiaa, "__builtin_spe_evmhogumiaa", SPE_BUILTIN_EVMHOGUMIAA },
6260 { 0, CODE_FOR_spe_evmhogumian, "__builtin_spe_evmhogumian", SPE_BUILTIN_EVMHOGUMIAN },
6261 { 0, CODE_FOR_spe_evmhosmf, "__builtin_spe_evmhosmf", SPE_BUILTIN_EVMHOSMF },
6262 { 0, CODE_FOR_spe_evmhosmfa, "__builtin_spe_evmhosmfa", SPE_BUILTIN_EVMHOSMFA },
6263 { 0, CODE_FOR_spe_evmhosmfaaw, "__builtin_spe_evmhosmfaaw", SPE_BUILTIN_EVMHOSMFAAW },
6264 { 0, CODE_FOR_spe_evmhosmfanw, "__builtin_spe_evmhosmfanw", SPE_BUILTIN_EVMHOSMFANW },
6265 { 0, CODE_FOR_spe_evmhosmi, "__builtin_spe_evmhosmi", SPE_BUILTIN_EVMHOSMI },
6266 { 0, CODE_FOR_spe_evmhosmia, "__builtin_spe_evmhosmia", SPE_BUILTIN_EVMHOSMIA },
6267 { 0, CODE_FOR_spe_evmhosmiaaw, "__builtin_spe_evmhosmiaaw", SPE_BUILTIN_EVMHOSMIAAW },
6268 { 0, CODE_FOR_spe_evmhosmianw, "__builtin_spe_evmhosmianw", SPE_BUILTIN_EVMHOSMIANW },
6269 { 0, CODE_FOR_spe_evmhossf, "__builtin_spe_evmhossf", SPE_BUILTIN_EVMHOSSF },
6270 { 0, CODE_FOR_spe_evmhossfa, "__builtin_spe_evmhossfa", SPE_BUILTIN_EVMHOSSFA },
6271 { 0, CODE_FOR_spe_evmhossfaaw, "__builtin_spe_evmhossfaaw", SPE_BUILTIN_EVMHOSSFAAW },
6272 { 0, CODE_FOR_spe_evmhossfanw, "__builtin_spe_evmhossfanw", SPE_BUILTIN_EVMHOSSFANW },
6273 { 0, CODE_FOR_spe_evmhossiaaw, "__builtin_spe_evmhossiaaw", SPE_BUILTIN_EVMHOSSIAAW },
6274 { 0, CODE_FOR_spe_evmhossianw, "__builtin_spe_evmhossianw", SPE_BUILTIN_EVMHOSSIANW },
6275 { 0, CODE_FOR_spe_evmhoumi, "__builtin_spe_evmhoumi", SPE_BUILTIN_EVMHOUMI },
6276 { 0, CODE_FOR_spe_evmhoumia, "__builtin_spe_evmhoumia", SPE_BUILTIN_EVMHOUMIA },
6277 { 0, CODE_FOR_spe_evmhoumiaaw, "__builtin_spe_evmhoumiaaw", SPE_BUILTIN_EVMHOUMIAAW },
6278 { 0, CODE_FOR_spe_evmhoumianw, "__builtin_spe_evmhoumianw", SPE_BUILTIN_EVMHOUMIANW },
6279 { 0, CODE_FOR_spe_evmhousiaaw, "__builtin_spe_evmhousiaaw", SPE_BUILTIN_EVMHOUSIAAW },
6280 { 0, CODE_FOR_spe_evmhousianw, "__builtin_spe_evmhousianw", SPE_BUILTIN_EVMHOUSIANW },
6281 { 0, CODE_FOR_spe_evmwhsmf, "__builtin_spe_evmwhsmf", SPE_BUILTIN_EVMWHSMF },
6282 { 0, CODE_FOR_spe_evmwhsmfa, "__builtin_spe_evmwhsmfa", SPE_BUILTIN_EVMWHSMFA },
6283 { 0, CODE_FOR_spe_evmwhsmi, "__builtin_spe_evmwhsmi", SPE_BUILTIN_EVMWHSMI },
6284 { 0, CODE_FOR_spe_evmwhsmia, "__builtin_spe_evmwhsmia", SPE_BUILTIN_EVMWHSMIA },
6285 { 0, CODE_FOR_spe_evmwhssf, "__builtin_spe_evmwhssf", SPE_BUILTIN_EVMWHSSF },
6286 { 0, CODE_FOR_spe_evmwhssfa, "__builtin_spe_evmwhssfa", SPE_BUILTIN_EVMWHSSFA },
6287 { 0, CODE_FOR_spe_evmwhumi, "__builtin_spe_evmwhumi", SPE_BUILTIN_EVMWHUMI },
6288 { 0, CODE_FOR_spe_evmwhumia, "__builtin_spe_evmwhumia", SPE_BUILTIN_EVMWHUMIA },
6289 { 0, CODE_FOR_spe_evmwlsmiaaw, "__builtin_spe_evmwlsmiaaw", SPE_BUILTIN_EVMWLSMIAAW },
6290 { 0, CODE_FOR_spe_evmwlsmianw, "__builtin_spe_evmwlsmianw", SPE_BUILTIN_EVMWLSMIANW },
6291 { 0, CODE_FOR_spe_evmwlssiaaw, "__builtin_spe_evmwlssiaaw", SPE_BUILTIN_EVMWLSSIAAW },
6292 { 0, CODE_FOR_spe_evmwlssianw, "__builtin_spe_evmwlssianw", SPE_BUILTIN_EVMWLSSIANW },
6293 { 0, CODE_FOR_spe_evmwlumi, "__builtin_spe_evmwlumi", SPE_BUILTIN_EVMWLUMI },
6294 { 0, CODE_FOR_spe_evmwlumia, "__builtin_spe_evmwlumia", SPE_BUILTIN_EVMWLUMIA },
6295 { 0, CODE_FOR_spe_evmwlumiaaw, "__builtin_spe_evmwlumiaaw", SPE_BUILTIN_EVMWLUMIAAW },
6296 { 0, CODE_FOR_spe_evmwlumianw, "__builtin_spe_evmwlumianw", SPE_BUILTIN_EVMWLUMIANW },
6297 { 0, CODE_FOR_spe_evmwlusiaaw, "__builtin_spe_evmwlusiaaw", SPE_BUILTIN_EVMWLUSIAAW },
6298 { 0, CODE_FOR_spe_evmwlusianw, "__builtin_spe_evmwlusianw", SPE_BUILTIN_EVMWLUSIANW },
6299 { 0, CODE_FOR_spe_evmwsmf, "__builtin_spe_evmwsmf", SPE_BUILTIN_EVMWSMF },
6300 { 0, CODE_FOR_spe_evmwsmfa, "__builtin_spe_evmwsmfa", SPE_BUILTIN_EVMWSMFA },
6301 { 0, CODE_FOR_spe_evmwsmfaa, "__builtin_spe_evmwsmfaa", SPE_BUILTIN_EVMWSMFAA },
6302 { 0, CODE_FOR_spe_evmwsmfan, "__builtin_spe_evmwsmfan", SPE_BUILTIN_EVMWSMFAN },
6303 { 0, CODE_FOR_spe_evmwsmi, "__builtin_spe_evmwsmi", SPE_BUILTIN_EVMWSMI },
6304 { 0, CODE_FOR_spe_evmwsmia, "__builtin_spe_evmwsmia", SPE_BUILTIN_EVMWSMIA },
6305 { 0, CODE_FOR_spe_evmwsmiaa, "__builtin_spe_evmwsmiaa", SPE_BUILTIN_EVMWSMIAA },
6306 { 0, CODE_FOR_spe_evmwsmian, "__builtin_spe_evmwsmian", SPE_BUILTIN_EVMWSMIAN },
6307 { 0, CODE_FOR_spe_evmwssf, "__builtin_spe_evmwssf", SPE_BUILTIN_EVMWSSF },
6308 { 0, CODE_FOR_spe_evmwssfa, "__builtin_spe_evmwssfa", SPE_BUILTIN_EVMWSSFA },
6309 { 0, CODE_FOR_spe_evmwssfaa, "__builtin_spe_evmwssfaa", SPE_BUILTIN_EVMWSSFAA },
6310 { 0, CODE_FOR_spe_evmwssfan, "__builtin_spe_evmwssfan", SPE_BUILTIN_EVMWSSFAN },
6311 { 0, CODE_FOR_spe_evmwumi, "__builtin_spe_evmwumi", SPE_BUILTIN_EVMWUMI },
6312 { 0, CODE_FOR_spe_evmwumia, "__builtin_spe_evmwumia", SPE_BUILTIN_EVMWUMIA },
6313 { 0, CODE_FOR_spe_evmwumiaa, "__builtin_spe_evmwumiaa", SPE_BUILTIN_EVMWUMIAA },
6314 { 0, CODE_FOR_spe_evmwumian, "__builtin_spe_evmwumian", SPE_BUILTIN_EVMWUMIAN },
6315 { 0, CODE_FOR_spe_evnand, "__builtin_spe_evnand", SPE_BUILTIN_EVNAND },
6316 { 0, CODE_FOR_spe_evnor, "__builtin_spe_evnor", SPE_BUILTIN_EVNOR },
6317 { 0, CODE_FOR_spe_evor, "__builtin_spe_evor", SPE_BUILTIN_EVOR },
6318 { 0, CODE_FOR_spe_evorc, "__builtin_spe_evorc", SPE_BUILTIN_EVORC },
6319 { 0, CODE_FOR_spe_evrlw, "__builtin_spe_evrlw", SPE_BUILTIN_EVRLW },
6320 { 0, CODE_FOR_spe_evslw, "__builtin_spe_evslw", SPE_BUILTIN_EVSLW },
6321 { 0, CODE_FOR_spe_evsrws, "__builtin_spe_evsrws", SPE_BUILTIN_EVSRWS },
6322 { 0, CODE_FOR_spe_evsrwu, "__builtin_spe_evsrwu", SPE_BUILTIN_EVSRWU },
6323 { 0, CODE_FOR_spe_evsubfw, "__builtin_spe_evsubfw", SPE_BUILTIN_EVSUBFW },
6325 /* SPE binary operations expecting a 5-bit unsigned literal. */
6326 { 0, CODE_FOR_spe_evaddiw, "__builtin_spe_evaddiw", SPE_BUILTIN_EVADDIW },
6328 { 0, CODE_FOR_spe_evrlwi, "__builtin_spe_evrlwi", SPE_BUILTIN_EVRLWI },
6329 { 0, CODE_FOR_spe_evslwi, "__builtin_spe_evslwi", SPE_BUILTIN_EVSLWI },
6330 { 0, CODE_FOR_spe_evsrwis, "__builtin_spe_evsrwis", SPE_BUILTIN_EVSRWIS },
6331 { 0, CODE_FOR_spe_evsrwiu, "__builtin_spe_evsrwiu", SPE_BUILTIN_EVSRWIU },
6332 { 0, CODE_FOR_spe_evsubifw, "__builtin_spe_evsubifw", SPE_BUILTIN_EVSUBIFW },
6333 { 0, CODE_FOR_spe_evmwhssfaa, "__builtin_spe_evmwhssfaa", SPE_BUILTIN_EVMWHSSFAA },
6334 { 0, CODE_FOR_spe_evmwhssmaa, "__builtin_spe_evmwhssmaa", SPE_BUILTIN_EVMWHSSMAA },
6335 { 0, CODE_FOR_spe_evmwhsmfaa, "__builtin_spe_evmwhsmfaa", SPE_BUILTIN_EVMWHSMFAA },
6336 { 0, CODE_FOR_spe_evmwhsmiaa, "__builtin_spe_evmwhsmiaa", SPE_BUILTIN_EVMWHSMIAA },
6337 { 0, CODE_FOR_spe_evmwhusiaa, "__builtin_spe_evmwhusiaa", SPE_BUILTIN_EVMWHUSIAA },
6338 { 0, CODE_FOR_spe_evmwhumiaa, "__builtin_spe_evmwhumiaa", SPE_BUILTIN_EVMWHUMIAA },
6339 { 0, CODE_FOR_spe_evmwhssfan, "__builtin_spe_evmwhssfan", SPE_BUILTIN_EVMWHSSFAN },
6340 { 0, CODE_FOR_spe_evmwhssian, "__builtin_spe_evmwhssian", SPE_BUILTIN_EVMWHSSIAN },
6341 { 0, CODE_FOR_spe_evmwhsmfan, "__builtin_spe_evmwhsmfan", SPE_BUILTIN_EVMWHSMFAN },
6342 { 0, CODE_FOR_spe_evmwhsmian, "__builtin_spe_evmwhsmian", SPE_BUILTIN_EVMWHSMIAN },
6343 { 0, CODE_FOR_spe_evmwhusian, "__builtin_spe_evmwhusian", SPE_BUILTIN_EVMWHUSIAN },
6344 { 0, CODE_FOR_spe_evmwhumian, "__builtin_spe_evmwhumian", SPE_BUILTIN_EVMWHUMIAN },
6345 { 0, CODE_FOR_spe_evmwhgssfaa, "__builtin_spe_evmwhgssfaa", SPE_BUILTIN_EVMWHGSSFAA },
6346 { 0, CODE_FOR_spe_evmwhgsmfaa, "__builtin_spe_evmwhgsmfaa", SPE_BUILTIN_EVMWHGSMFAA },
6347 { 0, CODE_FOR_spe_evmwhgsmiaa, "__builtin_spe_evmwhgsmiaa", SPE_BUILTIN_EVMWHGSMIAA },
6348 { 0, CODE_FOR_spe_evmwhgumiaa, "__builtin_spe_evmwhgumiaa", SPE_BUILTIN_EVMWHGUMIAA },
6349 { 0, CODE_FOR_spe_evmwhgssfan, "__builtin_spe_evmwhgssfan", SPE_BUILTIN_EVMWHGSSFAN },
6350 { 0, CODE_FOR_spe_evmwhgsmfan, "__builtin_spe_evmwhgsmfan", SPE_BUILTIN_EVMWHGSMFAN },
6351 { 0, CODE_FOR_spe_evmwhgsmian, "__builtin_spe_evmwhgsmian", SPE_BUILTIN_EVMWHGSMIAN },
6352 { 0, CODE_FOR_spe_evmwhgumian, "__builtin_spe_evmwhgumian", SPE_BUILTIN_EVMWHGUMIAN },
6353 { 0, CODE_FOR_spe_brinc, "__builtin_spe_brinc", SPE_BUILTIN_BRINC },
6355 /* Place-holder. Leave as last binary SPE builtin. */
6356 { 0, CODE_FOR_xorv2si3, "__builtin_spe_evxor", SPE_BUILTIN_EVXOR }
6359 /* AltiVec predicates. */
6361 struct builtin_description_predicates
6363 const unsigned int mask;
6364 const enum insn_code icode;
6366 const char *const name;
6367 const enum rs6000_builtins code;
6370 static const struct builtin_description_predicates bdesc_altivec_preds[] =
6372 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpbfp.", "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P },
6373 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpeqfp.", "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P },
6374 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgefp.", "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P },
6375 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgtfp.", "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P },
6376 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpequw.", "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P },
6377 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtsw.", "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P },
6378 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtuw.", "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P },
6379 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtuh.", "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P },
6380 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtsh.", "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P },
6381 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpequh.", "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P },
6382 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpequb.", "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P },
6383 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtsb.", "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P },
6384 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P },
6386 { MASK_ALTIVEC, 0, NULL, "__builtin_vec_vcmpeq_p", ALTIVEC_BUILTIN_VCMPEQ_P },
6387 { MASK_ALTIVEC, 0, NULL, "__builtin_vec_vcmpgt_p", ALTIVEC_BUILTIN_VCMPGT_P },
6388 { MASK_ALTIVEC, 0, NULL, "__builtin_vec_vcmpge_p", ALTIVEC_BUILTIN_VCMPGE_P }
6391 /* SPE predicates. */
6392 static struct builtin_description bdesc_spe_predicates[] =
6394 /* Place-holder. Leave as first. */
6395 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evcmpeq", SPE_BUILTIN_EVCMPEQ },
6396 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evcmpgts", SPE_BUILTIN_EVCMPGTS },
6397 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evcmpgtu", SPE_BUILTIN_EVCMPGTU },
6398 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evcmplts", SPE_BUILTIN_EVCMPLTS },
6399 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evcmpltu", SPE_BUILTIN_EVCMPLTU },
6400 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evfscmpeq", SPE_BUILTIN_EVFSCMPEQ },
6401 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evfscmpgt", SPE_BUILTIN_EVFSCMPGT },
6402 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evfscmplt", SPE_BUILTIN_EVFSCMPLT },
6403 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evfststeq", SPE_BUILTIN_EVFSTSTEQ },
6404 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evfststgt", SPE_BUILTIN_EVFSTSTGT },
6405 /* Place-holder. Leave as last. */
6406 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evfststlt", SPE_BUILTIN_EVFSTSTLT },
6409 /* SPE evsel predicates. */
6410 static struct builtin_description bdesc_spe_evsel[] =
6412 /* Place-holder. Leave as first. */
6413 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evsel_gts", SPE_BUILTIN_EVSEL_CMPGTS },
6414 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evsel_gtu", SPE_BUILTIN_EVSEL_CMPGTU },
6415 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evsel_lts", SPE_BUILTIN_EVSEL_CMPLTS },
6416 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evsel_ltu", SPE_BUILTIN_EVSEL_CMPLTU },
6417 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evsel_eq", SPE_BUILTIN_EVSEL_CMPEQ },
6418 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evsel_fsgt", SPE_BUILTIN_EVSEL_FSCMPGT },
6419 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evsel_fslt", SPE_BUILTIN_EVSEL_FSCMPLT },
6420 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evsel_fseq", SPE_BUILTIN_EVSEL_FSCMPEQ },
6421 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evsel_fststgt", SPE_BUILTIN_EVSEL_FSTSTGT },
6422 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evsel_fststlt", SPE_BUILTIN_EVSEL_FSTSTLT },
6423 /* Place-holder. Leave as last. */
6424 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evsel_fststeq", SPE_BUILTIN_EVSEL_FSTSTEQ },
6427 /* ABS* operations. */
6429 static const struct builtin_description bdesc_abs[] =
6431 { MASK_ALTIVEC, CODE_FOR_absv4si2, "__builtin_altivec_abs_v4si", ALTIVEC_BUILTIN_ABS_V4SI },
6432 { MASK_ALTIVEC, CODE_FOR_absv8hi2, "__builtin_altivec_abs_v8hi", ALTIVEC_BUILTIN_ABS_V8HI },
6433 { MASK_ALTIVEC, CODE_FOR_absv4sf2, "__builtin_altivec_abs_v4sf", ALTIVEC_BUILTIN_ABS_V4SF },
6434 { MASK_ALTIVEC, CODE_FOR_absv16qi2, "__builtin_altivec_abs_v16qi", ALTIVEC_BUILTIN_ABS_V16QI },
6435 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v4si, "__builtin_altivec_abss_v4si", ALTIVEC_BUILTIN_ABSS_V4SI },
6436 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v8hi, "__builtin_altivec_abss_v8hi", ALTIVEC_BUILTIN_ABSS_V8HI },
6437 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v16qi, "__builtin_altivec_abss_v16qi", ALTIVEC_BUILTIN_ABSS_V16QI }
6440 /* Simple unary operations: VECb = foo (unsigned literal) or VECb =
6443 static struct builtin_description bdesc_1arg[] =
6445 { MASK_ALTIVEC, CODE_FOR_altivec_vexptefp, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP },
6446 { MASK_ALTIVEC, CODE_FOR_altivec_vlogefp, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP },
6447 { MASK_ALTIVEC, CODE_FOR_altivec_vrefp, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP },
6448 { MASK_ALTIVEC, CODE_FOR_altivec_vrfim, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM },
6449 { MASK_ALTIVEC, CODE_FOR_altivec_vrfin, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN },
6450 { MASK_ALTIVEC, CODE_FOR_altivec_vrfip, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP },
6451 { MASK_ALTIVEC, CODE_FOR_ftruncv4sf2, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ },
6452 { MASK_ALTIVEC, CODE_FOR_altivec_vrsqrtefp, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP },
6453 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisb, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB },
6454 { MASK_ALTIVEC, CODE_FOR_altivec_vspltish, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH },
6455 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisw, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW },
6456 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsb, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB },
6457 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhpx, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX },
6458 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsh, "__builtin_altivec_vupkhsh", ALTIVEC_BUILTIN_VUPKHSH },
6459 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsb, "__builtin_altivec_vupklsb", ALTIVEC_BUILTIN_VUPKLSB },
6460 { MASK_ALTIVEC, CODE_FOR_altivec_vupklpx, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX },
6461 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsh, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH },
6463 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_abs", ALTIVEC_BUILTIN_VEC_ABS },
6464 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_abss", ALTIVEC_BUILTIN_VEC_ABSS },
6465 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_ceil", ALTIVEC_BUILTIN_VEC_CEIL },
6466 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_expte", ALTIVEC_BUILTIN_VEC_EXPTE },
6467 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_floor", ALTIVEC_BUILTIN_VEC_FLOOR },
6468 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_loge", ALTIVEC_BUILTIN_VEC_LOGE },
6469 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mtvscr", ALTIVEC_BUILTIN_VEC_MTVSCR },
6470 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_re", ALTIVEC_BUILTIN_VEC_RE },
6471 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_round", ALTIVEC_BUILTIN_VEC_ROUND },
6472 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_rsqrte", ALTIVEC_BUILTIN_VEC_RSQRTE },
6473 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_trunc", ALTIVEC_BUILTIN_VEC_TRUNC },
6474 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_unpackh", ALTIVEC_BUILTIN_VEC_UNPACKH },
6475 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupkhsh", ALTIVEC_BUILTIN_VEC_VUPKHSH },
6476 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupkhpx", ALTIVEC_BUILTIN_VEC_VUPKHPX },
6477 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupkhsb", ALTIVEC_BUILTIN_VEC_VUPKHSB },
6478 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_unpackl", ALTIVEC_BUILTIN_VEC_UNPACKL },
6479 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupklpx", ALTIVEC_BUILTIN_VEC_VUPKLPX },
6480 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupklsh", ALTIVEC_BUILTIN_VEC_VUPKLSH },
6481 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupklsb", ALTIVEC_BUILTIN_VEC_VUPKLSB },
6483 /* The SPE unary builtins must start with SPE_BUILTIN_EVABS and
6484 end with SPE_BUILTIN_EVSUBFUSIAAW. */
6485 { 0, CODE_FOR_spe_evabs, "__builtin_spe_evabs", SPE_BUILTIN_EVABS },
6486 { 0, CODE_FOR_spe_evaddsmiaaw, "__builtin_spe_evaddsmiaaw", SPE_BUILTIN_EVADDSMIAAW },
6487 { 0, CODE_FOR_spe_evaddssiaaw, "__builtin_spe_evaddssiaaw", SPE_BUILTIN_EVADDSSIAAW },
6488 { 0, CODE_FOR_spe_evaddumiaaw, "__builtin_spe_evaddumiaaw", SPE_BUILTIN_EVADDUMIAAW },
6489 { 0, CODE_FOR_spe_evaddusiaaw, "__builtin_spe_evaddusiaaw", SPE_BUILTIN_EVADDUSIAAW },
6490 { 0, CODE_FOR_spe_evcntlsw, "__builtin_spe_evcntlsw", SPE_BUILTIN_EVCNTLSW },
6491 { 0, CODE_FOR_spe_evcntlzw, "__builtin_spe_evcntlzw", SPE_BUILTIN_EVCNTLZW },
6492 { 0, CODE_FOR_spe_evextsb, "__builtin_spe_evextsb", SPE_BUILTIN_EVEXTSB },
6493 { 0, CODE_FOR_spe_evextsh, "__builtin_spe_evextsh", SPE_BUILTIN_EVEXTSH },
6494 { 0, CODE_FOR_spe_evfsabs, "__builtin_spe_evfsabs", SPE_BUILTIN_EVFSABS },
6495 { 0, CODE_FOR_spe_evfscfsf, "__builtin_spe_evfscfsf", SPE_BUILTIN_EVFSCFSF },
6496 { 0, CODE_FOR_spe_evfscfsi, "__builtin_spe_evfscfsi", SPE_BUILTIN_EVFSCFSI },
6497 { 0, CODE_FOR_spe_evfscfuf, "__builtin_spe_evfscfuf", SPE_BUILTIN_EVFSCFUF },
6498 { 0, CODE_FOR_spe_evfscfui, "__builtin_spe_evfscfui", SPE_BUILTIN_EVFSCFUI },
6499 { 0, CODE_FOR_spe_evfsctsf, "__builtin_spe_evfsctsf", SPE_BUILTIN_EVFSCTSF },
6500 { 0, CODE_FOR_spe_evfsctsi, "__builtin_spe_evfsctsi", SPE_BUILTIN_EVFSCTSI },
6501 { 0, CODE_FOR_spe_evfsctsiz, "__builtin_spe_evfsctsiz", SPE_BUILTIN_EVFSCTSIZ },
6502 { 0, CODE_FOR_spe_evfsctuf, "__builtin_spe_evfsctuf", SPE_BUILTIN_EVFSCTUF },
6503 { 0, CODE_FOR_spe_evfsctui, "__builtin_spe_evfsctui", SPE_BUILTIN_EVFSCTUI },
6504 { 0, CODE_FOR_spe_evfsctuiz, "__builtin_spe_evfsctuiz", SPE_BUILTIN_EVFSCTUIZ },
6505 { 0, CODE_FOR_spe_evfsnabs, "__builtin_spe_evfsnabs", SPE_BUILTIN_EVFSNABS },
6506 { 0, CODE_FOR_spe_evfsneg, "__builtin_spe_evfsneg", SPE_BUILTIN_EVFSNEG },
6507 { 0, CODE_FOR_spe_evmra, "__builtin_spe_evmra", SPE_BUILTIN_EVMRA },
6508 { 0, CODE_FOR_negv2si2, "__builtin_spe_evneg", SPE_BUILTIN_EVNEG },
6509 { 0, CODE_FOR_spe_evrndw, "__builtin_spe_evrndw", SPE_BUILTIN_EVRNDW },
6510 { 0, CODE_FOR_spe_evsubfsmiaaw, "__builtin_spe_evsubfsmiaaw", SPE_BUILTIN_EVSUBFSMIAAW },
6511 { 0, CODE_FOR_spe_evsubfssiaaw, "__builtin_spe_evsubfssiaaw", SPE_BUILTIN_EVSUBFSSIAAW },
6512 { 0, CODE_FOR_spe_evsubfumiaaw, "__builtin_spe_evsubfumiaaw", SPE_BUILTIN_EVSUBFUMIAAW },
6514 /* Place-holder. Leave as last unary SPE builtin. */
6515 { 0, CODE_FOR_spe_evsubfusiaaw, "__builtin_spe_evsubfusiaaw", SPE_BUILTIN_EVSUBFUSIAAW }
6519 rs6000_expand_unop_builtin (enum insn_code icode, tree arglist, rtx target)
6522 tree arg0 = TREE_VALUE (arglist);
6523 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6524 enum machine_mode tmode = insn_data[icode].operand[0].mode;
6525 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
6527 if (icode == CODE_FOR_nothing)
6528 /* Builtin not supported on this processor. */
6531 /* If we got invalid arguments bail out before generating bad rtl. */
6532 if (arg0 == error_mark_node)
6535 if (icode == CODE_FOR_altivec_vspltisb
6536 || icode == CODE_FOR_altivec_vspltish
6537 || icode == CODE_FOR_altivec_vspltisw
6538 || icode == CODE_FOR_spe_evsplatfi
6539 || icode == CODE_FOR_spe_evsplati)
6541 /* Only allow 5-bit *signed* literals. */
6542 if (GET_CODE (op0) != CONST_INT
6543 || INTVAL (op0) > 15
6544 || INTVAL (op0) < -16)
6546 error ("argument 1 must be a 5-bit signed literal");
6552 || GET_MODE (target) != tmode
6553 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6554 target = gen_reg_rtx (tmode);
6556 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6557 op0 = copy_to_mode_reg (mode0, op0);
6559 pat = GEN_FCN (icode) (target, op0);
6568 altivec_expand_abs_builtin (enum insn_code icode, tree arglist, rtx target)
6570 rtx pat, scratch1, scratch2;
6571 tree arg0 = TREE_VALUE (arglist);
6572 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6573 enum machine_mode tmode = insn_data[icode].operand[0].mode;
6574 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
6576 /* If we have invalid arguments, bail out before generating bad rtl. */
6577 if (arg0 == error_mark_node)
6581 || GET_MODE (target) != tmode
6582 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6583 target = gen_reg_rtx (tmode);
6585 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6586 op0 = copy_to_mode_reg (mode0, op0);
6588 scratch1 = gen_reg_rtx (mode0);
6589 scratch2 = gen_reg_rtx (mode0);
6591 pat = GEN_FCN (icode) (target, op0, scratch1, scratch2);
6600 rs6000_expand_binop_builtin (enum insn_code icode, tree arglist, rtx target)
6603 tree arg0 = TREE_VALUE (arglist);
6604 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
6605 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6606 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6607 enum machine_mode tmode = insn_data[icode].operand[0].mode;
6608 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
6609 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
6611 if (icode == CODE_FOR_nothing)
6612 /* Builtin not supported on this processor. */
6615 /* If we got invalid arguments bail out before generating bad rtl. */
6616 if (arg0 == error_mark_node || arg1 == error_mark_node)
6619 if (icode == CODE_FOR_altivec_vcfux
6620 || icode == CODE_FOR_altivec_vcfsx
6621 || icode == CODE_FOR_altivec_vctsxs
6622 || icode == CODE_FOR_altivec_vctuxs
6623 || icode == CODE_FOR_altivec_vspltb
6624 || icode == CODE_FOR_altivec_vsplth
6625 || icode == CODE_FOR_altivec_vspltw
6626 || icode == CODE_FOR_spe_evaddiw
6627 || icode == CODE_FOR_spe_evldd
6628 || icode == CODE_FOR_spe_evldh
6629 || icode == CODE_FOR_spe_evldw
6630 || icode == CODE_FOR_spe_evlhhesplat
6631 || icode == CODE_FOR_spe_evlhhossplat
6632 || icode == CODE_FOR_spe_evlhhousplat
6633 || icode == CODE_FOR_spe_evlwhe
6634 || icode == CODE_FOR_spe_evlwhos
6635 || icode == CODE_FOR_spe_evlwhou
6636 || icode == CODE_FOR_spe_evlwhsplat
6637 || icode == CODE_FOR_spe_evlwwsplat
6638 || icode == CODE_FOR_spe_evrlwi
6639 || icode == CODE_FOR_spe_evslwi
6640 || icode == CODE_FOR_spe_evsrwis
6641 || icode == CODE_FOR_spe_evsubifw
6642 || icode == CODE_FOR_spe_evsrwiu)
6644 /* Only allow 5-bit unsigned literals. */
6646 if (TREE_CODE (arg1) != INTEGER_CST
6647 || TREE_INT_CST_LOW (arg1) & ~0x1f)
6649 error ("argument 2 must be a 5-bit unsigned literal");
6655 || GET_MODE (target) != tmode
6656 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6657 target = gen_reg_rtx (tmode);
6659 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6660 op0 = copy_to_mode_reg (mode0, op0);
6661 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
6662 op1 = copy_to_mode_reg (mode1, op1);
6664 pat = GEN_FCN (icode) (target, op0, op1);
6673 altivec_expand_predicate_builtin (enum insn_code icode, const char *opcode,
6674 tree arglist, rtx target)
6677 tree cr6_form = TREE_VALUE (arglist);
6678 tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
6679 tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6680 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6681 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6682 enum machine_mode tmode = SImode;
6683 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
6684 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
6687 if (TREE_CODE (cr6_form) != INTEGER_CST)
6689 error ("argument 1 of __builtin_altivec_predicate must be a constant");
6693 cr6_form_int = TREE_INT_CST_LOW (cr6_form);
6695 gcc_assert (mode0 == mode1);
6697 /* If we have invalid arguments, bail out before generating bad rtl. */
6698 if (arg0 == error_mark_node || arg1 == error_mark_node)
6702 || GET_MODE (target) != tmode
6703 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6704 target = gen_reg_rtx (tmode);
6706 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6707 op0 = copy_to_mode_reg (mode0, op0);
6708 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
6709 op1 = copy_to_mode_reg (mode1, op1);
6711 scratch = gen_reg_rtx (mode0);
6713 pat = GEN_FCN (icode) (scratch, op0, op1,
6714 gen_rtx_SYMBOL_REF (Pmode, opcode));
6719 /* The vec_any* and vec_all* predicates use the same opcodes for two
6720 different operations, but the bits in CR6 will be different
6721 depending on what information we want. So we have to play tricks
6722 with CR6 to get the right bits out.
6724 If you think this is disgusting, look at the specs for the
6725 AltiVec predicates. */
6727 switch (cr6_form_int)
6730 emit_insn (gen_cr6_test_for_zero (target));
6733 emit_insn (gen_cr6_test_for_zero_reverse (target));
6736 emit_insn (gen_cr6_test_for_lt (target));
6739 emit_insn (gen_cr6_test_for_lt_reverse (target));
6742 error ("argument 1 of __builtin_altivec_predicate is out of range");
6750 altivec_expand_lv_builtin (enum insn_code icode, tree arglist, rtx target)
6753 tree arg0 = TREE_VALUE (arglist);
6754 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
6755 enum machine_mode tmode = insn_data[icode].operand[0].mode;
6756 enum machine_mode mode0 = Pmode;
6757 enum machine_mode mode1 = Pmode;
6758 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6759 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6761 if (icode == CODE_FOR_nothing)
6762 /* Builtin not supported on this processor. */
6765 /* If we got invalid arguments bail out before generating bad rtl. */
6766 if (arg0 == error_mark_node || arg1 == error_mark_node)
6770 || GET_MODE (target) != tmode
6771 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6772 target = gen_reg_rtx (tmode);
6774 op1 = copy_to_mode_reg (mode1, op1);
6776 if (op0 == const0_rtx)
6778 addr = gen_rtx_MEM (tmode, op1);
6782 op0 = copy_to_mode_reg (mode0, op0);
6783 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op0, op1));
6786 pat = GEN_FCN (icode) (target, addr);
6796 spe_expand_stv_builtin (enum insn_code icode, tree arglist)
6798 tree arg0 = TREE_VALUE (arglist);
6799 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
6800 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6801 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6802 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6803 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
6805 enum machine_mode mode0 = insn_data[icode].operand[0].mode;
6806 enum machine_mode mode1 = insn_data[icode].operand[1].mode;
6807 enum machine_mode mode2 = insn_data[icode].operand[2].mode;
6809 /* Invalid arguments. Bail before doing anything stoopid! */
6810 if (arg0 == error_mark_node
6811 || arg1 == error_mark_node
6812 || arg2 == error_mark_node)
6815 if (! (*insn_data[icode].operand[2].predicate) (op0, mode2))
6816 op0 = copy_to_mode_reg (mode2, op0);
6817 if (! (*insn_data[icode].operand[0].predicate) (op1, mode0))
6818 op1 = copy_to_mode_reg (mode0, op1);
6819 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
6820 op2 = copy_to_mode_reg (mode1, op2);
6822 pat = GEN_FCN (icode) (op1, op2, op0);
6829 altivec_expand_stv_builtin (enum insn_code icode, tree arglist)
6831 tree arg0 = TREE_VALUE (arglist);
6832 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
6833 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6834 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6835 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6836 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
6838 enum machine_mode tmode = insn_data[icode].operand[0].mode;
6839 enum machine_mode mode1 = Pmode;
6840 enum machine_mode mode2 = Pmode;
6842 /* Invalid arguments. Bail before doing anything stoopid! */
6843 if (arg0 == error_mark_node
6844 || arg1 == error_mark_node
6845 || arg2 == error_mark_node)
6848 if (! (*insn_data[icode].operand[1].predicate) (op0, tmode))
6849 op0 = copy_to_mode_reg (tmode, op0);
6851 op2 = copy_to_mode_reg (mode2, op2);
6853 if (op1 == const0_rtx)
6855 addr = gen_rtx_MEM (tmode, op2);
6859 op1 = copy_to_mode_reg (mode1, op1);
6860 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op1, op2));
6863 pat = GEN_FCN (icode) (addr, op0);
6870 rs6000_expand_ternop_builtin (enum insn_code icode, tree arglist, rtx target)
6873 tree arg0 = TREE_VALUE (arglist);
6874 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
6875 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6876 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6877 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6878 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
6879 enum machine_mode tmode = insn_data[icode].operand[0].mode;
6880 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
6881 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
6882 enum machine_mode mode2 = insn_data[icode].operand[3].mode;
6884 if (icode == CODE_FOR_nothing)
6885 /* Builtin not supported on this processor. */
6888 /* If we got invalid arguments bail out before generating bad rtl. */
6889 if (arg0 == error_mark_node
6890 || arg1 == error_mark_node
6891 || arg2 == error_mark_node)
6894 if (icode == CODE_FOR_altivec_vsldoi_v4sf
6895 || icode == CODE_FOR_altivec_vsldoi_v4si
6896 || icode == CODE_FOR_altivec_vsldoi_v8hi
6897 || icode == CODE_FOR_altivec_vsldoi_v16qi)
6899 /* Only allow 4-bit unsigned literals. */
6901 if (TREE_CODE (arg2) != INTEGER_CST
6902 || TREE_INT_CST_LOW (arg2) & ~0xf)
6904 error ("argument 3 must be a 4-bit unsigned literal");
6910 || GET_MODE (target) != tmode
6911 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6912 target = gen_reg_rtx (tmode);
6914 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6915 op0 = copy_to_mode_reg (mode0, op0);
6916 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
6917 op1 = copy_to_mode_reg (mode1, op1);
6918 if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
6919 op2 = copy_to_mode_reg (mode2, op2);
6921 pat = GEN_FCN (icode) (target, op0, op1, op2);
6929 /* Expand the lvx builtins. */
6931 altivec_expand_ld_builtin (tree exp, rtx target, bool *expandedp)
6933 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6934 tree arglist = TREE_OPERAND (exp, 1);
6935 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
6937 enum machine_mode tmode, mode0;
6939 enum insn_code icode;
6943 case ALTIVEC_BUILTIN_LD_INTERNAL_16qi:
6944 icode = CODE_FOR_altivec_lvx_v16qi;
6946 case ALTIVEC_BUILTIN_LD_INTERNAL_8hi:
6947 icode = CODE_FOR_altivec_lvx_v8hi;
6949 case ALTIVEC_BUILTIN_LD_INTERNAL_4si:
6950 icode = CODE_FOR_altivec_lvx_v4si;
6952 case ALTIVEC_BUILTIN_LD_INTERNAL_4sf:
6953 icode = CODE_FOR_altivec_lvx_v4sf;
6962 arg0 = TREE_VALUE (arglist);
6963 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6964 tmode = insn_data[icode].operand[0].mode;
6965 mode0 = insn_data[icode].operand[1].mode;
6968 || GET_MODE (target) != tmode
6969 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6970 target = gen_reg_rtx (tmode);
6972 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6973 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
6975 pat = GEN_FCN (icode) (target, op0);
6982 /* Expand the stvx builtins. */
6984 altivec_expand_st_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
6987 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6988 tree arglist = TREE_OPERAND (exp, 1);
6989 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
6991 enum machine_mode mode0, mode1;
6993 enum insn_code icode;
6997 case ALTIVEC_BUILTIN_ST_INTERNAL_16qi:
6998 icode = CODE_FOR_altivec_stvx_v16qi;
7000 case ALTIVEC_BUILTIN_ST_INTERNAL_8hi:
7001 icode = CODE_FOR_altivec_stvx_v8hi;
7003 case ALTIVEC_BUILTIN_ST_INTERNAL_4si:
7004 icode = CODE_FOR_altivec_stvx_v4si;
7006 case ALTIVEC_BUILTIN_ST_INTERNAL_4sf:
7007 icode = CODE_FOR_altivec_stvx_v4sf;
7014 arg0 = TREE_VALUE (arglist);
7015 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
7016 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
7017 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
7018 mode0 = insn_data[icode].operand[0].mode;
7019 mode1 = insn_data[icode].operand[1].mode;
7021 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
7022 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
7023 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
7024 op1 = copy_to_mode_reg (mode1, op1);
7026 pat = GEN_FCN (icode) (op0, op1);
7034 /* Expand the dst builtins. */
7036 altivec_expand_dst_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
7039 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7040 tree arglist = TREE_OPERAND (exp, 1);
7041 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
7042 tree arg0, arg1, arg2;
7043 enum machine_mode mode0, mode1, mode2;
7044 rtx pat, op0, op1, op2;
7045 struct builtin_description *d;
7050 /* Handle DST variants. */
7051 d = (struct builtin_description *) bdesc_dst;
7052 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
7053 if (d->code == fcode)
7055 arg0 = TREE_VALUE (arglist);
7056 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
7057 arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
7058 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
7059 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
7060 op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
7061 mode0 = insn_data[d->icode].operand[0].mode;
7062 mode1 = insn_data[d->icode].operand[1].mode;
7063 mode2 = insn_data[d->icode].operand[2].mode;
7065 /* Invalid arguments, bail out before generating bad rtl. */
7066 if (arg0 == error_mark_node
7067 || arg1 == error_mark_node
7068 || arg2 == error_mark_node)
7073 if (TREE_CODE (arg2) != INTEGER_CST
7074 || TREE_INT_CST_LOW (arg2) & ~0x3)
7076 error ("argument to %qs must be a 2-bit unsigned literal", d->name);
7080 if (! (*insn_data[d->icode].operand[0].predicate) (op0, mode0))
7081 op0 = copy_to_mode_reg (Pmode, op0);
7082 if (! (*insn_data[d->icode].operand[1].predicate) (op1, mode1))
7083 op1 = copy_to_mode_reg (mode1, op1);
7085 pat = GEN_FCN (d->icode) (op0, op1, op2);
7095 /* Expand vec_init builtin. */
7097 altivec_expand_vec_init_builtin (tree type, tree arglist, rtx target)
7099 enum machine_mode tmode = TYPE_MODE (type);
7100 enum machine_mode inner_mode = GET_MODE_INNER (tmode);
7101 int i, n_elt = GET_MODE_NUNITS (tmode);
7102 rtvec v = rtvec_alloc (n_elt);
7104 gcc_assert (VECTOR_MODE_P (tmode));
7106 for (i = 0; i < n_elt; ++i, arglist = TREE_CHAIN (arglist))
7108 rtx x = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
7109 RTVEC_ELT (v, i) = gen_lowpart (inner_mode, x);
7112 gcc_assert (arglist == NULL);
7114 if (!target || !register_operand (target, tmode))
7115 target = gen_reg_rtx (tmode);
7117 rs6000_expand_vector_init (target, gen_rtx_PARALLEL (tmode, v));
7121 /* Return the integer constant in ARG. Constrain it to be in the range
7122 of the subparts of VEC_TYPE; issue an error if not. */
7125 get_element_number (tree vec_type, tree arg)
7127 unsigned HOST_WIDE_INT elt, max = TYPE_VECTOR_SUBPARTS (vec_type) - 1;
7129 if (!host_integerp (arg, 1)
7130 || (elt = tree_low_cst (arg, 1), elt > max))
7132 error ("selector must be an integer constant in the range 0..%wi", max);
7139 /* Expand vec_set builtin. */
7141 altivec_expand_vec_set_builtin (tree arglist)
7143 enum machine_mode tmode, mode1;
7144 tree arg0, arg1, arg2;
7148 arg0 = TREE_VALUE (arglist);
7149 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
7150 arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
7152 tmode = TYPE_MODE (TREE_TYPE (arg0));
7153 mode1 = TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0)));
7154 gcc_assert (VECTOR_MODE_P (tmode));
7156 op0 = expand_expr (arg0, NULL_RTX, tmode, 0);
7157 op1 = expand_expr (arg1, NULL_RTX, mode1, 0);
7158 elt = get_element_number (TREE_TYPE (arg0), arg2);
7160 if (GET_MODE (op1) != mode1 && GET_MODE (op1) != VOIDmode)
7161 op1 = convert_modes (mode1, GET_MODE (op1), op1, true);
7163 op0 = force_reg (tmode, op0);
7164 op1 = force_reg (mode1, op1);
7166 rs6000_expand_vector_set (op0, op1, elt);
7171 /* Expand vec_ext builtin. */
7173 altivec_expand_vec_ext_builtin (tree arglist, rtx target)
7175 enum machine_mode tmode, mode0;
7180 arg0 = TREE_VALUE (arglist);
7181 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
7183 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
7184 elt = get_element_number (TREE_TYPE (arg0), arg1);
7186 tmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0)));
7187 mode0 = TYPE_MODE (TREE_TYPE (arg0));
7188 gcc_assert (VECTOR_MODE_P (mode0));
7190 op0 = force_reg (mode0, op0);
7192 if (optimize || !target || !register_operand (target, tmode))
7193 target = gen_reg_rtx (tmode);
7195 rs6000_expand_vector_extract (target, op0, elt);
7200 /* Expand the builtin in EXP and store the result in TARGET. Store
7201 true in *EXPANDEDP if we found a builtin to expand. */
7203 altivec_expand_builtin (tree exp, rtx target, bool *expandedp)
7205 struct builtin_description *d;
7206 struct builtin_description_predicates *dp;
7208 enum insn_code icode;
7209 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7210 tree arglist = TREE_OPERAND (exp, 1);
7213 enum machine_mode tmode, mode0;
7214 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
7216 if (fcode >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
7217 && fcode <= ALTIVEC_BUILTIN_OVERLOADED_LAST)
7220 error ("unresolved overload for Altivec builtin %qF", fndecl);
7224 target = altivec_expand_ld_builtin (exp, target, expandedp);
7228 target = altivec_expand_st_builtin (exp, target, expandedp);
7232 target = altivec_expand_dst_builtin (exp, target, expandedp);
7240 case ALTIVEC_BUILTIN_STVX:
7241 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx, arglist);
7242 case ALTIVEC_BUILTIN_STVEBX:
7243 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx, arglist);
7244 case ALTIVEC_BUILTIN_STVEHX:
7245 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx, arglist);
7246 case ALTIVEC_BUILTIN_STVEWX:
7247 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvewx, arglist);
7248 case ALTIVEC_BUILTIN_STVXL:
7249 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvxl, arglist);
7251 case ALTIVEC_BUILTIN_MFVSCR:
7252 icode = CODE_FOR_altivec_mfvscr;
7253 tmode = insn_data[icode].operand[0].mode;
7256 || GET_MODE (target) != tmode
7257 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7258 target = gen_reg_rtx (tmode);
7260 pat = GEN_FCN (icode) (target);
7266 case ALTIVEC_BUILTIN_MTVSCR:
7267 icode = CODE_FOR_altivec_mtvscr;
7268 arg0 = TREE_VALUE (arglist);
7269 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
7270 mode0 = insn_data[icode].operand[0].mode;
7272 /* If we got invalid arguments bail out before generating bad rtl. */
7273 if (arg0 == error_mark_node)
7276 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
7277 op0 = copy_to_mode_reg (mode0, op0);
7279 pat = GEN_FCN (icode) (op0);
7284 case ALTIVEC_BUILTIN_DSSALL:
7285 emit_insn (gen_altivec_dssall ());
7288 case ALTIVEC_BUILTIN_DSS:
7289 icode = CODE_FOR_altivec_dss;
7290 arg0 = TREE_VALUE (arglist);
7292 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
7293 mode0 = insn_data[icode].operand[0].mode;
7295 /* If we got invalid arguments bail out before generating bad rtl. */
7296 if (arg0 == error_mark_node)
7299 if (TREE_CODE (arg0) != INTEGER_CST
7300 || TREE_INT_CST_LOW (arg0) & ~0x3)
7302 error ("argument to dss must be a 2-bit unsigned literal");
7306 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
7307 op0 = copy_to_mode_reg (mode0, op0);
7309 emit_insn (gen_altivec_dss (op0));
7312 case ALTIVEC_BUILTIN_VEC_INIT_V4SI:
7313 case ALTIVEC_BUILTIN_VEC_INIT_V8HI:
7314 case ALTIVEC_BUILTIN_VEC_INIT_V16QI:
7315 case ALTIVEC_BUILTIN_VEC_INIT_V4SF:
7316 return altivec_expand_vec_init_builtin (TREE_TYPE (exp), arglist, target);
7318 case ALTIVEC_BUILTIN_VEC_SET_V4SI:
7319 case ALTIVEC_BUILTIN_VEC_SET_V8HI:
7320 case ALTIVEC_BUILTIN_VEC_SET_V16QI:
7321 case ALTIVEC_BUILTIN_VEC_SET_V4SF:
7322 return altivec_expand_vec_set_builtin (arglist);
7324 case ALTIVEC_BUILTIN_VEC_EXT_V4SI:
7325 case ALTIVEC_BUILTIN_VEC_EXT_V8HI:
7326 case ALTIVEC_BUILTIN_VEC_EXT_V16QI:
7327 case ALTIVEC_BUILTIN_VEC_EXT_V4SF:
7328 return altivec_expand_vec_ext_builtin (arglist, target);
7335 /* Expand abs* operations. */
7336 d = (struct builtin_description *) bdesc_abs;
7337 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
7338 if (d->code == fcode)
7339 return altivec_expand_abs_builtin (d->icode, arglist, target);
7341 /* Expand the AltiVec predicates. */
7342 dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
7343 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
7344 if (dp->code == fcode)
7345 return altivec_expand_predicate_builtin (dp->icode, dp->opcode,
7348 /* LV* are funky. We initialized them differently. */
7351 case ALTIVEC_BUILTIN_LVSL:
7352 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvsl,
7354 case ALTIVEC_BUILTIN_LVSR:
7355 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvsr,
7357 case ALTIVEC_BUILTIN_LVEBX:
7358 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvebx,
7360 case ALTIVEC_BUILTIN_LVEHX:
7361 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvehx,
7363 case ALTIVEC_BUILTIN_LVEWX:
7364 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvewx,
7366 case ALTIVEC_BUILTIN_LVXL:
7367 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvxl,
7369 case ALTIVEC_BUILTIN_LVX:
7370 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvx,
7381 /* Binops that need to be initialized manually, but can be expanded
7382 automagically by rs6000_expand_binop_builtin. */
7383 static struct builtin_description bdesc_2arg_spe[] =
7385 { 0, CODE_FOR_spe_evlddx, "__builtin_spe_evlddx", SPE_BUILTIN_EVLDDX },
7386 { 0, CODE_FOR_spe_evldwx, "__builtin_spe_evldwx", SPE_BUILTIN_EVLDWX },
7387 { 0, CODE_FOR_spe_evldhx, "__builtin_spe_evldhx", SPE_BUILTIN_EVLDHX },
7388 { 0, CODE_FOR_spe_evlwhex, "__builtin_spe_evlwhex", SPE_BUILTIN_EVLWHEX },
7389 { 0, CODE_FOR_spe_evlwhoux, "__builtin_spe_evlwhoux", SPE_BUILTIN_EVLWHOUX },
7390 { 0, CODE_FOR_spe_evlwhosx, "__builtin_spe_evlwhosx", SPE_BUILTIN_EVLWHOSX },
7391 { 0, CODE_FOR_spe_evlwwsplatx, "__builtin_spe_evlwwsplatx", SPE_BUILTIN_EVLWWSPLATX },
7392 { 0, CODE_FOR_spe_evlwhsplatx, "__builtin_spe_evlwhsplatx", SPE_BUILTIN_EVLWHSPLATX },
7393 { 0, CODE_FOR_spe_evlhhesplatx, "__builtin_spe_evlhhesplatx", SPE_BUILTIN_EVLHHESPLATX },
7394 { 0, CODE_FOR_spe_evlhhousplatx, "__builtin_spe_evlhhousplatx", SPE_BUILTIN_EVLHHOUSPLATX },
7395 { 0, CODE_FOR_spe_evlhhossplatx, "__builtin_spe_evlhhossplatx", SPE_BUILTIN_EVLHHOSSPLATX },
7396 { 0, CODE_FOR_spe_evldd, "__builtin_spe_evldd", SPE_BUILTIN_EVLDD },
7397 { 0, CODE_FOR_spe_evldw, "__builtin_spe_evldw", SPE_BUILTIN_EVLDW },
7398 { 0, CODE_FOR_spe_evldh, "__builtin_spe_evldh", SPE_BUILTIN_EVLDH },
7399 { 0, CODE_FOR_spe_evlwhe, "__builtin_spe_evlwhe", SPE_BUILTIN_EVLWHE },
7400 { 0, CODE_FOR_spe_evlwhou, "__builtin_spe_evlwhou", SPE_BUILTIN_EVLWHOU },
7401 { 0, CODE_FOR_spe_evlwhos, "__builtin_spe_evlwhos", SPE_BUILTIN_EVLWHOS },
7402 { 0, CODE_FOR_spe_evlwwsplat, "__builtin_spe_evlwwsplat", SPE_BUILTIN_EVLWWSPLAT },
7403 { 0, CODE_FOR_spe_evlwhsplat, "__builtin_spe_evlwhsplat", SPE_BUILTIN_EVLWHSPLAT },
7404 { 0, CODE_FOR_spe_evlhhesplat, "__builtin_spe_evlhhesplat", SPE_BUILTIN_EVLHHESPLAT },
7405 { 0, CODE_FOR_spe_evlhhousplat, "__builtin_spe_evlhhousplat", SPE_BUILTIN_EVLHHOUSPLAT },
7406 { 0, CODE_FOR_spe_evlhhossplat, "__builtin_spe_evlhhossplat", SPE_BUILTIN_EVLHHOSSPLAT }
7409 /* Expand the builtin in EXP and store the result in TARGET. Store
7410 true in *EXPANDEDP if we found a builtin to expand.
7412 This expands the SPE builtins that are not simple unary and binary
7415 spe_expand_builtin (tree exp, rtx target, bool *expandedp)
7417 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7418 tree arglist = TREE_OPERAND (exp, 1);
7420 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
7421 enum insn_code icode;
7422 enum machine_mode tmode, mode0;
7424 struct builtin_description *d;
7429 /* Syntax check for a 5-bit unsigned immediate. */
7432 case SPE_BUILTIN_EVSTDD:
7433 case SPE_BUILTIN_EVSTDH:
7434 case SPE_BUILTIN_EVSTDW:
7435 case SPE_BUILTIN_EVSTWHE:
7436 case SPE_BUILTIN_EVSTWHO:
7437 case SPE_BUILTIN_EVSTWWE:
7438 case SPE_BUILTIN_EVSTWWO:
7439 arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
7440 if (TREE_CODE (arg1) != INTEGER_CST
7441 || TREE_INT_CST_LOW (arg1) & ~0x1f)
7443 error ("argument 2 must be a 5-bit unsigned literal");
7451 /* The evsplat*i instructions are not quite generic. */
7454 case SPE_BUILTIN_EVSPLATFI:
7455 return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplatfi,
7457 case SPE_BUILTIN_EVSPLATI:
7458 return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplati,
7464 d = (struct builtin_description *) bdesc_2arg_spe;
7465 for (i = 0; i < ARRAY_SIZE (bdesc_2arg_spe); ++i, ++d)
7466 if (d->code == fcode)
7467 return rs6000_expand_binop_builtin (d->icode, arglist, target);
7469 d = (struct builtin_description *) bdesc_spe_predicates;
7470 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, ++d)
7471 if (d->code == fcode)
7472 return spe_expand_predicate_builtin (d->icode, arglist, target);
7474 d = (struct builtin_description *) bdesc_spe_evsel;
7475 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, ++d)
7476 if (d->code == fcode)
7477 return spe_expand_evsel_builtin (d->icode, arglist, target);
7481 case SPE_BUILTIN_EVSTDDX:
7482 return spe_expand_stv_builtin (CODE_FOR_spe_evstddx, arglist);
7483 case SPE_BUILTIN_EVSTDHX:
7484 return spe_expand_stv_builtin (CODE_FOR_spe_evstdhx, arglist);
7485 case SPE_BUILTIN_EVSTDWX:
7486 return spe_expand_stv_builtin (CODE_FOR_spe_evstdwx, arglist);
7487 case SPE_BUILTIN_EVSTWHEX:
7488 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhex, arglist);
7489 case SPE_BUILTIN_EVSTWHOX:
7490 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhox, arglist);
7491 case SPE_BUILTIN_EVSTWWEX:
7492 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwex, arglist);
7493 case SPE_BUILTIN_EVSTWWOX:
7494 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwox, arglist);
7495 case SPE_BUILTIN_EVSTDD:
7496 return spe_expand_stv_builtin (CODE_FOR_spe_evstdd, arglist);
7497 case SPE_BUILTIN_EVSTDH:
7498 return spe_expand_stv_builtin (CODE_FOR_spe_evstdh, arglist);
7499 case SPE_BUILTIN_EVSTDW:
7500 return spe_expand_stv_builtin (CODE_FOR_spe_evstdw, arglist);
7501 case SPE_BUILTIN_EVSTWHE:
7502 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhe, arglist);
7503 case SPE_BUILTIN_EVSTWHO:
7504 return spe_expand_stv_builtin (CODE_FOR_spe_evstwho, arglist);
7505 case SPE_BUILTIN_EVSTWWE:
7506 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwe, arglist);
7507 case SPE_BUILTIN_EVSTWWO:
7508 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwo, arglist);
7509 case SPE_BUILTIN_MFSPEFSCR:
7510 icode = CODE_FOR_spe_mfspefscr;
7511 tmode = insn_data[icode].operand[0].mode;
7514 || GET_MODE (target) != tmode
7515 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7516 target = gen_reg_rtx (tmode);
7518 pat = GEN_FCN (icode) (target);
7523 case SPE_BUILTIN_MTSPEFSCR:
7524 icode = CODE_FOR_spe_mtspefscr;
7525 arg0 = TREE_VALUE (arglist);
7526 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
7527 mode0 = insn_data[icode].operand[0].mode;
7529 if (arg0 == error_mark_node)
7532 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
7533 op0 = copy_to_mode_reg (mode0, op0);
7535 pat = GEN_FCN (icode) (op0);
7548 spe_expand_predicate_builtin (enum insn_code icode, tree arglist, rtx target)
7550 rtx pat, scratch, tmp;
7551 tree form = TREE_VALUE (arglist);
7552 tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
7553 tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
7554 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
7555 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
7556 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7557 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
7561 if (TREE_CODE (form) != INTEGER_CST)
7563 error ("argument 1 of __builtin_spe_predicate must be a constant");
7567 form_int = TREE_INT_CST_LOW (form);
7569 gcc_assert (mode0 == mode1);
7571 if (arg0 == error_mark_node || arg1 == error_mark_node)
7575 || GET_MODE (target) != SImode
7576 || ! (*insn_data[icode].operand[0].predicate) (target, SImode))
7577 target = gen_reg_rtx (SImode);
7579 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7580 op0 = copy_to_mode_reg (mode0, op0);
7581 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
7582 op1 = copy_to_mode_reg (mode1, op1);
7584 scratch = gen_reg_rtx (CCmode);
7586 pat = GEN_FCN (icode) (scratch, op0, op1);
7591 /* There are 4 variants for each predicate: _any_, _all_, _upper_,
7592 _lower_. We use one compare, but look in different bits of the
7593 CR for each variant.
7595 There are 2 elements in each SPE simd type (upper/lower). The CR
7596 bits are set as follows:
7598 BIT0 | BIT 1 | BIT 2 | BIT 3
7599 U | L | (U | L) | (U & L)
7601 So, for an "all" relationship, BIT 3 would be set.
7602 For an "any" relationship, BIT 2 would be set. Etc.
7604 Following traditional nomenclature, these bits map to:
7606 BIT0 | BIT 1 | BIT 2 | BIT 3
7609 Later, we will generate rtl to look in the LT/EQ/EQ/OV bits.
7614 /* All variant. OV bit. */
7616 /* We need to get to the OV bit, which is the ORDERED bit. We
7617 could generate (ordered:SI (reg:CC xx) (const_int 0)), but
7618 that's ugly and will make validate_condition_mode die.
7619 So let's just use another pattern. */
7620 emit_insn (gen_move_from_CR_ov_bit (target, scratch));
7622 /* Any variant. EQ bit. */
7626 /* Upper variant. LT bit. */
7630 /* Lower variant. GT bit. */
7635 error ("argument 1 of __builtin_spe_predicate is out of range");
7639 tmp = gen_rtx_fmt_ee (code, SImode, scratch, const0_rtx);
7640 emit_move_insn (target, tmp);
7645 /* The evsel builtins look like this:
7647 e = __builtin_spe_evsel_OP (a, b, c, d);
7651 e[upper] = a[upper] *OP* b[upper] ? c[upper] : d[upper];
7652 e[lower] = a[lower] *OP* b[lower] ? c[lower] : d[lower];
7656 spe_expand_evsel_builtin (enum insn_code icode, tree arglist, rtx target)
7659 tree arg0 = TREE_VALUE (arglist);
7660 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
7661 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
7662 tree arg3 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arglist))));
7663 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
7664 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
7665 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
7666 rtx op3 = expand_expr (arg3, NULL_RTX, VOIDmode, 0);
7667 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7668 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
7670 gcc_assert (mode0 == mode1);
7672 if (arg0 == error_mark_node || arg1 == error_mark_node
7673 || arg2 == error_mark_node || arg3 == error_mark_node)
7677 || GET_MODE (target) != mode0
7678 || ! (*insn_data[icode].operand[0].predicate) (target, mode0))
7679 target = gen_reg_rtx (mode0);
7681 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7682 op0 = copy_to_mode_reg (mode0, op0);
7683 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
7684 op1 = copy_to_mode_reg (mode0, op1);
7685 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
7686 op2 = copy_to_mode_reg (mode0, op2);
7687 if (! (*insn_data[icode].operand[1].predicate) (op3, mode1))
7688 op3 = copy_to_mode_reg (mode0, op3);
7690 /* Generate the compare. */
7691 scratch = gen_reg_rtx (CCmode);
7692 pat = GEN_FCN (icode) (scratch, op0, op1);
7697 if (mode0 == V2SImode)
7698 emit_insn (gen_spe_evsel (target, op2, op3, scratch));
7700 emit_insn (gen_spe_evsel_fs (target, op2, op3, scratch));
7705 /* Expand an expression EXP that calls a built-in function,
7706 with result going to TARGET if that's convenient
7707 (and in mode MODE if that's convenient).
7708 SUBTARGET may be used as the target for computing one of EXP's operands.
7709 IGNORE is nonzero if the value is to be ignored. */
7712 rs6000_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
7713 enum machine_mode mode ATTRIBUTE_UNUSED,
7714 int ignore ATTRIBUTE_UNUSED)
7716 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7717 tree arglist = TREE_OPERAND (exp, 1);
7718 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
7719 struct builtin_description *d;
7724 if (fcode == ALTIVEC_BUILTIN_MASK_FOR_LOAD
7725 || fcode == ALTIVEC_BUILTIN_MASK_FOR_STORE)
7727 int icode = (int) CODE_FOR_altivec_lvsr;
7728 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7729 enum machine_mode mode = insn_data[icode].operand[1].mode;
7733 gcc_assert (TARGET_ALTIVEC);
7735 arg = TREE_VALUE (arglist);
7736 gcc_assert (TREE_CODE (TREE_TYPE (arg)) == POINTER_TYPE);
7737 op = expand_expr (arg, NULL_RTX, Pmode, EXPAND_NORMAL);
7738 addr = memory_address (mode, op);
7739 if (fcode == ALTIVEC_BUILTIN_MASK_FOR_STORE)
7743 /* For the load case need to negate the address. */
7744 op = gen_reg_rtx (GET_MODE (addr));
7745 emit_insn (gen_rtx_SET (VOIDmode, op,
7746 gen_rtx_NEG (GET_MODE (addr), addr)));
7748 op = gen_rtx_MEM (mode, op);
7751 || GET_MODE (target) != tmode
7752 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7753 target = gen_reg_rtx (tmode);
7755 /*pat = gen_altivec_lvsr (target, op);*/
7756 pat = GEN_FCN (icode) (target, op);
7766 ret = altivec_expand_builtin (exp, target, &success);
7773 ret = spe_expand_builtin (exp, target, &success);
7779 gcc_assert (TARGET_ALTIVEC || TARGET_SPE);
7781 /* Handle simple unary operations. */
7782 d = (struct builtin_description *) bdesc_1arg;
7783 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
7784 if (d->code == fcode)
7785 return rs6000_expand_unop_builtin (d->icode, arglist, target);
7787 /* Handle simple binary operations. */
7788 d = (struct builtin_description *) bdesc_2arg;
7789 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
7790 if (d->code == fcode)
7791 return rs6000_expand_binop_builtin (d->icode, arglist, target);
7793 /* Handle simple ternary operations. */
7794 d = (struct builtin_description *) bdesc_3arg;
7795 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
7796 if (d->code == fcode)
7797 return rs6000_expand_ternop_builtin (d->icode, arglist, target);
7803 build_opaque_vector_type (tree node, int nunits)
7805 node = copy_node (node);
7806 TYPE_MAIN_VARIANT (node) = node;
7807 return build_vector_type (node, nunits);
7811 rs6000_init_builtins (void)
7813 V2SI_type_node = build_vector_type (intSI_type_node, 2);
7814 V2SF_type_node = build_vector_type (float_type_node, 2);
7815 V4HI_type_node = build_vector_type (intHI_type_node, 4);
7816 V4SI_type_node = build_vector_type (intSI_type_node, 4);
7817 V4SF_type_node = build_vector_type (float_type_node, 4);
7818 V8HI_type_node = build_vector_type (intHI_type_node, 8);
7819 V16QI_type_node = build_vector_type (intQI_type_node, 16);
7821 unsigned_V16QI_type_node = build_vector_type (unsigned_intQI_type_node, 16);
7822 unsigned_V8HI_type_node = build_vector_type (unsigned_intHI_type_node, 8);
7823 unsigned_V4SI_type_node = build_vector_type (unsigned_intSI_type_node, 4);
7825 opaque_V2SF_type_node = build_opaque_vector_type (float_type_node, 2);
7826 opaque_V2SI_type_node = build_opaque_vector_type (intSI_type_node, 2);
7827 opaque_p_V2SI_type_node = build_pointer_type (opaque_V2SI_type_node);
7828 opaque_V4SI_type_node = copy_node (V4SI_type_node);
7830 /* The 'vector bool ...' types must be kept distinct from 'vector unsigned ...'
7831 types, especially in C++ land. Similarly, 'vector pixel' is distinct from
7832 'vector unsigned short'. */
7834 bool_char_type_node = build_distinct_type_copy (unsigned_intQI_type_node);
7835 bool_short_type_node = build_distinct_type_copy (unsigned_intHI_type_node);
7836 bool_int_type_node = build_distinct_type_copy (unsigned_intSI_type_node);
7837 pixel_type_node = build_distinct_type_copy (unsigned_intHI_type_node);
7839 long_integer_type_internal_node = long_integer_type_node;
7840 long_unsigned_type_internal_node = long_unsigned_type_node;
7841 intQI_type_internal_node = intQI_type_node;
7842 uintQI_type_internal_node = unsigned_intQI_type_node;
7843 intHI_type_internal_node = intHI_type_node;
7844 uintHI_type_internal_node = unsigned_intHI_type_node;
7845 intSI_type_internal_node = intSI_type_node;
7846 uintSI_type_internal_node = unsigned_intSI_type_node;
7847 float_type_internal_node = float_type_node;
7848 void_type_internal_node = void_type_node;
7850 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7851 get_identifier ("__bool char"),
7852 bool_char_type_node));
7853 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7854 get_identifier ("__bool short"),
7855 bool_short_type_node));
7856 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7857 get_identifier ("__bool int"),
7858 bool_int_type_node));
7859 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7860 get_identifier ("__pixel"),
7863 bool_V16QI_type_node = build_vector_type (bool_char_type_node, 16);
7864 bool_V8HI_type_node = build_vector_type (bool_short_type_node, 8);
7865 bool_V4SI_type_node = build_vector_type (bool_int_type_node, 4);
7866 pixel_V8HI_type_node = build_vector_type (pixel_type_node, 8);
7868 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7869 get_identifier ("__vector unsigned char"),
7870 unsigned_V16QI_type_node));
7871 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7872 get_identifier ("__vector signed char"),
7874 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7875 get_identifier ("__vector __bool char"),
7876 bool_V16QI_type_node));
7878 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7879 get_identifier ("__vector unsigned short"),
7880 unsigned_V8HI_type_node));
7881 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7882 get_identifier ("__vector signed short"),
7884 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7885 get_identifier ("__vector __bool short"),
7886 bool_V8HI_type_node));
7888 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7889 get_identifier ("__vector unsigned int"),
7890 unsigned_V4SI_type_node));
7891 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7892 get_identifier ("__vector signed int"),
7894 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7895 get_identifier ("__vector __bool int"),
7896 bool_V4SI_type_node));
7898 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7899 get_identifier ("__vector float"),
7901 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7902 get_identifier ("__vector __pixel"),
7903 pixel_V8HI_type_node));
7906 spe_init_builtins ();
7908 altivec_init_builtins ();
7909 if (TARGET_ALTIVEC || TARGET_SPE)
7910 rs6000_common_init_builtins ();
7913 /* Search through a set of builtins and enable the mask bits.
7914 DESC is an array of builtins.
7915 SIZE is the total number of builtins.
7916 START is the builtin enum at which to start.
7917 END is the builtin enum at which to end. */
7919 enable_mask_for_builtins (struct builtin_description *desc, int size,
7920 enum rs6000_builtins start,
7921 enum rs6000_builtins end)
7925 for (i = 0; i < size; ++i)
7926 if (desc[i].code == start)
7932 for (; i < size; ++i)
7934 /* Flip all the bits on. */
7935 desc[i].mask = target_flags;
7936 if (desc[i].code == end)
7942 spe_init_builtins (void)
7944 tree endlink = void_list_node;
7945 tree puint_type_node = build_pointer_type (unsigned_type_node);
7946 tree pushort_type_node = build_pointer_type (short_unsigned_type_node);
7947 struct builtin_description *d;
7950 tree v2si_ftype_4_v2si
7951 = build_function_type
7952 (opaque_V2SI_type_node,
7953 tree_cons (NULL_TREE, opaque_V2SI_type_node,
7954 tree_cons (NULL_TREE, opaque_V2SI_type_node,
7955 tree_cons (NULL_TREE, opaque_V2SI_type_node,
7956 tree_cons (NULL_TREE, opaque_V2SI_type_node,
7959 tree v2sf_ftype_4_v2sf
7960 = build_function_type
7961 (opaque_V2SF_type_node,
7962 tree_cons (NULL_TREE, opaque_V2SF_type_node,
7963 tree_cons (NULL_TREE, opaque_V2SF_type_node,
7964 tree_cons (NULL_TREE, opaque_V2SF_type_node,
7965 tree_cons (NULL_TREE, opaque_V2SF_type_node,
7968 tree int_ftype_int_v2si_v2si
7969 = build_function_type
7971 tree_cons (NULL_TREE, integer_type_node,
7972 tree_cons (NULL_TREE, opaque_V2SI_type_node,
7973 tree_cons (NULL_TREE, opaque_V2SI_type_node,
7976 tree int_ftype_int_v2sf_v2sf
7977 = build_function_type
7979 tree_cons (NULL_TREE, integer_type_node,
7980 tree_cons (NULL_TREE, opaque_V2SF_type_node,
7981 tree_cons (NULL_TREE, opaque_V2SF_type_node,
7984 tree void_ftype_v2si_puint_int
7985 = build_function_type (void_type_node,
7986 tree_cons (NULL_TREE, opaque_V2SI_type_node,
7987 tree_cons (NULL_TREE, puint_type_node,
7988 tree_cons (NULL_TREE,
7992 tree void_ftype_v2si_puint_char
7993 = build_function_type (void_type_node,
7994 tree_cons (NULL_TREE, opaque_V2SI_type_node,
7995 tree_cons (NULL_TREE, puint_type_node,
7996 tree_cons (NULL_TREE,
8000 tree void_ftype_v2si_pv2si_int
8001 = build_function_type (void_type_node,
8002 tree_cons (NULL_TREE, opaque_V2SI_type_node,
8003 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
8004 tree_cons (NULL_TREE,
8008 tree void_ftype_v2si_pv2si_char
8009 = build_function_type (void_type_node,
8010 tree_cons (NULL_TREE, opaque_V2SI_type_node,
8011 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
8012 tree_cons (NULL_TREE,
8017 = build_function_type (void_type_node,
8018 tree_cons (NULL_TREE, integer_type_node, endlink));
8021 = build_function_type (integer_type_node, endlink);
8023 tree v2si_ftype_pv2si_int
8024 = build_function_type (opaque_V2SI_type_node,
8025 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
8026 tree_cons (NULL_TREE, integer_type_node,
8029 tree v2si_ftype_puint_int
8030 = build_function_type (opaque_V2SI_type_node,
8031 tree_cons (NULL_TREE, puint_type_node,
8032 tree_cons (NULL_TREE, integer_type_node,
8035 tree v2si_ftype_pushort_int
8036 = build_function_type (opaque_V2SI_type_node,
8037 tree_cons (NULL_TREE, pushort_type_node,
8038 tree_cons (NULL_TREE, integer_type_node,
8041 tree v2si_ftype_signed_char
8042 = build_function_type (opaque_V2SI_type_node,
8043 tree_cons (NULL_TREE, signed_char_type_node,
8046 /* The initialization of the simple binary and unary builtins is
8047 done in rs6000_common_init_builtins, but we have to enable the
8048 mask bits here manually because we have run out of `target_flags'
8049 bits. We really need to redesign this mask business. */
8051 enable_mask_for_builtins ((struct builtin_description *) bdesc_2arg,
8052 ARRAY_SIZE (bdesc_2arg),
8055 enable_mask_for_builtins ((struct builtin_description *) bdesc_1arg,
8056 ARRAY_SIZE (bdesc_1arg),
8058 SPE_BUILTIN_EVSUBFUSIAAW);
8059 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_predicates,
8060 ARRAY_SIZE (bdesc_spe_predicates),
8061 SPE_BUILTIN_EVCMPEQ,
8062 SPE_BUILTIN_EVFSTSTLT);
8063 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_evsel,
8064 ARRAY_SIZE (bdesc_spe_evsel),
8065 SPE_BUILTIN_EVSEL_CMPGTS,
8066 SPE_BUILTIN_EVSEL_FSTSTEQ);
8068 (*lang_hooks.decls.pushdecl)
8069 (build_decl (TYPE_DECL, get_identifier ("__ev64_opaque__"),
8070 opaque_V2SI_type_node));
8072 /* Initialize irregular SPE builtins. */
8074 def_builtin (target_flags, "__builtin_spe_mtspefscr", void_ftype_int, SPE_BUILTIN_MTSPEFSCR);
8075 def_builtin (target_flags, "__builtin_spe_mfspefscr", int_ftype_void, SPE_BUILTIN_MFSPEFSCR);
8076 def_builtin (target_flags, "__builtin_spe_evstddx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDDX);
8077 def_builtin (target_flags, "__builtin_spe_evstdhx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDHX);
8078 def_builtin (target_flags, "__builtin_spe_evstdwx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDWX);
8079 def_builtin (target_flags, "__builtin_spe_evstwhex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHEX);
8080 def_builtin (target_flags, "__builtin_spe_evstwhox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHOX);
8081 def_builtin (target_flags, "__builtin_spe_evstwwex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWEX);
8082 def_builtin (target_flags, "__builtin_spe_evstwwox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWOX);
8083 def_builtin (target_flags, "__builtin_spe_evstdd", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDD);
8084 def_builtin (target_flags, "__builtin_spe_evstdh", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDH);
8085 def_builtin (target_flags, "__builtin_spe_evstdw", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDW);
8086 def_builtin (target_flags, "__builtin_spe_evstwhe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHE);
8087 def_builtin (target_flags, "__builtin_spe_evstwho", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHO);
8088 def_builtin (target_flags, "__builtin_spe_evstwwe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWE);
8089 def_builtin (target_flags, "__builtin_spe_evstwwo", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWO);
8090 def_builtin (target_flags, "__builtin_spe_evsplatfi", v2si_ftype_signed_char, SPE_BUILTIN_EVSPLATFI);
8091 def_builtin (target_flags, "__builtin_spe_evsplati", v2si_ftype_signed_char, SPE_BUILTIN_EVSPLATI);
8094 def_builtin (target_flags, "__builtin_spe_evlddx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDDX);
8095 def_builtin (target_flags, "__builtin_spe_evldwx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDWX);
8096 def_builtin (target_flags, "__builtin_spe_evldhx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDHX);
8097 def_builtin (target_flags, "__builtin_spe_evlwhex", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHEX);
8098 def_builtin (target_flags, "__builtin_spe_evlwhoux", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOUX);
8099 def_builtin (target_flags, "__builtin_spe_evlwhosx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOSX);
8100 def_builtin (target_flags, "__builtin_spe_evlwwsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLATX);
8101 def_builtin (target_flags, "__builtin_spe_evlwhsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLATX);
8102 def_builtin (target_flags, "__builtin_spe_evlhhesplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLATX);
8103 def_builtin (target_flags, "__builtin_spe_evlhhousplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLATX);
8104 def_builtin (target_flags, "__builtin_spe_evlhhossplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLATX);
8105 def_builtin (target_flags, "__builtin_spe_evldd", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDD);
8106 def_builtin (target_flags, "__builtin_spe_evldw", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDW);
8107 def_builtin (target_flags, "__builtin_spe_evldh", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDH);
8108 def_builtin (target_flags, "__builtin_spe_evlhhesplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLAT);
8109 def_builtin (target_flags, "__builtin_spe_evlhhossplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLAT);
8110 def_builtin (target_flags, "__builtin_spe_evlhhousplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLAT);
8111 def_builtin (target_flags, "__builtin_spe_evlwhe", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHE);
8112 def_builtin (target_flags, "__builtin_spe_evlwhos", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOS);
8113 def_builtin (target_flags, "__builtin_spe_evlwhou", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOU);
8114 def_builtin (target_flags, "__builtin_spe_evlwhsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLAT);
8115 def_builtin (target_flags, "__builtin_spe_evlwwsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLAT);
8118 d = (struct builtin_description *) bdesc_spe_predicates;
8119 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, d++)
8123 switch (insn_data[d->icode].operand[1].mode)
8126 type = int_ftype_int_v2si_v2si;
8129 type = int_ftype_int_v2sf_v2sf;
8135 def_builtin (d->mask, d->name, type, d->code);
8138 /* Evsel predicates. */
8139 d = (struct builtin_description *) bdesc_spe_evsel;
8140 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, d++)
8144 switch (insn_data[d->icode].operand[1].mode)
8147 type = v2si_ftype_4_v2si;
8150 type = v2sf_ftype_4_v2sf;
8156 def_builtin (d->mask, d->name, type, d->code);
8161 altivec_init_builtins (void)
8163 struct builtin_description *d;
8164 struct builtin_description_predicates *dp;
8168 tree pfloat_type_node = build_pointer_type (float_type_node);
8169 tree pint_type_node = build_pointer_type (integer_type_node);
8170 tree pshort_type_node = build_pointer_type (short_integer_type_node);
8171 tree pchar_type_node = build_pointer_type (char_type_node);
8173 tree pvoid_type_node = build_pointer_type (void_type_node);
8175 tree pcfloat_type_node = build_pointer_type (build_qualified_type (float_type_node, TYPE_QUAL_CONST));
8176 tree pcint_type_node = build_pointer_type (build_qualified_type (integer_type_node, TYPE_QUAL_CONST));
8177 tree pcshort_type_node = build_pointer_type (build_qualified_type (short_integer_type_node, TYPE_QUAL_CONST));
8178 tree pcchar_type_node = build_pointer_type (build_qualified_type (char_type_node, TYPE_QUAL_CONST));
8180 tree pcvoid_type_node = build_pointer_type (build_qualified_type (void_type_node, TYPE_QUAL_CONST));
8182 tree int_ftype_opaque
8183 = build_function_type_list (integer_type_node,
8184 opaque_V4SI_type_node, NULL_TREE);
8186 tree opaque_ftype_opaque_int
8187 = build_function_type_list (opaque_V4SI_type_node,
8188 opaque_V4SI_type_node, integer_type_node, NULL_TREE);
8189 tree opaque_ftype_opaque_opaque_int
8190 = build_function_type_list (opaque_V4SI_type_node,
8191 opaque_V4SI_type_node, opaque_V4SI_type_node,
8192 integer_type_node, NULL_TREE);
8193 tree int_ftype_int_opaque_opaque
8194 = build_function_type_list (integer_type_node,
8195 integer_type_node, opaque_V4SI_type_node,
8196 opaque_V4SI_type_node, NULL_TREE);
8197 tree int_ftype_int_v4si_v4si
8198 = build_function_type_list (integer_type_node,
8199 integer_type_node, V4SI_type_node,
8200 V4SI_type_node, NULL_TREE);
8201 tree v4sf_ftype_pcfloat
8202 = build_function_type_list (V4SF_type_node, pcfloat_type_node, NULL_TREE);
8203 tree void_ftype_pfloat_v4sf
8204 = build_function_type_list (void_type_node,
8205 pfloat_type_node, V4SF_type_node, NULL_TREE);
8206 tree v4si_ftype_pcint
8207 = build_function_type_list (V4SI_type_node, pcint_type_node, NULL_TREE);
8208 tree void_ftype_pint_v4si
8209 = build_function_type_list (void_type_node,
8210 pint_type_node, V4SI_type_node, NULL_TREE);
8211 tree v8hi_ftype_pcshort
8212 = build_function_type_list (V8HI_type_node, pcshort_type_node, NULL_TREE);
8213 tree void_ftype_pshort_v8hi
8214 = build_function_type_list (void_type_node,
8215 pshort_type_node, V8HI_type_node, NULL_TREE);
8216 tree v16qi_ftype_pcchar
8217 = build_function_type_list (V16QI_type_node, pcchar_type_node, NULL_TREE);
8218 tree void_ftype_pchar_v16qi
8219 = build_function_type_list (void_type_node,
8220 pchar_type_node, V16QI_type_node, NULL_TREE);
8221 tree void_ftype_v4si
8222 = build_function_type_list (void_type_node, V4SI_type_node, NULL_TREE);
8223 tree v8hi_ftype_void
8224 = build_function_type (V8HI_type_node, void_list_node);
8225 tree void_ftype_void
8226 = build_function_type (void_type_node, void_list_node);
8228 = build_function_type_list (void_type_node, integer_type_node, NULL_TREE);
8230 tree opaque_ftype_long_pcvoid
8231 = build_function_type_list (opaque_V4SI_type_node,
8232 long_integer_type_node, pcvoid_type_node, NULL_TREE);
8233 tree v16qi_ftype_long_pcvoid
8234 = build_function_type_list (V16QI_type_node,
8235 long_integer_type_node, pcvoid_type_node, NULL_TREE);
8236 tree v8hi_ftype_long_pcvoid
8237 = build_function_type_list (V8HI_type_node,
8238 long_integer_type_node, pcvoid_type_node, NULL_TREE);
8239 tree v4si_ftype_long_pcvoid
8240 = build_function_type_list (V4SI_type_node,
8241 long_integer_type_node, pcvoid_type_node, NULL_TREE);
8243 tree void_ftype_opaque_long_pvoid
8244 = build_function_type_list (void_type_node,
8245 opaque_V4SI_type_node, long_integer_type_node,
8246 pvoid_type_node, NULL_TREE);
8247 tree void_ftype_v4si_long_pvoid
8248 = build_function_type_list (void_type_node,
8249 V4SI_type_node, long_integer_type_node,
8250 pvoid_type_node, NULL_TREE);
8251 tree void_ftype_v16qi_long_pvoid
8252 = build_function_type_list (void_type_node,
8253 V16QI_type_node, long_integer_type_node,
8254 pvoid_type_node, NULL_TREE);
8255 tree void_ftype_v8hi_long_pvoid
8256 = build_function_type_list (void_type_node,
8257 V8HI_type_node, long_integer_type_node,
8258 pvoid_type_node, NULL_TREE);
8259 tree int_ftype_int_v8hi_v8hi
8260 = build_function_type_list (integer_type_node,
8261 integer_type_node, V8HI_type_node,
8262 V8HI_type_node, NULL_TREE);
8263 tree int_ftype_int_v16qi_v16qi
8264 = build_function_type_list (integer_type_node,
8265 integer_type_node, V16QI_type_node,
8266 V16QI_type_node, NULL_TREE);
8267 tree int_ftype_int_v4sf_v4sf
8268 = build_function_type_list (integer_type_node,
8269 integer_type_node, V4SF_type_node,
8270 V4SF_type_node, NULL_TREE);
8271 tree v4si_ftype_v4si
8272 = build_function_type_list (V4SI_type_node, V4SI_type_node, NULL_TREE);
8273 tree v8hi_ftype_v8hi
8274 = build_function_type_list (V8HI_type_node, V8HI_type_node, NULL_TREE);
8275 tree v16qi_ftype_v16qi
8276 = build_function_type_list (V16QI_type_node, V16QI_type_node, NULL_TREE);
8277 tree v4sf_ftype_v4sf
8278 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
8279 tree void_ftype_pcvoid_int_int
8280 = build_function_type_list (void_type_node,
8281 pcvoid_type_node, integer_type_node,
8282 integer_type_node, NULL_TREE);
8284 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pcfloat,
8285 ALTIVEC_BUILTIN_LD_INTERNAL_4sf);
8286 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf,
8287 ALTIVEC_BUILTIN_ST_INTERNAL_4sf);
8288 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4si", v4si_ftype_pcint,
8289 ALTIVEC_BUILTIN_LD_INTERNAL_4si);
8290 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si,
8291 ALTIVEC_BUILTIN_ST_INTERNAL_4si);
8292 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pcshort,
8293 ALTIVEC_BUILTIN_LD_INTERNAL_8hi);
8294 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi,
8295 ALTIVEC_BUILTIN_ST_INTERNAL_8hi);
8296 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pcchar,
8297 ALTIVEC_BUILTIN_LD_INTERNAL_16qi);
8298 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi,
8299 ALTIVEC_BUILTIN_ST_INTERNAL_16qi);
8300 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mtvscr", void_ftype_v4si, ALTIVEC_BUILTIN_MTVSCR);
8301 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mfvscr", v8hi_ftype_void, ALTIVEC_BUILTIN_MFVSCR);
8302 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dssall", void_ftype_void, ALTIVEC_BUILTIN_DSSALL);
8303 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dss", void_ftype_int, ALTIVEC_BUILTIN_DSS);
8304 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVSL);
8305 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsr", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVSR);
8306 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvebx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEBX);
8307 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvehx", v8hi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEHX);
8308 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvewx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEWX);
8309 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvxl", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVXL);
8310 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVX);
8311 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvx", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVX);
8312 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvewx", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVEWX);
8313 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvxl", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVXL);
8314 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvebx", void_ftype_v16qi_long_pvoid, ALTIVEC_BUILTIN_STVEBX);
8315 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvehx", void_ftype_v8hi_long_pvoid, ALTIVEC_BUILTIN_STVEHX);
8316 def_builtin (MASK_ALTIVEC, "__builtin_vec_ld", opaque_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LD);
8317 def_builtin (MASK_ALTIVEC, "__builtin_vec_lde", opaque_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LDE);
8318 def_builtin (MASK_ALTIVEC, "__builtin_vec_ldl", opaque_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LDL);
8319 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvsl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVSL);
8320 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvsr", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVSR);
8321 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvebx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVEBX);
8322 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvehx", v8hi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVEHX);
8323 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvewx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVEWX);
8324 def_builtin (MASK_ALTIVEC, "__builtin_vec_st", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_ST);
8325 def_builtin (MASK_ALTIVEC, "__builtin_vec_ste", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STE);
8326 def_builtin (MASK_ALTIVEC, "__builtin_vec_stl", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STL);
8327 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvewx", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STVEWX);
8328 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvebx", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STVEBX);
8329 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvehx", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STVEHX);
8331 def_builtin (MASK_ALTIVEC, "__builtin_vec_step", int_ftype_opaque, ALTIVEC_BUILTIN_VEC_STEP);
8333 def_builtin (MASK_ALTIVEC, "__builtin_vec_sld", opaque_ftype_opaque_opaque_int, ALTIVEC_BUILTIN_VEC_SLD);
8334 def_builtin (MASK_ALTIVEC, "__builtin_vec_splat", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_SPLAT);
8335 def_builtin (MASK_ALTIVEC, "__builtin_vec_vspltw", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VSPLTW);
8336 def_builtin (MASK_ALTIVEC, "__builtin_vec_vsplth", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VSPLTH);
8337 def_builtin (MASK_ALTIVEC, "__builtin_vec_vspltb", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VSPLTB);
8338 def_builtin (MASK_ALTIVEC, "__builtin_vec_ctf", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_CTF);
8339 def_builtin (MASK_ALTIVEC, "__builtin_vec_vcfsx", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VCFSX);
8340 def_builtin (MASK_ALTIVEC, "__builtin_vec_vcfux", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VCFUX);
8341 def_builtin (MASK_ALTIVEC, "__builtin_vec_cts", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_CTS);
8342 def_builtin (MASK_ALTIVEC, "__builtin_vec_ctu", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_CTU);
8344 /* Add the DST variants. */
8345 d = (struct builtin_description *) bdesc_dst;
8346 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
8347 def_builtin (d->mask, d->name, void_ftype_pcvoid_int_int, d->code);
8349 /* Initialize the predicates. */
8350 dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
8351 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
8353 enum machine_mode mode1;
8355 bool is_overloaded = dp->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
8356 && dp->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
8361 mode1 = insn_data[dp->icode].operand[1].mode;
8366 type = int_ftype_int_opaque_opaque;
8369 type = int_ftype_int_v4si_v4si;
8372 type = int_ftype_int_v8hi_v8hi;
8375 type = int_ftype_int_v16qi_v16qi;
8378 type = int_ftype_int_v4sf_v4sf;
8384 def_builtin (dp->mask, dp->name, type, dp->code);
8387 /* Initialize the abs* operators. */
8388 d = (struct builtin_description *) bdesc_abs;
8389 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
8391 enum machine_mode mode0;
8394 mode0 = insn_data[d->icode].operand[0].mode;
8399 type = v4si_ftype_v4si;
8402 type = v8hi_ftype_v8hi;
8405 type = v16qi_ftype_v16qi;
8408 type = v4sf_ftype_v4sf;
8414 def_builtin (d->mask, d->name, type, d->code);
8421 /* Initialize target builtin that implements
8422 targetm.vectorize.builtin_mask_for_load. */
8424 decl = lang_hooks.builtin_function ("__builtin_altivec_mask_for_load",
8425 v16qi_ftype_long_pcvoid,
8426 ALTIVEC_BUILTIN_MASK_FOR_LOAD,
8428 tree_cons (get_identifier ("const"),
8429 NULL_TREE, NULL_TREE));
8430 /* Record the decl. Will be used by rs6000_builtin_mask_for_load. */
8431 altivec_builtin_mask_for_load = decl;
8434 /* Access to the vec_init patterns. */
8435 ftype = build_function_type_list (V4SI_type_node, integer_type_node,
8436 integer_type_node, integer_type_node,
8437 integer_type_node, NULL_TREE);
8438 def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v4si", ftype,
8439 ALTIVEC_BUILTIN_VEC_INIT_V4SI);
8441 ftype = build_function_type_list (V8HI_type_node, short_integer_type_node,
8442 short_integer_type_node,
8443 short_integer_type_node,
8444 short_integer_type_node,
8445 short_integer_type_node,
8446 short_integer_type_node,
8447 short_integer_type_node,
8448 short_integer_type_node, NULL_TREE);
8449 def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v8hi", ftype,
8450 ALTIVEC_BUILTIN_VEC_INIT_V8HI);
8452 ftype = build_function_type_list (V16QI_type_node, char_type_node,
8453 char_type_node, char_type_node,
8454 char_type_node, char_type_node,
8455 char_type_node, char_type_node,
8456 char_type_node, char_type_node,
8457 char_type_node, char_type_node,
8458 char_type_node, char_type_node,
8459 char_type_node, char_type_node,
8460 char_type_node, NULL_TREE);
8461 def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v16qi", ftype,
8462 ALTIVEC_BUILTIN_VEC_INIT_V16QI);
8464 ftype = build_function_type_list (V4SF_type_node, float_type_node,
8465 float_type_node, float_type_node,
8466 float_type_node, NULL_TREE);
8467 def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v4sf", ftype,
8468 ALTIVEC_BUILTIN_VEC_INIT_V4SF);
8470 /* Access to the vec_set patterns. */
8471 ftype = build_function_type_list (V4SI_type_node, V4SI_type_node,
8473 integer_type_node, NULL_TREE);
8474 def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v4si", ftype,
8475 ALTIVEC_BUILTIN_VEC_SET_V4SI);
8477 ftype = build_function_type_list (V8HI_type_node, V8HI_type_node,
8479 integer_type_node, NULL_TREE);
8480 def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v8hi", ftype,
8481 ALTIVEC_BUILTIN_VEC_SET_V8HI);
8483 ftype = build_function_type_list (V8HI_type_node, V16QI_type_node,
8485 integer_type_node, NULL_TREE);
8486 def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v16qi", ftype,
8487 ALTIVEC_BUILTIN_VEC_SET_V16QI);
8489 ftype = build_function_type_list (V4SF_type_node, V4SF_type_node,
8491 integer_type_node, NULL_TREE);
8492 def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v4sf", ftype,
8493 ALTIVEC_BUILTIN_VEC_SET_V4SF);
8495 /* Access to the vec_extract patterns. */
8496 ftype = build_function_type_list (intSI_type_node, V4SI_type_node,
8497 integer_type_node, NULL_TREE);
8498 def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v4si", ftype,
8499 ALTIVEC_BUILTIN_VEC_EXT_V4SI);
8501 ftype = build_function_type_list (intHI_type_node, V8HI_type_node,
8502 integer_type_node, NULL_TREE);
8503 def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v8hi", ftype,
8504 ALTIVEC_BUILTIN_VEC_EXT_V8HI);
8506 ftype = build_function_type_list (intQI_type_node, V16QI_type_node,
8507 integer_type_node, NULL_TREE);
8508 def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v16qi", ftype,
8509 ALTIVEC_BUILTIN_VEC_EXT_V16QI);
8511 ftype = build_function_type_list (float_type_node, V4SF_type_node,
8512 integer_type_node, NULL_TREE);
8513 def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v4sf", ftype,
8514 ALTIVEC_BUILTIN_VEC_EXT_V4SF);
8518 rs6000_common_init_builtins (void)
8520 struct builtin_description *d;
8523 tree v4sf_ftype_v4sf_v4sf_v16qi
8524 = build_function_type_list (V4SF_type_node,
8525 V4SF_type_node, V4SF_type_node,
8526 V16QI_type_node, NULL_TREE);
8527 tree v4si_ftype_v4si_v4si_v16qi
8528 = build_function_type_list (V4SI_type_node,
8529 V4SI_type_node, V4SI_type_node,
8530 V16QI_type_node, NULL_TREE);
8531 tree v8hi_ftype_v8hi_v8hi_v16qi
8532 = build_function_type_list (V8HI_type_node,
8533 V8HI_type_node, V8HI_type_node,
8534 V16QI_type_node, NULL_TREE);
8535 tree v16qi_ftype_v16qi_v16qi_v16qi
8536 = build_function_type_list (V16QI_type_node,
8537 V16QI_type_node, V16QI_type_node,
8538 V16QI_type_node, NULL_TREE);
8540 = build_function_type_list (V4SI_type_node, integer_type_node, NULL_TREE);
8542 = build_function_type_list (V8HI_type_node, integer_type_node, NULL_TREE);
8543 tree v16qi_ftype_int
8544 = build_function_type_list (V16QI_type_node, integer_type_node, NULL_TREE);
8545 tree v8hi_ftype_v16qi
8546 = build_function_type_list (V8HI_type_node, V16QI_type_node, NULL_TREE);
8547 tree v4sf_ftype_v4sf
8548 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
8550 tree v2si_ftype_v2si_v2si
8551 = build_function_type_list (opaque_V2SI_type_node,
8552 opaque_V2SI_type_node,
8553 opaque_V2SI_type_node, NULL_TREE);
8555 tree v2sf_ftype_v2sf_v2sf
8556 = build_function_type_list (opaque_V2SF_type_node,
8557 opaque_V2SF_type_node,
8558 opaque_V2SF_type_node, NULL_TREE);
8560 tree v2si_ftype_int_int
8561 = build_function_type_list (opaque_V2SI_type_node,
8562 integer_type_node, integer_type_node,
8565 tree opaque_ftype_opaque
8566 = build_function_type_list (opaque_V4SI_type_node,
8567 opaque_V4SI_type_node, NULL_TREE);
8569 tree v2si_ftype_v2si
8570 = build_function_type_list (opaque_V2SI_type_node,
8571 opaque_V2SI_type_node, NULL_TREE);
8573 tree v2sf_ftype_v2sf
8574 = build_function_type_list (opaque_V2SF_type_node,
8575 opaque_V2SF_type_node, NULL_TREE);
8577 tree v2sf_ftype_v2si
8578 = build_function_type_list (opaque_V2SF_type_node,
8579 opaque_V2SI_type_node, NULL_TREE);
8581 tree v2si_ftype_v2sf
8582 = build_function_type_list (opaque_V2SI_type_node,
8583 opaque_V2SF_type_node, NULL_TREE);
8585 tree v2si_ftype_v2si_char
8586 = build_function_type_list (opaque_V2SI_type_node,
8587 opaque_V2SI_type_node,
8588 char_type_node, NULL_TREE);
8590 tree v2si_ftype_int_char
8591 = build_function_type_list (opaque_V2SI_type_node,
8592 integer_type_node, char_type_node, NULL_TREE);
8594 tree v2si_ftype_char
8595 = build_function_type_list (opaque_V2SI_type_node,
8596 char_type_node, NULL_TREE);
8598 tree int_ftype_int_int
8599 = build_function_type_list (integer_type_node,
8600 integer_type_node, integer_type_node,
8603 tree opaque_ftype_opaque_opaque
8604 = build_function_type_list (opaque_V4SI_type_node,
8605 opaque_V4SI_type_node, opaque_V4SI_type_node, NULL_TREE);
8606 tree v4si_ftype_v4si_v4si
8607 = build_function_type_list (V4SI_type_node,
8608 V4SI_type_node, V4SI_type_node, NULL_TREE);
8609 tree v4sf_ftype_v4si_int
8610 = build_function_type_list (V4SF_type_node,
8611 V4SI_type_node, integer_type_node, NULL_TREE);
8612 tree v4si_ftype_v4sf_int
8613 = build_function_type_list (V4SI_type_node,
8614 V4SF_type_node, integer_type_node, NULL_TREE);
8615 tree v4si_ftype_v4si_int
8616 = build_function_type_list (V4SI_type_node,
8617 V4SI_type_node, integer_type_node, NULL_TREE);
8618 tree v8hi_ftype_v8hi_int
8619 = build_function_type_list (V8HI_type_node,
8620 V8HI_type_node, integer_type_node, NULL_TREE);
8621 tree v16qi_ftype_v16qi_int
8622 = build_function_type_list (V16QI_type_node,
8623 V16QI_type_node, integer_type_node, NULL_TREE);
8624 tree v16qi_ftype_v16qi_v16qi_int
8625 = build_function_type_list (V16QI_type_node,
8626 V16QI_type_node, V16QI_type_node,
8627 integer_type_node, NULL_TREE);
8628 tree v8hi_ftype_v8hi_v8hi_int
8629 = build_function_type_list (V8HI_type_node,
8630 V8HI_type_node, V8HI_type_node,
8631 integer_type_node, NULL_TREE);
8632 tree v4si_ftype_v4si_v4si_int
8633 = build_function_type_list (V4SI_type_node,
8634 V4SI_type_node, V4SI_type_node,
8635 integer_type_node, NULL_TREE);
8636 tree v4sf_ftype_v4sf_v4sf_int
8637 = build_function_type_list (V4SF_type_node,
8638 V4SF_type_node, V4SF_type_node,
8639 integer_type_node, NULL_TREE);
8640 tree v4sf_ftype_v4sf_v4sf
8641 = build_function_type_list (V4SF_type_node,
8642 V4SF_type_node, V4SF_type_node, NULL_TREE);
8643 tree opaque_ftype_opaque_opaque_opaque
8644 = build_function_type_list (opaque_V4SI_type_node,
8645 opaque_V4SI_type_node, opaque_V4SI_type_node,
8646 opaque_V4SI_type_node, NULL_TREE);
8647 tree v4sf_ftype_v4sf_v4sf_v4si
8648 = build_function_type_list (V4SF_type_node,
8649 V4SF_type_node, V4SF_type_node,
8650 V4SI_type_node, NULL_TREE);
8651 tree v4sf_ftype_v4sf_v4sf_v4sf
8652 = build_function_type_list (V4SF_type_node,
8653 V4SF_type_node, V4SF_type_node,
8654 V4SF_type_node, NULL_TREE);
8655 tree v4si_ftype_v4si_v4si_v4si
8656 = build_function_type_list (V4SI_type_node,
8657 V4SI_type_node, V4SI_type_node,
8658 V4SI_type_node, NULL_TREE);
8659 tree v8hi_ftype_v8hi_v8hi
8660 = build_function_type_list (V8HI_type_node,
8661 V8HI_type_node, V8HI_type_node, NULL_TREE);
8662 tree v8hi_ftype_v8hi_v8hi_v8hi
8663 = build_function_type_list (V8HI_type_node,
8664 V8HI_type_node, V8HI_type_node,
8665 V8HI_type_node, NULL_TREE);
8666 tree v4si_ftype_v8hi_v8hi_v4si
8667 = build_function_type_list (V4SI_type_node,
8668 V8HI_type_node, V8HI_type_node,
8669 V4SI_type_node, NULL_TREE);
8670 tree v4si_ftype_v16qi_v16qi_v4si
8671 = build_function_type_list (V4SI_type_node,
8672 V16QI_type_node, V16QI_type_node,
8673 V4SI_type_node, NULL_TREE);
8674 tree v16qi_ftype_v16qi_v16qi
8675 = build_function_type_list (V16QI_type_node,
8676 V16QI_type_node, V16QI_type_node, NULL_TREE);
8677 tree v4si_ftype_v4sf_v4sf
8678 = build_function_type_list (V4SI_type_node,
8679 V4SF_type_node, V4SF_type_node, NULL_TREE);
8680 tree v8hi_ftype_v16qi_v16qi
8681 = build_function_type_list (V8HI_type_node,
8682 V16QI_type_node, V16QI_type_node, NULL_TREE);
8683 tree v4si_ftype_v8hi_v8hi
8684 = build_function_type_list (V4SI_type_node,
8685 V8HI_type_node, V8HI_type_node, NULL_TREE);
8686 tree v8hi_ftype_v4si_v4si
8687 = build_function_type_list (V8HI_type_node,
8688 V4SI_type_node, V4SI_type_node, NULL_TREE);
8689 tree v16qi_ftype_v8hi_v8hi
8690 = build_function_type_list (V16QI_type_node,
8691 V8HI_type_node, V8HI_type_node, NULL_TREE);
8692 tree v4si_ftype_v16qi_v4si
8693 = build_function_type_list (V4SI_type_node,
8694 V16QI_type_node, V4SI_type_node, NULL_TREE);
8695 tree v4si_ftype_v16qi_v16qi
8696 = build_function_type_list (V4SI_type_node,
8697 V16QI_type_node, V16QI_type_node, NULL_TREE);
8698 tree v4si_ftype_v8hi_v4si
8699 = build_function_type_list (V4SI_type_node,
8700 V8HI_type_node, V4SI_type_node, NULL_TREE);
8701 tree v4si_ftype_v8hi
8702 = build_function_type_list (V4SI_type_node, V8HI_type_node, NULL_TREE);
8703 tree int_ftype_v4si_v4si
8704 = build_function_type_list (integer_type_node,
8705 V4SI_type_node, V4SI_type_node, NULL_TREE);
8706 tree int_ftype_v4sf_v4sf
8707 = build_function_type_list (integer_type_node,
8708 V4SF_type_node, V4SF_type_node, NULL_TREE);
8709 tree int_ftype_v16qi_v16qi
8710 = build_function_type_list (integer_type_node,
8711 V16QI_type_node, V16QI_type_node, NULL_TREE);
8712 tree int_ftype_v8hi_v8hi
8713 = build_function_type_list (integer_type_node,
8714 V8HI_type_node, V8HI_type_node, NULL_TREE);
8716 /* Add the simple ternary operators. */
8717 d = (struct builtin_description *) bdesc_3arg;
8718 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
8720 enum machine_mode mode0, mode1, mode2, mode3;
8722 bool is_overloaded = d->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
8723 && d->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
8734 if (d->name == 0 || d->icode == CODE_FOR_nothing)
8737 mode0 = insn_data[d->icode].operand[0].mode;
8738 mode1 = insn_data[d->icode].operand[1].mode;
8739 mode2 = insn_data[d->icode].operand[2].mode;
8740 mode3 = insn_data[d->icode].operand[3].mode;
8743 /* When all four are of the same mode. */
8744 if (mode0 == mode1 && mode1 == mode2 && mode2 == mode3)
8749 type = opaque_ftype_opaque_opaque_opaque;
8752 type = v4si_ftype_v4si_v4si_v4si;
8755 type = v4sf_ftype_v4sf_v4sf_v4sf;
8758 type = v8hi_ftype_v8hi_v8hi_v8hi;
8761 type = v16qi_ftype_v16qi_v16qi_v16qi;
8767 else if (mode0 == mode1 && mode1 == mode2 && mode3 == V16QImode)
8772 type = v4si_ftype_v4si_v4si_v16qi;
8775 type = v4sf_ftype_v4sf_v4sf_v16qi;
8778 type = v8hi_ftype_v8hi_v8hi_v16qi;
8781 type = v16qi_ftype_v16qi_v16qi_v16qi;
8787 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode
8788 && mode3 == V4SImode)
8789 type = v4si_ftype_v16qi_v16qi_v4si;
8790 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode
8791 && mode3 == V4SImode)
8792 type = v4si_ftype_v8hi_v8hi_v4si;
8793 else if (mode0 == V4SFmode && mode1 == V4SFmode && mode2 == V4SFmode
8794 && mode3 == V4SImode)
8795 type = v4sf_ftype_v4sf_v4sf_v4si;
8797 /* vchar, vchar, vchar, 4 bit literal. */
8798 else if (mode0 == V16QImode && mode1 == mode0 && mode2 == mode0
8800 type = v16qi_ftype_v16qi_v16qi_int;
8802 /* vshort, vshort, vshort, 4 bit literal. */
8803 else if (mode0 == V8HImode && mode1 == mode0 && mode2 == mode0
8805 type = v8hi_ftype_v8hi_v8hi_int;
8807 /* vint, vint, vint, 4 bit literal. */
8808 else if (mode0 == V4SImode && mode1 == mode0 && mode2 == mode0
8810 type = v4si_ftype_v4si_v4si_int;
8812 /* vfloat, vfloat, vfloat, 4 bit literal. */
8813 else if (mode0 == V4SFmode && mode1 == mode0 && mode2 == mode0
8815 type = v4sf_ftype_v4sf_v4sf_int;
8820 def_builtin (d->mask, d->name, type, d->code);
8823 /* Add the simple binary operators. */
8824 d = (struct builtin_description *) bdesc_2arg;
8825 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
8827 enum machine_mode mode0, mode1, mode2;
8829 bool is_overloaded = d->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
8830 && d->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
8840 if (d->name == 0 || d->icode == CODE_FOR_nothing)
8843 mode0 = insn_data[d->icode].operand[0].mode;
8844 mode1 = insn_data[d->icode].operand[1].mode;
8845 mode2 = insn_data[d->icode].operand[2].mode;
8848 /* When all three operands are of the same mode. */
8849 if (mode0 == mode1 && mode1 == mode2)
8854 type = opaque_ftype_opaque_opaque;
8857 type = v4sf_ftype_v4sf_v4sf;
8860 type = v4si_ftype_v4si_v4si;
8863 type = v16qi_ftype_v16qi_v16qi;
8866 type = v8hi_ftype_v8hi_v8hi;
8869 type = v2si_ftype_v2si_v2si;
8872 type = v2sf_ftype_v2sf_v2sf;
8875 type = int_ftype_int_int;
8882 /* A few other combos we really don't want to do manually. */
8884 /* vint, vfloat, vfloat. */
8885 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == V4SFmode)
8886 type = v4si_ftype_v4sf_v4sf;
8888 /* vshort, vchar, vchar. */
8889 else if (mode0 == V8HImode && mode1 == V16QImode && mode2 == V16QImode)
8890 type = v8hi_ftype_v16qi_v16qi;
8892 /* vint, vshort, vshort. */
8893 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode)
8894 type = v4si_ftype_v8hi_v8hi;
8896 /* vshort, vint, vint. */
8897 else if (mode0 == V8HImode && mode1 == V4SImode && mode2 == V4SImode)
8898 type = v8hi_ftype_v4si_v4si;
8900 /* vchar, vshort, vshort. */
8901 else if (mode0 == V16QImode && mode1 == V8HImode && mode2 == V8HImode)
8902 type = v16qi_ftype_v8hi_v8hi;
8904 /* vint, vchar, vint. */
8905 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V4SImode)
8906 type = v4si_ftype_v16qi_v4si;
8908 /* vint, vchar, vchar. */
8909 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode)
8910 type = v4si_ftype_v16qi_v16qi;
8912 /* vint, vshort, vint. */
8913 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V4SImode)
8914 type = v4si_ftype_v8hi_v4si;
8916 /* vint, vint, 5 bit literal. */
8917 else if (mode0 == V4SImode && mode1 == V4SImode && mode2 == QImode)
8918 type = v4si_ftype_v4si_int;
8920 /* vshort, vshort, 5 bit literal. */
8921 else if (mode0 == V8HImode && mode1 == V8HImode && mode2 == QImode)
8922 type = v8hi_ftype_v8hi_int;
8924 /* vchar, vchar, 5 bit literal. */
8925 else if (mode0 == V16QImode && mode1 == V16QImode && mode2 == QImode)
8926 type = v16qi_ftype_v16qi_int;
8928 /* vfloat, vint, 5 bit literal. */
8929 else if (mode0 == V4SFmode && mode1 == V4SImode && mode2 == QImode)
8930 type = v4sf_ftype_v4si_int;
8932 /* vint, vfloat, 5 bit literal. */
8933 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == QImode)
8934 type = v4si_ftype_v4sf_int;
8936 else if (mode0 == V2SImode && mode1 == SImode && mode2 == SImode)
8937 type = v2si_ftype_int_int;
8939 else if (mode0 == V2SImode && mode1 == V2SImode && mode2 == QImode)
8940 type = v2si_ftype_v2si_char;
8942 else if (mode0 == V2SImode && mode1 == SImode && mode2 == QImode)
8943 type = v2si_ftype_int_char;
8948 gcc_assert (mode0 == SImode);
8952 type = int_ftype_v4si_v4si;
8955 type = int_ftype_v4sf_v4sf;
8958 type = int_ftype_v16qi_v16qi;
8961 type = int_ftype_v8hi_v8hi;
8968 def_builtin (d->mask, d->name, type, d->code);
8971 /* Add the simple unary operators. */
8972 d = (struct builtin_description *) bdesc_1arg;
8973 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
8975 enum machine_mode mode0, mode1;
8977 bool is_overloaded = d->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
8978 && d->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
8987 if (d->name == 0 || d->icode == CODE_FOR_nothing)
8990 mode0 = insn_data[d->icode].operand[0].mode;
8991 mode1 = insn_data[d->icode].operand[1].mode;
8994 if (mode0 == V4SImode && mode1 == QImode)
8995 type = v4si_ftype_int;
8996 else if (mode0 == V8HImode && mode1 == QImode)
8997 type = v8hi_ftype_int;
8998 else if (mode0 == V16QImode && mode1 == QImode)
8999 type = v16qi_ftype_int;
9000 else if (mode0 == VOIDmode && mode1 == VOIDmode)
9001 type = opaque_ftype_opaque;
9002 else if (mode0 == V4SFmode && mode1 == V4SFmode)
9003 type = v4sf_ftype_v4sf;
9004 else if (mode0 == V8HImode && mode1 == V16QImode)
9005 type = v8hi_ftype_v16qi;
9006 else if (mode0 == V4SImode && mode1 == V8HImode)
9007 type = v4si_ftype_v8hi;
9008 else if (mode0 == V2SImode && mode1 == V2SImode)
9009 type = v2si_ftype_v2si;
9010 else if (mode0 == V2SFmode && mode1 == V2SFmode)
9011 type = v2sf_ftype_v2sf;
9012 else if (mode0 == V2SFmode && mode1 == V2SImode)
9013 type = v2sf_ftype_v2si;
9014 else if (mode0 == V2SImode && mode1 == V2SFmode)
9015 type = v2si_ftype_v2sf;
9016 else if (mode0 == V2SImode && mode1 == QImode)
9017 type = v2si_ftype_char;
9021 def_builtin (d->mask, d->name, type, d->code);
9026 rs6000_init_libfuncs (void)
9028 if (!TARGET_HARD_FLOAT)
9031 if (DEFAULT_ABI != ABI_V4)
9033 if (TARGET_XCOFF && ! TARGET_POWER2 && ! TARGET_POWERPC)
9035 /* AIX library routines for float->int conversion. */
9036 set_conv_libfunc (sfix_optab, SImode, DFmode, "__itrunc");
9037 set_conv_libfunc (ufix_optab, SImode, DFmode, "__uitrunc");
9038 set_conv_libfunc (sfix_optab, SImode, TFmode, "_qitrunc");
9039 set_conv_libfunc (ufix_optab, SImode, TFmode, "_quitrunc");
9042 /* AIX/Darwin/64-bit Linux quad floating point routines. */
9043 if (!TARGET_XL_COMPAT)
9045 set_optab_libfunc (add_optab, TFmode, "__gcc_qadd");
9046 set_optab_libfunc (sub_optab, TFmode, "__gcc_qsub");
9047 set_optab_libfunc (smul_optab, TFmode, "__gcc_qmul");
9048 set_optab_libfunc (sdiv_optab, TFmode, "__gcc_qdiv");
9052 set_optab_libfunc (add_optab, TFmode, "_xlqadd");
9053 set_optab_libfunc (sub_optab, TFmode, "_xlqsub");
9054 set_optab_libfunc (smul_optab, TFmode, "_xlqmul");
9055 set_optab_libfunc (sdiv_optab, TFmode, "_xlqdiv");
9060 /* 32-bit SVR4 quad floating point routines. */
9062 set_optab_libfunc (add_optab, TFmode, "_q_add");
9063 set_optab_libfunc (sub_optab, TFmode, "_q_sub");
9064 set_optab_libfunc (neg_optab, TFmode, "_q_neg");
9065 set_optab_libfunc (smul_optab, TFmode, "_q_mul");
9066 set_optab_libfunc (sdiv_optab, TFmode, "_q_div");
9067 if (TARGET_PPC_GPOPT || TARGET_POWER2)
9068 set_optab_libfunc (sqrt_optab, TFmode, "_q_sqrt");
9070 set_optab_libfunc (eq_optab, TFmode, "_q_feq");
9071 set_optab_libfunc (ne_optab, TFmode, "_q_fne");
9072 set_optab_libfunc (gt_optab, TFmode, "_q_fgt");
9073 set_optab_libfunc (ge_optab, TFmode, "_q_fge");
9074 set_optab_libfunc (lt_optab, TFmode, "_q_flt");
9075 set_optab_libfunc (le_optab, TFmode, "_q_fle");
9077 set_conv_libfunc (sext_optab, TFmode, SFmode, "_q_stoq");
9078 set_conv_libfunc (sext_optab, TFmode, DFmode, "_q_dtoq");
9079 set_conv_libfunc (trunc_optab, SFmode, TFmode, "_q_qtos");
9080 set_conv_libfunc (trunc_optab, DFmode, TFmode, "_q_qtod");
9081 set_conv_libfunc (sfix_optab, SImode, TFmode, "_q_qtoi");
9082 set_conv_libfunc (ufix_optab, SImode, TFmode, "_q_qtou");
9083 set_conv_libfunc (sfloat_optab, TFmode, SImode, "_q_itoq");
9084 set_conv_libfunc (ufloat_optab, TFmode, SImode, "_q_utoq");
9089 /* Expand a block clear operation, and return 1 if successful. Return 0
9090 if we should let the compiler generate normal code.
9092 operands[0] is the destination
9093 operands[1] is the length
9094 operands[3] is the alignment */
9097 expand_block_clear (rtx operands[])
9099 rtx orig_dest = operands[0];
9100 rtx bytes_rtx = operands[1];
9101 rtx align_rtx = operands[3];
9102 bool constp = (GET_CODE (bytes_rtx) == CONST_INT);
9103 HOST_WIDE_INT align;
9104 HOST_WIDE_INT bytes;
9109 /* If this is not a fixed size move, just call memcpy */
9113 /* This must be a fixed size alignment */
9114 gcc_assert (GET_CODE (align_rtx) == CONST_INT);
9115 align = INTVAL (align_rtx) * BITS_PER_UNIT;
9117 /* Anything to clear? */
9118 bytes = INTVAL (bytes_rtx);
9122 /* Use the builtin memset after a point, to avoid huge code bloat.
9123 When optimize_size, avoid any significant code bloat; calling
9124 memset is about 4 instructions, so allow for one instruction to
9125 load zero and three to do clearing. */
9126 if (TARGET_ALTIVEC && align >= 128)
9128 else if (TARGET_POWERPC64 && align >= 32)
9133 if (optimize_size && bytes > 3 * clear_step)
9135 if (! optimize_size && bytes > 8 * clear_step)
9138 for (offset = 0; bytes > 0; offset += clear_bytes, bytes -= clear_bytes)
9140 enum machine_mode mode = BLKmode;
9143 if (bytes >= 16 && TARGET_ALTIVEC && align >= 128)
9148 else if (bytes >= 8 && TARGET_POWERPC64
9149 /* 64-bit loads and stores require word-aligned
9151 && (align >= 64 || (!STRICT_ALIGNMENT && align >= 32)))
9156 else if (bytes >= 4 && (align >= 32 || !STRICT_ALIGNMENT))
9157 { /* move 4 bytes */
9161 else if (bytes >= 2 && (align >= 16 || !STRICT_ALIGNMENT))
9162 { /* move 2 bytes */
9166 else /* move 1 byte at a time */
9172 dest = adjust_address (orig_dest, mode, offset);
9174 emit_move_insn (dest, CONST0_RTX (mode));
9181 /* Expand a block move operation, and return 1 if successful. Return 0
9182 if we should let the compiler generate normal code.
9184 operands[0] is the destination
9185 operands[1] is the source
9186 operands[2] is the length
9187 operands[3] is the alignment */
9189 #define MAX_MOVE_REG 4
9192 expand_block_move (rtx operands[])
9194 rtx orig_dest = operands[0];
9195 rtx orig_src = operands[1];
9196 rtx bytes_rtx = operands[2];
9197 rtx align_rtx = operands[3];
9198 int constp = (GET_CODE (bytes_rtx) == CONST_INT);
9203 rtx stores[MAX_MOVE_REG];
9206 /* If this is not a fixed size move, just call memcpy */
9210 /* This must be a fixed size alignment */
9211 gcc_assert (GET_CODE (align_rtx) == CONST_INT);
9212 align = INTVAL (align_rtx) * BITS_PER_UNIT;
9214 /* Anything to move? */
9215 bytes = INTVAL (bytes_rtx);
9219 /* store_one_arg depends on expand_block_move to handle at least the size of
9220 reg_parm_stack_space. */
9221 if (bytes > (TARGET_POWERPC64 ? 64 : 32))
9224 for (offset = 0; bytes > 0; offset += move_bytes, bytes -= move_bytes)
9227 rtx (*movmemsi) (rtx, rtx, rtx, rtx);
9228 rtx (*mov) (rtx, rtx);
9230 enum machine_mode mode = BLKmode;
9233 /* Altivec first, since it will be faster than a string move
9234 when it applies, and usually not significantly larger. */
9235 if (TARGET_ALTIVEC && bytes >= 16 && align >= 128)
9239 gen_func.mov = gen_movv4si;
9241 else if (TARGET_STRING
9242 && bytes > 24 /* move up to 32 bytes at a time */
9250 && ! fixed_regs[12])
9252 move_bytes = (bytes > 32) ? 32 : bytes;
9253 gen_func.movmemsi = gen_movmemsi_8reg;
9255 else if (TARGET_STRING
9256 && bytes > 16 /* move up to 24 bytes at a time */
9262 && ! fixed_regs[10])
9264 move_bytes = (bytes > 24) ? 24 : bytes;
9265 gen_func.movmemsi = gen_movmemsi_6reg;
9267 else if (TARGET_STRING
9268 && bytes > 8 /* move up to 16 bytes at a time */
9274 move_bytes = (bytes > 16) ? 16 : bytes;
9275 gen_func.movmemsi = gen_movmemsi_4reg;
9277 else if (bytes >= 8 && TARGET_POWERPC64
9278 /* 64-bit loads and stores require word-aligned
9280 && (align >= 64 || (!STRICT_ALIGNMENT && align >= 32)))
9284 gen_func.mov = gen_movdi;
9286 else if (TARGET_STRING && bytes > 4 && !TARGET_POWERPC64)
9287 { /* move up to 8 bytes at a time */
9288 move_bytes = (bytes > 8) ? 8 : bytes;
9289 gen_func.movmemsi = gen_movmemsi_2reg;
9291 else if (bytes >= 4 && (align >= 32 || !STRICT_ALIGNMENT))
9292 { /* move 4 bytes */
9295 gen_func.mov = gen_movsi;
9297 else if (bytes >= 2 && (align >= 16 || !STRICT_ALIGNMENT))
9298 { /* move 2 bytes */
9301 gen_func.mov = gen_movhi;
9303 else if (TARGET_STRING && bytes > 1)
9304 { /* move up to 4 bytes at a time */
9305 move_bytes = (bytes > 4) ? 4 : bytes;
9306 gen_func.movmemsi = gen_movmemsi_1reg;
9308 else /* move 1 byte at a time */
9312 gen_func.mov = gen_movqi;
9315 src = adjust_address (orig_src, mode, offset);
9316 dest = adjust_address (orig_dest, mode, offset);
9318 if (mode != BLKmode)
9320 rtx tmp_reg = gen_reg_rtx (mode);
9322 emit_insn ((*gen_func.mov) (tmp_reg, src));
9323 stores[num_reg++] = (*gen_func.mov) (dest, tmp_reg);
9326 if (mode == BLKmode || num_reg >= MAX_MOVE_REG || bytes == move_bytes)
9329 for (i = 0; i < num_reg; i++)
9330 emit_insn (stores[i]);
9334 if (mode == BLKmode)
9336 /* Move the address into scratch registers. The movmemsi
9337 patterns require zero offset. */
9338 if (!REG_P (XEXP (src, 0)))
9340 rtx src_reg = copy_addr_to_reg (XEXP (src, 0));
9341 src = replace_equiv_address (src, src_reg);
9343 set_mem_size (src, GEN_INT (move_bytes));
9345 if (!REG_P (XEXP (dest, 0)))
9347 rtx dest_reg = copy_addr_to_reg (XEXP (dest, 0));
9348 dest = replace_equiv_address (dest, dest_reg);
9350 set_mem_size (dest, GEN_INT (move_bytes));
9352 emit_insn ((*gen_func.movmemsi) (dest, src,
9353 GEN_INT (move_bytes & 31),
9362 /* Return a string to perform a load_multiple operation.
9363 operands[0] is the vector.
9364 operands[1] is the source address.
9365 operands[2] is the first destination register. */
9368 rs6000_output_load_multiple (rtx operands[3])
9370 /* We have to handle the case where the pseudo used to contain the address
9371 is assigned to one of the output registers. */
9373 int words = XVECLEN (operands[0], 0);
9376 if (XVECLEN (operands[0], 0) == 1)
9377 return "{l|lwz} %2,0(%1)";
9379 for (i = 0; i < words; i++)
9380 if (refers_to_regno_p (REGNO (operands[2]) + i,
9381 REGNO (operands[2]) + i + 1, operands[1], 0))
9385 xop[0] = GEN_INT (4 * (words-1));
9386 xop[1] = operands[1];
9387 xop[2] = operands[2];
9388 output_asm_insn ("{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,%0(%1)", xop);
9393 xop[0] = GEN_INT (4 * (words-1));
9394 xop[1] = operands[1];
9395 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
9396 output_asm_insn ("{cal %1,4(%1)|addi %1,%1,4}\n\t{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,-4(%1)", xop);
9401 for (j = 0; j < words; j++)
9404 xop[0] = GEN_INT (j * 4);
9405 xop[1] = operands[1];
9406 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + j);
9407 output_asm_insn ("{l|lwz} %2,%0(%1)", xop);
9409 xop[0] = GEN_INT (i * 4);
9410 xop[1] = operands[1];
9411 output_asm_insn ("{l|lwz} %1,%0(%1)", xop);
9416 return "{lsi|lswi} %2,%1,%N0";
9420 /* A validation routine: say whether CODE, a condition code, and MODE
9421 match. The other alternatives either don't make sense or should
9422 never be generated. */
9425 validate_condition_mode (enum rtx_code code, enum machine_mode mode)
9427 gcc_assert ((GET_RTX_CLASS (code) == RTX_COMPARE
9428 || GET_RTX_CLASS (code) == RTX_COMM_COMPARE)
9429 && GET_MODE_CLASS (mode) == MODE_CC);
9431 /* These don't make sense. */
9432 gcc_assert ((code != GT && code != LT && code != GE && code != LE)
9433 || mode != CCUNSmode);
9435 gcc_assert ((code != GTU && code != LTU && code != GEU && code != LEU)
9436 || mode == CCUNSmode);
9438 gcc_assert (mode == CCFPmode
9439 || (code != ORDERED && code != UNORDERED
9440 && code != UNEQ && code != LTGT
9441 && code != UNGT && code != UNLT
9442 && code != UNGE && code != UNLE));
9444 /* These should never be generated except for
9445 flag_finite_math_only. */
9446 gcc_assert (mode != CCFPmode
9447 || flag_finite_math_only
9448 || (code != LE && code != GE
9449 && code != UNEQ && code != LTGT
9450 && code != UNGT && code != UNLT));
9452 /* These are invalid; the information is not there. */
9453 gcc_assert (mode != CCEQmode || code == EQ || code == NE);
9457 /* Return 1 if ANDOP is a mask that has no bits on that are not in the
9458 mask required to convert the result of a rotate insn into a shift
9459 left insn of SHIFTOP bits. Both are known to be SImode CONST_INT. */
9462 includes_lshift_p (rtx shiftop, rtx andop)
9464 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
9466 shift_mask <<= INTVAL (shiftop);
9468 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
9471 /* Similar, but for right shift. */
9474 includes_rshift_p (rtx shiftop, rtx andop)
9476 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
9478 shift_mask >>= INTVAL (shiftop);
9480 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
9483 /* Return 1 if ANDOP is a mask suitable for use with an rldic insn
9484 to perform a left shift. It must have exactly SHIFTOP least
9485 significant 0's, then one or more 1's, then zero or more 0's. */
9488 includes_rldic_lshift_p (rtx shiftop, rtx andop)
9490 if (GET_CODE (andop) == CONST_INT)
9492 HOST_WIDE_INT c, lsb, shift_mask;
9495 if (c == 0 || c == ~0)
9499 shift_mask <<= INTVAL (shiftop);
9501 /* Find the least significant one bit. */
9504 /* It must coincide with the LSB of the shift mask. */
9505 if (-lsb != shift_mask)
9508 /* Invert to look for the next transition (if any). */
9511 /* Remove the low group of ones (originally low group of zeros). */
9514 /* Again find the lsb, and check we have all 1's above. */
9518 else if (GET_CODE (andop) == CONST_DOUBLE
9519 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
9521 HOST_WIDE_INT low, high, lsb;
9522 HOST_WIDE_INT shift_mask_low, shift_mask_high;
9524 low = CONST_DOUBLE_LOW (andop);
9525 if (HOST_BITS_PER_WIDE_INT < 64)
9526 high = CONST_DOUBLE_HIGH (andop);
9528 if ((low == 0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == 0))
9529 || (low == ~0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0)))
9532 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
9534 shift_mask_high = ~0;
9535 if (INTVAL (shiftop) > 32)
9536 shift_mask_high <<= INTVAL (shiftop) - 32;
9540 if (-lsb != shift_mask_high || INTVAL (shiftop) < 32)
9547 return high == -lsb;
9550 shift_mask_low = ~0;
9551 shift_mask_low <<= INTVAL (shiftop);
9555 if (-lsb != shift_mask_low)
9558 if (HOST_BITS_PER_WIDE_INT < 64)
9563 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
9566 return high == -lsb;
9570 return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
9576 /* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
9577 to perform a left shift. It must have SHIFTOP or more least
9578 significant 0's, with the remainder of the word 1's. */
9581 includes_rldicr_lshift_p (rtx shiftop, rtx andop)
9583 if (GET_CODE (andop) == CONST_INT)
9585 HOST_WIDE_INT c, lsb, shift_mask;
9588 shift_mask <<= INTVAL (shiftop);
9591 /* Find the least significant one bit. */
9594 /* It must be covered by the shift mask.
9595 This test also rejects c == 0. */
9596 if ((lsb & shift_mask) == 0)
9599 /* Check we have all 1's above the transition, and reject all 1's. */
9600 return c == -lsb && lsb != 1;
9602 else if (GET_CODE (andop) == CONST_DOUBLE
9603 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
9605 HOST_WIDE_INT low, lsb, shift_mask_low;
9607 low = CONST_DOUBLE_LOW (andop);
9609 if (HOST_BITS_PER_WIDE_INT < 64)
9611 HOST_WIDE_INT high, shift_mask_high;
9613 high = CONST_DOUBLE_HIGH (andop);
9617 shift_mask_high = ~0;
9618 if (INTVAL (shiftop) > 32)
9619 shift_mask_high <<= INTVAL (shiftop) - 32;
9623 if ((lsb & shift_mask_high) == 0)
9626 return high == -lsb;
9632 shift_mask_low = ~0;
9633 shift_mask_low <<= INTVAL (shiftop);
9637 if ((lsb & shift_mask_low) == 0)
9640 return low == -lsb && lsb != 1;
9646 /* Return 1 if operands will generate a valid arguments to rlwimi
9647 instruction for insert with right shift in 64-bit mode. The mask may
9648 not start on the first bit or stop on the last bit because wrap-around
9649 effects of instruction do not correspond to semantics of RTL insn. */
9652 insvdi_rshift_rlwimi_p (rtx sizeop, rtx startop, rtx shiftop)
9654 if (INTVAL (startop) < 64
9655 && INTVAL (startop) > 32
9656 && (INTVAL (sizeop) + INTVAL (startop) < 64)
9657 && (INTVAL (sizeop) + INTVAL (startop) > 33)
9658 && (INTVAL (sizeop) + INTVAL (startop) + INTVAL (shiftop) < 96)
9659 && (INTVAL (sizeop) + INTVAL (startop) + INTVAL (shiftop) >= 64)
9660 && (64 - (INTVAL (shiftop) & 63)) >= INTVAL (sizeop))
9666 /* Return 1 if REGNO (reg1) == REGNO (reg2) - 1 making them candidates
9667 for lfq and stfq insns iff the registers are hard registers. */
9670 registers_ok_for_quad_peep (rtx reg1, rtx reg2)
9672 /* We might have been passed a SUBREG. */
9673 if (GET_CODE (reg1) != REG || GET_CODE (reg2) != REG)
9676 /* We might have been passed non floating point registers. */
9677 if (!FP_REGNO_P (REGNO (reg1))
9678 || !FP_REGNO_P (REGNO (reg2)))
9681 return (REGNO (reg1) == REGNO (reg2) - 1);
9684 /* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
9685 addr1 and addr2 must be in consecutive memory locations
9686 (addr2 == addr1 + 8). */
9689 mems_ok_for_quad_peep (rtx mem1, rtx mem2)
9692 unsigned int reg1, reg2;
9693 int offset1, offset2;
9695 /* The mems cannot be volatile. */
9696 if (MEM_VOLATILE_P (mem1) || MEM_VOLATILE_P (mem2))
9699 addr1 = XEXP (mem1, 0);
9700 addr2 = XEXP (mem2, 0);
9702 /* Extract an offset (if used) from the first addr. */
9703 if (GET_CODE (addr1) == PLUS)
9705 /* If not a REG, return zero. */
9706 if (GET_CODE (XEXP (addr1, 0)) != REG)
9710 reg1 = REGNO (XEXP (addr1, 0));
9711 /* The offset must be constant! */
9712 if (GET_CODE (XEXP (addr1, 1)) != CONST_INT)
9714 offset1 = INTVAL (XEXP (addr1, 1));
9717 else if (GET_CODE (addr1) != REG)
9721 reg1 = REGNO (addr1);
9722 /* This was a simple (mem (reg)) expression. Offset is 0. */
9726 /* And now for the second addr. */
9727 if (GET_CODE (addr2) == PLUS)
9729 /* If not a REG, return zero. */
9730 if (GET_CODE (XEXP (addr2, 0)) != REG)
9734 reg2 = REGNO (XEXP (addr2, 0));
9735 /* The offset must be constant. */
9736 if (GET_CODE (XEXP (addr2, 1)) != CONST_INT)
9738 offset2 = INTVAL (XEXP (addr2, 1));
9741 else if (GET_CODE (addr2) != REG)
9745 reg2 = REGNO (addr2);
9746 /* This was a simple (mem (reg)) expression. Offset is 0. */
9750 /* Both of these must have the same base register. */
9754 /* The offset for the second addr must be 8 more than the first addr. */
9755 if (offset2 != offset1 + 8)
9758 /* All the tests passed. addr1 and addr2 are valid for lfq or stfq
9763 /* Return the register class of a scratch register needed to copy IN into
9764 or out of a register in CLASS in MODE. If it can be done directly,
9765 NO_REGS is returned. */
9768 rs6000_secondary_reload_class (enum reg_class class,
9769 enum machine_mode mode ATTRIBUTE_UNUSED,
9774 if (TARGET_ELF || (DEFAULT_ABI == ABI_DARWIN
9776 && MACHOPIC_INDIRECT
9780 /* We cannot copy a symbolic operand directly into anything
9781 other than BASE_REGS for TARGET_ELF. So indicate that a
9782 register from BASE_REGS is needed as an intermediate
9785 On Darwin, pic addresses require a load from memory, which
9786 needs a base register. */
9787 if (class != BASE_REGS
9788 && (GET_CODE (in) == SYMBOL_REF
9789 || GET_CODE (in) == HIGH
9790 || GET_CODE (in) == LABEL_REF
9791 || GET_CODE (in) == CONST))
9795 if (GET_CODE (in) == REG)
9798 if (regno >= FIRST_PSEUDO_REGISTER)
9800 regno = true_regnum (in);
9801 if (regno >= FIRST_PSEUDO_REGISTER)
9805 else if (GET_CODE (in) == SUBREG)
9807 regno = true_regnum (in);
9808 if (regno >= FIRST_PSEUDO_REGISTER)
9814 /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
9816 if (class == GENERAL_REGS || class == BASE_REGS
9817 || (regno >= 0 && INT_REGNO_P (regno)))
9820 /* Constants, memory, and FP registers can go into FP registers. */
9821 if ((regno == -1 || FP_REGNO_P (regno))
9822 && (class == FLOAT_REGS || class == NON_SPECIAL_REGS))
9825 /* Memory, and AltiVec registers can go into AltiVec registers. */
9826 if ((regno == -1 || ALTIVEC_REGNO_P (regno))
9827 && class == ALTIVEC_REGS)
9830 /* We can copy among the CR registers. */
9831 if ((class == CR_REGS || class == CR0_REGS)
9832 && regno >= 0 && CR_REGNO_P (regno))
9835 /* Otherwise, we need GENERAL_REGS. */
9836 return GENERAL_REGS;
9839 /* Given a comparison operation, return the bit number in CCR to test. We
9840 know this is a valid comparison.
9842 SCC_P is 1 if this is for an scc. That means that %D will have been
9843 used instead of %C, so the bits will be in different places.
9845 Return -1 if OP isn't a valid comparison for some reason. */
9848 ccr_bit (rtx op, int scc_p)
9850 enum rtx_code code = GET_CODE (op);
9851 enum machine_mode cc_mode;
9856 if (!COMPARISON_P (op))
9861 gcc_assert (GET_CODE (reg) == REG && CR_REGNO_P (REGNO (reg)));
9863 cc_mode = GET_MODE (reg);
9864 cc_regnum = REGNO (reg);
9865 base_bit = 4 * (cc_regnum - CR0_REGNO);
9867 validate_condition_mode (code, cc_mode);
9869 /* When generating a sCOND operation, only positive conditions are
9872 || code == EQ || code == GT || code == LT || code == UNORDERED
9873 || code == GTU || code == LTU);
9878 return scc_p ? base_bit + 3 : base_bit + 2;
9880 return base_bit + 2;
9881 case GT: case GTU: case UNLE:
9882 return base_bit + 1;
9883 case LT: case LTU: case UNGE:
9885 case ORDERED: case UNORDERED:
9886 return base_bit + 3;
9889 /* If scc, we will have done a cror to put the bit in the
9890 unordered position. So test that bit. For integer, this is ! LT
9891 unless this is an scc insn. */
9892 return scc_p ? base_bit + 3 : base_bit;
9895 return scc_p ? base_bit + 3 : base_bit + 1;
9902 /* Return the GOT register. */
9905 rs6000_got_register (rtx value ATTRIBUTE_UNUSED)
9907 /* The second flow pass currently (June 1999) can't update
9908 regs_ever_live without disturbing other parts of the compiler, so
9909 update it here to make the prolog/epilogue code happy. */
9910 if (no_new_pseudos && ! regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM])
9911 regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
9913 current_function_uses_pic_offset_table = 1;
9915 return pic_offset_table_rtx;
9918 /* Function to init struct machine_function.
9919 This will be called, via a pointer variable,
9920 from push_function_context. */
9922 static struct machine_function *
9923 rs6000_init_machine_status (void)
9925 return ggc_alloc_cleared (sizeof (machine_function));
9928 /* These macros test for integers and extract the low-order bits. */
9930 ((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE) \
9931 && GET_MODE (X) == VOIDmode)
9933 #define INT_LOWPART(X) \
9934 (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
9940 unsigned long val = INT_LOWPART (op);
9942 /* If the high bit is zero, the value is the first 1 bit we find
9944 if ((val & 0x80000000) == 0)
9946 gcc_assert (val & 0xffffffff);
9949 while (((val <<= 1) & 0x80000000) == 0)
9954 /* If the high bit is set and the low bit is not, or the mask is all
9955 1's, the value is zero. */
9956 if ((val & 1) == 0 || (val & 0xffffffff) == 0xffffffff)
9959 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
9962 while (((val >>= 1) & 1) != 0)
9972 unsigned long val = INT_LOWPART (op);
9974 /* If the low bit is zero, the value is the first 1 bit we find from
9978 gcc_assert (val & 0xffffffff);
9981 while (((val >>= 1) & 1) == 0)
9987 /* If the low bit is set and the high bit is not, or the mask is all
9988 1's, the value is 31. */
9989 if ((val & 0x80000000) == 0 || (val & 0xffffffff) == 0xffffffff)
9992 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
9995 while (((val <<= 1) & 0x80000000) != 0)
10001 /* Locate some local-dynamic symbol still in use by this function
10002 so that we can print its name in some tls_ld pattern. */
10004 static const char *
10005 rs6000_get_some_local_dynamic_name (void)
10009 if (cfun->machine->some_ld_name)
10010 return cfun->machine->some_ld_name;
10012 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
10014 && for_each_rtx (&PATTERN (insn),
10015 rs6000_get_some_local_dynamic_name_1, 0))
10016 return cfun->machine->some_ld_name;
10018 gcc_unreachable ();
10021 /* Helper function for rs6000_get_some_local_dynamic_name. */
10024 rs6000_get_some_local_dynamic_name_1 (rtx *px, void *data ATTRIBUTE_UNUSED)
10028 if (GET_CODE (x) == SYMBOL_REF)
10030 const char *str = XSTR (x, 0);
10031 if (SYMBOL_REF_TLS_MODEL (x) == TLS_MODEL_LOCAL_DYNAMIC)
10033 cfun->machine->some_ld_name = str;
10041 /* Write out a function code label. */
10044 rs6000_output_function_entry (FILE *file, const char *fname)
10046 if (fname[0] != '.')
10048 switch (DEFAULT_ABI)
10051 gcc_unreachable ();
10057 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "L.");
10066 RS6000_OUTPUT_BASENAME (file, fname);
10068 assemble_name (file, fname);
10071 /* Print an operand. Recognize special options, documented below. */
10074 #define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
10075 #define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
10077 #define SMALL_DATA_RELOC "sda21"
10078 #define SMALL_DATA_REG 0
10082 print_operand (FILE *file, rtx x, int code)
10086 unsigned HOST_WIDE_INT uval;
10091 /* Write out an instruction after the call which may be replaced
10092 with glue code by the loader. This depends on the AIX version. */
10093 asm_fprintf (file, RS6000_CALL_GLUE);
10096 /* %a is output_address. */
10099 /* If X is a constant integer whose low-order 5 bits are zero,
10100 write 'l'. Otherwise, write 'r'. This is a kludge to fix a bug
10101 in the AIX assembler where "sri" with a zero shift count
10102 writes a trash instruction. */
10103 if (GET_CODE (x) == CONST_INT && (INTVAL (x) & 31) == 0)
10110 /* If constant, low-order 16 bits of constant, unsigned.
10111 Otherwise, write normally. */
10113 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 0xffff);
10115 print_operand (file, x, 0);
10119 /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
10120 for 64-bit mask direction. */
10121 putc (((INT_LOWPART (x) & 1) == 0 ? 'r' : 'l'), file);
10124 /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
10128 /* X is a CR register. Print the number of the GT bit of the CR. */
10129 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
10130 output_operand_lossage ("invalid %%E value");
10132 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 1);
10136 /* Like 'J' but get to the EQ bit. */
10137 gcc_assert (GET_CODE (x) == REG);
10139 /* Bit 1 is EQ bit. */
10140 i = 4 * (REGNO (x) - CR0_REGNO) + 2;
10142 fprintf (file, "%d", i);
10146 /* X is a CR register. Print the number of the EQ bit of the CR */
10147 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
10148 output_operand_lossage ("invalid %%E value");
10150 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 2);
10154 /* X is a CR register. Print the shift count needed to move it
10155 to the high-order four bits. */
10156 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
10157 output_operand_lossage ("invalid %%f value");
10159 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO));
10163 /* Similar, but print the count for the rotate in the opposite
10165 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
10166 output_operand_lossage ("invalid %%F value");
10168 fprintf (file, "%d", 32 - 4 * (REGNO (x) - CR0_REGNO));
10172 /* X is a constant integer. If it is negative, print "m",
10173 otherwise print "z". This is to make an aze or ame insn. */
10174 if (GET_CODE (x) != CONST_INT)
10175 output_operand_lossage ("invalid %%G value");
10176 else if (INTVAL (x) >= 0)
10183 /* If constant, output low-order five bits. Otherwise, write
10186 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 31);
10188 print_operand (file, x, 0);
10192 /* If constant, output low-order six bits. Otherwise, write
10195 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 63);
10197 print_operand (file, x, 0);
10201 /* Print `i' if this is a constant, else nothing. */
10207 /* Write the bit number in CCR for jump. */
10208 i = ccr_bit (x, 0);
10210 output_operand_lossage ("invalid %%j code");
10212 fprintf (file, "%d", i);
10216 /* Similar, but add one for shift count in rlinm for scc and pass
10217 scc flag to `ccr_bit'. */
10218 i = ccr_bit (x, 1);
10220 output_operand_lossage ("invalid %%J code");
10222 /* If we want bit 31, write a shift count of zero, not 32. */
10223 fprintf (file, "%d", i == 31 ? 0 : i + 1);
10227 /* X must be a constant. Write the 1's complement of the
10230 output_operand_lossage ("invalid %%k value");
10232 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~ INT_LOWPART (x));
10236 /* X must be a symbolic constant on ELF. Write an
10237 expression suitable for an 'addi' that adds in the low 16
10238 bits of the MEM. */
10239 if (GET_CODE (x) != CONST)
10241 print_operand_address (file, x);
10242 fputs ("@l", file);
10246 if (GET_CODE (XEXP (x, 0)) != PLUS
10247 || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
10248 && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
10249 || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
10250 output_operand_lossage ("invalid %%K value");
10251 print_operand_address (file, XEXP (XEXP (x, 0), 0));
10252 fputs ("@l", file);
10253 /* For GNU as, there must be a non-alphanumeric character
10254 between 'l' and the number. The '-' is added by
10255 print_operand() already. */
10256 if (INTVAL (XEXP (XEXP (x, 0), 1)) >= 0)
10258 print_operand (file, XEXP (XEXP (x, 0), 1), 0);
10262 /* %l is output_asm_label. */
10265 /* Write second word of DImode or DFmode reference. Works on register
10266 or non-indexed memory only. */
10267 if (GET_CODE (x) == REG)
10268 fputs (reg_names[REGNO (x) + 1], file);
10269 else if (GET_CODE (x) == MEM)
10271 /* Handle possible auto-increment. Since it is pre-increment and
10272 we have already done it, we can just use an offset of word. */
10273 if (GET_CODE (XEXP (x, 0)) == PRE_INC
10274 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
10275 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
10278 output_address (XEXP (adjust_address_nv (x, SImode,
10282 if (small_data_operand (x, GET_MODE (x)))
10283 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
10284 reg_names[SMALL_DATA_REG]);
10289 /* MB value for a mask operand. */
10290 if (! mask_operand (x, SImode))
10291 output_operand_lossage ("invalid %%m value");
10293 fprintf (file, "%d", extract_MB (x));
10297 /* ME value for a mask operand. */
10298 if (! mask_operand (x, SImode))
10299 output_operand_lossage ("invalid %%M value");
10301 fprintf (file, "%d", extract_ME (x));
10304 /* %n outputs the negative of its operand. */
10307 /* Write the number of elements in the vector times 4. */
10308 if (GET_CODE (x) != PARALLEL)
10309 output_operand_lossage ("invalid %%N value");
10311 fprintf (file, "%d", XVECLEN (x, 0) * 4);
10315 /* Similar, but subtract 1 first. */
10316 if (GET_CODE (x) != PARALLEL)
10317 output_operand_lossage ("invalid %%O value");
10319 fprintf (file, "%d", (XVECLEN (x, 0) - 1) * 4);
10323 /* X is a CONST_INT that is a power of two. Output the logarithm. */
10325 || INT_LOWPART (x) < 0
10326 || (i = exact_log2 (INT_LOWPART (x))) < 0)
10327 output_operand_lossage ("invalid %%p value");
10329 fprintf (file, "%d", i);
10333 /* The operand must be an indirect memory reference. The result
10334 is the register name. */
10335 if (GET_CODE (x) != MEM || GET_CODE (XEXP (x, 0)) != REG
10336 || REGNO (XEXP (x, 0)) >= 32)
10337 output_operand_lossage ("invalid %%P value");
10339 fputs (reg_names[REGNO (XEXP (x, 0))], file);
10343 /* This outputs the logical code corresponding to a boolean
10344 expression. The expression may have one or both operands
10345 negated (if one, only the first one). For condition register
10346 logical operations, it will also treat the negated
10347 CR codes as NOTs, but not handle NOTs of them. */
10349 const char *const *t = 0;
10351 enum rtx_code code = GET_CODE (x);
10352 static const char * const tbl[3][3] = {
10353 { "and", "andc", "nor" },
10354 { "or", "orc", "nand" },
10355 { "xor", "eqv", "xor" } };
10359 else if (code == IOR)
10361 else if (code == XOR)
10364 output_operand_lossage ("invalid %%q value");
10366 if (GET_CODE (XEXP (x, 0)) != NOT)
10370 if (GET_CODE (XEXP (x, 1)) == NOT)
10388 /* X is a CR register. Print the mask for `mtcrf'. */
10389 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
10390 output_operand_lossage ("invalid %%R value");
10392 fprintf (file, "%d", 128 >> (REGNO (x) - CR0_REGNO));
10396 /* Low 5 bits of 32 - value */
10398 output_operand_lossage ("invalid %%s value");
10400 fprintf (file, HOST_WIDE_INT_PRINT_DEC, (32 - INT_LOWPART (x)) & 31);
10404 /* PowerPC64 mask position. All 0's is excluded.
10405 CONST_INT 32-bit mask is considered sign-extended so any
10406 transition must occur within the CONST_INT, not on the boundary. */
10407 if (! mask64_operand (x, DImode))
10408 output_operand_lossage ("invalid %%S value");
10410 uval = INT_LOWPART (x);
10412 if (uval & 1) /* Clear Left */
10414 #if HOST_BITS_PER_WIDE_INT > 64
10415 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
10419 else /* Clear Right */
10422 #if HOST_BITS_PER_WIDE_INT > 64
10423 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
10429 gcc_assert (i >= 0);
10430 fprintf (file, "%d", i);
10434 /* Like 'J' but get to the OVERFLOW/UNORDERED bit. */
10435 gcc_assert (GET_CODE (x) == REG && GET_MODE (x) == CCmode);
10437 /* Bit 3 is OV bit. */
10438 i = 4 * (REGNO (x) - CR0_REGNO) + 3;
10440 /* If we want bit 31, write a shift count of zero, not 32. */
10441 fprintf (file, "%d", i == 31 ? 0 : i + 1);
10445 /* Print the symbolic name of a branch target register. */
10446 if (GET_CODE (x) != REG || (REGNO (x) != LINK_REGISTER_REGNUM
10447 && REGNO (x) != COUNT_REGISTER_REGNUM))
10448 output_operand_lossage ("invalid %%T value");
10449 else if (REGNO (x) == LINK_REGISTER_REGNUM)
10450 fputs (TARGET_NEW_MNEMONICS ? "lr" : "r", file);
10452 fputs ("ctr", file);
10456 /* High-order 16 bits of constant for use in unsigned operand. */
10458 output_operand_lossage ("invalid %%u value");
10460 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
10461 (INT_LOWPART (x) >> 16) & 0xffff);
10465 /* High-order 16 bits of constant for use in signed operand. */
10467 output_operand_lossage ("invalid %%v value");
10469 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
10470 (INT_LOWPART (x) >> 16) & 0xffff);
10474 /* Print `u' if this has an auto-increment or auto-decrement. */
10475 if (GET_CODE (x) == MEM
10476 && (GET_CODE (XEXP (x, 0)) == PRE_INC
10477 || GET_CODE (XEXP (x, 0)) == PRE_DEC))
10482 /* Print the trap code for this operand. */
10483 switch (GET_CODE (x))
10486 fputs ("eq", file); /* 4 */
10489 fputs ("ne", file); /* 24 */
10492 fputs ("lt", file); /* 16 */
10495 fputs ("le", file); /* 20 */
10498 fputs ("gt", file); /* 8 */
10501 fputs ("ge", file); /* 12 */
10504 fputs ("llt", file); /* 2 */
10507 fputs ("lle", file); /* 6 */
10510 fputs ("lgt", file); /* 1 */
10513 fputs ("lge", file); /* 5 */
10516 gcc_unreachable ();
10521 /* If constant, low-order 16 bits of constant, signed. Otherwise, write
10524 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
10525 ((INT_LOWPART (x) & 0xffff) ^ 0x8000) - 0x8000);
10527 print_operand (file, x, 0);
10531 /* MB value for a PowerPC64 rldic operand. */
10532 val = (GET_CODE (x) == CONST_INT
10533 ? INTVAL (x) : CONST_DOUBLE_HIGH (x));
10538 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
10539 if ((val <<= 1) < 0)
10542 #if HOST_BITS_PER_WIDE_INT == 32
10543 if (GET_CODE (x) == CONST_INT && i >= 0)
10544 i += 32; /* zero-extend high-part was all 0's */
10545 else if (GET_CODE (x) == CONST_DOUBLE && i == 32)
10547 val = CONST_DOUBLE_LOW (x);
10553 for ( ; i < 64; i++)
10554 if ((val <<= 1) < 0)
10559 fprintf (file, "%d", i + 1);
10563 if (GET_CODE (x) == MEM
10564 && legitimate_indexed_address_p (XEXP (x, 0), 0))
10569 /* Like 'L', for third word of TImode */
10570 if (GET_CODE (x) == REG)
10571 fputs (reg_names[REGNO (x) + 2], file);
10572 else if (GET_CODE (x) == MEM)
10574 if (GET_CODE (XEXP (x, 0)) == PRE_INC
10575 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
10576 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
10578 output_address (XEXP (adjust_address_nv (x, SImode, 8), 0));
10579 if (small_data_operand (x, GET_MODE (x)))
10580 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
10581 reg_names[SMALL_DATA_REG]);
10586 /* X is a SYMBOL_REF. Write out the name preceded by a
10587 period and without any trailing data in brackets. Used for function
10588 names. If we are configured for System V (or the embedded ABI) on
10589 the PowerPC, do not emit the period, since those systems do not use
10590 TOCs and the like. */
10591 gcc_assert (GET_CODE (x) == SYMBOL_REF);
10593 /* Mark the decl as referenced so that cgraph will output the
10595 if (SYMBOL_REF_DECL (x))
10596 mark_decl_referenced (SYMBOL_REF_DECL (x));
10598 /* For macho, check to see if we need a stub. */
10601 const char *name = XSTR (x, 0);
10603 if (MACHOPIC_INDIRECT
10604 && machopic_classify_symbol (x) == MACHOPIC_UNDEFINED_FUNCTION)
10605 name = machopic_indirection_name (x, /*stub_p=*/true);
10607 assemble_name (file, name);
10609 else if (!DOT_SYMBOLS)
10610 assemble_name (file, XSTR (x, 0));
10612 rs6000_output_function_entry (file, XSTR (x, 0));
10616 /* Like 'L', for last word of TImode. */
10617 if (GET_CODE (x) == REG)
10618 fputs (reg_names[REGNO (x) + 3], file);
10619 else if (GET_CODE (x) == MEM)
10621 if (GET_CODE (XEXP (x, 0)) == PRE_INC
10622 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
10623 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
10625 output_address (XEXP (adjust_address_nv (x, SImode, 12), 0));
10626 if (small_data_operand (x, GET_MODE (x)))
10627 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
10628 reg_names[SMALL_DATA_REG]);
10632 /* Print AltiVec or SPE memory operand. */
10637 gcc_assert (GET_CODE (x) == MEM);
10643 /* Handle [reg]. */
10644 if (GET_CODE (tmp) == REG)
10646 fprintf (file, "0(%s)", reg_names[REGNO (tmp)]);
10649 /* Handle [reg+UIMM]. */
10650 else if (GET_CODE (tmp) == PLUS &&
10651 GET_CODE (XEXP (tmp, 1)) == CONST_INT)
10655 gcc_assert (GET_CODE (XEXP (tmp, 0)) == REG);
10657 x = INTVAL (XEXP (tmp, 1));
10658 fprintf (file, "%d(%s)", x, reg_names[REGNO (XEXP (tmp, 0))]);
10662 /* Fall through. Must be [reg+reg]. */
10665 && GET_CODE (tmp) == AND
10666 && GET_CODE (XEXP (tmp, 1)) == CONST_INT
10667 && INTVAL (XEXP (tmp, 1)) == -16)
10668 tmp = XEXP (tmp, 0);
10669 if (GET_CODE (tmp) == REG)
10670 fprintf (file, "0,%s", reg_names[REGNO (tmp)]);
10673 gcc_assert (GET_CODE (tmp) == PLUS
10674 && GET_CODE (XEXP (tmp, 1)) == REG);
10676 if (REGNO (XEXP (tmp, 0)) == 0)
10677 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 1)) ],
10678 reg_names[ REGNO (XEXP (tmp, 0)) ]);
10680 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 0)) ],
10681 reg_names[ REGNO (XEXP (tmp, 1)) ]);
10687 if (GET_CODE (x) == REG)
10688 fprintf (file, "%s", reg_names[REGNO (x)]);
10689 else if (GET_CODE (x) == MEM)
10691 /* We need to handle PRE_INC and PRE_DEC here, since we need to
10692 know the width from the mode. */
10693 if (GET_CODE (XEXP (x, 0)) == PRE_INC)
10694 fprintf (file, "%d(%s)", GET_MODE_SIZE (GET_MODE (x)),
10695 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
10696 else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
10697 fprintf (file, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x)),
10698 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
10700 output_address (XEXP (x, 0));
10703 output_addr_const (file, x);
10707 assemble_name (file, rs6000_get_some_local_dynamic_name ());
10711 output_operand_lossage ("invalid %%xn code");
10715 /* Print the address of an operand. */
10718 print_operand_address (FILE *file, rtx x)
10720 if (GET_CODE (x) == REG)
10721 fprintf (file, "0(%s)", reg_names[ REGNO (x) ]);
10722 else if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST
10723 || GET_CODE (x) == LABEL_REF)
10725 output_addr_const (file, x);
10726 if (small_data_operand (x, GET_MODE (x)))
10727 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
10728 reg_names[SMALL_DATA_REG]);
10730 gcc_assert (!TARGET_TOC);
10732 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG)
10734 if (REGNO (XEXP (x, 0)) == 0)
10735 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 1)) ],
10736 reg_names[ REGNO (XEXP (x, 0)) ]);
10738 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 0)) ],
10739 reg_names[ REGNO (XEXP (x, 1)) ]);
10741 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
10742 fprintf (file, HOST_WIDE_INT_PRINT_DEC "(%s)",
10743 INTVAL (XEXP (x, 1)), reg_names[ REGNO (XEXP (x, 0)) ]);
10745 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
10746 && CONSTANT_P (XEXP (x, 1)))
10748 output_addr_const (file, XEXP (x, 1));
10749 fprintf (file, "@l(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
10753 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
10754 && CONSTANT_P (XEXP (x, 1)))
10756 fprintf (file, "lo16(");
10757 output_addr_const (file, XEXP (x, 1));
10758 fprintf (file, ")(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
10761 else if (legitimate_constant_pool_address_p (x))
10763 if (TARGET_AIX && (!TARGET_ELF || !TARGET_MINIMAL_TOC))
10765 rtx contains_minus = XEXP (x, 1);
10769 /* Find the (minus (sym) (toc)) buried in X, and temporarily
10770 turn it into (sym) for output_addr_const. */
10771 while (GET_CODE (XEXP (contains_minus, 0)) != MINUS)
10772 contains_minus = XEXP (contains_minus, 0);
10774 minus = XEXP (contains_minus, 0);
10775 symref = XEXP (minus, 0);
10776 XEXP (contains_minus, 0) = symref;
10781 name = XSTR (symref, 0);
10782 newname = alloca (strlen (name) + sizeof ("@toc"));
10783 strcpy (newname, name);
10784 strcat (newname, "@toc");
10785 XSTR (symref, 0) = newname;
10787 output_addr_const (file, XEXP (x, 1));
10789 XSTR (symref, 0) = name;
10790 XEXP (contains_minus, 0) = minus;
10793 output_addr_const (file, XEXP (x, 1));
10795 fprintf (file, "(%s)", reg_names[REGNO (XEXP (x, 0))]);
10798 gcc_unreachable ();
10801 /* Target hook for assembling integer objects. The PowerPC version has
10802 to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
10803 is defined. It also needs to handle DI-mode objects on 64-bit
10807 rs6000_assemble_integer (rtx x, unsigned int size, int aligned_p)
10809 #ifdef RELOCATABLE_NEEDS_FIXUP
10810 /* Special handling for SI values. */
10811 if (RELOCATABLE_NEEDS_FIXUP && size == 4 && aligned_p)
10813 extern int in_toc_section (void);
10814 static int recurse = 0;
10816 /* For -mrelocatable, we mark all addresses that need to be fixed up
10817 in the .fixup section. */
10818 if (TARGET_RELOCATABLE
10819 && !in_toc_section ()
10820 && !in_text_section ()
10821 && !in_unlikely_text_section ()
10823 && GET_CODE (x) != CONST_INT
10824 && GET_CODE (x) != CONST_DOUBLE
10830 ASM_GENERATE_INTERNAL_LABEL (buf, "LCP", fixuplabelno);
10832 ASM_OUTPUT_LABEL (asm_out_file, buf);
10833 fprintf (asm_out_file, "\t.long\t(");
10834 output_addr_const (asm_out_file, x);
10835 fprintf (asm_out_file, ")@fixup\n");
10836 fprintf (asm_out_file, "\t.section\t\".fixup\",\"aw\"\n");
10837 ASM_OUTPUT_ALIGN (asm_out_file, 2);
10838 fprintf (asm_out_file, "\t.long\t");
10839 assemble_name (asm_out_file, buf);
10840 fprintf (asm_out_file, "\n\t.previous\n");
10844 /* Remove initial .'s to turn a -mcall-aixdesc function
10845 address into the address of the descriptor, not the function
10847 else if (GET_CODE (x) == SYMBOL_REF
10848 && XSTR (x, 0)[0] == '.'
10849 && DEFAULT_ABI == ABI_AIX)
10851 const char *name = XSTR (x, 0);
10852 while (*name == '.')
10855 fprintf (asm_out_file, "\t.long\t%s\n", name);
10859 #endif /* RELOCATABLE_NEEDS_FIXUP */
10860 return default_assemble_integer (x, size, aligned_p);
10863 #ifdef HAVE_GAS_HIDDEN
10864 /* Emit an assembler directive to set symbol visibility for DECL to
10865 VISIBILITY_TYPE. */
10868 rs6000_assemble_visibility (tree decl, int vis)
10870 /* Functions need to have their entry point symbol visibility set as
10871 well as their descriptor symbol visibility. */
10872 if (DEFAULT_ABI == ABI_AIX
10874 && TREE_CODE (decl) == FUNCTION_DECL)
10876 static const char * const visibility_types[] = {
10877 NULL, "internal", "hidden", "protected"
10880 const char *name, *type;
10882 name = ((* targetm.strip_name_encoding)
10883 (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl))));
10884 type = visibility_types[vis];
10886 fprintf (asm_out_file, "\t.%s\t%s\n", type, name);
10887 fprintf (asm_out_file, "\t.%s\t.%s\n", type, name);
10890 default_assemble_visibility (decl, vis);
10895 rs6000_reverse_condition (enum machine_mode mode, enum rtx_code code)
10897 /* Reversal of FP compares takes care -- an ordered compare
10898 becomes an unordered compare and vice versa. */
10899 if (mode == CCFPmode
10900 && (!flag_finite_math_only
10901 || code == UNLT || code == UNLE || code == UNGT || code == UNGE
10902 || code == UNEQ || code == LTGT))
10903 return reverse_condition_maybe_unordered (code);
10905 return reverse_condition (code);
10908 /* Generate a compare for CODE. Return a brand-new rtx that
10909 represents the result of the compare. */
10912 rs6000_generate_compare (enum rtx_code code)
10914 enum machine_mode comp_mode;
10915 rtx compare_result;
10917 if (rs6000_compare_fp_p)
10918 comp_mode = CCFPmode;
10919 else if (code == GTU || code == LTU
10920 || code == GEU || code == LEU)
10921 comp_mode = CCUNSmode;
10922 else if ((code == EQ || code == NE)
10923 && GET_CODE (rs6000_compare_op0) == SUBREG
10924 && GET_CODE (rs6000_compare_op1) == SUBREG
10925 && SUBREG_PROMOTED_UNSIGNED_P (rs6000_compare_op0)
10926 && SUBREG_PROMOTED_UNSIGNED_P (rs6000_compare_op1))
10927 /* These are unsigned values, perhaps there will be a later
10928 ordering compare that can be shared with this one.
10929 Unfortunately we cannot detect the signedness of the operands
10930 for non-subregs. */
10931 comp_mode = CCUNSmode;
10933 comp_mode = CCmode;
10935 /* First, the compare. */
10936 compare_result = gen_reg_rtx (comp_mode);
10938 /* SPE FP compare instructions on the GPRs. Yuck! */
10939 if ((TARGET_E500 && !TARGET_FPRS && TARGET_HARD_FLOAT)
10940 && rs6000_compare_fp_p)
10942 rtx cmp, or_result, compare_result2;
10943 enum machine_mode op_mode = GET_MODE (rs6000_compare_op0);
10945 if (op_mode == VOIDmode)
10946 op_mode = GET_MODE (rs6000_compare_op1);
10948 /* Note: The E500 comparison instructions set the GT bit (x +
10949 1), on success. This explains the mess. */
10953 case EQ: case UNEQ: case NE: case LTGT:
10957 cmp = flag_unsafe_math_optimizations
10958 ? gen_tstsfeq_gpr (compare_result, rs6000_compare_op0,
10959 rs6000_compare_op1)
10960 : gen_cmpsfeq_gpr (compare_result, rs6000_compare_op0,
10961 rs6000_compare_op1);
10965 cmp = flag_unsafe_math_optimizations
10966 ? gen_tstdfeq_gpr (compare_result, rs6000_compare_op0,
10967 rs6000_compare_op1)
10968 : gen_cmpdfeq_gpr (compare_result, rs6000_compare_op0,
10969 rs6000_compare_op1);
10973 gcc_unreachable ();
10977 case GT: case GTU: case UNGT: case UNGE: case GE: case GEU:
10981 cmp = flag_unsafe_math_optimizations
10982 ? gen_tstsfgt_gpr (compare_result, rs6000_compare_op0,
10983 rs6000_compare_op1)
10984 : gen_cmpsfgt_gpr (compare_result, rs6000_compare_op0,
10985 rs6000_compare_op1);
10989 cmp = flag_unsafe_math_optimizations
10990 ? gen_tstdfgt_gpr (compare_result, rs6000_compare_op0,
10991 rs6000_compare_op1)
10992 : gen_cmpdfgt_gpr (compare_result, rs6000_compare_op0,
10993 rs6000_compare_op1);
10997 gcc_unreachable ();
11001 case LT: case LTU: case UNLT: case UNLE: case LE: case LEU:
11005 cmp = flag_unsafe_math_optimizations
11006 ? gen_tstsflt_gpr (compare_result, rs6000_compare_op0,
11007 rs6000_compare_op1)
11008 : gen_cmpsflt_gpr (compare_result, rs6000_compare_op0,
11009 rs6000_compare_op1);
11013 cmp = flag_unsafe_math_optimizations
11014 ? gen_tstdflt_gpr (compare_result, rs6000_compare_op0,
11015 rs6000_compare_op1)
11016 : gen_cmpdflt_gpr (compare_result, rs6000_compare_op0,
11017 rs6000_compare_op1);
11021 gcc_unreachable ();
11025 gcc_unreachable ();
11028 /* Synthesize LE and GE from LT/GT || EQ. */
11029 if (code == LE || code == GE || code == LEU || code == GEU)
11035 case LE: code = LT; break;
11036 case GE: code = GT; break;
11037 case LEU: code = LT; break;
11038 case GEU: code = GT; break;
11039 default: gcc_unreachable ();
11042 compare_result2 = gen_reg_rtx (CCFPmode);
11048 cmp = flag_unsafe_math_optimizations
11049 ? gen_tstsfeq_gpr (compare_result2, rs6000_compare_op0,
11050 rs6000_compare_op1)
11051 : gen_cmpsfeq_gpr (compare_result2, rs6000_compare_op0,
11052 rs6000_compare_op1);
11056 cmp = flag_unsafe_math_optimizations
11057 ? gen_tstdfeq_gpr (compare_result2, rs6000_compare_op0,
11058 rs6000_compare_op1)
11059 : gen_cmpdfeq_gpr (compare_result2, rs6000_compare_op0,
11060 rs6000_compare_op1);
11064 gcc_unreachable ();
11068 /* OR them together. */
11069 or_result = gen_reg_rtx (CCFPmode);
11070 cmp = gen_e500_cr_ior_compare (or_result, compare_result,
11072 compare_result = or_result;
11077 if (code == NE || code == LTGT)
11087 /* Generate XLC-compatible TFmode compare as PARALLEL with extra
11088 CLOBBERs to match cmptf_internal2 pattern. */
11089 if (comp_mode == CCFPmode && TARGET_XL_COMPAT
11090 && GET_MODE (rs6000_compare_op0) == TFmode
11091 && (DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_DARWIN)
11092 && TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_LONG_DOUBLE_128)
11093 emit_insn (gen_rtx_PARALLEL (VOIDmode,
11095 gen_rtx_SET (VOIDmode,
11097 gen_rtx_COMPARE (comp_mode,
11098 rs6000_compare_op0,
11099 rs6000_compare_op1)),
11100 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
11101 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
11102 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
11103 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
11104 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
11105 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
11106 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
11107 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)))));
11108 else if (GET_CODE (rs6000_compare_op1) == UNSPEC
11109 && XINT (rs6000_compare_op1, 1) == UNSPEC_SP_TEST)
11111 rtx op1 = XVECEXP (rs6000_compare_op1, 0, 0);
11112 comp_mode = CCEQmode;
11113 compare_result = gen_reg_rtx (CCEQmode);
11115 emit_insn (gen_stack_protect_testdi (compare_result,
11116 rs6000_compare_op0, op1));
11118 emit_insn (gen_stack_protect_testsi (compare_result,
11119 rs6000_compare_op0, op1));
11122 emit_insn (gen_rtx_SET (VOIDmode, compare_result,
11123 gen_rtx_COMPARE (comp_mode,
11124 rs6000_compare_op0,
11125 rs6000_compare_op1)));
11128 /* Some kinds of FP comparisons need an OR operation;
11129 under flag_finite_math_only we don't bother. */
11130 if (rs6000_compare_fp_p
11131 && !flag_finite_math_only
11132 && !(TARGET_HARD_FLOAT && TARGET_E500 && !TARGET_FPRS)
11133 && (code == LE || code == GE
11134 || code == UNEQ || code == LTGT
11135 || code == UNGT || code == UNLT))
11137 enum rtx_code or1, or2;
11138 rtx or1_rtx, or2_rtx, compare2_rtx;
11139 rtx or_result = gen_reg_rtx (CCEQmode);
11143 case LE: or1 = LT; or2 = EQ; break;
11144 case GE: or1 = GT; or2 = EQ; break;
11145 case UNEQ: or1 = UNORDERED; or2 = EQ; break;
11146 case LTGT: or1 = LT; or2 = GT; break;
11147 case UNGT: or1 = UNORDERED; or2 = GT; break;
11148 case UNLT: or1 = UNORDERED; or2 = LT; break;
11149 default: gcc_unreachable ();
11151 validate_condition_mode (or1, comp_mode);
11152 validate_condition_mode (or2, comp_mode);
11153 or1_rtx = gen_rtx_fmt_ee (or1, SImode, compare_result, const0_rtx);
11154 or2_rtx = gen_rtx_fmt_ee (or2, SImode, compare_result, const0_rtx);
11155 compare2_rtx = gen_rtx_COMPARE (CCEQmode,
11156 gen_rtx_IOR (SImode, or1_rtx, or2_rtx),
11158 emit_insn (gen_rtx_SET (VOIDmode, or_result, compare2_rtx));
11160 compare_result = or_result;
11164 validate_condition_mode (code, GET_MODE (compare_result));
11166 return gen_rtx_fmt_ee (code, VOIDmode, compare_result, const0_rtx);
11170 /* Emit the RTL for an sCOND pattern. */
11173 rs6000_emit_sCOND (enum rtx_code code, rtx result)
11176 enum machine_mode op_mode;
11177 enum rtx_code cond_code;
11179 condition_rtx = rs6000_generate_compare (code);
11180 cond_code = GET_CODE (condition_rtx);
11182 if (TARGET_E500 && rs6000_compare_fp_p
11183 && !TARGET_FPRS && TARGET_HARD_FLOAT)
11187 PUT_MODE (condition_rtx, SImode);
11188 t = XEXP (condition_rtx, 0);
11190 gcc_assert (cond_code == NE || cond_code == EQ);
11192 if (cond_code == NE)
11193 emit_insn (gen_e500_flip_gt_bit (t, t));
11195 emit_insn (gen_move_from_CR_gt_bit (result, t));
11199 if (cond_code == NE
11200 || cond_code == GE || cond_code == LE
11201 || cond_code == GEU || cond_code == LEU
11202 || cond_code == ORDERED || cond_code == UNGE || cond_code == UNLE)
11204 rtx not_result = gen_reg_rtx (CCEQmode);
11205 rtx not_op, rev_cond_rtx;
11206 enum machine_mode cc_mode;
11208 cc_mode = GET_MODE (XEXP (condition_rtx, 0));
11210 rev_cond_rtx = gen_rtx_fmt_ee (rs6000_reverse_condition (cc_mode, cond_code),
11211 SImode, XEXP (condition_rtx, 0), const0_rtx);
11212 not_op = gen_rtx_COMPARE (CCEQmode, rev_cond_rtx, const0_rtx);
11213 emit_insn (gen_rtx_SET (VOIDmode, not_result, not_op));
11214 condition_rtx = gen_rtx_EQ (VOIDmode, not_result, const0_rtx);
11217 op_mode = GET_MODE (rs6000_compare_op0);
11218 if (op_mode == VOIDmode)
11219 op_mode = GET_MODE (rs6000_compare_op1);
11221 if (TARGET_POWERPC64 && (op_mode == DImode || rs6000_compare_fp_p))
11223 PUT_MODE (condition_rtx, DImode);
11224 convert_move (result, condition_rtx, 0);
11228 PUT_MODE (condition_rtx, SImode);
11229 emit_insn (gen_rtx_SET (VOIDmode, result, condition_rtx));
11233 /* Emit a branch of kind CODE to location LOC. */
11236 rs6000_emit_cbranch (enum rtx_code code, rtx loc)
11238 rtx condition_rtx, loc_ref;
11240 condition_rtx = rs6000_generate_compare (code);
11241 loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
11242 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
11243 gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
11244 loc_ref, pc_rtx)));
11247 /* Return the string to output a conditional branch to LABEL, which is
11248 the operand number of the label, or -1 if the branch is really a
11249 conditional return.
11251 OP is the conditional expression. XEXP (OP, 0) is assumed to be a
11252 condition code register and its mode specifies what kind of
11253 comparison we made.
11255 REVERSED is nonzero if we should reverse the sense of the comparison.
11257 INSN is the insn. */
11260 output_cbranch (rtx op, const char *label, int reversed, rtx insn)
11262 static char string[64];
11263 enum rtx_code code = GET_CODE (op);
11264 rtx cc_reg = XEXP (op, 0);
11265 enum machine_mode mode = GET_MODE (cc_reg);
11266 int cc_regno = REGNO (cc_reg) - CR0_REGNO;
11267 int need_longbranch = label != NULL && get_attr_length (insn) == 8;
11268 int really_reversed = reversed ^ need_longbranch;
11274 validate_condition_mode (code, mode);
11276 /* Work out which way this really branches. We could use
11277 reverse_condition_maybe_unordered here always but this
11278 makes the resulting assembler clearer. */
11279 if (really_reversed)
11281 /* Reversal of FP compares takes care -- an ordered compare
11282 becomes an unordered compare and vice versa. */
11283 if (mode == CCFPmode)
11284 code = reverse_condition_maybe_unordered (code);
11286 code = reverse_condition (code);
11289 if ((TARGET_E500 && !TARGET_FPRS && TARGET_HARD_FLOAT) && mode == CCFPmode)
11291 /* The efscmp/tst* instructions twiddle bit 2, which maps nicely
11296 /* Opposite of GT. */
11305 gcc_unreachable ();
11311 /* Not all of these are actually distinct opcodes, but
11312 we distinguish them for clarity of the resulting assembler. */
11313 case NE: case LTGT:
11314 ccode = "ne"; break;
11315 case EQ: case UNEQ:
11316 ccode = "eq"; break;
11318 ccode = "ge"; break;
11319 case GT: case GTU: case UNGT:
11320 ccode = "gt"; break;
11322 ccode = "le"; break;
11323 case LT: case LTU: case UNLT:
11324 ccode = "lt"; break;
11325 case UNORDERED: ccode = "un"; break;
11326 case ORDERED: ccode = "nu"; break;
11327 case UNGE: ccode = "nl"; break;
11328 case UNLE: ccode = "ng"; break;
11330 gcc_unreachable ();
11333 /* Maybe we have a guess as to how likely the branch is.
11334 The old mnemonics don't have a way to specify this information. */
11336 note = find_reg_note (insn, REG_BR_PROB, NULL_RTX);
11337 if (note != NULL_RTX)
11339 /* PROB is the difference from 50%. */
11340 int prob = INTVAL (XEXP (note, 0)) - REG_BR_PROB_BASE / 2;
11342 /* Only hint for highly probable/improbable branches on newer
11343 cpus as static prediction overrides processor dynamic
11344 prediction. For older cpus we may as well always hint, but
11345 assume not taken for branches that are very close to 50% as a
11346 mispredicted taken branch is more expensive than a
11347 mispredicted not-taken branch. */
11348 if (rs6000_always_hint
11349 || abs (prob) > REG_BR_PROB_BASE / 100 * 48)
11351 if (abs (prob) > REG_BR_PROB_BASE / 20
11352 && ((prob > 0) ^ need_longbranch))
11360 s += sprintf (s, "{b%sr|b%slr%s} ", ccode, ccode, pred);
11362 s += sprintf (s, "{b%s|b%s%s} ", ccode, ccode, pred);
11364 /* We need to escape any '%' characters in the reg_names string.
11365 Assume they'd only be the first character.... */
11366 if (reg_names[cc_regno + CR0_REGNO][0] == '%')
11368 s += sprintf (s, "%s", reg_names[cc_regno + CR0_REGNO]);
11372 /* If the branch distance was too far, we may have to use an
11373 unconditional branch to go the distance. */
11374 if (need_longbranch)
11375 s += sprintf (s, ",$+8\n\tb %s", label);
11377 s += sprintf (s, ",%s", label);
11383 /* Return the string to flip the GT bit on a CR. */
11385 output_e500_flip_gt_bit (rtx dst, rtx src)
11387 static char string[64];
11390 gcc_assert (GET_CODE (dst) == REG && CR_REGNO_P (REGNO (dst))
11391 && GET_CODE (src) == REG && CR_REGNO_P (REGNO (src)));
11394 a = 4 * (REGNO (dst) - CR0_REGNO) + 1;
11395 b = 4 * (REGNO (src) - CR0_REGNO) + 1;
11397 sprintf (string, "crnot %d,%d", a, b);
11401 /* Return insn index for the vector compare instruction for given CODE,
11402 and DEST_MODE, OP_MODE. Return INSN_NOT_AVAILABLE if valid insn is
11406 get_vec_cmp_insn (enum rtx_code code,
11407 enum machine_mode dest_mode,
11408 enum machine_mode op_mode)
11410 if (!TARGET_ALTIVEC)
11411 return INSN_NOT_AVAILABLE;
11416 if (dest_mode == V16QImode && op_mode == V16QImode)
11417 return UNSPEC_VCMPEQUB;
11418 if (dest_mode == V8HImode && op_mode == V8HImode)
11419 return UNSPEC_VCMPEQUH;
11420 if (dest_mode == V4SImode && op_mode == V4SImode)
11421 return UNSPEC_VCMPEQUW;
11422 if (dest_mode == V4SImode && op_mode == V4SFmode)
11423 return UNSPEC_VCMPEQFP;
11426 if (dest_mode == V4SImode && op_mode == V4SFmode)
11427 return UNSPEC_VCMPGEFP;
11429 if (dest_mode == V16QImode && op_mode == V16QImode)
11430 return UNSPEC_VCMPGTSB;
11431 if (dest_mode == V8HImode && op_mode == V8HImode)
11432 return UNSPEC_VCMPGTSH;
11433 if (dest_mode == V4SImode && op_mode == V4SImode)
11434 return UNSPEC_VCMPGTSW;
11435 if (dest_mode == V4SImode && op_mode == V4SFmode)
11436 return UNSPEC_VCMPGTFP;
11439 if (dest_mode == V16QImode && op_mode == V16QImode)
11440 return UNSPEC_VCMPGTUB;
11441 if (dest_mode == V8HImode && op_mode == V8HImode)
11442 return UNSPEC_VCMPGTUH;
11443 if (dest_mode == V4SImode && op_mode == V4SImode)
11444 return UNSPEC_VCMPGTUW;
11449 return INSN_NOT_AVAILABLE;
11452 /* Emit vector compare for operands OP0 and OP1 using code RCODE.
11453 DMODE is expected destination mode. This is a recursive function. */
11456 rs6000_emit_vector_compare (enum rtx_code rcode,
11458 enum machine_mode dmode)
11462 enum machine_mode dest_mode;
11463 enum machine_mode op_mode = GET_MODE (op1);
11465 gcc_assert (TARGET_ALTIVEC);
11466 gcc_assert (GET_MODE (op0) == GET_MODE (op1));
11468 /* Floating point vector compare instructions uses destination V4SImode.
11469 Move destination to appropriate mode later. */
11470 if (dmode == V4SFmode)
11471 dest_mode = V4SImode;
11475 mask = gen_reg_rtx (dest_mode);
11476 vec_cmp_insn = get_vec_cmp_insn (rcode, dest_mode, op_mode);
11478 if (vec_cmp_insn == INSN_NOT_AVAILABLE)
11480 bool swap_operands = false;
11481 bool try_again = false;
11486 swap_operands = true;
11491 swap_operands = true;
11495 /* Treat A != B as ~(A==B). */
11497 enum insn_code nor_code;
11498 rtx eq_rtx = rs6000_emit_vector_compare (EQ, op0, op1,
11501 nor_code = one_cmpl_optab->handlers[(int)dest_mode].insn_code;
11502 gcc_assert (nor_code != CODE_FOR_nothing);
11503 emit_insn (GEN_FCN (nor_code) (mask, eq_rtx));
11505 if (dmode != dest_mode)
11507 rtx temp = gen_reg_rtx (dest_mode);
11508 convert_move (temp, mask, 0);
11518 /* Try GT/GTU/LT/LTU OR EQ */
11521 enum insn_code ior_code;
11522 enum rtx_code new_code;
11543 gcc_unreachable ();
11546 c_rtx = rs6000_emit_vector_compare (new_code,
11547 op0, op1, dest_mode);
11548 eq_rtx = rs6000_emit_vector_compare (EQ, op0, op1,
11551 ior_code = ior_optab->handlers[(int)dest_mode].insn_code;
11552 gcc_assert (ior_code != CODE_FOR_nothing);
11553 emit_insn (GEN_FCN (ior_code) (mask, c_rtx, eq_rtx));
11554 if (dmode != dest_mode)
11556 rtx temp = gen_reg_rtx (dest_mode);
11557 convert_move (temp, mask, 0);
11564 gcc_unreachable ();
11569 vec_cmp_insn = get_vec_cmp_insn (rcode, dest_mode, op_mode);
11570 /* You only get two chances. */
11571 gcc_assert (vec_cmp_insn != INSN_NOT_AVAILABLE);
11583 emit_insn (gen_rtx_SET (VOIDmode, mask,
11584 gen_rtx_UNSPEC (dest_mode,
11585 gen_rtvec (2, op0, op1),
11587 if (dmode != dest_mode)
11589 rtx temp = gen_reg_rtx (dest_mode);
11590 convert_move (temp, mask, 0);
11596 /* Return vector select instruction for MODE. Return INSN_NOT_AVAILABLE, if
11597 valid insn doesn exist for given mode. */
11600 get_vsel_insn (enum machine_mode mode)
11605 return UNSPEC_VSEL4SI;
11608 return UNSPEC_VSEL4SF;
11611 return UNSPEC_VSEL8HI;
11614 return UNSPEC_VSEL16QI;
11617 return INSN_NOT_AVAILABLE;
11620 return INSN_NOT_AVAILABLE;
11623 /* Emit vector select insn where DEST is destination using
11624 operands OP1, OP2 and MASK. */
11627 rs6000_emit_vector_select (rtx dest, rtx op1, rtx op2, rtx mask)
11630 enum machine_mode dest_mode = GET_MODE (dest);
11631 int vsel_insn_index = get_vsel_insn (GET_MODE (dest));
11633 temp = gen_reg_rtx (dest_mode);
11635 /* For each vector element, select op1 when mask is 1 otherwise
11637 t = gen_rtx_SET (VOIDmode, temp,
11638 gen_rtx_UNSPEC (dest_mode,
11639 gen_rtvec (3, op2, op1, mask),
11642 emit_move_insn (dest, temp);
11646 /* Emit vector conditional expression.
11647 DEST is destination. OP1 and OP2 are two VEC_COND_EXPR operands.
11648 CC_OP0 and CC_OP1 are the two operands for the relation operation COND. */
11651 rs6000_emit_vector_cond_expr (rtx dest, rtx op1, rtx op2,
11652 rtx cond, rtx cc_op0, rtx cc_op1)
11654 enum machine_mode dest_mode = GET_MODE (dest);
11655 enum rtx_code rcode = GET_CODE (cond);
11658 if (!TARGET_ALTIVEC)
11661 /* Get the vector mask for the given relational operations. */
11662 mask = rs6000_emit_vector_compare (rcode, cc_op0, cc_op1, dest_mode);
11664 rs6000_emit_vector_select (dest, op1, op2, mask);
11669 /* Emit a conditional move: move TRUE_COND to DEST if OP of the
11670 operands of the last comparison is nonzero/true, FALSE_COND if it
11671 is zero/false. Return 0 if the hardware has no such operation. */
11674 rs6000_emit_cmove (rtx dest, rtx op, rtx true_cond, rtx false_cond)
11676 enum rtx_code code = GET_CODE (op);
11677 rtx op0 = rs6000_compare_op0;
11678 rtx op1 = rs6000_compare_op1;
11679 REAL_VALUE_TYPE c1;
11680 enum machine_mode compare_mode = GET_MODE (op0);
11681 enum machine_mode result_mode = GET_MODE (dest);
11683 bool is_against_zero;
11685 /* These modes should always match. */
11686 if (GET_MODE (op1) != compare_mode
11687 /* In the isel case however, we can use a compare immediate, so
11688 op1 may be a small constant. */
11689 && (!TARGET_ISEL || !short_cint_operand (op1, VOIDmode)))
11691 if (GET_MODE (true_cond) != result_mode)
11693 if (GET_MODE (false_cond) != result_mode)
11696 /* First, work out if the hardware can do this at all, or
11697 if it's too slow.... */
11698 if (! rs6000_compare_fp_p)
11701 return rs6000_emit_int_cmove (dest, op, true_cond, false_cond);
11704 else if (TARGET_E500 && TARGET_HARD_FLOAT && !TARGET_FPRS
11705 && SCALAR_FLOAT_MODE_P (compare_mode))
11708 is_against_zero = op1 == CONST0_RTX (compare_mode);
11710 /* A floating-point subtract might overflow, underflow, or produce
11711 an inexact result, thus changing the floating-point flags, so it
11712 can't be generated if we care about that. It's safe if one side
11713 of the construct is zero, since then no subtract will be
11715 if (SCALAR_FLOAT_MODE_P (compare_mode)
11716 && flag_trapping_math && ! is_against_zero)
11719 /* Eliminate half of the comparisons by switching operands, this
11720 makes the remaining code simpler. */
11721 if (code == UNLT || code == UNGT || code == UNORDERED || code == NE
11722 || code == LTGT || code == LT || code == UNLE)
11724 code = reverse_condition_maybe_unordered (code);
11726 true_cond = false_cond;
11730 /* UNEQ and LTGT take four instructions for a comparison with zero,
11731 it'll probably be faster to use a branch here too. */
11732 if (code == UNEQ && HONOR_NANS (compare_mode))
11735 if (GET_CODE (op1) == CONST_DOUBLE)
11736 REAL_VALUE_FROM_CONST_DOUBLE (c1, op1);
11738 /* We're going to try to implement comparisons by performing
11739 a subtract, then comparing against zero. Unfortunately,
11740 Inf - Inf is NaN which is not zero, and so if we don't
11741 know that the operand is finite and the comparison
11742 would treat EQ different to UNORDERED, we can't do it. */
11743 if (HONOR_INFINITIES (compare_mode)
11744 && code != GT && code != UNGE
11745 && (GET_CODE (op1) != CONST_DOUBLE || real_isinf (&c1))
11746 /* Constructs of the form (a OP b ? a : b) are safe. */
11747 && ((! rtx_equal_p (op0, false_cond) && ! rtx_equal_p (op1, false_cond))
11748 || (! rtx_equal_p (op0, true_cond)
11749 && ! rtx_equal_p (op1, true_cond))))
11752 /* At this point we know we can use fsel. */
11754 /* Reduce the comparison to a comparison against zero. */
11755 if (! is_against_zero)
11757 temp = gen_reg_rtx (compare_mode);
11758 emit_insn (gen_rtx_SET (VOIDmode, temp,
11759 gen_rtx_MINUS (compare_mode, op0, op1)));
11761 op1 = CONST0_RTX (compare_mode);
11764 /* If we don't care about NaNs we can reduce some of the comparisons
11765 down to faster ones. */
11766 if (! HONOR_NANS (compare_mode))
11772 true_cond = false_cond;
11785 /* Now, reduce everything down to a GE. */
11792 temp = gen_reg_rtx (compare_mode);
11793 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
11798 temp = gen_reg_rtx (compare_mode);
11799 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_ABS (compare_mode, op0)));
11804 temp = gen_reg_rtx (compare_mode);
11805 emit_insn (gen_rtx_SET (VOIDmode, temp,
11806 gen_rtx_NEG (compare_mode,
11807 gen_rtx_ABS (compare_mode, op0))));
11812 /* a UNGE 0 <-> (a GE 0 || -a UNLT 0) */
11813 temp = gen_reg_rtx (result_mode);
11814 emit_insn (gen_rtx_SET (VOIDmode, temp,
11815 gen_rtx_IF_THEN_ELSE (result_mode,
11816 gen_rtx_GE (VOIDmode,
11818 true_cond, false_cond)));
11819 false_cond = true_cond;
11822 temp = gen_reg_rtx (compare_mode);
11823 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
11828 /* a GT 0 <-> (a GE 0 && -a UNLT 0) */
11829 temp = gen_reg_rtx (result_mode);
11830 emit_insn (gen_rtx_SET (VOIDmode, temp,
11831 gen_rtx_IF_THEN_ELSE (result_mode,
11832 gen_rtx_GE (VOIDmode,
11834 true_cond, false_cond)));
11835 true_cond = false_cond;
11838 temp = gen_reg_rtx (compare_mode);
11839 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
11844 gcc_unreachable ();
11847 emit_insn (gen_rtx_SET (VOIDmode, dest,
11848 gen_rtx_IF_THEN_ELSE (result_mode,
11849 gen_rtx_GE (VOIDmode,
11851 true_cond, false_cond)));
11855 /* Same as above, but for ints (isel). */
11858 rs6000_emit_int_cmove (rtx dest, rtx op, rtx true_cond, rtx false_cond)
11860 rtx condition_rtx, cr;
11862 /* All isel implementations thus far are 32-bits. */
11863 if (GET_MODE (rs6000_compare_op0) != SImode)
11866 /* We still have to do the compare, because isel doesn't do a
11867 compare, it just looks at the CRx bits set by a previous compare
11869 condition_rtx = rs6000_generate_compare (GET_CODE (op));
11870 cr = XEXP (condition_rtx, 0);
11872 if (GET_MODE (cr) == CCmode)
11873 emit_insn (gen_isel_signed (dest, condition_rtx,
11874 true_cond, false_cond, cr));
11876 emit_insn (gen_isel_unsigned (dest, condition_rtx,
11877 true_cond, false_cond, cr));
11883 output_isel (rtx *operands)
11885 enum rtx_code code;
11887 code = GET_CODE (operands[1]);
11888 if (code == GE || code == GEU || code == LE || code == LEU || code == NE)
11890 PUT_CODE (operands[1], reverse_condition (code));
11891 return "isel %0,%3,%2,%j1";
11894 return "isel %0,%2,%3,%j1";
11898 rs6000_emit_minmax (rtx dest, enum rtx_code code, rtx op0, rtx op1)
11900 enum machine_mode mode = GET_MODE (op0);
11904 if (code == SMAX || code == SMIN)
11909 if (code == SMAX || code == UMAX)
11910 target = emit_conditional_move (dest, c, op0, op1, mode,
11911 op0, op1, mode, 0);
11913 target = emit_conditional_move (dest, c, op0, op1, mode,
11914 op1, op0, mode, 0);
11915 gcc_assert (target);
11916 if (target != dest)
11917 emit_move_insn (dest, target);
11920 /* Emit instructions to perform a load-reserved/store-conditional operation.
11921 The operation performed is an atomic
11922 (set M (CODE:MODE M OP))
11923 If not NULL, BEFORE is atomically set to M before the operation, and
11924 AFTER is set to M after the operation (that is, (CODE:MODE M OP)).
11925 If SYNC_P then a memory barrier is emitted before the operation.
11926 Either OP or M may be wrapped in a NOT operation. */
11929 rs6000_emit_sync (enum rtx_code code, enum machine_mode mode,
11930 rtx m, rtx op, rtx before_param, rtx after_param,
11933 enum machine_mode used_mode;
11934 rtx the_op, set_before, set_after, set_atomic, cc_scratch, before, after;
11937 HOST_WIDE_INT imask = GET_MODE_MASK (mode);
11938 rtx shift = NULL_RTX;
11941 emit_insn (gen_memory_barrier ());
11943 if (GET_CODE (m) == NOT)
11944 used_m = XEXP (m, 0);
11948 /* If this is smaller than SImode, we'll have to use SImode with
11950 if (mode == QImode || mode == HImode)
11954 if (MEM_ALIGN (used_m) >= 32)
11957 if (BYTES_BIG_ENDIAN)
11958 ishift = GET_MODE_BITSIZE (SImode) - GET_MODE_BITSIZE (mode);
11960 shift = GEN_INT (ishift);
11964 rtx addrSI, aligned_addr;
11965 int shift_mask = mode == QImode ? 0x18 : 0x10;
11967 addrSI = force_reg (SImode, gen_lowpart_common (SImode,
11968 XEXP (used_m, 0)));
11969 shift = gen_reg_rtx (SImode);
11971 emit_insn (gen_rlwinm (shift, addrSI, GEN_INT (3),
11972 GEN_INT (shift_mask)));
11973 emit_insn (gen_xorsi3 (shift, shift, GEN_INT (shift_mask)));
11975 aligned_addr = expand_binop (Pmode, and_optab,
11977 GEN_INT (-4), NULL_RTX,
11978 1, OPTAB_LIB_WIDEN);
11979 used_m = change_address (used_m, SImode, aligned_addr);
11980 set_mem_align (used_m, 32);
11981 /* It's safe to keep the old alias set of USED_M, because
11982 the operation is atomic and only affects the original
11984 if (GET_CODE (m) == NOT)
11985 m = gen_rtx_NOT (SImode, used_m);
11990 if (GET_CODE (op) == NOT)
11992 oldop = lowpart_subreg (SImode, XEXP (op, 0), mode);
11993 oldop = gen_rtx_NOT (SImode, oldop);
11996 oldop = lowpart_subreg (SImode, op, mode);
12002 newop = expand_binop (SImode, and_optab,
12003 oldop, GEN_INT (imask), NULL_RTX,
12004 1, OPTAB_LIB_WIDEN);
12005 emit_insn (gen_ashlsi3 (newop, newop, shift));
12009 newop = expand_binop (SImode, ior_optab,
12010 oldop, GEN_INT (~imask), NULL_RTX,
12011 1, OPTAB_LIB_WIDEN);
12012 emit_insn (gen_rotlsi3 (newop, newop, shift));
12020 newop = expand_binop (SImode, and_optab,
12021 oldop, GEN_INT (imask), NULL_RTX,
12022 1, OPTAB_LIB_WIDEN);
12023 emit_insn (gen_ashlsi3 (newop, newop, shift));
12025 mask = gen_reg_rtx (SImode);
12026 emit_move_insn (mask, GEN_INT (imask));
12027 emit_insn (gen_ashlsi3 (mask, mask, shift));
12030 newop = gen_rtx_PLUS (SImode, m, newop);
12032 newop = gen_rtx_MINUS (SImode, m, newop);
12033 newop = gen_rtx_AND (SImode, newop, mask);
12034 newop = gen_rtx_IOR (SImode, newop,
12035 gen_rtx_AND (SImode,
12036 gen_rtx_NOT (SImode, mask),
12042 gcc_unreachable ();
12045 if (GET_CODE (m) == NOT)
12049 mask = gen_reg_rtx (SImode);
12050 emit_move_insn (mask, GEN_INT (imask));
12051 emit_insn (gen_ashlsi3 (mask, mask, shift));
12053 xorm = gen_rtx_XOR (SImode, used_m, mask);
12054 /* Depending on the value of 'op', the XOR or the operation might
12055 be able to be simplified away. */
12056 newop = simplify_gen_binary (code, SImode, xorm, newop);
12059 used_mode = SImode;
12060 before = gen_reg_rtx (used_mode);
12061 after = gen_reg_rtx (used_mode);
12066 before = before_param;
12067 after = after_param;
12069 if (before == NULL_RTX)
12070 before = gen_reg_rtx (used_mode);
12071 if (after == NULL_RTX)
12072 after = gen_reg_rtx (used_mode);
12075 if ((code == PLUS || code == MINUS || GET_CODE (m) == NOT)
12076 && used_mode != mode)
12077 the_op = op; /* Computed above. */
12078 else if (GET_CODE (op) == NOT && GET_CODE (m) != NOT)
12079 the_op = gen_rtx_fmt_ee (code, used_mode, op, m);
12081 the_op = gen_rtx_fmt_ee (code, used_mode, m, op);
12083 set_after = gen_rtx_SET (VOIDmode, after, the_op);
12084 set_before = gen_rtx_SET (VOIDmode, before, used_m);
12085 set_atomic = gen_rtx_SET (VOIDmode, used_m,
12086 gen_rtx_UNSPEC (used_mode,
12087 gen_rtvec (1, the_op),
12089 cc_scratch = gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (CCmode));
12091 if ((code == PLUS || code == MINUS) && used_mode != mode)
12092 vec = gen_rtvec (5, set_after, set_before, set_atomic, cc_scratch,
12093 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (SImode)));
12095 vec = gen_rtvec (4, set_after, set_before, set_atomic, cc_scratch);
12096 emit_insn (gen_rtx_PARALLEL (VOIDmode, vec));
12098 /* Shift and mask the return values properly. */
12099 if (used_mode != mode && before_param)
12101 emit_insn (gen_lshrsi3 (before, before, shift));
12102 convert_move (before_param, before, 1);
12105 if (used_mode != mode && after_param)
12107 emit_insn (gen_lshrsi3 (after, after, shift));
12108 convert_move (after_param, after, 1);
12111 /* The previous sequence will end with a branch that's dependent on
12112 the conditional store, so placing an isync will ensure that no
12113 other instructions (especially, no load or store instructions)
12114 can start before the atomic operation completes. */
12116 emit_insn (gen_isync ());
12119 /* A subroutine of the atomic operation splitters. Jump to LABEL if
12120 COND is true. Mark the jump as unlikely to be taken. */
12123 emit_unlikely_jump (rtx cond, rtx label)
12125 rtx very_unlikely = GEN_INT (REG_BR_PROB_BASE / 100 - 1);
12128 x = gen_rtx_IF_THEN_ELSE (VOIDmode, cond, label, pc_rtx);
12129 x = emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx, x));
12130 REG_NOTES (x) = gen_rtx_EXPR_LIST (REG_BR_PROB, very_unlikely, NULL_RTX);
12133 /* A subroutine of the atomic operation splitters. Emit a load-locked
12134 instruction in MODE. */
12137 emit_load_locked (enum machine_mode mode, rtx reg, rtx mem)
12139 rtx (*fn) (rtx, rtx) = NULL;
12140 if (mode == SImode)
12141 fn = gen_load_locked_si;
12142 else if (mode == DImode)
12143 fn = gen_load_locked_di;
12144 emit_insn (fn (reg, mem));
12147 /* A subroutine of the atomic operation splitters. Emit a store-conditional
12148 instruction in MODE. */
12151 emit_store_conditional (enum machine_mode mode, rtx res, rtx mem, rtx val)
12153 rtx (*fn) (rtx, rtx, rtx) = NULL;
12154 if (mode == SImode)
12155 fn = gen_store_conditional_si;
12156 else if (mode == DImode)
12157 fn = gen_store_conditional_di;
12159 /* Emit sync before stwcx. to address PPC405 Erratum. */
12160 if (PPC405_ERRATUM77)
12161 emit_insn (gen_memory_barrier ());
12163 emit_insn (fn (res, mem, val));
12166 /* Expand an an atomic fetch-and-operate pattern. CODE is the binary operation
12167 to perform. MEM is the memory on which to operate. VAL is the second
12168 operand of the binary operator. BEFORE and AFTER are optional locations to
12169 return the value of MEM either before of after the operation. SCRATCH is
12170 a scratch register. */
12173 rs6000_split_atomic_op (enum rtx_code code, rtx mem, rtx val,
12174 rtx before, rtx after, rtx scratch)
12176 enum machine_mode mode = GET_MODE (mem);
12177 rtx label, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);
12179 emit_insn (gen_memory_barrier ());
12181 label = gen_label_rtx ();
12182 emit_label (label);
12183 label = gen_rtx_LABEL_REF (VOIDmode, label);
12185 if (before == NULL_RTX)
12187 emit_load_locked (mode, before, mem);
12190 x = gen_rtx_AND (mode, gen_rtx_NOT (mode, before), val);
12191 else if (code == AND)
12192 x = gen_rtx_UNSPEC (mode, gen_rtvec (2, before, val), UNSPEC_AND);
12194 x = gen_rtx_fmt_ee (code, mode, before, val);
12196 if (after != NULL_RTX)
12197 emit_insn (gen_rtx_SET (VOIDmode, after, copy_rtx (x)));
12198 emit_insn (gen_rtx_SET (VOIDmode, scratch, x));
12200 emit_store_conditional (mode, cond, mem, scratch);
12202 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
12203 emit_unlikely_jump (x, label);
12205 emit_insn (gen_isync ());
12208 /* Expand an atomic compare and swap operation. MEM is the memory on which
12209 to operate. OLDVAL is the old value to be compared. NEWVAL is the new
12210 value to be stored. SCRATCH is a scratch GPR. */
12213 rs6000_split_compare_and_swap (rtx retval, rtx mem, rtx oldval, rtx newval,
12216 enum machine_mode mode = GET_MODE (mem);
12217 rtx label1, label2, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);
12219 emit_insn (gen_memory_barrier ());
12221 label1 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
12222 label2 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
12223 emit_label (XEXP (label1, 0));
12225 emit_load_locked (mode, retval, mem);
12227 x = gen_rtx_COMPARE (CCmode, retval, oldval);
12228 emit_insn (gen_rtx_SET (VOIDmode, cond, x));
12230 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
12231 emit_unlikely_jump (x, label2);
12233 emit_move_insn (scratch, newval);
12234 emit_store_conditional (mode, cond, mem, scratch);
12236 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
12237 emit_unlikely_jump (x, label1);
12239 emit_insn (gen_isync ());
12240 emit_label (XEXP (label2, 0));
12243 /* Expand an atomic test and set operation. MEM is the memory on which
12244 to operate. VAL is the value set. SCRATCH is a scratch GPR. */
12247 rs6000_split_lock_test_and_set (rtx retval, rtx mem, rtx val, rtx scratch)
12249 enum machine_mode mode = GET_MODE (mem);
12250 rtx label, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);
12252 emit_insn (gen_memory_barrier ());
12254 label = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
12255 emit_label (XEXP (label, 0));
12257 emit_load_locked (mode, retval, mem);
12258 emit_move_insn (scratch, val);
12259 emit_store_conditional (mode, cond, mem, scratch);
12261 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
12262 emit_unlikely_jump (x, label);
12264 emit_insn (gen_isync ());
12267 /* Emit instructions to move SRC to DST. Called by splitters for
12268 multi-register moves. It will emit at most one instruction for
12269 each register that is accessed; that is, it won't emit li/lis pairs
12270 (or equivalent for 64-bit code). One of SRC or DST must be a hard
12274 rs6000_split_multireg_move (rtx dst, rtx src)
12276 /* The register number of the first register being moved. */
12278 /* The mode that is to be moved. */
12279 enum machine_mode mode;
12280 /* The mode that the move is being done in, and its size. */
12281 enum machine_mode reg_mode;
12283 /* The number of registers that will be moved. */
12286 reg = REG_P (dst) ? REGNO (dst) : REGNO (src);
12287 mode = GET_MODE (dst);
12288 nregs = hard_regno_nregs[reg][mode];
12289 if (FP_REGNO_P (reg))
12291 else if (ALTIVEC_REGNO_P (reg))
12292 reg_mode = V16QImode;
12294 reg_mode = word_mode;
12295 reg_mode_size = GET_MODE_SIZE (reg_mode);
12297 gcc_assert (reg_mode_size * nregs == GET_MODE_SIZE (mode));
12299 if (REG_P (src) && REG_P (dst) && (REGNO (src) < REGNO (dst)))
12301 /* Move register range backwards, if we might have destructive
12304 for (i = nregs - 1; i >= 0; i--)
12305 emit_insn (gen_rtx_SET (VOIDmode,
12306 simplify_gen_subreg (reg_mode, dst, mode,
12307 i * reg_mode_size),
12308 simplify_gen_subreg (reg_mode, src, mode,
12309 i * reg_mode_size)));
12315 bool used_update = false;
12317 if (MEM_P (src) && INT_REGNO_P (reg))
12321 if (GET_CODE (XEXP (src, 0)) == PRE_INC
12322 || GET_CODE (XEXP (src, 0)) == PRE_DEC)
12325 breg = XEXP (XEXP (src, 0), 0);
12326 delta_rtx = (GET_CODE (XEXP (src, 0)) == PRE_INC
12327 ? GEN_INT (GET_MODE_SIZE (GET_MODE (src)))
12328 : GEN_INT (-GET_MODE_SIZE (GET_MODE (src))));
12329 emit_insn (TARGET_32BIT
12330 ? gen_addsi3 (breg, breg, delta_rtx)
12331 : gen_adddi3 (breg, breg, delta_rtx));
12332 src = replace_equiv_address (src, breg);
12334 else if (! offsettable_memref_p (src))
12337 basereg = gen_rtx_REG (Pmode, reg);
12338 emit_insn (gen_rtx_SET (VOIDmode, basereg, XEXP (src, 0)));
12339 src = replace_equiv_address (src, basereg);
12342 breg = XEXP (src, 0);
12343 if (GET_CODE (breg) == PLUS || GET_CODE (breg) == LO_SUM)
12344 breg = XEXP (breg, 0);
12346 /* If the base register we are using to address memory is
12347 also a destination reg, then change that register last. */
12349 && REGNO (breg) >= REGNO (dst)
12350 && REGNO (breg) < REGNO (dst) + nregs)
12351 j = REGNO (breg) - REGNO (dst);
12354 if (GET_CODE (dst) == MEM && INT_REGNO_P (reg))
12358 if (GET_CODE (XEXP (dst, 0)) == PRE_INC
12359 || GET_CODE (XEXP (dst, 0)) == PRE_DEC)
12362 breg = XEXP (XEXP (dst, 0), 0);
12363 delta_rtx = (GET_CODE (XEXP (dst, 0)) == PRE_INC
12364 ? GEN_INT (GET_MODE_SIZE (GET_MODE (dst)))
12365 : GEN_INT (-GET_MODE_SIZE (GET_MODE (dst))));
12367 /* We have to update the breg before doing the store.
12368 Use store with update, if available. */
12372 rtx nsrc = simplify_gen_subreg (reg_mode, src, mode, 0);
12373 emit_insn (TARGET_32BIT
12374 ? (TARGET_POWERPC64
12375 ? gen_movdi_si_update (breg, breg, delta_rtx, nsrc)
12376 : gen_movsi_update (breg, breg, delta_rtx, nsrc))
12377 : gen_movdi_di_update (breg, breg, delta_rtx, nsrc));
12378 used_update = true;
12381 emit_insn (TARGET_32BIT
12382 ? gen_addsi3 (breg, breg, delta_rtx)
12383 : gen_adddi3 (breg, breg, delta_rtx));
12384 dst = replace_equiv_address (dst, breg);
12387 gcc_assert (offsettable_memref_p (dst));
12390 for (i = 0; i < nregs; i++)
12392 /* Calculate index to next subword. */
12397 /* If compiler already emitted move of first word by
12398 store with update, no need to do anything. */
12399 if (j == 0 && used_update)
12402 emit_insn (gen_rtx_SET (VOIDmode,
12403 simplify_gen_subreg (reg_mode, dst, mode,
12404 j * reg_mode_size),
12405 simplify_gen_subreg (reg_mode, src, mode,
12406 j * reg_mode_size)));
12412 /* This page contains routines that are used to determine what the
12413 function prologue and epilogue code will do and write them out. */
12415 /* Return the first fixed-point register that is required to be
12416 saved. 32 if none. */
12419 first_reg_to_save (void)
12423 /* Find lowest numbered live register. */
12424 for (first_reg = 13; first_reg <= 31; first_reg++)
12425 if (regs_ever_live[first_reg]
12426 && (! call_used_regs[first_reg]
12427 || (first_reg == RS6000_PIC_OFFSET_TABLE_REGNUM
12428 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
12429 || (DEFAULT_ABI == ABI_DARWIN && flag_pic)
12430 || (TARGET_TOC && TARGET_MINIMAL_TOC)))))
12435 && current_function_uses_pic_offset_table
12436 && first_reg > RS6000_PIC_OFFSET_TABLE_REGNUM)
12437 return RS6000_PIC_OFFSET_TABLE_REGNUM;
12443 /* Similar, for FP regs. */
12446 first_fp_reg_to_save (void)
12450 /* Find lowest numbered live register. */
12451 for (first_reg = 14 + 32; first_reg <= 63; first_reg++)
12452 if (regs_ever_live[first_reg])
12458 /* Similar, for AltiVec regs. */
12461 first_altivec_reg_to_save (void)
12465 /* Stack frame remains as is unless we are in AltiVec ABI. */
12466 if (! TARGET_ALTIVEC_ABI)
12467 return LAST_ALTIVEC_REGNO + 1;
12469 /* Find lowest numbered live register. */
12470 for (i = FIRST_ALTIVEC_REGNO + 20; i <= LAST_ALTIVEC_REGNO; ++i)
12471 if (regs_ever_live[i])
12477 /* Return a 32-bit mask of the AltiVec registers we need to set in
12478 VRSAVE. Bit n of the return value is 1 if Vn is live. The MSB in
12479 the 32-bit word is 0. */
12481 static unsigned int
12482 compute_vrsave_mask (void)
12484 unsigned int i, mask = 0;
12486 /* First, find out if we use _any_ altivec registers. */
12487 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
12488 if (regs_ever_live[i])
12489 mask |= ALTIVEC_REG_BIT (i);
12494 /* Next, remove the argument registers from the set. These must
12495 be in the VRSAVE mask set by the caller, so we don't need to add
12496 them in again. More importantly, the mask we compute here is
12497 used to generate CLOBBERs in the set_vrsave insn, and we do not
12498 wish the argument registers to die. */
12499 for (i = cfun->args_info.vregno - 1; i >= ALTIVEC_ARG_MIN_REG; --i)
12500 mask &= ~ALTIVEC_REG_BIT (i);
12502 /* Similarly, remove the return value from the set. */
12505 diddle_return_value (is_altivec_return_reg, &yes);
12507 mask &= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN);
12513 /* For a very restricted set of circumstances, we can cut down the
12514 size of prologues/epilogues by calling our own save/restore-the-world
12518 compute_save_world_info (rs6000_stack_t *info_ptr)
12520 info_ptr->world_save_p = 1;
12521 info_ptr->world_save_p
12522 = (WORLD_SAVE_P (info_ptr)
12523 && DEFAULT_ABI == ABI_DARWIN
12524 && ! (current_function_calls_setjmp && flag_exceptions)
12525 && info_ptr->first_fp_reg_save == FIRST_SAVED_FP_REGNO
12526 && info_ptr->first_gp_reg_save == FIRST_SAVED_GP_REGNO
12527 && info_ptr->first_altivec_reg_save == FIRST_SAVED_ALTIVEC_REGNO
12528 && info_ptr->cr_save_p);
12530 /* This will not work in conjunction with sibcalls. Make sure there
12531 are none. (This check is expensive, but seldom executed.) */
12532 if (WORLD_SAVE_P (info_ptr))
12535 for ( insn = get_last_insn_anywhere (); insn; insn = PREV_INSN (insn))
12536 if ( GET_CODE (insn) == CALL_INSN
12537 && SIBLING_CALL_P (insn))
12539 info_ptr->world_save_p = 0;
12544 if (WORLD_SAVE_P (info_ptr))
12546 /* Even if we're not touching VRsave, make sure there's room on the
12547 stack for it, if it looks like we're calling SAVE_WORLD, which
12548 will attempt to save it. */
12549 info_ptr->vrsave_size = 4;
12551 /* "Save" the VRsave register too if we're saving the world. */
12552 if (info_ptr->vrsave_mask == 0)
12553 info_ptr->vrsave_mask = compute_vrsave_mask ();
12555 /* Because the Darwin register save/restore routines only handle
12556 F14 .. F31 and V20 .. V31 as per the ABI, perform a consistency
12558 gcc_assert (info_ptr->first_fp_reg_save >= FIRST_SAVED_FP_REGNO
12559 && (info_ptr->first_altivec_reg_save
12560 >= FIRST_SAVED_ALTIVEC_REGNO));
12567 is_altivec_return_reg (rtx reg, void *xyes)
12569 bool *yes = (bool *) xyes;
12570 if (REGNO (reg) == ALTIVEC_ARG_RETURN)
12575 /* Calculate the stack information for the current function. This is
12576 complicated by having two separate calling sequences, the AIX calling
12577 sequence and the V.4 calling sequence.
12579 AIX (and Darwin/Mac OS X) stack frames look like:
12581 SP----> +---------------------------------------+
12582 | back chain to caller | 0 0
12583 +---------------------------------------+
12584 | saved CR | 4 8 (8-11)
12585 +---------------------------------------+
12587 +---------------------------------------+
12588 | reserved for compilers | 12 24
12589 +---------------------------------------+
12590 | reserved for binders | 16 32
12591 +---------------------------------------+
12592 | saved TOC pointer | 20 40
12593 +---------------------------------------+
12594 | Parameter save area (P) | 24 48
12595 +---------------------------------------+
12596 | Alloca space (A) | 24+P etc.
12597 +---------------------------------------+
12598 | Local variable space (L) | 24+P+A
12599 +---------------------------------------+
12600 | Float/int conversion temporary (X) | 24+P+A+L
12601 +---------------------------------------+
12602 | Save area for AltiVec registers (W) | 24+P+A+L+X
12603 +---------------------------------------+
12604 | AltiVec alignment padding (Y) | 24+P+A+L+X+W
12605 +---------------------------------------+
12606 | Save area for VRSAVE register (Z) | 24+P+A+L+X+W+Y
12607 +---------------------------------------+
12608 | Save area for GP registers (G) | 24+P+A+X+L+X+W+Y+Z
12609 +---------------------------------------+
12610 | Save area for FP registers (F) | 24+P+A+X+L+X+W+Y+Z+G
12611 +---------------------------------------+
12612 old SP->| back chain to caller's caller |
12613 +---------------------------------------+
12615 The required alignment for AIX configurations is two words (i.e., 8
12619 V.4 stack frames look like:
12621 SP----> +---------------------------------------+
12622 | back chain to caller | 0
12623 +---------------------------------------+
12624 | caller's saved LR | 4
12625 +---------------------------------------+
12626 | Parameter save area (P) | 8
12627 +---------------------------------------+
12628 | Alloca space (A) | 8+P
12629 +---------------------------------------+
12630 | Varargs save area (V) | 8+P+A
12631 +---------------------------------------+
12632 | Local variable space (L) | 8+P+A+V
12633 +---------------------------------------+
12634 | Float/int conversion temporary (X) | 8+P+A+V+L
12635 +---------------------------------------+
12636 | Save area for AltiVec registers (W) | 8+P+A+V+L+X
12637 +---------------------------------------+
12638 | AltiVec alignment padding (Y) | 8+P+A+V+L+X+W
12639 +---------------------------------------+
12640 | Save area for VRSAVE register (Z) | 8+P+A+V+L+X+W+Y
12641 +---------------------------------------+
12642 | SPE: area for 64-bit GP registers |
12643 +---------------------------------------+
12644 | SPE alignment padding |
12645 +---------------------------------------+
12646 | saved CR (C) | 8+P+A+V+L+X+W+Y+Z
12647 +---------------------------------------+
12648 | Save area for GP registers (G) | 8+P+A+V+L+X+W+Y+Z+C
12649 +---------------------------------------+
12650 | Save area for FP registers (F) | 8+P+A+V+L+X+W+Y+Z+C+G
12651 +---------------------------------------+
12652 old SP->| back chain to caller's caller |
12653 +---------------------------------------+
12655 The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
12656 given. (But note below and in sysv4.h that we require only 8 and
12657 may round up the size of our stack frame anyways. The historical
12658 reason is early versions of powerpc-linux which didn't properly
12659 align the stack at program startup. A happy side-effect is that
12660 -mno-eabi libraries can be used with -meabi programs.)
12662 The EABI configuration defaults to the V.4 layout. However,
12663 the stack alignment requirements may differ. If -mno-eabi is not
12664 given, the required stack alignment is 8 bytes; if -mno-eabi is
12665 given, the required alignment is 16 bytes. (But see V.4 comment
12668 #ifndef ABI_STACK_BOUNDARY
12669 #define ABI_STACK_BOUNDARY STACK_BOUNDARY
12672 static rs6000_stack_t *
12673 rs6000_stack_info (void)
12675 static rs6000_stack_t info, zero_info;
12676 rs6000_stack_t *info_ptr = &info;
12677 int reg_size = TARGET_32BIT ? 4 : 8;
12680 HOST_WIDE_INT non_fixed_size;
12682 /* Zero all fields portably. */
12687 /* Cache value so we don't rescan instruction chain over and over. */
12688 if (cfun->machine->insn_chain_scanned_p == 0)
12689 cfun->machine->insn_chain_scanned_p
12690 = spe_func_has_64bit_regs_p () + 1;
12691 info_ptr->spe_64bit_regs_used = cfun->machine->insn_chain_scanned_p - 1;
12694 /* Select which calling sequence. */
12695 info_ptr->abi = DEFAULT_ABI;
12697 /* Calculate which registers need to be saved & save area size. */
12698 info_ptr->first_gp_reg_save = first_reg_to_save ();
12699 /* Assume that we will have to save RS6000_PIC_OFFSET_TABLE_REGNUM,
12700 even if it currently looks like we won't. */
12701 if (((TARGET_TOC && TARGET_MINIMAL_TOC)
12702 || (flag_pic == 1 && DEFAULT_ABI == ABI_V4)
12703 || (flag_pic && DEFAULT_ABI == ABI_DARWIN))
12704 && info_ptr->first_gp_reg_save > RS6000_PIC_OFFSET_TABLE_REGNUM)
12705 info_ptr->gp_size = reg_size * (32 - RS6000_PIC_OFFSET_TABLE_REGNUM);
12707 info_ptr->gp_size = reg_size * (32 - info_ptr->first_gp_reg_save);
12709 /* For the SPE, we have an additional upper 32-bits on each GPR.
12710 Ideally we should save the entire 64-bits only when the upper
12711 half is used in SIMD instructions. Since we only record
12712 registers live (not the size they are used in), this proves
12713 difficult because we'd have to traverse the instruction chain at
12714 the right time, taking reload into account. This is a real pain,
12715 so we opt to save the GPRs in 64-bits always if but one register
12716 gets used in 64-bits. Otherwise, all the registers in the frame
12717 get saved in 32-bits.
12719 So... since when we save all GPRs (except the SP) in 64-bits, the
12720 traditional GP save area will be empty. */
12721 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
12722 info_ptr->gp_size = 0;
12724 info_ptr->first_fp_reg_save = first_fp_reg_to_save ();
12725 info_ptr->fp_size = 8 * (64 - info_ptr->first_fp_reg_save);
12727 info_ptr->first_altivec_reg_save = first_altivec_reg_to_save ();
12728 info_ptr->altivec_size = 16 * (LAST_ALTIVEC_REGNO + 1
12729 - info_ptr->first_altivec_reg_save);
12731 /* Does this function call anything? */
12732 info_ptr->calls_p = (! current_function_is_leaf
12733 || cfun->machine->ra_needs_full_frame);
12735 /* Determine if we need to save the link register. */
12736 if (rs6000_ra_ever_killed ()
12737 || (DEFAULT_ABI == ABI_AIX
12738 && current_function_profile
12739 && !TARGET_PROFILE_KERNEL)
12740 #ifdef TARGET_RELOCATABLE
12741 || (TARGET_RELOCATABLE && (get_pool_size () != 0))
12743 || (info_ptr->first_fp_reg_save != 64
12744 && !FP_SAVE_INLINE (info_ptr->first_fp_reg_save))
12745 || info_ptr->first_altivec_reg_save <= LAST_ALTIVEC_REGNO
12746 || (DEFAULT_ABI == ABI_V4 && current_function_calls_alloca)
12747 || info_ptr->calls_p)
12749 info_ptr->lr_save_p = 1;
12750 regs_ever_live[LINK_REGISTER_REGNUM] = 1;
12753 /* Determine if we need to save the condition code registers. */
12754 if (regs_ever_live[CR2_REGNO]
12755 || regs_ever_live[CR3_REGNO]
12756 || regs_ever_live[CR4_REGNO])
12758 info_ptr->cr_save_p = 1;
12759 if (DEFAULT_ABI == ABI_V4)
12760 info_ptr->cr_size = reg_size;
12763 /* If the current function calls __builtin_eh_return, then we need
12764 to allocate stack space for registers that will hold data for
12765 the exception handler. */
12766 if (current_function_calls_eh_return)
12769 for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM; ++i)
12772 /* SPE saves EH registers in 64-bits. */
12773 ehrd_size = i * (TARGET_SPE_ABI
12774 && info_ptr->spe_64bit_regs_used != 0
12775 ? UNITS_PER_SPE_WORD : UNITS_PER_WORD);
12780 /* Determine various sizes. */
12781 info_ptr->reg_size = reg_size;
12782 info_ptr->fixed_size = RS6000_SAVE_AREA;
12783 info_ptr->vars_size = RS6000_ALIGN (get_frame_size (), 8);
12784 info_ptr->parm_size = RS6000_ALIGN (current_function_outgoing_args_size,
12785 TARGET_ALTIVEC ? 16 : 8);
12786 if (FRAME_GROWS_DOWNWARD)
12787 info_ptr->vars_size
12788 += RS6000_ALIGN (info_ptr->fixed_size + info_ptr->vars_size
12789 + info_ptr->parm_size,
12790 ABI_STACK_BOUNDARY / BITS_PER_UNIT)
12791 - (info_ptr->fixed_size + info_ptr->vars_size
12792 + info_ptr->parm_size);
12794 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
12795 info_ptr->spe_gp_size = 8 * (32 - info_ptr->first_gp_reg_save);
12797 info_ptr->spe_gp_size = 0;
12799 if (TARGET_ALTIVEC_ABI)
12800 info_ptr->vrsave_mask = compute_vrsave_mask ();
12802 info_ptr->vrsave_mask = 0;
12804 if (TARGET_ALTIVEC_VRSAVE && info_ptr->vrsave_mask)
12805 info_ptr->vrsave_size = 4;
12807 info_ptr->vrsave_size = 0;
12809 compute_save_world_info (info_ptr);
12811 /* Calculate the offsets. */
12812 switch (DEFAULT_ABI)
12816 gcc_unreachable ();
12820 info_ptr->fp_save_offset = - info_ptr->fp_size;
12821 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
12823 if (TARGET_ALTIVEC_ABI)
12825 info_ptr->vrsave_save_offset
12826 = info_ptr->gp_save_offset - info_ptr->vrsave_size;
12828 /* Align stack so vector save area is on a quadword boundary. */
12829 if (info_ptr->altivec_size != 0)
12830 info_ptr->altivec_padding_size
12831 = 16 - (-info_ptr->vrsave_save_offset % 16);
12833 info_ptr->altivec_padding_size = 0;
12835 info_ptr->altivec_save_offset
12836 = info_ptr->vrsave_save_offset
12837 - info_ptr->altivec_padding_size
12838 - info_ptr->altivec_size;
12840 /* Adjust for AltiVec case. */
12841 info_ptr->ehrd_offset = info_ptr->altivec_save_offset - ehrd_size;
12844 info_ptr->ehrd_offset = info_ptr->gp_save_offset - ehrd_size;
12845 info_ptr->cr_save_offset = reg_size; /* first word when 64-bit. */
12846 info_ptr->lr_save_offset = 2*reg_size;
12850 info_ptr->fp_save_offset = - info_ptr->fp_size;
12851 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
12852 info_ptr->cr_save_offset = info_ptr->gp_save_offset - info_ptr->cr_size;
12854 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
12856 /* Align stack so SPE GPR save area is aligned on a
12857 double-word boundary. */
12858 if (info_ptr->spe_gp_size != 0)
12859 info_ptr->spe_padding_size
12860 = 8 - (-info_ptr->cr_save_offset % 8);
12862 info_ptr->spe_padding_size = 0;
12864 info_ptr->spe_gp_save_offset
12865 = info_ptr->cr_save_offset
12866 - info_ptr->spe_padding_size
12867 - info_ptr->spe_gp_size;
12869 /* Adjust for SPE case. */
12870 info_ptr->toc_save_offset
12871 = info_ptr->spe_gp_save_offset - info_ptr->toc_size;
12873 else if (TARGET_ALTIVEC_ABI)
12875 info_ptr->vrsave_save_offset
12876 = info_ptr->cr_save_offset - info_ptr->vrsave_size;
12878 /* Align stack so vector save area is on a quadword boundary. */
12879 if (info_ptr->altivec_size != 0)
12880 info_ptr->altivec_padding_size
12881 = 16 - (-info_ptr->vrsave_save_offset % 16);
12883 info_ptr->altivec_padding_size = 0;
12885 info_ptr->altivec_save_offset
12886 = info_ptr->vrsave_save_offset
12887 - info_ptr->altivec_padding_size
12888 - info_ptr->altivec_size;
12890 /* Adjust for AltiVec case. */
12891 info_ptr->toc_save_offset
12892 = info_ptr->altivec_save_offset - info_ptr->toc_size;
12895 info_ptr->toc_save_offset = info_ptr->cr_save_offset - info_ptr->toc_size;
12896 info_ptr->ehrd_offset = info_ptr->toc_save_offset - ehrd_size;
12897 info_ptr->lr_save_offset = reg_size;
12901 save_align = (TARGET_ALTIVEC_ABI || DEFAULT_ABI == ABI_DARWIN) ? 16 : 8;
12902 info_ptr->save_size = RS6000_ALIGN (info_ptr->fp_size
12903 + info_ptr->gp_size
12904 + info_ptr->altivec_size
12905 + info_ptr->altivec_padding_size
12906 + info_ptr->spe_gp_size
12907 + info_ptr->spe_padding_size
12909 + info_ptr->cr_size
12910 + info_ptr->lr_size
12911 + info_ptr->vrsave_size
12912 + info_ptr->toc_size,
12915 non_fixed_size = (info_ptr->vars_size
12916 + info_ptr->parm_size
12917 + info_ptr->save_size);
12919 info_ptr->total_size = RS6000_ALIGN (non_fixed_size + info_ptr->fixed_size,
12920 ABI_STACK_BOUNDARY / BITS_PER_UNIT);
12922 /* Determine if we need to allocate any stack frame:
12924 For AIX we need to push the stack if a frame pointer is needed
12925 (because the stack might be dynamically adjusted), if we are
12926 debugging, if we make calls, or if the sum of fp_save, gp_save,
12927 and local variables are more than the space needed to save all
12928 non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
12929 + 18*8 = 288 (GPR13 reserved).
12931 For V.4 we don't have the stack cushion that AIX uses, but assume
12932 that the debugger can handle stackless frames. */
12934 if (info_ptr->calls_p)
12935 info_ptr->push_p = 1;
12937 else if (DEFAULT_ABI == ABI_V4)
12938 info_ptr->push_p = non_fixed_size != 0;
12940 else if (frame_pointer_needed)
12941 info_ptr->push_p = 1;
12943 else if (TARGET_XCOFF && write_symbols != NO_DEBUG)
12944 info_ptr->push_p = 1;
12947 info_ptr->push_p = non_fixed_size > (TARGET_32BIT ? 220 : 288);
12949 /* Zero offsets if we're not saving those registers. */
12950 if (info_ptr->fp_size == 0)
12951 info_ptr->fp_save_offset = 0;
12953 if (info_ptr->gp_size == 0)
12954 info_ptr->gp_save_offset = 0;
12956 if (! TARGET_ALTIVEC_ABI || info_ptr->altivec_size == 0)
12957 info_ptr->altivec_save_offset = 0;
12959 if (! TARGET_ALTIVEC_ABI || info_ptr->vrsave_mask == 0)
12960 info_ptr->vrsave_save_offset = 0;
12962 if (! TARGET_SPE_ABI
12963 || info_ptr->spe_64bit_regs_used == 0
12964 || info_ptr->spe_gp_size == 0)
12965 info_ptr->spe_gp_save_offset = 0;
12967 if (! info_ptr->lr_save_p)
12968 info_ptr->lr_save_offset = 0;
12970 if (! info_ptr->cr_save_p)
12971 info_ptr->cr_save_offset = 0;
12973 if (! info_ptr->toc_save_p)
12974 info_ptr->toc_save_offset = 0;
12979 /* Return true if the current function uses any GPRs in 64-bit SIMD
12983 spe_func_has_64bit_regs_p (void)
12987 /* Functions that save and restore all the call-saved registers will
12988 need to save/restore the registers in 64-bits. */
12989 if (current_function_calls_eh_return
12990 || current_function_calls_setjmp
12991 || current_function_has_nonlocal_goto)
12994 insns = get_insns ();
12996 for (insn = NEXT_INSN (insns); insn != NULL_RTX; insn = NEXT_INSN (insn))
13002 /* FIXME: This should be implemented with attributes...
13004 (set_attr "spe64" "true")....then,
13005 if (get_spe64(insn)) return true;
13007 It's the only reliable way to do the stuff below. */
13009 i = PATTERN (insn);
13010 if (GET_CODE (i) == SET)
13012 enum machine_mode mode = GET_MODE (SET_SRC (i));
13014 if (SPE_VECTOR_MODE (mode))
13016 if (TARGET_E500_DOUBLE && mode == DFmode)
13026 debug_stack_info (rs6000_stack_t *info)
13028 const char *abi_string;
13031 info = rs6000_stack_info ();
13033 fprintf (stderr, "\nStack information for function %s:\n",
13034 ((current_function_decl && DECL_NAME (current_function_decl))
13035 ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
13040 default: abi_string = "Unknown"; break;
13041 case ABI_NONE: abi_string = "NONE"; break;
13042 case ABI_AIX: abi_string = "AIX"; break;
13043 case ABI_DARWIN: abi_string = "Darwin"; break;
13044 case ABI_V4: abi_string = "V.4"; break;
13047 fprintf (stderr, "\tABI = %5s\n", abi_string);
13049 if (TARGET_ALTIVEC_ABI)
13050 fprintf (stderr, "\tALTIVEC ABI extensions enabled.\n");
13052 if (TARGET_SPE_ABI)
13053 fprintf (stderr, "\tSPE ABI extensions enabled.\n");
13055 if (info->first_gp_reg_save != 32)
13056 fprintf (stderr, "\tfirst_gp_reg_save = %5d\n", info->first_gp_reg_save);
13058 if (info->first_fp_reg_save != 64)
13059 fprintf (stderr, "\tfirst_fp_reg_save = %5d\n", info->first_fp_reg_save);
13061 if (info->first_altivec_reg_save <= LAST_ALTIVEC_REGNO)
13062 fprintf (stderr, "\tfirst_altivec_reg_save = %5d\n",
13063 info->first_altivec_reg_save);
13065 if (info->lr_save_p)
13066 fprintf (stderr, "\tlr_save_p = %5d\n", info->lr_save_p);
13068 if (info->cr_save_p)
13069 fprintf (stderr, "\tcr_save_p = %5d\n", info->cr_save_p);
13071 if (info->toc_save_p)
13072 fprintf (stderr, "\ttoc_save_p = %5d\n", info->toc_save_p);
13074 if (info->vrsave_mask)
13075 fprintf (stderr, "\tvrsave_mask = 0x%x\n", info->vrsave_mask);
13078 fprintf (stderr, "\tpush_p = %5d\n", info->push_p);
13081 fprintf (stderr, "\tcalls_p = %5d\n", info->calls_p);
13083 if (info->gp_save_offset)
13084 fprintf (stderr, "\tgp_save_offset = %5d\n", info->gp_save_offset);
13086 if (info->fp_save_offset)
13087 fprintf (stderr, "\tfp_save_offset = %5d\n", info->fp_save_offset);
13089 if (info->altivec_save_offset)
13090 fprintf (stderr, "\taltivec_save_offset = %5d\n",
13091 info->altivec_save_offset);
13093 if (info->spe_gp_save_offset)
13094 fprintf (stderr, "\tspe_gp_save_offset = %5d\n",
13095 info->spe_gp_save_offset);
13097 if (info->vrsave_save_offset)
13098 fprintf (stderr, "\tvrsave_save_offset = %5d\n",
13099 info->vrsave_save_offset);
13101 if (info->lr_save_offset)
13102 fprintf (stderr, "\tlr_save_offset = %5d\n", info->lr_save_offset);
13104 if (info->cr_save_offset)
13105 fprintf (stderr, "\tcr_save_offset = %5d\n", info->cr_save_offset);
13107 if (info->toc_save_offset)
13108 fprintf (stderr, "\ttoc_save_offset = %5d\n", info->toc_save_offset);
13110 if (info->varargs_save_offset)
13111 fprintf (stderr, "\tvarargs_save_offset = %5d\n", info->varargs_save_offset);
13113 if (info->total_size)
13114 fprintf (stderr, "\ttotal_size = "HOST_WIDE_INT_PRINT_DEC"\n",
13117 if (info->vars_size)
13118 fprintf (stderr, "\tvars_size = "HOST_WIDE_INT_PRINT_DEC"\n",
13121 if (info->parm_size)
13122 fprintf (stderr, "\tparm_size = %5d\n", info->parm_size);
13124 if (info->fixed_size)
13125 fprintf (stderr, "\tfixed_size = %5d\n", info->fixed_size);
13128 fprintf (stderr, "\tgp_size = %5d\n", info->gp_size);
13130 if (info->spe_gp_size)
13131 fprintf (stderr, "\tspe_gp_size = %5d\n", info->spe_gp_size);
13134 fprintf (stderr, "\tfp_size = %5d\n", info->fp_size);
13136 if (info->altivec_size)
13137 fprintf (stderr, "\taltivec_size = %5d\n", info->altivec_size);
13139 if (info->vrsave_size)
13140 fprintf (stderr, "\tvrsave_size = %5d\n", info->vrsave_size);
13142 if (info->altivec_padding_size)
13143 fprintf (stderr, "\taltivec_padding_size= %5d\n",
13144 info->altivec_padding_size);
13146 if (info->spe_padding_size)
13147 fprintf (stderr, "\tspe_padding_size = %5d\n",
13148 info->spe_padding_size);
13151 fprintf (stderr, "\tlr_size = %5d\n", info->lr_size);
13154 fprintf (stderr, "\tcr_size = %5d\n", info->cr_size);
13156 if (info->toc_size)
13157 fprintf (stderr, "\ttoc_size = %5d\n", info->toc_size);
13159 if (info->save_size)
13160 fprintf (stderr, "\tsave_size = %5d\n", info->save_size);
13162 if (info->reg_size != 4)
13163 fprintf (stderr, "\treg_size = %5d\n", info->reg_size);
13165 fprintf (stderr, "\n");
13169 rs6000_return_addr (int count, rtx frame)
13171 /* Currently we don't optimize very well between prolog and body
13172 code and for PIC code the code can be actually quite bad, so
13173 don't try to be too clever here. */
13174 if (count != 0 || (DEFAULT_ABI != ABI_AIX && flag_pic))
13176 cfun->machine->ra_needs_full_frame = 1;
13183 plus_constant (copy_to_reg
13184 (gen_rtx_MEM (Pmode,
13185 memory_address (Pmode, frame))),
13186 RETURN_ADDRESS_OFFSET)));
13189 cfun->machine->ra_need_lr = 1;
13190 return get_hard_reg_initial_val (Pmode, LINK_REGISTER_REGNUM);
13193 /* Say whether a function is a candidate for sibcall handling or not.
13194 We do not allow indirect calls to be optimized into sibling calls.
13195 Also, we can't do it if there are any vector parameters; there's
13196 nowhere to put the VRsave code so it works; note that functions with
13197 vector parameters are required to have a prototype, so the argument
13198 type info must be available here. (The tail recursion case can work
13199 with vector parameters, but there's no way to distinguish here.) */
13201 rs6000_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
13206 if (TARGET_ALTIVEC_VRSAVE)
13208 for (type = TYPE_ARG_TYPES (TREE_TYPE (decl));
13209 type; type = TREE_CHAIN (type))
13211 if (TREE_CODE (TREE_VALUE (type)) == VECTOR_TYPE)
13215 if (DEFAULT_ABI == ABI_DARWIN
13216 || (*targetm.binds_local_p) (decl))
13218 tree attr_list = TYPE_ATTRIBUTES (TREE_TYPE (decl));
13220 if (!lookup_attribute ("longcall", attr_list)
13221 || lookup_attribute ("shortcall", attr_list))
13228 /* NULL if INSN insn is valid within a low-overhead loop.
13229 Otherwise return why doloop cannot be applied.
13230 PowerPC uses the COUNT register for branch on table instructions. */
13232 static const char *
13233 rs6000_invalid_within_doloop (rtx insn)
13236 return "Function call in the loop.";
13239 && (GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
13240 || GET_CODE (PATTERN (insn)) == ADDR_VEC))
13241 return "Computed branch in the loop.";
13247 rs6000_ra_ever_killed (void)
13253 if (current_function_is_thunk)
13256 /* regs_ever_live has LR marked as used if any sibcalls are present,
13257 but this should not force saving and restoring in the
13258 pro/epilogue. Likewise, reg_set_between_p thinks a sibcall
13259 clobbers LR, so that is inappropriate. */
13261 /* Also, the prologue can generate a store into LR that
13262 doesn't really count, like this:
13265 bcl to set PIC register
13269 When we're called from the epilogue, we need to avoid counting
13270 this as a store. */
13272 push_topmost_sequence ();
13273 top = get_insns ();
13274 pop_topmost_sequence ();
13275 reg = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
13277 for (insn = NEXT_INSN (top); insn != NULL_RTX; insn = NEXT_INSN (insn))
13281 if (FIND_REG_INC_NOTE (insn, reg))
13283 else if (GET_CODE (insn) == CALL_INSN
13284 && !SIBLING_CALL_P (insn))
13286 else if (set_of (reg, insn) != NULL_RTX
13287 && !prologue_epilogue_contains (insn))
13294 /* Add a REG_MAYBE_DEAD note to the insn. */
13296 rs6000_maybe_dead (rtx insn)
13298 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD,
13303 /* Emit instructions needed to load the TOC register.
13304 This is only needed when TARGET_TOC, TARGET_MINIMAL_TOC, and there is
13305 a constant pool; or for SVR4 -fpic. */
13308 rs6000_emit_load_toc_table (int fromprolog)
13311 dest = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
13313 if (TARGET_ELF && TARGET_SECURE_PLT && DEFAULT_ABI != ABI_AIX && flag_pic)
13316 rtx lab, tmp1, tmp2, got, tempLR;
13318 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
13319 lab = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
13321 got = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
13323 got = rs6000_got_sym ();
13324 tmp1 = tmp2 = dest;
13327 tmp1 = gen_reg_rtx (Pmode);
13328 tmp2 = gen_reg_rtx (Pmode);
13330 tempLR = (fromprolog
13331 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
13332 : gen_reg_rtx (Pmode));
13333 insn = emit_insn (gen_load_toc_v4_PIC_1 (tempLR, lab));
13335 rs6000_maybe_dead (insn);
13336 insn = emit_move_insn (tmp1, tempLR);
13338 rs6000_maybe_dead (insn);
13339 insn = emit_insn (gen_load_toc_v4_PIC_3b (tmp2, tmp1, got, lab));
13341 rs6000_maybe_dead (insn);
13342 insn = emit_insn (gen_load_toc_v4_PIC_3c (dest, tmp2, got, lab));
13344 rs6000_maybe_dead (insn);
13346 else if (TARGET_ELF && DEFAULT_ABI == ABI_V4 && flag_pic == 1)
13348 rtx tempLR = (fromprolog
13349 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
13350 : gen_reg_rtx (Pmode));
13352 insn = emit_insn (gen_load_toc_v4_pic_si (tempLR));
13354 rs6000_maybe_dead (insn);
13355 insn = emit_move_insn (dest, tempLR);
13357 rs6000_maybe_dead (insn);
13359 else if (TARGET_ELF && DEFAULT_ABI != ABI_AIX && flag_pic == 2)
13362 rtx tempLR = (fromprolog
13363 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
13364 : gen_reg_rtx (Pmode));
13365 rtx temp0 = (fromprolog
13366 ? gen_rtx_REG (Pmode, 0)
13367 : gen_reg_rtx (Pmode));
13373 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
13374 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
13376 ASM_GENERATE_INTERNAL_LABEL (buf, "LCL", rs6000_pic_labelno);
13377 symL = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
13379 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1 (tempLR,
13381 rs6000_maybe_dead (emit_move_insn (dest, tempLR));
13382 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_2 (temp0, dest,
13390 tocsym = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
13391 emit_insn (gen_load_toc_v4_PIC_1b (tempLR, tocsym));
13392 emit_move_insn (dest, tempLR);
13393 emit_move_insn (temp0, gen_rtx_MEM (Pmode, dest));
13395 insn = emit_insn (gen_addsi3 (dest, temp0, dest));
13397 rs6000_maybe_dead (insn);
13399 else if (TARGET_ELF && !TARGET_AIX && flag_pic == 0 && TARGET_MINIMAL_TOC)
13401 /* This is for AIX code running in non-PIC ELF32. */
13404 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
13405 realsym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
13407 insn = emit_insn (gen_elf_high (dest, realsym));
13409 rs6000_maybe_dead (insn);
13410 insn = emit_insn (gen_elf_low (dest, dest, realsym));
13412 rs6000_maybe_dead (insn);
13416 gcc_assert (DEFAULT_ABI == ABI_AIX);
13419 insn = emit_insn (gen_load_toc_aix_si (dest));
13421 insn = emit_insn (gen_load_toc_aix_di (dest));
13423 rs6000_maybe_dead (insn);
13427 /* Emit instructions to restore the link register after determining where
13428 its value has been stored. */
13431 rs6000_emit_eh_reg_restore (rtx source, rtx scratch)
13433 rs6000_stack_t *info = rs6000_stack_info ();
13436 operands[0] = source;
13437 operands[1] = scratch;
13439 if (info->lr_save_p)
13441 rtx frame_rtx = stack_pointer_rtx;
13442 HOST_WIDE_INT sp_offset = 0;
13445 if (frame_pointer_needed
13446 || current_function_calls_alloca
13447 || info->total_size > 32767)
13449 tmp = gen_frame_mem (Pmode, frame_rtx);
13450 emit_move_insn (operands[1], tmp);
13451 frame_rtx = operands[1];
13453 else if (info->push_p)
13454 sp_offset = info->total_size;
13456 tmp = plus_constant (frame_rtx, info->lr_save_offset + sp_offset);
13457 tmp = gen_frame_mem (Pmode, tmp);
13458 emit_move_insn (tmp, operands[0]);
13461 emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM), operands[0]);
13464 static GTY(()) int set = -1;
13467 get_TOC_alias_set (void)
13470 set = new_alias_set ();
13474 /* This returns nonzero if the current function uses the TOC. This is
13475 determined by the presence of (use (unspec ... UNSPEC_TOC)), which
13476 is generated by the ABI_V4 load_toc_* patterns. */
13483 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
13486 rtx pat = PATTERN (insn);
13489 if (GET_CODE (pat) == PARALLEL)
13490 for (i = 0; i < XVECLEN (pat, 0); i++)
13492 rtx sub = XVECEXP (pat, 0, i);
13493 if (GET_CODE (sub) == USE)
13495 sub = XEXP (sub, 0);
13496 if (GET_CODE (sub) == UNSPEC
13497 && XINT (sub, 1) == UNSPEC_TOC)
13507 create_TOC_reference (rtx symbol)
13509 return gen_rtx_PLUS (Pmode,
13510 gen_rtx_REG (Pmode, TOC_REGISTER),
13511 gen_rtx_CONST (Pmode,
13512 gen_rtx_MINUS (Pmode, symbol,
13513 gen_rtx_SYMBOL_REF (Pmode, toc_label_name))));
13516 /* If _Unwind_* has been called from within the same module,
13517 toc register is not guaranteed to be saved to 40(1) on function
13518 entry. Save it there in that case. */
13521 rs6000_aix_emit_builtin_unwind_init (void)
13524 rtx stack_top = gen_reg_rtx (Pmode);
13525 rtx opcode_addr = gen_reg_rtx (Pmode);
13526 rtx opcode = gen_reg_rtx (SImode);
13527 rtx tocompare = gen_reg_rtx (SImode);
13528 rtx no_toc_save_needed = gen_label_rtx ();
13530 mem = gen_frame_mem (Pmode, hard_frame_pointer_rtx);
13531 emit_move_insn (stack_top, mem);
13533 mem = gen_frame_mem (Pmode,
13534 gen_rtx_PLUS (Pmode, stack_top,
13535 GEN_INT (2 * GET_MODE_SIZE (Pmode))));
13536 emit_move_insn (opcode_addr, mem);
13537 emit_move_insn (opcode, gen_rtx_MEM (SImode, opcode_addr));
13538 emit_move_insn (tocompare, gen_int_mode (TARGET_32BIT ? 0x80410014
13539 : 0xE8410028, SImode));
13541 do_compare_rtx_and_jump (opcode, tocompare, EQ, 1,
13542 SImode, NULL_RTX, NULL_RTX,
13543 no_toc_save_needed);
13545 mem = gen_frame_mem (Pmode,
13546 gen_rtx_PLUS (Pmode, stack_top,
13547 GEN_INT (5 * GET_MODE_SIZE (Pmode))));
13548 emit_move_insn (mem, gen_rtx_REG (Pmode, 2));
13549 emit_label (no_toc_save_needed);
13552 /* This ties together stack memory (MEM with an alias set of frame_alias_set)
13553 and the change to the stack pointer. */
13556 rs6000_emit_stack_tie (void)
13558 rtx mem = gen_frame_mem (BLKmode,
13559 gen_rtx_REG (Pmode, STACK_POINTER_REGNUM));
13561 emit_insn (gen_stack_tie (mem));
13564 /* Emit the correct code for allocating stack space, as insns.
13565 If COPY_R12, make sure a copy of the old frame is left in r12.
13566 The generated code may use hard register 0 as a temporary. */
13569 rs6000_emit_allocate_stack (HOST_WIDE_INT size, int copy_r12)
13572 rtx stack_reg = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
13573 rtx tmp_reg = gen_rtx_REG (Pmode, 0);
13574 rtx todec = gen_int_mode (-size, Pmode);
13576 if (INTVAL (todec) != -size)
13578 warning (0, "stack frame too large");
13579 emit_insn (gen_trap ());
13583 if (current_function_limit_stack)
13585 if (REG_P (stack_limit_rtx)
13586 && REGNO (stack_limit_rtx) > 1
13587 && REGNO (stack_limit_rtx) <= 31)
13589 emit_insn (TARGET_32BIT
13590 ? gen_addsi3 (tmp_reg,
13593 : gen_adddi3 (tmp_reg,
13597 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
13600 else if (GET_CODE (stack_limit_rtx) == SYMBOL_REF
13602 && DEFAULT_ABI == ABI_V4)
13604 rtx toload = gen_rtx_CONST (VOIDmode,
13605 gen_rtx_PLUS (Pmode,
13609 emit_insn (gen_elf_high (tmp_reg, toload));
13610 emit_insn (gen_elf_low (tmp_reg, tmp_reg, toload));
13611 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
13615 warning (0, "stack limit expression is not supported");
13618 if (copy_r12 || ! TARGET_UPDATE)
13619 emit_move_insn (gen_rtx_REG (Pmode, 12), stack_reg);
13625 /* Need a note here so that try_split doesn't get confused. */
13626 if (get_last_insn () == NULL_RTX)
13627 emit_note (NOTE_INSN_DELETED);
13628 insn = emit_move_insn (tmp_reg, todec);
13629 try_split (PATTERN (insn), insn, 0);
13633 insn = emit_insn (TARGET_32BIT
13634 ? gen_movsi_update (stack_reg, stack_reg,
13636 : gen_movdi_di_update (stack_reg, stack_reg,
13637 todec, stack_reg));
13641 insn = emit_insn (TARGET_32BIT
13642 ? gen_addsi3 (stack_reg, stack_reg, todec)
13643 : gen_adddi3 (stack_reg, stack_reg, todec));
13644 emit_move_insn (gen_rtx_MEM (Pmode, stack_reg),
13645 gen_rtx_REG (Pmode, 12));
13648 RTX_FRAME_RELATED_P (insn) = 1;
13650 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
13651 gen_rtx_SET (VOIDmode, stack_reg,
13652 gen_rtx_PLUS (Pmode, stack_reg,
13657 /* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
13658 with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
13659 is not NULL. It would be nice if dwarf2out_frame_debug_expr could
13660 deduce these equivalences by itself so it wasn't necessary to hold
13661 its hand so much. */
13664 rs6000_frame_related (rtx insn, rtx reg, HOST_WIDE_INT val,
13665 rtx reg2, rtx rreg)
13669 /* copy_rtx will not make unique copies of registers, so we need to
13670 ensure we don't have unwanted sharing here. */
13672 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
13675 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
13677 real = copy_rtx (PATTERN (insn));
13679 if (reg2 != NULL_RTX)
13680 real = replace_rtx (real, reg2, rreg);
13682 real = replace_rtx (real, reg,
13683 gen_rtx_PLUS (Pmode, gen_rtx_REG (Pmode,
13684 STACK_POINTER_REGNUM),
13687 /* We expect that 'real' is either a SET or a PARALLEL containing
13688 SETs (and possibly other stuff). In a PARALLEL, all the SETs
13689 are important so they all have to be marked RTX_FRAME_RELATED_P. */
13691 if (GET_CODE (real) == SET)
13695 temp = simplify_rtx (SET_SRC (set));
13697 SET_SRC (set) = temp;
13698 temp = simplify_rtx (SET_DEST (set));
13700 SET_DEST (set) = temp;
13701 if (GET_CODE (SET_DEST (set)) == MEM)
13703 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
13705 XEXP (SET_DEST (set), 0) = temp;
13712 gcc_assert (GET_CODE (real) == PARALLEL);
13713 for (i = 0; i < XVECLEN (real, 0); i++)
13714 if (GET_CODE (XVECEXP (real, 0, i)) == SET)
13716 rtx set = XVECEXP (real, 0, i);
13718 temp = simplify_rtx (SET_SRC (set));
13720 SET_SRC (set) = temp;
13721 temp = simplify_rtx (SET_DEST (set));
13723 SET_DEST (set) = temp;
13724 if (GET_CODE (SET_DEST (set)) == MEM)
13726 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
13728 XEXP (SET_DEST (set), 0) = temp;
13730 RTX_FRAME_RELATED_P (set) = 1;
13735 real = spe_synthesize_frame_save (real);
13737 RTX_FRAME_RELATED_P (insn) = 1;
13738 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
13743 /* Given an SPE frame note, return a PARALLEL of SETs with the
13744 original note, plus a synthetic register save. */
13747 spe_synthesize_frame_save (rtx real)
13749 rtx synth, offset, reg, real2;
13751 if (GET_CODE (real) != SET
13752 || GET_MODE (SET_SRC (real)) != V2SImode)
13755 /* For the SPE, registers saved in 64-bits, get a PARALLEL for their
13756 frame related note. The parallel contains a set of the register
13757 being saved, and another set to a synthetic register (n+1200).
13758 This is so we can differentiate between 64-bit and 32-bit saves.
13759 Words cannot describe this nastiness. */
13761 gcc_assert (GET_CODE (SET_DEST (real)) == MEM
13762 && GET_CODE (XEXP (SET_DEST (real), 0)) == PLUS
13763 && GET_CODE (SET_SRC (real)) == REG);
13766 (set (mem (plus (reg x) (const y)))
13769 (set (mem (plus (reg x) (const y+4)))
13773 real2 = copy_rtx (real);
13774 PUT_MODE (SET_DEST (real2), SImode);
13775 reg = SET_SRC (real2);
13776 real2 = replace_rtx (real2, reg, gen_rtx_REG (SImode, REGNO (reg)));
13777 synth = copy_rtx (real2);
13779 if (BYTES_BIG_ENDIAN)
13781 offset = XEXP (XEXP (SET_DEST (real2), 0), 1);
13782 real2 = replace_rtx (real2, offset, GEN_INT (INTVAL (offset) + 4));
13785 reg = SET_SRC (synth);
13787 synth = replace_rtx (synth, reg,
13788 gen_rtx_REG (SImode, REGNO (reg) + 1200));
13790 offset = XEXP (XEXP (SET_DEST (synth), 0), 1);
13791 synth = replace_rtx (synth, offset,
13792 GEN_INT (INTVAL (offset)
13793 + (BYTES_BIG_ENDIAN ? 0 : 4)));
13795 RTX_FRAME_RELATED_P (synth) = 1;
13796 RTX_FRAME_RELATED_P (real2) = 1;
13797 if (BYTES_BIG_ENDIAN)
13798 real = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, synth, real2));
13800 real = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, real2, synth));
13805 /* Returns an insn that has a vrsave set operation with the
13806 appropriate CLOBBERs. */
13809 generate_set_vrsave (rtx reg, rs6000_stack_t *info, int epiloguep)
13812 rtx insn, clobs[TOTAL_ALTIVEC_REGS + 1];
13813 rtx vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
13816 = gen_rtx_SET (VOIDmode,
13818 gen_rtx_UNSPEC_VOLATILE (SImode,
13819 gen_rtvec (2, reg, vrsave),
13820 UNSPECV_SET_VRSAVE));
13824 /* We need to clobber the registers in the mask so the scheduler
13825 does not move sets to VRSAVE before sets of AltiVec registers.
13827 However, if the function receives nonlocal gotos, reload will set
13828 all call saved registers live. We will end up with:
13830 (set (reg 999) (mem))
13831 (parallel [ (set (reg vrsave) (unspec blah))
13832 (clobber (reg 999))])
13834 The clobber will cause the store into reg 999 to be dead, and
13835 flow will attempt to delete an epilogue insn. In this case, we
13836 need an unspec use/set of the register. */
13838 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
13839 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
13841 if (!epiloguep || call_used_regs [i])
13842 clobs[nclobs++] = gen_rtx_CLOBBER (VOIDmode,
13843 gen_rtx_REG (V4SImode, i));
13846 rtx reg = gen_rtx_REG (V4SImode, i);
13849 = gen_rtx_SET (VOIDmode,
13851 gen_rtx_UNSPEC (V4SImode,
13852 gen_rtvec (1, reg), 27));
13856 insn = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nclobs));
13858 for (i = 0; i < nclobs; ++i)
13859 XVECEXP (insn, 0, i) = clobs[i];
13864 /* Save a register into the frame, and emit RTX_FRAME_RELATED_P notes.
13865 Save REGNO into [FRAME_REG + OFFSET] in mode MODE. */
13868 emit_frame_save (rtx frame_reg, rtx frame_ptr, enum machine_mode mode,
13869 unsigned int regno, int offset, HOST_WIDE_INT total_size)
13871 rtx reg, offset_rtx, insn, mem, addr, int_rtx;
13872 rtx replacea, replaceb;
13874 int_rtx = GEN_INT (offset);
13876 /* Some cases that need register indexed addressing. */
13877 if ((TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
13878 || (TARGET_E500_DOUBLE && mode == DFmode)
13880 && SPE_VECTOR_MODE (mode)
13881 && !SPE_CONST_OFFSET_OK (offset)))
13883 /* Whomever calls us must make sure r11 is available in the
13884 flow path of instructions in the prologue. */
13885 offset_rtx = gen_rtx_REG (Pmode, 11);
13886 emit_move_insn (offset_rtx, int_rtx);
13888 replacea = offset_rtx;
13889 replaceb = int_rtx;
13893 offset_rtx = int_rtx;
13894 replacea = NULL_RTX;
13895 replaceb = NULL_RTX;
13898 reg = gen_rtx_REG (mode, regno);
13899 addr = gen_rtx_PLUS (Pmode, frame_reg, offset_rtx);
13900 mem = gen_frame_mem (mode, addr);
13902 insn = emit_move_insn (mem, reg);
13904 rs6000_frame_related (insn, frame_ptr, total_size, replacea, replaceb);
13907 /* Emit an offset memory reference suitable for a frame store, while
13908 converting to a valid addressing mode. */
13911 gen_frame_mem_offset (enum machine_mode mode, rtx reg, int offset)
13913 rtx int_rtx, offset_rtx;
13915 int_rtx = GEN_INT (offset);
13917 if ((TARGET_SPE_ABI && SPE_VECTOR_MODE (mode))
13918 || (TARGET_E500_DOUBLE && mode == DFmode))
13920 offset_rtx = gen_rtx_REG (Pmode, FIXED_SCRATCH);
13921 emit_move_insn (offset_rtx, int_rtx);
13924 offset_rtx = int_rtx;
13926 return gen_frame_mem (mode, gen_rtx_PLUS (Pmode, reg, offset_rtx));
13929 /* Look for user-defined global regs. We should not save and restore these,
13930 and cannot use stmw/lmw if there are any in its range. */
13933 no_global_regs_above (int first_greg)
13936 for (i = 0; i < 32 - first_greg; i++)
13937 if (global_regs[first_greg + i])
13942 #ifndef TARGET_FIX_AND_CONTINUE
13943 #define TARGET_FIX_AND_CONTINUE 0
13946 /* Emit function prologue as insns. */
13949 rs6000_emit_prologue (void)
13951 rs6000_stack_t *info = rs6000_stack_info ();
13952 enum machine_mode reg_mode = Pmode;
13953 int reg_size = TARGET_32BIT ? 4 : 8;
13954 rtx sp_reg_rtx = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
13955 rtx frame_ptr_rtx = gen_rtx_REG (Pmode, 12);
13956 rtx frame_reg_rtx = sp_reg_rtx;
13957 rtx cr_save_rtx = NULL_RTX;
13959 int saving_FPRs_inline;
13960 int using_store_multiple;
13961 HOST_WIDE_INT sp_offset = 0;
13963 if (TARGET_FIX_AND_CONTINUE)
13965 /* gdb on darwin arranges to forward a function from the old
13966 address by modifying the first 5 instructions of the function
13967 to branch to the overriding function. This is necessary to
13968 permit function pointers that point to the old function to
13969 actually forward to the new function. */
13970 emit_insn (gen_nop ());
13971 emit_insn (gen_nop ());
13972 emit_insn (gen_nop ());
13973 emit_insn (gen_nop ());
13974 emit_insn (gen_nop ());
13977 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
13979 reg_mode = V2SImode;
13983 using_store_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
13984 && (!TARGET_SPE_ABI
13985 || info->spe_64bit_regs_used == 0)
13986 && info->first_gp_reg_save < 31
13987 && no_global_regs_above (info->first_gp_reg_save));
13988 saving_FPRs_inline = (info->first_fp_reg_save == 64
13989 || FP_SAVE_INLINE (info->first_fp_reg_save)
13990 || current_function_calls_eh_return
13991 || cfun->machine->ra_need_lr);
13993 /* For V.4, update stack before we do any saving and set back pointer. */
13995 && (DEFAULT_ABI == ABI_V4
13996 || current_function_calls_eh_return))
13998 if (info->total_size < 32767)
13999 sp_offset = info->total_size;
14001 frame_reg_rtx = frame_ptr_rtx;
14002 rs6000_emit_allocate_stack (info->total_size,
14003 (frame_reg_rtx != sp_reg_rtx
14004 && (info->cr_save_p
14006 || info->first_fp_reg_save < 64
14007 || info->first_gp_reg_save < 32
14009 if (frame_reg_rtx != sp_reg_rtx)
14010 rs6000_emit_stack_tie ();
14013 /* Handle world saves specially here. */
14014 if (WORLD_SAVE_P (info))
14020 /* save_world expects lr in r0. */
14021 if (info->lr_save_p)
14023 insn = emit_move_insn (gen_rtx_REG (Pmode, 0),
14024 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
14025 RTX_FRAME_RELATED_P (insn) = 1;
14028 /* The SAVE_WORLD and RESTORE_WORLD routines make a number of
14029 assumptions about the offsets of various bits of the stack
14031 gcc_assert (info->gp_save_offset == -220
14032 && info->fp_save_offset == -144
14033 && info->lr_save_offset == 8
14034 && info->cr_save_offset == 4
14037 && (!current_function_calls_eh_return
14038 || info->ehrd_offset == -432)
14039 && info->vrsave_save_offset == -224
14040 && info->altivec_save_offset == (-224 -16 -192));
14042 treg = gen_rtx_REG (SImode, 11);
14043 emit_move_insn (treg, GEN_INT (-info->total_size));
14045 /* SAVE_WORLD takes the caller's LR in R0 and the frame size
14046 in R11. It also clobbers R12, so beware! */
14048 /* Preserve CR2 for save_world prologues */
14050 sz += 32 - info->first_gp_reg_save;
14051 sz += 64 - info->first_fp_reg_save;
14052 sz += LAST_ALTIVEC_REGNO - info->first_altivec_reg_save + 1;
14053 p = rtvec_alloc (sz);
14055 RTVEC_ELT (p, j++) = gen_rtx_CLOBBER (VOIDmode,
14056 gen_rtx_REG (Pmode,
14057 LINK_REGISTER_REGNUM));
14058 RTVEC_ELT (p, j++) = gen_rtx_USE (VOIDmode,
14059 gen_rtx_SYMBOL_REF (Pmode,
14061 /* We do floats first so that the instruction pattern matches
14063 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
14065 rtx reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
14066 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14067 GEN_INT (info->fp_save_offset
14068 + sp_offset + 8 * i));
14069 rtx mem = gen_frame_mem (DFmode, addr);
14071 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
14073 for (i = 0; info->first_altivec_reg_save + i <= LAST_ALTIVEC_REGNO; i++)
14075 rtx reg = gen_rtx_REG (V4SImode, info->first_altivec_reg_save + i);
14076 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14077 GEN_INT (info->altivec_save_offset
14078 + sp_offset + 16 * i));
14079 rtx mem = gen_frame_mem (V4SImode, addr);
14081 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
14083 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
14085 rtx reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
14086 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14087 GEN_INT (info->gp_save_offset
14088 + sp_offset + reg_size * i));
14089 rtx mem = gen_frame_mem (reg_mode, addr);
14091 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
14095 /* CR register traditionally saved as CR2. */
14096 rtx reg = gen_rtx_REG (reg_mode, CR2_REGNO);
14097 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14098 GEN_INT (info->cr_save_offset
14100 rtx mem = gen_frame_mem (reg_mode, addr);
14102 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
14104 /* Prevent any attempt to delete the setting of r0 and treg! */
14105 RTVEC_ELT (p, j++) = gen_rtx_USE (VOIDmode, gen_rtx_REG (Pmode, 0));
14106 RTVEC_ELT (p, j++) = gen_rtx_USE (VOIDmode, treg);
14107 RTVEC_ELT (p, j++) = gen_rtx_CLOBBER (VOIDmode, sp_reg_rtx);
14109 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
14110 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
14111 NULL_RTX, NULL_RTX);
14113 if (current_function_calls_eh_return)
14118 unsigned int regno = EH_RETURN_DATA_REGNO (i);
14119 if (regno == INVALID_REGNUM)
14121 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, reg_mode, regno,
14122 info->ehrd_offset + sp_offset
14123 + reg_size * (int) i,
14129 /* Save AltiVec registers if needed. */
14130 if (!WORLD_SAVE_P (info) && TARGET_ALTIVEC_ABI && info->altivec_size != 0)
14134 /* There should be a non inline version of this, for when we
14135 are saving lots of vector registers. */
14136 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
14137 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
14139 rtx areg, savereg, mem;
14142 offset = info->altivec_save_offset + sp_offset
14143 + 16 * (i - info->first_altivec_reg_save);
14145 savereg = gen_rtx_REG (V4SImode, i);
14147 areg = gen_rtx_REG (Pmode, 0);
14148 emit_move_insn (areg, GEN_INT (offset));
14150 /* AltiVec addressing mode is [reg+reg]. */
14151 mem = gen_frame_mem (V4SImode,
14152 gen_rtx_PLUS (Pmode, frame_reg_rtx, areg));
14154 insn = emit_move_insn (mem, savereg);
14156 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
14157 areg, GEN_INT (offset));
14161 /* VRSAVE is a bit vector representing which AltiVec registers
14162 are used. The OS uses this to determine which vector
14163 registers to save on a context switch. We need to save
14164 VRSAVE on the stack frame, add whatever AltiVec registers we
14165 used in this function, and do the corresponding magic in the
14168 if (TARGET_ALTIVEC && TARGET_ALTIVEC_VRSAVE
14169 && info->vrsave_mask != 0)
14171 rtx reg, mem, vrsave;
14174 /* Get VRSAVE onto a GPR. Note that ABI_V4 might be using r12
14175 as frame_reg_rtx and r11 as the static chain pointer for
14176 nested functions. */
14177 reg = gen_rtx_REG (SImode, 0);
14178 vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
14180 emit_insn (gen_get_vrsave_internal (reg));
14182 emit_insn (gen_rtx_SET (VOIDmode, reg, vrsave));
14184 if (!WORLD_SAVE_P (info))
14187 offset = info->vrsave_save_offset + sp_offset;
14188 mem = gen_frame_mem (SImode,
14189 gen_rtx_PLUS (Pmode, frame_reg_rtx,
14190 GEN_INT (offset)));
14191 insn = emit_move_insn (mem, reg);
14194 /* Include the registers in the mask. */
14195 emit_insn (gen_iorsi3 (reg, reg, GEN_INT ((int) info->vrsave_mask)));
14197 insn = emit_insn (generate_set_vrsave (reg, info, 0));
14200 /* If we use the link register, get it into r0. */
14201 if (!WORLD_SAVE_P (info) && info->lr_save_p)
14203 insn = emit_move_insn (gen_rtx_REG (Pmode, 0),
14204 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
14205 RTX_FRAME_RELATED_P (insn) = 1;
14208 /* If we need to save CR, put it into r12. */
14209 if (!WORLD_SAVE_P (info) && info->cr_save_p && frame_reg_rtx != frame_ptr_rtx)
14213 cr_save_rtx = gen_rtx_REG (SImode, 12);
14214 insn = emit_insn (gen_movesi_from_cr (cr_save_rtx));
14215 RTX_FRAME_RELATED_P (insn) = 1;
14216 /* Now, there's no way that dwarf2out_frame_debug_expr is going
14217 to understand '(unspec:SI [(reg:CC 68) ...] UNSPEC_MOVESI_FROM_CR)'.
14218 But that's OK. All we have to do is specify that _one_ condition
14219 code register is saved in this stack slot. The thrower's epilogue
14220 will then restore all the call-saved registers.
14221 We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux. */
14222 set = gen_rtx_SET (VOIDmode, cr_save_rtx,
14223 gen_rtx_REG (SImode, CR2_REGNO));
14224 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
14229 /* Do any required saving of fpr's. If only one or two to save, do
14230 it ourselves. Otherwise, call function. */
14231 if (!WORLD_SAVE_P (info) && saving_FPRs_inline)
14234 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
14235 if ((regs_ever_live[info->first_fp_reg_save+i]
14236 && ! call_used_regs[info->first_fp_reg_save+i]))
14237 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, DFmode,
14238 info->first_fp_reg_save + i,
14239 info->fp_save_offset + sp_offset + 8 * i,
14242 else if (!WORLD_SAVE_P (info) && info->first_fp_reg_save != 64)
14246 const char *alloc_rname;
14248 p = rtvec_alloc (2 + 64 - info->first_fp_reg_save);
14250 RTVEC_ELT (p, 0) = gen_rtx_CLOBBER (VOIDmode,
14251 gen_rtx_REG (Pmode,
14252 LINK_REGISTER_REGNUM));
14253 sprintf (rname, "%s%d%s", SAVE_FP_PREFIX,
14254 info->first_fp_reg_save - 32, SAVE_FP_SUFFIX);
14255 alloc_rname = ggc_strdup (rname);
14256 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
14257 gen_rtx_SYMBOL_REF (Pmode,
14259 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
14261 rtx addr, reg, mem;
14262 reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
14263 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14264 GEN_INT (info->fp_save_offset
14265 + sp_offset + 8*i));
14266 mem = gen_frame_mem (DFmode, addr);
14268 RTVEC_ELT (p, i + 2) = gen_rtx_SET (VOIDmode, mem, reg);
14270 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
14271 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
14272 NULL_RTX, NULL_RTX);
14275 /* Save GPRs. This is done as a PARALLEL if we are using
14276 the store-multiple instructions. */
14277 if (!WORLD_SAVE_P (info) && using_store_multiple)
14281 p = rtvec_alloc (32 - info->first_gp_reg_save);
14282 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
14284 rtx addr, reg, mem;
14285 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
14286 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14287 GEN_INT (info->gp_save_offset
14290 mem = gen_frame_mem (reg_mode, addr);
14292 RTVEC_ELT (p, i) = gen_rtx_SET (VOIDmode, mem, reg);
14294 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
14295 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
14296 NULL_RTX, NULL_RTX);
14298 else if (!WORLD_SAVE_P (info))
14301 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
14302 if ((regs_ever_live[info->first_gp_reg_save + i]
14303 && (!call_used_regs[info->first_gp_reg_save + i]
14304 || (i + info->first_gp_reg_save
14305 == RS6000_PIC_OFFSET_TABLE_REGNUM
14306 && TARGET_TOC && TARGET_MINIMAL_TOC)))
14307 || (i + info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
14308 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
14309 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
14311 rtx addr, reg, mem;
14312 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
14314 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
14316 int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
14319 if (!SPE_CONST_OFFSET_OK (offset))
14321 b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
14322 emit_move_insn (b, GEN_INT (offset));
14325 b = GEN_INT (offset);
14327 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
14328 mem = gen_frame_mem (V2SImode, addr);
14329 insn = emit_move_insn (mem, reg);
14331 if (GET_CODE (b) == CONST_INT)
14332 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
14333 NULL_RTX, NULL_RTX);
14335 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
14336 b, GEN_INT (offset));
14340 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14341 GEN_INT (info->gp_save_offset
14344 mem = gen_frame_mem (reg_mode, addr);
14346 insn = emit_move_insn (mem, reg);
14347 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
14348 NULL_RTX, NULL_RTX);
14353 /* ??? There's no need to emit actual instructions here, but it's the
14354 easiest way to get the frame unwind information emitted. */
14355 if (!WORLD_SAVE_P (info) && current_function_calls_eh_return)
14357 unsigned int i, regno;
14359 /* In AIX ABI we need to pretend we save r2 here. */
14362 rtx addr, reg, mem;
14364 reg = gen_rtx_REG (reg_mode, 2);
14365 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14366 GEN_INT (sp_offset + 5 * reg_size));
14367 mem = gen_frame_mem (reg_mode, addr);
14369 insn = emit_move_insn (mem, reg);
14370 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
14371 NULL_RTX, NULL_RTX);
14372 PATTERN (insn) = gen_blockage ();
14377 regno = EH_RETURN_DATA_REGNO (i);
14378 if (regno == INVALID_REGNUM)
14381 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, reg_mode, regno,
14382 info->ehrd_offset + sp_offset
14383 + reg_size * (int) i,
14388 /* Save lr if we used it. */
14389 if (!WORLD_SAVE_P (info) && info->lr_save_p)
14391 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14392 GEN_INT (info->lr_save_offset + sp_offset));
14393 rtx reg = gen_rtx_REG (Pmode, 0);
14394 rtx mem = gen_rtx_MEM (Pmode, addr);
14395 /* This should not be of frame_alias_set, because of
14396 __builtin_return_address. */
14398 insn = emit_move_insn (mem, reg);
14399 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
14400 NULL_RTX, NULL_RTX);
14403 /* Save CR if we use any that must be preserved. */
14404 if (!WORLD_SAVE_P (info) && info->cr_save_p)
14406 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14407 GEN_INT (info->cr_save_offset + sp_offset));
14408 rtx mem = gen_frame_mem (SImode, addr);
14409 /* See the large comment above about why CR2_REGNO is used. */
14410 rtx magic_eh_cr_reg = gen_rtx_REG (SImode, CR2_REGNO);
14412 /* If r12 was used to hold the original sp, copy cr into r0 now
14414 if (REGNO (frame_reg_rtx) == 12)
14418 cr_save_rtx = gen_rtx_REG (SImode, 0);
14419 insn = emit_insn (gen_movesi_from_cr (cr_save_rtx));
14420 RTX_FRAME_RELATED_P (insn) = 1;
14421 set = gen_rtx_SET (VOIDmode, cr_save_rtx, magic_eh_cr_reg);
14422 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
14427 insn = emit_move_insn (mem, cr_save_rtx);
14429 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
14430 NULL_RTX, NULL_RTX);
14433 /* Update stack and set back pointer unless this is V.4,
14434 for which it was done previously. */
14435 if (!WORLD_SAVE_P (info) && info->push_p
14436 && !(DEFAULT_ABI == ABI_V4 || current_function_calls_eh_return))
14437 rs6000_emit_allocate_stack (info->total_size, FALSE);
14439 /* Set frame pointer, if needed. */
14440 if (frame_pointer_needed)
14442 insn = emit_move_insn (gen_rtx_REG (Pmode, HARD_FRAME_POINTER_REGNUM),
14444 RTX_FRAME_RELATED_P (insn) = 1;
14447 /* If we are using RS6000_PIC_OFFSET_TABLE_REGNUM, we need to set it up. */
14448 if ((TARGET_TOC && TARGET_MINIMAL_TOC && get_pool_size () != 0)
14449 || (DEFAULT_ABI == ABI_V4
14450 && (flag_pic == 1 || (flag_pic && TARGET_SECURE_PLT))
14451 && regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM]))
14453 /* If emit_load_toc_table will use the link register, we need to save
14454 it. We use R12 for this purpose because emit_load_toc_table
14455 can use register 0. This allows us to use a plain 'blr' to return
14456 from the procedure more often. */
14457 int save_LR_around_toc_setup = (TARGET_ELF
14458 && DEFAULT_ABI != ABI_AIX
14460 && ! info->lr_save_p
14461 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0);
14462 if (save_LR_around_toc_setup)
14464 rtx lr = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
14466 insn = emit_move_insn (frame_ptr_rtx, lr);
14467 rs6000_maybe_dead (insn);
14468 RTX_FRAME_RELATED_P (insn) = 1;
14470 rs6000_emit_load_toc_table (TRUE);
14472 insn = emit_move_insn (lr, frame_ptr_rtx);
14473 rs6000_maybe_dead (insn);
14474 RTX_FRAME_RELATED_P (insn) = 1;
14477 rs6000_emit_load_toc_table (TRUE);
14481 if (DEFAULT_ABI == ABI_DARWIN
14482 && flag_pic && current_function_uses_pic_offset_table)
14484 rtx lr = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
14485 rtx src = machopic_function_base_sym ();
14487 /* Save and restore LR locally around this call (in R0). */
14488 if (!info->lr_save_p)
14489 rs6000_maybe_dead (emit_move_insn (gen_rtx_REG (Pmode, 0), lr));
14491 rs6000_maybe_dead (emit_insn (gen_load_macho_picbase (lr, src)));
14493 insn = emit_move_insn (gen_rtx_REG (Pmode,
14494 RS6000_PIC_OFFSET_TABLE_REGNUM),
14496 rs6000_maybe_dead (insn);
14498 if (!info->lr_save_p)
14499 rs6000_maybe_dead (emit_move_insn (lr, gen_rtx_REG (Pmode, 0)));
14504 /* Write function prologue. */
14507 rs6000_output_function_prologue (FILE *file,
14508 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
14510 rs6000_stack_t *info = rs6000_stack_info ();
14512 if (TARGET_DEBUG_STACK)
14513 debug_stack_info (info);
14515 /* Write .extern for any function we will call to save and restore
14517 if (info->first_fp_reg_save < 64
14518 && !FP_SAVE_INLINE (info->first_fp_reg_save))
14519 fprintf (file, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
14520 SAVE_FP_PREFIX, info->first_fp_reg_save - 32, SAVE_FP_SUFFIX,
14521 RESTORE_FP_PREFIX, info->first_fp_reg_save - 32,
14522 RESTORE_FP_SUFFIX);
14524 /* Write .extern for AIX common mode routines, if needed. */
14525 if (! TARGET_POWER && ! TARGET_POWERPC && ! common_mode_defined)
14527 fputs ("\t.extern __mulh\n", file);
14528 fputs ("\t.extern __mull\n", file);
14529 fputs ("\t.extern __divss\n", file);
14530 fputs ("\t.extern __divus\n", file);
14531 fputs ("\t.extern __quoss\n", file);
14532 fputs ("\t.extern __quous\n", file);
14533 common_mode_defined = 1;
14536 if (! HAVE_prologue)
14540 /* A NOTE_INSN_DELETED is supposed to be at the start and end of
14541 the "toplevel" insn chain. */
14542 emit_note (NOTE_INSN_DELETED);
14543 rs6000_emit_prologue ();
14544 emit_note (NOTE_INSN_DELETED);
14546 /* Expand INSN_ADDRESSES so final() doesn't crash. */
14550 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
14552 INSN_ADDRESSES_NEW (insn, addr);
14557 if (TARGET_DEBUG_STACK)
14558 debug_rtx_list (get_insns (), 100);
14559 final (get_insns (), file, FALSE);
14563 rs6000_pic_labelno++;
14566 /* Emit function epilogue as insns.
14568 At present, dwarf2out_frame_debug_expr doesn't understand
14569 register restores, so we don't bother setting RTX_FRAME_RELATED_P
14570 anywhere in the epilogue. Most of the insns below would in any case
14571 need special notes to explain where r11 is in relation to the stack. */
14574 rs6000_emit_epilogue (int sibcall)
14576 rs6000_stack_t *info;
14577 int restoring_FPRs_inline;
14578 int using_load_multiple;
14579 int using_mfcr_multiple;
14580 int use_backchain_to_restore_sp;
14582 rtx sp_reg_rtx = gen_rtx_REG (Pmode, 1);
14583 rtx frame_reg_rtx = sp_reg_rtx;
14584 enum machine_mode reg_mode = Pmode;
14585 int reg_size = TARGET_32BIT ? 4 : 8;
14588 info = rs6000_stack_info ();
14590 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
14592 reg_mode = V2SImode;
14596 using_load_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
14597 && (!TARGET_SPE_ABI
14598 || info->spe_64bit_regs_used == 0)
14599 && info->first_gp_reg_save < 31
14600 && no_global_regs_above (info->first_gp_reg_save));
14601 restoring_FPRs_inline = (sibcall
14602 || current_function_calls_eh_return
14603 || info->first_fp_reg_save == 64
14604 || FP_SAVE_INLINE (info->first_fp_reg_save));
14605 use_backchain_to_restore_sp = (frame_pointer_needed
14606 || current_function_calls_alloca
14607 || info->total_size > 32767);
14608 using_mfcr_multiple = (rs6000_cpu == PROCESSOR_PPC601
14609 || rs6000_cpu == PROCESSOR_PPC603
14610 || rs6000_cpu == PROCESSOR_PPC750
14613 if (WORLD_SAVE_P (info))
14617 const char *alloc_rname;
14620 /* eh_rest_world_r10 will return to the location saved in the LR
14621 stack slot (which is not likely to be our caller.)
14622 Input: R10 -- stack adjustment. Clobbers R0, R11, R12, R7, R8.
14623 rest_world is similar, except any R10 parameter is ignored.
14624 The exception-handling stuff that was here in 2.95 is no
14625 longer necessary. */
14629 + 32 - info->first_gp_reg_save
14630 + LAST_ALTIVEC_REGNO + 1 - info->first_altivec_reg_save
14631 + 63 + 1 - info->first_fp_reg_save);
14633 strcpy (rname, ((current_function_calls_eh_return) ?
14634 "*eh_rest_world_r10" : "*rest_world"));
14635 alloc_rname = ggc_strdup (rname);
14638 RTVEC_ELT (p, j++) = gen_rtx_RETURN (VOIDmode);
14639 RTVEC_ELT (p, j++) = gen_rtx_USE (VOIDmode,
14640 gen_rtx_REG (Pmode,
14641 LINK_REGISTER_REGNUM));
14643 = gen_rtx_USE (VOIDmode, gen_rtx_SYMBOL_REF (Pmode, alloc_rname));
14644 /* The instruction pattern requires a clobber here;
14645 it is shared with the restVEC helper. */
14647 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 11));
14650 /* CR register traditionally saved as CR2. */
14651 rtx reg = gen_rtx_REG (reg_mode, CR2_REGNO);
14652 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14653 GEN_INT (info->cr_save_offset));
14654 rtx mem = gen_frame_mem (reg_mode, addr);
14656 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
14659 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
14661 rtx reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
14662 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14663 GEN_INT (info->gp_save_offset
14665 rtx mem = gen_frame_mem (reg_mode, addr);
14667 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
14669 for (i = 0; info->first_altivec_reg_save + i <= LAST_ALTIVEC_REGNO; i++)
14671 rtx reg = gen_rtx_REG (V4SImode, info->first_altivec_reg_save + i);
14672 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14673 GEN_INT (info->altivec_save_offset
14675 rtx mem = gen_frame_mem (V4SImode, addr);
14677 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
14679 for (i = 0; info->first_fp_reg_save + i <= 63; i++)
14681 rtx reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
14682 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14683 GEN_INT (info->fp_save_offset
14685 rtx mem = gen_frame_mem (DFmode, addr);
14687 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
14690 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 0));
14692 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, 12));
14694 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, 7));
14696 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, 8));
14698 = gen_rtx_USE (VOIDmode, gen_rtx_REG (SImode, 10));
14699 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
14704 /* If we have a frame pointer, a call to alloca, or a large stack
14705 frame, restore the old stack pointer using the backchain. Otherwise,
14706 we know what size to update it with. */
14707 if (use_backchain_to_restore_sp)
14709 /* Under V.4, don't reset the stack pointer until after we're done
14710 loading the saved registers. */
14711 if (DEFAULT_ABI == ABI_V4)
14712 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
14714 emit_move_insn (frame_reg_rtx,
14715 gen_rtx_MEM (Pmode, sp_reg_rtx));
14718 else if (info->push_p)
14720 if (DEFAULT_ABI == ABI_V4
14721 || current_function_calls_eh_return)
14722 sp_offset = info->total_size;
14725 emit_insn (TARGET_32BIT
14726 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
14727 GEN_INT (info->total_size))
14728 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
14729 GEN_INT (info->total_size)));
14733 /* Restore AltiVec registers if needed. */
14734 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
14738 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
14739 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
14741 rtx addr, areg, mem;
14743 areg = gen_rtx_REG (Pmode, 0);
14745 (areg, GEN_INT (info->altivec_save_offset
14747 + 16 * (i - info->first_altivec_reg_save)));
14749 /* AltiVec addressing mode is [reg+reg]. */
14750 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
14751 mem = gen_frame_mem (V4SImode, addr);
14753 emit_move_insn (gen_rtx_REG (V4SImode, i), mem);
14757 /* Restore VRSAVE if needed. */
14758 if (TARGET_ALTIVEC && TARGET_ALTIVEC_VRSAVE
14759 && info->vrsave_mask != 0)
14761 rtx addr, mem, reg;
14763 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14764 GEN_INT (info->vrsave_save_offset + sp_offset));
14765 mem = gen_frame_mem (SImode, addr);
14766 reg = gen_rtx_REG (SImode, 12);
14767 emit_move_insn (reg, mem);
14769 emit_insn (generate_set_vrsave (reg, info, 1));
14772 /* Get the old lr if we saved it. */
14773 if (info->lr_save_p)
14775 rtx mem = gen_frame_mem_offset (Pmode, frame_reg_rtx,
14776 info->lr_save_offset + sp_offset);
14778 emit_move_insn (gen_rtx_REG (Pmode, 0), mem);
14781 /* Get the old cr if we saved it. */
14782 if (info->cr_save_p)
14784 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14785 GEN_INT (info->cr_save_offset + sp_offset));
14786 rtx mem = gen_frame_mem (SImode, addr);
14788 emit_move_insn (gen_rtx_REG (SImode, 12), mem);
14791 /* Set LR here to try to overlap restores below. */
14792 if (info->lr_save_p)
14793 emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
14794 gen_rtx_REG (Pmode, 0));
14796 /* Load exception handler data registers, if needed. */
14797 if (current_function_calls_eh_return)
14799 unsigned int i, regno;
14803 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14804 GEN_INT (sp_offset + 5 * reg_size));
14805 rtx mem = gen_frame_mem (reg_mode, addr);
14807 emit_move_insn (gen_rtx_REG (reg_mode, 2), mem);
14814 regno = EH_RETURN_DATA_REGNO (i);
14815 if (regno == INVALID_REGNUM)
14818 mem = gen_frame_mem_offset (reg_mode, frame_reg_rtx,
14819 info->ehrd_offset + sp_offset
14820 + reg_size * (int) i);
14822 emit_move_insn (gen_rtx_REG (reg_mode, regno), mem);
14826 /* Restore GPRs. This is done as a PARALLEL if we are using
14827 the load-multiple instructions. */
14828 if (using_load_multiple)
14831 p = rtvec_alloc (32 - info->first_gp_reg_save);
14832 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
14834 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14835 GEN_INT (info->gp_save_offset
14838 rtx mem = gen_frame_mem (reg_mode, addr);
14841 gen_rtx_SET (VOIDmode,
14842 gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
14845 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
14848 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
14849 if ((regs_ever_live[info->first_gp_reg_save + i]
14850 && (!call_used_regs[info->first_gp_reg_save + i]
14851 || (i + info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
14852 && TARGET_TOC && TARGET_MINIMAL_TOC)))
14853 || (i + info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
14854 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
14855 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
14857 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14858 GEN_INT (info->gp_save_offset
14861 rtx mem = gen_frame_mem (reg_mode, addr);
14863 /* Restore 64-bit quantities for SPE. */
14864 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
14866 int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
14869 if (!SPE_CONST_OFFSET_OK (offset))
14871 b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
14872 emit_move_insn (b, GEN_INT (offset));
14875 b = GEN_INT (offset);
14877 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
14878 mem = gen_frame_mem (V2SImode, addr);
14881 emit_move_insn (gen_rtx_REG (reg_mode,
14882 info->first_gp_reg_save + i), mem);
14885 /* Restore fpr's if we need to do it without calling a function. */
14886 if (restoring_FPRs_inline)
14887 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
14888 if ((regs_ever_live[info->first_fp_reg_save+i]
14889 && ! call_used_regs[info->first_fp_reg_save+i]))
14892 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14893 GEN_INT (info->fp_save_offset
14896 mem = gen_frame_mem (DFmode, addr);
14898 emit_move_insn (gen_rtx_REG (DFmode,
14899 info->first_fp_reg_save + i),
14903 /* If we saved cr, restore it here. Just those that were used. */
14904 if (info->cr_save_p)
14906 rtx r12_rtx = gen_rtx_REG (SImode, 12);
14909 if (using_mfcr_multiple)
14911 for (i = 0; i < 8; i++)
14912 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
14914 gcc_assert (count);
14917 if (using_mfcr_multiple && count > 1)
14922 p = rtvec_alloc (count);
14925 for (i = 0; i < 8; i++)
14926 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
14928 rtvec r = rtvec_alloc (2);
14929 RTVEC_ELT (r, 0) = r12_rtx;
14930 RTVEC_ELT (r, 1) = GEN_INT (1 << (7-i));
14931 RTVEC_ELT (p, ndx) =
14932 gen_rtx_SET (VOIDmode, gen_rtx_REG (CCmode, CR0_REGNO+i),
14933 gen_rtx_UNSPEC (CCmode, r, UNSPEC_MOVESI_TO_CR));
14936 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
14937 gcc_assert (ndx == count);
14940 for (i = 0; i < 8; i++)
14941 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
14943 emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode,
14949 /* If this is V.4, unwind the stack pointer after all of the loads
14950 have been done. We need to emit a block here so that sched
14951 doesn't decide to move the sp change before the register restores
14952 (which may not have any obvious dependency on the stack). This
14953 doesn't hurt performance, because there is no scheduling that can
14954 be done after this point. */
14955 if (DEFAULT_ABI == ABI_V4
14956 || current_function_calls_eh_return)
14958 if (frame_reg_rtx != sp_reg_rtx)
14959 rs6000_emit_stack_tie ();
14961 if (use_backchain_to_restore_sp)
14963 emit_move_insn (sp_reg_rtx, frame_reg_rtx);
14965 else if (sp_offset != 0)
14967 emit_insn (TARGET_32BIT
14968 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
14969 GEN_INT (sp_offset))
14970 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
14971 GEN_INT (sp_offset)));
14975 if (current_function_calls_eh_return)
14977 rtx sa = EH_RETURN_STACKADJ_RTX;
14978 emit_insn (TARGET_32BIT
14979 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx, sa)
14980 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx, sa));
14986 if (! restoring_FPRs_inline)
14987 p = rtvec_alloc (3 + 64 - info->first_fp_reg_save);
14989 p = rtvec_alloc (2);
14991 RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
14992 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
14993 gen_rtx_REG (Pmode,
14994 LINK_REGISTER_REGNUM));
14996 /* If we have to restore more than two FP registers, branch to the
14997 restore function. It will return to our caller. */
14998 if (! restoring_FPRs_inline)
15002 const char *alloc_rname;
15004 sprintf (rname, "%s%d%s", RESTORE_FP_PREFIX,
15005 info->first_fp_reg_save - 32, RESTORE_FP_SUFFIX);
15006 alloc_rname = ggc_strdup (rname);
15007 RTVEC_ELT (p, 2) = gen_rtx_USE (VOIDmode,
15008 gen_rtx_SYMBOL_REF (Pmode,
15011 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
15014 addr = gen_rtx_PLUS (Pmode, sp_reg_rtx,
15015 GEN_INT (info->fp_save_offset + 8*i));
15016 mem = gen_frame_mem (DFmode, addr);
15018 RTVEC_ELT (p, i+3) =
15019 gen_rtx_SET (VOIDmode,
15020 gen_rtx_REG (DFmode, info->first_fp_reg_save + i),
15025 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
15029 /* Write function epilogue. */
15032 rs6000_output_function_epilogue (FILE *file,
15033 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
15035 rs6000_stack_t *info = rs6000_stack_info ();
15037 if (! HAVE_epilogue)
15039 rtx insn = get_last_insn ();
15040 /* If the last insn was a BARRIER, we don't have to write anything except
15041 the trace table. */
15042 if (GET_CODE (insn) == NOTE)
15043 insn = prev_nonnote_insn (insn);
15044 if (insn == 0 || GET_CODE (insn) != BARRIER)
15046 /* This is slightly ugly, but at least we don't have two
15047 copies of the epilogue-emitting code. */
15050 /* A NOTE_INSN_DELETED is supposed to be at the start
15051 and end of the "toplevel" insn chain. */
15052 emit_note (NOTE_INSN_DELETED);
15053 rs6000_emit_epilogue (FALSE);
15054 emit_note (NOTE_INSN_DELETED);
15056 /* Expand INSN_ADDRESSES so final() doesn't crash. */
15060 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
15062 INSN_ADDRESSES_NEW (insn, addr);
15067 if (TARGET_DEBUG_STACK)
15068 debug_rtx_list (get_insns (), 100);
15069 final (get_insns (), file, FALSE);
15075 macho_branch_islands ();
15076 /* Mach-O doesn't support labels at the end of objects, so if
15077 it looks like we might want one, insert a NOP. */
15079 rtx insn = get_last_insn ();
15082 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_DELETED_LABEL)
15083 insn = PREV_INSN (insn);
15087 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_DELETED_LABEL)))
15088 fputs ("\tnop\n", file);
15092 /* Output a traceback table here. See /usr/include/sys/debug.h for info
15095 We don't output a traceback table if -finhibit-size-directive was
15096 used. The documentation for -finhibit-size-directive reads
15097 ``don't output a @code{.size} assembler directive, or anything
15098 else that would cause trouble if the function is split in the
15099 middle, and the two halves are placed at locations far apart in
15100 memory.'' The traceback table has this property, since it
15101 includes the offset from the start of the function to the
15102 traceback table itself.
15104 System V.4 Powerpc's (and the embedded ABI derived from it) use a
15105 different traceback table. */
15106 if (DEFAULT_ABI == ABI_AIX && ! flag_inhibit_size_directive
15107 && rs6000_traceback != traceback_none)
15109 const char *fname = NULL;
15110 const char *language_string = lang_hooks.name;
15111 int fixed_parms = 0, float_parms = 0, parm_info = 0;
15113 int optional_tbtab;
15115 if (rs6000_traceback == traceback_full)
15116 optional_tbtab = 1;
15117 else if (rs6000_traceback == traceback_part)
15118 optional_tbtab = 0;
15120 optional_tbtab = !optimize_size && !TARGET_ELF;
15122 if (optional_tbtab)
15124 fname = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
15125 while (*fname == '.') /* V.4 encodes . in the name */
15128 /* Need label immediately before tbtab, so we can compute
15129 its offset from the function start. */
15130 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
15131 ASM_OUTPUT_LABEL (file, fname);
15134 /* The .tbtab pseudo-op can only be used for the first eight
15135 expressions, since it can't handle the possibly variable
15136 length fields that follow. However, if you omit the optional
15137 fields, the assembler outputs zeros for all optional fields
15138 anyways, giving each variable length field is minimum length
15139 (as defined in sys/debug.h). Thus we can not use the .tbtab
15140 pseudo-op at all. */
15142 /* An all-zero word flags the start of the tbtab, for debuggers
15143 that have to find it by searching forward from the entry
15144 point or from the current pc. */
15145 fputs ("\t.long 0\n", file);
15147 /* Tbtab format type. Use format type 0. */
15148 fputs ("\t.byte 0,", file);
15150 /* Language type. Unfortunately, there does not seem to be any
15151 official way to discover the language being compiled, so we
15152 use language_string.
15153 C is 0. Fortran is 1. Pascal is 2. Ada is 3. C++ is 9.
15154 Java is 13. Objective-C is 14. */
15155 if (! strcmp (language_string, "GNU C"))
15157 else if (! strcmp (language_string, "GNU F77")
15158 || ! strcmp (language_string, "GNU F95"))
15160 else if (! strcmp (language_string, "GNU Pascal"))
15162 else if (! strcmp (language_string, "GNU Ada"))
15164 else if (! strcmp (language_string, "GNU C++"))
15166 else if (! strcmp (language_string, "GNU Java"))
15168 else if (! strcmp (language_string, "GNU Objective-C"))
15171 gcc_unreachable ();
15172 fprintf (file, "%d,", i);
15174 /* 8 single bit fields: global linkage (not set for C extern linkage,
15175 apparently a PL/I convention?), out-of-line epilogue/prologue, offset
15176 from start of procedure stored in tbtab, internal function, function
15177 has controlled storage, function has no toc, function uses fp,
15178 function logs/aborts fp operations. */
15179 /* Assume that fp operations are used if any fp reg must be saved. */
15180 fprintf (file, "%d,",
15181 (optional_tbtab << 5) | ((info->first_fp_reg_save != 64) << 1));
15183 /* 6 bitfields: function is interrupt handler, name present in
15184 proc table, function calls alloca, on condition directives
15185 (controls stack walks, 3 bits), saves condition reg, saves
15187 /* The `function calls alloca' bit seems to be set whenever reg 31 is
15188 set up as a frame pointer, even when there is no alloca call. */
15189 fprintf (file, "%d,",
15190 ((optional_tbtab << 6)
15191 | ((optional_tbtab & frame_pointer_needed) << 5)
15192 | (info->cr_save_p << 1)
15193 | (info->lr_save_p)));
15195 /* 3 bitfields: saves backchain, fixup code, number of fpr saved
15197 fprintf (file, "%d,",
15198 (info->push_p << 7) | (64 - info->first_fp_reg_save));
15200 /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits). */
15201 fprintf (file, "%d,", (32 - first_reg_to_save ()));
15203 if (optional_tbtab)
15205 /* Compute the parameter info from the function decl argument
15208 int next_parm_info_bit = 31;
15210 for (decl = DECL_ARGUMENTS (current_function_decl);
15211 decl; decl = TREE_CHAIN (decl))
15213 rtx parameter = DECL_INCOMING_RTL (decl);
15214 enum machine_mode mode = GET_MODE (parameter);
15216 if (GET_CODE (parameter) == REG)
15218 if (SCALAR_FLOAT_MODE_P (mode))
15236 gcc_unreachable ();
15239 /* If only one bit will fit, don't or in this entry. */
15240 if (next_parm_info_bit > 0)
15241 parm_info |= (bits << (next_parm_info_bit - 1));
15242 next_parm_info_bit -= 2;
15246 fixed_parms += ((GET_MODE_SIZE (mode)
15247 + (UNITS_PER_WORD - 1))
15249 next_parm_info_bit -= 1;
15255 /* Number of fixed point parameters. */
15256 /* This is actually the number of words of fixed point parameters; thus
15257 an 8 byte struct counts as 2; and thus the maximum value is 8. */
15258 fprintf (file, "%d,", fixed_parms);
15260 /* 2 bitfields: number of floating point parameters (7 bits), parameters
15262 /* This is actually the number of fp registers that hold parameters;
15263 and thus the maximum value is 13. */
15264 /* Set parameters on stack bit if parameters are not in their original
15265 registers, regardless of whether they are on the stack? Xlc
15266 seems to set the bit when not optimizing. */
15267 fprintf (file, "%d\n", ((float_parms << 1) | (! optimize)));
15269 if (! optional_tbtab)
15272 /* Optional fields follow. Some are variable length. */
15274 /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
15275 11 double float. */
15276 /* There is an entry for each parameter in a register, in the order that
15277 they occur in the parameter list. Any intervening arguments on the
15278 stack are ignored. If the list overflows a long (max possible length
15279 34 bits) then completely leave off all elements that don't fit. */
15280 /* Only emit this long if there was at least one parameter. */
15281 if (fixed_parms || float_parms)
15282 fprintf (file, "\t.long %d\n", parm_info);
15284 /* Offset from start of code to tb table. */
15285 fputs ("\t.long ", file);
15286 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
15288 RS6000_OUTPUT_BASENAME (file, fname);
15290 assemble_name (file, fname);
15292 rs6000_output_function_entry (file, fname);
15295 /* Interrupt handler mask. */
15296 /* Omit this long, since we never set the interrupt handler bit
15299 /* Number of CTL (controlled storage) anchors. */
15300 /* Omit this long, since the has_ctl bit is never set above. */
15302 /* Displacement into stack of each CTL anchor. */
15303 /* Omit this list of longs, because there are no CTL anchors. */
15305 /* Length of function name. */
15308 fprintf (file, "\t.short %d\n", (int) strlen (fname));
15310 /* Function name. */
15311 assemble_string (fname, strlen (fname));
15313 /* Register for alloca automatic storage; this is always reg 31.
15314 Only emit this if the alloca bit was set above. */
15315 if (frame_pointer_needed)
15316 fputs ("\t.byte 31\n", file);
15318 fputs ("\t.align 2\n", file);
15322 /* A C compound statement that outputs the assembler code for a thunk
15323 function, used to implement C++ virtual function calls with
15324 multiple inheritance. The thunk acts as a wrapper around a virtual
15325 function, adjusting the implicit object parameter before handing
15326 control off to the real function.
15328 First, emit code to add the integer DELTA to the location that
15329 contains the incoming first argument. Assume that this argument
15330 contains a pointer, and is the one used to pass the `this' pointer
15331 in C++. This is the incoming argument *before* the function
15332 prologue, e.g. `%o0' on a sparc. The addition must preserve the
15333 values of all other incoming arguments.
15335 After the addition, emit code to jump to FUNCTION, which is a
15336 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does
15337 not touch the return address. Hence returning from FUNCTION will
15338 return to whoever called the current `thunk'.
15340 The effect must be as if FUNCTION had been called directly with the
15341 adjusted first argument. This macro is responsible for emitting
15342 all of the code for a thunk function; output_function_prologue()
15343 and output_function_epilogue() are not invoked.
15345 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already
15346 been extracted from it.) It might possibly be useful on some
15347 targets, but probably not.
15349 If you do not define this macro, the target-independent code in the
15350 C++ frontend will generate a less efficient heavyweight thunk that
15351 calls FUNCTION instead of jumping to it. The generic approach does
15352 not support varargs. */
15355 rs6000_output_mi_thunk (FILE *file, tree thunk_fndecl ATTRIBUTE_UNUSED,
15356 HOST_WIDE_INT delta, HOST_WIDE_INT vcall_offset,
15359 rtx this, insn, funexp;
15361 reload_completed = 1;
15362 epilogue_completed = 1;
15363 no_new_pseudos = 1;
15364 reset_block_changes ();
15366 /* Mark the end of the (empty) prologue. */
15367 emit_note (NOTE_INSN_PROLOGUE_END);
15369 /* Find the "this" pointer. If the function returns a structure,
15370 the structure return pointer is in r3. */
15371 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
15372 this = gen_rtx_REG (Pmode, 4);
15374 this = gen_rtx_REG (Pmode, 3);
15376 /* Apply the constant offset, if required. */
15379 rtx delta_rtx = GEN_INT (delta);
15380 emit_insn (TARGET_32BIT
15381 ? gen_addsi3 (this, this, delta_rtx)
15382 : gen_adddi3 (this, this, delta_rtx));
15385 /* Apply the offset from the vtable, if required. */
15388 rtx vcall_offset_rtx = GEN_INT (vcall_offset);
15389 rtx tmp = gen_rtx_REG (Pmode, 12);
15391 emit_move_insn (tmp, gen_rtx_MEM (Pmode, this));
15392 if (((unsigned HOST_WIDE_INT) vcall_offset) + 0x8000 >= 0x10000)
15394 emit_insn (TARGET_32BIT
15395 ? gen_addsi3 (tmp, tmp, vcall_offset_rtx)
15396 : gen_adddi3 (tmp, tmp, vcall_offset_rtx));
15397 emit_move_insn (tmp, gen_rtx_MEM (Pmode, tmp));
15401 rtx loc = gen_rtx_PLUS (Pmode, tmp, vcall_offset_rtx);
15403 emit_move_insn (tmp, gen_rtx_MEM (Pmode, loc));
15405 emit_insn (TARGET_32BIT
15406 ? gen_addsi3 (this, this, tmp)
15407 : gen_adddi3 (this, this, tmp));
15410 /* Generate a tail call to the target function. */
15411 if (!TREE_USED (function))
15413 assemble_external (function);
15414 TREE_USED (function) = 1;
15416 funexp = XEXP (DECL_RTL (function), 0);
15417 funexp = gen_rtx_MEM (FUNCTION_MODE, funexp);
15420 if (MACHOPIC_INDIRECT)
15421 funexp = machopic_indirect_call_target (funexp);
15424 /* gen_sibcall expects reload to convert scratch pseudo to LR so we must
15425 generate sibcall RTL explicitly. */
15426 insn = emit_call_insn (
15427 gen_rtx_PARALLEL (VOIDmode,
15429 gen_rtx_CALL (VOIDmode,
15430 funexp, const0_rtx),
15431 gen_rtx_USE (VOIDmode, const0_rtx),
15432 gen_rtx_USE (VOIDmode,
15433 gen_rtx_REG (SImode,
15434 LINK_REGISTER_REGNUM)),
15435 gen_rtx_RETURN (VOIDmode))));
15436 SIBLING_CALL_P (insn) = 1;
15439 /* Run just enough of rest_of_compilation to get the insns emitted.
15440 There's not really enough bulk here to make other passes such as
15441 instruction scheduling worth while. Note that use_thunk calls
15442 assemble_start_function and assemble_end_function. */
15443 insn = get_insns ();
15444 insn_locators_initialize ();
15445 shorten_branches (insn);
15446 final_start_function (insn, file, 1);
15447 final (insn, file, 1);
15448 final_end_function ();
15450 reload_completed = 0;
15451 epilogue_completed = 0;
15452 no_new_pseudos = 0;
15455 /* A quick summary of the various types of 'constant-pool tables'
15458 Target Flags Name One table per
15459 AIX (none) AIX TOC object file
15460 AIX -mfull-toc AIX TOC object file
15461 AIX -mminimal-toc AIX minimal TOC translation unit
15462 SVR4/EABI (none) SVR4 SDATA object file
15463 SVR4/EABI -fpic SVR4 pic object file
15464 SVR4/EABI -fPIC SVR4 PIC translation unit
15465 SVR4/EABI -mrelocatable EABI TOC function
15466 SVR4/EABI -maix AIX TOC object file
15467 SVR4/EABI -maix -mminimal-toc
15468 AIX minimal TOC translation unit
15470 Name Reg. Set by entries contains:
15471 made by addrs? fp? sum?
15473 AIX TOC 2 crt0 as Y option option
15474 AIX minimal TOC 30 prolog gcc Y Y option
15475 SVR4 SDATA 13 crt0 gcc N Y N
15476 SVR4 pic 30 prolog ld Y not yet N
15477 SVR4 PIC 30 prolog gcc Y option option
15478 EABI TOC 30 prolog gcc Y option option
15482 /* Hash functions for the hash table. */
15485 rs6000_hash_constant (rtx k)
15487 enum rtx_code code = GET_CODE (k);
15488 enum machine_mode mode = GET_MODE (k);
15489 unsigned result = (code << 3) ^ mode;
15490 const char *format;
15493 format = GET_RTX_FORMAT (code);
15494 flen = strlen (format);
15500 return result * 1231 + (unsigned) INSN_UID (XEXP (k, 0));
15503 if (mode != VOIDmode)
15504 return real_hash (CONST_DOUBLE_REAL_VALUE (k)) * result;
15516 for (; fidx < flen; fidx++)
15517 switch (format[fidx])
15522 const char *str = XSTR (k, fidx);
15523 len = strlen (str);
15524 result = result * 613 + len;
15525 for (i = 0; i < len; i++)
15526 result = result * 613 + (unsigned) str[i];
15531 result = result * 1231 + rs6000_hash_constant (XEXP (k, fidx));
15535 result = result * 613 + (unsigned) XINT (k, fidx);
15538 if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT))
15539 result = result * 613 + (unsigned) XWINT (k, fidx);
15543 for (i = 0; i < sizeof (HOST_WIDE_INT) / sizeof (unsigned); i++)
15544 result = result * 613 + (unsigned) (XWINT (k, fidx)
15551 gcc_unreachable ();
15558 toc_hash_function (const void *hash_entry)
15560 const struct toc_hash_struct *thc =
15561 (const struct toc_hash_struct *) hash_entry;
15562 return rs6000_hash_constant (thc->key) ^ thc->key_mode;
15565 /* Compare H1 and H2 for equivalence. */
15568 toc_hash_eq (const void *h1, const void *h2)
15570 rtx r1 = ((const struct toc_hash_struct *) h1)->key;
15571 rtx r2 = ((const struct toc_hash_struct *) h2)->key;
15573 if (((const struct toc_hash_struct *) h1)->key_mode
15574 != ((const struct toc_hash_struct *) h2)->key_mode)
15577 return rtx_equal_p (r1, r2);
15580 /* These are the names given by the C++ front-end to vtables, and
15581 vtable-like objects. Ideally, this logic should not be here;
15582 instead, there should be some programmatic way of inquiring as
15583 to whether or not an object is a vtable. */
15585 #define VTABLE_NAME_P(NAME) \
15586 (strncmp ("_vt.", name, strlen ("_vt.")) == 0 \
15587 || strncmp ("_ZTV", name, strlen ("_ZTV")) == 0 \
15588 || strncmp ("_ZTT", name, strlen ("_ZTT")) == 0 \
15589 || strncmp ("_ZTI", name, strlen ("_ZTI")) == 0 \
15590 || strncmp ("_ZTC", name, strlen ("_ZTC")) == 0)
15593 rs6000_output_symbol_ref (FILE *file, rtx x)
15595 /* Currently C++ toc references to vtables can be emitted before it
15596 is decided whether the vtable is public or private. If this is
15597 the case, then the linker will eventually complain that there is
15598 a reference to an unknown section. Thus, for vtables only,
15599 we emit the TOC reference to reference the symbol and not the
15601 const char *name = XSTR (x, 0);
15603 if (VTABLE_NAME_P (name))
15605 RS6000_OUTPUT_BASENAME (file, name);
15608 assemble_name (file, name);
15611 /* Output a TOC entry. We derive the entry name from what is being
15615 output_toc (FILE *file, rtx x, int labelno, enum machine_mode mode)
15618 const char *name = buf;
15619 const char *real_name;
15621 HOST_WIDE_INT offset = 0;
15623 gcc_assert (!TARGET_NO_TOC);
15625 /* When the linker won't eliminate them, don't output duplicate
15626 TOC entries (this happens on AIX if there is any kind of TOC,
15627 and on SVR4 under -fPIC or -mrelocatable). Don't do this for
15629 if (TARGET_TOC && GET_CODE (x) != LABEL_REF)
15631 struct toc_hash_struct *h;
15634 /* Create toc_hash_table. This can't be done at OVERRIDE_OPTIONS
15635 time because GGC is not initialized at that point. */
15636 if (toc_hash_table == NULL)
15637 toc_hash_table = htab_create_ggc (1021, toc_hash_function,
15638 toc_hash_eq, NULL);
15640 h = ggc_alloc (sizeof (*h));
15642 h->key_mode = mode;
15643 h->labelno = labelno;
15645 found = htab_find_slot (toc_hash_table, h, 1);
15646 if (*found == NULL)
15648 else /* This is indeed a duplicate.
15649 Set this label equal to that label. */
15651 fputs ("\t.set ", file);
15652 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
15653 fprintf (file, "%d,", labelno);
15654 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
15655 fprintf (file, "%d\n", ((*(const struct toc_hash_struct **)
15661 /* If we're going to put a double constant in the TOC, make sure it's
15662 aligned properly when strict alignment is on. */
15663 if (GET_CODE (x) == CONST_DOUBLE
15664 && STRICT_ALIGNMENT
15665 && GET_MODE_BITSIZE (mode) >= 64
15666 && ! (TARGET_NO_FP_IN_TOC && ! TARGET_MINIMAL_TOC)) {
15667 ASM_OUTPUT_ALIGN (file, 3);
15670 (*targetm.asm_out.internal_label) (file, "LC", labelno);
15672 /* Handle FP constants specially. Note that if we have a minimal
15673 TOC, things we put here aren't actually in the TOC, so we can allow
15675 if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == TFmode)
15677 REAL_VALUE_TYPE rv;
15680 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
15681 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);
15685 if (TARGET_MINIMAL_TOC)
15686 fputs (DOUBLE_INT_ASM_OP, file);
15688 fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
15689 k[0] & 0xffffffff, k[1] & 0xffffffff,
15690 k[2] & 0xffffffff, k[3] & 0xffffffff);
15691 fprintf (file, "0x%lx%08lx,0x%lx%08lx\n",
15692 k[0] & 0xffffffff, k[1] & 0xffffffff,
15693 k[2] & 0xffffffff, k[3] & 0xffffffff);
15698 if (TARGET_MINIMAL_TOC)
15699 fputs ("\t.long ", file);
15701 fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
15702 k[0] & 0xffffffff, k[1] & 0xffffffff,
15703 k[2] & 0xffffffff, k[3] & 0xffffffff);
15704 fprintf (file, "0x%lx,0x%lx,0x%lx,0x%lx\n",
15705 k[0] & 0xffffffff, k[1] & 0xffffffff,
15706 k[2] & 0xffffffff, k[3] & 0xffffffff);
15710 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
15712 REAL_VALUE_TYPE rv;
15715 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
15716 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
15720 if (TARGET_MINIMAL_TOC)
15721 fputs (DOUBLE_INT_ASM_OP, file);
15723 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
15724 k[0] & 0xffffffff, k[1] & 0xffffffff);
15725 fprintf (file, "0x%lx%08lx\n",
15726 k[0] & 0xffffffff, k[1] & 0xffffffff);
15731 if (TARGET_MINIMAL_TOC)
15732 fputs ("\t.long ", file);
15734 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
15735 k[0] & 0xffffffff, k[1] & 0xffffffff);
15736 fprintf (file, "0x%lx,0x%lx\n",
15737 k[0] & 0xffffffff, k[1] & 0xffffffff);
15741 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
15743 REAL_VALUE_TYPE rv;
15746 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
15747 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
15751 if (TARGET_MINIMAL_TOC)
15752 fputs (DOUBLE_INT_ASM_OP, file);
15754 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
15755 fprintf (file, "0x%lx00000000\n", l & 0xffffffff);
15760 if (TARGET_MINIMAL_TOC)
15761 fputs ("\t.long ", file);
15763 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
15764 fprintf (file, "0x%lx\n", l & 0xffffffff);
15768 else if (GET_MODE (x) == VOIDmode
15769 && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
15771 unsigned HOST_WIDE_INT low;
15772 HOST_WIDE_INT high;
15774 if (GET_CODE (x) == CONST_DOUBLE)
15776 low = CONST_DOUBLE_LOW (x);
15777 high = CONST_DOUBLE_HIGH (x);
15780 #if HOST_BITS_PER_WIDE_INT == 32
15783 high = (low & 0x80000000) ? ~0 : 0;
15787 low = INTVAL (x) & 0xffffffff;
15788 high = (HOST_WIDE_INT) INTVAL (x) >> 32;
15792 /* TOC entries are always Pmode-sized, but since this
15793 is a bigendian machine then if we're putting smaller
15794 integer constants in the TOC we have to pad them.
15795 (This is still a win over putting the constants in
15796 a separate constant pool, because then we'd have
15797 to have both a TOC entry _and_ the actual constant.)
15799 For a 32-bit target, CONST_INT values are loaded and shifted
15800 entirely within `low' and can be stored in one TOC entry. */
15802 /* It would be easy to make this work, but it doesn't now. */
15803 gcc_assert (!TARGET_64BIT || POINTER_SIZE >= GET_MODE_BITSIZE (mode));
15805 if (POINTER_SIZE > GET_MODE_BITSIZE (mode))
15807 #if HOST_BITS_PER_WIDE_INT == 32
15808 lshift_double (low, high, POINTER_SIZE - GET_MODE_BITSIZE (mode),
15809 POINTER_SIZE, &low, &high, 0);
15812 low <<= POINTER_SIZE - GET_MODE_BITSIZE (mode);
15813 high = (HOST_WIDE_INT) low >> 32;
15820 if (TARGET_MINIMAL_TOC)
15821 fputs (DOUBLE_INT_ASM_OP, file);
15823 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
15824 (long) high & 0xffffffff, (long) low & 0xffffffff);
15825 fprintf (file, "0x%lx%08lx\n",
15826 (long) high & 0xffffffff, (long) low & 0xffffffff);
15831 if (POINTER_SIZE < GET_MODE_BITSIZE (mode))
15833 if (TARGET_MINIMAL_TOC)
15834 fputs ("\t.long ", file);
15836 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
15837 (long) high & 0xffffffff, (long) low & 0xffffffff);
15838 fprintf (file, "0x%lx,0x%lx\n",
15839 (long) high & 0xffffffff, (long) low & 0xffffffff);
15843 if (TARGET_MINIMAL_TOC)
15844 fputs ("\t.long ", file);
15846 fprintf (file, "\t.tc IS_%lx[TC],", (long) low & 0xffffffff);
15847 fprintf (file, "0x%lx\n", (long) low & 0xffffffff);
15853 if (GET_CODE (x) == CONST)
15855 gcc_assert (GET_CODE (XEXP (x, 0)) == PLUS);
15857 base = XEXP (XEXP (x, 0), 0);
15858 offset = INTVAL (XEXP (XEXP (x, 0), 1));
15861 switch (GET_CODE (base))
15864 name = XSTR (base, 0);
15868 ASM_GENERATE_INTERNAL_LABEL (buf, "L",
15869 CODE_LABEL_NUMBER (XEXP (base, 0)));
15873 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (base));
15877 gcc_unreachable ();
15880 real_name = (*targetm.strip_name_encoding) (name);
15881 if (TARGET_MINIMAL_TOC)
15882 fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
15885 fprintf (file, "\t.tc %s", real_name);
15888 fprintf (file, ".N" HOST_WIDE_INT_PRINT_UNSIGNED, - offset);
15890 fprintf (file, ".P" HOST_WIDE_INT_PRINT_UNSIGNED, offset);
15892 fputs ("[TC],", file);
15895 /* Currently C++ toc references to vtables can be emitted before it
15896 is decided whether the vtable is public or private. If this is
15897 the case, then the linker will eventually complain that there is
15898 a TOC reference to an unknown section. Thus, for vtables only,
15899 we emit the TOC reference to reference the symbol and not the
15901 if (VTABLE_NAME_P (name))
15903 RS6000_OUTPUT_BASENAME (file, name);
15905 fprintf (file, HOST_WIDE_INT_PRINT_DEC, offset);
15906 else if (offset > 0)
15907 fprintf (file, "+" HOST_WIDE_INT_PRINT_DEC, offset);
15910 output_addr_const (file, x);
15914 /* Output an assembler pseudo-op to write an ASCII string of N characters
15915 starting at P to FILE.
15917 On the RS/6000, we have to do this using the .byte operation and
15918 write out special characters outside the quoted string.
15919 Also, the assembler is broken; very long strings are truncated,
15920 so we must artificially break them up early. */
15923 output_ascii (FILE *file, const char *p, int n)
15926 int i, count_string;
15927 const char *for_string = "\t.byte \"";
15928 const char *for_decimal = "\t.byte ";
15929 const char *to_close = NULL;
15932 for (i = 0; i < n; i++)
15935 if (c >= ' ' && c < 0177)
15938 fputs (for_string, file);
15941 /* Write two quotes to get one. */
15949 for_decimal = "\"\n\t.byte ";
15953 if (count_string >= 512)
15955 fputs (to_close, file);
15957 for_string = "\t.byte \"";
15958 for_decimal = "\t.byte ";
15966 fputs (for_decimal, file);
15967 fprintf (file, "%d", c);
15969 for_string = "\n\t.byte \"";
15970 for_decimal = ", ";
15976 /* Now close the string if we have written one. Then end the line. */
15978 fputs (to_close, file);
15981 /* Generate a unique section name for FILENAME for a section type
15982 represented by SECTION_DESC. Output goes into BUF.
15984 SECTION_DESC can be any string, as long as it is different for each
15985 possible section type.
15987 We name the section in the same manner as xlc. The name begins with an
15988 underscore followed by the filename (after stripping any leading directory
15989 names) with the last period replaced by the string SECTION_DESC. If
15990 FILENAME does not contain a period, SECTION_DESC is appended to the end of
15994 rs6000_gen_section_name (char **buf, const char *filename,
15995 const char *section_desc)
15997 const char *q, *after_last_slash, *last_period = 0;
16001 after_last_slash = filename;
16002 for (q = filename; *q; q++)
16005 after_last_slash = q + 1;
16006 else if (*q == '.')
16010 len = strlen (after_last_slash) + strlen (section_desc) + 2;
16011 *buf = (char *) xmalloc (len);
16016 for (q = after_last_slash; *q; q++)
16018 if (q == last_period)
16020 strcpy (p, section_desc);
16021 p += strlen (section_desc);
16025 else if (ISALNUM (*q))
16029 if (last_period == 0)
16030 strcpy (p, section_desc);
16035 /* Emit profile function. */
16038 output_profile_hook (int labelno ATTRIBUTE_UNUSED)
16040 /* Non-standard profiling for kernels, which just saves LR then calls
16041 _mcount without worrying about arg saves. The idea is to change
16042 the function prologue as little as possible as it isn't easy to
16043 account for arg save/restore code added just for _mcount. */
16044 if (TARGET_PROFILE_KERNEL)
16047 if (DEFAULT_ABI == ABI_AIX)
16049 #ifndef NO_PROFILE_COUNTERS
16050 # define NO_PROFILE_COUNTERS 0
16052 if (NO_PROFILE_COUNTERS)
16053 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 0);
16057 const char *label_name;
16060 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
16061 label_name = (*targetm.strip_name_encoding) (ggc_strdup (buf));
16062 fun = gen_rtx_SYMBOL_REF (Pmode, label_name);
16064 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 1,
16068 else if (DEFAULT_ABI == ABI_DARWIN)
16070 const char *mcount_name = RS6000_MCOUNT;
16071 int caller_addr_regno = LINK_REGISTER_REGNUM;
16073 /* Be conservative and always set this, at least for now. */
16074 current_function_uses_pic_offset_table = 1;
16077 /* For PIC code, set up a stub and collect the caller's address
16078 from r0, which is where the prologue puts it. */
16079 if (MACHOPIC_INDIRECT
16080 && current_function_uses_pic_offset_table)
16081 caller_addr_regno = 0;
16083 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, mcount_name),
16085 gen_rtx_REG (Pmode, caller_addr_regno), Pmode);
16089 /* Write function profiler code. */
16092 output_function_profiler (FILE *file, int labelno)
16096 switch (DEFAULT_ABI)
16099 gcc_unreachable ();
16104 warning (0, "no profiling of 64-bit code for this ABI");
16107 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
16108 fprintf (file, "\tmflr %s\n", reg_names[0]);
16109 if (NO_PROFILE_COUNTERS)
16111 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
16112 reg_names[0], reg_names[1]);
16114 else if (TARGET_SECURE_PLT && flag_pic)
16116 asm_fprintf (file, "\tbcl 20,31,1f\n1:\n\t{st|stw} %s,4(%s)\n",
16117 reg_names[0], reg_names[1]);
16118 asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
16119 asm_fprintf (file, "\t{cau|addis} %s,%s,",
16120 reg_names[12], reg_names[12]);
16121 assemble_name (file, buf);
16122 asm_fprintf (file, "-1b@ha\n\t{cal|la} %s,", reg_names[0]);
16123 assemble_name (file, buf);
16124 asm_fprintf (file, "-1b@l(%s)\n", reg_names[12]);
16126 else if (flag_pic == 1)
16128 fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file);
16129 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
16130 reg_names[0], reg_names[1]);
16131 asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
16132 asm_fprintf (file, "\t{l|lwz} %s,", reg_names[0]);
16133 assemble_name (file, buf);
16134 asm_fprintf (file, "@got(%s)\n", reg_names[12]);
16136 else if (flag_pic > 1)
16138 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
16139 reg_names[0], reg_names[1]);
16140 /* Now, we need to get the address of the label. */
16141 fputs ("\tbcl 20,31,1f\n\t.long ", file);
16142 assemble_name (file, buf);
16143 fputs ("-.\n1:", file);
16144 asm_fprintf (file, "\tmflr %s\n", reg_names[11]);
16145 asm_fprintf (file, "\t{l|lwz} %s,0(%s)\n",
16146 reg_names[0], reg_names[11]);
16147 asm_fprintf (file, "\t{cax|add} %s,%s,%s\n",
16148 reg_names[0], reg_names[0], reg_names[11]);
16152 asm_fprintf (file, "\t{liu|lis} %s,", reg_names[12]);
16153 assemble_name (file, buf);
16154 fputs ("@ha\n", file);
16155 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
16156 reg_names[0], reg_names[1]);
16157 asm_fprintf (file, "\t{cal|la} %s,", reg_names[0]);
16158 assemble_name (file, buf);
16159 asm_fprintf (file, "@l(%s)\n", reg_names[12]);
16162 /* ABI_V4 saves the static chain reg with ASM_OUTPUT_REG_PUSH. */
16163 fprintf (file, "\tbl %s%s\n",
16164 RS6000_MCOUNT, flag_pic ? "@plt" : "");
16169 if (!TARGET_PROFILE_KERNEL)
16171 /* Don't do anything, done in output_profile_hook (). */
16175 gcc_assert (!TARGET_32BIT);
16177 asm_fprintf (file, "\tmflr %s\n", reg_names[0]);
16178 asm_fprintf (file, "\tstd %s,16(%s)\n", reg_names[0], reg_names[1]);
16180 if (cfun->static_chain_decl != NULL)
16182 asm_fprintf (file, "\tstd %s,24(%s)\n",
16183 reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
16184 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
16185 asm_fprintf (file, "\tld %s,24(%s)\n",
16186 reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
16189 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
16196 /* Power4 load update and store update instructions are cracked into a
16197 load or store and an integer insn which are executed in the same cycle.
16198 Branches have their own dispatch slot which does not count against the
16199 GCC issue rate, but it changes the program flow so there are no other
16200 instructions to issue in this cycle. */
16203 rs6000_variable_issue (FILE *stream ATTRIBUTE_UNUSED,
16204 int verbose ATTRIBUTE_UNUSED,
16205 rtx insn, int more)
16207 if (GET_CODE (PATTERN (insn)) == USE
16208 || GET_CODE (PATTERN (insn)) == CLOBBER)
16211 if (rs6000_sched_groups)
16213 if (is_microcoded_insn (insn))
16215 else if (is_cracked_insn (insn))
16216 return more > 2 ? more - 2 : 0;
16222 /* Adjust the cost of a scheduling dependency. Return the new cost of
16223 a dependency LINK or INSN on DEP_INSN. COST is the current cost. */
16226 rs6000_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost)
16228 if (! recog_memoized (insn))
16231 if (REG_NOTE_KIND (link) != 0)
16234 if (REG_NOTE_KIND (link) == 0)
16236 /* Data dependency; DEP_INSN writes a register that INSN reads
16237 some cycles later. */
16239 /* Separate a load from a narrower, dependent store. */
16240 if (rs6000_sched_groups
16241 && GET_CODE (PATTERN (insn)) == SET
16242 && GET_CODE (PATTERN (dep_insn)) == SET
16243 && GET_CODE (XEXP (PATTERN (insn), 1)) == MEM
16244 && GET_CODE (XEXP (PATTERN (dep_insn), 0)) == MEM
16245 && (GET_MODE_SIZE (GET_MODE (XEXP (PATTERN (insn), 1)))
16246 > GET_MODE_SIZE (GET_MODE (XEXP (PATTERN (dep_insn), 0)))))
16249 switch (get_attr_type (insn))
16252 /* Tell the first scheduling pass about the latency between
16253 a mtctr and bctr (and mtlr and br/blr). The first
16254 scheduling pass will not know about this latency since
16255 the mtctr instruction, which has the latency associated
16256 to it, will be generated by reload. */
16257 return TARGET_POWER ? 5 : 4;
16259 /* Leave some extra cycles between a compare and its
16260 dependent branch, to inhibit expensive mispredicts. */
16261 if ((rs6000_cpu_attr == CPU_PPC603
16262 || rs6000_cpu_attr == CPU_PPC604
16263 || rs6000_cpu_attr == CPU_PPC604E
16264 || rs6000_cpu_attr == CPU_PPC620
16265 || rs6000_cpu_attr == CPU_PPC630
16266 || rs6000_cpu_attr == CPU_PPC750
16267 || rs6000_cpu_attr == CPU_PPC7400
16268 || rs6000_cpu_attr == CPU_PPC7450
16269 || rs6000_cpu_attr == CPU_POWER4
16270 || rs6000_cpu_attr == CPU_POWER5)
16271 && recog_memoized (dep_insn)
16272 && (INSN_CODE (dep_insn) >= 0)
16273 && (get_attr_type (dep_insn) == TYPE_CMP
16274 || get_attr_type (dep_insn) == TYPE_COMPARE
16275 || get_attr_type (dep_insn) == TYPE_DELAYED_COMPARE
16276 || get_attr_type (dep_insn) == TYPE_IMUL_COMPARE
16277 || get_attr_type (dep_insn) == TYPE_LMUL_COMPARE
16278 || get_attr_type (dep_insn) == TYPE_FPCOMPARE
16279 || get_attr_type (dep_insn) == TYPE_CR_LOGICAL
16280 || get_attr_type (dep_insn) == TYPE_DELAYED_CR))
16285 /* Fall out to return default cost. */
16291 /* The function returns a true if INSN is microcoded.
16292 Return false otherwise. */
16295 is_microcoded_insn (rtx insn)
16297 if (!insn || !INSN_P (insn)
16298 || GET_CODE (PATTERN (insn)) == USE
16299 || GET_CODE (PATTERN (insn)) == CLOBBER)
16302 if (rs6000_sched_groups)
16304 enum attr_type type = get_attr_type (insn);
16305 if (type == TYPE_LOAD_EXT_U
16306 || type == TYPE_LOAD_EXT_UX
16307 || type == TYPE_LOAD_UX
16308 || type == TYPE_STORE_UX
16309 || type == TYPE_MFCR)
16316 /* The function returns a nonzero value if INSN can be scheduled only
16317 as the first insn in a dispatch group ("dispatch-slot restricted").
16318 In this case, the returned value indicates how many dispatch slots
16319 the insn occupies (at the beginning of the group).
16320 Return 0 otherwise. */
16323 is_dispatch_slot_restricted (rtx insn)
16325 enum attr_type type;
16327 if (!rs6000_sched_groups)
16331 || insn == NULL_RTX
16332 || GET_CODE (insn) == NOTE
16333 || GET_CODE (PATTERN (insn)) == USE
16334 || GET_CODE (PATTERN (insn)) == CLOBBER)
16337 type = get_attr_type (insn);
16344 case TYPE_DELAYED_CR:
16345 case TYPE_CR_LOGICAL:
16358 if (rs6000_cpu == PROCESSOR_POWER5
16359 && is_cracked_insn (insn))
16365 /* The function returns true if INSN is cracked into 2 instructions
16366 by the processor (and therefore occupies 2 issue slots). */
16369 is_cracked_insn (rtx insn)
16371 if (!insn || !INSN_P (insn)
16372 || GET_CODE (PATTERN (insn)) == USE
16373 || GET_CODE (PATTERN (insn)) == CLOBBER)
16376 if (rs6000_sched_groups)
16378 enum attr_type type = get_attr_type (insn);
16379 if (type == TYPE_LOAD_U || type == TYPE_STORE_U
16380 || type == TYPE_FPLOAD_U || type == TYPE_FPSTORE_U
16381 || type == TYPE_FPLOAD_UX || type == TYPE_FPSTORE_UX
16382 || type == TYPE_LOAD_EXT || type == TYPE_DELAYED_CR
16383 || type == TYPE_COMPARE || type == TYPE_DELAYED_COMPARE
16384 || type == TYPE_IMUL_COMPARE || type == TYPE_LMUL_COMPARE
16385 || type == TYPE_IDIV || type == TYPE_LDIV
16386 || type == TYPE_INSERT_WORD)
16393 /* The function returns true if INSN can be issued only from
16394 the branch slot. */
16397 is_branch_slot_insn (rtx insn)
16399 if (!insn || !INSN_P (insn)
16400 || GET_CODE (PATTERN (insn)) == USE
16401 || GET_CODE (PATTERN (insn)) == CLOBBER)
16404 if (rs6000_sched_groups)
16406 enum attr_type type = get_attr_type (insn);
16407 if (type == TYPE_BRANCH || type == TYPE_JMPREG)
16415 /* A C statement (sans semicolon) to update the integer scheduling
16416 priority INSN_PRIORITY (INSN). Increase the priority to execute the
16417 INSN earlier, reduce the priority to execute INSN later. Do not
16418 define this macro if you do not need to adjust the scheduling
16419 priorities of insns. */
16422 rs6000_adjust_priority (rtx insn ATTRIBUTE_UNUSED, int priority)
16424 /* On machines (like the 750) which have asymmetric integer units,
16425 where one integer unit can do multiply and divides and the other
16426 can't, reduce the priority of multiply/divide so it is scheduled
16427 before other integer operations. */
16430 if (! INSN_P (insn))
16433 if (GET_CODE (PATTERN (insn)) == USE)
16436 switch (rs6000_cpu_attr) {
16438 switch (get_attr_type (insn))
16445 fprintf (stderr, "priority was %#x (%d) before adjustment\n",
16446 priority, priority);
16447 if (priority >= 0 && priority < 0x01000000)
16454 if (is_dispatch_slot_restricted (insn)
16455 && reload_completed
16456 && current_sched_info->sched_max_insns_priority
16457 && rs6000_sched_restricted_insns_priority)
16460 /* Prioritize insns that can be dispatched only in the first
16462 if (rs6000_sched_restricted_insns_priority == 1)
16463 /* Attach highest priority to insn. This means that in
16464 haifa-sched.c:ready_sort(), dispatch-slot restriction considerations
16465 precede 'priority' (critical path) considerations. */
16466 return current_sched_info->sched_max_insns_priority;
16467 else if (rs6000_sched_restricted_insns_priority == 2)
16468 /* Increase priority of insn by a minimal amount. This means that in
16469 haifa-sched.c:ready_sort(), only 'priority' (critical path)
16470 considerations precede dispatch-slot restriction considerations. */
16471 return (priority + 1);
16477 /* Return how many instructions the machine can issue per cycle. */
16480 rs6000_issue_rate (void)
16482 /* Use issue rate of 1 for first scheduling pass to decrease degradation. */
16483 if (!reload_completed)
16486 switch (rs6000_cpu_attr) {
16487 case CPU_RIOS1: /* ? */
16489 case CPU_PPC601: /* ? */
16512 /* Return how many instructions to look ahead for better insn
16516 rs6000_use_sched_lookahead (void)
16518 if (rs6000_cpu_attr == CPU_PPC8540)
16523 /* Determine is PAT refers to memory. */
16526 is_mem_ref (rtx pat)
16532 if (GET_CODE (pat) == MEM)
16535 /* Recursively process the pattern. */
16536 fmt = GET_RTX_FORMAT (GET_CODE (pat));
16538 for (i = GET_RTX_LENGTH (GET_CODE (pat)) - 1; i >= 0 && !ret; i--)
16541 ret |= is_mem_ref (XEXP (pat, i));
16542 else if (fmt[i] == 'E')
16543 for (j = XVECLEN (pat, i) - 1; j >= 0; j--)
16544 ret |= is_mem_ref (XVECEXP (pat, i, j));
16550 /* Determine if PAT is a PATTERN of a load insn. */
16553 is_load_insn1 (rtx pat)
16555 if (!pat || pat == NULL_RTX)
16558 if (GET_CODE (pat) == SET)
16559 return is_mem_ref (SET_SRC (pat));
16561 if (GET_CODE (pat) == PARALLEL)
16565 for (i = 0; i < XVECLEN (pat, 0); i++)
16566 if (is_load_insn1 (XVECEXP (pat, 0, i)))
16573 /* Determine if INSN loads from memory. */
16576 is_load_insn (rtx insn)
16578 if (!insn || !INSN_P (insn))
16581 if (GET_CODE (insn) == CALL_INSN)
16584 return is_load_insn1 (PATTERN (insn));
16587 /* Determine if PAT is a PATTERN of a store insn. */
16590 is_store_insn1 (rtx pat)
16592 if (!pat || pat == NULL_RTX)
16595 if (GET_CODE (pat) == SET)
16596 return is_mem_ref (SET_DEST (pat));
16598 if (GET_CODE (pat) == PARALLEL)
16602 for (i = 0; i < XVECLEN (pat, 0); i++)
16603 if (is_store_insn1 (XVECEXP (pat, 0, i)))
16610 /* Determine if INSN stores to memory. */
16613 is_store_insn (rtx insn)
16615 if (!insn || !INSN_P (insn))
16618 return is_store_insn1 (PATTERN (insn));
16621 /* Returns whether the dependence between INSN and NEXT is considered
16622 costly by the given target. */
16625 rs6000_is_costly_dependence (rtx insn, rtx next, rtx link, int cost,
16628 /* If the flag is not enabled - no dependence is considered costly;
16629 allow all dependent insns in the same group.
16630 This is the most aggressive option. */
16631 if (rs6000_sched_costly_dep == no_dep_costly)
16634 /* If the flag is set to 1 - a dependence is always considered costly;
16635 do not allow dependent instructions in the same group.
16636 This is the most conservative option. */
16637 if (rs6000_sched_costly_dep == all_deps_costly)
16640 if (rs6000_sched_costly_dep == store_to_load_dep_costly
16641 && is_load_insn (next)
16642 && is_store_insn (insn))
16643 /* Prevent load after store in the same group. */
16646 if (rs6000_sched_costly_dep == true_store_to_load_dep_costly
16647 && is_load_insn (next)
16648 && is_store_insn (insn)
16649 && (!link || (int) REG_NOTE_KIND (link) == 0))
16650 /* Prevent load after store in the same group if it is a true
16654 /* The flag is set to X; dependences with latency >= X are considered costly,
16655 and will not be scheduled in the same group. */
16656 if (rs6000_sched_costly_dep <= max_dep_latency
16657 && ((cost - distance) >= (int)rs6000_sched_costly_dep))
16663 /* Return the next insn after INSN that is found before TAIL is reached,
16664 skipping any "non-active" insns - insns that will not actually occupy
16665 an issue slot. Return NULL_RTX if such an insn is not found. */
16668 get_next_active_insn (rtx insn, rtx tail)
16670 if (insn == NULL_RTX || insn == tail)
16675 insn = NEXT_INSN (insn);
16676 if (insn == NULL_RTX || insn == tail)
16681 || (NONJUMP_INSN_P (insn)
16682 && GET_CODE (PATTERN (insn)) != USE
16683 && GET_CODE (PATTERN (insn)) != CLOBBER
16684 && INSN_CODE (insn) != CODE_FOR_stack_tie))
16690 /* Return whether the presence of INSN causes a dispatch group termination
16691 of group WHICH_GROUP.
16693 If WHICH_GROUP == current_group, this function will return true if INSN
16694 causes the termination of the current group (i.e, the dispatch group to
16695 which INSN belongs). This means that INSN will be the last insn in the
16696 group it belongs to.
16698 If WHICH_GROUP == previous_group, this function will return true if INSN
16699 causes the termination of the previous group (i.e, the dispatch group that
16700 precedes the group to which INSN belongs). This means that INSN will be
16701 the first insn in the group it belongs to). */
16704 insn_terminates_group_p (rtx insn, enum group_termination which_group)
16706 enum attr_type type;
16711 type = get_attr_type (insn);
16713 if (is_microcoded_insn (insn))
16716 if (which_group == current_group)
16718 if (is_branch_slot_insn (insn))
16722 else if (which_group == previous_group)
16724 if (is_dispatch_slot_restricted (insn))
16732 /* Return true if it is recommended to keep NEXT_INSN "far" (in a separate
16733 dispatch group) from the insns in GROUP_INSNS. Return false otherwise. */
16736 is_costly_group (rtx *group_insns, rtx next_insn)
16741 int issue_rate = rs6000_issue_rate ();
16743 for (i = 0; i < issue_rate; i++)
16745 rtx insn = group_insns[i];
16748 for (link = INSN_DEPEND (insn); link != 0; link = XEXP (link, 1))
16750 rtx next = XEXP (link, 0);
16751 if (next == next_insn)
16753 cost = insn_cost (insn, link, next_insn);
16754 if (rs6000_is_costly_dependence (insn, next_insn, link, cost, 0))
16763 /* Utility of the function redefine_groups.
16764 Check if it is too costly to schedule NEXT_INSN together with GROUP_INSNS
16765 in the same dispatch group. If so, insert nops before NEXT_INSN, in order
16766 to keep it "far" (in a separate group) from GROUP_INSNS, following
16767 one of the following schemes, depending on the value of the flag
16768 -minsert_sched_nops = X:
16769 (1) X == sched_finish_regroup_exact: insert exactly as many nops as needed
16770 in order to force NEXT_INSN into a separate group.
16771 (2) X < sched_finish_regroup_exact: insert exactly X nops.
16772 GROUP_END, CAN_ISSUE_MORE and GROUP_COUNT record the state after nop
16773 insertion (has a group just ended, how many vacant issue slots remain in the
16774 last group, and how many dispatch groups were encountered so far). */
16777 force_new_group (int sched_verbose, FILE *dump, rtx *group_insns,
16778 rtx next_insn, bool *group_end, int can_issue_more,
16783 int issue_rate = rs6000_issue_rate ();
16784 bool end = *group_end;
16787 if (next_insn == NULL_RTX)
16788 return can_issue_more;
16790 if (rs6000_sched_insert_nops > sched_finish_regroup_exact)
16791 return can_issue_more;
16793 force = is_costly_group (group_insns, next_insn);
16795 return can_issue_more;
16797 if (sched_verbose > 6)
16798 fprintf (dump,"force: group count = %d, can_issue_more = %d\n",
16799 *group_count ,can_issue_more);
16801 if (rs6000_sched_insert_nops == sched_finish_regroup_exact)
16804 can_issue_more = 0;
16806 /* Since only a branch can be issued in the last issue_slot, it is
16807 sufficient to insert 'can_issue_more - 1' nops if next_insn is not
16808 a branch. If next_insn is a branch, we insert 'can_issue_more' nops;
16809 in this case the last nop will start a new group and the branch
16810 will be forced to the new group. */
16811 if (can_issue_more && !is_branch_slot_insn (next_insn))
16814 while (can_issue_more > 0)
16817 emit_insn_before (nop, next_insn);
16825 if (rs6000_sched_insert_nops < sched_finish_regroup_exact)
16827 int n_nops = rs6000_sched_insert_nops;
16829 /* Nops can't be issued from the branch slot, so the effective
16830 issue_rate for nops is 'issue_rate - 1'. */
16831 if (can_issue_more == 0)
16832 can_issue_more = issue_rate;
16834 if (can_issue_more == 0)
16836 can_issue_more = issue_rate - 1;
16839 for (i = 0; i < issue_rate; i++)
16841 group_insns[i] = 0;
16848 emit_insn_before (nop, next_insn);
16849 if (can_issue_more == issue_rate - 1) /* new group begins */
16852 if (can_issue_more == 0)
16854 can_issue_more = issue_rate - 1;
16857 for (i = 0; i < issue_rate; i++)
16859 group_insns[i] = 0;
16865 /* Scale back relative to 'issue_rate' (instead of 'issue_rate - 1'). */
16868 /* Is next_insn going to start a new group? */
16871 || (can_issue_more == 1 && !is_branch_slot_insn (next_insn))
16872 || (can_issue_more <= 2 && is_cracked_insn (next_insn))
16873 || (can_issue_more < issue_rate &&
16874 insn_terminates_group_p (next_insn, previous_group)));
16875 if (*group_end && end)
16878 if (sched_verbose > 6)
16879 fprintf (dump, "done force: group count = %d, can_issue_more = %d\n",
16880 *group_count, can_issue_more);
16881 return can_issue_more;
16884 return can_issue_more;
16887 /* This function tries to synch the dispatch groups that the compiler "sees"
16888 with the dispatch groups that the processor dispatcher is expected to
16889 form in practice. It tries to achieve this synchronization by forcing the
16890 estimated processor grouping on the compiler (as opposed to the function
16891 'pad_goups' which tries to force the scheduler's grouping on the processor).
16893 The function scans the insn sequence between PREV_HEAD_INSN and TAIL and
16894 examines the (estimated) dispatch groups that will be formed by the processor
16895 dispatcher. It marks these group boundaries to reflect the estimated
16896 processor grouping, overriding the grouping that the scheduler had marked.
16897 Depending on the value of the flag '-minsert-sched-nops' this function can
16898 force certain insns into separate groups or force a certain distance between
16899 them by inserting nops, for example, if there exists a "costly dependence"
16902 The function estimates the group boundaries that the processor will form as
16903 follows: It keeps track of how many vacant issue slots are available after
16904 each insn. A subsequent insn will start a new group if one of the following
16906 - no more vacant issue slots remain in the current dispatch group.
16907 - only the last issue slot, which is the branch slot, is vacant, but the next
16908 insn is not a branch.
16909 - only the last 2 or less issue slots, including the branch slot, are vacant,
16910 which means that a cracked insn (which occupies two issue slots) can't be
16911 issued in this group.
16912 - less than 'issue_rate' slots are vacant, and the next insn always needs to
16913 start a new group. */
16916 redefine_groups (FILE *dump, int sched_verbose, rtx prev_head_insn, rtx tail)
16918 rtx insn, next_insn;
16920 int can_issue_more;
16923 int group_count = 0;
16927 issue_rate = rs6000_issue_rate ();
16928 group_insns = alloca (issue_rate * sizeof (rtx));
16929 for (i = 0; i < issue_rate; i++)
16931 group_insns[i] = 0;
16933 can_issue_more = issue_rate;
16935 insn = get_next_active_insn (prev_head_insn, tail);
16938 while (insn != NULL_RTX)
16940 slot = (issue_rate - can_issue_more);
16941 group_insns[slot] = insn;
16943 rs6000_variable_issue (dump, sched_verbose, insn, can_issue_more);
16944 if (insn_terminates_group_p (insn, current_group))
16945 can_issue_more = 0;
16947 next_insn = get_next_active_insn (insn, tail);
16948 if (next_insn == NULL_RTX)
16949 return group_count + 1;
16951 /* Is next_insn going to start a new group? */
16953 = (can_issue_more == 0
16954 || (can_issue_more == 1 && !is_branch_slot_insn (next_insn))
16955 || (can_issue_more <= 2 && is_cracked_insn (next_insn))
16956 || (can_issue_more < issue_rate &&
16957 insn_terminates_group_p (next_insn, previous_group)));
16959 can_issue_more = force_new_group (sched_verbose, dump, group_insns,
16960 next_insn, &group_end, can_issue_more,
16966 can_issue_more = 0;
16967 for (i = 0; i < issue_rate; i++)
16969 group_insns[i] = 0;
16973 if (GET_MODE (next_insn) == TImode && can_issue_more)
16974 PUT_MODE (next_insn, VOIDmode);
16975 else if (!can_issue_more && GET_MODE (next_insn) != TImode)
16976 PUT_MODE (next_insn, TImode);
16979 if (can_issue_more == 0)
16980 can_issue_more = issue_rate;
16983 return group_count;
16986 /* Scan the insn sequence between PREV_HEAD_INSN and TAIL and examine the
16987 dispatch group boundaries that the scheduler had marked. Pad with nops
16988 any dispatch groups which have vacant issue slots, in order to force the
16989 scheduler's grouping on the processor dispatcher. The function
16990 returns the number of dispatch groups found. */
16993 pad_groups (FILE *dump, int sched_verbose, rtx prev_head_insn, rtx tail)
16995 rtx insn, next_insn;
16998 int can_issue_more;
17000 int group_count = 0;
17002 /* Initialize issue_rate. */
17003 issue_rate = rs6000_issue_rate ();
17004 can_issue_more = issue_rate;
17006 insn = get_next_active_insn (prev_head_insn, tail);
17007 next_insn = get_next_active_insn (insn, tail);
17009 while (insn != NULL_RTX)
17012 rs6000_variable_issue (dump, sched_verbose, insn, can_issue_more);
17014 group_end = (next_insn == NULL_RTX || GET_MODE (next_insn) == TImode);
17016 if (next_insn == NULL_RTX)
17021 /* If the scheduler had marked group termination at this location
17022 (between insn and next_indn), and neither insn nor next_insn will
17023 force group termination, pad the group with nops to force group
17026 && (rs6000_sched_insert_nops == sched_finish_pad_groups)
17027 && !insn_terminates_group_p (insn, current_group)
17028 && !insn_terminates_group_p (next_insn, previous_group))
17030 if (!is_branch_slot_insn (next_insn))
17033 while (can_issue_more)
17036 emit_insn_before (nop, next_insn);
17041 can_issue_more = issue_rate;
17046 next_insn = get_next_active_insn (insn, tail);
17049 return group_count;
17052 /* The following function is called at the end of scheduling BB.
17053 After reload, it inserts nops at insn group bundling. */
17056 rs6000_sched_finish (FILE *dump, int sched_verbose)
17061 fprintf (dump, "=== Finishing schedule.\n");
17063 if (reload_completed && rs6000_sched_groups)
17065 if (rs6000_sched_insert_nops == sched_finish_none)
17068 if (rs6000_sched_insert_nops == sched_finish_pad_groups)
17069 n_groups = pad_groups (dump, sched_verbose,
17070 current_sched_info->prev_head,
17071 current_sched_info->next_tail);
17073 n_groups = redefine_groups (dump, sched_verbose,
17074 current_sched_info->prev_head,
17075 current_sched_info->next_tail);
17077 if (sched_verbose >= 6)
17079 fprintf (dump, "ngroups = %d\n", n_groups);
17080 print_rtl (dump, current_sched_info->prev_head);
17081 fprintf (dump, "Done finish_sched\n");
17086 /* Length in units of the trampoline for entering a nested function. */
17089 rs6000_trampoline_size (void)
17093 switch (DEFAULT_ABI)
17096 gcc_unreachable ();
17099 ret = (TARGET_32BIT) ? 12 : 24;
17104 ret = (TARGET_32BIT) ? 40 : 48;
17111 /* Emit RTL insns to initialize the variable parts of a trampoline.
17112 FNADDR is an RTX for the address of the function's pure code.
17113 CXT is an RTX for the static chain value for the function. */
17116 rs6000_initialize_trampoline (rtx addr, rtx fnaddr, rtx cxt)
17118 enum machine_mode pmode = Pmode;
17119 int regsize = (TARGET_32BIT) ? 4 : 8;
17120 rtx ctx_reg = force_reg (pmode, cxt);
17122 switch (DEFAULT_ABI)
17125 gcc_unreachable ();
17127 /* Macros to shorten the code expansions below. */
17128 #define MEM_DEREF(addr) gen_rtx_MEM (pmode, memory_address (pmode, addr))
17129 #define MEM_PLUS(addr,offset) \
17130 gen_rtx_MEM (pmode, memory_address (pmode, plus_constant (addr, offset)))
17132 /* Under AIX, just build the 3 word function descriptor */
17135 rtx fn_reg = gen_reg_rtx (pmode);
17136 rtx toc_reg = gen_reg_rtx (pmode);
17137 emit_move_insn (fn_reg, MEM_DEREF (fnaddr));
17138 emit_move_insn (toc_reg, MEM_PLUS (fnaddr, regsize));
17139 emit_move_insn (MEM_DEREF (addr), fn_reg);
17140 emit_move_insn (MEM_PLUS (addr, regsize), toc_reg);
17141 emit_move_insn (MEM_PLUS (addr, 2*regsize), ctx_reg);
17145 /* Under V.4/eabi/darwin, __trampoline_setup does the real work. */
17148 emit_library_call (gen_rtx_SYMBOL_REF (SImode, "__trampoline_setup"),
17149 FALSE, VOIDmode, 4,
17151 GEN_INT (rs6000_trampoline_size ()), SImode,
17161 /* Table of valid machine attributes. */
17163 const struct attribute_spec rs6000_attribute_table[] =
17165 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
17166 { "altivec", 1, 1, false, true, false, rs6000_handle_altivec_attribute },
17167 { "longcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
17168 { "shortcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
17169 #ifdef SUBTARGET_ATTRIBUTE_TABLE
17170 SUBTARGET_ATTRIBUTE_TABLE,
17172 { NULL, 0, 0, false, false, false, NULL }
17175 /* Handle the "altivec" attribute. The attribute may have
17176 arguments as follows:
17178 __attribute__((altivec(vector__)))
17179 __attribute__((altivec(pixel__))) (always followed by 'unsigned short')
17180 __attribute__((altivec(bool__))) (always followed by 'unsigned')
17182 and may appear more than once (e.g., 'vector bool char') in a
17183 given declaration. */
17186 rs6000_handle_altivec_attribute (tree *node,
17187 tree name ATTRIBUTE_UNUSED,
17189 int flags ATTRIBUTE_UNUSED,
17190 bool *no_add_attrs)
17192 tree type = *node, result = NULL_TREE;
17193 enum machine_mode mode;
17196 = ((args && TREE_CODE (args) == TREE_LIST && TREE_VALUE (args)
17197 && TREE_CODE (TREE_VALUE (args)) == IDENTIFIER_NODE)
17198 ? *IDENTIFIER_POINTER (TREE_VALUE (args))
17201 while (POINTER_TYPE_P (type)
17202 || TREE_CODE (type) == FUNCTION_TYPE
17203 || TREE_CODE (type) == METHOD_TYPE
17204 || TREE_CODE (type) == ARRAY_TYPE)
17205 type = TREE_TYPE (type);
17207 mode = TYPE_MODE (type);
17209 /* Check for invalid AltiVec type qualifiers. */
17210 if (type == long_unsigned_type_node || type == long_integer_type_node)
17213 error ("use of %<long%> in AltiVec types is invalid for 64-bit code");
17214 else if (rs6000_warn_altivec_long)
17215 warning (0, "use of %<long%> in AltiVec types is deprecated; use %<int%>");
17217 else if (type == long_long_unsigned_type_node
17218 || type == long_long_integer_type_node)
17219 error ("use of %<long long%> in AltiVec types is invalid");
17220 else if (type == double_type_node)
17221 error ("use of %<double%> in AltiVec types is invalid");
17222 else if (type == long_double_type_node)
17223 error ("use of %<long double%> in AltiVec types is invalid");
17224 else if (type == boolean_type_node)
17225 error ("use of boolean types in AltiVec types is invalid");
17226 else if (TREE_CODE (type) == COMPLEX_TYPE)
17227 error ("use of %<complex%> in AltiVec types is invalid");
17229 switch (altivec_type)
17232 unsigned_p = TYPE_UNSIGNED (type);
17236 result = (unsigned_p ? unsigned_V4SI_type_node : V4SI_type_node);
17239 result = (unsigned_p ? unsigned_V8HI_type_node : V8HI_type_node);
17242 result = (unsigned_p ? unsigned_V16QI_type_node : V16QI_type_node);
17244 case SFmode: result = V4SF_type_node; break;
17245 /* If the user says 'vector int bool', we may be handed the 'bool'
17246 attribute _before_ the 'vector' attribute, and so select the
17247 proper type in the 'b' case below. */
17248 case V4SImode: case V8HImode: case V16QImode: case V4SFmode:
17256 case SImode: case V4SImode: result = bool_V4SI_type_node; break;
17257 case HImode: case V8HImode: result = bool_V8HI_type_node; break;
17258 case QImode: case V16QImode: result = bool_V16QI_type_node;
17265 case V8HImode: result = pixel_V8HI_type_node;
17271 if (result && result != type && TYPE_READONLY (type))
17272 result = build_qualified_type (result, TYPE_QUAL_CONST);
17274 *no_add_attrs = true; /* No need to hang on to the attribute. */
17277 *node = reconstruct_complex_type (*node, result);
17282 /* AltiVec defines four built-in scalar types that serve as vector
17283 elements; we must teach the compiler how to mangle them. */
17285 static const char *
17286 rs6000_mangle_fundamental_type (tree type)
17288 if (type == bool_char_type_node) return "U6__boolc";
17289 if (type == bool_short_type_node) return "U6__bools";
17290 if (type == pixel_type_node) return "u7__pixel";
17291 if (type == bool_int_type_node) return "U6__booli";
17293 /* For all other types, use normal C++ mangling. */
17297 /* Handle a "longcall" or "shortcall" attribute; arguments as in
17298 struct attribute_spec.handler. */
17301 rs6000_handle_longcall_attribute (tree *node, tree name,
17302 tree args ATTRIBUTE_UNUSED,
17303 int flags ATTRIBUTE_UNUSED,
17304 bool *no_add_attrs)
17306 if (TREE_CODE (*node) != FUNCTION_TYPE
17307 && TREE_CODE (*node) != FIELD_DECL
17308 && TREE_CODE (*node) != TYPE_DECL)
17310 warning (OPT_Wattributes, "%qs attribute only applies to functions",
17311 IDENTIFIER_POINTER (name));
17312 *no_add_attrs = true;
17318 /* Set longcall attributes on all functions declared when
17319 rs6000_default_long_calls is true. */
17321 rs6000_set_default_type_attributes (tree type)
17323 if (rs6000_default_long_calls
17324 && (TREE_CODE (type) == FUNCTION_TYPE
17325 || TREE_CODE (type) == METHOD_TYPE))
17326 TYPE_ATTRIBUTES (type) = tree_cons (get_identifier ("longcall"),
17328 TYPE_ATTRIBUTES (type));
17331 /* Return a reference suitable for calling a function with the
17332 longcall attribute. */
17335 rs6000_longcall_ref (rtx call_ref)
17337 const char *call_name;
17340 if (GET_CODE (call_ref) != SYMBOL_REF)
17343 /* System V adds '.' to the internal name, so skip them. */
17344 call_name = XSTR (call_ref, 0);
17345 if (*call_name == '.')
17347 while (*call_name == '.')
17350 node = get_identifier (call_name);
17351 call_ref = gen_rtx_SYMBOL_REF (VOIDmode, IDENTIFIER_POINTER (node));
17354 return force_reg (Pmode, call_ref);
17357 #ifdef USING_ELFOS_H
17359 /* A C statement or statements to switch to the appropriate section
17360 for output of RTX in mode MODE. You can assume that RTX is some
17361 kind of constant in RTL. The argument MODE is redundant except in
17362 the case of a `const_int' rtx. Select the section by calling
17363 `text_section' or one of the alternatives for other sections.
17365 Do not define this macro if you put all constants in the read-only
17369 rs6000_elf_select_rtx_section (enum machine_mode mode, rtx x,
17370 unsigned HOST_WIDE_INT align)
17372 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
17375 default_elf_select_rtx_section (mode, x, align);
17378 /* A C statement or statements to switch to the appropriate
17379 section for output of DECL. DECL is either a `VAR_DECL' node
17380 or a constant of some sort. RELOC indicates whether forming
17381 the initial value of DECL requires link-time relocations. */
17384 rs6000_elf_select_section (tree decl, int reloc,
17385 unsigned HOST_WIDE_INT align)
17387 /* Pretend that we're always building for a shared library when
17388 ABI_AIX, because otherwise we end up with dynamic relocations
17389 in read-only sections. This happens for function pointers,
17390 references to vtables in typeinfo, and probably other cases. */
17391 default_elf_select_section_1 (decl, reloc, align,
17392 flag_pic || DEFAULT_ABI == ABI_AIX);
17395 /* A C statement to build up a unique section name, expressed as a
17396 STRING_CST node, and assign it to DECL_SECTION_NAME (decl).
17397 RELOC indicates whether the initial value of EXP requires
17398 link-time relocations. If you do not define this macro, GCC will use
17399 the symbol name prefixed by `.' as the section name. Note - this
17400 macro can now be called for uninitialized data items as well as
17401 initialized data and functions. */
17404 rs6000_elf_unique_section (tree decl, int reloc)
17406 /* As above, pretend that we're always building for a shared library
17407 when ABI_AIX, to avoid dynamic relocations in read-only sections. */
17408 default_unique_section_1 (decl, reloc,
17409 flag_pic || DEFAULT_ABI == ABI_AIX);
17412 /* For a SYMBOL_REF, set generic flags and then perform some
17413 target-specific processing.
17415 When the AIX ABI is requested on a non-AIX system, replace the
17416 function name with the real name (with a leading .) rather than the
17417 function descriptor name. This saves a lot of overriding code to
17418 read the prefixes. */
17421 rs6000_elf_encode_section_info (tree decl, rtx rtl, int first)
17423 default_encode_section_info (decl, rtl, first);
17426 && TREE_CODE (decl) == FUNCTION_DECL
17428 && DEFAULT_ABI == ABI_AIX)
17430 rtx sym_ref = XEXP (rtl, 0);
17431 size_t len = strlen (XSTR (sym_ref, 0));
17432 char *str = alloca (len + 2);
17434 memcpy (str + 1, XSTR (sym_ref, 0), len + 1);
17435 XSTR (sym_ref, 0) = ggc_alloc_string (str, len + 1);
17440 rs6000_elf_in_small_data_p (tree decl)
17442 if (rs6000_sdata == SDATA_NONE)
17445 /* We want to merge strings, so we never consider them small data. */
17446 if (TREE_CODE (decl) == STRING_CST)
17449 /* Functions are never in the small data area. */
17450 if (TREE_CODE (decl) == FUNCTION_DECL)
17453 if (TREE_CODE (decl) == VAR_DECL && DECL_SECTION_NAME (decl))
17455 const char *section = TREE_STRING_POINTER (DECL_SECTION_NAME (decl));
17456 if (strcmp (section, ".sdata") == 0
17457 || strcmp (section, ".sdata2") == 0
17458 || strcmp (section, ".sbss") == 0
17459 || strcmp (section, ".sbss2") == 0
17460 || strcmp (section, ".PPC.EMB.sdata0") == 0
17461 || strcmp (section, ".PPC.EMB.sbss0") == 0)
17466 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
17469 && (unsigned HOST_WIDE_INT) size <= g_switch_value
17470 /* If it's not public, and we're not going to reference it there,
17471 there's no need to put it in the small data section. */
17472 && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)))
17479 #endif /* USING_ELFOS_H */
17482 /* Return a REG that occurs in ADDR with coefficient 1.
17483 ADDR can be effectively incremented by incrementing REG.
17485 r0 is special and we must not select it as an address
17486 register by this routine since our caller will try to
17487 increment the returned register via an "la" instruction. */
17490 find_addr_reg (rtx addr)
17492 while (GET_CODE (addr) == PLUS)
17494 if (GET_CODE (XEXP (addr, 0)) == REG
17495 && REGNO (XEXP (addr, 0)) != 0)
17496 addr = XEXP (addr, 0);
17497 else if (GET_CODE (XEXP (addr, 1)) == REG
17498 && REGNO (XEXP (addr, 1)) != 0)
17499 addr = XEXP (addr, 1);
17500 else if (CONSTANT_P (XEXP (addr, 0)))
17501 addr = XEXP (addr, 1);
17502 else if (CONSTANT_P (XEXP (addr, 1)))
17503 addr = XEXP (addr, 0);
17505 gcc_unreachable ();
17507 gcc_assert (GET_CODE (addr) == REG && REGNO (addr) != 0);
17512 rs6000_fatal_bad_address (rtx op)
17514 fatal_insn ("bad address", op);
17519 static tree branch_island_list = 0;
17521 /* Remember to generate a branch island for far calls to the given
17525 add_compiler_branch_island (tree label_name, tree function_name,
17528 tree branch_island = build_tree_list (function_name, label_name);
17529 TREE_TYPE (branch_island) = build_int_cst (NULL_TREE, line_number);
17530 TREE_CHAIN (branch_island) = branch_island_list;
17531 branch_island_list = branch_island;
17534 #define BRANCH_ISLAND_LABEL_NAME(BRANCH_ISLAND) TREE_VALUE (BRANCH_ISLAND)
17535 #define BRANCH_ISLAND_FUNCTION_NAME(BRANCH_ISLAND) TREE_PURPOSE (BRANCH_ISLAND)
17536 #define BRANCH_ISLAND_LINE_NUMBER(BRANCH_ISLAND) \
17537 TREE_INT_CST_LOW (TREE_TYPE (BRANCH_ISLAND))
17539 /* Generate far-jump branch islands for everything on the
17540 branch_island_list. Invoked immediately after the last instruction
17541 of the epilogue has been emitted; the branch-islands must be
17542 appended to, and contiguous with, the function body. Mach-O stubs
17543 are generated in machopic_output_stub(). */
17546 macho_branch_islands (void)
17549 tree branch_island;
17551 for (branch_island = branch_island_list;
17553 branch_island = TREE_CHAIN (branch_island))
17555 const char *label =
17556 IDENTIFIER_POINTER (BRANCH_ISLAND_LABEL_NAME (branch_island));
17558 IDENTIFIER_POINTER (BRANCH_ISLAND_FUNCTION_NAME (branch_island));
17559 char name_buf[512];
17560 /* Cheap copy of the details from the Darwin ASM_OUTPUT_LABELREF(). */
17561 if (name[0] == '*' || name[0] == '&')
17562 strcpy (name_buf, name+1);
17566 strcpy (name_buf+1, name);
17568 strcpy (tmp_buf, "\n");
17569 strcat (tmp_buf, label);
17570 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
17571 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
17572 dbxout_stabd (N_SLINE, BRANCH_ISLAND_LINE_NUMBER (branch_island));
17573 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
17576 strcat (tmp_buf, ":\n\tmflr r0\n\tbcl 20,31,");
17577 strcat (tmp_buf, label);
17578 strcat (tmp_buf, "_pic\n");
17579 strcat (tmp_buf, label);
17580 strcat (tmp_buf, "_pic:\n\tmflr r11\n");
17582 strcat (tmp_buf, "\taddis r11,r11,ha16(");
17583 strcat (tmp_buf, name_buf);
17584 strcat (tmp_buf, " - ");
17585 strcat (tmp_buf, label);
17586 strcat (tmp_buf, "_pic)\n");
17588 strcat (tmp_buf, "\tmtlr r0\n");
17590 strcat (tmp_buf, "\taddi r12,r11,lo16(");
17591 strcat (tmp_buf, name_buf);
17592 strcat (tmp_buf, " - ");
17593 strcat (tmp_buf, label);
17594 strcat (tmp_buf, "_pic)\n");
17596 strcat (tmp_buf, "\tmtctr r12\n\tbctr\n");
17600 strcat (tmp_buf, ":\nlis r12,hi16(");
17601 strcat (tmp_buf, name_buf);
17602 strcat (tmp_buf, ")\n\tori r12,r12,lo16(");
17603 strcat (tmp_buf, name_buf);
17604 strcat (tmp_buf, ")\n\tmtctr r12\n\tbctr");
17606 output_asm_insn (tmp_buf, 0);
17607 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
17608 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
17609 dbxout_stabd (N_SLINE, BRANCH_ISLAND_LINE_NUMBER (branch_island));
17610 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
17613 branch_island_list = 0;
17616 /* NO_PREVIOUS_DEF checks in the link list whether the function name is
17617 already there or not. */
17620 no_previous_def (tree function_name)
17622 tree branch_island;
17623 for (branch_island = branch_island_list;
17625 branch_island = TREE_CHAIN (branch_island))
17626 if (function_name == BRANCH_ISLAND_FUNCTION_NAME (branch_island))
17631 /* GET_PREV_LABEL gets the label name from the previous definition of
17635 get_prev_label (tree function_name)
17637 tree branch_island;
17638 for (branch_island = branch_island_list;
17640 branch_island = TREE_CHAIN (branch_island))
17641 if (function_name == BRANCH_ISLAND_FUNCTION_NAME (branch_island))
17642 return BRANCH_ISLAND_LABEL_NAME (branch_island);
17646 /* INSN is either a function call or a millicode call. It may have an
17647 unconditional jump in its delay slot.
17649 CALL_DEST is the routine we are calling. */
17652 output_call (rtx insn, rtx *operands, int dest_operand_number,
17653 int cookie_operand_number)
17655 static char buf[256];
17656 if (GET_CODE (operands[dest_operand_number]) == SYMBOL_REF
17657 && (INTVAL (operands[cookie_operand_number]) & CALL_LONG))
17660 tree funname = get_identifier (XSTR (operands[dest_operand_number], 0));
17662 if (no_previous_def (funname))
17664 int line_number = 0;
17665 rtx label_rtx = gen_label_rtx ();
17666 char *label_buf, temp_buf[256];
17667 ASM_GENERATE_INTERNAL_LABEL (temp_buf, "L",
17668 CODE_LABEL_NUMBER (label_rtx));
17669 label_buf = temp_buf[0] == '*' ? temp_buf + 1 : temp_buf;
17670 labelname = get_identifier (label_buf);
17671 for (; insn && GET_CODE (insn) != NOTE; insn = PREV_INSN (insn));
17673 line_number = NOTE_LINE_NUMBER (insn);
17674 add_compiler_branch_island (labelname, funname, line_number);
17677 labelname = get_prev_label (funname);
17679 /* "jbsr foo, L42" is Mach-O for "Link as 'bl foo' if a 'bl'
17680 instruction will reach 'foo', otherwise link as 'bl L42'".
17681 "L42" should be a 'branch island', that will do a far jump to
17682 'foo'. Branch islands are generated in
17683 macho_branch_islands(). */
17684 sprintf (buf, "jbsr %%z%d,%.246s",
17685 dest_operand_number, IDENTIFIER_POINTER (labelname));
17688 sprintf (buf, "bl %%z%d", dest_operand_number);
17692 /* Generate PIC and indirect symbol stubs. */
17695 machopic_output_stub (FILE *file, const char *symb, const char *stub)
17697 unsigned int length;
17698 char *symbol_name, *lazy_ptr_name;
17699 char *local_label_0;
17700 static int label = 0;
17702 /* Lose our funky encoding stuff so it doesn't contaminate the stub. */
17703 symb = (*targetm.strip_name_encoding) (symb);
17706 length = strlen (symb);
17707 symbol_name = alloca (length + 32);
17708 GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name, symb, length);
17710 lazy_ptr_name = alloca (length + 32);
17711 GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name, symb, length);
17714 machopic_picsymbol_stub1_section ();
17716 machopic_symbol_stub1_section ();
17720 fprintf (file, "\t.align 5\n");
17722 fprintf (file, "%s:\n", stub);
17723 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
17726 local_label_0 = alloca (sizeof ("\"L00000000000$spb\""));
17727 sprintf (local_label_0, "\"L%011d$spb\"", label);
17729 fprintf (file, "\tmflr r0\n");
17730 fprintf (file, "\tbcl 20,31,%s\n", local_label_0);
17731 fprintf (file, "%s:\n\tmflr r11\n", local_label_0);
17732 fprintf (file, "\taddis r11,r11,ha16(%s-%s)\n",
17733 lazy_ptr_name, local_label_0);
17734 fprintf (file, "\tmtlr r0\n");
17735 fprintf (file, "\t%s r12,lo16(%s-%s)(r11)\n",
17736 (TARGET_64BIT ? "ldu" : "lwzu"),
17737 lazy_ptr_name, local_label_0);
17738 fprintf (file, "\tmtctr r12\n");
17739 fprintf (file, "\tbctr\n");
17743 fprintf (file, "\t.align 4\n");
17745 fprintf (file, "%s:\n", stub);
17746 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
17748 fprintf (file, "\tlis r11,ha16(%s)\n", lazy_ptr_name);
17749 fprintf (file, "\t%s r12,lo16(%s)(r11)\n",
17750 (TARGET_64BIT ? "ldu" : "lwzu"),
17752 fprintf (file, "\tmtctr r12\n");
17753 fprintf (file, "\tbctr\n");
17756 machopic_lazy_symbol_ptr_section ();
17757 fprintf (file, "%s:\n", lazy_ptr_name);
17758 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
17759 fprintf (file, "%sdyld_stub_binding_helper\n",
17760 (TARGET_64BIT ? DOUBLE_INT_ASM_OP : "\t.long\t"));
17763 /* Legitimize PIC addresses. If the address is already
17764 position-independent, we return ORIG. Newly generated
17765 position-independent addresses go into a reg. This is REG if non
17766 zero, otherwise we allocate register(s) as necessary. */
17768 #define SMALL_INT(X) ((unsigned) (INTVAL (X) + 0x8000) < 0x10000)
17771 rs6000_machopic_legitimize_pic_address (rtx orig, enum machine_mode mode,
17776 if (reg == NULL && ! reload_in_progress && ! reload_completed)
17777 reg = gen_reg_rtx (Pmode);
17779 if (GET_CODE (orig) == CONST)
17783 if (GET_CODE (XEXP (orig, 0)) == PLUS
17784 && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
17787 gcc_assert (GET_CODE (XEXP (orig, 0)) == PLUS);
17789 /* Use a different reg for the intermediate value, as
17790 it will be marked UNCHANGING. */
17791 reg_temp = no_new_pseudos ? reg : gen_reg_rtx (Pmode);
17792 base = rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 0),
17795 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 1),
17798 if (GET_CODE (offset) == CONST_INT)
17800 if (SMALL_INT (offset))
17801 return plus_constant (base, INTVAL (offset));
17802 else if (! reload_in_progress && ! reload_completed)
17803 offset = force_reg (Pmode, offset);
17806 rtx mem = force_const_mem (Pmode, orig);
17807 return machopic_legitimize_pic_address (mem, Pmode, reg);
17810 return gen_rtx_PLUS (Pmode, base, offset);
17813 /* Fall back on generic machopic code. */
17814 return machopic_legitimize_pic_address (orig, mode, reg);
17817 /* This is just a placeholder to make linking work without having to
17818 add this to the generic Darwin EXTRA_SECTIONS. If -mcall-aix is
17819 ever needed for Darwin (not too likely!) this would have to get a
17820 real definition. */
17827 /* Output a .machine directive for the Darwin assembler, and call
17828 the generic start_file routine. */
17831 rs6000_darwin_file_start (void)
17833 static const struct
17839 { "ppc64", "ppc64", MASK_64BIT },
17840 { "970", "ppc970", MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64 },
17841 { "power4", "ppc970", 0 },
17842 { "G5", "ppc970", 0 },
17843 { "7450", "ppc7450", 0 },
17844 { "7400", "ppc7400", MASK_ALTIVEC },
17845 { "G4", "ppc7400", 0 },
17846 { "750", "ppc750", 0 },
17847 { "740", "ppc750", 0 },
17848 { "G3", "ppc750", 0 },
17849 { "604e", "ppc604e", 0 },
17850 { "604", "ppc604", 0 },
17851 { "603e", "ppc603", 0 },
17852 { "603", "ppc603", 0 },
17853 { "601", "ppc601", 0 },
17854 { NULL, "ppc", 0 } };
17855 const char *cpu_id = "";
17858 rs6000_file_start ();
17860 /* Determine the argument to -mcpu=. Default to G3 if not specified. */
17861 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
17862 if (rs6000_select[i].set_arch_p && rs6000_select[i].string
17863 && rs6000_select[i].string[0] != '\0')
17864 cpu_id = rs6000_select[i].string;
17866 /* Look through the mapping array. Pick the first name that either
17867 matches the argument, has a bit set in IF_SET that is also set
17868 in the target flags, or has a NULL name. */
17871 while (mapping[i].arg != NULL
17872 && strcmp (mapping[i].arg, cpu_id) != 0
17873 && (mapping[i].if_set & target_flags) == 0)
17876 fprintf (asm_out_file, "\t.machine %s\n", mapping[i].name);
17879 #endif /* TARGET_MACHO */
17882 static unsigned int
17883 rs6000_elf_section_type_flags (tree decl, const char *name, int reloc)
17885 return default_section_type_flags_1 (decl, name, reloc,
17886 flag_pic || DEFAULT_ABI == ABI_AIX);
17889 /* Record an element in the table of global constructors. SYMBOL is
17890 a SYMBOL_REF of the function to be called; PRIORITY is a number
17891 between 0 and MAX_INIT_PRIORITY.
17893 This differs from default_named_section_asm_out_constructor in
17894 that we have special handling for -mrelocatable. */
17897 rs6000_elf_asm_out_constructor (rtx symbol, int priority)
17899 const char *section = ".ctors";
17902 if (priority != DEFAULT_INIT_PRIORITY)
17904 sprintf (buf, ".ctors.%.5u",
17905 /* Invert the numbering so the linker puts us in the proper
17906 order; constructors are run from right to left, and the
17907 linker sorts in increasing order. */
17908 MAX_INIT_PRIORITY - priority);
17912 named_section_flags (section, SECTION_WRITE);
17913 assemble_align (POINTER_SIZE);
17915 if (TARGET_RELOCATABLE)
17917 fputs ("\t.long (", asm_out_file);
17918 output_addr_const (asm_out_file, symbol);
17919 fputs (")@fixup\n", asm_out_file);
17922 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
17926 rs6000_elf_asm_out_destructor (rtx symbol, int priority)
17928 const char *section = ".dtors";
17931 if (priority != DEFAULT_INIT_PRIORITY)
17933 sprintf (buf, ".dtors.%.5u",
17934 /* Invert the numbering so the linker puts us in the proper
17935 order; constructors are run from right to left, and the
17936 linker sorts in increasing order. */
17937 MAX_INIT_PRIORITY - priority);
17941 named_section_flags (section, SECTION_WRITE);
17942 assemble_align (POINTER_SIZE);
17944 if (TARGET_RELOCATABLE)
17946 fputs ("\t.long (", asm_out_file);
17947 output_addr_const (asm_out_file, symbol);
17948 fputs (")@fixup\n", asm_out_file);
17951 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
17955 rs6000_elf_declare_function_name (FILE *file, const char *name, tree decl)
17959 fputs ("\t.section\t\".opd\",\"aw\"\n\t.align 3\n", file);
17960 ASM_OUTPUT_LABEL (file, name);
17961 fputs (DOUBLE_INT_ASM_OP, file);
17962 rs6000_output_function_entry (file, name);
17963 fputs (",.TOC.@tocbase,0\n\t.previous\n", file);
17966 fputs ("\t.size\t", file);
17967 assemble_name (file, name);
17968 fputs (",24\n\t.type\t.", file);
17969 assemble_name (file, name);
17970 fputs (",@function\n", file);
17971 if (TREE_PUBLIC (decl) && ! DECL_WEAK (decl))
17973 fputs ("\t.globl\t.", file);
17974 assemble_name (file, name);
17979 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
17980 ASM_DECLARE_RESULT (file, DECL_RESULT (decl));
17981 rs6000_output_function_entry (file, name);
17982 fputs (":\n", file);
17986 if (TARGET_RELOCATABLE
17987 && !TARGET_SECURE_PLT
17988 && (get_pool_size () != 0 || current_function_profile)
17993 (*targetm.asm_out.internal_label) (file, "LCL", rs6000_pic_labelno);
17995 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
17996 fprintf (file, "\t.long ");
17997 assemble_name (file, buf);
17999 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
18000 assemble_name (file, buf);
18004 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
18005 ASM_DECLARE_RESULT (file, DECL_RESULT (decl));
18007 if (DEFAULT_ABI == ABI_AIX)
18009 const char *desc_name, *orig_name;
18011 orig_name = (*targetm.strip_name_encoding) (name);
18012 desc_name = orig_name;
18013 while (*desc_name == '.')
18016 if (TREE_PUBLIC (decl))
18017 fprintf (file, "\t.globl %s\n", desc_name);
18019 fprintf (file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
18020 fprintf (file, "%s:\n", desc_name);
18021 fprintf (file, "\t.long %s\n", orig_name);
18022 fputs ("\t.long _GLOBAL_OFFSET_TABLE_\n", file);
18023 if (DEFAULT_ABI == ABI_AIX)
18024 fputs ("\t.long 0\n", file);
18025 fprintf (file, "\t.previous\n");
18027 ASM_OUTPUT_LABEL (file, name);
18031 rs6000_elf_end_indicate_exec_stack (void)
18034 file_end_indicate_exec_stack ();
18040 rs6000_xcoff_asm_globalize_label (FILE *stream, const char *name)
18042 fputs (GLOBAL_ASM_OP, stream);
18043 RS6000_OUTPUT_BASENAME (stream, name);
18044 putc ('\n', stream);
18048 rs6000_xcoff_asm_named_section (const char *name, unsigned int flags,
18049 tree decl ATTRIBUTE_UNUSED)
18052 static const char * const suffix[3] = { "PR", "RO", "RW" };
18054 if (flags & SECTION_CODE)
18056 else if (flags & SECTION_WRITE)
18061 fprintf (asm_out_file, "\t.csect %s%s[%s],%u\n",
18062 (flags & SECTION_CODE) ? "." : "",
18063 name, suffix[smclass], flags & SECTION_ENTSIZE);
18067 rs6000_xcoff_select_section (tree decl, int reloc,
18068 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
18070 if (decl_readonly_section_1 (decl, reloc, 1))
18072 if (TREE_PUBLIC (decl))
18073 read_only_data_section ();
18075 read_only_private_data_section ();
18079 if (TREE_PUBLIC (decl))
18082 private_data_section ();
18087 rs6000_xcoff_unique_section (tree decl, int reloc ATTRIBUTE_UNUSED)
18091 /* Use select_section for private and uninitialized data. */
18092 if (!TREE_PUBLIC (decl)
18093 || DECL_COMMON (decl)
18094 || DECL_INITIAL (decl) == NULL_TREE
18095 || DECL_INITIAL (decl) == error_mark_node
18096 || (flag_zero_initialized_in_bss
18097 && initializer_zerop (DECL_INITIAL (decl))))
18100 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
18101 name = (*targetm.strip_name_encoding) (name);
18102 DECL_SECTION_NAME (decl) = build_string (strlen (name), name);
18105 /* Select section for constant in constant pool.
18107 On RS/6000, all constants are in the private read-only data area.
18108 However, if this is being placed in the TOC it must be output as a
18112 rs6000_xcoff_select_rtx_section (enum machine_mode mode, rtx x,
18113 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
18115 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
18118 read_only_private_data_section ();
18121 /* Remove any trailing [DS] or the like from the symbol name. */
18123 static const char *
18124 rs6000_xcoff_strip_name_encoding (const char *name)
18129 len = strlen (name);
18130 if (name[len - 1] == ']')
18131 return ggc_alloc_string (name, len - 4);
18136 /* Section attributes. AIX is always PIC. */
18138 static unsigned int
18139 rs6000_xcoff_section_type_flags (tree decl, const char *name, int reloc)
18141 unsigned int align;
18142 unsigned int flags = default_section_type_flags_1 (decl, name, reloc, 1);
18144 /* Align to at least UNIT size. */
18145 if (flags & SECTION_CODE)
18146 align = MIN_UNITS_PER_WORD;
18148 /* Increase alignment of large objects if not already stricter. */
18149 align = MAX ((DECL_ALIGN (decl) / BITS_PER_UNIT),
18150 int_size_in_bytes (TREE_TYPE (decl)) > MIN_UNITS_PER_WORD
18151 ? UNITS_PER_FP_WORD : MIN_UNITS_PER_WORD);
18153 return flags | (exact_log2 (align) & SECTION_ENTSIZE);
18156 /* Output at beginning of assembler file.
18158 Initialize the section names for the RS/6000 at this point.
18160 Specify filename, including full path, to assembler.
18162 We want to go into the TOC section so at least one .toc will be emitted.
18163 Also, in order to output proper .bs/.es pairs, we need at least one static
18164 [RW] section emitted.
18166 Finally, declare mcount when profiling to make the assembler happy. */
18169 rs6000_xcoff_file_start (void)
18171 rs6000_gen_section_name (&xcoff_bss_section_name,
18172 main_input_filename, ".bss_");
18173 rs6000_gen_section_name (&xcoff_private_data_section_name,
18174 main_input_filename, ".rw_");
18175 rs6000_gen_section_name (&xcoff_read_only_section_name,
18176 main_input_filename, ".ro_");
18178 fputs ("\t.file\t", asm_out_file);
18179 output_quoted_string (asm_out_file, main_input_filename);
18180 fputc ('\n', asm_out_file);
18181 if (write_symbols != NO_DEBUG)
18182 private_data_section ();
18185 fprintf (asm_out_file, "\t.extern %s\n", RS6000_MCOUNT);
18186 rs6000_file_start ();
18189 /* Output at end of assembler file.
18190 On the RS/6000, referencing data should automatically pull in text. */
18193 rs6000_xcoff_file_end (void)
18196 fputs ("_section_.text:\n", asm_out_file);
18198 fputs (TARGET_32BIT
18199 ? "\t.long _section_.text\n" : "\t.llong _section_.text\n",
18202 #endif /* TARGET_XCOFF */
18204 /* Compute a (partial) cost for rtx X. Return true if the complete
18205 cost has been computed, and false if subexpressions should be
18206 scanned. In either case, *TOTAL contains the cost result. */
18209 rs6000_rtx_costs (rtx x, int code, int outer_code, int *total)
18211 enum machine_mode mode = GET_MODE (x);
18215 /* On the RS/6000, if it is valid in the insn, it is free. */
18217 if (((outer_code == SET
18218 || outer_code == PLUS
18219 || outer_code == MINUS)
18220 && (CONST_OK_FOR_LETTER_P (INTVAL (x), 'I')
18221 || CONST_OK_FOR_LETTER_P (INTVAL (x), 'L')))
18222 || (outer_code == AND
18223 && (CONST_OK_FOR_LETTER_P (INTVAL (x), 'K')
18224 || (CONST_OK_FOR_LETTER_P (INTVAL (x),
18225 mode == SImode ? 'L' : 'J'))
18226 || mask_operand (x, mode)
18228 && mask64_operand (x, DImode))))
18229 || ((outer_code == IOR || outer_code == XOR)
18230 && (CONST_OK_FOR_LETTER_P (INTVAL (x), 'K')
18231 || (CONST_OK_FOR_LETTER_P (INTVAL (x),
18232 mode == SImode ? 'L' : 'J'))))
18233 || outer_code == ASHIFT
18234 || outer_code == ASHIFTRT
18235 || outer_code == LSHIFTRT
18236 || outer_code == ROTATE
18237 || outer_code == ROTATERT
18238 || outer_code == ZERO_EXTRACT
18239 || (outer_code == MULT
18240 && CONST_OK_FOR_LETTER_P (INTVAL (x), 'I'))
18241 || ((outer_code == DIV || outer_code == UDIV
18242 || outer_code == MOD || outer_code == UMOD)
18243 && exact_log2 (INTVAL (x)) >= 0)
18244 || (outer_code == COMPARE
18245 && (CONST_OK_FOR_LETTER_P (INTVAL (x), 'I')
18246 || CONST_OK_FOR_LETTER_P (INTVAL (x), 'K')))
18247 || (outer_code == EQ
18248 && (CONST_OK_FOR_LETTER_P (INTVAL (x), 'I')
18249 || CONST_OK_FOR_LETTER_P (INTVAL (x), 'K')
18250 || (CONST_OK_FOR_LETTER_P (INTVAL (x),
18251 mode == SImode ? 'L' : 'J'))))
18252 || (outer_code == GTU
18253 && CONST_OK_FOR_LETTER_P (INTVAL (x), 'I'))
18254 || (outer_code == LTU
18255 && CONST_OK_FOR_LETTER_P (INTVAL (x), 'P')))
18260 else if ((outer_code == PLUS
18261 && reg_or_add_cint_operand (x, VOIDmode))
18262 || (outer_code == MINUS
18263 && reg_or_sub_cint_operand (x, VOIDmode))
18264 || ((outer_code == SET
18265 || outer_code == IOR
18266 || outer_code == XOR)
18268 & ~ (unsigned HOST_WIDE_INT) 0xffffffff) == 0))
18270 *total = COSTS_N_INSNS (1);
18277 && ((outer_code == AND
18278 && (CONST_OK_FOR_LETTER_P (INTVAL (x), 'K')
18279 || CONST_OK_FOR_LETTER_P (INTVAL (x), 'L')
18280 || mask_operand (x, DImode)
18281 || mask64_operand (x, DImode)))
18282 || ((outer_code == IOR || outer_code == XOR)
18283 && CONST_DOUBLE_HIGH (x) == 0
18284 && (CONST_DOUBLE_LOW (x)
18285 & ~ (unsigned HOST_WIDE_INT) 0xffff) == 0)))
18290 else if (mode == DImode
18291 && (outer_code == SET
18292 || outer_code == IOR
18293 || outer_code == XOR)
18294 && CONST_DOUBLE_HIGH (x) == 0)
18296 *total = COSTS_N_INSNS (1);
18305 /* When optimizing for size, MEM should be slightly more expensive
18306 than generating address, e.g., (plus (reg) (const)).
18307 L1 cache latency is about two instructions. */
18308 *total = optimize_size ? COSTS_N_INSNS (1) + 1 : COSTS_N_INSNS (2);
18316 if (mode == DFmode)
18318 if (GET_CODE (XEXP (x, 0)) == MULT)
18320 /* FNMA accounted in outer NEG. */
18321 if (outer_code == NEG)
18322 *total = rs6000_cost->dmul - rs6000_cost->fp;
18324 *total = rs6000_cost->dmul;
18327 *total = rs6000_cost->fp;
18329 else if (mode == SFmode)
18331 /* FNMA accounted in outer NEG. */
18332 if (outer_code == NEG && GET_CODE (XEXP (x, 0)) == MULT)
18335 *total = rs6000_cost->fp;
18338 *total = COSTS_N_INSNS (1);
18342 if (mode == DFmode)
18344 if (GET_CODE (XEXP (x, 0)) == MULT)
18346 /* FNMA accounted in outer NEG. */
18347 if (outer_code == NEG)
18350 *total = rs6000_cost->dmul;
18353 *total = rs6000_cost->fp;
18355 else if (mode == SFmode)
18357 /* FNMA accounted in outer NEG. */
18358 if (outer_code == NEG && GET_CODE (XEXP (x, 0)) == MULT)
18361 *total = rs6000_cost->fp;
18364 *total = COSTS_N_INSNS (1);
18368 if (GET_CODE (XEXP (x, 1)) == CONST_INT
18369 && CONST_OK_FOR_LETTER_P (INTVAL (XEXP (x, 1)), 'I'))
18371 if (INTVAL (XEXP (x, 1)) >= -256
18372 && INTVAL (XEXP (x, 1)) <= 255)
18373 *total = rs6000_cost->mulsi_const9;
18375 *total = rs6000_cost->mulsi_const;
18377 /* FMA accounted in outer PLUS/MINUS. */
18378 else if ((mode == DFmode || mode == SFmode)
18379 && (outer_code == PLUS || outer_code == MINUS))
18381 else if (mode == DFmode)
18382 *total = rs6000_cost->dmul;
18383 else if (mode == SFmode)
18384 *total = rs6000_cost->fp;
18385 else if (mode == DImode)
18386 *total = rs6000_cost->muldi;
18388 *total = rs6000_cost->mulsi;
18393 if (FLOAT_MODE_P (mode))
18395 *total = mode == DFmode ? rs6000_cost->ddiv
18396 : rs6000_cost->sdiv;
18403 if (GET_CODE (XEXP (x, 1)) == CONST_INT
18404 && exact_log2 (INTVAL (XEXP (x, 1))) >= 0)
18406 if (code == DIV || code == MOD)
18408 *total = COSTS_N_INSNS (2);
18411 *total = COSTS_N_INSNS (1);
18415 if (GET_MODE (XEXP (x, 1)) == DImode)
18416 *total = rs6000_cost->divdi;
18418 *total = rs6000_cost->divsi;
18420 /* Add in shift and subtract for MOD. */
18421 if (code == MOD || code == UMOD)
18422 *total += COSTS_N_INSNS (2);
18426 *total = COSTS_N_INSNS (4);
18430 if (outer_code == AND || outer_code == IOR || outer_code == XOR)
18441 *total = COSTS_N_INSNS (1);
18449 /* Handle mul_highpart. */
18450 if (outer_code == TRUNCATE
18451 && GET_CODE (XEXP (x, 0)) == MULT)
18453 if (mode == DImode)
18454 *total = rs6000_cost->muldi;
18456 *total = rs6000_cost->mulsi;
18459 else if (outer_code == AND)
18462 *total = COSTS_N_INSNS (1);
18467 if (GET_CODE (XEXP (x, 0)) == MEM)
18470 *total = COSTS_N_INSNS (1);
18476 if (!FLOAT_MODE_P (mode))
18478 *total = COSTS_N_INSNS (1);
18484 case UNSIGNED_FLOAT:
18487 case FLOAT_TRUNCATE:
18488 *total = rs6000_cost->fp;
18492 if (mode == DFmode)
18495 *total = rs6000_cost->fp;
18499 switch (XINT (x, 1))
18502 *total = rs6000_cost->fp;
18514 *total = COSTS_N_INSNS (1);
18517 else if (FLOAT_MODE_P (mode)
18518 && TARGET_PPC_GFXOPT && TARGET_HARD_FLOAT && TARGET_FPRS)
18520 *total = rs6000_cost->fp;
18528 /* Carry bit requires mode == Pmode.
18529 NEG or PLUS already counted so only add one. */
18531 && (outer_code == NEG || outer_code == PLUS))
18533 *total = COSTS_N_INSNS (1);
18536 if (outer_code == SET)
18538 if (XEXP (x, 1) == const0_rtx)
18540 *total = COSTS_N_INSNS (2);
18543 else if (mode == Pmode)
18545 *total = COSTS_N_INSNS (3);
18554 if (outer_code == SET && (XEXP (x, 1) == const0_rtx))
18556 *total = COSTS_N_INSNS (2);
18560 if (outer_code == COMPARE)
18574 /* A C expression returning the cost of moving data from a register of class
18575 CLASS1 to one of CLASS2. */
18578 rs6000_register_move_cost (enum machine_mode mode,
18579 enum reg_class from, enum reg_class to)
18581 /* Moves from/to GENERAL_REGS. */
18582 if (reg_classes_intersect_p (to, GENERAL_REGS)
18583 || reg_classes_intersect_p (from, GENERAL_REGS))
18585 if (! reg_classes_intersect_p (to, GENERAL_REGS))
18588 if (from == FLOAT_REGS || from == ALTIVEC_REGS)
18589 return (rs6000_memory_move_cost (mode, from, 0)
18590 + rs6000_memory_move_cost (mode, GENERAL_REGS, 0));
18592 /* It's more expensive to move CR_REGS than CR0_REGS because of the
18594 else if (from == CR_REGS)
18598 /* A move will cost one instruction per GPR moved. */
18599 return 2 * hard_regno_nregs[0][mode];
18602 /* Moving between two similar registers is just one instruction. */
18603 else if (reg_classes_intersect_p (to, from))
18604 return mode == TFmode ? 4 : 2;
18606 /* Everything else has to go through GENERAL_REGS. */
18608 return (rs6000_register_move_cost (mode, GENERAL_REGS, to)
18609 + rs6000_register_move_cost (mode, from, GENERAL_REGS));
18612 /* A C expressions returning the cost of moving data of MODE from a register to
18616 rs6000_memory_move_cost (enum machine_mode mode, enum reg_class class,
18617 int in ATTRIBUTE_UNUSED)
18619 if (reg_classes_intersect_p (class, GENERAL_REGS))
18620 return 4 * hard_regno_nregs[0][mode];
18621 else if (reg_classes_intersect_p (class, FLOAT_REGS))
18622 return 4 * hard_regno_nregs[32][mode];
18623 else if (reg_classes_intersect_p (class, ALTIVEC_REGS))
18624 return 4 * hard_regno_nregs[FIRST_ALTIVEC_REGNO][mode];
18626 return 4 + rs6000_register_move_cost (mode, class, GENERAL_REGS);
18629 /* Newton-Raphson approximation of single-precision floating point divide n/d.
18630 Assumes no trapping math and finite arguments. */
18633 rs6000_emit_swdivsf (rtx res, rtx n, rtx d)
18635 rtx x0, e0, e1, y1, u0, v0, one;
18637 x0 = gen_reg_rtx (SFmode);
18638 e0 = gen_reg_rtx (SFmode);
18639 e1 = gen_reg_rtx (SFmode);
18640 y1 = gen_reg_rtx (SFmode);
18641 u0 = gen_reg_rtx (SFmode);
18642 v0 = gen_reg_rtx (SFmode);
18643 one = force_reg (SFmode, CONST_DOUBLE_FROM_REAL_VALUE (dconst1, SFmode));
18645 /* x0 = 1./d estimate */
18646 emit_insn (gen_rtx_SET (VOIDmode, x0,
18647 gen_rtx_UNSPEC (SFmode, gen_rtvec (1, d),
18649 /* e0 = 1. - d * x0 */
18650 emit_insn (gen_rtx_SET (VOIDmode, e0,
18651 gen_rtx_MINUS (SFmode, one,
18652 gen_rtx_MULT (SFmode, d, x0))));
18653 /* e1 = e0 + e0 * e0 */
18654 emit_insn (gen_rtx_SET (VOIDmode, e1,
18655 gen_rtx_PLUS (SFmode,
18656 gen_rtx_MULT (SFmode, e0, e0), e0)));
18657 /* y1 = x0 + e1 * x0 */
18658 emit_insn (gen_rtx_SET (VOIDmode, y1,
18659 gen_rtx_PLUS (SFmode,
18660 gen_rtx_MULT (SFmode, e1, x0), x0)));
18662 emit_insn (gen_rtx_SET (VOIDmode, u0,
18663 gen_rtx_MULT (SFmode, n, y1)));
18664 /* v0 = n - d * u0 */
18665 emit_insn (gen_rtx_SET (VOIDmode, v0,
18666 gen_rtx_MINUS (SFmode, n,
18667 gen_rtx_MULT (SFmode, d, u0))));
18668 /* res = u0 + v0 * y1 */
18669 emit_insn (gen_rtx_SET (VOIDmode, res,
18670 gen_rtx_PLUS (SFmode,
18671 gen_rtx_MULT (SFmode, v0, y1), u0)));
18674 /* Newton-Raphson approximation of double-precision floating point divide n/d.
18675 Assumes no trapping math and finite arguments. */
18678 rs6000_emit_swdivdf (rtx res, rtx n, rtx d)
18680 rtx x0, e0, e1, e2, y1, y2, y3, u0, v0, one;
18682 x0 = gen_reg_rtx (DFmode);
18683 e0 = gen_reg_rtx (DFmode);
18684 e1 = gen_reg_rtx (DFmode);
18685 e2 = gen_reg_rtx (DFmode);
18686 y1 = gen_reg_rtx (DFmode);
18687 y2 = gen_reg_rtx (DFmode);
18688 y3 = gen_reg_rtx (DFmode);
18689 u0 = gen_reg_rtx (DFmode);
18690 v0 = gen_reg_rtx (DFmode);
18691 one = force_reg (DFmode, CONST_DOUBLE_FROM_REAL_VALUE (dconst1, DFmode));
18693 /* x0 = 1./d estimate */
18694 emit_insn (gen_rtx_SET (VOIDmode, x0,
18695 gen_rtx_UNSPEC (DFmode, gen_rtvec (1, d),
18697 /* e0 = 1. - d * x0 */
18698 emit_insn (gen_rtx_SET (VOIDmode, e0,
18699 gen_rtx_MINUS (DFmode, one,
18700 gen_rtx_MULT (SFmode, d, x0))));
18701 /* y1 = x0 + e0 * x0 */
18702 emit_insn (gen_rtx_SET (VOIDmode, y1,
18703 gen_rtx_PLUS (DFmode,
18704 gen_rtx_MULT (DFmode, e0, x0), x0)));
18706 emit_insn (gen_rtx_SET (VOIDmode, e1,
18707 gen_rtx_MULT (DFmode, e0, e0)));
18708 /* y2 = y1 + e1 * y1 */
18709 emit_insn (gen_rtx_SET (VOIDmode, y2,
18710 gen_rtx_PLUS (DFmode,
18711 gen_rtx_MULT (DFmode, e1, y1), y1)));
18713 emit_insn (gen_rtx_SET (VOIDmode, e2,
18714 gen_rtx_MULT (DFmode, e1, e1)));
18715 /* y3 = y2 + e2 * y2 */
18716 emit_insn (gen_rtx_SET (VOIDmode, y3,
18717 gen_rtx_PLUS (DFmode,
18718 gen_rtx_MULT (DFmode, e2, y2), y2)));
18720 emit_insn (gen_rtx_SET (VOIDmode, u0,
18721 gen_rtx_MULT (DFmode, n, y3)));
18722 /* v0 = n - d * u0 */
18723 emit_insn (gen_rtx_SET (VOIDmode, v0,
18724 gen_rtx_MINUS (DFmode, n,
18725 gen_rtx_MULT (DFmode, d, u0))));
18726 /* res = u0 + v0 * y3 */
18727 emit_insn (gen_rtx_SET (VOIDmode, res,
18728 gen_rtx_PLUS (DFmode,
18729 gen_rtx_MULT (DFmode, v0, y3), u0)));
18732 /* Return an RTX representing where to find the function value of a
18733 function returning MODE. */
18735 rs6000_complex_function_value (enum machine_mode mode)
18737 unsigned int regno;
18739 enum machine_mode inner = GET_MODE_INNER (mode);
18740 unsigned int inner_bytes = GET_MODE_SIZE (inner);
18742 if (FLOAT_MODE_P (mode) && TARGET_HARD_FLOAT && TARGET_FPRS)
18743 regno = FP_ARG_RETURN;
18746 regno = GP_ARG_RETURN;
18748 /* 32-bit is OK since it'll go in r3/r4. */
18749 if (TARGET_32BIT && inner_bytes >= 4)
18750 return gen_rtx_REG (mode, regno);
18753 if (inner_bytes >= 8)
18754 return gen_rtx_REG (mode, regno);
18756 r1 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno),
18758 r2 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno + 1),
18759 GEN_INT (inner_bytes));
18760 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
18763 /* Define how to find the value returned by a function.
18764 VALTYPE is the data type of the value (as a tree).
18765 If the precise function being called is known, FUNC is its FUNCTION_DECL;
18766 otherwise, FUNC is 0.
18768 On the SPE, both FPs and vectors are returned in r3.
18770 On RS/6000 an integer value is in r3 and a floating-point value is in
18771 fp1, unless -msoft-float. */
18774 rs6000_function_value (tree valtype, tree func ATTRIBUTE_UNUSED)
18776 enum machine_mode mode;
18777 unsigned int regno;
18779 /* Special handling for structs in darwin64. */
18780 if (rs6000_darwin64_abi
18781 && TYPE_MODE (valtype) == BLKmode
18782 && TREE_CODE (valtype) == RECORD_TYPE
18783 && int_size_in_bytes (valtype) > 0)
18785 CUMULATIVE_ARGS valcum;
18789 valcum.fregno = FP_ARG_MIN_REG;
18790 valcum.vregno = ALTIVEC_ARG_MIN_REG;
18791 /* Do a trial code generation as if this were going to be passed as
18792 an argument; if any part goes in memory, we return NULL. */
18793 valret = rs6000_darwin64_record_arg (&valcum, valtype, 1, true);
18796 /* Otherwise fall through to standard ABI rules. */
18799 if (TARGET_32BIT && TARGET_POWERPC64 && TYPE_MODE (valtype) == DImode)
18801 /* Long long return value need be split in -mpowerpc64, 32bit ABI. */
18802 return gen_rtx_PARALLEL (DImode,
18804 gen_rtx_EXPR_LIST (VOIDmode,
18805 gen_rtx_REG (SImode, GP_ARG_RETURN),
18807 gen_rtx_EXPR_LIST (VOIDmode,
18808 gen_rtx_REG (SImode,
18809 GP_ARG_RETURN + 1),
18812 if (TARGET_32BIT && TARGET_POWERPC64 && TYPE_MODE (valtype) == DCmode)
18814 return gen_rtx_PARALLEL (DCmode,
18816 gen_rtx_EXPR_LIST (VOIDmode,
18817 gen_rtx_REG (SImode, GP_ARG_RETURN),
18819 gen_rtx_EXPR_LIST (VOIDmode,
18820 gen_rtx_REG (SImode,
18821 GP_ARG_RETURN + 1),
18823 gen_rtx_EXPR_LIST (VOIDmode,
18824 gen_rtx_REG (SImode,
18825 GP_ARG_RETURN + 2),
18827 gen_rtx_EXPR_LIST (VOIDmode,
18828 gen_rtx_REG (SImode,
18829 GP_ARG_RETURN + 3),
18832 if ((INTEGRAL_TYPE_P (valtype)
18833 && TYPE_PRECISION (valtype) < BITS_PER_WORD)
18834 || POINTER_TYPE_P (valtype))
18835 mode = TARGET_32BIT ? SImode : DImode;
18837 mode = TYPE_MODE (valtype);
18839 if (SCALAR_FLOAT_TYPE_P (valtype) && TARGET_HARD_FLOAT && TARGET_FPRS)
18840 regno = FP_ARG_RETURN;
18841 else if (TREE_CODE (valtype) == COMPLEX_TYPE
18842 && targetm.calls.split_complex_arg)
18843 return rs6000_complex_function_value (mode);
18844 else if (TREE_CODE (valtype) == VECTOR_TYPE
18845 && TARGET_ALTIVEC && TARGET_ALTIVEC_ABI
18846 && ALTIVEC_VECTOR_MODE (mode))
18847 regno = ALTIVEC_ARG_RETURN;
18848 else if (TARGET_E500_DOUBLE && TARGET_HARD_FLOAT
18849 && (mode == DFmode || mode == DCmode))
18850 return spe_build_register_parallel (mode, GP_ARG_RETURN);
18852 regno = GP_ARG_RETURN;
18854 return gen_rtx_REG (mode, regno);
18857 /* Define how to find the value returned by a library function
18858 assuming the value has mode MODE. */
18860 rs6000_libcall_value (enum machine_mode mode)
18862 unsigned int regno;
18864 if (TARGET_32BIT && TARGET_POWERPC64 && mode == DImode)
18866 /* Long long return value need be split in -mpowerpc64, 32bit ABI. */
18867 return gen_rtx_PARALLEL (DImode,
18869 gen_rtx_EXPR_LIST (VOIDmode,
18870 gen_rtx_REG (SImode, GP_ARG_RETURN),
18872 gen_rtx_EXPR_LIST (VOIDmode,
18873 gen_rtx_REG (SImode,
18874 GP_ARG_RETURN + 1),
18878 if (SCALAR_FLOAT_MODE_P (mode)
18879 && TARGET_HARD_FLOAT && TARGET_FPRS)
18880 regno = FP_ARG_RETURN;
18881 else if (ALTIVEC_VECTOR_MODE (mode)
18882 && TARGET_ALTIVEC && TARGET_ALTIVEC_ABI)
18883 regno = ALTIVEC_ARG_RETURN;
18884 else if (COMPLEX_MODE_P (mode) && targetm.calls.split_complex_arg)
18885 return rs6000_complex_function_value (mode);
18886 else if (TARGET_E500_DOUBLE && TARGET_HARD_FLOAT
18887 && (mode == DFmode || mode == DCmode))
18888 return spe_build_register_parallel (mode, GP_ARG_RETURN);
18890 regno = GP_ARG_RETURN;
18892 return gen_rtx_REG (mode, regno);
18895 /* Define the offset between two registers, FROM to be eliminated and its
18896 replacement TO, at the start of a routine. */
18898 rs6000_initial_elimination_offset (int from, int to)
18900 rs6000_stack_t *info = rs6000_stack_info ();
18901 HOST_WIDE_INT offset;
18903 if (from == HARD_FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
18904 offset = info->push_p ? 0 : -info->total_size;
18905 else if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
18907 offset = info->push_p ? 0 : -info->total_size;
18908 if (FRAME_GROWS_DOWNWARD)
18909 offset += info->fixed_size + info->vars_size + info->parm_size;
18911 else if (from == FRAME_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
18912 offset = FRAME_GROWS_DOWNWARD
18913 ? info->fixed_size + info->vars_size + info->parm_size
18915 else if (from == ARG_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
18916 offset = info->total_size;
18917 else if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
18918 offset = info->push_p ? info->total_size : 0;
18919 else if (from == RS6000_PIC_OFFSET_TABLE_REGNUM)
18922 gcc_unreachable ();
18927 /* Return true if TYPE is a SPE or AltiVec opaque type. */
18930 rs6000_is_opaque_type (tree type)
18932 return (type == opaque_V2SI_type_node
18933 || type == opaque_V2SF_type_node
18934 || type == opaque_p_V2SI_type_node
18935 || type == opaque_V4SI_type_node);
18939 rs6000_dwarf_register_span (rtx reg)
18944 && (SPE_VECTOR_MODE (GET_MODE (reg))
18945 || (TARGET_E500_DOUBLE && GET_MODE (reg) == DFmode)))
18950 regno = REGNO (reg);
18952 /* The duality of the SPE register size wreaks all kinds of havoc.
18953 This is a way of distinguishing r0 in 32-bits from r0 in
18956 gen_rtx_PARALLEL (VOIDmode,
18959 gen_rtx_REG (SImode, regno + 1200),
18960 gen_rtx_REG (SImode, regno))
18962 gen_rtx_REG (SImode, regno),
18963 gen_rtx_REG (SImode, regno + 1200)));
18966 /* Map internal gcc register numbers to DWARF2 register numbers. */
18969 rs6000_dbx_register_number (unsigned int regno)
18971 if (regno <= 63 || write_symbols != DWARF2_DEBUG)
18973 if (regno == MQ_REGNO)
18975 if (regno == LINK_REGISTER_REGNUM)
18977 if (regno == COUNT_REGISTER_REGNUM)
18979 if (CR_REGNO_P (regno))
18980 return regno - CR0_REGNO + 86;
18981 if (regno == XER_REGNO)
18983 if (ALTIVEC_REGNO_P (regno))
18984 return regno - FIRST_ALTIVEC_REGNO + 1124;
18985 if (regno == VRSAVE_REGNO)
18987 if (regno == VSCR_REGNO)
18989 if (regno == SPE_ACC_REGNO)
18991 if (regno == SPEFSCR_REGNO)
18993 /* SPE high reg number. We get these values of regno from
18994 rs6000_dwarf_register_span. */
18995 gcc_assert (regno >= 1200 && regno < 1232);
18999 /* target hook eh_return_filter_mode */
19000 static enum machine_mode
19001 rs6000_eh_return_filter_mode (void)
19003 return TARGET_32BIT ? SImode : word_mode;
19006 /* Target hook for vector_mode_supported_p. */
19008 rs6000_vector_mode_supported_p (enum machine_mode mode)
19011 if (TARGET_SPE && SPE_VECTOR_MODE (mode))
19014 else if (TARGET_ALTIVEC && ALTIVEC_VECTOR_MODE (mode))
19021 /* Target hook for invalid_arg_for_unprototyped_fn. */
19022 static const char *
19023 invalid_arg_for_unprototyped_fn (tree typelist, tree funcdecl, tree val)
19025 return (!rs6000_darwin64_abi
19027 && TREE_CODE (TREE_TYPE (val)) == VECTOR_TYPE
19028 && (funcdecl == NULL_TREE
19029 || (TREE_CODE (funcdecl) == FUNCTION_DECL
19030 && DECL_BUILT_IN_CLASS (funcdecl) != BUILT_IN_MD)))
19031 ? N_("AltiVec argument passed to unprototyped function")
19035 /* For TARGET_SECURE_PLT 32-bit PIC code we can save PIC register
19036 setup by using __stack_chk_fail_local hidden function instead of
19037 calling __stack_chk_fail directly. Otherwise it is better to call
19038 __stack_chk_fail directly. */
19041 rs6000_stack_protect_fail (void)
19043 return (DEFAULT_ABI == ABI_V4 && TARGET_SECURE_PLT && flag_pic)
19044 ? default_hidden_stack_protect_fail ()
19045 : default_external_stack_protect_fail ();
19048 #include "gt-rs6000.h"