1 /* Subroutines used for code generation on IBM RS/6000.
2 Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
4 Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it
9 under the terms of the GNU General Public License as published
10 by the Free Software Foundation; either version 2, or (at your
11 option) any later version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT
14 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
15 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
16 License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the
20 Free Software Foundation, 51 Franklin Street, Fifth Floor, Boston,
21 MA 02110-1301, USA. */
25 #include "coretypes.h"
29 #include "hard-reg-set.h"
31 #include "insn-config.h"
32 #include "conditions.h"
33 #include "insn-attr.h"
43 #include "basic-block.h"
44 #include "integrate.h"
50 #include "target-def.h"
51 #include "langhooks.h"
53 #include "cfglayout.h"
54 #include "sched-int.h"
55 #include "tree-gimple.h"
59 #include "xcoffout.h" /* get declarations of xcoff_*_section_name */
62 #include "gstab.h" /* for N_SLINE */
65 #ifndef TARGET_NO_PROTOTYPE
66 #define TARGET_NO_PROTOTYPE 0
69 #define min(A,B) ((A) < (B) ? (A) : (B))
70 #define max(A,B) ((A) > (B) ? (A) : (B))
72 /* Structure used to define the rs6000 stack */
73 typedef struct rs6000_stack {
74 int first_gp_reg_save; /* first callee saved GP register used */
75 int first_fp_reg_save; /* first callee saved FP register used */
76 int first_altivec_reg_save; /* first callee saved AltiVec register used */
77 int lr_save_p; /* true if the link reg needs to be saved */
78 int cr_save_p; /* true if the CR reg needs to be saved */
79 unsigned int vrsave_mask; /* mask of vec registers to save */
80 int toc_save_p; /* true if the TOC needs to be saved */
81 int push_p; /* true if we need to allocate stack space */
82 int calls_p; /* true if the function makes any calls */
83 int world_save_p; /* true if we're saving *everything*:
84 r13-r31, cr, f14-f31, vrsave, v20-v31 */
85 enum rs6000_abi abi; /* which ABI to use */
86 int gp_save_offset; /* offset to save GP regs from initial SP */
87 int fp_save_offset; /* offset to save FP regs from initial SP */
88 int altivec_save_offset; /* offset to save AltiVec regs from initial SP */
89 int lr_save_offset; /* offset to save LR from initial SP */
90 int cr_save_offset; /* offset to save CR from initial SP */
91 int vrsave_save_offset; /* offset to save VRSAVE from initial SP */
92 int spe_gp_save_offset; /* offset to save spe 64-bit gprs */
93 int toc_save_offset; /* offset to save the TOC pointer */
94 int varargs_save_offset; /* offset to save the varargs registers */
95 int ehrd_offset; /* offset to EH return data */
96 int reg_size; /* register size (4 or 8) */
97 HOST_WIDE_INT vars_size; /* variable save area size */
98 int parm_size; /* outgoing parameter size */
99 int save_size; /* save area size */
100 int fixed_size; /* fixed size of stack frame */
101 int gp_size; /* size of saved GP registers */
102 int fp_size; /* size of saved FP registers */
103 int altivec_size; /* size of saved AltiVec registers */
104 int cr_size; /* size to hold CR if not in save_size */
105 int lr_size; /* size to hold LR if not in save_size */
106 int vrsave_size; /* size to hold VRSAVE if not in save_size */
107 int altivec_padding_size; /* size of altivec alignment padding if
109 int spe_gp_size; /* size of 64-bit GPR save size for SPE */
110 int spe_padding_size;
111 int toc_size; /* size to hold TOC if not in save_size */
112 HOST_WIDE_INT total_size; /* total bytes allocated for stack */
113 int spe_64bit_regs_used;
116 /* A C structure for machine-specific, per-function data.
117 This is added to the cfun structure. */
118 typedef struct machine_function GTY(())
120 /* Flags if __builtin_return_address (n) with n >= 1 was used. */
121 int ra_needs_full_frame;
122 /* Some local-dynamic symbol. */
123 const char *some_ld_name;
124 /* Whether the instruction chain has been scanned already. */
125 int insn_chain_scanned_p;
126 /* Flags if __builtin_return_address (0) was used. */
128 /* Offset from virtual_stack_vars_rtx to the start of the ABI_V4
129 varargs save area. */
130 HOST_WIDE_INT varargs_save_offset;
133 /* Target cpu type */
135 enum processor_type rs6000_cpu;
136 struct rs6000_cpu_select rs6000_select[3] =
138 /* switch name, tune arch */
139 { (const char *)0, "--with-cpu=", 1, 1 },
140 { (const char *)0, "-mcpu=", 1, 1 },
141 { (const char *)0, "-mtune=", 1, 0 },
144 /* Always emit branch hint bits. */
145 static GTY(()) bool rs6000_always_hint;
147 /* Schedule instructions for group formation. */
148 static GTY(()) bool rs6000_sched_groups;
150 /* Support for -msched-costly-dep option. */
151 const char *rs6000_sched_costly_dep_str;
152 enum rs6000_dependence_cost rs6000_sched_costly_dep;
154 /* Support for -minsert-sched-nops option. */
155 const char *rs6000_sched_insert_nops_str;
156 enum rs6000_nop_insertion rs6000_sched_insert_nops;
158 /* Support targetm.vectorize.builtin_mask_for_load. */
159 static GTY(()) tree altivec_builtin_mask_for_load;
161 /* Size of long double */
162 int rs6000_long_double_type_size;
164 /* Whether -mabi=altivec has appeared */
165 int rs6000_altivec_abi;
167 /* Nonzero if we want SPE ABI extensions. */
170 /* Nonzero if floating point operations are done in the GPRs. */
171 int rs6000_float_gprs = 0;
173 /* Nonzero if we want Darwin's struct-by-value-in-regs ABI. */
174 int rs6000_darwin64_abi;
176 /* Set to nonzero once AIX common-mode calls have been defined. */
177 static GTY(()) int common_mode_defined;
179 /* Save information from a "cmpxx" operation until the branch or scc is
181 rtx rs6000_compare_op0, rs6000_compare_op1;
182 int rs6000_compare_fp_p;
184 /* Label number of label created for -mrelocatable, to call to so we can
185 get the address of the GOT section */
186 int rs6000_pic_labelno;
189 /* Which abi to adhere to */
190 const char *rs6000_abi_name;
192 /* Semantics of the small data area */
193 enum rs6000_sdata_type rs6000_sdata = SDATA_DATA;
195 /* Which small data model to use */
196 const char *rs6000_sdata_name = (char *)0;
198 /* Counter for labels which are to be placed in .fixup. */
199 int fixuplabelno = 0;
202 /* Bit size of immediate TLS offsets and string from which it is decoded. */
203 int rs6000_tls_size = 32;
204 const char *rs6000_tls_size_string;
206 /* ABI enumeration available for subtarget to use. */
207 enum rs6000_abi rs6000_current_abi;
209 /* Whether to use variant of AIX ABI for PowerPC64 Linux. */
213 const char *rs6000_debug_name;
214 int rs6000_debug_stack; /* debug stack applications */
215 int rs6000_debug_arg; /* debug argument handling */
217 /* Value is TRUE if register/mode pair is acceptable. */
218 bool rs6000_hard_regno_mode_ok_p[NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
220 /* Built in types. */
222 tree rs6000_builtin_types[RS6000_BTI_MAX];
223 tree rs6000_builtin_decls[RS6000_BUILTIN_COUNT];
225 const char *rs6000_traceback_name;
227 traceback_default = 0,
233 /* Flag to say the TOC is initialized */
235 char toc_label_name[10];
237 /* Control alignment for fields within structures. */
238 /* String from -malign-XXXXX. */
239 int rs6000_alignment_flags;
241 /* True for any options that were explicitly set. */
243 bool aix_struct_ret; /* True if -maix-struct-ret was used. */
244 bool alignment; /* True if -malign- was used. */
245 bool abi; /* True if -mabi= was used. */
246 bool spe; /* True if -mspe= was used. */
247 bool float_gprs; /* True if -mfloat-gprs= was used. */
248 bool isel; /* True if -misel was used. */
249 bool long_double; /* True if -mlong-double- was used. */
250 } rs6000_explicit_options;
252 struct builtin_description
254 /* mask is not const because we're going to alter it below. This
255 nonsense will go away when we rewrite the -march infrastructure
256 to give us more target flag bits. */
258 const enum insn_code icode;
259 const char *const name;
260 const enum rs6000_builtins code;
263 /* Target cpu costs. */
265 struct processor_costs {
266 const int mulsi; /* cost of SImode multiplication. */
267 const int mulsi_const; /* cost of SImode multiplication by constant. */
268 const int mulsi_const9; /* cost of SImode mult by short constant. */
269 const int muldi; /* cost of DImode multiplication. */
270 const int divsi; /* cost of SImode division. */
271 const int divdi; /* cost of DImode division. */
272 const int fp; /* cost of simple SFmode and DFmode insns. */
273 const int dmul; /* cost of DFmode multiplication (and fmadd). */
274 const int sdiv; /* cost of SFmode division (fdivs). */
275 const int ddiv; /* cost of DFmode division (fdiv). */
278 const struct processor_costs *rs6000_cost;
280 /* Processor costs (relative to an add) */
282 /* Instruction size costs on 32bit processors. */
284 struct processor_costs size32_cost = {
285 COSTS_N_INSNS (1), /* mulsi */
286 COSTS_N_INSNS (1), /* mulsi_const */
287 COSTS_N_INSNS (1), /* mulsi_const9 */
288 COSTS_N_INSNS (1), /* muldi */
289 COSTS_N_INSNS (1), /* divsi */
290 COSTS_N_INSNS (1), /* divdi */
291 COSTS_N_INSNS (1), /* fp */
292 COSTS_N_INSNS (1), /* dmul */
293 COSTS_N_INSNS (1), /* sdiv */
294 COSTS_N_INSNS (1), /* ddiv */
297 /* Instruction size costs on 64bit processors. */
299 struct processor_costs size64_cost = {
300 COSTS_N_INSNS (1), /* mulsi */
301 COSTS_N_INSNS (1), /* mulsi_const */
302 COSTS_N_INSNS (1), /* mulsi_const9 */
303 COSTS_N_INSNS (1), /* muldi */
304 COSTS_N_INSNS (1), /* divsi */
305 COSTS_N_INSNS (1), /* divdi */
306 COSTS_N_INSNS (1), /* fp */
307 COSTS_N_INSNS (1), /* dmul */
308 COSTS_N_INSNS (1), /* sdiv */
309 COSTS_N_INSNS (1), /* ddiv */
312 /* Instruction costs on RIOS1 processors. */
314 struct processor_costs rios1_cost = {
315 COSTS_N_INSNS (5), /* mulsi */
316 COSTS_N_INSNS (4), /* mulsi_const */
317 COSTS_N_INSNS (3), /* mulsi_const9 */
318 COSTS_N_INSNS (5), /* muldi */
319 COSTS_N_INSNS (19), /* divsi */
320 COSTS_N_INSNS (19), /* divdi */
321 COSTS_N_INSNS (2), /* fp */
322 COSTS_N_INSNS (2), /* dmul */
323 COSTS_N_INSNS (19), /* sdiv */
324 COSTS_N_INSNS (19), /* ddiv */
327 /* Instruction costs on RIOS2 processors. */
329 struct processor_costs rios2_cost = {
330 COSTS_N_INSNS (2), /* mulsi */
331 COSTS_N_INSNS (2), /* mulsi_const */
332 COSTS_N_INSNS (2), /* mulsi_const9 */
333 COSTS_N_INSNS (2), /* muldi */
334 COSTS_N_INSNS (13), /* divsi */
335 COSTS_N_INSNS (13), /* divdi */
336 COSTS_N_INSNS (2), /* fp */
337 COSTS_N_INSNS (2), /* dmul */
338 COSTS_N_INSNS (17), /* sdiv */
339 COSTS_N_INSNS (17), /* ddiv */
342 /* Instruction costs on RS64A processors. */
344 struct processor_costs rs64a_cost = {
345 COSTS_N_INSNS (20), /* mulsi */
346 COSTS_N_INSNS (12), /* mulsi_const */
347 COSTS_N_INSNS (8), /* mulsi_const9 */
348 COSTS_N_INSNS (34), /* muldi */
349 COSTS_N_INSNS (65), /* divsi */
350 COSTS_N_INSNS (67), /* divdi */
351 COSTS_N_INSNS (4), /* fp */
352 COSTS_N_INSNS (4), /* dmul */
353 COSTS_N_INSNS (31), /* sdiv */
354 COSTS_N_INSNS (31), /* ddiv */
357 /* Instruction costs on MPCCORE processors. */
359 struct processor_costs mpccore_cost = {
360 COSTS_N_INSNS (2), /* mulsi */
361 COSTS_N_INSNS (2), /* mulsi_const */
362 COSTS_N_INSNS (2), /* mulsi_const9 */
363 COSTS_N_INSNS (2), /* muldi */
364 COSTS_N_INSNS (6), /* divsi */
365 COSTS_N_INSNS (6), /* divdi */
366 COSTS_N_INSNS (4), /* fp */
367 COSTS_N_INSNS (5), /* dmul */
368 COSTS_N_INSNS (10), /* sdiv */
369 COSTS_N_INSNS (17), /* ddiv */
372 /* Instruction costs on PPC403 processors. */
374 struct processor_costs ppc403_cost = {
375 COSTS_N_INSNS (4), /* mulsi */
376 COSTS_N_INSNS (4), /* mulsi_const */
377 COSTS_N_INSNS (4), /* mulsi_const9 */
378 COSTS_N_INSNS (4), /* muldi */
379 COSTS_N_INSNS (33), /* divsi */
380 COSTS_N_INSNS (33), /* divdi */
381 COSTS_N_INSNS (11), /* fp */
382 COSTS_N_INSNS (11), /* dmul */
383 COSTS_N_INSNS (11), /* sdiv */
384 COSTS_N_INSNS (11), /* ddiv */
387 /* Instruction costs on PPC405 processors. */
389 struct processor_costs ppc405_cost = {
390 COSTS_N_INSNS (5), /* mulsi */
391 COSTS_N_INSNS (4), /* mulsi_const */
392 COSTS_N_INSNS (3), /* mulsi_const9 */
393 COSTS_N_INSNS (5), /* muldi */
394 COSTS_N_INSNS (35), /* divsi */
395 COSTS_N_INSNS (35), /* divdi */
396 COSTS_N_INSNS (11), /* fp */
397 COSTS_N_INSNS (11), /* dmul */
398 COSTS_N_INSNS (11), /* sdiv */
399 COSTS_N_INSNS (11), /* ddiv */
402 /* Instruction costs on PPC440 processors. */
404 struct processor_costs ppc440_cost = {
405 COSTS_N_INSNS (3), /* mulsi */
406 COSTS_N_INSNS (2), /* mulsi_const */
407 COSTS_N_INSNS (2), /* mulsi_const9 */
408 COSTS_N_INSNS (3), /* muldi */
409 COSTS_N_INSNS (34), /* divsi */
410 COSTS_N_INSNS (34), /* divdi */
411 COSTS_N_INSNS (5), /* fp */
412 COSTS_N_INSNS (5), /* dmul */
413 COSTS_N_INSNS (19), /* sdiv */
414 COSTS_N_INSNS (33), /* ddiv */
417 /* Instruction costs on PPC601 processors. */
419 struct processor_costs ppc601_cost = {
420 COSTS_N_INSNS (5), /* mulsi */
421 COSTS_N_INSNS (5), /* mulsi_const */
422 COSTS_N_INSNS (5), /* mulsi_const9 */
423 COSTS_N_INSNS (5), /* muldi */
424 COSTS_N_INSNS (36), /* divsi */
425 COSTS_N_INSNS (36), /* divdi */
426 COSTS_N_INSNS (4), /* fp */
427 COSTS_N_INSNS (5), /* dmul */
428 COSTS_N_INSNS (17), /* sdiv */
429 COSTS_N_INSNS (31), /* ddiv */
432 /* Instruction costs on PPC603 processors. */
434 struct processor_costs ppc603_cost = {
435 COSTS_N_INSNS (5), /* mulsi */
436 COSTS_N_INSNS (3), /* mulsi_const */
437 COSTS_N_INSNS (2), /* mulsi_const9 */
438 COSTS_N_INSNS (5), /* muldi */
439 COSTS_N_INSNS (37), /* divsi */
440 COSTS_N_INSNS (37), /* divdi */
441 COSTS_N_INSNS (3), /* fp */
442 COSTS_N_INSNS (4), /* dmul */
443 COSTS_N_INSNS (18), /* sdiv */
444 COSTS_N_INSNS (33), /* ddiv */
447 /* Instruction costs on PPC604 processors. */
449 struct processor_costs ppc604_cost = {
450 COSTS_N_INSNS (4), /* mulsi */
451 COSTS_N_INSNS (4), /* mulsi_const */
452 COSTS_N_INSNS (4), /* mulsi_const9 */
453 COSTS_N_INSNS (4), /* muldi */
454 COSTS_N_INSNS (20), /* divsi */
455 COSTS_N_INSNS (20), /* divdi */
456 COSTS_N_INSNS (3), /* fp */
457 COSTS_N_INSNS (3), /* dmul */
458 COSTS_N_INSNS (18), /* sdiv */
459 COSTS_N_INSNS (32), /* ddiv */
462 /* Instruction costs on PPC604e processors. */
464 struct processor_costs ppc604e_cost = {
465 COSTS_N_INSNS (2), /* mulsi */
466 COSTS_N_INSNS (2), /* mulsi_const */
467 COSTS_N_INSNS (2), /* mulsi_const9 */
468 COSTS_N_INSNS (2), /* muldi */
469 COSTS_N_INSNS (20), /* divsi */
470 COSTS_N_INSNS (20), /* divdi */
471 COSTS_N_INSNS (3), /* fp */
472 COSTS_N_INSNS (3), /* dmul */
473 COSTS_N_INSNS (18), /* sdiv */
474 COSTS_N_INSNS (32), /* ddiv */
477 /* Instruction costs on PPC620 processors. */
479 struct processor_costs ppc620_cost = {
480 COSTS_N_INSNS (5), /* mulsi */
481 COSTS_N_INSNS (4), /* mulsi_const */
482 COSTS_N_INSNS (3), /* mulsi_const9 */
483 COSTS_N_INSNS (7), /* muldi */
484 COSTS_N_INSNS (21), /* divsi */
485 COSTS_N_INSNS (37), /* divdi */
486 COSTS_N_INSNS (3), /* fp */
487 COSTS_N_INSNS (3), /* dmul */
488 COSTS_N_INSNS (18), /* sdiv */
489 COSTS_N_INSNS (32), /* ddiv */
492 /* Instruction costs on PPC630 processors. */
494 struct processor_costs ppc630_cost = {
495 COSTS_N_INSNS (5), /* mulsi */
496 COSTS_N_INSNS (4), /* mulsi_const */
497 COSTS_N_INSNS (3), /* mulsi_const9 */
498 COSTS_N_INSNS (7), /* muldi */
499 COSTS_N_INSNS (21), /* divsi */
500 COSTS_N_INSNS (37), /* divdi */
501 COSTS_N_INSNS (3), /* fp */
502 COSTS_N_INSNS (3), /* dmul */
503 COSTS_N_INSNS (17), /* sdiv */
504 COSTS_N_INSNS (21), /* ddiv */
507 /* Instruction costs on PPC750 and PPC7400 processors. */
509 struct processor_costs ppc750_cost = {
510 COSTS_N_INSNS (5), /* mulsi */
511 COSTS_N_INSNS (3), /* mulsi_const */
512 COSTS_N_INSNS (2), /* mulsi_const9 */
513 COSTS_N_INSNS (5), /* muldi */
514 COSTS_N_INSNS (17), /* divsi */
515 COSTS_N_INSNS (17), /* divdi */
516 COSTS_N_INSNS (3), /* fp */
517 COSTS_N_INSNS (3), /* dmul */
518 COSTS_N_INSNS (17), /* sdiv */
519 COSTS_N_INSNS (31), /* ddiv */
522 /* Instruction costs on PPC7450 processors. */
524 struct processor_costs ppc7450_cost = {
525 COSTS_N_INSNS (4), /* mulsi */
526 COSTS_N_INSNS (3), /* mulsi_const */
527 COSTS_N_INSNS (3), /* mulsi_const9 */
528 COSTS_N_INSNS (4), /* muldi */
529 COSTS_N_INSNS (23), /* divsi */
530 COSTS_N_INSNS (23), /* divdi */
531 COSTS_N_INSNS (5), /* fp */
532 COSTS_N_INSNS (5), /* dmul */
533 COSTS_N_INSNS (21), /* sdiv */
534 COSTS_N_INSNS (35), /* ddiv */
537 /* Instruction costs on PPC8540 processors. */
539 struct processor_costs ppc8540_cost = {
540 COSTS_N_INSNS (4), /* mulsi */
541 COSTS_N_INSNS (4), /* mulsi_const */
542 COSTS_N_INSNS (4), /* mulsi_const9 */
543 COSTS_N_INSNS (4), /* muldi */
544 COSTS_N_INSNS (19), /* divsi */
545 COSTS_N_INSNS (19), /* divdi */
546 COSTS_N_INSNS (4), /* fp */
547 COSTS_N_INSNS (4), /* dmul */
548 COSTS_N_INSNS (29), /* sdiv */
549 COSTS_N_INSNS (29), /* ddiv */
552 /* Instruction costs on POWER4 and POWER5 processors. */
554 struct processor_costs power4_cost = {
555 COSTS_N_INSNS (3), /* mulsi */
556 COSTS_N_INSNS (2), /* mulsi_const */
557 COSTS_N_INSNS (2), /* mulsi_const9 */
558 COSTS_N_INSNS (4), /* muldi */
559 COSTS_N_INSNS (18), /* divsi */
560 COSTS_N_INSNS (34), /* divdi */
561 COSTS_N_INSNS (3), /* fp */
562 COSTS_N_INSNS (3), /* dmul */
563 COSTS_N_INSNS (17), /* sdiv */
564 COSTS_N_INSNS (17), /* ddiv */
568 static bool rs6000_function_ok_for_sibcall (tree, tree);
569 static const char *rs6000_invalid_within_doloop (rtx);
570 static rtx rs6000_generate_compare (enum rtx_code);
571 static void rs6000_maybe_dead (rtx);
572 static void rs6000_emit_stack_tie (void);
573 static void rs6000_frame_related (rtx, rtx, HOST_WIDE_INT, rtx, rtx);
574 static rtx spe_synthesize_frame_save (rtx);
575 static bool spe_func_has_64bit_regs_p (void);
576 static void emit_frame_save (rtx, rtx, enum machine_mode, unsigned int,
578 static rtx gen_frame_mem_offset (enum machine_mode, rtx, int);
579 static void rs6000_emit_allocate_stack (HOST_WIDE_INT, int);
580 static unsigned rs6000_hash_constant (rtx);
581 static unsigned toc_hash_function (const void *);
582 static int toc_hash_eq (const void *, const void *);
583 static int constant_pool_expr_1 (rtx, int *, int *);
584 static bool constant_pool_expr_p (rtx);
585 static bool legitimate_indexed_address_p (rtx, int);
586 static bool legitimate_lo_sum_address_p (enum machine_mode, rtx, int);
587 static struct machine_function * rs6000_init_machine_status (void);
588 static bool rs6000_assemble_integer (rtx, unsigned int, int);
589 static bool no_global_regs_above (int);
590 #ifdef HAVE_GAS_HIDDEN
591 static void rs6000_assemble_visibility (tree, int);
593 static int rs6000_ra_ever_killed (void);
594 static tree rs6000_handle_longcall_attribute (tree *, tree, tree, int, bool *);
595 static tree rs6000_handle_altivec_attribute (tree *, tree, tree, int, bool *);
596 static void rs6000_eliminate_indexed_memrefs (rtx operands[2]);
597 static const char *rs6000_mangle_fundamental_type (tree);
598 extern const struct attribute_spec rs6000_attribute_table[];
599 static void rs6000_set_default_type_attributes (tree);
600 static void rs6000_output_function_prologue (FILE *, HOST_WIDE_INT);
601 static void rs6000_output_function_epilogue (FILE *, HOST_WIDE_INT);
602 static void rs6000_output_mi_thunk (FILE *, tree, HOST_WIDE_INT, HOST_WIDE_INT,
604 static rtx rs6000_emit_set_long_const (rtx, HOST_WIDE_INT, HOST_WIDE_INT);
605 static bool rs6000_return_in_memory (tree, tree);
606 static void rs6000_file_start (void);
608 static unsigned int rs6000_elf_section_type_flags (tree, const char *, int);
609 static void rs6000_elf_asm_out_constructor (rtx, int);
610 static void rs6000_elf_asm_out_destructor (rtx, int);
611 static void rs6000_elf_end_indicate_exec_stack (void) ATTRIBUTE_UNUSED;
612 static void rs6000_elf_select_section (tree, int, unsigned HOST_WIDE_INT);
613 static void rs6000_elf_unique_section (tree, int);
614 static void rs6000_elf_select_rtx_section (enum machine_mode, rtx,
615 unsigned HOST_WIDE_INT);
616 static void rs6000_elf_encode_section_info (tree, rtx, int)
618 static bool rs6000_elf_in_small_data_p (tree);
621 static void rs6000_xcoff_asm_globalize_label (FILE *, const char *);
622 static void rs6000_xcoff_asm_named_section (const char *, unsigned int, tree);
623 static void rs6000_xcoff_select_section (tree, int, unsigned HOST_WIDE_INT);
624 static void rs6000_xcoff_unique_section (tree, int);
625 static void rs6000_xcoff_select_rtx_section (enum machine_mode, rtx,
626 unsigned HOST_WIDE_INT);
627 static const char * rs6000_xcoff_strip_name_encoding (const char *);
628 static unsigned int rs6000_xcoff_section_type_flags (tree, const char *, int);
629 static void rs6000_xcoff_file_start (void);
630 static void rs6000_xcoff_file_end (void);
632 static int rs6000_variable_issue (FILE *, int, rtx, int);
633 static bool rs6000_rtx_costs (rtx, int, int, int *);
634 static int rs6000_adjust_cost (rtx, rtx, rtx, int);
635 static bool is_microcoded_insn (rtx);
636 static int is_dispatch_slot_restricted (rtx);
637 static bool is_cracked_insn (rtx);
638 static bool is_branch_slot_insn (rtx);
639 static int rs6000_adjust_priority (rtx, int);
640 static int rs6000_issue_rate (void);
641 static bool rs6000_is_costly_dependence (rtx, rtx, rtx, int, int);
642 static rtx get_next_active_insn (rtx, rtx);
643 static bool insn_terminates_group_p (rtx , enum group_termination);
644 static bool is_costly_group (rtx *, rtx);
645 static int force_new_group (int, FILE *, rtx *, rtx, bool *, int, int *);
646 static int redefine_groups (FILE *, int, rtx, rtx);
647 static int pad_groups (FILE *, int, rtx, rtx);
648 static void rs6000_sched_finish (FILE *, int);
649 static int rs6000_use_sched_lookahead (void);
650 static tree rs6000_builtin_mask_for_load (void);
652 static void def_builtin (int, const char *, tree, int);
653 static void rs6000_init_builtins (void);
654 static rtx rs6000_expand_unop_builtin (enum insn_code, tree, rtx);
655 static rtx rs6000_expand_binop_builtin (enum insn_code, tree, rtx);
656 static rtx rs6000_expand_ternop_builtin (enum insn_code, tree, rtx);
657 static rtx rs6000_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
658 static void altivec_init_builtins (void);
659 static void rs6000_common_init_builtins (void);
660 static void rs6000_init_libfuncs (void);
662 static void enable_mask_for_builtins (struct builtin_description *, int,
663 enum rs6000_builtins,
664 enum rs6000_builtins);
665 static tree build_opaque_vector_type (tree, int);
666 static void spe_init_builtins (void);
667 static rtx spe_expand_builtin (tree, rtx, bool *);
668 static rtx spe_expand_stv_builtin (enum insn_code, tree);
669 static rtx spe_expand_predicate_builtin (enum insn_code, tree, rtx);
670 static rtx spe_expand_evsel_builtin (enum insn_code, tree, rtx);
671 static int rs6000_emit_int_cmove (rtx, rtx, rtx, rtx);
672 static rs6000_stack_t *rs6000_stack_info (void);
673 static void debug_stack_info (rs6000_stack_t *);
675 static rtx altivec_expand_builtin (tree, rtx, bool *);
676 static rtx altivec_expand_ld_builtin (tree, rtx, bool *);
677 static rtx altivec_expand_st_builtin (tree, rtx, bool *);
678 static rtx altivec_expand_dst_builtin (tree, rtx, bool *);
679 static rtx altivec_expand_abs_builtin (enum insn_code, tree, rtx);
680 static rtx altivec_expand_predicate_builtin (enum insn_code,
681 const char *, tree, rtx);
682 static rtx altivec_expand_lv_builtin (enum insn_code, tree, rtx);
683 static rtx altivec_expand_stv_builtin (enum insn_code, tree);
684 static rtx altivec_expand_vec_init_builtin (tree, tree, rtx);
685 static rtx altivec_expand_vec_set_builtin (tree);
686 static rtx altivec_expand_vec_ext_builtin (tree, rtx);
687 static int get_element_number (tree, tree);
688 static bool rs6000_handle_option (size_t, const char *, int);
689 static void rs6000_parse_tls_size_option (void);
690 static void rs6000_parse_yes_no_option (const char *, const char *, int *);
691 static int first_altivec_reg_to_save (void);
692 static unsigned int compute_vrsave_mask (void);
693 static void compute_save_world_info (rs6000_stack_t *info_ptr);
694 static void is_altivec_return_reg (rtx, void *);
695 static rtx generate_set_vrsave (rtx, rs6000_stack_t *, int);
696 int easy_vector_constant (rtx, enum machine_mode);
697 static bool rs6000_is_opaque_type (tree);
698 static rtx rs6000_dwarf_register_span (rtx);
699 static rtx rs6000_legitimize_tls_address (rtx, enum tls_model);
700 static void rs6000_output_dwarf_dtprel (FILE *, int, rtx) ATTRIBUTE_UNUSED;
701 static rtx rs6000_tls_get_addr (void);
702 static rtx rs6000_got_sym (void);
703 static int rs6000_tls_symbol_ref_1 (rtx *, void *);
704 static const char *rs6000_get_some_local_dynamic_name (void);
705 static int rs6000_get_some_local_dynamic_name_1 (rtx *, void *);
706 static rtx rs6000_complex_function_value (enum machine_mode);
707 static rtx rs6000_spe_function_arg (CUMULATIVE_ARGS *,
708 enum machine_mode, tree);
709 static void rs6000_darwin64_record_arg_advance_flush (CUMULATIVE_ARGS *,
711 static void rs6000_darwin64_record_arg_advance_recurse (CUMULATIVE_ARGS *,
712 tree, HOST_WIDE_INT);
713 static void rs6000_darwin64_record_arg_flush (CUMULATIVE_ARGS *,
716 static void rs6000_darwin64_record_arg_recurse (CUMULATIVE_ARGS *,
719 static rtx rs6000_darwin64_record_arg (CUMULATIVE_ARGS *, tree, int, bool);
720 static rtx rs6000_mixed_function_arg (enum machine_mode, tree, int);
721 static void rs6000_move_block_from_reg (int regno, rtx x, int nregs);
722 static void setup_incoming_varargs (CUMULATIVE_ARGS *,
723 enum machine_mode, tree,
725 static bool rs6000_pass_by_reference (CUMULATIVE_ARGS *, enum machine_mode,
727 static int rs6000_arg_partial_bytes (CUMULATIVE_ARGS *, enum machine_mode,
729 static const char *invalid_arg_for_unprototyped_fn (tree, tree, tree);
731 static void macho_branch_islands (void);
732 static void add_compiler_branch_island (tree, tree, int);
733 static int no_previous_def (tree function_name);
734 static tree get_prev_label (tree function_name);
735 static void rs6000_darwin_file_start (void);
738 static tree rs6000_build_builtin_va_list (void);
739 static tree rs6000_gimplify_va_arg (tree, tree, tree *, tree *);
740 static bool rs6000_must_pass_in_stack (enum machine_mode, tree);
741 static bool rs6000_vector_mode_supported_p (enum machine_mode);
742 static int get_vec_cmp_insn (enum rtx_code, enum machine_mode,
744 static rtx rs6000_emit_vector_compare (enum rtx_code, rtx, rtx,
746 static int get_vsel_insn (enum machine_mode);
747 static void rs6000_emit_vector_select (rtx, rtx, rtx, rtx);
748 static tree rs6000_stack_protect_fail (void);
750 const int INSN_NOT_AVAILABLE = -1;
751 static enum machine_mode rs6000_eh_return_filter_mode (void);
753 /* Hash table stuff for keeping track of TOC entries. */
755 struct toc_hash_struct GTY(())
757 /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
758 ASM_OUTPUT_SPECIAL_POOL_ENTRY_P. */
760 enum machine_mode key_mode;
764 static GTY ((param_is (struct toc_hash_struct))) htab_t toc_hash_table;
766 /* Default register names. */
767 char rs6000_reg_names[][8] =
769 "0", "1", "2", "3", "4", "5", "6", "7",
770 "8", "9", "10", "11", "12", "13", "14", "15",
771 "16", "17", "18", "19", "20", "21", "22", "23",
772 "24", "25", "26", "27", "28", "29", "30", "31",
773 "0", "1", "2", "3", "4", "5", "6", "7",
774 "8", "9", "10", "11", "12", "13", "14", "15",
775 "16", "17", "18", "19", "20", "21", "22", "23",
776 "24", "25", "26", "27", "28", "29", "30", "31",
777 "mq", "lr", "ctr","ap",
778 "0", "1", "2", "3", "4", "5", "6", "7",
780 /* AltiVec registers. */
781 "0", "1", "2", "3", "4", "5", "6", "7",
782 "8", "9", "10", "11", "12", "13", "14", "15",
783 "16", "17", "18", "19", "20", "21", "22", "23",
784 "24", "25", "26", "27", "28", "29", "30", "31",
787 "spe_acc", "spefscr",
788 /* Soft frame pointer. */
792 #ifdef TARGET_REGNAMES
793 static const char alt_reg_names[][8] =
795 "%r0", "%r1", "%r2", "%r3", "%r4", "%r5", "%r6", "%r7",
796 "%r8", "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
797 "%r16", "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
798 "%r24", "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
799 "%f0", "%f1", "%f2", "%f3", "%f4", "%f5", "%f6", "%f7",
800 "%f8", "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
801 "%f16", "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
802 "%f24", "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
803 "mq", "lr", "ctr", "ap",
804 "%cr0", "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
806 /* AltiVec registers. */
807 "%v0", "%v1", "%v2", "%v3", "%v4", "%v5", "%v6", "%v7",
808 "%v8", "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
809 "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
810 "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
813 "spe_acc", "spefscr",
814 /* Soft frame pointer. */
819 #ifndef MASK_STRICT_ALIGN
820 #define MASK_STRICT_ALIGN 0
822 #ifndef TARGET_PROFILE_KERNEL
823 #define TARGET_PROFILE_KERNEL 0
826 /* The VRSAVE bitmask puts bit %v0 as the most significant bit. */
827 #define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))
829 /* Initialize the GCC target structure. */
830 #undef TARGET_ATTRIBUTE_TABLE
831 #define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
832 #undef TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
833 #define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES rs6000_set_default_type_attributes
835 #undef TARGET_ASM_ALIGNED_DI_OP
836 #define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP
838 /* Default unaligned ops are only provided for ELF. Find the ops needed
839 for non-ELF systems. */
840 #ifndef OBJECT_FORMAT_ELF
842 /* For XCOFF. rs6000_assemble_integer will handle unaligned DIs on
844 #undef TARGET_ASM_UNALIGNED_HI_OP
845 #define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
846 #undef TARGET_ASM_UNALIGNED_SI_OP
847 #define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
848 #undef TARGET_ASM_UNALIGNED_DI_OP
849 #define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
852 #undef TARGET_ASM_UNALIGNED_HI_OP
853 #define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
854 #undef TARGET_ASM_UNALIGNED_SI_OP
855 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
856 #undef TARGET_ASM_UNALIGNED_DI_OP
857 #define TARGET_ASM_UNALIGNED_DI_OP "\t.quad\t"
858 #undef TARGET_ASM_ALIGNED_DI_OP
859 #define TARGET_ASM_ALIGNED_DI_OP "\t.quad\t"
863 /* This hook deals with fixups for relocatable code and DI-mode objects
865 #undef TARGET_ASM_INTEGER
866 #define TARGET_ASM_INTEGER rs6000_assemble_integer
868 #ifdef HAVE_GAS_HIDDEN
869 #undef TARGET_ASM_ASSEMBLE_VISIBILITY
870 #define TARGET_ASM_ASSEMBLE_VISIBILITY rs6000_assemble_visibility
873 #undef TARGET_HAVE_TLS
874 #define TARGET_HAVE_TLS HAVE_AS_TLS
876 #undef TARGET_CANNOT_FORCE_CONST_MEM
877 #define TARGET_CANNOT_FORCE_CONST_MEM rs6000_tls_referenced_p
879 #undef TARGET_ASM_FUNCTION_PROLOGUE
880 #define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
881 #undef TARGET_ASM_FUNCTION_EPILOGUE
882 #define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue
884 #undef TARGET_SCHED_VARIABLE_ISSUE
885 #define TARGET_SCHED_VARIABLE_ISSUE rs6000_variable_issue
887 #undef TARGET_SCHED_ISSUE_RATE
888 #define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
889 #undef TARGET_SCHED_ADJUST_COST
890 #define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
891 #undef TARGET_SCHED_ADJUST_PRIORITY
892 #define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority
893 #undef TARGET_SCHED_IS_COSTLY_DEPENDENCE
894 #define TARGET_SCHED_IS_COSTLY_DEPENDENCE rs6000_is_costly_dependence
895 #undef TARGET_SCHED_FINISH
896 #define TARGET_SCHED_FINISH rs6000_sched_finish
898 #undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
899 #define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD rs6000_use_sched_lookahead
901 #undef TARGET_VECTORIZE_BUILTIN_MASK_FOR_LOAD
902 #define TARGET_VECTORIZE_BUILTIN_MASK_FOR_LOAD rs6000_builtin_mask_for_load
904 #undef TARGET_INIT_BUILTINS
905 #define TARGET_INIT_BUILTINS rs6000_init_builtins
907 #undef TARGET_EXPAND_BUILTIN
908 #define TARGET_EXPAND_BUILTIN rs6000_expand_builtin
910 #undef TARGET_MANGLE_FUNDAMENTAL_TYPE
911 #define TARGET_MANGLE_FUNDAMENTAL_TYPE rs6000_mangle_fundamental_type
913 #undef TARGET_INIT_LIBFUNCS
914 #define TARGET_INIT_LIBFUNCS rs6000_init_libfuncs
917 #undef TARGET_BINDS_LOCAL_P
918 #define TARGET_BINDS_LOCAL_P darwin_binds_local_p
921 #undef TARGET_ASM_OUTPUT_MI_THUNK
922 #define TARGET_ASM_OUTPUT_MI_THUNK rs6000_output_mi_thunk
924 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
925 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_tree_hwi_hwi_tree_true
927 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
928 #define TARGET_FUNCTION_OK_FOR_SIBCALL rs6000_function_ok_for_sibcall
930 #undef TARGET_INVALID_WITHIN_DOLOOP
931 #define TARGET_INVALID_WITHIN_DOLOOP rs6000_invalid_within_doloop
933 #undef TARGET_RTX_COSTS
934 #define TARGET_RTX_COSTS rs6000_rtx_costs
935 #undef TARGET_ADDRESS_COST
936 #define TARGET_ADDRESS_COST hook_int_rtx_0
938 #undef TARGET_VECTOR_OPAQUE_P
939 #define TARGET_VECTOR_OPAQUE_P rs6000_is_opaque_type
941 #undef TARGET_DWARF_REGISTER_SPAN
942 #define TARGET_DWARF_REGISTER_SPAN rs6000_dwarf_register_span
944 /* On rs6000, function arguments are promoted, as are function return
946 #undef TARGET_PROMOTE_FUNCTION_ARGS
947 #define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_tree_true
948 #undef TARGET_PROMOTE_FUNCTION_RETURN
949 #define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_tree_true
951 #undef TARGET_RETURN_IN_MEMORY
952 #define TARGET_RETURN_IN_MEMORY rs6000_return_in_memory
954 #undef TARGET_SETUP_INCOMING_VARARGS
955 #define TARGET_SETUP_INCOMING_VARARGS setup_incoming_varargs
957 /* Always strict argument naming on rs6000. */
958 #undef TARGET_STRICT_ARGUMENT_NAMING
959 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
960 #undef TARGET_PRETEND_OUTGOING_VARARGS_NAMED
961 #define TARGET_PRETEND_OUTGOING_VARARGS_NAMED hook_bool_CUMULATIVE_ARGS_true
962 #undef TARGET_SPLIT_COMPLEX_ARG
963 #define TARGET_SPLIT_COMPLEX_ARG hook_bool_tree_true
964 #undef TARGET_MUST_PASS_IN_STACK
965 #define TARGET_MUST_PASS_IN_STACK rs6000_must_pass_in_stack
966 #undef TARGET_PASS_BY_REFERENCE
967 #define TARGET_PASS_BY_REFERENCE rs6000_pass_by_reference
968 #undef TARGET_ARG_PARTIAL_BYTES
969 #define TARGET_ARG_PARTIAL_BYTES rs6000_arg_partial_bytes
971 #undef TARGET_BUILD_BUILTIN_VA_LIST
972 #define TARGET_BUILD_BUILTIN_VA_LIST rs6000_build_builtin_va_list
974 #undef TARGET_GIMPLIFY_VA_ARG_EXPR
975 #define TARGET_GIMPLIFY_VA_ARG_EXPR rs6000_gimplify_va_arg
977 #undef TARGET_EH_RETURN_FILTER_MODE
978 #define TARGET_EH_RETURN_FILTER_MODE rs6000_eh_return_filter_mode
980 #undef TARGET_VECTOR_MODE_SUPPORTED_P
981 #define TARGET_VECTOR_MODE_SUPPORTED_P rs6000_vector_mode_supported_p
983 #undef TARGET_INVALID_ARG_FOR_UNPROTOTYPED_FN
984 #define TARGET_INVALID_ARG_FOR_UNPROTOTYPED_FN invalid_arg_for_unprototyped_fn
986 #undef TARGET_HANDLE_OPTION
987 #define TARGET_HANDLE_OPTION rs6000_handle_option
989 #undef TARGET_DEFAULT_TARGET_FLAGS
990 #define TARGET_DEFAULT_TARGET_FLAGS \
991 (TARGET_DEFAULT | MASK_SCHED_PROLOG)
993 #undef TARGET_STACK_PROTECT_FAIL
994 #define TARGET_STACK_PROTECT_FAIL rs6000_stack_protect_fail
996 /* MPC604EUM 3.5.2 Weak Consistency between Multiple Processors
997 The PowerPC architecture requires only weak consistency among
998 processors--that is, memory accesses between processors need not be
999 sequentially consistent and memory accesses among processors can occur
1000 in any order. The ability to order memory accesses weakly provides
1001 opportunities for more efficient use of the system bus. Unless a
1002 dependency exists, the 604e allows read operations to precede store
1004 #undef TARGET_RELAXED_ORDERING
1005 #define TARGET_RELAXED_ORDERING true
1008 #undef TARGET_ASM_OUTPUT_DWARF_DTPREL
1009 #define TARGET_ASM_OUTPUT_DWARF_DTPREL rs6000_output_dwarf_dtprel
1012 struct gcc_target targetm = TARGET_INITIALIZER;
1015 /* Value is 1 if hard register REGNO can hold a value of machine-mode
1018 rs6000_hard_regno_mode_ok (int regno, enum machine_mode mode)
1020 /* The GPRs can hold any mode, but values bigger than one register
1021 cannot go past R31. */
1022 if (INT_REGNO_P (regno))
1023 return INT_REGNO_P (regno + HARD_REGNO_NREGS (regno, mode) - 1);
1025 /* The float registers can only hold floating modes and DImode. */
1026 if (FP_REGNO_P (regno))
1028 (SCALAR_FLOAT_MODE_P (mode)
1029 && FP_REGNO_P (regno + HARD_REGNO_NREGS (regno, mode) - 1))
1030 || (GET_MODE_CLASS (mode) == MODE_INT
1031 && GET_MODE_SIZE (mode) == UNITS_PER_FP_WORD);
1033 /* The CR register can only hold CC modes. */
1034 if (CR_REGNO_P (regno))
1035 return GET_MODE_CLASS (mode) == MODE_CC;
1037 if (XER_REGNO_P (regno))
1038 return mode == PSImode;
1040 /* AltiVec only in AldyVec registers. */
1041 if (ALTIVEC_REGNO_P (regno))
1042 return ALTIVEC_VECTOR_MODE (mode);
1044 /* ...but GPRs can hold SIMD data on the SPE in one register. */
1045 if (SPE_SIMD_REGNO_P (regno) && TARGET_SPE && SPE_VECTOR_MODE (mode))
1048 /* We cannot put TImode anywhere except general register and it must be
1049 able to fit within the register set. */
1051 return GET_MODE_SIZE (mode) <= UNITS_PER_WORD;
1054 /* Initialize rs6000_hard_regno_mode_ok_p table. */
1056 rs6000_init_hard_regno_mode_ok (void)
1060 for (r = 0; r < FIRST_PSEUDO_REGISTER; ++r)
1061 for (m = 0; m < NUM_MACHINE_MODES; ++m)
1062 if (rs6000_hard_regno_mode_ok (r, m))
1063 rs6000_hard_regno_mode_ok_p[m][r] = true;
1066 /* If not otherwise specified by a target, make 'long double' equivalent to
1069 #ifndef RS6000_DEFAULT_LONG_DOUBLE_SIZE
1070 #define RS6000_DEFAULT_LONG_DOUBLE_SIZE 64
1073 /* Override command line options. Mostly we process the processor
1074 type and sometimes adjust other TARGET_ options. */
1077 rs6000_override_options (const char *default_cpu)
1080 struct rs6000_cpu_select *ptr;
1083 /* Simplifications for entries below. */
1086 POWERPC_BASE_MASK = MASK_POWERPC | MASK_NEW_MNEMONICS,
1087 POWERPC_7400_MASK = POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_ALTIVEC
1090 /* This table occasionally claims that a processor does not support
1091 a particular feature even though it does, but the feature is slower
1092 than the alternative. Thus, it shouldn't be relied on as a
1093 complete description of the processor's support.
1095 Please keep this list in order, and don't forget to update the
1096 documentation in invoke.texi when adding a new processor or
1100 const char *const name; /* Canonical processor name. */
1101 const enum processor_type processor; /* Processor type enum value. */
1102 const int target_enable; /* Target flags to enable. */
1103 } const processor_target_table[]
1104 = {{"401", PROCESSOR_PPC403, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1105 {"403", PROCESSOR_PPC403,
1106 POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_STRICT_ALIGN},
1107 {"405", PROCESSOR_PPC405,
1108 POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_MULHW},
1109 {"405fp", PROCESSOR_PPC405, POWERPC_BASE_MASK | MASK_MULHW},
1110 {"440", PROCESSOR_PPC440,
1111 POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_MULHW},
1112 {"440fp", PROCESSOR_PPC440, POWERPC_BASE_MASK | MASK_MULHW},
1113 {"505", PROCESSOR_MPCCORE, POWERPC_BASE_MASK},
1114 {"601", PROCESSOR_PPC601,
1115 MASK_POWER | POWERPC_BASE_MASK | MASK_MULTIPLE | MASK_STRING},
1116 {"602", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1117 {"603", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1118 {"603e", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1119 {"604", PROCESSOR_PPC604, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1120 {"604e", PROCESSOR_PPC604e, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1121 {"620", PROCESSOR_PPC620,
1122 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
1123 {"630", PROCESSOR_PPC630,
1124 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
1125 {"740", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1126 {"7400", PROCESSOR_PPC7400, POWERPC_7400_MASK},
1127 {"7450", PROCESSOR_PPC7450, POWERPC_7400_MASK},
1128 {"750", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1129 {"801", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1130 {"821", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1131 {"823", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1132 {"8540", PROCESSOR_PPC8540, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1133 /* 8548 has a dummy entry for now. */
1134 {"8548", PROCESSOR_PPC8540, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1135 {"860", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1136 {"970", PROCESSOR_POWER4,
1137 POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
1138 {"common", PROCESSOR_COMMON, MASK_NEW_MNEMONICS},
1139 {"ec603e", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1140 {"G3", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1141 {"G4", PROCESSOR_PPC7450, POWERPC_7400_MASK},
1142 {"G5", PROCESSOR_POWER4,
1143 POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
1144 {"power", PROCESSOR_POWER, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1145 {"power2", PROCESSOR_POWER,
1146 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING},
1147 {"power3", PROCESSOR_PPC630,
1148 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
1149 {"power4", PROCESSOR_POWER4,
1150 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_MFCRF | MASK_POWERPC64},
1151 {"power5", PROCESSOR_POWER5,
1152 POWERPC_BASE_MASK | MASK_POWERPC64 | MASK_PPC_GFXOPT
1153 | MASK_MFCRF | MASK_POPCNTB},
1154 {"power5+", PROCESSOR_POWER5,
1155 POWERPC_BASE_MASK | MASK_POWERPC64 | MASK_PPC_GFXOPT
1156 | MASK_MFCRF | MASK_POPCNTB | MASK_FPRND},
1157 {"powerpc", PROCESSOR_POWERPC, POWERPC_BASE_MASK},
1158 {"powerpc64", PROCESSOR_POWERPC64,
1159 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
1160 {"rios", PROCESSOR_RIOS1, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1161 {"rios1", PROCESSOR_RIOS1, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1162 {"rios2", PROCESSOR_RIOS2,
1163 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING},
1164 {"rsc", PROCESSOR_PPC601, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1165 {"rsc1", PROCESSOR_PPC601, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1166 {"rs64", PROCESSOR_RS64A,
1167 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64}
1170 const size_t ptt_size = ARRAY_SIZE (processor_target_table);
1172 /* Some OSs don't support saving the high part of 64-bit registers on
1173 context switch. Other OSs don't support saving Altivec registers.
1174 On those OSs, we don't touch the MASK_POWERPC64 or MASK_ALTIVEC
1175 settings; if the user wants either, the user must explicitly specify
1176 them and we won't interfere with the user's specification. */
1179 POWER_MASKS = MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING,
1180 POWERPC_MASKS = (POWERPC_BASE_MASK | MASK_PPC_GPOPT
1181 | MASK_PPC_GFXOPT | MASK_POWERPC64 | MASK_ALTIVEC
1182 | MASK_MFCRF | MASK_POPCNTB | MASK_FPRND | MASK_MULHW)
1185 rs6000_init_hard_regno_mode_ok ();
1187 set_masks = POWER_MASKS | POWERPC_MASKS | MASK_SOFT_FLOAT;
1188 #ifdef OS_MISSING_POWERPC64
1189 if (OS_MISSING_POWERPC64)
1190 set_masks &= ~MASK_POWERPC64;
1192 #ifdef OS_MISSING_ALTIVEC
1193 if (OS_MISSING_ALTIVEC)
1194 set_masks &= ~MASK_ALTIVEC;
1197 /* Don't override by the processor default if given explicitly. */
1198 set_masks &= ~target_flags_explicit;
1200 /* Identify the processor type. */
1201 rs6000_select[0].string = default_cpu;
1202 rs6000_cpu = TARGET_POWERPC64 ? PROCESSOR_DEFAULT64 : PROCESSOR_DEFAULT;
1204 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
1206 ptr = &rs6000_select[i];
1207 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
1209 for (j = 0; j < ptt_size; j++)
1210 if (! strcmp (ptr->string, processor_target_table[j].name))
1212 if (ptr->set_tune_p)
1213 rs6000_cpu = processor_target_table[j].processor;
1215 if (ptr->set_arch_p)
1217 target_flags &= ~set_masks;
1218 target_flags |= (processor_target_table[j].target_enable
1225 error ("bad value (%s) for %s switch", ptr->string, ptr->name);
1232 /* If we are optimizing big endian systems for space, use the load/store
1233 multiple and string instructions. */
1234 if (BYTES_BIG_ENDIAN && optimize_size)
1235 target_flags |= ~target_flags_explicit & (MASK_MULTIPLE | MASK_STRING);
1237 /* Don't allow -mmultiple or -mstring on little endian systems
1238 unless the cpu is a 750, because the hardware doesn't support the
1239 instructions used in little endian mode, and causes an alignment
1240 trap. The 750 does not cause an alignment trap (except when the
1241 target is unaligned). */
1243 if (!BYTES_BIG_ENDIAN && rs6000_cpu != PROCESSOR_PPC750)
1245 if (TARGET_MULTIPLE)
1247 target_flags &= ~MASK_MULTIPLE;
1248 if ((target_flags_explicit & MASK_MULTIPLE) != 0)
1249 warning (0, "-mmultiple is not supported on little endian systems");
1254 target_flags &= ~MASK_STRING;
1255 if ((target_flags_explicit & MASK_STRING) != 0)
1256 warning (0, "-mstring is not supported on little endian systems");
1260 /* Set debug flags */
1261 if (rs6000_debug_name)
1263 if (! strcmp (rs6000_debug_name, "all"))
1264 rs6000_debug_stack = rs6000_debug_arg = 1;
1265 else if (! strcmp (rs6000_debug_name, "stack"))
1266 rs6000_debug_stack = 1;
1267 else if (! strcmp (rs6000_debug_name, "arg"))
1268 rs6000_debug_arg = 1;
1270 error ("unknown -mdebug-%s switch", rs6000_debug_name);
1273 if (rs6000_traceback_name)
1275 if (! strncmp (rs6000_traceback_name, "full", 4))
1276 rs6000_traceback = traceback_full;
1277 else if (! strncmp (rs6000_traceback_name, "part", 4))
1278 rs6000_traceback = traceback_part;
1279 else if (! strncmp (rs6000_traceback_name, "no", 2))
1280 rs6000_traceback = traceback_none;
1282 error ("unknown -mtraceback arg %qs; expecting %<full%>, %<partial%> or %<none%>",
1283 rs6000_traceback_name);
1286 if (!rs6000_explicit_options.long_double)
1287 rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
1289 /* Set Altivec ABI as default for powerpc64 linux. */
1290 if (TARGET_ELF && TARGET_64BIT)
1292 rs6000_altivec_abi = 1;
1293 TARGET_ALTIVEC_VRSAVE = 1;
1296 /* Set the Darwin64 ABI as default for 64-bit Darwin. */
1297 if (DEFAULT_ABI == ABI_DARWIN && TARGET_64BIT)
1299 rs6000_darwin64_abi = 1;
1301 darwin_one_byte_bool = 1;
1303 /* Default to natural alignment, for better performance. */
1304 rs6000_alignment_flags = MASK_ALIGN_NATURAL;
1307 /* Handle -mtls-size option. */
1308 rs6000_parse_tls_size_option ();
1310 #ifdef SUBTARGET_OVERRIDE_OPTIONS
1311 SUBTARGET_OVERRIDE_OPTIONS;
1313 #ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
1314 SUBSUBTARGET_OVERRIDE_OPTIONS;
1316 #ifdef SUB3TARGET_OVERRIDE_OPTIONS
1317 SUB3TARGET_OVERRIDE_OPTIONS;
1323 error ("AltiVec and E500 instructions cannot coexist");
1325 /* The e500 does not have string instructions, and we set
1326 MASK_STRING above when optimizing for size. */
1327 if ((target_flags & MASK_STRING) != 0)
1328 target_flags = target_flags & ~MASK_STRING;
1330 else if (rs6000_select[1].string != NULL)
1332 /* For the powerpc-eabispe configuration, we set all these by
1333 default, so let's unset them if we manually set another
1334 CPU that is not the E500. */
1335 if (!rs6000_explicit_options.abi)
1337 if (!rs6000_explicit_options.spe)
1339 if (!rs6000_explicit_options.float_gprs)
1340 rs6000_float_gprs = 0;
1341 if (!rs6000_explicit_options.isel)
1343 if (!rs6000_explicit_options.long_double)
1344 rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
1347 rs6000_always_hint = (rs6000_cpu != PROCESSOR_POWER4
1348 && rs6000_cpu != PROCESSOR_POWER5);
1349 rs6000_sched_groups = (rs6000_cpu == PROCESSOR_POWER4
1350 || rs6000_cpu == PROCESSOR_POWER5);
1352 rs6000_sched_restricted_insns_priority
1353 = (rs6000_sched_groups ? 1 : 0);
1355 /* Handle -msched-costly-dep option. */
1356 rs6000_sched_costly_dep
1357 = (rs6000_sched_groups ? store_to_load_dep_costly : no_dep_costly);
1359 if (rs6000_sched_costly_dep_str)
1361 if (! strcmp (rs6000_sched_costly_dep_str, "no"))
1362 rs6000_sched_costly_dep = no_dep_costly;
1363 else if (! strcmp (rs6000_sched_costly_dep_str, "all"))
1364 rs6000_sched_costly_dep = all_deps_costly;
1365 else if (! strcmp (rs6000_sched_costly_dep_str, "true_store_to_load"))
1366 rs6000_sched_costly_dep = true_store_to_load_dep_costly;
1367 else if (! strcmp (rs6000_sched_costly_dep_str, "store_to_load"))
1368 rs6000_sched_costly_dep = store_to_load_dep_costly;
1370 rs6000_sched_costly_dep = atoi (rs6000_sched_costly_dep_str);
1373 /* Handle -minsert-sched-nops option. */
1374 rs6000_sched_insert_nops
1375 = (rs6000_sched_groups ? sched_finish_regroup_exact : sched_finish_none);
1377 if (rs6000_sched_insert_nops_str)
1379 if (! strcmp (rs6000_sched_insert_nops_str, "no"))
1380 rs6000_sched_insert_nops = sched_finish_none;
1381 else if (! strcmp (rs6000_sched_insert_nops_str, "pad"))
1382 rs6000_sched_insert_nops = sched_finish_pad_groups;
1383 else if (! strcmp (rs6000_sched_insert_nops_str, "regroup_exact"))
1384 rs6000_sched_insert_nops = sched_finish_regroup_exact;
1386 rs6000_sched_insert_nops = atoi (rs6000_sched_insert_nops_str);
1389 #ifdef TARGET_REGNAMES
1390 /* If the user desires alternate register names, copy in the
1391 alternate names now. */
1392 if (TARGET_REGNAMES)
1393 memcpy (rs6000_reg_names, alt_reg_names, sizeof (rs6000_reg_names));
1396 /* Set aix_struct_return last, after the ABI is determined.
1397 If -maix-struct-return or -msvr4-struct-return was explicitly
1398 used, don't override with the ABI default. */
1399 if (!rs6000_explicit_options.aix_struct_ret)
1400 aix_struct_return = (DEFAULT_ABI != ABI_V4 || DRAFT_V4_STRUCT_RET);
1402 if (TARGET_LONG_DOUBLE_128
1403 && (DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_DARWIN))
1404 REAL_MODE_FORMAT (TFmode) = &ibm_extended_format;
1407 ASM_GENERATE_INTERNAL_LABEL (toc_label_name, "LCTOC", 1);
1409 /* We can only guarantee the availability of DI pseudo-ops when
1410 assembling for 64-bit targets. */
1413 targetm.asm_out.aligned_op.di = NULL;
1414 targetm.asm_out.unaligned_op.di = NULL;
1417 /* Set branch target alignment, if not optimizing for size. */
1420 if (rs6000_sched_groups)
1422 if (align_functions <= 0)
1423 align_functions = 16;
1424 if (align_jumps <= 0)
1426 if (align_loops <= 0)
1429 if (align_jumps_max_skip <= 0)
1430 align_jumps_max_skip = 15;
1431 if (align_loops_max_skip <= 0)
1432 align_loops_max_skip = 15;
1435 /* Arrange to save and restore machine status around nested functions. */
1436 init_machine_status = rs6000_init_machine_status;
1438 /* We should always be splitting complex arguments, but we can't break
1439 Linux and Darwin ABIs at the moment. For now, only AIX is fixed. */
1440 if (DEFAULT_ABI != ABI_AIX)
1441 targetm.calls.split_complex_arg = NULL;
1443 /* Initialize rs6000_cost with the appropriate target costs. */
1445 rs6000_cost = TARGET_POWERPC64 ? &size64_cost : &size32_cost;
1449 case PROCESSOR_RIOS1:
1450 rs6000_cost = &rios1_cost;
1453 case PROCESSOR_RIOS2:
1454 rs6000_cost = &rios2_cost;
1457 case PROCESSOR_RS64A:
1458 rs6000_cost = &rs64a_cost;
1461 case PROCESSOR_MPCCORE:
1462 rs6000_cost = &mpccore_cost;
1465 case PROCESSOR_PPC403:
1466 rs6000_cost = &ppc403_cost;
1469 case PROCESSOR_PPC405:
1470 rs6000_cost = &ppc405_cost;
1473 case PROCESSOR_PPC440:
1474 rs6000_cost = &ppc440_cost;
1477 case PROCESSOR_PPC601:
1478 rs6000_cost = &ppc601_cost;
1481 case PROCESSOR_PPC603:
1482 rs6000_cost = &ppc603_cost;
1485 case PROCESSOR_PPC604:
1486 rs6000_cost = &ppc604_cost;
1489 case PROCESSOR_PPC604e:
1490 rs6000_cost = &ppc604e_cost;
1493 case PROCESSOR_PPC620:
1494 rs6000_cost = &ppc620_cost;
1497 case PROCESSOR_PPC630:
1498 rs6000_cost = &ppc630_cost;
1501 case PROCESSOR_PPC750:
1502 case PROCESSOR_PPC7400:
1503 rs6000_cost = &ppc750_cost;
1506 case PROCESSOR_PPC7450:
1507 rs6000_cost = &ppc7450_cost;
1510 case PROCESSOR_PPC8540:
1511 rs6000_cost = &ppc8540_cost;
1514 case PROCESSOR_POWER4:
1515 case PROCESSOR_POWER5:
1516 rs6000_cost = &power4_cost;
1524 /* Implement targetm.vectorize.builtin_mask_for_load. */
1526 rs6000_builtin_mask_for_load (void)
1529 return altivec_builtin_mask_for_load;
1534 /* Handle generic options of the form -mfoo=yes/no.
1535 NAME is the option name.
1536 VALUE is the option value.
1537 FLAG is the pointer to the flag where to store a 1 or 0, depending on
1538 whether the option value is 'yes' or 'no' respectively. */
1540 rs6000_parse_yes_no_option (const char *name, const char *value, int *flag)
1544 else if (!strcmp (value, "yes"))
1546 else if (!strcmp (value, "no"))
1549 error ("unknown -m%s= option specified: '%s'", name, value);
1552 /* Validate and record the size specified with the -mtls-size option. */
1555 rs6000_parse_tls_size_option (void)
1557 if (rs6000_tls_size_string == 0)
1559 else if (strcmp (rs6000_tls_size_string, "16") == 0)
1560 rs6000_tls_size = 16;
1561 else if (strcmp (rs6000_tls_size_string, "32") == 0)
1562 rs6000_tls_size = 32;
1563 else if (strcmp (rs6000_tls_size_string, "64") == 0)
1564 rs6000_tls_size = 64;
1566 error ("bad value %qs for -mtls-size switch", rs6000_tls_size_string);
1570 optimization_options (int level ATTRIBUTE_UNUSED, int size ATTRIBUTE_UNUSED)
1572 if (DEFAULT_ABI == ABI_DARWIN)
1573 /* The Darwin libraries never set errno, so we might as well
1574 avoid calling them when that's the only reason we would. */
1575 flag_errno_math = 0;
1577 /* Double growth factor to counter reduced min jump length. */
1578 set_param_value ("max-grow-copy-bb-insns", 16);
1581 /* Implement TARGET_HANDLE_OPTION. */
1584 rs6000_handle_option (size_t code, const char *arg, int value)
1589 target_flags &= ~(MASK_POWER | MASK_POWER2
1590 | MASK_MULTIPLE | MASK_STRING);
1591 target_flags_explicit |= (MASK_POWER | MASK_POWER2
1592 | MASK_MULTIPLE | MASK_STRING);
1594 case OPT_mno_powerpc:
1595 target_flags &= ~(MASK_POWERPC | MASK_PPC_GPOPT
1596 | MASK_PPC_GFXOPT | MASK_POWERPC64);
1597 target_flags_explicit |= (MASK_POWERPC | MASK_PPC_GPOPT
1598 | MASK_PPC_GFXOPT | MASK_POWERPC64);
1601 target_flags &= ~(MASK_MINIMAL_TOC | MASK_NO_FP_IN_TOC
1602 | MASK_NO_SUM_IN_TOC);
1603 target_flags_explicit |= (MASK_MINIMAL_TOC | MASK_NO_FP_IN_TOC
1604 | MASK_NO_SUM_IN_TOC);
1605 #ifdef TARGET_USES_SYSV4_OPT
1606 /* Note, V.4 no longer uses a normal TOC, so make -mfull-toc, be
1607 just the same as -mminimal-toc. */
1608 target_flags |= MASK_MINIMAL_TOC;
1609 target_flags_explicit |= MASK_MINIMAL_TOC;
1613 #ifdef TARGET_USES_SYSV4_OPT
1615 /* Make -mtoc behave like -mminimal-toc. */
1616 target_flags |= MASK_MINIMAL_TOC;
1617 target_flags_explicit |= MASK_MINIMAL_TOC;
1621 #ifdef TARGET_USES_AIX64_OPT
1626 target_flags |= MASK_POWERPC64 | MASK_POWERPC;
1627 target_flags |= ~target_flags_explicit & MASK_PPC_GFXOPT;
1628 target_flags_explicit |= MASK_POWERPC64 | MASK_POWERPC;
1631 #ifdef TARGET_USES_AIX64_OPT
1636 target_flags &= ~MASK_POWERPC64;
1637 target_flags_explicit |= MASK_POWERPC64;
1640 case OPT_minsert_sched_nops_:
1641 rs6000_sched_insert_nops_str = arg;
1644 case OPT_mminimal_toc:
1647 target_flags &= ~(MASK_NO_FP_IN_TOC | MASK_NO_SUM_IN_TOC);
1648 target_flags_explicit |= (MASK_NO_FP_IN_TOC | MASK_NO_SUM_IN_TOC);
1655 target_flags |= (MASK_MULTIPLE | MASK_STRING);
1656 target_flags_explicit |= (MASK_MULTIPLE | MASK_STRING);
1663 target_flags |= (MASK_POWER | MASK_MULTIPLE | MASK_STRING);
1664 target_flags_explicit |= (MASK_POWER | MASK_MULTIPLE | MASK_STRING);
1668 case OPT_mpowerpc_gpopt:
1669 case OPT_mpowerpc_gfxopt:
1672 target_flags |= MASK_POWERPC;
1673 target_flags_explicit |= MASK_POWERPC;
1677 case OPT_maix_struct_return:
1678 case OPT_msvr4_struct_return:
1679 rs6000_explicit_options.aix_struct_ret = true;
1683 rs6000_parse_yes_no_option ("vrsave", arg, &(TARGET_ALTIVEC_VRSAVE));
1687 rs6000_explicit_options.isel = true;
1688 rs6000_parse_yes_no_option ("isel", arg, &(rs6000_isel));
1692 rs6000_explicit_options.spe = true;
1693 rs6000_parse_yes_no_option ("spe", arg, &(rs6000_spe));
1694 /* No SPE means 64-bit long doubles, even if an E500. */
1696 rs6000_long_double_type_size = 64;
1700 rs6000_debug_name = arg;
1703 #ifdef TARGET_USES_SYSV4_OPT
1705 rs6000_abi_name = arg;
1709 rs6000_sdata_name = arg;
1712 case OPT_mtls_size_:
1713 rs6000_tls_size_string = arg;
1716 case OPT_mrelocatable:
1719 target_flags |= MASK_MINIMAL_TOC | MASK_NO_FP_IN_TOC;
1720 target_flags_explicit |= MASK_MINIMAL_TOC | MASK_NO_FP_IN_TOC;
1724 case OPT_mrelocatable_lib:
1727 target_flags |= MASK_RELOCATABLE | MASK_MINIMAL_TOC
1728 | MASK_NO_FP_IN_TOC;
1729 target_flags_explicit |= MASK_RELOCATABLE | MASK_MINIMAL_TOC
1730 | MASK_NO_FP_IN_TOC;
1734 target_flags &= ~MASK_RELOCATABLE;
1735 target_flags_explicit |= MASK_RELOCATABLE;
1741 rs6000_explicit_options.abi = true;
1742 if (!strcmp (arg, "altivec"))
1744 rs6000_altivec_abi = 1;
1747 else if (! strcmp (arg, "no-altivec"))
1748 rs6000_altivec_abi = 0;
1749 else if (! strcmp (arg, "spe"))
1752 rs6000_altivec_abi = 0;
1753 if (!TARGET_SPE_ABI)
1754 error ("not configured for ABI: '%s'", arg);
1756 else if (! strcmp (arg, "no-spe"))
1759 /* These are here for testing during development only, do not
1760 document in the manual please. */
1761 else if (! strcmp (arg, "d64"))
1763 rs6000_darwin64_abi = 1;
1764 warning (0, "Using darwin64 ABI");
1766 else if (! strcmp (arg, "d32"))
1768 rs6000_darwin64_abi = 0;
1769 warning (0, "Using old darwin ABI");
1774 error ("unknown ABI specified: '%s'", arg);
1780 rs6000_select[1].string = arg;
1784 rs6000_select[2].string = arg;
1787 case OPT_mtraceback_:
1788 rs6000_traceback_name = arg;
1791 case OPT_mfloat_gprs_:
1792 rs6000_explicit_options.float_gprs = true;
1793 if (! strcmp (arg, "yes") || ! strcmp (arg, "single"))
1794 rs6000_float_gprs = 1;
1795 else if (! strcmp (arg, "double"))
1796 rs6000_float_gprs = 2;
1797 else if (! strcmp (arg, "no"))
1798 rs6000_float_gprs = 0;
1801 error ("invalid option for -mfloat-gprs: '%s'", arg);
1806 case OPT_mlong_double_:
1807 rs6000_explicit_options.long_double = true;
1808 rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
1809 if (value != 64 && value != 128)
1811 error ("Unknown switch -mlong-double-%s", arg);
1812 rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
1816 rs6000_long_double_type_size = value;
1819 case OPT_msched_costly_dep_:
1820 rs6000_sched_costly_dep_str = arg;
1824 rs6000_explicit_options.alignment = true;
1825 if (! strcmp (arg, "power"))
1827 /* On 64-bit Darwin, power alignment is ABI-incompatible with
1828 some C library functions, so warn about it. The flag may be
1829 useful for performance studies from time to time though, so
1830 don't disable it entirely. */
1831 if (DEFAULT_ABI == ABI_DARWIN && TARGET_64BIT)
1832 warning (0, "-malign-power is not supported for 64-bit Darwin;"
1833 " it is incompatible with the installed C and C++ libraries");
1834 rs6000_alignment_flags = MASK_ALIGN_POWER;
1836 else if (! strcmp (arg, "natural"))
1837 rs6000_alignment_flags = MASK_ALIGN_NATURAL;
1840 error ("unknown -malign-XXXXX option specified: '%s'", arg);
1848 /* Do anything needed at the start of the asm file. */
1851 rs6000_file_start (void)
1855 const char *start = buffer;
1856 struct rs6000_cpu_select *ptr;
1857 const char *default_cpu = TARGET_CPU_DEFAULT;
1858 FILE *file = asm_out_file;
1860 default_file_start ();
1862 #ifdef TARGET_BI_ARCH
1863 if ((TARGET_DEFAULT ^ target_flags) & MASK_64BIT)
1867 if (flag_verbose_asm)
1869 sprintf (buffer, "\n%s rs6000/powerpc options:", ASM_COMMENT_START);
1870 rs6000_select[0].string = default_cpu;
1872 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
1874 ptr = &rs6000_select[i];
1875 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
1877 fprintf (file, "%s %s%s", start, ptr->name, ptr->string);
1882 if (PPC405_ERRATUM77)
1884 fprintf (file, "%s PPC405CR_ERRATUM77", start);
1888 #ifdef USING_ELFOS_H
1889 switch (rs6000_sdata)
1891 case SDATA_NONE: fprintf (file, "%s -msdata=none", start); start = ""; break;
1892 case SDATA_DATA: fprintf (file, "%s -msdata=data", start); start = ""; break;
1893 case SDATA_SYSV: fprintf (file, "%s -msdata=sysv", start); start = ""; break;
1894 case SDATA_EABI: fprintf (file, "%s -msdata=eabi", start); start = ""; break;
1897 if (rs6000_sdata && g_switch_value)
1899 fprintf (file, "%s -G " HOST_WIDE_INT_PRINT_UNSIGNED, start,
1909 if (DEFAULT_ABI == ABI_AIX || (TARGET_ELF && flag_pic == 2))
1917 /* Return nonzero if this function is known to have a null epilogue. */
1920 direct_return (void)
1922 if (reload_completed)
1924 rs6000_stack_t *info = rs6000_stack_info ();
1926 if (info->first_gp_reg_save == 32
1927 && info->first_fp_reg_save == 64
1928 && info->first_altivec_reg_save == LAST_ALTIVEC_REGNO + 1
1929 && ! info->lr_save_p
1930 && ! info->cr_save_p
1931 && info->vrsave_mask == 0
1939 /* Return the number of instructions it takes to form a constant in an
1940 integer register. */
1943 num_insns_constant_wide (HOST_WIDE_INT value)
1945 /* signed constant loadable with {cal|addi} */
1946 if (CONST_OK_FOR_LETTER_P (value, 'I'))
1949 /* constant loadable with {cau|addis} */
1950 else if (CONST_OK_FOR_LETTER_P (value, 'L'))
1953 #if HOST_BITS_PER_WIDE_INT == 64
1954 else if (TARGET_POWERPC64)
1956 HOST_WIDE_INT low = ((value & 0xffffffff) ^ 0x80000000) - 0x80000000;
1957 HOST_WIDE_INT high = value >> 31;
1959 if (high == 0 || high == -1)
1965 return num_insns_constant_wide (high) + 1;
1967 return (num_insns_constant_wide (high)
1968 + num_insns_constant_wide (low) + 1);
1977 num_insns_constant (rtx op, enum machine_mode mode)
1979 HOST_WIDE_INT low, high;
1981 switch (GET_CODE (op))
1984 #if HOST_BITS_PER_WIDE_INT == 64
1985 if ((INTVAL (op) >> 31) != 0 && (INTVAL (op) >> 31) != -1
1986 && mask64_operand (op, mode))
1990 return num_insns_constant_wide (INTVAL (op));
1998 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1999 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
2000 return num_insns_constant_wide ((HOST_WIDE_INT) l);
2003 if (mode == VOIDmode || mode == DImode)
2005 high = CONST_DOUBLE_HIGH (op);
2006 low = CONST_DOUBLE_LOW (op);
2013 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
2014 REAL_VALUE_TO_TARGET_DOUBLE (rv, l);
2015 high = l[WORDS_BIG_ENDIAN == 0];
2016 low = l[WORDS_BIG_ENDIAN != 0];
2020 return (num_insns_constant_wide (low)
2021 + num_insns_constant_wide (high));
2024 if ((high == 0 && low >= 0)
2025 || (high == -1 && low < 0))
2026 return num_insns_constant_wide (low);
2028 else if (mask64_operand (op, mode))
2032 return num_insns_constant_wide (high) + 1;
2035 return (num_insns_constant_wide (high)
2036 + num_insns_constant_wide (low) + 1);
2045 /* Return true if OP can be synthesized with a particular vspltisb, vspltish
2046 or vspltisw instruction. OP is a CONST_VECTOR. Which instruction is used
2047 depends on STEP and COPIES, one of which will be 1. If COPIES > 1,
2048 all items are set to the same value and contain COPIES replicas of the
2049 vsplt's operand; if STEP > 1, one in STEP elements is set to the vsplt's
2050 operand and the others are set to the value of the operand's msb. */
2053 vspltis_constant (rtx op, unsigned step, unsigned copies)
2055 enum machine_mode mode = GET_MODE (op);
2056 enum machine_mode inner = GET_MODE_INNER (mode);
2059 unsigned nunits = GET_MODE_NUNITS (mode);
2060 unsigned bitsize = GET_MODE_BITSIZE (inner);
2061 unsigned mask = GET_MODE_MASK (inner);
2063 rtx last = CONST_VECTOR_ELT (op, nunits - 1);
2064 HOST_WIDE_INT val = INTVAL (last);
2065 HOST_WIDE_INT splat_val = val;
2066 HOST_WIDE_INT msb_val = val > 0 ? 0 : -1;
2068 /* Construct the value to be splatted, if possible. If not, return 0. */
2069 for (i = 2; i <= copies; i *= 2)
2071 HOST_WIDE_INT small_val;
2073 small_val = splat_val >> bitsize;
2075 if (splat_val != ((small_val << bitsize) | (small_val & mask)))
2077 splat_val = small_val;
2080 /* Check if SPLAT_VAL can really be the operand of a vspltis[bhw]. */
2081 if (EASY_VECTOR_15 (splat_val))
2084 /* Also check if we can splat, and then add the result to itself. Do so if
2085 the value is positive, of if the splat instruction is using OP's mode;
2086 for splat_val < 0, the splat and the add should use the same mode. */
2087 else if (EASY_VECTOR_15_ADD_SELF (splat_val)
2088 && (splat_val >= 0 || (step == 1 && copies == 1)))
2094 /* Check if VAL is present in every STEP-th element, and the
2095 other elements are filled with its most significant bit. */
2096 for (i = 0; i < nunits - 1; ++i)
2098 HOST_WIDE_INT desired_val;
2099 if (((i + 1) & (step - 1)) == 0)
2102 desired_val = msb_val;
2104 if (desired_val != INTVAL (CONST_VECTOR_ELT (op, i)))
2112 /* Return true if OP is of the given MODE and can be synthesized
2113 with a vspltisb, vspltish or vspltisw. */
2116 easy_altivec_constant (rtx op, enum machine_mode mode)
2118 unsigned step, copies;
2120 if (mode == VOIDmode)
2121 mode = GET_MODE (op);
2122 else if (mode != GET_MODE (op))
2125 /* Start with a vspltisw. */
2126 step = GET_MODE_NUNITS (mode) / 4;
2129 if (vspltis_constant (op, step, copies))
2132 /* Then try with a vspltish. */
2138 if (vspltis_constant (op, step, copies))
2141 /* And finally a vspltisb. */
2147 if (vspltis_constant (op, step, copies))
2153 /* Generate a VEC_DUPLICATE representing a vspltis[bhw] instruction whose
2154 result is OP. Abort if it is not possible. */
2157 gen_easy_altivec_constant (rtx op)
2159 enum machine_mode mode = GET_MODE (op);
2160 int nunits = GET_MODE_NUNITS (mode);
2161 rtx last = CONST_VECTOR_ELT (op, nunits - 1);
2162 unsigned step = nunits / 4;
2163 unsigned copies = 1;
2165 /* Start with a vspltisw. */
2166 if (vspltis_constant (op, step, copies))
2167 return gen_rtx_VEC_DUPLICATE (V4SImode, gen_lowpart (SImode, last));
2169 /* Then try with a vspltish. */
2175 if (vspltis_constant (op, step, copies))
2176 return gen_rtx_VEC_DUPLICATE (V8HImode, gen_lowpart (HImode, last));
2178 /* And finally a vspltisb. */
2184 if (vspltis_constant (op, step, copies))
2185 return gen_rtx_VEC_DUPLICATE (V16QImode, gen_lowpart (QImode, last));
2191 output_vec_const_move (rtx *operands)
2194 enum machine_mode mode;
2199 mode = GET_MODE (dest);
2204 if (zero_constant (vec, mode))
2205 return "vxor %0,%0,%0";
2207 splat_vec = gen_easy_altivec_constant (vec);
2208 gcc_assert (GET_CODE (splat_vec) == VEC_DUPLICATE);
2209 operands[1] = XEXP (splat_vec, 0);
2210 if (!EASY_VECTOR_15 (INTVAL (operands[1])))
2213 switch (GET_MODE (splat_vec))
2216 return "vspltisw %0,%1";
2219 return "vspltish %0,%1";
2222 return "vspltisb %0,%1";
2229 gcc_assert (TARGET_SPE);
2231 /* Vector constant 0 is handled as a splitter of V2SI, and in the
2232 pattern of V1DI, V4HI, and V2SF.
2234 FIXME: We should probably return # and add post reload
2235 splitters for these, but this way is so easy ;-). */
2236 cst = INTVAL (CONST_VECTOR_ELT (vec, 0));
2237 cst2 = INTVAL (CONST_VECTOR_ELT (vec, 1));
2238 operands[1] = CONST_VECTOR_ELT (vec, 0);
2239 operands[2] = CONST_VECTOR_ELT (vec, 1);
2241 return "li %0,%1\n\tevmergelo %0,%0,%0";
2243 return "li %0,%1\n\tevmergelo %0,%0,%0\n\tli %0,%2";
2246 /* Initialize vector TARGET to VALS. */
2249 rs6000_expand_vector_init (rtx target, rtx vals)
2251 enum machine_mode mode = GET_MODE (target);
2252 enum machine_mode inner_mode = GET_MODE_INNER (mode);
2253 int n_elts = GET_MODE_NUNITS (mode);
2254 int n_var = 0, one_var = -1;
2255 bool all_same = true, all_const_zero = true;
2259 for (i = 0; i < n_elts; ++i)
2261 x = XVECEXP (vals, 0, i);
2262 if (!CONSTANT_P (x))
2263 ++n_var, one_var = i;
2264 else if (x != CONST0_RTX (inner_mode))
2265 all_const_zero = false;
2267 if (i > 0 && !rtx_equal_p (x, XVECEXP (vals, 0, 0)))
2273 if (mode != V4SFmode && all_const_zero)
2275 /* Zero register. */
2276 emit_insn (gen_rtx_SET (VOIDmode, target,
2277 gen_rtx_XOR (mode, target, target)));
2280 else if (mode != V4SFmode && easy_vector_constant (vals, mode))
2282 /* Splat immediate. */
2283 emit_insn (gen_rtx_SET (VOIDmode, target, vals));
2287 ; /* Splat vector element. */
2290 /* Load from constant pool. */
2291 emit_move_insn (target, gen_rtx_CONST_VECTOR (mode, XVEC (vals, 0)));
2296 /* Store value to stack temp. Load vector element. Splat. */
2299 mem = assign_stack_temp (mode, GET_MODE_SIZE (inner_mode), 0);
2300 emit_move_insn (adjust_address_nv (mem, inner_mode, 0),
2301 XVECEXP (vals, 0, 0));
2302 x = gen_rtx_UNSPEC (VOIDmode,
2303 gen_rtvec (1, const0_rtx), UNSPEC_LVE);
2304 emit_insn (gen_rtx_PARALLEL (VOIDmode,
2306 gen_rtx_SET (VOIDmode,
2309 x = gen_rtx_VEC_SELECT (inner_mode, target,
2310 gen_rtx_PARALLEL (VOIDmode,
2311 gen_rtvec (1, const0_rtx)));
2312 emit_insn (gen_rtx_SET (VOIDmode, target,
2313 gen_rtx_VEC_DUPLICATE (mode, x)));
2317 /* One field is non-constant. Load constant then overwrite
2321 rtx copy = copy_rtx (vals);
2323 /* Load constant part of vector, substitute neighboring value for
2325 XVECEXP (copy, 0, one_var) = XVECEXP (vals, 0, (one_var + 1) % n_elts);
2326 rs6000_expand_vector_init (target, copy);
2328 /* Insert variable. */
2329 rs6000_expand_vector_set (target, XVECEXP (vals, 0, one_var), one_var);
2333 /* Construct the vector in memory one field at a time
2334 and load the whole vector. */
2335 mem = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
2336 for (i = 0; i < n_elts; i++)
2337 emit_move_insn (adjust_address_nv (mem, inner_mode,
2338 i * GET_MODE_SIZE (inner_mode)),
2339 XVECEXP (vals, 0, i));
2340 emit_move_insn (target, mem);
2343 /* Set field ELT of TARGET to VAL. */
2346 rs6000_expand_vector_set (rtx target, rtx val, int elt)
2348 enum machine_mode mode = GET_MODE (target);
2349 enum machine_mode inner_mode = GET_MODE_INNER (mode);
2350 rtx reg = gen_reg_rtx (mode);
2352 int width = GET_MODE_SIZE (inner_mode);
2355 /* Load single variable value. */
2356 mem = assign_stack_temp (mode, GET_MODE_SIZE (inner_mode), 0);
2357 emit_move_insn (adjust_address_nv (mem, inner_mode, 0), val);
2358 x = gen_rtx_UNSPEC (VOIDmode,
2359 gen_rtvec (1, const0_rtx), UNSPEC_LVE);
2360 emit_insn (gen_rtx_PARALLEL (VOIDmode,
2362 gen_rtx_SET (VOIDmode,
2366 /* Linear sequence. */
2367 mask = gen_rtx_PARALLEL (V16QImode, rtvec_alloc (16));
2368 for (i = 0; i < 16; ++i)
2369 XVECEXP (mask, 0, i) = GEN_INT (i);
2371 /* Set permute mask to insert element into target. */
2372 for (i = 0; i < width; ++i)
2373 XVECEXP (mask, 0, elt*width + i)
2374 = GEN_INT (i + 0x10);
2375 x = gen_rtx_CONST_VECTOR (V16QImode, XVEC (mask, 0));
2376 x = gen_rtx_UNSPEC (mode,
2377 gen_rtvec (3, target, reg,
2378 force_reg (V16QImode, x)),
2380 emit_insn (gen_rtx_SET (VOIDmode, target, x));
2383 /* Extract field ELT from VEC into TARGET. */
2386 rs6000_expand_vector_extract (rtx target, rtx vec, int elt)
2388 enum machine_mode mode = GET_MODE (vec);
2389 enum machine_mode inner_mode = GET_MODE_INNER (mode);
2392 /* Allocate mode-sized buffer. */
2393 mem = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
2395 /* Add offset to field within buffer matching vector element. */
2396 mem = adjust_address_nv (mem, mode, elt * GET_MODE_SIZE (inner_mode));
2398 /* Store single field into mode-sized buffer. */
2399 x = gen_rtx_UNSPEC (VOIDmode,
2400 gen_rtvec (1, const0_rtx), UNSPEC_STVE);
2401 emit_insn (gen_rtx_PARALLEL (VOIDmode,
2403 gen_rtx_SET (VOIDmode,
2406 emit_move_insn (target, adjust_address_nv (mem, inner_mode, 0));
2409 /* Generates shifts and masks for a pair of rldicl or rldicr insns to
2410 implement ANDing by the mask IN. */
2412 build_mask64_2_operands (rtx in, rtx *out)
2414 #if HOST_BITS_PER_WIDE_INT >= 64
2415 unsigned HOST_WIDE_INT c, lsb, m1, m2;
2418 gcc_assert (GET_CODE (in) == CONST_INT);
2423 /* Assume c initially something like 0x00fff000000fffff. The idea
2424 is to rotate the word so that the middle ^^^^^^ group of zeros
2425 is at the MS end and can be cleared with an rldicl mask. We then
2426 rotate back and clear off the MS ^^ group of zeros with a
2428 c = ~c; /* c == 0xff000ffffff00000 */
2429 lsb = c & -c; /* lsb == 0x0000000000100000 */
2430 m1 = -lsb; /* m1 == 0xfffffffffff00000 */
2431 c = ~c; /* c == 0x00fff000000fffff */
2432 c &= -lsb; /* c == 0x00fff00000000000 */
2433 lsb = c & -c; /* lsb == 0x0000100000000000 */
2434 c = ~c; /* c == 0xff000fffffffffff */
2435 c &= -lsb; /* c == 0xff00000000000000 */
2437 while ((lsb >>= 1) != 0)
2438 shift++; /* shift == 44 on exit from loop */
2439 m1 <<= 64 - shift; /* m1 == 0xffffff0000000000 */
2440 m1 = ~m1; /* m1 == 0x000000ffffffffff */
2441 m2 = ~c; /* m2 == 0x00ffffffffffffff */
2445 /* Assume c initially something like 0xff000f0000000000. The idea
2446 is to rotate the word so that the ^^^ middle group of zeros
2447 is at the LS end and can be cleared with an rldicr mask. We then
2448 rotate back and clear off the LS group of ^^^^^^^^^^ zeros with
2450 lsb = c & -c; /* lsb == 0x0000010000000000 */
2451 m2 = -lsb; /* m2 == 0xffffff0000000000 */
2452 c = ~c; /* c == 0x00fff0ffffffffff */
2453 c &= -lsb; /* c == 0x00fff00000000000 */
2454 lsb = c & -c; /* lsb == 0x0000100000000000 */
2455 c = ~c; /* c == 0xff000fffffffffff */
2456 c &= -lsb; /* c == 0xff00000000000000 */
2458 while ((lsb >>= 1) != 0)
2459 shift++; /* shift == 44 on exit from loop */
2460 m1 = ~c; /* m1 == 0x00ffffffffffffff */
2461 m1 >>= shift; /* m1 == 0x0000000000000fff */
2462 m1 = ~m1; /* m1 == 0xfffffffffffff000 */
2465 /* Note that when we only have two 0->1 and 1->0 transitions, one of the
2466 masks will be all 1's. We are guaranteed more than one transition. */
2467 out[0] = GEN_INT (64 - shift);
2468 out[1] = GEN_INT (m1);
2469 out[2] = GEN_INT (shift);
2470 out[3] = GEN_INT (m2);
2478 /* Return TRUE if OP is an invalid SUBREG operation on the e500. */
2481 invalid_e500_subreg (rtx op, enum machine_mode mode)
2483 /* Reject (subreg:SI (reg:DF)). */
2484 if (GET_CODE (op) == SUBREG
2486 && REG_P (SUBREG_REG (op))
2487 && GET_MODE (SUBREG_REG (op)) == DFmode)
2490 /* Reject (subreg:DF (reg:DI)). */
2491 if (GET_CODE (op) == SUBREG
2493 && REG_P (SUBREG_REG (op))
2494 && GET_MODE (SUBREG_REG (op)) == DImode)
2500 /* Darwin, AIX increases natural record alignment to doubleword if the first
2501 field is an FP double while the FP fields remain word aligned. */
2504 rs6000_special_round_type_align (tree type, int computed, int specified)
2506 tree field = TYPE_FIELDS (type);
2508 /* Skip all non field decls */
2509 while (field != NULL && TREE_CODE (field) != FIELD_DECL)
2510 field = TREE_CHAIN (field);
2512 if (field == NULL || field == type || DECL_MODE (field) != DFmode)
2513 return MAX (computed, specified);
2515 return MAX (MAX (computed, specified), 64);
2518 /* Return 1 for an operand in small memory on V.4/eabi. */
2521 small_data_operand (rtx op ATTRIBUTE_UNUSED,
2522 enum machine_mode mode ATTRIBUTE_UNUSED)
2527 if (rs6000_sdata == SDATA_NONE || rs6000_sdata == SDATA_DATA)
2530 if (DEFAULT_ABI != ABI_V4)
2533 if (GET_CODE (op) == SYMBOL_REF)
2536 else if (GET_CODE (op) != CONST
2537 || GET_CODE (XEXP (op, 0)) != PLUS
2538 || GET_CODE (XEXP (XEXP (op, 0), 0)) != SYMBOL_REF
2539 || GET_CODE (XEXP (XEXP (op, 0), 1)) != CONST_INT)
2544 rtx sum = XEXP (op, 0);
2545 HOST_WIDE_INT summand;
2547 /* We have to be careful here, because it is the referenced address
2548 that must be 32k from _SDA_BASE_, not just the symbol. */
2549 summand = INTVAL (XEXP (sum, 1));
2550 if (summand < 0 || (unsigned HOST_WIDE_INT) summand > g_switch_value)
2553 sym_ref = XEXP (sum, 0);
2556 return SYMBOL_REF_SMALL_P (sym_ref);
2562 /* Return true if either operand is a general purpose register. */
2565 gpr_or_gpr_p (rtx op0, rtx op1)
2567 return ((REG_P (op0) && INT_REGNO_P (REGNO (op0)))
2568 || (REG_P (op1) && INT_REGNO_P (REGNO (op1))));
2572 /* Subroutines of rs6000_legitimize_address and rs6000_legitimate_address. */
2575 constant_pool_expr_1 (rtx op, int *have_sym, int *have_toc)
2577 switch (GET_CODE (op))
2580 if (RS6000_SYMBOL_REF_TLS_P (op))
2582 else if (CONSTANT_POOL_ADDRESS_P (op))
2584 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (op), Pmode))
2592 else if (! strcmp (XSTR (op, 0), toc_label_name))
2601 return (constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc)
2602 && constant_pool_expr_1 (XEXP (op, 1), have_sym, have_toc));
2604 return constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc);
2613 constant_pool_expr_p (rtx op)
2617 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_sym;
2621 toc_relative_expr_p (rtx op)
2625 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_toc;
2629 legitimate_constant_pool_address_p (rtx x)
2632 && GET_CODE (x) == PLUS
2633 && GET_CODE (XEXP (x, 0)) == REG
2634 && (TARGET_MINIMAL_TOC || REGNO (XEXP (x, 0)) == TOC_REGISTER)
2635 && constant_pool_expr_p (XEXP (x, 1)));
2639 rs6000_legitimate_small_data_p (enum machine_mode mode, rtx x)
2641 return (DEFAULT_ABI == ABI_V4
2642 && !flag_pic && !TARGET_TOC
2643 && (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST)
2644 && small_data_operand (x, mode));
2647 /* SPE offset addressing is limited to 5-bits worth of double words. */
2648 #define SPE_CONST_OFFSET_OK(x) (((x) & ~0xf8) == 0)
2651 rs6000_legitimate_offset_address_p (enum machine_mode mode, rtx x, int strict)
2653 unsigned HOST_WIDE_INT offset, extra;
2655 if (GET_CODE (x) != PLUS)
2657 if (GET_CODE (XEXP (x, 0)) != REG)
2659 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
2661 if (legitimate_constant_pool_address_p (x))
2663 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
2666 offset = INTVAL (XEXP (x, 1));
2674 /* AltiVec vector modes. Only reg+reg addressing is valid and
2675 constant offset zero should not occur due to canonicalization.
2676 Allow any offset when not strict before reload. */
2683 /* SPE vector modes. */
2684 return SPE_CONST_OFFSET_OK (offset);
2687 if (TARGET_E500_DOUBLE)
2688 return SPE_CONST_OFFSET_OK (offset);
2691 /* On e500v2, we may have:
2693 (subreg:DF (mem:DI (plus (reg) (const_int))) 0).
2695 Which gets addressed with evldd instructions. */
2696 if (TARGET_E500_DOUBLE)
2697 return SPE_CONST_OFFSET_OK (offset);
2699 if (mode == DFmode || !TARGET_POWERPC64)
2701 else if (offset & 3)
2707 if (mode == TFmode || !TARGET_POWERPC64)
2709 else if (offset & 3)
2720 return (offset < 0x10000) && (offset + extra < 0x10000);
2724 legitimate_indexed_address_p (rtx x, int strict)
2728 if (GET_CODE (x) != PLUS)
2734 if (!REG_P (op0) || !REG_P (op1))
2737 return ((INT_REG_OK_FOR_BASE_P (op0, strict)
2738 && INT_REG_OK_FOR_INDEX_P (op1, strict))
2739 || (INT_REG_OK_FOR_BASE_P (op1, strict)
2740 && INT_REG_OK_FOR_INDEX_P (op0, strict)));
2744 legitimate_indirect_address_p (rtx x, int strict)
2746 return GET_CODE (x) == REG && INT_REG_OK_FOR_BASE_P (x, strict);
2750 macho_lo_sum_memory_operand (rtx x, enum machine_mode mode)
2752 if (!TARGET_MACHO || !flag_pic
2753 || mode != SImode || GET_CODE (x) != MEM)
2757 if (GET_CODE (x) != LO_SUM)
2759 if (GET_CODE (XEXP (x, 0)) != REG)
2761 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), 0))
2765 return CONSTANT_P (x);
2769 legitimate_lo_sum_address_p (enum machine_mode mode, rtx x, int strict)
2771 if (GET_CODE (x) != LO_SUM)
2773 if (GET_CODE (XEXP (x, 0)) != REG)
2775 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
2777 /* Restrict addressing for DI because of our SUBREG hackery. */
2778 if (TARGET_E500_DOUBLE && (mode == DFmode || mode == DImode))
2782 if (TARGET_ELF || TARGET_MACHO)
2784 if (DEFAULT_ABI != ABI_AIX && DEFAULT_ABI != ABI_DARWIN && flag_pic)
2788 if (GET_MODE_NUNITS (mode) != 1)
2790 if (GET_MODE_BITSIZE (mode) > 64
2791 || (GET_MODE_BITSIZE (mode) > 32 && !TARGET_POWERPC64
2792 && !(TARGET_HARD_FLOAT && TARGET_FPRS && mode == DFmode)))
2795 return CONSTANT_P (x);
2802 /* Try machine-dependent ways of modifying an illegitimate address
2803 to be legitimate. If we find one, return the new, valid address.
2804 This is used from only one place: `memory_address' in explow.c.
2806 OLDX is the address as it was before break_out_memory_refs was
2807 called. In some cases it is useful to look at this to decide what
2810 MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.
2812 It is always safe for this function to do nothing. It exists to
2813 recognize opportunities to optimize the output.
2815 On RS/6000, first check for the sum of a register with a constant
2816 integer that is out of range. If so, generate code to add the
2817 constant with the low-order 16 bits masked to the register and force
2818 this result into another register (this can be done with `cau').
2819 Then generate an address of REG+(CONST&0xffff), allowing for the
2820 possibility of bit 16 being a one.
2822 Then check for the sum of a register and something not constant, try to
2823 load the other things into a register and return the sum. */
2826 rs6000_legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
2827 enum machine_mode mode)
2829 if (GET_CODE (x) == SYMBOL_REF)
2831 enum tls_model model = SYMBOL_REF_TLS_MODEL (x);
2833 return rs6000_legitimize_tls_address (x, model);
2836 if (GET_CODE (x) == PLUS
2837 && GET_CODE (XEXP (x, 0)) == REG
2838 && GET_CODE (XEXP (x, 1)) == CONST_INT
2839 && (unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1)) + 0x8000) >= 0x10000)
2841 HOST_WIDE_INT high_int, low_int;
2843 low_int = ((INTVAL (XEXP (x, 1)) & 0xffff) ^ 0x8000) - 0x8000;
2844 high_int = INTVAL (XEXP (x, 1)) - low_int;
2845 sum = force_operand (gen_rtx_PLUS (Pmode, XEXP (x, 0),
2846 GEN_INT (high_int)), 0);
2847 return gen_rtx_PLUS (Pmode, sum, GEN_INT (low_int));
2849 else if (GET_CODE (x) == PLUS
2850 && GET_CODE (XEXP (x, 0)) == REG
2851 && GET_CODE (XEXP (x, 1)) != CONST_INT
2852 && GET_MODE_NUNITS (mode) == 1
2853 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
2855 || (((mode != DImode && mode != DFmode) || TARGET_E500_DOUBLE)
2857 && (TARGET_POWERPC64 || mode != DImode)
2860 return gen_rtx_PLUS (Pmode, XEXP (x, 0),
2861 force_reg (Pmode, force_operand (XEXP (x, 1), 0)));
2863 else if (ALTIVEC_VECTOR_MODE (mode))
2867 /* Make sure both operands are registers. */
2868 if (GET_CODE (x) == PLUS)
2869 return gen_rtx_PLUS (Pmode, force_reg (Pmode, XEXP (x, 0)),
2870 force_reg (Pmode, XEXP (x, 1)));
2872 reg = force_reg (Pmode, x);
2875 else if (SPE_VECTOR_MODE (mode)
2876 || (TARGET_E500_DOUBLE && (mode == DFmode
2877 || mode == DImode)))
2881 /* We accept [reg + reg] and [reg + OFFSET]. */
2883 if (GET_CODE (x) == PLUS)
2885 rtx op1 = XEXP (x, 0);
2886 rtx op2 = XEXP (x, 1);
2888 op1 = force_reg (Pmode, op1);
2890 if (GET_CODE (op2) != REG
2891 && (GET_CODE (op2) != CONST_INT
2892 || !SPE_CONST_OFFSET_OK (INTVAL (op2))))
2893 op2 = force_reg (Pmode, op2);
2895 return gen_rtx_PLUS (Pmode, op1, op2);
2898 return force_reg (Pmode, x);
2904 && GET_CODE (x) != CONST_INT
2905 && GET_CODE (x) != CONST_DOUBLE
2907 && GET_MODE_NUNITS (mode) == 1
2908 && (GET_MODE_BITSIZE (mode) <= 32
2909 || ((TARGET_HARD_FLOAT && TARGET_FPRS) && mode == DFmode)))
2911 rtx reg = gen_reg_rtx (Pmode);
2912 emit_insn (gen_elf_high (reg, x));
2913 return gen_rtx_LO_SUM (Pmode, reg, x);
2915 else if (TARGET_MACHO && TARGET_32BIT && TARGET_NO_TOC
2918 && ! MACHO_DYNAMIC_NO_PIC_P
2920 && GET_CODE (x) != CONST_INT
2921 && GET_CODE (x) != CONST_DOUBLE
2923 && ((TARGET_HARD_FLOAT && TARGET_FPRS) || mode != DFmode)
2927 rtx reg = gen_reg_rtx (Pmode);
2928 emit_insn (gen_macho_high (reg, x));
2929 return gen_rtx_LO_SUM (Pmode, reg, x);
2932 && constant_pool_expr_p (x)
2933 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), Pmode))
2935 return create_TOC_reference (x);
2941 /* This is called from dwarf2out.c via TARGET_ASM_OUTPUT_DWARF_DTPREL.
2942 We need to emit DTP-relative relocations. */
2945 rs6000_output_dwarf_dtprel (FILE *file, int size, rtx x)
2950 fputs ("\t.long\t", file);
2953 fputs (DOUBLE_INT_ASM_OP, file);
2958 output_addr_const (file, x);
2959 fputs ("@dtprel+0x8000", file);
2962 /* Construct the SYMBOL_REF for the tls_get_addr function. */
2964 static GTY(()) rtx rs6000_tls_symbol;
2966 rs6000_tls_get_addr (void)
2968 if (!rs6000_tls_symbol)
2969 rs6000_tls_symbol = init_one_libfunc ("__tls_get_addr");
2971 return rs6000_tls_symbol;
2974 /* Construct the SYMBOL_REF for TLS GOT references. */
2976 static GTY(()) rtx rs6000_got_symbol;
2978 rs6000_got_sym (void)
2980 if (!rs6000_got_symbol)
2982 rs6000_got_symbol = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
2983 SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_LOCAL;
2984 SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_EXTERNAL;
2987 return rs6000_got_symbol;
2990 /* ADDR contains a thread-local SYMBOL_REF. Generate code to compute
2991 this (thread-local) address. */
2994 rs6000_legitimize_tls_address (rtx addr, enum tls_model model)
2998 dest = gen_reg_rtx (Pmode);
2999 if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 16)
3005 tlsreg = gen_rtx_REG (Pmode, 13);
3006 insn = gen_tls_tprel_64 (dest, tlsreg, addr);
3010 tlsreg = gen_rtx_REG (Pmode, 2);
3011 insn = gen_tls_tprel_32 (dest, tlsreg, addr);
3015 else if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 32)
3019 tmp = gen_reg_rtx (Pmode);
3022 tlsreg = gen_rtx_REG (Pmode, 13);
3023 insn = gen_tls_tprel_ha_64 (tmp, tlsreg, addr);
3027 tlsreg = gen_rtx_REG (Pmode, 2);
3028 insn = gen_tls_tprel_ha_32 (tmp, tlsreg, addr);
3032 insn = gen_tls_tprel_lo_64 (dest, tmp, addr);
3034 insn = gen_tls_tprel_lo_32 (dest, tmp, addr);
3039 rtx r3, got, tga, tmp1, tmp2, eqv;
3041 /* We currently use relocations like @got@tlsgd for tls, which
3042 means the linker will handle allocation of tls entries, placing
3043 them in the .got section. So use a pointer to the .got section,
3044 not one to secondary TOC sections used by 64-bit -mminimal-toc,
3045 or to secondary GOT sections used by 32-bit -fPIC. */
3047 got = gen_rtx_REG (Pmode, 2);
3051 got = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
3054 rtx gsym = rs6000_got_sym ();
3055 got = gen_reg_rtx (Pmode);
3057 rs6000_emit_move (got, gsym, Pmode);
3060 rtx tempLR, tmp3, mem;
3063 tempLR = gen_reg_rtx (Pmode);
3064 tmp1 = gen_reg_rtx (Pmode);
3065 tmp2 = gen_reg_rtx (Pmode);
3066 tmp3 = gen_reg_rtx (Pmode);
3067 mem = gen_const_mem (Pmode, tmp1);
3069 first = emit_insn (gen_load_toc_v4_PIC_1b (tempLR, gsym));
3070 emit_move_insn (tmp1, tempLR);
3071 emit_move_insn (tmp2, mem);
3072 emit_insn (gen_addsi3 (tmp3, tmp1, tmp2));
3073 last = emit_move_insn (got, tmp3);
3074 REG_NOTES (last) = gen_rtx_EXPR_LIST (REG_EQUAL, gsym,
3076 REG_NOTES (first) = gen_rtx_INSN_LIST (REG_LIBCALL, last,
3078 REG_NOTES (last) = gen_rtx_INSN_LIST (REG_RETVAL, first,
3084 if (model == TLS_MODEL_GLOBAL_DYNAMIC)
3086 r3 = gen_rtx_REG (Pmode, 3);
3088 insn = gen_tls_gd_64 (r3, got, addr);
3090 insn = gen_tls_gd_32 (r3, got, addr);
3093 tga = gen_rtx_MEM (Pmode, rs6000_tls_get_addr ());
3094 insn = gen_call_value (r3, tga, const0_rtx, const0_rtx);
3095 insn = emit_call_insn (insn);
3096 CONST_OR_PURE_CALL_P (insn) = 1;
3097 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
3098 insn = get_insns ();
3100 emit_libcall_block (insn, dest, r3, addr);
3102 else if (model == TLS_MODEL_LOCAL_DYNAMIC)
3104 r3 = gen_rtx_REG (Pmode, 3);
3106 insn = gen_tls_ld_64 (r3, got);
3108 insn = gen_tls_ld_32 (r3, got);
3111 tga = gen_rtx_MEM (Pmode, rs6000_tls_get_addr ());
3112 insn = gen_call_value (r3, tga, const0_rtx, const0_rtx);
3113 insn = emit_call_insn (insn);
3114 CONST_OR_PURE_CALL_P (insn) = 1;
3115 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
3116 insn = get_insns ();
3118 tmp1 = gen_reg_rtx (Pmode);
3119 eqv = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx),
3121 emit_libcall_block (insn, tmp1, r3, eqv);
3122 if (rs6000_tls_size == 16)
3125 insn = gen_tls_dtprel_64 (dest, tmp1, addr);
3127 insn = gen_tls_dtprel_32 (dest, tmp1, addr);
3129 else if (rs6000_tls_size == 32)
3131 tmp2 = gen_reg_rtx (Pmode);
3133 insn = gen_tls_dtprel_ha_64 (tmp2, tmp1, addr);
3135 insn = gen_tls_dtprel_ha_32 (tmp2, tmp1, addr);
3138 insn = gen_tls_dtprel_lo_64 (dest, tmp2, addr);
3140 insn = gen_tls_dtprel_lo_32 (dest, tmp2, addr);
3144 tmp2 = gen_reg_rtx (Pmode);
3146 insn = gen_tls_got_dtprel_64 (tmp2, got, addr);
3148 insn = gen_tls_got_dtprel_32 (tmp2, got, addr);
3150 insn = gen_rtx_SET (Pmode, dest,
3151 gen_rtx_PLUS (Pmode, tmp2, tmp1));
3157 /* IE, or 64 bit offset LE. */
3158 tmp2 = gen_reg_rtx (Pmode);
3160 insn = gen_tls_got_tprel_64 (tmp2, got, addr);
3162 insn = gen_tls_got_tprel_32 (tmp2, got, addr);
3165 insn = gen_tls_tls_64 (dest, tmp2, addr);
3167 insn = gen_tls_tls_32 (dest, tmp2, addr);
3175 /* Return 1 if X contains a thread-local symbol. */
3178 rs6000_tls_referenced_p (rtx x)
3180 if (! TARGET_HAVE_TLS)
3183 return for_each_rtx (&x, &rs6000_tls_symbol_ref_1, 0);
3186 /* Return 1 if *X is a thread-local symbol. This is the same as
3187 rs6000_tls_symbol_ref except for the type of the unused argument. */
3190 rs6000_tls_symbol_ref_1 (rtx *x, void *data ATTRIBUTE_UNUSED)
3192 return RS6000_SYMBOL_REF_TLS_P (*x);
3195 /* The convention appears to be to define this wherever it is used.
3196 With legitimize_reload_address now defined here, REG_MODE_OK_FOR_BASE_P
3197 is now used here. */
3198 #ifndef REG_MODE_OK_FOR_BASE_P
3199 #define REG_MODE_OK_FOR_BASE_P(REGNO, MODE) REG_OK_FOR_BASE_P (REGNO)
3202 /* Our implementation of LEGITIMIZE_RELOAD_ADDRESS. Returns a value to
3203 replace the input X, or the original X if no replacement is called for.
3204 The output parameter *WIN is 1 if the calling macro should goto WIN,
3207 For RS/6000, we wish to handle large displacements off a base
3208 register by splitting the addend across an addiu/addis and the mem insn.
3209 This cuts number of extra insns needed from 3 to 1.
3211 On Darwin, we use this to generate code for floating point constants.
3212 A movsf_low is generated so we wind up with 2 instructions rather than 3.
3213 The Darwin code is inside #if TARGET_MACHO because only then is
3214 machopic_function_base_name() defined. */
3216 rs6000_legitimize_reload_address (rtx x, enum machine_mode mode,
3217 int opnum, int type,
3218 int ind_levels ATTRIBUTE_UNUSED, int *win)
3220 /* We must recognize output that we have already generated ourselves. */
3221 if (GET_CODE (x) == PLUS
3222 && GET_CODE (XEXP (x, 0)) == PLUS
3223 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
3224 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3225 && GET_CODE (XEXP (x, 1)) == CONST_INT)
3227 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
3228 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
3229 opnum, (enum reload_type)type);
3235 if (DEFAULT_ABI == ABI_DARWIN && flag_pic
3236 && GET_CODE (x) == LO_SUM
3237 && GET_CODE (XEXP (x, 0)) == PLUS
3238 && XEXP (XEXP (x, 0), 0) == pic_offset_table_rtx
3239 && GET_CODE (XEXP (XEXP (x, 0), 1)) == HIGH
3240 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 1), 0)) == CONST
3241 && XEXP (XEXP (XEXP (x, 0), 1), 0) == XEXP (x, 1)
3242 && GET_CODE (XEXP (XEXP (x, 1), 0)) == MINUS
3243 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == SYMBOL_REF
3244 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == SYMBOL_REF)
3246 /* Result of previous invocation of this function on Darwin
3247 floating point constant. */
3248 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
3249 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
3250 opnum, (enum reload_type)type);
3256 /* Force ld/std non-word aligned offset into base register by wrapping
3258 if (GET_CODE (x) == PLUS
3259 && GET_CODE (XEXP (x, 0)) == REG
3260 && REGNO (XEXP (x, 0)) < 32
3261 && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
3262 && GET_CODE (XEXP (x, 1)) == CONST_INT
3263 && (INTVAL (XEXP (x, 1)) & 3) != 0
3264 && !ALTIVEC_VECTOR_MODE (mode)
3265 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD
3266 && TARGET_POWERPC64)
3268 x = gen_rtx_PLUS (GET_MODE (x), x, GEN_INT (0));
3269 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
3270 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
3271 opnum, (enum reload_type) type);
3276 if (GET_CODE (x) == PLUS
3277 && GET_CODE (XEXP (x, 0)) == REG
3278 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
3279 && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
3280 && GET_CODE (XEXP (x, 1)) == CONST_INT
3281 && !SPE_VECTOR_MODE (mode)
3282 && !(TARGET_E500_DOUBLE && (mode == DFmode
3284 && !ALTIVEC_VECTOR_MODE (mode))
3286 HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
3287 HOST_WIDE_INT low = ((val & 0xffff) ^ 0x8000) - 0x8000;
3289 = (((val - low) & 0xffffffff) ^ 0x80000000) - 0x80000000;
3291 /* Check for 32-bit overflow. */
3292 if (high + low != val)
3298 /* Reload the high part into a base reg; leave the low part
3299 in the mem directly. */
3301 x = gen_rtx_PLUS (GET_MODE (x),
3302 gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0),
3306 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
3307 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
3308 opnum, (enum reload_type)type);
3313 if (GET_CODE (x) == SYMBOL_REF
3314 && !ALTIVEC_VECTOR_MODE (mode)
3316 && DEFAULT_ABI == ABI_DARWIN
3317 && (flag_pic || MACHO_DYNAMIC_NO_PIC_P)
3319 && DEFAULT_ABI == ABI_V4
3322 /* Don't do this for TFmode, since the result isn't offsettable.
3323 The same goes for DImode without 64-bit gprs. */
3325 && (mode != DImode || TARGET_POWERPC64))
3330 rtx offset = gen_rtx_CONST (Pmode,
3331 gen_rtx_MINUS (Pmode, x,
3332 machopic_function_base_sym ()));
3333 x = gen_rtx_LO_SUM (GET_MODE (x),
3334 gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
3335 gen_rtx_HIGH (Pmode, offset)), offset);
3339 x = gen_rtx_LO_SUM (GET_MODE (x),
3340 gen_rtx_HIGH (Pmode, x), x);
3342 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
3343 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
3344 opnum, (enum reload_type)type);
3349 /* Reload an offset address wrapped by an AND that represents the
3350 masking of the lower bits. Strip the outer AND and let reload
3351 convert the offset address into an indirect address. */
3353 && ALTIVEC_VECTOR_MODE (mode)
3354 && GET_CODE (x) == AND
3355 && GET_CODE (XEXP (x, 0)) == PLUS
3356 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
3357 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3358 && GET_CODE (XEXP (x, 1)) == CONST_INT
3359 && INTVAL (XEXP (x, 1)) == -16)
3367 && constant_pool_expr_p (x)
3368 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), mode))
3370 (x) = create_TOC_reference (x);
3378 /* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
3379 that is a valid memory address for an instruction.
3380 The MODE argument is the machine mode for the MEM expression
3381 that wants to use this address.
3383 On the RS/6000, there are four valid address: a SYMBOL_REF that
3384 refers to a constant pool entry of an address (or the sum of it
3385 plus a constant), a short (16-bit signed) constant plus a register,
3386 the sum of two registers, or a register indirect, possibly with an
3387 auto-increment. For DFmode and DImode with a constant plus register,
3388 we must ensure that both words are addressable or PowerPC64 with offset
3391 For modes spanning multiple registers (DFmode in 32-bit GPRs,
3392 32-bit DImode, TImode, TFmode), indexed addressing cannot be used because
3393 adjacent memory cells are accessed by adding word-sized offsets
3394 during assembly output. */
3396 rs6000_legitimate_address (enum machine_mode mode, rtx x, int reg_ok_strict)
3398 /* If this is an unaligned stvx/ldvx type address, discard the outer AND. */
3400 && ALTIVEC_VECTOR_MODE (mode)
3401 && GET_CODE (x) == AND
3402 && GET_CODE (XEXP (x, 1)) == CONST_INT
3403 && INTVAL (XEXP (x, 1)) == -16)
3406 if (RS6000_SYMBOL_REF_TLS_P (x))
3408 if (legitimate_indirect_address_p (x, reg_ok_strict))
3410 if ((GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
3411 && !ALTIVEC_VECTOR_MODE (mode)
3412 && !SPE_VECTOR_MODE (mode)
3413 /* Restrict addressing for DI because of our SUBREG hackery. */
3414 && !(TARGET_E500_DOUBLE && (mode == DFmode || mode == DImode))
3416 && legitimate_indirect_address_p (XEXP (x, 0), reg_ok_strict))
3418 if (rs6000_legitimate_small_data_p (mode, x))
3420 if (legitimate_constant_pool_address_p (x))
3422 /* If not REG_OK_STRICT (before reload) let pass any stack offset. */
3424 && GET_CODE (x) == PLUS
3425 && GET_CODE (XEXP (x, 0)) == REG
3426 && (XEXP (x, 0) == virtual_stack_vars_rtx
3427 || XEXP (x, 0) == arg_pointer_rtx)
3428 && GET_CODE (XEXP (x, 1)) == CONST_INT)
3430 if (rs6000_legitimate_offset_address_p (mode, x, reg_ok_strict))
3434 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
3436 || ((mode != DFmode || TARGET_E500_DOUBLE) && mode != TFmode))
3437 && (TARGET_POWERPC64 || mode != DImode)
3438 && legitimate_indexed_address_p (x, reg_ok_strict))
3440 if (legitimate_lo_sum_address_p (mode, x, reg_ok_strict))
3445 /* Go to LABEL if ADDR (a legitimate address expression)
3446 has an effect that depends on the machine mode it is used for.
3448 On the RS/6000 this is true of all integral offsets (since AltiVec
3449 modes don't allow them) or is a pre-increment or decrement.
3451 ??? Except that due to conceptual problems in offsettable_address_p
3452 we can't really report the problems of integral offsets. So leave
3453 this assuming that the adjustable offset must be valid for the
3454 sub-words of a TFmode operand, which is what we had before. */
3457 rs6000_mode_dependent_address (rtx addr)
3459 switch (GET_CODE (addr))
3462 if (GET_CODE (XEXP (addr, 1)) == CONST_INT)
3464 unsigned HOST_WIDE_INT val = INTVAL (XEXP (addr, 1));
3465 return val + 12 + 0x8000 >= 0x10000;
3474 return TARGET_UPDATE;
3483 /* Return number of consecutive hard regs needed starting at reg REGNO
3484 to hold something of mode MODE.
3485 This is ordinarily the length in words of a value of mode MODE
3486 but can be less for certain modes in special long registers.
3488 For the SPE, GPRs are 64 bits but only 32 bits are visible in
3489 scalar instructions. The upper 32 bits are only available to the
3492 POWER and PowerPC GPRs hold 32 bits worth;
3493 PowerPC64 GPRs and FPRs point register holds 64 bits worth. */
3496 rs6000_hard_regno_nregs (int regno, enum machine_mode mode)
3498 if (FP_REGNO_P (regno))
3499 return (GET_MODE_SIZE (mode) + UNITS_PER_FP_WORD - 1) / UNITS_PER_FP_WORD;
3501 if (TARGET_E500_DOUBLE && mode == DFmode)
3504 if (SPE_SIMD_REGNO_P (regno) && TARGET_SPE && SPE_VECTOR_MODE (mode))
3505 return (GET_MODE_SIZE (mode) + UNITS_PER_SPE_WORD - 1) / UNITS_PER_SPE_WORD;
3507 if (ALTIVEC_REGNO_P (regno))
3509 (GET_MODE_SIZE (mode) + UNITS_PER_ALTIVEC_WORD - 1) / UNITS_PER_ALTIVEC_WORD;
3511 return (GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
3514 /* Change register usage conditional on target flags. */
3516 rs6000_conditional_register_usage (void)
3520 /* Set MQ register fixed (already call_used) if not POWER
3521 architecture (RIOS1, RIOS2, RSC, and PPC601) so that it will not
3526 /* 64-bit AIX and Linux reserve GPR13 for thread-private data. */
3528 fixed_regs[13] = call_used_regs[13]
3529 = call_really_used_regs[13] = 1;
3531 /* Conditionally disable FPRs. */
3532 if (TARGET_SOFT_FLOAT || !TARGET_FPRS)
3533 for (i = 32; i < 64; i++)
3534 fixed_regs[i] = call_used_regs[i]
3535 = call_really_used_regs[i] = 1;
3537 /* The TOC register is not killed across calls in a way that is
3538 visible to the compiler. */
3539 if (DEFAULT_ABI == ABI_AIX)
3540 call_really_used_regs[2] = 0;
3542 if (DEFAULT_ABI == ABI_V4
3543 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
3545 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
3547 if (DEFAULT_ABI == ABI_V4
3548 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
3550 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
3551 = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
3552 = call_really_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
3554 if (DEFAULT_ABI == ABI_DARWIN
3555 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
3556 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
3557 = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
3558 = call_really_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
3560 if (TARGET_TOC && TARGET_MINIMAL_TOC)
3561 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
3562 = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
3565 global_regs[VSCR_REGNO] = 1;
3569 global_regs[SPEFSCR_REGNO] = 1;
3570 fixed_regs[FIXED_SCRATCH]
3571 = call_used_regs[FIXED_SCRATCH]
3572 = call_really_used_regs[FIXED_SCRATCH] = 1;
3575 if (! TARGET_ALTIVEC)
3577 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
3578 fixed_regs[i] = call_used_regs[i] = call_really_used_regs[i] = 1;
3579 call_really_used_regs[VRSAVE_REGNO] = 1;
3582 if (TARGET_ALTIVEC_ABI)
3583 for (i = FIRST_ALTIVEC_REGNO; i < FIRST_ALTIVEC_REGNO + 20; ++i)
3584 call_used_regs[i] = call_really_used_regs[i] = 1;
3587 /* Try to output insns to set TARGET equal to the constant C if it can
3588 be done in less than N insns. Do all computations in MODE.
3589 Returns the place where the output has been placed if it can be
3590 done and the insns have been emitted. If it would take more than N
3591 insns, zero is returned and no insns and emitted. */
3594 rs6000_emit_set_const (rtx dest, enum machine_mode mode,
3595 rtx source, int n ATTRIBUTE_UNUSED)
3597 rtx result, insn, set;
3598 HOST_WIDE_INT c0, c1;
3605 dest = gen_reg_rtx (mode);
3606 emit_insn (gen_rtx_SET (VOIDmode, dest, source));
3610 result = no_new_pseudos ? dest : gen_reg_rtx (SImode);
3612 emit_insn (gen_rtx_SET (VOIDmode, result,
3613 GEN_INT (INTVAL (source)
3614 & (~ (HOST_WIDE_INT) 0xffff))));
3615 emit_insn (gen_rtx_SET (VOIDmode, dest,
3616 gen_rtx_IOR (SImode, result,
3617 GEN_INT (INTVAL (source) & 0xffff))));
3622 switch (GET_CODE (source))
3625 c0 = INTVAL (source);
3630 #if HOST_BITS_PER_WIDE_INT >= 64
3631 c0 = CONST_DOUBLE_LOW (source);
3634 c0 = CONST_DOUBLE_LOW (source);
3635 c1 = CONST_DOUBLE_HIGH (source);
3643 result = rs6000_emit_set_long_const (dest, c0, c1);
3650 insn = get_last_insn ();
3651 set = single_set (insn);
3652 if (! CONSTANT_P (SET_SRC (set)))
3653 set_unique_reg_note (insn, REG_EQUAL, source);
3658 /* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
3659 fall back to a straight forward decomposition. We do this to avoid
3660 exponential run times encountered when looking for longer sequences
3661 with rs6000_emit_set_const. */
3663 rs6000_emit_set_long_const (rtx dest, HOST_WIDE_INT c1, HOST_WIDE_INT c2)
3665 if (!TARGET_POWERPC64)
3667 rtx operand1, operand2;
3669 operand1 = operand_subword_force (dest, WORDS_BIG_ENDIAN == 0,
3671 operand2 = operand_subword_force (dest, WORDS_BIG_ENDIAN != 0,
3673 emit_move_insn (operand1, GEN_INT (c1));
3674 emit_move_insn (operand2, GEN_INT (c2));
3678 HOST_WIDE_INT ud1, ud2, ud3, ud4;
3681 ud2 = (c1 & 0xffff0000) >> 16;
3682 #if HOST_BITS_PER_WIDE_INT >= 64
3686 ud4 = (c2 & 0xffff0000) >> 16;
3688 if ((ud4 == 0xffff && ud3 == 0xffff && ud2 == 0xffff && (ud1 & 0x8000))
3689 || (ud4 == 0 && ud3 == 0 && ud2 == 0 && ! (ud1 & 0x8000)))
3692 emit_move_insn (dest, GEN_INT (((ud1 ^ 0x8000) - 0x8000)));
3694 emit_move_insn (dest, GEN_INT (ud1));
3697 else if ((ud4 == 0xffff && ud3 == 0xffff && (ud2 & 0x8000))
3698 || (ud4 == 0 && ud3 == 0 && ! (ud2 & 0x8000)))
3701 emit_move_insn (dest, GEN_INT (((ud2 << 16) ^ 0x80000000)
3704 emit_move_insn (dest, GEN_INT (ud2 << 16));
3706 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
3708 else if ((ud4 == 0xffff && (ud3 & 0x8000))
3709 || (ud4 == 0 && ! (ud3 & 0x8000)))
3712 emit_move_insn (dest, GEN_INT (((ud3 << 16) ^ 0x80000000)
3715 emit_move_insn (dest, GEN_INT (ud3 << 16));
3718 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud2)));
3719 emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (16)));
3721 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
3726 emit_move_insn (dest, GEN_INT (((ud4 << 16) ^ 0x80000000)
3729 emit_move_insn (dest, GEN_INT (ud4 << 16));
3732 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud3)));
3734 emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (32)));
3736 emit_move_insn (dest, gen_rtx_IOR (DImode, dest,
3737 GEN_INT (ud2 << 16)));
3739 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
3745 /* Helper for the following. Get rid of [r+r] memory refs
3746 in cases where it won't work (TImode, TFmode). */
3749 rs6000_eliminate_indexed_memrefs (rtx operands[2])
3751 if (GET_CODE (operands[0]) == MEM
3752 && GET_CODE (XEXP (operands[0], 0)) != REG
3753 && ! legitimate_constant_pool_address_p (XEXP (operands[0], 0))
3754 && ! reload_in_progress)
3756 = replace_equiv_address (operands[0],
3757 copy_addr_to_reg (XEXP (operands[0], 0)));
3759 if (GET_CODE (operands[1]) == MEM
3760 && GET_CODE (XEXP (operands[1], 0)) != REG
3761 && ! legitimate_constant_pool_address_p (XEXP (operands[1], 0))
3762 && ! reload_in_progress)
3764 = replace_equiv_address (operands[1],
3765 copy_addr_to_reg (XEXP (operands[1], 0)));
3768 /* Emit a move from SOURCE to DEST in mode MODE. */
3770 rs6000_emit_move (rtx dest, rtx source, enum machine_mode mode)
3774 operands[1] = source;
3776 /* Sanity checks. Check that we get CONST_DOUBLE only when we should. */
3777 if (GET_CODE (operands[1]) == CONST_DOUBLE
3778 && ! FLOAT_MODE_P (mode)
3779 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3781 /* FIXME. This should never happen. */
3782 /* Since it seems that it does, do the safe thing and convert
3784 operands[1] = gen_int_mode (CONST_DOUBLE_LOW (operands[1]), mode);
3786 gcc_assert (GET_CODE (operands[1]) != CONST_DOUBLE
3787 || FLOAT_MODE_P (mode)
3788 || ((CONST_DOUBLE_HIGH (operands[1]) != 0
3789 || CONST_DOUBLE_LOW (operands[1]) < 0)
3790 && (CONST_DOUBLE_HIGH (operands[1]) != -1
3791 || CONST_DOUBLE_LOW (operands[1]) >= 0)));
3793 /* Check if GCC is setting up a block move that will end up using FP
3794 registers as temporaries. We must make sure this is acceptable. */
3795 if (GET_CODE (operands[0]) == MEM
3796 && GET_CODE (operands[1]) == MEM
3798 && (SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[0]))
3799 || SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[1])))
3800 && ! (SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[0]) > 32
3801 ? 32 : MEM_ALIGN (operands[0])))
3802 || SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[1]) > 32
3804 : MEM_ALIGN (operands[1]))))
3805 && ! MEM_VOLATILE_P (operands [0])
3806 && ! MEM_VOLATILE_P (operands [1]))
3808 emit_move_insn (adjust_address (operands[0], SImode, 0),
3809 adjust_address (operands[1], SImode, 0));
3810 emit_move_insn (adjust_address (operands[0], SImode, 4),
3811 adjust_address (operands[1], SImode, 4));
3815 if (!no_new_pseudos && GET_CODE (operands[0]) == MEM
3816 && !gpc_reg_operand (operands[1], mode))
3817 operands[1] = force_reg (mode, operands[1]);
3819 if (mode == SFmode && ! TARGET_POWERPC
3820 && TARGET_HARD_FLOAT && TARGET_FPRS
3821 && GET_CODE (operands[0]) == MEM)
3825 if (reload_in_progress || reload_completed)
3826 regnum = true_regnum (operands[1]);
3827 else if (GET_CODE (operands[1]) == REG)
3828 regnum = REGNO (operands[1]);
3832 /* If operands[1] is a register, on POWER it may have
3833 double-precision data in it, so truncate it to single
3835 if (FP_REGNO_P (regnum) || regnum >= FIRST_PSEUDO_REGISTER)
3838 newreg = (no_new_pseudos ? operands[1] : gen_reg_rtx (mode));
3839 emit_insn (gen_aux_truncdfsf2 (newreg, operands[1]));
3840 operands[1] = newreg;
3844 /* Recognize the case where operand[1] is a reference to thread-local
3845 data and load its address to a register. */
3846 if (rs6000_tls_referenced_p (operands[1]))
3848 enum tls_model model;
3849 rtx tmp = operands[1];
3852 if (GET_CODE (tmp) == CONST && GET_CODE (XEXP (tmp, 0)) == PLUS)
3854 addend = XEXP (XEXP (tmp, 0), 1);
3855 tmp = XEXP (XEXP (tmp, 0), 0);
3858 gcc_assert (GET_CODE (tmp) == SYMBOL_REF);
3859 model = SYMBOL_REF_TLS_MODEL (tmp);
3860 gcc_assert (model != 0);
3862 tmp = rs6000_legitimize_tls_address (tmp, model);
3865 tmp = gen_rtx_PLUS (mode, tmp, addend);
3866 tmp = force_operand (tmp, operands[0]);
3871 /* Handle the case where reload calls us with an invalid address. */
3872 if (reload_in_progress && mode == Pmode
3873 && (! general_operand (operands[1], mode)
3874 || ! nonimmediate_operand (operands[0], mode)))
3877 /* 128-bit constant floating-point values on Darwin should really be
3878 loaded as two parts. */
3879 if ((DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_DARWIN)
3880 && TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_LONG_DOUBLE_128
3881 && mode == TFmode && GET_CODE (operands[1]) == CONST_DOUBLE)
3883 /* DImode is used, not DFmode, because simplify_gen_subreg doesn't
3884 know how to get a DFmode SUBREG of a TFmode. */
3885 rs6000_emit_move (simplify_gen_subreg (DImode, operands[0], mode, 0),
3886 simplify_gen_subreg (DImode, operands[1], mode, 0),
3888 rs6000_emit_move (simplify_gen_subreg (DImode, operands[0], mode,
3889 GET_MODE_SIZE (DImode)),
3890 simplify_gen_subreg (DImode, operands[1], mode,
3891 GET_MODE_SIZE (DImode)),
3896 /* FIXME: In the long term, this switch statement should go away
3897 and be replaced by a sequence of tests based on things like
3903 if (CONSTANT_P (operands[1])
3904 && GET_CODE (operands[1]) != CONST_INT)
3905 operands[1] = force_const_mem (mode, operands[1]);
3909 rs6000_eliminate_indexed_memrefs (operands);
3914 if (CONSTANT_P (operands[1])
3915 && ! easy_fp_constant (operands[1], mode))
3916 operands[1] = force_const_mem (mode, operands[1]);
3927 if (CONSTANT_P (operands[1])
3928 && !easy_vector_constant (operands[1], mode))
3929 operands[1] = force_const_mem (mode, operands[1]);
3934 /* Use default pattern for address of ELF small data */
3937 && DEFAULT_ABI == ABI_V4
3938 && (GET_CODE (operands[1]) == SYMBOL_REF
3939 || GET_CODE (operands[1]) == CONST)
3940 && small_data_operand (operands[1], mode))
3942 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
3946 if (DEFAULT_ABI == ABI_V4
3947 && mode == Pmode && mode == SImode
3948 && flag_pic == 1 && got_operand (operands[1], mode))
3950 emit_insn (gen_movsi_got (operands[0], operands[1]));
3954 if ((TARGET_ELF || DEFAULT_ABI == ABI_DARWIN)
3958 && CONSTANT_P (operands[1])
3959 && GET_CODE (operands[1]) != HIGH
3960 && GET_CODE (operands[1]) != CONST_INT)
3962 rtx target = (no_new_pseudos ? operands[0] : gen_reg_rtx (mode));
3964 /* If this is a function address on -mcall-aixdesc,
3965 convert it to the address of the descriptor. */
3966 if (DEFAULT_ABI == ABI_AIX
3967 && GET_CODE (operands[1]) == SYMBOL_REF
3968 && XSTR (operands[1], 0)[0] == '.')
3970 const char *name = XSTR (operands[1], 0);
3972 while (*name == '.')
3974 new_ref = gen_rtx_SYMBOL_REF (Pmode, name);
3975 CONSTANT_POOL_ADDRESS_P (new_ref)
3976 = CONSTANT_POOL_ADDRESS_P (operands[1]);
3977 SYMBOL_REF_FLAGS (new_ref) = SYMBOL_REF_FLAGS (operands[1]);
3978 SYMBOL_REF_USED (new_ref) = SYMBOL_REF_USED (operands[1]);
3979 SYMBOL_REF_DECL (new_ref) = SYMBOL_REF_DECL (operands[1]);
3980 operands[1] = new_ref;
3983 if (DEFAULT_ABI == ABI_DARWIN)
3986 if (MACHO_DYNAMIC_NO_PIC_P)
3988 /* Take care of any required data indirection. */
3989 operands[1] = rs6000_machopic_legitimize_pic_address (
3990 operands[1], mode, operands[0]);
3991 if (operands[0] != operands[1])
3992 emit_insn (gen_rtx_SET (VOIDmode,
3993 operands[0], operands[1]));
3997 emit_insn (gen_macho_high (target, operands[1]));
3998 emit_insn (gen_macho_low (operands[0], target, operands[1]));
4002 emit_insn (gen_elf_high (target, operands[1]));
4003 emit_insn (gen_elf_low (operands[0], target, operands[1]));
4007 /* If this is a SYMBOL_REF that refers to a constant pool entry,
4008 and we have put it in the TOC, we just need to make a TOC-relative
4011 && GET_CODE (operands[1]) == SYMBOL_REF
4012 && constant_pool_expr_p (operands[1])
4013 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands[1]),
4014 get_pool_mode (operands[1])))
4016 operands[1] = create_TOC_reference (operands[1]);
4018 else if (mode == Pmode
4019 && CONSTANT_P (operands[1])
4020 && ((GET_CODE (operands[1]) != CONST_INT
4021 && ! easy_fp_constant (operands[1], mode))
4022 || (GET_CODE (operands[1]) == CONST_INT
4023 && num_insns_constant (operands[1], mode) > 2)
4024 || (GET_CODE (operands[0]) == REG
4025 && FP_REGNO_P (REGNO (operands[0]))))
4026 && GET_CODE (operands[1]) != HIGH
4027 && ! legitimate_constant_pool_address_p (operands[1])
4028 && ! toc_relative_expr_p (operands[1]))
4030 /* Emit a USE operation so that the constant isn't deleted if
4031 expensive optimizations are turned on because nobody
4032 references it. This should only be done for operands that
4033 contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
4034 This should not be done for operands that contain LABEL_REFs.
4035 For now, we just handle the obvious case. */
4036 if (GET_CODE (operands[1]) != LABEL_REF)
4037 emit_insn (gen_rtx_USE (VOIDmode, operands[1]));
4040 /* Darwin uses a special PIC legitimizer. */
4041 if (DEFAULT_ABI == ABI_DARWIN && MACHOPIC_INDIRECT)
4044 rs6000_machopic_legitimize_pic_address (operands[1], mode,
4046 if (operands[0] != operands[1])
4047 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
4052 /* If we are to limit the number of things we put in the TOC and
4053 this is a symbol plus a constant we can add in one insn,
4054 just put the symbol in the TOC and add the constant. Don't do
4055 this if reload is in progress. */
4056 if (GET_CODE (operands[1]) == CONST
4057 && TARGET_NO_SUM_IN_TOC && ! reload_in_progress
4058 && GET_CODE (XEXP (operands[1], 0)) == PLUS
4059 && add_operand (XEXP (XEXP (operands[1], 0), 1), mode)
4060 && (GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == LABEL_REF
4061 || GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF)
4062 && ! side_effects_p (operands[0]))
4065 force_const_mem (mode, XEXP (XEXP (operands[1], 0), 0));
4066 rtx other = XEXP (XEXP (operands[1], 0), 1);
4068 sym = force_reg (mode, sym);
4070 emit_insn (gen_addsi3 (operands[0], sym, other));
4072 emit_insn (gen_adddi3 (operands[0], sym, other));
4076 operands[1] = force_const_mem (mode, operands[1]);
4079 && constant_pool_expr_p (XEXP (operands[1], 0))
4080 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
4081 get_pool_constant (XEXP (operands[1], 0)),
4082 get_pool_mode (XEXP (operands[1], 0))))
4085 = gen_const_mem (mode,
4086 create_TOC_reference (XEXP (operands[1], 0)));
4087 set_mem_alias_set (operands[1], get_TOC_alias_set ());
4093 rs6000_eliminate_indexed_memrefs (operands);
4097 emit_insn (gen_rtx_PARALLEL (VOIDmode,
4099 gen_rtx_SET (VOIDmode,
4100 operands[0], operands[1]),
4101 gen_rtx_CLOBBER (VOIDmode,
4102 gen_rtx_SCRATCH (SImode)))));
4111 /* Above, we may have called force_const_mem which may have returned
4112 an invalid address. If we can, fix this up; otherwise, reload will
4113 have to deal with it. */
4114 if (GET_CODE (operands[1]) == MEM && ! reload_in_progress)
4115 operands[1] = validize_mem (operands[1]);
4118 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
4121 /* Nonzero if we can use a floating-point register to pass this arg. */
4122 #define USE_FP_FOR_ARG_P(CUM,MODE,TYPE) \
4123 (SCALAR_FLOAT_MODE_P (MODE) \
4124 && (CUM)->fregno <= FP_ARG_MAX_REG \
4125 && TARGET_HARD_FLOAT && TARGET_FPRS)
4127 /* Nonzero if we can use an AltiVec register to pass this arg. */
4128 #define USE_ALTIVEC_FOR_ARG_P(CUM,MODE,TYPE,NAMED) \
4129 (ALTIVEC_VECTOR_MODE (MODE) \
4130 && (CUM)->vregno <= ALTIVEC_ARG_MAX_REG \
4131 && TARGET_ALTIVEC_ABI \
4134 /* Return a nonzero value to say to return the function value in
4135 memory, just as large structures are always returned. TYPE will be
4136 the data type of the value, and FNTYPE will be the type of the
4137 function doing the returning, or @code{NULL} for libcalls.
4139 The AIX ABI for the RS/6000 specifies that all structures are
4140 returned in memory. The Darwin ABI does the same. The SVR4 ABI
4141 specifies that structures <= 8 bytes are returned in r3/r4, but a
4142 draft put them in memory, and GCC used to implement the draft
4143 instead of the final standard. Therefore, aix_struct_return
4144 controls this instead of DEFAULT_ABI; V.4 targets needing backward
4145 compatibility can change DRAFT_V4_STRUCT_RET to override the
4146 default, and -m switches get the final word. See
4147 rs6000_override_options for more details.
4149 The PPC32 SVR4 ABI uses IEEE double extended for long double, if 128-bit
4150 long double support is enabled. These values are returned in memory.
4152 int_size_in_bytes returns -1 for variable size objects, which go in
4153 memory always. The cast to unsigned makes -1 > 8. */
4156 rs6000_return_in_memory (tree type, tree fntype ATTRIBUTE_UNUSED)
4158 /* In the darwin64 abi, try to use registers for larger structs
4160 if (rs6000_darwin64_abi
4161 && TREE_CODE (type) == RECORD_TYPE
4162 && int_size_in_bytes (type) > 0)
4164 CUMULATIVE_ARGS valcum;
4168 valcum.fregno = FP_ARG_MIN_REG;
4169 valcum.vregno = ALTIVEC_ARG_MIN_REG;
4170 /* Do a trial code generation as if this were going to be passed
4171 as an argument; if any part goes in memory, we return NULL. */
4172 valret = rs6000_darwin64_record_arg (&valcum, type, 1, true);
4175 /* Otherwise fall through to more conventional ABI rules. */
4178 if (AGGREGATE_TYPE_P (type)
4179 && (aix_struct_return
4180 || (unsigned HOST_WIDE_INT) int_size_in_bytes (type) > 8))
4183 /* Allow -maltivec -mabi=no-altivec without warning. Altivec vector
4184 modes only exist for GCC vector types if -maltivec. */
4185 if (TARGET_32BIT && !TARGET_ALTIVEC_ABI
4186 && ALTIVEC_VECTOR_MODE (TYPE_MODE (type)))
4189 /* Return synthetic vectors in memory. */
4190 if (TREE_CODE (type) == VECTOR_TYPE
4191 && int_size_in_bytes (type) > (TARGET_ALTIVEC_ABI ? 16 : 8))
4193 static bool warned_for_return_big_vectors = false;
4194 if (!warned_for_return_big_vectors)
4196 warning (0, "GCC vector returned by reference: "
4197 "non-standard ABI extension with no compatibility guarantee");
4198 warned_for_return_big_vectors = true;
4203 if (DEFAULT_ABI == ABI_V4 && TYPE_MODE (type) == TFmode)
4209 /* Initialize a variable CUM of type CUMULATIVE_ARGS
4210 for a call to a function whose data type is FNTYPE.
4211 For a library call, FNTYPE is 0.
4213 For incoming args we set the number of arguments in the prototype large
4214 so we never return a PARALLEL. */
4217 init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype,
4218 rtx libname ATTRIBUTE_UNUSED, int incoming,
4219 int libcall, int n_named_args)
4221 static CUMULATIVE_ARGS zero_cumulative;
4223 *cum = zero_cumulative;
4225 cum->fregno = FP_ARG_MIN_REG;
4226 cum->vregno = ALTIVEC_ARG_MIN_REG;
4227 cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
4228 cum->call_cookie = ((DEFAULT_ABI == ABI_V4 && libcall)
4229 ? CALL_LIBCALL : CALL_NORMAL);
4230 cum->sysv_gregno = GP_ARG_MIN_REG;
4231 cum->stdarg = fntype
4232 && (TYPE_ARG_TYPES (fntype) != 0
4233 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
4234 != void_type_node));
4236 cum->nargs_prototype = 0;
4237 if (incoming || cum->prototype)
4238 cum->nargs_prototype = n_named_args;
4240 /* Check for a longcall attribute. */
4241 if ((!fntype && rs6000_default_long_calls)
4243 && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype))
4244 && !lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype))))
4245 cum->call_cookie |= CALL_LONG;
4247 if (TARGET_DEBUG_ARG)
4249 fprintf (stderr, "\ninit_cumulative_args:");
4252 tree ret_type = TREE_TYPE (fntype);
4253 fprintf (stderr, " ret code = %s,",
4254 tree_code_name[ (int)TREE_CODE (ret_type) ]);
4257 if (cum->call_cookie & CALL_LONG)
4258 fprintf (stderr, " longcall,");
4260 fprintf (stderr, " proto = %d, nargs = %d\n",
4261 cum->prototype, cum->nargs_prototype);
4266 && TARGET_ALTIVEC_ABI
4267 && ALTIVEC_VECTOR_MODE (TYPE_MODE (TREE_TYPE (fntype))))
4269 error ("cannot return value in vector register because"
4270 " altivec instructions are disabled, use -maltivec"
4275 /* Return true if TYPE must be passed on the stack and not in registers. */
4278 rs6000_must_pass_in_stack (enum machine_mode mode, tree type)
4280 if (DEFAULT_ABI == ABI_AIX || TARGET_64BIT)
4281 return must_pass_in_stack_var_size (mode, type);
4283 return must_pass_in_stack_var_size_or_pad (mode, type);
4286 /* If defined, a C expression which determines whether, and in which
4287 direction, to pad out an argument with extra space. The value
4288 should be of type `enum direction': either `upward' to pad above
4289 the argument, `downward' to pad below, or `none' to inhibit
4292 For the AIX ABI structs are always stored left shifted in their
4296 function_arg_padding (enum machine_mode mode, tree type)
4298 #ifndef AGGREGATE_PADDING_FIXED
4299 #define AGGREGATE_PADDING_FIXED 0
4301 #ifndef AGGREGATES_PAD_UPWARD_ALWAYS
4302 #define AGGREGATES_PAD_UPWARD_ALWAYS 0
4305 if (!AGGREGATE_PADDING_FIXED)
4307 /* GCC used to pass structures of the same size as integer types as
4308 if they were in fact integers, ignoring FUNCTION_ARG_PADDING.
4309 i.e. Structures of size 1 or 2 (or 4 when TARGET_64BIT) were
4310 passed padded downward, except that -mstrict-align further
4311 muddied the water in that multi-component structures of 2 and 4
4312 bytes in size were passed padded upward.
4314 The following arranges for best compatibility with previous
4315 versions of gcc, but removes the -mstrict-align dependency. */
4316 if (BYTES_BIG_ENDIAN)
4318 HOST_WIDE_INT size = 0;
4320 if (mode == BLKmode)
4322 if (type && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
4323 size = int_size_in_bytes (type);
4326 size = GET_MODE_SIZE (mode);
4328 if (size == 1 || size == 2 || size == 4)
4334 if (AGGREGATES_PAD_UPWARD_ALWAYS)
4336 if (type != 0 && AGGREGATE_TYPE_P (type))
4340 /* Fall back to the default. */
4341 return DEFAULT_FUNCTION_ARG_PADDING (mode, type);
4344 /* If defined, a C expression that gives the alignment boundary, in bits,
4345 of an argument with the specified mode and type. If it is not defined,
4346 PARM_BOUNDARY is used for all arguments.
4348 V.4 wants long longs to be double word aligned.
4349 Doubleword align SPE vectors.
4350 Quadword align Altivec vectors.
4351 Quadword align large synthetic vector types. */
4354 function_arg_boundary (enum machine_mode mode, tree type)
4356 if (DEFAULT_ABI == ABI_V4 && GET_MODE_SIZE (mode) == 8)
4358 else if (SPE_VECTOR_MODE (mode)
4359 || (type && TREE_CODE (type) == VECTOR_TYPE
4360 && int_size_in_bytes (type) >= 8
4361 && int_size_in_bytes (type) < 16))
4363 else if (ALTIVEC_VECTOR_MODE (mode)
4364 || (type && TREE_CODE (type) == VECTOR_TYPE
4365 && int_size_in_bytes (type) >= 16))
4367 else if (rs6000_darwin64_abi && mode == BLKmode
4368 && type && TYPE_ALIGN (type) > 64)
4371 return PARM_BOUNDARY;
4374 /* For a function parm of MODE and TYPE, return the starting word in
4375 the parameter area. NWORDS of the parameter area are already used. */
4378 rs6000_parm_start (enum machine_mode mode, tree type, unsigned int nwords)
4381 unsigned int parm_offset;
4383 align = function_arg_boundary (mode, type) / PARM_BOUNDARY - 1;
4384 parm_offset = DEFAULT_ABI == ABI_V4 ? 2 : 6;
4385 return nwords + (-(parm_offset + nwords) & align);
4388 /* Compute the size (in words) of a function argument. */
4390 static unsigned long
4391 rs6000_arg_size (enum machine_mode mode, tree type)
4395 if (mode != BLKmode)
4396 size = GET_MODE_SIZE (mode);
4398 size = int_size_in_bytes (type);
4401 return (size + 3) >> 2;
4403 return (size + 7) >> 3;
4406 /* Use this to flush pending int fields. */
4409 rs6000_darwin64_record_arg_advance_flush (CUMULATIVE_ARGS *cum,
4410 HOST_WIDE_INT bitpos)
4412 unsigned int startbit, endbit;
4413 int intregs, intoffset;
4414 enum machine_mode mode;
4416 if (cum->intoffset == -1)
4419 intoffset = cum->intoffset;
4420 cum->intoffset = -1;
4422 if (intoffset % BITS_PER_WORD != 0)
4424 mode = mode_for_size (BITS_PER_WORD - intoffset % BITS_PER_WORD,
4426 if (mode == BLKmode)
4428 /* We couldn't find an appropriate mode, which happens,
4429 e.g., in packed structs when there are 3 bytes to load.
4430 Back intoffset back to the beginning of the word in this
4432 intoffset = intoffset & -BITS_PER_WORD;
4436 startbit = intoffset & -BITS_PER_WORD;
4437 endbit = (bitpos + BITS_PER_WORD - 1) & -BITS_PER_WORD;
4438 intregs = (endbit - startbit) / BITS_PER_WORD;
4439 cum->words += intregs;
4442 /* The darwin64 ABI calls for us to recurse down through structs,
4443 looking for elements passed in registers. Unfortunately, we have
4444 to track int register count here also because of misalignments
4445 in powerpc alignment mode. */
4448 rs6000_darwin64_record_arg_advance_recurse (CUMULATIVE_ARGS *cum,
4450 HOST_WIDE_INT startbitpos)
4454 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
4455 if (TREE_CODE (f) == FIELD_DECL)
4457 HOST_WIDE_INT bitpos = startbitpos;
4458 tree ftype = TREE_TYPE (f);
4459 enum machine_mode mode = TYPE_MODE (ftype);
4461 if (DECL_SIZE (f) != 0
4462 && host_integerp (bit_position (f), 1))
4463 bitpos += int_bit_position (f);
4465 /* ??? FIXME: else assume zero offset. */
4467 if (TREE_CODE (ftype) == RECORD_TYPE)
4468 rs6000_darwin64_record_arg_advance_recurse (cum, ftype, bitpos);
4469 else if (USE_FP_FOR_ARG_P (cum, mode, ftype))
4471 rs6000_darwin64_record_arg_advance_flush (cum, bitpos);
4472 cum->fregno += (GET_MODE_SIZE (mode) + 7) >> 3;
4473 cum->words += (GET_MODE_SIZE (mode) + 7) >> 3;
4475 else if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, 1))
4477 rs6000_darwin64_record_arg_advance_flush (cum, bitpos);
4481 else if (cum->intoffset == -1)
4482 cum->intoffset = bitpos;
4486 /* Update the data in CUM to advance over an argument
4487 of mode MODE and data type TYPE.
4488 (TYPE is null for libcalls where that information may not be available.)
4490 Note that for args passed by reference, function_arg will be called
4491 with MODE and TYPE set to that of the pointer to the arg, not the arg
4495 function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
4496 tree type, int named, int depth)
4500 /* Only tick off an argument if we're not recursing. */
4502 cum->nargs_prototype--;
4504 if (TARGET_ALTIVEC_ABI
4505 && (ALTIVEC_VECTOR_MODE (mode)
4506 || (type && TREE_CODE (type) == VECTOR_TYPE
4507 && int_size_in_bytes (type) == 16)))
4511 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named))
4514 if (!TARGET_ALTIVEC)
4515 error ("cannot pass argument in vector register because"
4516 " altivec instructions are disabled, use -maltivec"
4519 /* PowerPC64 Linux and AIX allocate GPRs for a vector argument
4520 even if it is going to be passed in a vector register.
4521 Darwin does the same for variable-argument functions. */
4522 if ((DEFAULT_ABI == ABI_AIX && TARGET_64BIT)
4523 || (cum->stdarg && DEFAULT_ABI != ABI_V4))
4533 /* Vector parameters must be 16-byte aligned. This places
4534 them at 2 mod 4 in terms of words in 32-bit mode, since
4535 the parameter save area starts at offset 24 from the
4536 stack. In 64-bit mode, they just have to start on an
4537 even word, since the parameter save area is 16-byte
4538 aligned. Space for GPRs is reserved even if the argument
4539 will be passed in memory. */
4541 align = (2 - cum->words) & 3;
4543 align = cum->words & 1;
4544 cum->words += align + rs6000_arg_size (mode, type);
4546 if (TARGET_DEBUG_ARG)
4548 fprintf (stderr, "function_adv: words = %2d, align=%d, ",
4550 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s\n",
4551 cum->nargs_prototype, cum->prototype,
4552 GET_MODE_NAME (mode));
4556 else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode)
4558 && cum->sysv_gregno <= GP_ARG_MAX_REG)
4561 else if (rs6000_darwin64_abi
4563 && TREE_CODE (type) == RECORD_TYPE
4564 && (size = int_size_in_bytes (type)) > 0)
4566 /* Variable sized types have size == -1 and are
4567 treated as if consisting entirely of ints.
4568 Pad to 16 byte boundary if needed. */
4569 if (TYPE_ALIGN (type) >= 2 * BITS_PER_WORD
4570 && (cum->words % 2) != 0)
4572 /* For varargs, we can just go up by the size of the struct. */
4574 cum->words += (size + 7) / 8;
4577 /* It is tempting to say int register count just goes up by
4578 sizeof(type)/8, but this is wrong in a case such as
4579 { int; double; int; } [powerpc alignment]. We have to
4580 grovel through the fields for these too. */
4582 rs6000_darwin64_record_arg_advance_recurse (cum, type, 0);
4583 rs6000_darwin64_record_arg_advance_flush (cum,
4584 size * BITS_PER_UNIT);
4587 else if (DEFAULT_ABI == ABI_V4)
4589 if (TARGET_HARD_FLOAT && TARGET_FPRS
4590 && (mode == SFmode || mode == DFmode))
4592 if (cum->fregno <= FP_ARG_V4_MAX_REG)
4597 cum->words += cum->words & 1;
4598 cum->words += rs6000_arg_size (mode, type);
4603 int n_words = rs6000_arg_size (mode, type);
4604 int gregno = cum->sysv_gregno;
4606 /* Long long and SPE vectors are put in (r3,r4), (r5,r6),
4607 (r7,r8) or (r9,r10). As does any other 2 word item such
4608 as complex int due to a historical mistake. */
4610 gregno += (1 - gregno) & 1;
4612 /* Multi-reg args are not split between registers and stack. */
4613 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
4615 /* Long long and SPE vectors are aligned on the stack.
4616 So are other 2 word items such as complex int due to
4617 a historical mistake. */
4619 cum->words += cum->words & 1;
4620 cum->words += n_words;
4623 /* Note: continuing to accumulate gregno past when we've started
4624 spilling to the stack indicates the fact that we've started
4625 spilling to the stack to expand_builtin_saveregs. */
4626 cum->sysv_gregno = gregno + n_words;
4629 if (TARGET_DEBUG_ARG)
4631 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
4632 cum->words, cum->fregno);
4633 fprintf (stderr, "gregno = %2d, nargs = %4d, proto = %d, ",
4634 cum->sysv_gregno, cum->nargs_prototype, cum->prototype);
4635 fprintf (stderr, "mode = %4s, named = %d\n",
4636 GET_MODE_NAME (mode), named);
4641 int n_words = rs6000_arg_size (mode, type);
4642 int start_words = cum->words;
4643 int align_words = rs6000_parm_start (mode, type, start_words);
4645 cum->words = align_words + n_words;
4647 if (SCALAR_FLOAT_MODE_P (mode)
4648 && TARGET_HARD_FLOAT && TARGET_FPRS)
4649 cum->fregno += (GET_MODE_SIZE (mode) + 7) >> 3;
4651 if (TARGET_DEBUG_ARG)
4653 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
4654 cum->words, cum->fregno);
4655 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s, ",
4656 cum->nargs_prototype, cum->prototype, GET_MODE_NAME (mode));
4657 fprintf (stderr, "named = %d, align = %d, depth = %d\n",
4658 named, align_words - start_words, depth);
4664 spe_build_register_parallel (enum machine_mode mode, int gregno)
4671 r1 = gen_rtx_REG (DImode, gregno);
4672 r1 = gen_rtx_EXPR_LIST (VOIDmode, r1, const0_rtx);
4673 return gen_rtx_PARALLEL (mode, gen_rtvec (1, r1));
4676 r1 = gen_rtx_REG (DImode, gregno);
4677 r1 = gen_rtx_EXPR_LIST (VOIDmode, r1, const0_rtx);
4678 r3 = gen_rtx_REG (DImode, gregno + 2);
4679 r3 = gen_rtx_EXPR_LIST (VOIDmode, r3, GEN_INT (8));
4680 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r3));
4687 /* Determine where to put a SIMD argument on the SPE. */
4689 rs6000_spe_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
4692 int gregno = cum->sysv_gregno;
4694 /* On E500 v2, double arithmetic is done on the full 64-bit GPR, but
4695 are passed and returned in a pair of GPRs for ABI compatibility. */
4696 if (TARGET_E500_DOUBLE && (mode == DFmode || mode == DCmode))
4698 int n_words = rs6000_arg_size (mode, type);
4700 /* Doubles go in an odd/even register pair (r5/r6, etc). */
4702 gregno += (1 - gregno) & 1;
4704 /* Multi-reg args are not split between registers and stack. */
4705 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
4708 return spe_build_register_parallel (mode, gregno);
4712 int n_words = rs6000_arg_size (mode, type);
4714 /* SPE vectors are put in odd registers. */
4715 if (n_words == 2 && (gregno & 1) == 0)
4718 if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
4721 enum machine_mode m = SImode;
4723 r1 = gen_rtx_REG (m, gregno);
4724 r1 = gen_rtx_EXPR_LIST (m, r1, const0_rtx);
4725 r2 = gen_rtx_REG (m, gregno + 1);
4726 r2 = gen_rtx_EXPR_LIST (m, r2, GEN_INT (4));
4727 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
4734 if (gregno <= GP_ARG_MAX_REG)
4735 return gen_rtx_REG (mode, gregno);
4741 /* A subroutine of rs6000_darwin64_record_arg. Assign the bits of the
4742 structure between cum->intoffset and bitpos to integer registers. */
4745 rs6000_darwin64_record_arg_flush (CUMULATIVE_ARGS *cum,
4746 HOST_WIDE_INT bitpos, rtx rvec[], int *k)
4748 enum machine_mode mode;
4750 unsigned int startbit, endbit;
4751 int this_regno, intregs, intoffset;
4754 if (cum->intoffset == -1)
4757 intoffset = cum->intoffset;
4758 cum->intoffset = -1;
4760 /* If this is the trailing part of a word, try to only load that
4761 much into the register. Otherwise load the whole register. Note
4762 that in the latter case we may pick up unwanted bits. It's not a
4763 problem at the moment but may wish to revisit. */
4765 if (intoffset % BITS_PER_WORD != 0)
4767 mode = mode_for_size (BITS_PER_WORD - intoffset % BITS_PER_WORD,
4769 if (mode == BLKmode)
4771 /* We couldn't find an appropriate mode, which happens,
4772 e.g., in packed structs when there are 3 bytes to load.
4773 Back intoffset back to the beginning of the word in this
4775 intoffset = intoffset & -BITS_PER_WORD;
4782 startbit = intoffset & -BITS_PER_WORD;
4783 endbit = (bitpos + BITS_PER_WORD - 1) & -BITS_PER_WORD;
4784 intregs = (endbit - startbit) / BITS_PER_WORD;
4785 this_regno = cum->words + intoffset / BITS_PER_WORD;
4787 if (intregs > 0 && intregs > GP_ARG_NUM_REG - this_regno)
4790 intregs = MIN (intregs, GP_ARG_NUM_REG - this_regno);
4794 intoffset /= BITS_PER_UNIT;
4797 regno = GP_ARG_MIN_REG + this_regno;
4798 reg = gen_rtx_REG (mode, regno);
4800 gen_rtx_EXPR_LIST (VOIDmode, reg, GEN_INT (intoffset));
4803 intoffset = (intoffset | (UNITS_PER_WORD-1)) + 1;
4807 while (intregs > 0);
4810 /* Recursive workhorse for the following. */
4813 rs6000_darwin64_record_arg_recurse (CUMULATIVE_ARGS *cum, tree type,
4814 HOST_WIDE_INT startbitpos, rtx rvec[],
4819 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
4820 if (TREE_CODE (f) == FIELD_DECL)
4822 HOST_WIDE_INT bitpos = startbitpos;
4823 tree ftype = TREE_TYPE (f);
4824 enum machine_mode mode = TYPE_MODE (ftype);
4826 if (DECL_SIZE (f) != 0
4827 && host_integerp (bit_position (f), 1))
4828 bitpos += int_bit_position (f);
4830 /* ??? FIXME: else assume zero offset. */
4832 if (TREE_CODE (ftype) == RECORD_TYPE)
4833 rs6000_darwin64_record_arg_recurse (cum, ftype, bitpos, rvec, k);
4834 else if (cum->named && USE_FP_FOR_ARG_P (cum, mode, ftype))
4839 case SCmode: mode = SFmode; break;
4840 case DCmode: mode = DFmode; break;
4841 case TCmode: mode = TFmode; break;
4845 rs6000_darwin64_record_arg_flush (cum, bitpos, rvec, k);
4847 = gen_rtx_EXPR_LIST (VOIDmode,
4848 gen_rtx_REG (mode, cum->fregno++),
4849 GEN_INT (bitpos / BITS_PER_UNIT));
4853 else if (cum->named && USE_ALTIVEC_FOR_ARG_P (cum, mode, ftype, 1))
4855 rs6000_darwin64_record_arg_flush (cum, bitpos, rvec, k);
4857 = gen_rtx_EXPR_LIST (VOIDmode,
4858 gen_rtx_REG (mode, cum->vregno++),
4859 GEN_INT (bitpos / BITS_PER_UNIT));
4861 else if (cum->intoffset == -1)
4862 cum->intoffset = bitpos;
4866 /* For the darwin64 ABI, we want to construct a PARALLEL consisting of
4867 the register(s) to be used for each field and subfield of a struct
4868 being passed by value, along with the offset of where the
4869 register's value may be found in the block. FP fields go in FP
4870 register, vector fields go in vector registers, and everything
4871 else goes in int registers, packed as in memory.
4873 This code is also used for function return values. RETVAL indicates
4874 whether this is the case.
4876 Much of this is taken from the SPARC V9 port, which has a similar
4877 calling convention. */
4880 rs6000_darwin64_record_arg (CUMULATIVE_ARGS *orig_cum, tree type,
4881 int named, bool retval)
4883 rtx rvec[FIRST_PSEUDO_REGISTER];
4884 int k = 1, kbase = 1;
4885 HOST_WIDE_INT typesize = int_size_in_bytes (type);
4886 /* This is a copy; modifications are not visible to our caller. */
4887 CUMULATIVE_ARGS copy_cum = *orig_cum;
4888 CUMULATIVE_ARGS *cum = ©_cum;
4890 /* Pad to 16 byte boundary if needed. */
4891 if (!retval && TYPE_ALIGN (type) >= 2 * BITS_PER_WORD
4892 && (cum->words % 2) != 0)
4899 /* Put entries into rvec[] for individual FP and vector fields, and
4900 for the chunks of memory that go in int regs. Note we start at
4901 element 1; 0 is reserved for an indication of using memory, and
4902 may or may not be filled in below. */
4903 rs6000_darwin64_record_arg_recurse (cum, type, 0, rvec, &k);
4904 rs6000_darwin64_record_arg_flush (cum, typesize * BITS_PER_UNIT, rvec, &k);
4906 /* If any part of the struct went on the stack put all of it there.
4907 This hack is because the generic code for
4908 FUNCTION_ARG_PARTIAL_NREGS cannot handle cases where the register
4909 parts of the struct are not at the beginning. */
4913 return NULL_RTX; /* doesn't go in registers at all */
4915 rvec[0] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
4917 if (k > 1 || cum->use_stack)
4918 return gen_rtx_PARALLEL (BLKmode, gen_rtvec_v (k - kbase, &rvec[kbase]));
4923 /* Determine where to place an argument in 64-bit mode with 32-bit ABI. */
4926 rs6000_mixed_function_arg (enum machine_mode mode, tree type, int align_words)
4930 rtx rvec[GP_ARG_NUM_REG + 1];
4932 if (align_words >= GP_ARG_NUM_REG)
4935 n_units = rs6000_arg_size (mode, type);
4937 /* Optimize the simple case where the arg fits in one gpr, except in
4938 the case of BLKmode due to assign_parms assuming that registers are
4939 BITS_PER_WORD wide. */
4941 || (n_units == 1 && mode != BLKmode))
4942 return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
4945 if (align_words + n_units > GP_ARG_NUM_REG)
4946 /* Not all of the arg fits in gprs. Say that it goes in memory too,
4947 using a magic NULL_RTX component.
4948 FIXME: This is not strictly correct. Only some of the arg
4949 belongs in memory, not all of it. However, there isn't any way
4950 to do this currently, apart from building rtx descriptions for
4951 the pieces of memory we want stored. Due to bugs in the generic
4952 code we can't use the normal function_arg_partial_nregs scheme
4953 with the PARALLEL arg description we emit here.
4954 In any case, the code to store the whole arg to memory is often
4955 more efficient than code to store pieces, and we know that space
4956 is available in the right place for the whole arg. */
4957 /* FIXME: This should be fixed since the conversion to
4958 TARGET_ARG_PARTIAL_BYTES. */
4959 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
4964 rtx r = gen_rtx_REG (SImode, GP_ARG_MIN_REG + align_words);
4965 rtx off = GEN_INT (i++ * 4);
4966 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, off);
4968 while (++align_words < GP_ARG_NUM_REG && --n_units != 0);
4970 return gen_rtx_PARALLEL (mode, gen_rtvec_v (k, rvec));
4973 /* Determine where to put an argument to a function.
4974 Value is zero to push the argument on the stack,
4975 or a hard register in which to store the argument.
4977 MODE is the argument's machine mode.
4978 TYPE is the data type of the argument (as a tree).
4979 This is null for libcalls where that information may
4981 CUM is a variable of type CUMULATIVE_ARGS which gives info about
4982 the preceding args and about the function being called. It is
4983 not modified in this routine.
4984 NAMED is nonzero if this argument is a named parameter
4985 (otherwise it is an extra parameter matching an ellipsis).
4987 On RS/6000 the first eight words of non-FP are normally in registers
4988 and the rest are pushed. Under AIX, the first 13 FP args are in registers.
4989 Under V.4, the first 8 FP args are in registers.
4991 If this is floating-point and no prototype is specified, we use
4992 both an FP and integer register (or possibly FP reg and stack). Library
4993 functions (when CALL_LIBCALL is set) always have the proper types for args,
4994 so we can pass the FP value just in one register. emit_library_function
4995 doesn't support PARALLEL anyway.
4997 Note that for args passed by reference, function_arg will be called
4998 with MODE and TYPE set to that of the pointer to the arg, not the arg
5002 function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
5003 tree type, int named)
5005 enum rs6000_abi abi = DEFAULT_ABI;
5007 /* Return a marker to indicate whether CR1 needs to set or clear the
5008 bit that V.4 uses to say fp args were passed in registers.
5009 Assume that we don't need the marker for software floating point,
5010 or compiler generated library calls. */
5011 if (mode == VOIDmode)
5014 && (cum->call_cookie & CALL_LIBCALL) == 0
5016 || (cum->nargs_prototype < 0
5017 && (cum->prototype || TARGET_NO_PROTOTYPE))))
5019 /* For the SPE, we need to crxor CR6 always. */
5021 return GEN_INT (cum->call_cookie | CALL_V4_SET_FP_ARGS);
5022 else if (TARGET_HARD_FLOAT && TARGET_FPRS)
5023 return GEN_INT (cum->call_cookie
5024 | ((cum->fregno == FP_ARG_MIN_REG)
5025 ? CALL_V4_SET_FP_ARGS
5026 : CALL_V4_CLEAR_FP_ARGS));
5029 return GEN_INT (cum->call_cookie);
5032 if (rs6000_darwin64_abi && mode == BLKmode
5033 && TREE_CODE (type) == RECORD_TYPE)
5035 rtx rslt = rs6000_darwin64_record_arg (cum, type, named, false);
5036 if (rslt != NULL_RTX)
5038 /* Else fall through to usual handling. */
5041 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named))
5042 if (TARGET_64BIT && ! cum->prototype)
5044 /* Vector parameters get passed in vector register
5045 and also in GPRs or memory, in absence of prototype. */
5048 align_words = (cum->words + 1) & ~1;
5050 if (align_words >= GP_ARG_NUM_REG)
5056 slot = gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
5058 return gen_rtx_PARALLEL (mode,
5060 gen_rtx_EXPR_LIST (VOIDmode,
5062 gen_rtx_EXPR_LIST (VOIDmode,
5063 gen_rtx_REG (mode, cum->vregno),
5067 return gen_rtx_REG (mode, cum->vregno);
5068 else if (TARGET_ALTIVEC_ABI
5069 && (ALTIVEC_VECTOR_MODE (mode)
5070 || (type && TREE_CODE (type) == VECTOR_TYPE
5071 && int_size_in_bytes (type) == 16)))
5073 if (named || abi == ABI_V4)
5077 /* Vector parameters to varargs functions under AIX or Darwin
5078 get passed in memory and possibly also in GPRs. */
5079 int align, align_words, n_words;
5080 enum machine_mode part_mode;
5082 /* Vector parameters must be 16-byte aligned. This places them at
5083 2 mod 4 in terms of words in 32-bit mode, since the parameter
5084 save area starts at offset 24 from the stack. In 64-bit mode,
5085 they just have to start on an even word, since the parameter
5086 save area is 16-byte aligned. */
5088 align = (2 - cum->words) & 3;
5090 align = cum->words & 1;
5091 align_words = cum->words + align;
5093 /* Out of registers? Memory, then. */
5094 if (align_words >= GP_ARG_NUM_REG)
5097 if (TARGET_32BIT && TARGET_POWERPC64)
5098 return rs6000_mixed_function_arg (mode, type, align_words);
5100 /* The vector value goes in GPRs. Only the part of the
5101 value in GPRs is reported here. */
5103 n_words = rs6000_arg_size (mode, type);
5104 if (align_words + n_words > GP_ARG_NUM_REG)
5105 /* Fortunately, there are only two possibilities, the value
5106 is either wholly in GPRs or half in GPRs and half not. */
5109 return gen_rtx_REG (part_mode, GP_ARG_MIN_REG + align_words);
5112 else if (TARGET_SPE_ABI && TARGET_SPE
5113 && (SPE_VECTOR_MODE (mode)
5114 || (TARGET_E500_DOUBLE && (mode == DFmode
5115 || mode == DCmode))))
5116 return rs6000_spe_function_arg (cum, mode, type);
5118 else if (abi == ABI_V4)
5120 if (TARGET_HARD_FLOAT && TARGET_FPRS
5121 && (mode == SFmode || mode == DFmode))
5123 if (cum->fregno <= FP_ARG_V4_MAX_REG)
5124 return gen_rtx_REG (mode, cum->fregno);
5130 int n_words = rs6000_arg_size (mode, type);
5131 int gregno = cum->sysv_gregno;
5133 /* Long long and SPE vectors are put in (r3,r4), (r5,r6),
5134 (r7,r8) or (r9,r10). As does any other 2 word item such
5135 as complex int due to a historical mistake. */
5137 gregno += (1 - gregno) & 1;
5139 /* Multi-reg args are not split between registers and stack. */
5140 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
5143 if (TARGET_32BIT && TARGET_POWERPC64)
5144 return rs6000_mixed_function_arg (mode, type,
5145 gregno - GP_ARG_MIN_REG);
5146 return gen_rtx_REG (mode, gregno);
5151 int align_words = rs6000_parm_start (mode, type, cum->words);
5153 if (USE_FP_FOR_ARG_P (cum, mode, type))
5155 rtx rvec[GP_ARG_NUM_REG + 1];
5159 enum machine_mode fmode = mode;
5160 unsigned long n_fpreg = (GET_MODE_SIZE (mode) + 7) >> 3;
5162 if (cum->fregno + n_fpreg > FP_ARG_MAX_REG + 1)
5164 /* Currently, we only ever need one reg here because complex
5165 doubles are split. */
5166 gcc_assert (cum->fregno == FP_ARG_MAX_REG && fmode == TFmode);
5168 /* Long double split over regs and memory. */
5172 /* Do we also need to pass this arg in the parameter save
5175 && (cum->nargs_prototype <= 0
5176 || (DEFAULT_ABI == ABI_AIX
5178 && align_words >= GP_ARG_NUM_REG)));
5180 if (!needs_psave && mode == fmode)
5181 return gen_rtx_REG (fmode, cum->fregno);
5186 /* Describe the part that goes in gprs or the stack.
5187 This piece must come first, before the fprs. */
5188 if (align_words < GP_ARG_NUM_REG)
5190 unsigned long n_words = rs6000_arg_size (mode, type);
5192 if (align_words + n_words > GP_ARG_NUM_REG
5193 || (TARGET_32BIT && TARGET_POWERPC64))
5195 /* If this is partially on the stack, then we only
5196 include the portion actually in registers here. */
5197 enum machine_mode rmode = TARGET_32BIT ? SImode : DImode;
5200 if (align_words + n_words > GP_ARG_NUM_REG
5201 && (TARGET_32BIT && TARGET_POWERPC64))
5202 /* Not all of the arg fits in gprs. Say that it
5203 goes in memory too, using a magic NULL_RTX
5204 component. Also see comment in
5205 rs6000_mixed_function_arg for why the normal
5206 function_arg_partial_nregs scheme doesn't work
5208 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX,
5212 r = gen_rtx_REG (rmode,
5213 GP_ARG_MIN_REG + align_words);
5214 off = GEN_INT (i++ * GET_MODE_SIZE (rmode));
5215 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, off);
5217 while (++align_words < GP_ARG_NUM_REG && --n_words != 0);
5221 /* The whole arg fits in gprs. */
5222 r = gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
5223 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, const0_rtx);
5227 /* It's entirely in memory. */
5228 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
5231 /* Describe where this piece goes in the fprs. */
5232 r = gen_rtx_REG (fmode, cum->fregno);
5233 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, const0_rtx);
5235 return gen_rtx_PARALLEL (mode, gen_rtvec_v (k, rvec));
5237 else if (align_words < GP_ARG_NUM_REG)
5239 if (TARGET_32BIT && TARGET_POWERPC64)
5240 return rs6000_mixed_function_arg (mode, type, align_words);
5242 if (mode == BLKmode)
5245 return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
5252 /* For an arg passed partly in registers and partly in memory, this is
5253 the number of bytes passed in registers. For args passed entirely in
5254 registers or entirely in memory, zero. When an arg is described by a
5255 PARALLEL, perhaps using more than one register type, this function
5256 returns the number of bytes used by the first element of the PARALLEL. */
5259 rs6000_arg_partial_bytes (CUMULATIVE_ARGS *cum, enum machine_mode mode,
5260 tree type, bool named)
5265 if (DEFAULT_ABI == ABI_V4)
5268 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named)
5269 && cum->nargs_prototype >= 0)
5272 /* In this complicated case we just disable the partial_nregs code. */
5273 if (rs6000_darwin64_abi && mode == BLKmode
5274 && TREE_CODE (type) == RECORD_TYPE
5275 && int_size_in_bytes (type) > 0)
5278 align_words = rs6000_parm_start (mode, type, cum->words);
5280 if (USE_FP_FOR_ARG_P (cum, mode, type)
5281 /* If we are passing this arg in the fixed parameter save area
5282 (gprs or memory) as well as fprs, then this function should
5283 return the number of bytes passed in the parameter save area
5284 rather than bytes passed in fprs. */
5286 && (cum->nargs_prototype <= 0
5287 || (DEFAULT_ABI == ABI_AIX
5289 && align_words >= GP_ARG_NUM_REG))))
5291 if (cum->fregno + ((GET_MODE_SIZE (mode) + 7) >> 3) > FP_ARG_MAX_REG + 1)
5292 ret = (FP_ARG_MAX_REG + 1 - cum->fregno) * 8;
5293 else if (cum->nargs_prototype >= 0)
5297 if (align_words < GP_ARG_NUM_REG
5298 && GP_ARG_NUM_REG < align_words + rs6000_arg_size (mode, type))
5299 ret = (GP_ARG_NUM_REG - align_words) * (TARGET_32BIT ? 4 : 8);
5301 if (ret != 0 && TARGET_DEBUG_ARG)
5302 fprintf (stderr, "rs6000_arg_partial_bytes: %d\n", ret);
5307 /* A C expression that indicates when an argument must be passed by
5308 reference. If nonzero for an argument, a copy of that argument is
5309 made in memory and a pointer to the argument is passed instead of
5310 the argument itself. The pointer is passed in whatever way is
5311 appropriate for passing a pointer to that type.
5313 Under V.4, aggregates and long double are passed by reference.
5315 As an extension to all 32-bit ABIs, AltiVec vectors are passed by
5316 reference unless the AltiVec vector extension ABI is in force.
5318 As an extension to all ABIs, variable sized types are passed by
5322 rs6000_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
5323 enum machine_mode mode, tree type,
5324 bool named ATTRIBUTE_UNUSED)
5326 if (DEFAULT_ABI == ABI_V4 && mode == TFmode)
5328 if (TARGET_DEBUG_ARG)
5329 fprintf (stderr, "function_arg_pass_by_reference: V4 long double\n");
5336 if (DEFAULT_ABI == ABI_V4 && AGGREGATE_TYPE_P (type))
5338 if (TARGET_DEBUG_ARG)
5339 fprintf (stderr, "function_arg_pass_by_reference: V4 aggregate\n");
5343 if (int_size_in_bytes (type) < 0)
5345 if (TARGET_DEBUG_ARG)
5346 fprintf (stderr, "function_arg_pass_by_reference: variable size\n");
5350 /* Allow -maltivec -mabi=no-altivec without warning. Altivec vector
5351 modes only exist for GCC vector types if -maltivec. */
5352 if (TARGET_32BIT && !TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
5354 if (TARGET_DEBUG_ARG)
5355 fprintf (stderr, "function_arg_pass_by_reference: AltiVec\n");
5359 /* Pass synthetic vectors in memory. */
5360 if (TREE_CODE (type) == VECTOR_TYPE
5361 && int_size_in_bytes (type) > (TARGET_ALTIVEC_ABI ? 16 : 8))
5363 static bool warned_for_pass_big_vectors = false;
5364 if (TARGET_DEBUG_ARG)
5365 fprintf (stderr, "function_arg_pass_by_reference: synthetic vector\n");
5366 if (!warned_for_pass_big_vectors)
5368 warning (0, "GCC vector passed by reference: "
5369 "non-standard ABI extension with no compatibility guarantee");
5370 warned_for_pass_big_vectors = true;
5379 rs6000_move_block_from_reg (int regno, rtx x, int nregs)
5382 enum machine_mode reg_mode = TARGET_32BIT ? SImode : DImode;
5387 for (i = 0; i < nregs; i++)
5389 rtx tem = adjust_address_nv (x, reg_mode, i * GET_MODE_SIZE (reg_mode));
5390 if (reload_completed)
5392 if (! strict_memory_address_p (reg_mode, XEXP (tem, 0)))
5395 tem = simplify_gen_subreg (reg_mode, x, BLKmode,
5396 i * GET_MODE_SIZE (reg_mode));
5399 tem = replace_equiv_address (tem, XEXP (tem, 0));
5403 emit_move_insn (tem, gen_rtx_REG (reg_mode, regno + i));
5407 /* Perform any needed actions needed for a function that is receiving a
5408 variable number of arguments.
5412 MODE and TYPE are the mode and type of the current parameter.
5414 PRETEND_SIZE is a variable that should be set to the amount of stack
5415 that must be pushed by the prolog to pretend that our caller pushed
5418 Normally, this macro will push all remaining incoming registers on the
5419 stack and set PRETEND_SIZE to the length of the registers pushed. */
5422 setup_incoming_varargs (CUMULATIVE_ARGS *cum, enum machine_mode mode,
5423 tree type, int *pretend_size ATTRIBUTE_UNUSED,
5426 CUMULATIVE_ARGS next_cum;
5427 int reg_size = TARGET_32BIT ? 4 : 8;
5428 rtx save_area = NULL_RTX, mem;
5429 int first_reg_offset, set;
5431 /* Skip the last named argument. */
5433 function_arg_advance (&next_cum, mode, type, 1, 0);
5435 if (DEFAULT_ABI == ABI_V4)
5437 first_reg_offset = next_cum.sysv_gregno - GP_ARG_MIN_REG;
5441 int gpr_reg_num = 0, gpr_size = 0, fpr_size = 0;
5442 HOST_WIDE_INT offset = 0;
5444 /* Try to optimize the size of the varargs save area.
5445 The ABI requires that ap.reg_save_area is doubleword
5446 aligned, but we don't need to allocate space for all
5447 the bytes, only those to which we actually will save
5449 if (cfun->va_list_gpr_size && first_reg_offset < GP_ARG_NUM_REG)
5450 gpr_reg_num = GP_ARG_NUM_REG - first_reg_offset;
5451 if (TARGET_HARD_FLOAT && TARGET_FPRS
5452 && next_cum.fregno <= FP_ARG_V4_MAX_REG
5453 && cfun->va_list_fpr_size)
5456 fpr_size = (next_cum.fregno - FP_ARG_MIN_REG)
5457 * UNITS_PER_FP_WORD;
5458 if (cfun->va_list_fpr_size
5459 < FP_ARG_V4_MAX_REG + 1 - next_cum.fregno)
5460 fpr_size += cfun->va_list_fpr_size * UNITS_PER_FP_WORD;
5462 fpr_size += (FP_ARG_V4_MAX_REG + 1 - next_cum.fregno)
5463 * UNITS_PER_FP_WORD;
5467 offset = -((first_reg_offset * reg_size) & ~7);
5468 if (!fpr_size && gpr_reg_num > cfun->va_list_gpr_size)
5470 gpr_reg_num = cfun->va_list_gpr_size;
5471 if (reg_size == 4 && (first_reg_offset & 1))
5474 gpr_size = (gpr_reg_num * reg_size + 7) & ~7;
5477 offset = - (int) (next_cum.fregno - FP_ARG_MIN_REG)
5479 - (int) (GP_ARG_NUM_REG * reg_size);
5481 if (gpr_size + fpr_size)
5484 = assign_stack_local (BLKmode, gpr_size + fpr_size, 64);
5485 gcc_assert (GET_CODE (reg_save_area) == MEM);
5486 reg_save_area = XEXP (reg_save_area, 0);
5487 if (GET_CODE (reg_save_area) == PLUS)
5489 gcc_assert (XEXP (reg_save_area, 0)
5490 == virtual_stack_vars_rtx);
5491 gcc_assert (GET_CODE (XEXP (reg_save_area, 1)) == CONST_INT);
5492 offset += INTVAL (XEXP (reg_save_area, 1));
5495 gcc_assert (reg_save_area == virtual_stack_vars_rtx);
5498 cfun->machine->varargs_save_offset = offset;
5499 save_area = plus_constant (virtual_stack_vars_rtx, offset);
5504 first_reg_offset = next_cum.words;
5505 save_area = virtual_incoming_args_rtx;
5507 if (targetm.calls.must_pass_in_stack (mode, type))
5508 first_reg_offset += rs6000_arg_size (TYPE_MODE (type), type);
5511 set = get_varargs_alias_set ();
5512 if (! no_rtl && first_reg_offset < GP_ARG_NUM_REG
5513 && cfun->va_list_gpr_size)
5515 int nregs = GP_ARG_NUM_REG - first_reg_offset;
5517 if (va_list_gpr_counter_field)
5519 /* V4 va_list_gpr_size counts number of registers needed. */
5520 if (nregs > cfun->va_list_gpr_size)
5521 nregs = cfun->va_list_gpr_size;
5525 /* char * va_list instead counts number of bytes needed. */
5526 if (nregs > cfun->va_list_gpr_size / reg_size)
5527 nregs = cfun->va_list_gpr_size / reg_size;
5530 mem = gen_rtx_MEM (BLKmode,
5531 plus_constant (save_area,
5532 first_reg_offset * reg_size));
5533 MEM_NOTRAP_P (mem) = 1;
5534 set_mem_alias_set (mem, set);
5535 set_mem_align (mem, BITS_PER_WORD);
5537 rs6000_move_block_from_reg (GP_ARG_MIN_REG + first_reg_offset, mem,
5541 /* Save FP registers if needed. */
5542 if (DEFAULT_ABI == ABI_V4
5543 && TARGET_HARD_FLOAT && TARGET_FPRS
5545 && next_cum.fregno <= FP_ARG_V4_MAX_REG
5546 && cfun->va_list_fpr_size)
5548 int fregno = next_cum.fregno, nregs;
5549 rtx cr1 = gen_rtx_REG (CCmode, CR1_REGNO);
5550 rtx lab = gen_label_rtx ();
5551 int off = (GP_ARG_NUM_REG * reg_size) + ((fregno - FP_ARG_MIN_REG)
5552 * UNITS_PER_FP_WORD);
5555 (gen_rtx_SET (VOIDmode,
5557 gen_rtx_IF_THEN_ELSE (VOIDmode,
5558 gen_rtx_NE (VOIDmode, cr1,
5560 gen_rtx_LABEL_REF (VOIDmode, lab),
5564 fregno <= FP_ARG_V4_MAX_REG && nregs < cfun->va_list_fpr_size;
5565 fregno++, off += UNITS_PER_FP_WORD, nregs++)
5567 mem = gen_rtx_MEM (DFmode, plus_constant (save_area, off));
5568 MEM_NOTRAP_P (mem) = 1;
5569 set_mem_alias_set (mem, set);
5570 set_mem_align (mem, GET_MODE_ALIGNMENT (DFmode));
5571 emit_move_insn (mem, gen_rtx_REG (DFmode, fregno));
5578 /* Create the va_list data type. */
5581 rs6000_build_builtin_va_list (void)
5583 tree f_gpr, f_fpr, f_res, f_ovf, f_sav, record, type_decl;
5585 /* For AIX, prefer 'char *' because that's what the system
5586 header files like. */
5587 if (DEFAULT_ABI != ABI_V4)
5588 return build_pointer_type (char_type_node);
5590 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
5591 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
5593 f_gpr = build_decl (FIELD_DECL, get_identifier ("gpr"),
5594 unsigned_char_type_node);
5595 f_fpr = build_decl (FIELD_DECL, get_identifier ("fpr"),
5596 unsigned_char_type_node);
5597 /* Give the two bytes of padding a name, so that -Wpadded won't warn on
5599 f_res = build_decl (FIELD_DECL, get_identifier ("reserved"),
5600 short_unsigned_type_node);
5601 f_ovf = build_decl (FIELD_DECL, get_identifier ("overflow_arg_area"),
5603 f_sav = build_decl (FIELD_DECL, get_identifier ("reg_save_area"),
5606 va_list_gpr_counter_field = f_gpr;
5607 va_list_fpr_counter_field = f_fpr;
5609 DECL_FIELD_CONTEXT (f_gpr) = record;
5610 DECL_FIELD_CONTEXT (f_fpr) = record;
5611 DECL_FIELD_CONTEXT (f_res) = record;
5612 DECL_FIELD_CONTEXT (f_ovf) = record;
5613 DECL_FIELD_CONTEXT (f_sav) = record;
5615 TREE_CHAIN (record) = type_decl;
5616 TYPE_NAME (record) = type_decl;
5617 TYPE_FIELDS (record) = f_gpr;
5618 TREE_CHAIN (f_gpr) = f_fpr;
5619 TREE_CHAIN (f_fpr) = f_res;
5620 TREE_CHAIN (f_res) = f_ovf;
5621 TREE_CHAIN (f_ovf) = f_sav;
5623 layout_type (record);
5625 /* The correct type is an array type of one element. */
5626 return build_array_type (record, build_index_type (size_zero_node));
5629 /* Implement va_start. */
5632 rs6000_va_start (tree valist, rtx nextarg)
5634 HOST_WIDE_INT words, n_gpr, n_fpr;
5635 tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
5636 tree gpr, fpr, ovf, sav, t;
5638 /* Only SVR4 needs something special. */
5639 if (DEFAULT_ABI != ABI_V4)
5641 std_expand_builtin_va_start (valist, nextarg);
5645 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
5646 f_fpr = TREE_CHAIN (f_gpr);
5647 f_res = TREE_CHAIN (f_fpr);
5648 f_ovf = TREE_CHAIN (f_res);
5649 f_sav = TREE_CHAIN (f_ovf);
5651 valist = build_va_arg_indirect_ref (valist);
5652 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
5653 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr, NULL_TREE);
5654 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf, NULL_TREE);
5655 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav, NULL_TREE);
5657 /* Count number of gp and fp argument registers used. */
5658 words = current_function_args_info.words;
5659 n_gpr = MIN (current_function_args_info.sysv_gregno - GP_ARG_MIN_REG,
5661 n_fpr = MIN (current_function_args_info.fregno - FP_ARG_MIN_REG,
5664 if (TARGET_DEBUG_ARG)
5665 fprintf (stderr, "va_start: words = "HOST_WIDE_INT_PRINT_DEC", n_gpr = "
5666 HOST_WIDE_INT_PRINT_DEC", n_fpr = "HOST_WIDE_INT_PRINT_DEC"\n",
5667 words, n_gpr, n_fpr);
5669 if (cfun->va_list_gpr_size)
5671 t = build (MODIFY_EXPR, TREE_TYPE (gpr), gpr,
5672 build_int_cst (NULL_TREE, n_gpr));
5673 TREE_SIDE_EFFECTS (t) = 1;
5674 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5677 if (cfun->va_list_fpr_size)
5679 t = build (MODIFY_EXPR, TREE_TYPE (fpr), fpr,
5680 build_int_cst (NULL_TREE, n_fpr));
5681 TREE_SIDE_EFFECTS (t) = 1;
5682 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5685 /* Find the overflow area. */
5686 t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
5688 t = build (PLUS_EXPR, TREE_TYPE (ovf), t,
5689 build_int_cst (NULL_TREE, words * UNITS_PER_WORD));
5690 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
5691 TREE_SIDE_EFFECTS (t) = 1;
5692 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5694 /* If there were no va_arg invocations, don't set up the register
5696 if (!cfun->va_list_gpr_size
5697 && !cfun->va_list_fpr_size
5698 && n_gpr < GP_ARG_NUM_REG
5699 && n_fpr < FP_ARG_V4_MAX_REG)
5702 /* Find the register save area. */
5703 t = make_tree (TREE_TYPE (sav), virtual_stack_vars_rtx);
5704 if (cfun->machine->varargs_save_offset)
5705 t = build (PLUS_EXPR, TREE_TYPE (sav), t,
5706 build_int_cst (NULL_TREE, cfun->machine->varargs_save_offset));
5707 t = build (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
5708 TREE_SIDE_EFFECTS (t) = 1;
5709 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5712 /* Implement va_arg. */
5715 rs6000_gimplify_va_arg (tree valist, tree type, tree *pre_p, tree *post_p)
5717 tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
5718 tree gpr, fpr, ovf, sav, reg, t, u;
5719 int size, rsize, n_reg, sav_ofs, sav_scale;
5720 tree lab_false, lab_over, addr;
5722 tree ptrtype = build_pointer_type (type);
5724 if (pass_by_reference (NULL, TYPE_MODE (type), type, false))
5726 t = rs6000_gimplify_va_arg (valist, ptrtype, pre_p, post_p);
5727 return build_va_arg_indirect_ref (t);
5730 if (DEFAULT_ABI != ABI_V4)
5732 if (targetm.calls.split_complex_arg && TREE_CODE (type) == COMPLEX_TYPE)
5734 tree elem_type = TREE_TYPE (type);
5735 enum machine_mode elem_mode = TYPE_MODE (elem_type);
5736 int elem_size = GET_MODE_SIZE (elem_mode);
5738 if (elem_size < UNITS_PER_WORD)
5740 tree real_part, imag_part;
5741 tree post = NULL_TREE;
5743 real_part = rs6000_gimplify_va_arg (valist, elem_type, pre_p,
5745 /* Copy the value into a temporary, lest the formal temporary
5746 be reused out from under us. */
5747 real_part = get_initialized_tmp_var (real_part, pre_p, &post);
5748 append_to_statement_list (post, pre_p);
5750 imag_part = rs6000_gimplify_va_arg (valist, elem_type, pre_p,
5753 return build (COMPLEX_EXPR, type, real_part, imag_part);
5757 return std_gimplify_va_arg_expr (valist, type, pre_p, post_p);
5760 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
5761 f_fpr = TREE_CHAIN (f_gpr);
5762 f_res = TREE_CHAIN (f_fpr);
5763 f_ovf = TREE_CHAIN (f_res);
5764 f_sav = TREE_CHAIN (f_ovf);
5766 valist = build_va_arg_indirect_ref (valist);
5767 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
5768 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr, NULL_TREE);
5769 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf, NULL_TREE);
5770 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav, NULL_TREE);
5772 size = int_size_in_bytes (type);
5773 rsize = (size + 3) / 4;
5776 if (TARGET_HARD_FLOAT && TARGET_FPRS
5777 && (TYPE_MODE (type) == SFmode || TYPE_MODE (type) == DFmode))
5779 /* FP args go in FP registers, if present. */
5784 if (TYPE_MODE (type) == DFmode)
5789 /* Otherwise into GP registers. */
5798 /* Pull the value out of the saved registers.... */
5801 addr = create_tmp_var (ptr_type_node, "addr");
5802 DECL_POINTER_ALIAS_SET (addr) = get_varargs_alias_set ();
5804 /* AltiVec vectors never go in registers when -mabi=altivec. */
5805 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (TYPE_MODE (type)))
5809 lab_false = create_artificial_label ();
5810 lab_over = create_artificial_label ();
5812 /* Long long and SPE vectors are aligned in the registers.
5813 As are any other 2 gpr item such as complex int due to a
5814 historical mistake. */
5818 u = build2 (BIT_AND_EXPR, TREE_TYPE (reg), reg,
5819 size_int (n_reg - 1));
5820 u = build2 (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg, u);
5823 t = fold_convert (TREE_TYPE (reg), size_int (8 - n_reg + 1));
5824 t = build2 (GE_EXPR, boolean_type_node, u, t);
5825 u = build1 (GOTO_EXPR, void_type_node, lab_false);
5826 t = build3 (COND_EXPR, void_type_node, t, u, NULL_TREE);
5827 gimplify_and_add (t, pre_p);
5831 t = build2 (PLUS_EXPR, ptr_type_node, sav, size_int (sav_ofs));
5833 u = build2 (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg, size_int (n_reg));
5834 u = build1 (CONVERT_EXPR, integer_type_node, u);
5835 u = build2 (MULT_EXPR, integer_type_node, u, size_int (sav_scale));
5836 t = build2 (PLUS_EXPR, ptr_type_node, t, u);
5838 t = build2 (MODIFY_EXPR, void_type_node, addr, t);
5839 gimplify_and_add (t, pre_p);
5841 t = build1 (GOTO_EXPR, void_type_node, lab_over);
5842 gimplify_and_add (t, pre_p);
5844 t = build1 (LABEL_EXPR, void_type_node, lab_false);
5845 append_to_statement_list (t, pre_p);
5849 /* Ensure that we don't find any more args in regs.
5850 Alignment has taken care of the n_reg == 2 case. */
5851 t = build (MODIFY_EXPR, TREE_TYPE (reg), reg, size_int (8));
5852 gimplify_and_add (t, pre_p);
5856 /* ... otherwise out of the overflow area. */
5858 /* Care for on-stack alignment if needed. */
5862 t = build2 (PLUS_EXPR, TREE_TYPE (t), t, size_int (align - 1));
5863 t = build2 (BIT_AND_EXPR, TREE_TYPE (t), t,
5864 build_int_cst (NULL_TREE, -align));
5866 gimplify_expr (&t, pre_p, NULL, is_gimple_val, fb_rvalue);
5868 u = build2 (MODIFY_EXPR, void_type_node, addr, t);
5869 gimplify_and_add (u, pre_p);
5871 t = build2 (PLUS_EXPR, TREE_TYPE (t), t, size_int (size));
5872 t = build2 (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
5873 gimplify_and_add (t, pre_p);
5877 t = build1 (LABEL_EXPR, void_type_node, lab_over);
5878 append_to_statement_list (t, pre_p);
5881 addr = fold_convert (ptrtype, addr);
5882 return build_va_arg_indirect_ref (addr);
5888 def_builtin (int mask, const char *name, tree type, int code)
5890 if (mask & target_flags)
5892 if (rs6000_builtin_decls[code])
5895 rs6000_builtin_decls[code] =
5896 lang_hooks.builtin_function (name, type, code, BUILT_IN_MD,
5901 /* Simple ternary operations: VECd = foo (VECa, VECb, VECc). */
5903 static const struct builtin_description bdesc_3arg[] =
5905 { MASK_ALTIVEC, CODE_FOR_altivec_vmaddfp, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP },
5906 { MASK_ALTIVEC, CODE_FOR_altivec_vmhaddshs, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS },
5907 { MASK_ALTIVEC, CODE_FOR_altivec_vmhraddshs, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS },
5908 { MASK_ALTIVEC, CODE_FOR_altivec_vmladduhm, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM},
5909 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumubm, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM },
5910 { MASK_ALTIVEC, CODE_FOR_altivec_vmsummbm, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM },
5911 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhm, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM },
5912 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshm, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM },
5913 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhs, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS },
5914 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshs, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS },
5915 { MASK_ALTIVEC, CODE_FOR_altivec_vnmsubfp, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP },
5916 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v4sf, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF },
5917 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v4si, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI },
5918 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v8hi, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI },
5919 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v16qi, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI },
5920 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v4sf, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF },
5921 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v4si, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI },
5922 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v8hi, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI },
5923 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v16qi, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI },
5924 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v16qi, "__builtin_altivec_vsldoi_16qi", ALTIVEC_BUILTIN_VSLDOI_16QI },
5925 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v8hi, "__builtin_altivec_vsldoi_8hi", ALTIVEC_BUILTIN_VSLDOI_8HI },
5926 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v4si, "__builtin_altivec_vsldoi_4si", ALTIVEC_BUILTIN_VSLDOI_4SI },
5927 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v4sf, "__builtin_altivec_vsldoi_4sf", ALTIVEC_BUILTIN_VSLDOI_4SF },
5929 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_madd", ALTIVEC_BUILTIN_VEC_MADD },
5930 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_madds", ALTIVEC_BUILTIN_VEC_MADDS },
5931 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mladd", ALTIVEC_BUILTIN_VEC_MLADD },
5932 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mradds", ALTIVEC_BUILTIN_VEC_MRADDS },
5933 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_msum", ALTIVEC_BUILTIN_VEC_MSUM },
5934 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumshm", ALTIVEC_BUILTIN_VEC_VMSUMSHM },
5935 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumuhm", ALTIVEC_BUILTIN_VEC_VMSUMUHM },
5936 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsummbm", ALTIVEC_BUILTIN_VEC_VMSUMMBM },
5937 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumubm", ALTIVEC_BUILTIN_VEC_VMSUMUBM },
5938 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_msums", ALTIVEC_BUILTIN_VEC_MSUMS },
5939 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumshs", ALTIVEC_BUILTIN_VEC_VMSUMSHS },
5940 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumuhs", ALTIVEC_BUILTIN_VEC_VMSUMUHS },
5941 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_nmsub", ALTIVEC_BUILTIN_VEC_NMSUB },
5942 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_perm", ALTIVEC_BUILTIN_VEC_PERM },
5943 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sel", ALTIVEC_BUILTIN_VEC_SEL },
5946 /* DST operations: void foo (void *, const int, const char). */
5948 static const struct builtin_description bdesc_dst[] =
5950 { MASK_ALTIVEC, CODE_FOR_altivec_dst, "__builtin_altivec_dst", ALTIVEC_BUILTIN_DST },
5951 { MASK_ALTIVEC, CODE_FOR_altivec_dstt, "__builtin_altivec_dstt", ALTIVEC_BUILTIN_DSTT },
5952 { MASK_ALTIVEC, CODE_FOR_altivec_dstst, "__builtin_altivec_dstst", ALTIVEC_BUILTIN_DSTST },
5953 { MASK_ALTIVEC, CODE_FOR_altivec_dststt, "__builtin_altivec_dststt", ALTIVEC_BUILTIN_DSTSTT },
5955 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dst", ALTIVEC_BUILTIN_VEC_DST },
5956 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dstt", ALTIVEC_BUILTIN_VEC_DSTT },
5957 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dstst", ALTIVEC_BUILTIN_VEC_DSTST },
5958 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dststt", ALTIVEC_BUILTIN_VEC_DSTSTT }
5961 /* Simple binary operations: VECc = foo (VECa, VECb). */
5963 static struct builtin_description bdesc_2arg[] =
5965 { MASK_ALTIVEC, CODE_FOR_addv16qi3, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM },
5966 { MASK_ALTIVEC, CODE_FOR_addv8hi3, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM },
5967 { MASK_ALTIVEC, CODE_FOR_addv4si3, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM },
5968 { MASK_ALTIVEC, CODE_FOR_addv4sf3, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP },
5969 { MASK_ALTIVEC, CODE_FOR_altivec_vaddcuw, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW },
5970 { MASK_ALTIVEC, CODE_FOR_altivec_vaddubs, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS },
5971 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsbs, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS },
5972 { MASK_ALTIVEC, CODE_FOR_altivec_vadduhs, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS },
5973 { MASK_ALTIVEC, CODE_FOR_altivec_vaddshs, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS },
5974 { MASK_ALTIVEC, CODE_FOR_altivec_vadduws, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS },
5975 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsws, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS },
5976 { MASK_ALTIVEC, CODE_FOR_andv4si3, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND },
5977 { MASK_ALTIVEC, CODE_FOR_andcv4si3, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC },
5978 { MASK_ALTIVEC, CODE_FOR_altivec_vavgub, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB },
5979 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsb, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB },
5980 { MASK_ALTIVEC, CODE_FOR_altivec_vavguh, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH },
5981 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsh, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH },
5982 { MASK_ALTIVEC, CODE_FOR_altivec_vavguw, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW },
5983 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsw, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW },
5984 { MASK_ALTIVEC, CODE_FOR_altivec_vcfux, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX },
5985 { MASK_ALTIVEC, CODE_FOR_altivec_vcfsx, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX },
5986 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpbfp, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP },
5987 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequb, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB },
5988 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequh, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH },
5989 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequw, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW },
5990 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpeqfp, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP },
5991 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgefp, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP },
5992 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtub, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB },
5993 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsb, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB },
5994 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuh, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH },
5995 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsh, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH },
5996 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuw, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW },
5997 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsw, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW },
5998 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtfp, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP },
5999 { MASK_ALTIVEC, CODE_FOR_altivec_vctsxs, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS },
6000 { MASK_ALTIVEC, CODE_FOR_altivec_vctuxs, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS },
6001 { MASK_ALTIVEC, CODE_FOR_umaxv16qi3, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB },
6002 { MASK_ALTIVEC, CODE_FOR_smaxv16qi3, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB },
6003 { MASK_ALTIVEC, CODE_FOR_umaxv8hi3, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH },
6004 { MASK_ALTIVEC, CODE_FOR_smaxv8hi3, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH },
6005 { MASK_ALTIVEC, CODE_FOR_umaxv4si3, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW },
6006 { MASK_ALTIVEC, CODE_FOR_smaxv4si3, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW },
6007 { MASK_ALTIVEC, CODE_FOR_smaxv4sf3, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP },
6008 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghb, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB },
6009 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghh, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH },
6010 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghw, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW },
6011 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglb, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB },
6012 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglh, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH },
6013 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglw, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW },
6014 { MASK_ALTIVEC, CODE_FOR_uminv16qi3, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB },
6015 { MASK_ALTIVEC, CODE_FOR_sminv16qi3, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB },
6016 { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH },
6017 { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH },
6018 { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW },
6019 { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW },
6020 { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP },
6021 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleub, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB },
6022 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesb, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB },
6023 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleuh, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH },
6024 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesh, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH },
6025 { MASK_ALTIVEC, CODE_FOR_altivec_vmuloub, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB },
6026 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosb, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB },
6027 { MASK_ALTIVEC, CODE_FOR_altivec_vmulouh, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH },
6028 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosh, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH },
6029 { MASK_ALTIVEC, CODE_FOR_altivec_norv4si3, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR },
6030 { MASK_ALTIVEC, CODE_FOR_iorv4si3, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR },
6031 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhum, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM },
6032 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwum, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM },
6033 { MASK_ALTIVEC, CODE_FOR_altivec_vpkpx, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX },
6034 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshss, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS },
6035 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswss, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS },
6036 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhus, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS },
6037 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshus, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS },
6038 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwus, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS },
6039 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswus, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS },
6040 { MASK_ALTIVEC, CODE_FOR_altivec_vrlb, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB },
6041 { MASK_ALTIVEC, CODE_FOR_altivec_vrlh, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH },
6042 { MASK_ALTIVEC, CODE_FOR_altivec_vrlw, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW },
6043 { MASK_ALTIVEC, CODE_FOR_altivec_vslb, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB },
6044 { MASK_ALTIVEC, CODE_FOR_altivec_vslh, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH },
6045 { MASK_ALTIVEC, CODE_FOR_altivec_vslw, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW },
6046 { MASK_ALTIVEC, CODE_FOR_altivec_vsl, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL },
6047 { MASK_ALTIVEC, CODE_FOR_altivec_vslo, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO },
6048 { MASK_ALTIVEC, CODE_FOR_altivec_vspltb, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB },
6049 { MASK_ALTIVEC, CODE_FOR_altivec_vsplth, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH },
6050 { MASK_ALTIVEC, CODE_FOR_altivec_vspltw, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW },
6051 { MASK_ALTIVEC, CODE_FOR_lshrv16qi3, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB },
6052 { MASK_ALTIVEC, CODE_FOR_lshrv8hi3, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH },
6053 { MASK_ALTIVEC, CODE_FOR_lshrv4si3, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW },
6054 { MASK_ALTIVEC, CODE_FOR_ashrv16qi3, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB },
6055 { MASK_ALTIVEC, CODE_FOR_ashrv8hi3, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH },
6056 { MASK_ALTIVEC, CODE_FOR_ashrv4si3, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW },
6057 { MASK_ALTIVEC, CODE_FOR_altivec_vsr, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR },
6058 { MASK_ALTIVEC, CODE_FOR_altivec_vsro, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO },
6059 { MASK_ALTIVEC, CODE_FOR_subv16qi3, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM },
6060 { MASK_ALTIVEC, CODE_FOR_subv8hi3, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM },
6061 { MASK_ALTIVEC, CODE_FOR_subv4si3, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM },
6062 { MASK_ALTIVEC, CODE_FOR_subv4sf3, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP },
6063 { MASK_ALTIVEC, CODE_FOR_altivec_vsubcuw, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW },
6064 { MASK_ALTIVEC, CODE_FOR_altivec_vsububs, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS },
6065 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsbs, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS },
6066 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuhs, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS },
6067 { MASK_ALTIVEC, CODE_FOR_altivec_vsubshs, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS },
6068 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuws, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS },
6069 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsws, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS },
6070 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4ubs, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS },
6071 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4sbs, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS },
6072 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4shs, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS },
6073 { MASK_ALTIVEC, CODE_FOR_altivec_vsum2sws, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS },
6074 { MASK_ALTIVEC, CODE_FOR_altivec_vsumsws, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS },
6075 { MASK_ALTIVEC, CODE_FOR_xorv4si3, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR },
6077 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_add", ALTIVEC_BUILTIN_VEC_ADD },
6078 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddfp", ALTIVEC_BUILTIN_VEC_VADDFP },
6079 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduwm", ALTIVEC_BUILTIN_VEC_VADDUWM },
6080 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduhm", ALTIVEC_BUILTIN_VEC_VADDUHM },
6081 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddubm", ALTIVEC_BUILTIN_VEC_VADDUBM },
6082 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_addc", ALTIVEC_BUILTIN_VEC_ADDC },
6083 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_adds", ALTIVEC_BUILTIN_VEC_ADDS },
6084 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddsws", ALTIVEC_BUILTIN_VEC_VADDSWS },
6085 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduws", ALTIVEC_BUILTIN_VEC_VADDUWS },
6086 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddshs", ALTIVEC_BUILTIN_VEC_VADDSHS },
6087 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduhs", ALTIVEC_BUILTIN_VEC_VADDUHS },
6088 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddsbs", ALTIVEC_BUILTIN_VEC_VADDSBS },
6089 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddubs", ALTIVEC_BUILTIN_VEC_VADDUBS },
6090 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_and", ALTIVEC_BUILTIN_VEC_AND },
6091 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_andc", ALTIVEC_BUILTIN_VEC_ANDC },
6092 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_avg", ALTIVEC_BUILTIN_VEC_AVG },
6093 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgsw", ALTIVEC_BUILTIN_VEC_VAVGSW },
6094 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavguw", ALTIVEC_BUILTIN_VEC_VAVGUW },
6095 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgsh", ALTIVEC_BUILTIN_VEC_VAVGSH },
6096 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavguh", ALTIVEC_BUILTIN_VEC_VAVGUH },
6097 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgsb", ALTIVEC_BUILTIN_VEC_VAVGSB },
6098 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgub", ALTIVEC_BUILTIN_VEC_VAVGUB },
6099 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpb", ALTIVEC_BUILTIN_VEC_CMPB },
6100 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpeq", ALTIVEC_BUILTIN_VEC_CMPEQ },
6101 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpeqfp", ALTIVEC_BUILTIN_VEC_VCMPEQFP },
6102 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpequw", ALTIVEC_BUILTIN_VEC_VCMPEQUW },
6103 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpequh", ALTIVEC_BUILTIN_VEC_VCMPEQUH },
6104 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpequb", ALTIVEC_BUILTIN_VEC_VCMPEQUB },
6105 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpge", ALTIVEC_BUILTIN_VEC_CMPGE },
6106 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpgt", ALTIVEC_BUILTIN_VEC_CMPGT },
6107 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtfp", ALTIVEC_BUILTIN_VEC_VCMPGTFP },
6108 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtsw", ALTIVEC_BUILTIN_VEC_VCMPGTSW },
6109 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtuw", ALTIVEC_BUILTIN_VEC_VCMPGTUW },
6110 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtsh", ALTIVEC_BUILTIN_VEC_VCMPGTSH },
6111 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtuh", ALTIVEC_BUILTIN_VEC_VCMPGTUH },
6112 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtsb", ALTIVEC_BUILTIN_VEC_VCMPGTSB },
6113 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtub", ALTIVEC_BUILTIN_VEC_VCMPGTUB },
6114 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmple", ALTIVEC_BUILTIN_VEC_CMPLE },
6115 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmplt", ALTIVEC_BUILTIN_VEC_CMPLT },
6116 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_max", ALTIVEC_BUILTIN_VEC_MAX },
6117 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxfp", ALTIVEC_BUILTIN_VEC_VMAXFP },
6118 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxsw", ALTIVEC_BUILTIN_VEC_VMAXSW },
6119 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxuw", ALTIVEC_BUILTIN_VEC_VMAXUW },
6120 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxsh", ALTIVEC_BUILTIN_VEC_VMAXSH },
6121 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxuh", ALTIVEC_BUILTIN_VEC_VMAXUH },
6122 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxsb", ALTIVEC_BUILTIN_VEC_VMAXSB },
6123 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxub", ALTIVEC_BUILTIN_VEC_VMAXUB },
6124 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mergeh", ALTIVEC_BUILTIN_VEC_MERGEH },
6125 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrghw", ALTIVEC_BUILTIN_VEC_VMRGHW },
6126 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrghh", ALTIVEC_BUILTIN_VEC_VMRGHH },
6127 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrghb", ALTIVEC_BUILTIN_VEC_VMRGHB },
6128 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mergel", ALTIVEC_BUILTIN_VEC_MERGEL },
6129 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrglw", ALTIVEC_BUILTIN_VEC_VMRGLW },
6130 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrglh", ALTIVEC_BUILTIN_VEC_VMRGLH },
6131 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrglb", ALTIVEC_BUILTIN_VEC_VMRGLB },
6132 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_min", ALTIVEC_BUILTIN_VEC_MIN },
6133 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminfp", ALTIVEC_BUILTIN_VEC_VMINFP },
6134 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminsw", ALTIVEC_BUILTIN_VEC_VMINSW },
6135 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminuw", ALTIVEC_BUILTIN_VEC_VMINUW },
6136 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminsh", ALTIVEC_BUILTIN_VEC_VMINSH },
6137 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminuh", ALTIVEC_BUILTIN_VEC_VMINUH },
6138 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminsb", ALTIVEC_BUILTIN_VEC_VMINSB },
6139 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminub", ALTIVEC_BUILTIN_VEC_VMINUB },
6140 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mule", ALTIVEC_BUILTIN_VEC_MULE },
6141 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmuleub", ALTIVEC_BUILTIN_VEC_VMULEUB },
6142 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulesb", ALTIVEC_BUILTIN_VEC_VMULESB },
6143 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmuleuh", ALTIVEC_BUILTIN_VEC_VMULEUH },
6144 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulesh", ALTIVEC_BUILTIN_VEC_VMULESH },
6145 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mulo", ALTIVEC_BUILTIN_VEC_MULO },
6146 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulosh", ALTIVEC_BUILTIN_VEC_VMULOSH },
6147 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulouh", ALTIVEC_BUILTIN_VEC_VMULOUH },
6148 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulosb", ALTIVEC_BUILTIN_VEC_VMULOSB },
6149 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmuloub", ALTIVEC_BUILTIN_VEC_VMULOUB },
6150 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_nor", ALTIVEC_BUILTIN_VEC_NOR },
6151 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_or", ALTIVEC_BUILTIN_VEC_OR },
6152 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_pack", ALTIVEC_BUILTIN_VEC_PACK },
6153 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuwum", ALTIVEC_BUILTIN_VEC_VPKUWUM },
6154 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuhum", ALTIVEC_BUILTIN_VEC_VPKUHUM },
6155 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_packpx", ALTIVEC_BUILTIN_VEC_PACKPX },
6156 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_packs", ALTIVEC_BUILTIN_VEC_PACKS },
6157 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkswss", ALTIVEC_BUILTIN_VEC_VPKSWSS },
6158 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuwus", ALTIVEC_BUILTIN_VEC_VPKUWUS },
6159 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkshss", ALTIVEC_BUILTIN_VEC_VPKSHSS },
6160 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuhus", ALTIVEC_BUILTIN_VEC_VPKUHUS },
6161 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_packsu", ALTIVEC_BUILTIN_VEC_PACKSU },
6162 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkswus", ALTIVEC_BUILTIN_VEC_VPKSWUS },
6163 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkshus", ALTIVEC_BUILTIN_VEC_VPKSHUS },
6164 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_rl", ALTIVEC_BUILTIN_VEC_RL },
6165 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vrlw", ALTIVEC_BUILTIN_VEC_VRLW },
6166 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vrlh", ALTIVEC_BUILTIN_VEC_VRLH },
6167 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vrlb", ALTIVEC_BUILTIN_VEC_VRLB },
6168 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sl", ALTIVEC_BUILTIN_VEC_SL },
6169 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vslw", ALTIVEC_BUILTIN_VEC_VSLW },
6170 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vslh", ALTIVEC_BUILTIN_VEC_VSLH },
6171 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vslb", ALTIVEC_BUILTIN_VEC_VSLB },
6172 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sll", ALTIVEC_BUILTIN_VEC_SLL },
6173 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_slo", ALTIVEC_BUILTIN_VEC_SLO },
6174 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sr", ALTIVEC_BUILTIN_VEC_SR },
6175 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrw", ALTIVEC_BUILTIN_VEC_VSRW },
6176 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrh", ALTIVEC_BUILTIN_VEC_VSRH },
6177 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrb", ALTIVEC_BUILTIN_VEC_VSRB },
6178 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sra", ALTIVEC_BUILTIN_VEC_SRA },
6179 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsraw", ALTIVEC_BUILTIN_VEC_VSRAW },
6180 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrah", ALTIVEC_BUILTIN_VEC_VSRAH },
6181 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrab", ALTIVEC_BUILTIN_VEC_VSRAB },
6182 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_srl", ALTIVEC_BUILTIN_VEC_SRL },
6183 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sro", ALTIVEC_BUILTIN_VEC_SRO },
6184 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sub", ALTIVEC_BUILTIN_VEC_SUB },
6185 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubfp", ALTIVEC_BUILTIN_VEC_VSUBFP },
6186 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuwm", ALTIVEC_BUILTIN_VEC_VSUBUWM },
6187 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuhm", ALTIVEC_BUILTIN_VEC_VSUBUHM },
6188 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsububm", ALTIVEC_BUILTIN_VEC_VSUBUBM },
6189 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_subc", ALTIVEC_BUILTIN_VEC_SUBC },
6190 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_subs", ALTIVEC_BUILTIN_VEC_SUBS },
6191 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubsws", ALTIVEC_BUILTIN_VEC_VSUBSWS },
6192 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuws", ALTIVEC_BUILTIN_VEC_VSUBUWS },
6193 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubshs", ALTIVEC_BUILTIN_VEC_VSUBSHS },
6194 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuhs", ALTIVEC_BUILTIN_VEC_VSUBUHS },
6195 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubsbs", ALTIVEC_BUILTIN_VEC_VSUBSBS },
6196 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsububs", ALTIVEC_BUILTIN_VEC_VSUBUBS },
6197 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sum4s", ALTIVEC_BUILTIN_VEC_SUM4S },
6198 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsum4shs", ALTIVEC_BUILTIN_VEC_VSUM4SHS },
6199 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsum4sbs", ALTIVEC_BUILTIN_VEC_VSUM4SBS },
6200 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsum4ubs", ALTIVEC_BUILTIN_VEC_VSUM4UBS },
6201 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sum2s", ALTIVEC_BUILTIN_VEC_SUM2S },
6202 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sums", ALTIVEC_BUILTIN_VEC_SUMS },
6203 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_xor", ALTIVEC_BUILTIN_VEC_XOR },
6205 /* Place holder, leave as first spe builtin. */
6206 { 0, CODE_FOR_spe_evaddw, "__builtin_spe_evaddw", SPE_BUILTIN_EVADDW },
6207 { 0, CODE_FOR_spe_evand, "__builtin_spe_evand", SPE_BUILTIN_EVAND },
6208 { 0, CODE_FOR_spe_evandc, "__builtin_spe_evandc", SPE_BUILTIN_EVANDC },
6209 { 0, CODE_FOR_spe_evdivws, "__builtin_spe_evdivws", SPE_BUILTIN_EVDIVWS },
6210 { 0, CODE_FOR_spe_evdivwu, "__builtin_spe_evdivwu", SPE_BUILTIN_EVDIVWU },
6211 { 0, CODE_FOR_spe_eveqv, "__builtin_spe_eveqv", SPE_BUILTIN_EVEQV },
6212 { 0, CODE_FOR_spe_evfsadd, "__builtin_spe_evfsadd", SPE_BUILTIN_EVFSADD },
6213 { 0, CODE_FOR_spe_evfsdiv, "__builtin_spe_evfsdiv", SPE_BUILTIN_EVFSDIV },
6214 { 0, CODE_FOR_spe_evfsmul, "__builtin_spe_evfsmul", SPE_BUILTIN_EVFSMUL },
6215 { 0, CODE_FOR_spe_evfssub, "__builtin_spe_evfssub", SPE_BUILTIN_EVFSSUB },
6216 { 0, CODE_FOR_spe_evmergehi, "__builtin_spe_evmergehi", SPE_BUILTIN_EVMERGEHI },
6217 { 0, CODE_FOR_spe_evmergehilo, "__builtin_spe_evmergehilo", SPE_BUILTIN_EVMERGEHILO },
6218 { 0, CODE_FOR_spe_evmergelo, "__builtin_spe_evmergelo", SPE_BUILTIN_EVMERGELO },
6219 { 0, CODE_FOR_spe_evmergelohi, "__builtin_spe_evmergelohi", SPE_BUILTIN_EVMERGELOHI },
6220 { 0, CODE_FOR_spe_evmhegsmfaa, "__builtin_spe_evmhegsmfaa", SPE_BUILTIN_EVMHEGSMFAA },
6221 { 0, CODE_FOR_spe_evmhegsmfan, "__builtin_spe_evmhegsmfan", SPE_BUILTIN_EVMHEGSMFAN },
6222 { 0, CODE_FOR_spe_evmhegsmiaa, "__builtin_spe_evmhegsmiaa", SPE_BUILTIN_EVMHEGSMIAA },
6223 { 0, CODE_FOR_spe_evmhegsmian, "__builtin_spe_evmhegsmian", SPE_BUILTIN_EVMHEGSMIAN },
6224 { 0, CODE_FOR_spe_evmhegumiaa, "__builtin_spe_evmhegumiaa", SPE_BUILTIN_EVMHEGUMIAA },
6225 { 0, CODE_FOR_spe_evmhegumian, "__builtin_spe_evmhegumian", SPE_BUILTIN_EVMHEGUMIAN },
6226 { 0, CODE_FOR_spe_evmhesmf, "__builtin_spe_evmhesmf", SPE_BUILTIN_EVMHESMF },
6227 { 0, CODE_FOR_spe_evmhesmfa, "__builtin_spe_evmhesmfa", SPE_BUILTIN_EVMHESMFA },
6228 { 0, CODE_FOR_spe_evmhesmfaaw, "__builtin_spe_evmhesmfaaw", SPE_BUILTIN_EVMHESMFAAW },
6229 { 0, CODE_FOR_spe_evmhesmfanw, "__builtin_spe_evmhesmfanw", SPE_BUILTIN_EVMHESMFANW },
6230 { 0, CODE_FOR_spe_evmhesmi, "__builtin_spe_evmhesmi", SPE_BUILTIN_EVMHESMI },
6231 { 0, CODE_FOR_spe_evmhesmia, "__builtin_spe_evmhesmia", SPE_BUILTIN_EVMHESMIA },
6232 { 0, CODE_FOR_spe_evmhesmiaaw, "__builtin_spe_evmhesmiaaw", SPE_BUILTIN_EVMHESMIAAW },
6233 { 0, CODE_FOR_spe_evmhesmianw, "__builtin_spe_evmhesmianw", SPE_BUILTIN_EVMHESMIANW },
6234 { 0, CODE_FOR_spe_evmhessf, "__builtin_spe_evmhessf", SPE_BUILTIN_EVMHESSF },
6235 { 0, CODE_FOR_spe_evmhessfa, "__builtin_spe_evmhessfa", SPE_BUILTIN_EVMHESSFA },
6236 { 0, CODE_FOR_spe_evmhessfaaw, "__builtin_spe_evmhessfaaw", SPE_BUILTIN_EVMHESSFAAW },
6237 { 0, CODE_FOR_spe_evmhessfanw, "__builtin_spe_evmhessfanw", SPE_BUILTIN_EVMHESSFANW },
6238 { 0, CODE_FOR_spe_evmhessiaaw, "__builtin_spe_evmhessiaaw", SPE_BUILTIN_EVMHESSIAAW },
6239 { 0, CODE_FOR_spe_evmhessianw, "__builtin_spe_evmhessianw", SPE_BUILTIN_EVMHESSIANW },
6240 { 0, CODE_FOR_spe_evmheumi, "__builtin_spe_evmheumi", SPE_BUILTIN_EVMHEUMI },
6241 { 0, CODE_FOR_spe_evmheumia, "__builtin_spe_evmheumia", SPE_BUILTIN_EVMHEUMIA },
6242 { 0, CODE_FOR_spe_evmheumiaaw, "__builtin_spe_evmheumiaaw", SPE_BUILTIN_EVMHEUMIAAW },
6243 { 0, CODE_FOR_spe_evmheumianw, "__builtin_spe_evmheumianw", SPE_BUILTIN_EVMHEUMIANW },
6244 { 0, CODE_FOR_spe_evmheusiaaw, "__builtin_spe_evmheusiaaw", SPE_BUILTIN_EVMHEUSIAAW },
6245 { 0, CODE_FOR_spe_evmheusianw, "__builtin_spe_evmheusianw", SPE_BUILTIN_EVMHEUSIANW },
6246 { 0, CODE_FOR_spe_evmhogsmfaa, "__builtin_spe_evmhogsmfaa", SPE_BUILTIN_EVMHOGSMFAA },
6247 { 0, CODE_FOR_spe_evmhogsmfan, "__builtin_spe_evmhogsmfan", SPE_BUILTIN_EVMHOGSMFAN },
6248 { 0, CODE_FOR_spe_evmhogsmiaa, "__builtin_spe_evmhogsmiaa", SPE_BUILTIN_EVMHOGSMIAA },
6249 { 0, CODE_FOR_spe_evmhogsmian, "__builtin_spe_evmhogsmian", SPE_BUILTIN_EVMHOGSMIAN },
6250 { 0, CODE_FOR_spe_evmhogumiaa, "__builtin_spe_evmhogumiaa", SPE_BUILTIN_EVMHOGUMIAA },
6251 { 0, CODE_FOR_spe_evmhogumian, "__builtin_spe_evmhogumian", SPE_BUILTIN_EVMHOGUMIAN },
6252 { 0, CODE_FOR_spe_evmhosmf, "__builtin_spe_evmhosmf", SPE_BUILTIN_EVMHOSMF },
6253 { 0, CODE_FOR_spe_evmhosmfa, "__builtin_spe_evmhosmfa", SPE_BUILTIN_EVMHOSMFA },
6254 { 0, CODE_FOR_spe_evmhosmfaaw, "__builtin_spe_evmhosmfaaw", SPE_BUILTIN_EVMHOSMFAAW },
6255 { 0, CODE_FOR_spe_evmhosmfanw, "__builtin_spe_evmhosmfanw", SPE_BUILTIN_EVMHOSMFANW },
6256 { 0, CODE_FOR_spe_evmhosmi, "__builtin_spe_evmhosmi", SPE_BUILTIN_EVMHOSMI },
6257 { 0, CODE_FOR_spe_evmhosmia, "__builtin_spe_evmhosmia", SPE_BUILTIN_EVMHOSMIA },
6258 { 0, CODE_FOR_spe_evmhosmiaaw, "__builtin_spe_evmhosmiaaw", SPE_BUILTIN_EVMHOSMIAAW },
6259 { 0, CODE_FOR_spe_evmhosmianw, "__builtin_spe_evmhosmianw", SPE_BUILTIN_EVMHOSMIANW },
6260 { 0, CODE_FOR_spe_evmhossf, "__builtin_spe_evmhossf", SPE_BUILTIN_EVMHOSSF },
6261 { 0, CODE_FOR_spe_evmhossfa, "__builtin_spe_evmhossfa", SPE_BUILTIN_EVMHOSSFA },
6262 { 0, CODE_FOR_spe_evmhossfaaw, "__builtin_spe_evmhossfaaw", SPE_BUILTIN_EVMHOSSFAAW },
6263 { 0, CODE_FOR_spe_evmhossfanw, "__builtin_spe_evmhossfanw", SPE_BUILTIN_EVMHOSSFANW },
6264 { 0, CODE_FOR_spe_evmhossiaaw, "__builtin_spe_evmhossiaaw", SPE_BUILTIN_EVMHOSSIAAW },
6265 { 0, CODE_FOR_spe_evmhossianw, "__builtin_spe_evmhossianw", SPE_BUILTIN_EVMHOSSIANW },
6266 { 0, CODE_FOR_spe_evmhoumi, "__builtin_spe_evmhoumi", SPE_BUILTIN_EVMHOUMI },
6267 { 0, CODE_FOR_spe_evmhoumia, "__builtin_spe_evmhoumia", SPE_BUILTIN_EVMHOUMIA },
6268 { 0, CODE_FOR_spe_evmhoumiaaw, "__builtin_spe_evmhoumiaaw", SPE_BUILTIN_EVMHOUMIAAW },
6269 { 0, CODE_FOR_spe_evmhoumianw, "__builtin_spe_evmhoumianw", SPE_BUILTIN_EVMHOUMIANW },
6270 { 0, CODE_FOR_spe_evmhousiaaw, "__builtin_spe_evmhousiaaw", SPE_BUILTIN_EVMHOUSIAAW },
6271 { 0, CODE_FOR_spe_evmhousianw, "__builtin_spe_evmhousianw", SPE_BUILTIN_EVMHOUSIANW },
6272 { 0, CODE_FOR_spe_evmwhsmf, "__builtin_spe_evmwhsmf", SPE_BUILTIN_EVMWHSMF },
6273 { 0, CODE_FOR_spe_evmwhsmfa, "__builtin_spe_evmwhsmfa", SPE_BUILTIN_EVMWHSMFA },
6274 { 0, CODE_FOR_spe_evmwhsmi, "__builtin_spe_evmwhsmi", SPE_BUILTIN_EVMWHSMI },
6275 { 0, CODE_FOR_spe_evmwhsmia, "__builtin_spe_evmwhsmia", SPE_BUILTIN_EVMWHSMIA },
6276 { 0, CODE_FOR_spe_evmwhssf, "__builtin_spe_evmwhssf", SPE_BUILTIN_EVMWHSSF },
6277 { 0, CODE_FOR_spe_evmwhssfa, "__builtin_spe_evmwhssfa", SPE_BUILTIN_EVMWHSSFA },
6278 { 0, CODE_FOR_spe_evmwhumi, "__builtin_spe_evmwhumi", SPE_BUILTIN_EVMWHUMI },
6279 { 0, CODE_FOR_spe_evmwhumia, "__builtin_spe_evmwhumia", SPE_BUILTIN_EVMWHUMIA },
6280 { 0, CODE_FOR_spe_evmwlsmiaaw, "__builtin_spe_evmwlsmiaaw", SPE_BUILTIN_EVMWLSMIAAW },
6281 { 0, CODE_FOR_spe_evmwlsmianw, "__builtin_spe_evmwlsmianw", SPE_BUILTIN_EVMWLSMIANW },
6282 { 0, CODE_FOR_spe_evmwlssiaaw, "__builtin_spe_evmwlssiaaw", SPE_BUILTIN_EVMWLSSIAAW },
6283 { 0, CODE_FOR_spe_evmwlssianw, "__builtin_spe_evmwlssianw", SPE_BUILTIN_EVMWLSSIANW },
6284 { 0, CODE_FOR_spe_evmwlumi, "__builtin_spe_evmwlumi", SPE_BUILTIN_EVMWLUMI },
6285 { 0, CODE_FOR_spe_evmwlumia, "__builtin_spe_evmwlumia", SPE_BUILTIN_EVMWLUMIA },
6286 { 0, CODE_FOR_spe_evmwlumiaaw, "__builtin_spe_evmwlumiaaw", SPE_BUILTIN_EVMWLUMIAAW },
6287 { 0, CODE_FOR_spe_evmwlumianw, "__builtin_spe_evmwlumianw", SPE_BUILTIN_EVMWLUMIANW },
6288 { 0, CODE_FOR_spe_evmwlusiaaw, "__builtin_spe_evmwlusiaaw", SPE_BUILTIN_EVMWLUSIAAW },
6289 { 0, CODE_FOR_spe_evmwlusianw, "__builtin_spe_evmwlusianw", SPE_BUILTIN_EVMWLUSIANW },
6290 { 0, CODE_FOR_spe_evmwsmf, "__builtin_spe_evmwsmf", SPE_BUILTIN_EVMWSMF },
6291 { 0, CODE_FOR_spe_evmwsmfa, "__builtin_spe_evmwsmfa", SPE_BUILTIN_EVMWSMFA },
6292 { 0, CODE_FOR_spe_evmwsmfaa, "__builtin_spe_evmwsmfaa", SPE_BUILTIN_EVMWSMFAA },
6293 { 0, CODE_FOR_spe_evmwsmfan, "__builtin_spe_evmwsmfan", SPE_BUILTIN_EVMWSMFAN },
6294 { 0, CODE_FOR_spe_evmwsmi, "__builtin_spe_evmwsmi", SPE_BUILTIN_EVMWSMI },
6295 { 0, CODE_FOR_spe_evmwsmia, "__builtin_spe_evmwsmia", SPE_BUILTIN_EVMWSMIA },
6296 { 0, CODE_FOR_spe_evmwsmiaa, "__builtin_spe_evmwsmiaa", SPE_BUILTIN_EVMWSMIAA },
6297 { 0, CODE_FOR_spe_evmwsmian, "__builtin_spe_evmwsmian", SPE_BUILTIN_EVMWSMIAN },
6298 { 0, CODE_FOR_spe_evmwssf, "__builtin_spe_evmwssf", SPE_BUILTIN_EVMWSSF },
6299 { 0, CODE_FOR_spe_evmwssfa, "__builtin_spe_evmwssfa", SPE_BUILTIN_EVMWSSFA },
6300 { 0, CODE_FOR_spe_evmwssfaa, "__builtin_spe_evmwssfaa", SPE_BUILTIN_EVMWSSFAA },
6301 { 0, CODE_FOR_spe_evmwssfan, "__builtin_spe_evmwssfan", SPE_BUILTIN_EVMWSSFAN },
6302 { 0, CODE_FOR_spe_evmwumi, "__builtin_spe_evmwumi", SPE_BUILTIN_EVMWUMI },
6303 { 0, CODE_FOR_spe_evmwumia, "__builtin_spe_evmwumia", SPE_BUILTIN_EVMWUMIA },
6304 { 0, CODE_FOR_spe_evmwumiaa, "__builtin_spe_evmwumiaa", SPE_BUILTIN_EVMWUMIAA },
6305 { 0, CODE_FOR_spe_evmwumian, "__builtin_spe_evmwumian", SPE_BUILTIN_EVMWUMIAN },
6306 { 0, CODE_FOR_spe_evnand, "__builtin_spe_evnand", SPE_BUILTIN_EVNAND },
6307 { 0, CODE_FOR_spe_evnor, "__builtin_spe_evnor", SPE_BUILTIN_EVNOR },
6308 { 0, CODE_FOR_spe_evor, "__builtin_spe_evor", SPE_BUILTIN_EVOR },
6309 { 0, CODE_FOR_spe_evorc, "__builtin_spe_evorc", SPE_BUILTIN_EVORC },
6310 { 0, CODE_FOR_spe_evrlw, "__builtin_spe_evrlw", SPE_BUILTIN_EVRLW },
6311 { 0, CODE_FOR_spe_evslw, "__builtin_spe_evslw", SPE_BUILTIN_EVSLW },
6312 { 0, CODE_FOR_spe_evsrws, "__builtin_spe_evsrws", SPE_BUILTIN_EVSRWS },
6313 { 0, CODE_FOR_spe_evsrwu, "__builtin_spe_evsrwu", SPE_BUILTIN_EVSRWU },
6314 { 0, CODE_FOR_spe_evsubfw, "__builtin_spe_evsubfw", SPE_BUILTIN_EVSUBFW },
6316 /* SPE binary operations expecting a 5-bit unsigned literal. */
6317 { 0, CODE_FOR_spe_evaddiw, "__builtin_spe_evaddiw", SPE_BUILTIN_EVADDIW },
6319 { 0, CODE_FOR_spe_evrlwi, "__builtin_spe_evrlwi", SPE_BUILTIN_EVRLWI },
6320 { 0, CODE_FOR_spe_evslwi, "__builtin_spe_evslwi", SPE_BUILTIN_EVSLWI },
6321 { 0, CODE_FOR_spe_evsrwis, "__builtin_spe_evsrwis", SPE_BUILTIN_EVSRWIS },
6322 { 0, CODE_FOR_spe_evsrwiu, "__builtin_spe_evsrwiu", SPE_BUILTIN_EVSRWIU },
6323 { 0, CODE_FOR_spe_evsubifw, "__builtin_spe_evsubifw", SPE_BUILTIN_EVSUBIFW },
6324 { 0, CODE_FOR_spe_evmwhssfaa, "__builtin_spe_evmwhssfaa", SPE_BUILTIN_EVMWHSSFAA },
6325 { 0, CODE_FOR_spe_evmwhssmaa, "__builtin_spe_evmwhssmaa", SPE_BUILTIN_EVMWHSSMAA },
6326 { 0, CODE_FOR_spe_evmwhsmfaa, "__builtin_spe_evmwhsmfaa", SPE_BUILTIN_EVMWHSMFAA },
6327 { 0, CODE_FOR_spe_evmwhsmiaa, "__builtin_spe_evmwhsmiaa", SPE_BUILTIN_EVMWHSMIAA },
6328 { 0, CODE_FOR_spe_evmwhusiaa, "__builtin_spe_evmwhusiaa", SPE_BUILTIN_EVMWHUSIAA },
6329 { 0, CODE_FOR_spe_evmwhumiaa, "__builtin_spe_evmwhumiaa", SPE_BUILTIN_EVMWHUMIAA },
6330 { 0, CODE_FOR_spe_evmwhssfan, "__builtin_spe_evmwhssfan", SPE_BUILTIN_EVMWHSSFAN },
6331 { 0, CODE_FOR_spe_evmwhssian, "__builtin_spe_evmwhssian", SPE_BUILTIN_EVMWHSSIAN },
6332 { 0, CODE_FOR_spe_evmwhsmfan, "__builtin_spe_evmwhsmfan", SPE_BUILTIN_EVMWHSMFAN },
6333 { 0, CODE_FOR_spe_evmwhsmian, "__builtin_spe_evmwhsmian", SPE_BUILTIN_EVMWHSMIAN },
6334 { 0, CODE_FOR_spe_evmwhusian, "__builtin_spe_evmwhusian", SPE_BUILTIN_EVMWHUSIAN },
6335 { 0, CODE_FOR_spe_evmwhumian, "__builtin_spe_evmwhumian", SPE_BUILTIN_EVMWHUMIAN },
6336 { 0, CODE_FOR_spe_evmwhgssfaa, "__builtin_spe_evmwhgssfaa", SPE_BUILTIN_EVMWHGSSFAA },
6337 { 0, CODE_FOR_spe_evmwhgsmfaa, "__builtin_spe_evmwhgsmfaa", SPE_BUILTIN_EVMWHGSMFAA },
6338 { 0, CODE_FOR_spe_evmwhgsmiaa, "__builtin_spe_evmwhgsmiaa", SPE_BUILTIN_EVMWHGSMIAA },
6339 { 0, CODE_FOR_spe_evmwhgumiaa, "__builtin_spe_evmwhgumiaa", SPE_BUILTIN_EVMWHGUMIAA },
6340 { 0, CODE_FOR_spe_evmwhgssfan, "__builtin_spe_evmwhgssfan", SPE_BUILTIN_EVMWHGSSFAN },
6341 { 0, CODE_FOR_spe_evmwhgsmfan, "__builtin_spe_evmwhgsmfan", SPE_BUILTIN_EVMWHGSMFAN },
6342 { 0, CODE_FOR_spe_evmwhgsmian, "__builtin_spe_evmwhgsmian", SPE_BUILTIN_EVMWHGSMIAN },
6343 { 0, CODE_FOR_spe_evmwhgumian, "__builtin_spe_evmwhgumian", SPE_BUILTIN_EVMWHGUMIAN },
6344 { 0, CODE_FOR_spe_brinc, "__builtin_spe_brinc", SPE_BUILTIN_BRINC },
6346 /* Place-holder. Leave as last binary SPE builtin. */
6347 { 0, CODE_FOR_xorv2si3, "__builtin_spe_evxor", SPE_BUILTIN_EVXOR }
6350 /* AltiVec predicates. */
6352 struct builtin_description_predicates
6354 const unsigned int mask;
6355 const enum insn_code icode;
6357 const char *const name;
6358 const enum rs6000_builtins code;
6361 static const struct builtin_description_predicates bdesc_altivec_preds[] =
6363 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpbfp.", "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P },
6364 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpeqfp.", "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P },
6365 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgefp.", "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P },
6366 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgtfp.", "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P },
6367 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpequw.", "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P },
6368 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtsw.", "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P },
6369 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtuw.", "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P },
6370 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtuh.", "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P },
6371 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtsh.", "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P },
6372 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpequh.", "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P },
6373 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpequb.", "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P },
6374 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtsb.", "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P },
6375 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P },
6377 { MASK_ALTIVEC, 0, NULL, "__builtin_vec_vcmpeq_p", ALTIVEC_BUILTIN_VCMPEQ_P },
6378 { MASK_ALTIVEC, 0, NULL, "__builtin_vec_vcmpgt_p", ALTIVEC_BUILTIN_VCMPGT_P },
6379 { MASK_ALTIVEC, 0, NULL, "__builtin_vec_vcmpge_p", ALTIVEC_BUILTIN_VCMPGE_P }
6382 /* SPE predicates. */
6383 static struct builtin_description bdesc_spe_predicates[] =
6385 /* Place-holder. Leave as first. */
6386 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evcmpeq", SPE_BUILTIN_EVCMPEQ },
6387 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evcmpgts", SPE_BUILTIN_EVCMPGTS },
6388 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evcmpgtu", SPE_BUILTIN_EVCMPGTU },
6389 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evcmplts", SPE_BUILTIN_EVCMPLTS },
6390 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evcmpltu", SPE_BUILTIN_EVCMPLTU },
6391 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evfscmpeq", SPE_BUILTIN_EVFSCMPEQ },
6392 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evfscmpgt", SPE_BUILTIN_EVFSCMPGT },
6393 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evfscmplt", SPE_BUILTIN_EVFSCMPLT },
6394 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evfststeq", SPE_BUILTIN_EVFSTSTEQ },
6395 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evfststgt", SPE_BUILTIN_EVFSTSTGT },
6396 /* Place-holder. Leave as last. */
6397 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evfststlt", SPE_BUILTIN_EVFSTSTLT },
6400 /* SPE evsel predicates. */
6401 static struct builtin_description bdesc_spe_evsel[] =
6403 /* Place-holder. Leave as first. */
6404 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evsel_gts", SPE_BUILTIN_EVSEL_CMPGTS },
6405 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evsel_gtu", SPE_BUILTIN_EVSEL_CMPGTU },
6406 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evsel_lts", SPE_BUILTIN_EVSEL_CMPLTS },
6407 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evsel_ltu", SPE_BUILTIN_EVSEL_CMPLTU },
6408 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evsel_eq", SPE_BUILTIN_EVSEL_CMPEQ },
6409 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evsel_fsgt", SPE_BUILTIN_EVSEL_FSCMPGT },
6410 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evsel_fslt", SPE_BUILTIN_EVSEL_FSCMPLT },
6411 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evsel_fseq", SPE_BUILTIN_EVSEL_FSCMPEQ },
6412 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evsel_fststgt", SPE_BUILTIN_EVSEL_FSTSTGT },
6413 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evsel_fststlt", SPE_BUILTIN_EVSEL_FSTSTLT },
6414 /* Place-holder. Leave as last. */
6415 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evsel_fststeq", SPE_BUILTIN_EVSEL_FSTSTEQ },
6418 /* ABS* operations. */
6420 static const struct builtin_description bdesc_abs[] =
6422 { MASK_ALTIVEC, CODE_FOR_absv4si2, "__builtin_altivec_abs_v4si", ALTIVEC_BUILTIN_ABS_V4SI },
6423 { MASK_ALTIVEC, CODE_FOR_absv8hi2, "__builtin_altivec_abs_v8hi", ALTIVEC_BUILTIN_ABS_V8HI },
6424 { MASK_ALTIVEC, CODE_FOR_absv4sf2, "__builtin_altivec_abs_v4sf", ALTIVEC_BUILTIN_ABS_V4SF },
6425 { MASK_ALTIVEC, CODE_FOR_absv16qi2, "__builtin_altivec_abs_v16qi", ALTIVEC_BUILTIN_ABS_V16QI },
6426 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v4si, "__builtin_altivec_abss_v4si", ALTIVEC_BUILTIN_ABSS_V4SI },
6427 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v8hi, "__builtin_altivec_abss_v8hi", ALTIVEC_BUILTIN_ABSS_V8HI },
6428 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v16qi, "__builtin_altivec_abss_v16qi", ALTIVEC_BUILTIN_ABSS_V16QI }
6431 /* Simple unary operations: VECb = foo (unsigned literal) or VECb =
6434 static struct builtin_description bdesc_1arg[] =
6436 { MASK_ALTIVEC, CODE_FOR_altivec_vexptefp, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP },
6437 { MASK_ALTIVEC, CODE_FOR_altivec_vlogefp, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP },
6438 { MASK_ALTIVEC, CODE_FOR_altivec_vrefp, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP },
6439 { MASK_ALTIVEC, CODE_FOR_altivec_vrfim, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM },
6440 { MASK_ALTIVEC, CODE_FOR_altivec_vrfin, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN },
6441 { MASK_ALTIVEC, CODE_FOR_altivec_vrfip, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP },
6442 { MASK_ALTIVEC, CODE_FOR_ftruncv4sf2, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ },
6443 { MASK_ALTIVEC, CODE_FOR_altivec_vrsqrtefp, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP },
6444 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisb, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB },
6445 { MASK_ALTIVEC, CODE_FOR_altivec_vspltish, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH },
6446 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisw, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW },
6447 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsb, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB },
6448 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhpx, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX },
6449 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsh, "__builtin_altivec_vupkhsh", ALTIVEC_BUILTIN_VUPKHSH },
6450 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsb, "__builtin_altivec_vupklsb", ALTIVEC_BUILTIN_VUPKLSB },
6451 { MASK_ALTIVEC, CODE_FOR_altivec_vupklpx, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX },
6452 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsh, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH },
6454 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_abs", ALTIVEC_BUILTIN_VEC_ABS },
6455 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_abss", ALTIVEC_BUILTIN_VEC_ABSS },
6456 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_ceil", ALTIVEC_BUILTIN_VEC_CEIL },
6457 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_expte", ALTIVEC_BUILTIN_VEC_EXPTE },
6458 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_floor", ALTIVEC_BUILTIN_VEC_FLOOR },
6459 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_loge", ALTIVEC_BUILTIN_VEC_LOGE },
6460 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mtvscr", ALTIVEC_BUILTIN_VEC_MTVSCR },
6461 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_re", ALTIVEC_BUILTIN_VEC_RE },
6462 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_round", ALTIVEC_BUILTIN_VEC_ROUND },
6463 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_rsqrte", ALTIVEC_BUILTIN_VEC_RSQRTE },
6464 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_trunc", ALTIVEC_BUILTIN_VEC_TRUNC },
6465 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_unpackh", ALTIVEC_BUILTIN_VEC_UNPACKH },
6466 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupkhsh", ALTIVEC_BUILTIN_VEC_VUPKHSH },
6467 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupkhpx", ALTIVEC_BUILTIN_VEC_VUPKHPX },
6468 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupkhsb", ALTIVEC_BUILTIN_VEC_VUPKHSB },
6469 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_unpackl", ALTIVEC_BUILTIN_VEC_UNPACKL },
6470 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupklpx", ALTIVEC_BUILTIN_VEC_VUPKLPX },
6471 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupklsh", ALTIVEC_BUILTIN_VEC_VUPKLSH },
6472 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupklsb", ALTIVEC_BUILTIN_VEC_VUPKLSB },
6474 /* The SPE unary builtins must start with SPE_BUILTIN_EVABS and
6475 end with SPE_BUILTIN_EVSUBFUSIAAW. */
6476 { 0, CODE_FOR_spe_evabs, "__builtin_spe_evabs", SPE_BUILTIN_EVABS },
6477 { 0, CODE_FOR_spe_evaddsmiaaw, "__builtin_spe_evaddsmiaaw", SPE_BUILTIN_EVADDSMIAAW },
6478 { 0, CODE_FOR_spe_evaddssiaaw, "__builtin_spe_evaddssiaaw", SPE_BUILTIN_EVADDSSIAAW },
6479 { 0, CODE_FOR_spe_evaddumiaaw, "__builtin_spe_evaddumiaaw", SPE_BUILTIN_EVADDUMIAAW },
6480 { 0, CODE_FOR_spe_evaddusiaaw, "__builtin_spe_evaddusiaaw", SPE_BUILTIN_EVADDUSIAAW },
6481 { 0, CODE_FOR_spe_evcntlsw, "__builtin_spe_evcntlsw", SPE_BUILTIN_EVCNTLSW },
6482 { 0, CODE_FOR_spe_evcntlzw, "__builtin_spe_evcntlzw", SPE_BUILTIN_EVCNTLZW },
6483 { 0, CODE_FOR_spe_evextsb, "__builtin_spe_evextsb", SPE_BUILTIN_EVEXTSB },
6484 { 0, CODE_FOR_spe_evextsh, "__builtin_spe_evextsh", SPE_BUILTIN_EVEXTSH },
6485 { 0, CODE_FOR_spe_evfsabs, "__builtin_spe_evfsabs", SPE_BUILTIN_EVFSABS },
6486 { 0, CODE_FOR_spe_evfscfsf, "__builtin_spe_evfscfsf", SPE_BUILTIN_EVFSCFSF },
6487 { 0, CODE_FOR_spe_evfscfsi, "__builtin_spe_evfscfsi", SPE_BUILTIN_EVFSCFSI },
6488 { 0, CODE_FOR_spe_evfscfuf, "__builtin_spe_evfscfuf", SPE_BUILTIN_EVFSCFUF },
6489 { 0, CODE_FOR_spe_evfscfui, "__builtin_spe_evfscfui", SPE_BUILTIN_EVFSCFUI },
6490 { 0, CODE_FOR_spe_evfsctsf, "__builtin_spe_evfsctsf", SPE_BUILTIN_EVFSCTSF },
6491 { 0, CODE_FOR_spe_evfsctsi, "__builtin_spe_evfsctsi", SPE_BUILTIN_EVFSCTSI },
6492 { 0, CODE_FOR_spe_evfsctsiz, "__builtin_spe_evfsctsiz", SPE_BUILTIN_EVFSCTSIZ },
6493 { 0, CODE_FOR_spe_evfsctuf, "__builtin_spe_evfsctuf", SPE_BUILTIN_EVFSCTUF },
6494 { 0, CODE_FOR_spe_evfsctui, "__builtin_spe_evfsctui", SPE_BUILTIN_EVFSCTUI },
6495 { 0, CODE_FOR_spe_evfsctuiz, "__builtin_spe_evfsctuiz", SPE_BUILTIN_EVFSCTUIZ },
6496 { 0, CODE_FOR_spe_evfsnabs, "__builtin_spe_evfsnabs", SPE_BUILTIN_EVFSNABS },
6497 { 0, CODE_FOR_spe_evfsneg, "__builtin_spe_evfsneg", SPE_BUILTIN_EVFSNEG },
6498 { 0, CODE_FOR_spe_evmra, "__builtin_spe_evmra", SPE_BUILTIN_EVMRA },
6499 { 0, CODE_FOR_negv2si2, "__builtin_spe_evneg", SPE_BUILTIN_EVNEG },
6500 { 0, CODE_FOR_spe_evrndw, "__builtin_spe_evrndw", SPE_BUILTIN_EVRNDW },
6501 { 0, CODE_FOR_spe_evsubfsmiaaw, "__builtin_spe_evsubfsmiaaw", SPE_BUILTIN_EVSUBFSMIAAW },
6502 { 0, CODE_FOR_spe_evsubfssiaaw, "__builtin_spe_evsubfssiaaw", SPE_BUILTIN_EVSUBFSSIAAW },
6503 { 0, CODE_FOR_spe_evsubfumiaaw, "__builtin_spe_evsubfumiaaw", SPE_BUILTIN_EVSUBFUMIAAW },
6505 /* Place-holder. Leave as last unary SPE builtin. */
6506 { 0, CODE_FOR_spe_evsubfusiaaw, "__builtin_spe_evsubfusiaaw", SPE_BUILTIN_EVSUBFUSIAAW }
6510 rs6000_expand_unop_builtin (enum insn_code icode, tree arglist, rtx target)
6513 tree arg0 = TREE_VALUE (arglist);
6514 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6515 enum machine_mode tmode = insn_data[icode].operand[0].mode;
6516 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
6518 if (icode == CODE_FOR_nothing)
6519 /* Builtin not supported on this processor. */
6522 /* If we got invalid arguments bail out before generating bad rtl. */
6523 if (arg0 == error_mark_node)
6526 if (icode == CODE_FOR_altivec_vspltisb
6527 || icode == CODE_FOR_altivec_vspltish
6528 || icode == CODE_FOR_altivec_vspltisw
6529 || icode == CODE_FOR_spe_evsplatfi
6530 || icode == CODE_FOR_spe_evsplati)
6532 /* Only allow 5-bit *signed* literals. */
6533 if (GET_CODE (op0) != CONST_INT
6534 || INTVAL (op0) > 15
6535 || INTVAL (op0) < -16)
6537 error ("argument 1 must be a 5-bit signed literal");
6543 || GET_MODE (target) != tmode
6544 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6545 target = gen_reg_rtx (tmode);
6547 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6548 op0 = copy_to_mode_reg (mode0, op0);
6550 pat = GEN_FCN (icode) (target, op0);
6559 altivec_expand_abs_builtin (enum insn_code icode, tree arglist, rtx target)
6561 rtx pat, scratch1, scratch2;
6562 tree arg0 = TREE_VALUE (arglist);
6563 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6564 enum machine_mode tmode = insn_data[icode].operand[0].mode;
6565 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
6567 /* If we have invalid arguments, bail out before generating bad rtl. */
6568 if (arg0 == error_mark_node)
6572 || GET_MODE (target) != tmode
6573 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6574 target = gen_reg_rtx (tmode);
6576 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6577 op0 = copy_to_mode_reg (mode0, op0);
6579 scratch1 = gen_reg_rtx (mode0);
6580 scratch2 = gen_reg_rtx (mode0);
6582 pat = GEN_FCN (icode) (target, op0, scratch1, scratch2);
6591 rs6000_expand_binop_builtin (enum insn_code icode, tree arglist, rtx target)
6594 tree arg0 = TREE_VALUE (arglist);
6595 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
6596 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6597 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6598 enum machine_mode tmode = insn_data[icode].operand[0].mode;
6599 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
6600 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
6602 if (icode == CODE_FOR_nothing)
6603 /* Builtin not supported on this processor. */
6606 /* If we got invalid arguments bail out before generating bad rtl. */
6607 if (arg0 == error_mark_node || arg1 == error_mark_node)
6610 if (icode == CODE_FOR_altivec_vcfux
6611 || icode == CODE_FOR_altivec_vcfsx
6612 || icode == CODE_FOR_altivec_vctsxs
6613 || icode == CODE_FOR_altivec_vctuxs
6614 || icode == CODE_FOR_altivec_vspltb
6615 || icode == CODE_FOR_altivec_vsplth
6616 || icode == CODE_FOR_altivec_vspltw
6617 || icode == CODE_FOR_spe_evaddiw
6618 || icode == CODE_FOR_spe_evldd
6619 || icode == CODE_FOR_spe_evldh
6620 || icode == CODE_FOR_spe_evldw
6621 || icode == CODE_FOR_spe_evlhhesplat
6622 || icode == CODE_FOR_spe_evlhhossplat
6623 || icode == CODE_FOR_spe_evlhhousplat
6624 || icode == CODE_FOR_spe_evlwhe
6625 || icode == CODE_FOR_spe_evlwhos
6626 || icode == CODE_FOR_spe_evlwhou
6627 || icode == CODE_FOR_spe_evlwhsplat
6628 || icode == CODE_FOR_spe_evlwwsplat
6629 || icode == CODE_FOR_spe_evrlwi
6630 || icode == CODE_FOR_spe_evslwi
6631 || icode == CODE_FOR_spe_evsrwis
6632 || icode == CODE_FOR_spe_evsubifw
6633 || icode == CODE_FOR_spe_evsrwiu)
6635 /* Only allow 5-bit unsigned literals. */
6637 if (TREE_CODE (arg1) != INTEGER_CST
6638 || TREE_INT_CST_LOW (arg1) & ~0x1f)
6640 error ("argument 2 must be a 5-bit unsigned literal");
6646 || GET_MODE (target) != tmode
6647 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6648 target = gen_reg_rtx (tmode);
6650 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6651 op0 = copy_to_mode_reg (mode0, op0);
6652 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
6653 op1 = copy_to_mode_reg (mode1, op1);
6655 pat = GEN_FCN (icode) (target, op0, op1);
6664 altivec_expand_predicate_builtin (enum insn_code icode, const char *opcode,
6665 tree arglist, rtx target)
6668 tree cr6_form = TREE_VALUE (arglist);
6669 tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
6670 tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6671 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6672 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6673 enum machine_mode tmode = SImode;
6674 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
6675 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
6678 if (TREE_CODE (cr6_form) != INTEGER_CST)
6680 error ("argument 1 of __builtin_altivec_predicate must be a constant");
6684 cr6_form_int = TREE_INT_CST_LOW (cr6_form);
6686 gcc_assert (mode0 == mode1);
6688 /* If we have invalid arguments, bail out before generating bad rtl. */
6689 if (arg0 == error_mark_node || arg1 == error_mark_node)
6693 || GET_MODE (target) != tmode
6694 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6695 target = gen_reg_rtx (tmode);
6697 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6698 op0 = copy_to_mode_reg (mode0, op0);
6699 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
6700 op1 = copy_to_mode_reg (mode1, op1);
6702 scratch = gen_reg_rtx (mode0);
6704 pat = GEN_FCN (icode) (scratch, op0, op1,
6705 gen_rtx_SYMBOL_REF (Pmode, opcode));
6710 /* The vec_any* and vec_all* predicates use the same opcodes for two
6711 different operations, but the bits in CR6 will be different
6712 depending on what information we want. So we have to play tricks
6713 with CR6 to get the right bits out.
6715 If you think this is disgusting, look at the specs for the
6716 AltiVec predicates. */
6718 switch (cr6_form_int)
6721 emit_insn (gen_cr6_test_for_zero (target));
6724 emit_insn (gen_cr6_test_for_zero_reverse (target));
6727 emit_insn (gen_cr6_test_for_lt (target));
6730 emit_insn (gen_cr6_test_for_lt_reverse (target));
6733 error ("argument 1 of __builtin_altivec_predicate is out of range");
6741 altivec_expand_lv_builtin (enum insn_code icode, tree arglist, rtx target)
6744 tree arg0 = TREE_VALUE (arglist);
6745 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
6746 enum machine_mode tmode = insn_data[icode].operand[0].mode;
6747 enum machine_mode mode0 = Pmode;
6748 enum machine_mode mode1 = Pmode;
6749 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6750 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6752 if (icode == CODE_FOR_nothing)
6753 /* Builtin not supported on this processor. */
6756 /* If we got invalid arguments bail out before generating bad rtl. */
6757 if (arg0 == error_mark_node || arg1 == error_mark_node)
6761 || GET_MODE (target) != tmode
6762 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6763 target = gen_reg_rtx (tmode);
6765 op1 = copy_to_mode_reg (mode1, op1);
6767 if (op0 == const0_rtx)
6769 addr = gen_rtx_MEM (tmode, op1);
6773 op0 = copy_to_mode_reg (mode0, op0);
6774 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op0, op1));
6777 pat = GEN_FCN (icode) (target, addr);
6787 spe_expand_stv_builtin (enum insn_code icode, tree arglist)
6789 tree arg0 = TREE_VALUE (arglist);
6790 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
6791 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6792 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6793 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6794 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
6796 enum machine_mode mode0 = insn_data[icode].operand[0].mode;
6797 enum machine_mode mode1 = insn_data[icode].operand[1].mode;
6798 enum machine_mode mode2 = insn_data[icode].operand[2].mode;
6800 /* Invalid arguments. Bail before doing anything stoopid! */
6801 if (arg0 == error_mark_node
6802 || arg1 == error_mark_node
6803 || arg2 == error_mark_node)
6806 if (! (*insn_data[icode].operand[2].predicate) (op0, mode2))
6807 op0 = copy_to_mode_reg (mode2, op0);
6808 if (! (*insn_data[icode].operand[0].predicate) (op1, mode0))
6809 op1 = copy_to_mode_reg (mode0, op1);
6810 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
6811 op2 = copy_to_mode_reg (mode1, op2);
6813 pat = GEN_FCN (icode) (op1, op2, op0);
6820 altivec_expand_stv_builtin (enum insn_code icode, tree arglist)
6822 tree arg0 = TREE_VALUE (arglist);
6823 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
6824 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6825 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6826 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6827 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
6829 enum machine_mode tmode = insn_data[icode].operand[0].mode;
6830 enum machine_mode mode1 = Pmode;
6831 enum machine_mode mode2 = Pmode;
6833 /* Invalid arguments. Bail before doing anything stoopid! */
6834 if (arg0 == error_mark_node
6835 || arg1 == error_mark_node
6836 || arg2 == error_mark_node)
6839 if (! (*insn_data[icode].operand[1].predicate) (op0, tmode))
6840 op0 = copy_to_mode_reg (tmode, op0);
6842 op2 = copy_to_mode_reg (mode2, op2);
6844 if (op1 == const0_rtx)
6846 addr = gen_rtx_MEM (tmode, op2);
6850 op1 = copy_to_mode_reg (mode1, op1);
6851 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op1, op2));
6854 pat = GEN_FCN (icode) (addr, op0);
6861 rs6000_expand_ternop_builtin (enum insn_code icode, tree arglist, rtx target)
6864 tree arg0 = TREE_VALUE (arglist);
6865 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
6866 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6867 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6868 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6869 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
6870 enum machine_mode tmode = insn_data[icode].operand[0].mode;
6871 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
6872 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
6873 enum machine_mode mode2 = insn_data[icode].operand[3].mode;
6875 if (icode == CODE_FOR_nothing)
6876 /* Builtin not supported on this processor. */
6879 /* If we got invalid arguments bail out before generating bad rtl. */
6880 if (arg0 == error_mark_node
6881 || arg1 == error_mark_node
6882 || arg2 == error_mark_node)
6885 if (icode == CODE_FOR_altivec_vsldoi_v4sf
6886 || icode == CODE_FOR_altivec_vsldoi_v4si
6887 || icode == CODE_FOR_altivec_vsldoi_v8hi
6888 || icode == CODE_FOR_altivec_vsldoi_v16qi)
6890 /* Only allow 4-bit unsigned literals. */
6892 if (TREE_CODE (arg2) != INTEGER_CST
6893 || TREE_INT_CST_LOW (arg2) & ~0xf)
6895 error ("argument 3 must be a 4-bit unsigned literal");
6901 || GET_MODE (target) != tmode
6902 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6903 target = gen_reg_rtx (tmode);
6905 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6906 op0 = copy_to_mode_reg (mode0, op0);
6907 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
6908 op1 = copy_to_mode_reg (mode1, op1);
6909 if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
6910 op2 = copy_to_mode_reg (mode2, op2);
6912 pat = GEN_FCN (icode) (target, op0, op1, op2);
6920 /* Expand the lvx builtins. */
6922 altivec_expand_ld_builtin (tree exp, rtx target, bool *expandedp)
6924 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6925 tree arglist = TREE_OPERAND (exp, 1);
6926 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
6928 enum machine_mode tmode, mode0;
6930 enum insn_code icode;
6934 case ALTIVEC_BUILTIN_LD_INTERNAL_16qi:
6935 icode = CODE_FOR_altivec_lvx_v16qi;
6937 case ALTIVEC_BUILTIN_LD_INTERNAL_8hi:
6938 icode = CODE_FOR_altivec_lvx_v8hi;
6940 case ALTIVEC_BUILTIN_LD_INTERNAL_4si:
6941 icode = CODE_FOR_altivec_lvx_v4si;
6943 case ALTIVEC_BUILTIN_LD_INTERNAL_4sf:
6944 icode = CODE_FOR_altivec_lvx_v4sf;
6953 arg0 = TREE_VALUE (arglist);
6954 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6955 tmode = insn_data[icode].operand[0].mode;
6956 mode0 = insn_data[icode].operand[1].mode;
6959 || GET_MODE (target) != tmode
6960 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6961 target = gen_reg_rtx (tmode);
6963 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6964 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
6966 pat = GEN_FCN (icode) (target, op0);
6973 /* Expand the stvx builtins. */
6975 altivec_expand_st_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
6978 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6979 tree arglist = TREE_OPERAND (exp, 1);
6980 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
6982 enum machine_mode mode0, mode1;
6984 enum insn_code icode;
6988 case ALTIVEC_BUILTIN_ST_INTERNAL_16qi:
6989 icode = CODE_FOR_altivec_stvx_v16qi;
6991 case ALTIVEC_BUILTIN_ST_INTERNAL_8hi:
6992 icode = CODE_FOR_altivec_stvx_v8hi;
6994 case ALTIVEC_BUILTIN_ST_INTERNAL_4si:
6995 icode = CODE_FOR_altivec_stvx_v4si;
6997 case ALTIVEC_BUILTIN_ST_INTERNAL_4sf:
6998 icode = CODE_FOR_altivec_stvx_v4sf;
7005 arg0 = TREE_VALUE (arglist);
7006 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
7007 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
7008 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
7009 mode0 = insn_data[icode].operand[0].mode;
7010 mode1 = insn_data[icode].operand[1].mode;
7012 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
7013 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
7014 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
7015 op1 = copy_to_mode_reg (mode1, op1);
7017 pat = GEN_FCN (icode) (op0, op1);
7025 /* Expand the dst builtins. */
7027 altivec_expand_dst_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
7030 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7031 tree arglist = TREE_OPERAND (exp, 1);
7032 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
7033 tree arg0, arg1, arg2;
7034 enum machine_mode mode0, mode1, mode2;
7035 rtx pat, op0, op1, op2;
7036 struct builtin_description *d;
7041 /* Handle DST variants. */
7042 d = (struct builtin_description *) bdesc_dst;
7043 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
7044 if (d->code == fcode)
7046 arg0 = TREE_VALUE (arglist);
7047 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
7048 arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
7049 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
7050 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
7051 op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
7052 mode0 = insn_data[d->icode].operand[0].mode;
7053 mode1 = insn_data[d->icode].operand[1].mode;
7054 mode2 = insn_data[d->icode].operand[2].mode;
7056 /* Invalid arguments, bail out before generating bad rtl. */
7057 if (arg0 == error_mark_node
7058 || arg1 == error_mark_node
7059 || arg2 == error_mark_node)
7064 if (TREE_CODE (arg2) != INTEGER_CST
7065 || TREE_INT_CST_LOW (arg2) & ~0x3)
7067 error ("argument to %qs must be a 2-bit unsigned literal", d->name);
7071 if (! (*insn_data[d->icode].operand[0].predicate) (op0, mode0))
7072 op0 = copy_to_mode_reg (Pmode, op0);
7073 if (! (*insn_data[d->icode].operand[1].predicate) (op1, mode1))
7074 op1 = copy_to_mode_reg (mode1, op1);
7076 pat = GEN_FCN (d->icode) (op0, op1, op2);
7086 /* Expand vec_init builtin. */
7088 altivec_expand_vec_init_builtin (tree type, tree arglist, rtx target)
7090 enum machine_mode tmode = TYPE_MODE (type);
7091 enum machine_mode inner_mode = GET_MODE_INNER (tmode);
7092 int i, n_elt = GET_MODE_NUNITS (tmode);
7093 rtvec v = rtvec_alloc (n_elt);
7095 gcc_assert (VECTOR_MODE_P (tmode));
7097 for (i = 0; i < n_elt; ++i, arglist = TREE_CHAIN (arglist))
7099 rtx x = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
7100 RTVEC_ELT (v, i) = gen_lowpart (inner_mode, x);
7103 gcc_assert (arglist == NULL);
7105 if (!target || !register_operand (target, tmode))
7106 target = gen_reg_rtx (tmode);
7108 rs6000_expand_vector_init (target, gen_rtx_PARALLEL (tmode, v));
7112 /* Return the integer constant in ARG. Constrain it to be in the range
7113 of the subparts of VEC_TYPE; issue an error if not. */
7116 get_element_number (tree vec_type, tree arg)
7118 unsigned HOST_WIDE_INT elt, max = TYPE_VECTOR_SUBPARTS (vec_type) - 1;
7120 if (!host_integerp (arg, 1)
7121 || (elt = tree_low_cst (arg, 1), elt > max))
7123 error ("selector must be an integer constant in the range 0..%wi", max);
7130 /* Expand vec_set builtin. */
7132 altivec_expand_vec_set_builtin (tree arglist)
7134 enum machine_mode tmode, mode1;
7135 tree arg0, arg1, arg2;
7139 arg0 = TREE_VALUE (arglist);
7140 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
7141 arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
7143 tmode = TYPE_MODE (TREE_TYPE (arg0));
7144 mode1 = TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0)));
7145 gcc_assert (VECTOR_MODE_P (tmode));
7147 op0 = expand_expr (arg0, NULL_RTX, tmode, 0);
7148 op1 = expand_expr (arg1, NULL_RTX, mode1, 0);
7149 elt = get_element_number (TREE_TYPE (arg0), arg2);
7151 if (GET_MODE (op1) != mode1 && GET_MODE (op1) != VOIDmode)
7152 op1 = convert_modes (mode1, GET_MODE (op1), op1, true);
7154 op0 = force_reg (tmode, op0);
7155 op1 = force_reg (mode1, op1);
7157 rs6000_expand_vector_set (op0, op1, elt);
7162 /* Expand vec_ext builtin. */
7164 altivec_expand_vec_ext_builtin (tree arglist, rtx target)
7166 enum machine_mode tmode, mode0;
7171 arg0 = TREE_VALUE (arglist);
7172 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
7174 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
7175 elt = get_element_number (TREE_TYPE (arg0), arg1);
7177 tmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0)));
7178 mode0 = TYPE_MODE (TREE_TYPE (arg0));
7179 gcc_assert (VECTOR_MODE_P (mode0));
7181 op0 = force_reg (mode0, op0);
7183 if (optimize || !target || !register_operand (target, tmode))
7184 target = gen_reg_rtx (tmode);
7186 rs6000_expand_vector_extract (target, op0, elt);
7191 /* Expand the builtin in EXP and store the result in TARGET. Store
7192 true in *EXPANDEDP if we found a builtin to expand. */
7194 altivec_expand_builtin (tree exp, rtx target, bool *expandedp)
7196 struct builtin_description *d;
7197 struct builtin_description_predicates *dp;
7199 enum insn_code icode;
7200 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7201 tree arglist = TREE_OPERAND (exp, 1);
7204 enum machine_mode tmode, mode0;
7205 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
7207 if (fcode >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
7208 && fcode <= ALTIVEC_BUILTIN_OVERLOADED_LAST)
7211 error ("unresolved overload for Altivec builtin %qF", fndecl);
7215 target = altivec_expand_ld_builtin (exp, target, expandedp);
7219 target = altivec_expand_st_builtin (exp, target, expandedp);
7223 target = altivec_expand_dst_builtin (exp, target, expandedp);
7231 case ALTIVEC_BUILTIN_STVX:
7232 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx, arglist);
7233 case ALTIVEC_BUILTIN_STVEBX:
7234 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx, arglist);
7235 case ALTIVEC_BUILTIN_STVEHX:
7236 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx, arglist);
7237 case ALTIVEC_BUILTIN_STVEWX:
7238 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvewx, arglist);
7239 case ALTIVEC_BUILTIN_STVXL:
7240 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvxl, arglist);
7242 case ALTIVEC_BUILTIN_MFVSCR:
7243 icode = CODE_FOR_altivec_mfvscr;
7244 tmode = insn_data[icode].operand[0].mode;
7247 || GET_MODE (target) != tmode
7248 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7249 target = gen_reg_rtx (tmode);
7251 pat = GEN_FCN (icode) (target);
7257 case ALTIVEC_BUILTIN_MTVSCR:
7258 icode = CODE_FOR_altivec_mtvscr;
7259 arg0 = TREE_VALUE (arglist);
7260 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
7261 mode0 = insn_data[icode].operand[0].mode;
7263 /* If we got invalid arguments bail out before generating bad rtl. */
7264 if (arg0 == error_mark_node)
7267 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
7268 op0 = copy_to_mode_reg (mode0, op0);
7270 pat = GEN_FCN (icode) (op0);
7275 case ALTIVEC_BUILTIN_DSSALL:
7276 emit_insn (gen_altivec_dssall ());
7279 case ALTIVEC_BUILTIN_DSS:
7280 icode = CODE_FOR_altivec_dss;
7281 arg0 = TREE_VALUE (arglist);
7283 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
7284 mode0 = insn_data[icode].operand[0].mode;
7286 /* If we got invalid arguments bail out before generating bad rtl. */
7287 if (arg0 == error_mark_node)
7290 if (TREE_CODE (arg0) != INTEGER_CST
7291 || TREE_INT_CST_LOW (arg0) & ~0x3)
7293 error ("argument to dss must be a 2-bit unsigned literal");
7297 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
7298 op0 = copy_to_mode_reg (mode0, op0);
7300 emit_insn (gen_altivec_dss (op0));
7303 case ALTIVEC_BUILTIN_VEC_INIT_V4SI:
7304 case ALTIVEC_BUILTIN_VEC_INIT_V8HI:
7305 case ALTIVEC_BUILTIN_VEC_INIT_V16QI:
7306 case ALTIVEC_BUILTIN_VEC_INIT_V4SF:
7307 return altivec_expand_vec_init_builtin (TREE_TYPE (exp), arglist, target);
7309 case ALTIVEC_BUILTIN_VEC_SET_V4SI:
7310 case ALTIVEC_BUILTIN_VEC_SET_V8HI:
7311 case ALTIVEC_BUILTIN_VEC_SET_V16QI:
7312 case ALTIVEC_BUILTIN_VEC_SET_V4SF:
7313 return altivec_expand_vec_set_builtin (arglist);
7315 case ALTIVEC_BUILTIN_VEC_EXT_V4SI:
7316 case ALTIVEC_BUILTIN_VEC_EXT_V8HI:
7317 case ALTIVEC_BUILTIN_VEC_EXT_V16QI:
7318 case ALTIVEC_BUILTIN_VEC_EXT_V4SF:
7319 return altivec_expand_vec_ext_builtin (arglist, target);
7326 /* Expand abs* operations. */
7327 d = (struct builtin_description *) bdesc_abs;
7328 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
7329 if (d->code == fcode)
7330 return altivec_expand_abs_builtin (d->icode, arglist, target);
7332 /* Expand the AltiVec predicates. */
7333 dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
7334 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
7335 if (dp->code == fcode)
7336 return altivec_expand_predicate_builtin (dp->icode, dp->opcode,
7339 /* LV* are funky. We initialized them differently. */
7342 case ALTIVEC_BUILTIN_LVSL:
7343 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvsl,
7345 case ALTIVEC_BUILTIN_LVSR:
7346 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvsr,
7348 case ALTIVEC_BUILTIN_LVEBX:
7349 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvebx,
7351 case ALTIVEC_BUILTIN_LVEHX:
7352 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvehx,
7354 case ALTIVEC_BUILTIN_LVEWX:
7355 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvewx,
7357 case ALTIVEC_BUILTIN_LVXL:
7358 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvxl,
7360 case ALTIVEC_BUILTIN_LVX:
7361 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvx,
7372 /* Binops that need to be initialized manually, but can be expanded
7373 automagically by rs6000_expand_binop_builtin. */
7374 static struct builtin_description bdesc_2arg_spe[] =
7376 { 0, CODE_FOR_spe_evlddx, "__builtin_spe_evlddx", SPE_BUILTIN_EVLDDX },
7377 { 0, CODE_FOR_spe_evldwx, "__builtin_spe_evldwx", SPE_BUILTIN_EVLDWX },
7378 { 0, CODE_FOR_spe_evldhx, "__builtin_spe_evldhx", SPE_BUILTIN_EVLDHX },
7379 { 0, CODE_FOR_spe_evlwhex, "__builtin_spe_evlwhex", SPE_BUILTIN_EVLWHEX },
7380 { 0, CODE_FOR_spe_evlwhoux, "__builtin_spe_evlwhoux", SPE_BUILTIN_EVLWHOUX },
7381 { 0, CODE_FOR_spe_evlwhosx, "__builtin_spe_evlwhosx", SPE_BUILTIN_EVLWHOSX },
7382 { 0, CODE_FOR_spe_evlwwsplatx, "__builtin_spe_evlwwsplatx", SPE_BUILTIN_EVLWWSPLATX },
7383 { 0, CODE_FOR_spe_evlwhsplatx, "__builtin_spe_evlwhsplatx", SPE_BUILTIN_EVLWHSPLATX },
7384 { 0, CODE_FOR_spe_evlhhesplatx, "__builtin_spe_evlhhesplatx", SPE_BUILTIN_EVLHHESPLATX },
7385 { 0, CODE_FOR_spe_evlhhousplatx, "__builtin_spe_evlhhousplatx", SPE_BUILTIN_EVLHHOUSPLATX },
7386 { 0, CODE_FOR_spe_evlhhossplatx, "__builtin_spe_evlhhossplatx", SPE_BUILTIN_EVLHHOSSPLATX },
7387 { 0, CODE_FOR_spe_evldd, "__builtin_spe_evldd", SPE_BUILTIN_EVLDD },
7388 { 0, CODE_FOR_spe_evldw, "__builtin_spe_evldw", SPE_BUILTIN_EVLDW },
7389 { 0, CODE_FOR_spe_evldh, "__builtin_spe_evldh", SPE_BUILTIN_EVLDH },
7390 { 0, CODE_FOR_spe_evlwhe, "__builtin_spe_evlwhe", SPE_BUILTIN_EVLWHE },
7391 { 0, CODE_FOR_spe_evlwhou, "__builtin_spe_evlwhou", SPE_BUILTIN_EVLWHOU },
7392 { 0, CODE_FOR_spe_evlwhos, "__builtin_spe_evlwhos", SPE_BUILTIN_EVLWHOS },
7393 { 0, CODE_FOR_spe_evlwwsplat, "__builtin_spe_evlwwsplat", SPE_BUILTIN_EVLWWSPLAT },
7394 { 0, CODE_FOR_spe_evlwhsplat, "__builtin_spe_evlwhsplat", SPE_BUILTIN_EVLWHSPLAT },
7395 { 0, CODE_FOR_spe_evlhhesplat, "__builtin_spe_evlhhesplat", SPE_BUILTIN_EVLHHESPLAT },
7396 { 0, CODE_FOR_spe_evlhhousplat, "__builtin_spe_evlhhousplat", SPE_BUILTIN_EVLHHOUSPLAT },
7397 { 0, CODE_FOR_spe_evlhhossplat, "__builtin_spe_evlhhossplat", SPE_BUILTIN_EVLHHOSSPLAT }
7400 /* Expand the builtin in EXP and store the result in TARGET. Store
7401 true in *EXPANDEDP if we found a builtin to expand.
7403 This expands the SPE builtins that are not simple unary and binary
7406 spe_expand_builtin (tree exp, rtx target, bool *expandedp)
7408 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7409 tree arglist = TREE_OPERAND (exp, 1);
7411 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
7412 enum insn_code icode;
7413 enum machine_mode tmode, mode0;
7415 struct builtin_description *d;
7420 /* Syntax check for a 5-bit unsigned immediate. */
7423 case SPE_BUILTIN_EVSTDD:
7424 case SPE_BUILTIN_EVSTDH:
7425 case SPE_BUILTIN_EVSTDW:
7426 case SPE_BUILTIN_EVSTWHE:
7427 case SPE_BUILTIN_EVSTWHO:
7428 case SPE_BUILTIN_EVSTWWE:
7429 case SPE_BUILTIN_EVSTWWO:
7430 arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
7431 if (TREE_CODE (arg1) != INTEGER_CST
7432 || TREE_INT_CST_LOW (arg1) & ~0x1f)
7434 error ("argument 2 must be a 5-bit unsigned literal");
7442 /* The evsplat*i instructions are not quite generic. */
7445 case SPE_BUILTIN_EVSPLATFI:
7446 return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplatfi,
7448 case SPE_BUILTIN_EVSPLATI:
7449 return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplati,
7455 d = (struct builtin_description *) bdesc_2arg_spe;
7456 for (i = 0; i < ARRAY_SIZE (bdesc_2arg_spe); ++i, ++d)
7457 if (d->code == fcode)
7458 return rs6000_expand_binop_builtin (d->icode, arglist, target);
7460 d = (struct builtin_description *) bdesc_spe_predicates;
7461 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, ++d)
7462 if (d->code == fcode)
7463 return spe_expand_predicate_builtin (d->icode, arglist, target);
7465 d = (struct builtin_description *) bdesc_spe_evsel;
7466 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, ++d)
7467 if (d->code == fcode)
7468 return spe_expand_evsel_builtin (d->icode, arglist, target);
7472 case SPE_BUILTIN_EVSTDDX:
7473 return spe_expand_stv_builtin (CODE_FOR_spe_evstddx, arglist);
7474 case SPE_BUILTIN_EVSTDHX:
7475 return spe_expand_stv_builtin (CODE_FOR_spe_evstdhx, arglist);
7476 case SPE_BUILTIN_EVSTDWX:
7477 return spe_expand_stv_builtin (CODE_FOR_spe_evstdwx, arglist);
7478 case SPE_BUILTIN_EVSTWHEX:
7479 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhex, arglist);
7480 case SPE_BUILTIN_EVSTWHOX:
7481 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhox, arglist);
7482 case SPE_BUILTIN_EVSTWWEX:
7483 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwex, arglist);
7484 case SPE_BUILTIN_EVSTWWOX:
7485 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwox, arglist);
7486 case SPE_BUILTIN_EVSTDD:
7487 return spe_expand_stv_builtin (CODE_FOR_spe_evstdd, arglist);
7488 case SPE_BUILTIN_EVSTDH:
7489 return spe_expand_stv_builtin (CODE_FOR_spe_evstdh, arglist);
7490 case SPE_BUILTIN_EVSTDW:
7491 return spe_expand_stv_builtin (CODE_FOR_spe_evstdw, arglist);
7492 case SPE_BUILTIN_EVSTWHE:
7493 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhe, arglist);
7494 case SPE_BUILTIN_EVSTWHO:
7495 return spe_expand_stv_builtin (CODE_FOR_spe_evstwho, arglist);
7496 case SPE_BUILTIN_EVSTWWE:
7497 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwe, arglist);
7498 case SPE_BUILTIN_EVSTWWO:
7499 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwo, arglist);
7500 case SPE_BUILTIN_MFSPEFSCR:
7501 icode = CODE_FOR_spe_mfspefscr;
7502 tmode = insn_data[icode].operand[0].mode;
7505 || GET_MODE (target) != tmode
7506 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7507 target = gen_reg_rtx (tmode);
7509 pat = GEN_FCN (icode) (target);
7514 case SPE_BUILTIN_MTSPEFSCR:
7515 icode = CODE_FOR_spe_mtspefscr;
7516 arg0 = TREE_VALUE (arglist);
7517 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
7518 mode0 = insn_data[icode].operand[0].mode;
7520 if (arg0 == error_mark_node)
7523 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
7524 op0 = copy_to_mode_reg (mode0, op0);
7526 pat = GEN_FCN (icode) (op0);
7539 spe_expand_predicate_builtin (enum insn_code icode, tree arglist, rtx target)
7541 rtx pat, scratch, tmp;
7542 tree form = TREE_VALUE (arglist);
7543 tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
7544 tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
7545 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
7546 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
7547 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7548 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
7552 if (TREE_CODE (form) != INTEGER_CST)
7554 error ("argument 1 of __builtin_spe_predicate must be a constant");
7558 form_int = TREE_INT_CST_LOW (form);
7560 gcc_assert (mode0 == mode1);
7562 if (arg0 == error_mark_node || arg1 == error_mark_node)
7566 || GET_MODE (target) != SImode
7567 || ! (*insn_data[icode].operand[0].predicate) (target, SImode))
7568 target = gen_reg_rtx (SImode);
7570 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7571 op0 = copy_to_mode_reg (mode0, op0);
7572 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
7573 op1 = copy_to_mode_reg (mode1, op1);
7575 scratch = gen_reg_rtx (CCmode);
7577 pat = GEN_FCN (icode) (scratch, op0, op1);
7582 /* There are 4 variants for each predicate: _any_, _all_, _upper_,
7583 _lower_. We use one compare, but look in different bits of the
7584 CR for each variant.
7586 There are 2 elements in each SPE simd type (upper/lower). The CR
7587 bits are set as follows:
7589 BIT0 | BIT 1 | BIT 2 | BIT 3
7590 U | L | (U | L) | (U & L)
7592 So, for an "all" relationship, BIT 3 would be set.
7593 For an "any" relationship, BIT 2 would be set. Etc.
7595 Following traditional nomenclature, these bits map to:
7597 BIT0 | BIT 1 | BIT 2 | BIT 3
7600 Later, we will generate rtl to look in the LT/EQ/EQ/OV bits.
7605 /* All variant. OV bit. */
7607 /* We need to get to the OV bit, which is the ORDERED bit. We
7608 could generate (ordered:SI (reg:CC xx) (const_int 0)), but
7609 that's ugly and will make validate_condition_mode die.
7610 So let's just use another pattern. */
7611 emit_insn (gen_move_from_CR_ov_bit (target, scratch));
7613 /* Any variant. EQ bit. */
7617 /* Upper variant. LT bit. */
7621 /* Lower variant. GT bit. */
7626 error ("argument 1 of __builtin_spe_predicate is out of range");
7630 tmp = gen_rtx_fmt_ee (code, SImode, scratch, const0_rtx);
7631 emit_move_insn (target, tmp);
7636 /* The evsel builtins look like this:
7638 e = __builtin_spe_evsel_OP (a, b, c, d);
7642 e[upper] = a[upper] *OP* b[upper] ? c[upper] : d[upper];
7643 e[lower] = a[lower] *OP* b[lower] ? c[lower] : d[lower];
7647 spe_expand_evsel_builtin (enum insn_code icode, tree arglist, rtx target)
7650 tree arg0 = TREE_VALUE (arglist);
7651 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
7652 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
7653 tree arg3 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arglist))));
7654 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
7655 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
7656 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
7657 rtx op3 = expand_expr (arg3, NULL_RTX, VOIDmode, 0);
7658 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7659 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
7661 gcc_assert (mode0 == mode1);
7663 if (arg0 == error_mark_node || arg1 == error_mark_node
7664 || arg2 == error_mark_node || arg3 == error_mark_node)
7668 || GET_MODE (target) != mode0
7669 || ! (*insn_data[icode].operand[0].predicate) (target, mode0))
7670 target = gen_reg_rtx (mode0);
7672 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7673 op0 = copy_to_mode_reg (mode0, op0);
7674 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
7675 op1 = copy_to_mode_reg (mode0, op1);
7676 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
7677 op2 = copy_to_mode_reg (mode0, op2);
7678 if (! (*insn_data[icode].operand[1].predicate) (op3, mode1))
7679 op3 = copy_to_mode_reg (mode0, op3);
7681 /* Generate the compare. */
7682 scratch = gen_reg_rtx (CCmode);
7683 pat = GEN_FCN (icode) (scratch, op0, op1);
7688 if (mode0 == V2SImode)
7689 emit_insn (gen_spe_evsel (target, op2, op3, scratch));
7691 emit_insn (gen_spe_evsel_fs (target, op2, op3, scratch));
7696 /* Expand an expression EXP that calls a built-in function,
7697 with result going to TARGET if that's convenient
7698 (and in mode MODE if that's convenient).
7699 SUBTARGET may be used as the target for computing one of EXP's operands.
7700 IGNORE is nonzero if the value is to be ignored. */
7703 rs6000_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
7704 enum machine_mode mode ATTRIBUTE_UNUSED,
7705 int ignore ATTRIBUTE_UNUSED)
7707 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7708 tree arglist = TREE_OPERAND (exp, 1);
7709 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
7710 struct builtin_description *d;
7715 if (fcode == ALTIVEC_BUILTIN_MASK_FOR_LOAD
7716 || fcode == ALTIVEC_BUILTIN_MASK_FOR_STORE)
7718 int icode = (int) CODE_FOR_altivec_lvsr;
7719 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7720 enum machine_mode mode = insn_data[icode].operand[1].mode;
7724 gcc_assert (TARGET_ALTIVEC);
7726 arg = TREE_VALUE (arglist);
7727 gcc_assert (TREE_CODE (TREE_TYPE (arg)) == POINTER_TYPE);
7728 op = expand_expr (arg, NULL_RTX, Pmode, EXPAND_NORMAL);
7729 addr = memory_address (mode, op);
7730 if (fcode == ALTIVEC_BUILTIN_MASK_FOR_STORE)
7734 /* For the load case need to negate the address. */
7735 op = gen_reg_rtx (GET_MODE (addr));
7736 emit_insn (gen_rtx_SET (VOIDmode, op,
7737 gen_rtx_NEG (GET_MODE (addr), addr)));
7739 op = gen_rtx_MEM (mode, op);
7742 || GET_MODE (target) != tmode
7743 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7744 target = gen_reg_rtx (tmode);
7746 /*pat = gen_altivec_lvsr (target, op);*/
7747 pat = GEN_FCN (icode) (target, op);
7757 ret = altivec_expand_builtin (exp, target, &success);
7764 ret = spe_expand_builtin (exp, target, &success);
7770 gcc_assert (TARGET_ALTIVEC || TARGET_SPE);
7772 /* Handle simple unary operations. */
7773 d = (struct builtin_description *) bdesc_1arg;
7774 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
7775 if (d->code == fcode)
7776 return rs6000_expand_unop_builtin (d->icode, arglist, target);
7778 /* Handle simple binary operations. */
7779 d = (struct builtin_description *) bdesc_2arg;
7780 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
7781 if (d->code == fcode)
7782 return rs6000_expand_binop_builtin (d->icode, arglist, target);
7784 /* Handle simple ternary operations. */
7785 d = (struct builtin_description *) bdesc_3arg;
7786 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
7787 if (d->code == fcode)
7788 return rs6000_expand_ternop_builtin (d->icode, arglist, target);
7794 build_opaque_vector_type (tree node, int nunits)
7796 node = copy_node (node);
7797 TYPE_MAIN_VARIANT (node) = node;
7798 return build_vector_type (node, nunits);
7802 rs6000_init_builtins (void)
7804 V2SI_type_node = build_vector_type (intSI_type_node, 2);
7805 V2SF_type_node = build_vector_type (float_type_node, 2);
7806 V4HI_type_node = build_vector_type (intHI_type_node, 4);
7807 V4SI_type_node = build_vector_type (intSI_type_node, 4);
7808 V4SF_type_node = build_vector_type (float_type_node, 4);
7809 V8HI_type_node = build_vector_type (intHI_type_node, 8);
7810 V16QI_type_node = build_vector_type (intQI_type_node, 16);
7812 unsigned_V16QI_type_node = build_vector_type (unsigned_intQI_type_node, 16);
7813 unsigned_V8HI_type_node = build_vector_type (unsigned_intHI_type_node, 8);
7814 unsigned_V4SI_type_node = build_vector_type (unsigned_intSI_type_node, 4);
7816 opaque_V2SF_type_node = build_opaque_vector_type (float_type_node, 2);
7817 opaque_V2SI_type_node = build_opaque_vector_type (intSI_type_node, 2);
7818 opaque_p_V2SI_type_node = build_pointer_type (opaque_V2SI_type_node);
7819 opaque_V4SI_type_node = copy_node (V4SI_type_node);
7821 /* The 'vector bool ...' types must be kept distinct from 'vector unsigned ...'
7822 types, especially in C++ land. Similarly, 'vector pixel' is distinct from
7823 'vector unsigned short'. */
7825 bool_char_type_node = build_distinct_type_copy (unsigned_intQI_type_node);
7826 bool_short_type_node = build_distinct_type_copy (unsigned_intHI_type_node);
7827 bool_int_type_node = build_distinct_type_copy (unsigned_intSI_type_node);
7828 pixel_type_node = build_distinct_type_copy (unsigned_intHI_type_node);
7830 long_integer_type_internal_node = long_integer_type_node;
7831 long_unsigned_type_internal_node = long_unsigned_type_node;
7832 intQI_type_internal_node = intQI_type_node;
7833 uintQI_type_internal_node = unsigned_intQI_type_node;
7834 intHI_type_internal_node = intHI_type_node;
7835 uintHI_type_internal_node = unsigned_intHI_type_node;
7836 intSI_type_internal_node = intSI_type_node;
7837 uintSI_type_internal_node = unsigned_intSI_type_node;
7838 float_type_internal_node = float_type_node;
7839 void_type_internal_node = void_type_node;
7841 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7842 get_identifier ("__bool char"),
7843 bool_char_type_node));
7844 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7845 get_identifier ("__bool short"),
7846 bool_short_type_node));
7847 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7848 get_identifier ("__bool int"),
7849 bool_int_type_node));
7850 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7851 get_identifier ("__pixel"),
7854 bool_V16QI_type_node = build_vector_type (bool_char_type_node, 16);
7855 bool_V8HI_type_node = build_vector_type (bool_short_type_node, 8);
7856 bool_V4SI_type_node = build_vector_type (bool_int_type_node, 4);
7857 pixel_V8HI_type_node = build_vector_type (pixel_type_node, 8);
7859 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7860 get_identifier ("__vector unsigned char"),
7861 unsigned_V16QI_type_node));
7862 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7863 get_identifier ("__vector signed char"),
7865 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7866 get_identifier ("__vector __bool char"),
7867 bool_V16QI_type_node));
7869 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7870 get_identifier ("__vector unsigned short"),
7871 unsigned_V8HI_type_node));
7872 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7873 get_identifier ("__vector signed short"),
7875 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7876 get_identifier ("__vector __bool short"),
7877 bool_V8HI_type_node));
7879 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7880 get_identifier ("__vector unsigned int"),
7881 unsigned_V4SI_type_node));
7882 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7883 get_identifier ("__vector signed int"),
7885 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7886 get_identifier ("__vector __bool int"),
7887 bool_V4SI_type_node));
7889 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7890 get_identifier ("__vector float"),
7892 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7893 get_identifier ("__vector __pixel"),
7894 pixel_V8HI_type_node));
7897 spe_init_builtins ();
7899 altivec_init_builtins ();
7900 if (TARGET_ALTIVEC || TARGET_SPE)
7901 rs6000_common_init_builtins ();
7904 /* Search through a set of builtins and enable the mask bits.
7905 DESC is an array of builtins.
7906 SIZE is the total number of builtins.
7907 START is the builtin enum at which to start.
7908 END is the builtin enum at which to end. */
7910 enable_mask_for_builtins (struct builtin_description *desc, int size,
7911 enum rs6000_builtins start,
7912 enum rs6000_builtins end)
7916 for (i = 0; i < size; ++i)
7917 if (desc[i].code == start)
7923 for (; i < size; ++i)
7925 /* Flip all the bits on. */
7926 desc[i].mask = target_flags;
7927 if (desc[i].code == end)
7933 spe_init_builtins (void)
7935 tree endlink = void_list_node;
7936 tree puint_type_node = build_pointer_type (unsigned_type_node);
7937 tree pushort_type_node = build_pointer_type (short_unsigned_type_node);
7938 struct builtin_description *d;
7941 tree v2si_ftype_4_v2si
7942 = build_function_type
7943 (opaque_V2SI_type_node,
7944 tree_cons (NULL_TREE, opaque_V2SI_type_node,
7945 tree_cons (NULL_TREE, opaque_V2SI_type_node,
7946 tree_cons (NULL_TREE, opaque_V2SI_type_node,
7947 tree_cons (NULL_TREE, opaque_V2SI_type_node,
7950 tree v2sf_ftype_4_v2sf
7951 = build_function_type
7952 (opaque_V2SF_type_node,
7953 tree_cons (NULL_TREE, opaque_V2SF_type_node,
7954 tree_cons (NULL_TREE, opaque_V2SF_type_node,
7955 tree_cons (NULL_TREE, opaque_V2SF_type_node,
7956 tree_cons (NULL_TREE, opaque_V2SF_type_node,
7959 tree int_ftype_int_v2si_v2si
7960 = build_function_type
7962 tree_cons (NULL_TREE, integer_type_node,
7963 tree_cons (NULL_TREE, opaque_V2SI_type_node,
7964 tree_cons (NULL_TREE, opaque_V2SI_type_node,
7967 tree int_ftype_int_v2sf_v2sf
7968 = build_function_type
7970 tree_cons (NULL_TREE, integer_type_node,
7971 tree_cons (NULL_TREE, opaque_V2SF_type_node,
7972 tree_cons (NULL_TREE, opaque_V2SF_type_node,
7975 tree void_ftype_v2si_puint_int
7976 = build_function_type (void_type_node,
7977 tree_cons (NULL_TREE, opaque_V2SI_type_node,
7978 tree_cons (NULL_TREE, puint_type_node,
7979 tree_cons (NULL_TREE,
7983 tree void_ftype_v2si_puint_char
7984 = build_function_type (void_type_node,
7985 tree_cons (NULL_TREE, opaque_V2SI_type_node,
7986 tree_cons (NULL_TREE, puint_type_node,
7987 tree_cons (NULL_TREE,
7991 tree void_ftype_v2si_pv2si_int
7992 = build_function_type (void_type_node,
7993 tree_cons (NULL_TREE, opaque_V2SI_type_node,
7994 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
7995 tree_cons (NULL_TREE,
7999 tree void_ftype_v2si_pv2si_char
8000 = build_function_type (void_type_node,
8001 tree_cons (NULL_TREE, opaque_V2SI_type_node,
8002 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
8003 tree_cons (NULL_TREE,
8008 = build_function_type (void_type_node,
8009 tree_cons (NULL_TREE, integer_type_node, endlink));
8012 = build_function_type (integer_type_node, endlink);
8014 tree v2si_ftype_pv2si_int
8015 = build_function_type (opaque_V2SI_type_node,
8016 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
8017 tree_cons (NULL_TREE, integer_type_node,
8020 tree v2si_ftype_puint_int
8021 = build_function_type (opaque_V2SI_type_node,
8022 tree_cons (NULL_TREE, puint_type_node,
8023 tree_cons (NULL_TREE, integer_type_node,
8026 tree v2si_ftype_pushort_int
8027 = build_function_type (opaque_V2SI_type_node,
8028 tree_cons (NULL_TREE, pushort_type_node,
8029 tree_cons (NULL_TREE, integer_type_node,
8032 tree v2si_ftype_signed_char
8033 = build_function_type (opaque_V2SI_type_node,
8034 tree_cons (NULL_TREE, signed_char_type_node,
8037 /* The initialization of the simple binary and unary builtins is
8038 done in rs6000_common_init_builtins, but we have to enable the
8039 mask bits here manually because we have run out of `target_flags'
8040 bits. We really need to redesign this mask business. */
8042 enable_mask_for_builtins ((struct builtin_description *) bdesc_2arg,
8043 ARRAY_SIZE (bdesc_2arg),
8046 enable_mask_for_builtins ((struct builtin_description *) bdesc_1arg,
8047 ARRAY_SIZE (bdesc_1arg),
8049 SPE_BUILTIN_EVSUBFUSIAAW);
8050 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_predicates,
8051 ARRAY_SIZE (bdesc_spe_predicates),
8052 SPE_BUILTIN_EVCMPEQ,
8053 SPE_BUILTIN_EVFSTSTLT);
8054 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_evsel,
8055 ARRAY_SIZE (bdesc_spe_evsel),
8056 SPE_BUILTIN_EVSEL_CMPGTS,
8057 SPE_BUILTIN_EVSEL_FSTSTEQ);
8059 (*lang_hooks.decls.pushdecl)
8060 (build_decl (TYPE_DECL, get_identifier ("__ev64_opaque__"),
8061 opaque_V2SI_type_node));
8063 /* Initialize irregular SPE builtins. */
8065 def_builtin (target_flags, "__builtin_spe_mtspefscr", void_ftype_int, SPE_BUILTIN_MTSPEFSCR);
8066 def_builtin (target_flags, "__builtin_spe_mfspefscr", int_ftype_void, SPE_BUILTIN_MFSPEFSCR);
8067 def_builtin (target_flags, "__builtin_spe_evstddx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDDX);
8068 def_builtin (target_flags, "__builtin_spe_evstdhx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDHX);
8069 def_builtin (target_flags, "__builtin_spe_evstdwx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDWX);
8070 def_builtin (target_flags, "__builtin_spe_evstwhex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHEX);
8071 def_builtin (target_flags, "__builtin_spe_evstwhox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHOX);
8072 def_builtin (target_flags, "__builtin_spe_evstwwex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWEX);
8073 def_builtin (target_flags, "__builtin_spe_evstwwox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWOX);
8074 def_builtin (target_flags, "__builtin_spe_evstdd", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDD);
8075 def_builtin (target_flags, "__builtin_spe_evstdh", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDH);
8076 def_builtin (target_flags, "__builtin_spe_evstdw", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDW);
8077 def_builtin (target_flags, "__builtin_spe_evstwhe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHE);
8078 def_builtin (target_flags, "__builtin_spe_evstwho", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHO);
8079 def_builtin (target_flags, "__builtin_spe_evstwwe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWE);
8080 def_builtin (target_flags, "__builtin_spe_evstwwo", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWO);
8081 def_builtin (target_flags, "__builtin_spe_evsplatfi", v2si_ftype_signed_char, SPE_BUILTIN_EVSPLATFI);
8082 def_builtin (target_flags, "__builtin_spe_evsplati", v2si_ftype_signed_char, SPE_BUILTIN_EVSPLATI);
8085 def_builtin (target_flags, "__builtin_spe_evlddx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDDX);
8086 def_builtin (target_flags, "__builtin_spe_evldwx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDWX);
8087 def_builtin (target_flags, "__builtin_spe_evldhx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDHX);
8088 def_builtin (target_flags, "__builtin_spe_evlwhex", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHEX);
8089 def_builtin (target_flags, "__builtin_spe_evlwhoux", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOUX);
8090 def_builtin (target_flags, "__builtin_spe_evlwhosx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOSX);
8091 def_builtin (target_flags, "__builtin_spe_evlwwsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLATX);
8092 def_builtin (target_flags, "__builtin_spe_evlwhsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLATX);
8093 def_builtin (target_flags, "__builtin_spe_evlhhesplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLATX);
8094 def_builtin (target_flags, "__builtin_spe_evlhhousplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLATX);
8095 def_builtin (target_flags, "__builtin_spe_evlhhossplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLATX);
8096 def_builtin (target_flags, "__builtin_spe_evldd", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDD);
8097 def_builtin (target_flags, "__builtin_spe_evldw", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDW);
8098 def_builtin (target_flags, "__builtin_spe_evldh", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDH);
8099 def_builtin (target_flags, "__builtin_spe_evlhhesplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLAT);
8100 def_builtin (target_flags, "__builtin_spe_evlhhossplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLAT);
8101 def_builtin (target_flags, "__builtin_spe_evlhhousplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLAT);
8102 def_builtin (target_flags, "__builtin_spe_evlwhe", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHE);
8103 def_builtin (target_flags, "__builtin_spe_evlwhos", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOS);
8104 def_builtin (target_flags, "__builtin_spe_evlwhou", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOU);
8105 def_builtin (target_flags, "__builtin_spe_evlwhsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLAT);
8106 def_builtin (target_flags, "__builtin_spe_evlwwsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLAT);
8109 d = (struct builtin_description *) bdesc_spe_predicates;
8110 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, d++)
8114 switch (insn_data[d->icode].operand[1].mode)
8117 type = int_ftype_int_v2si_v2si;
8120 type = int_ftype_int_v2sf_v2sf;
8126 def_builtin (d->mask, d->name, type, d->code);
8129 /* Evsel predicates. */
8130 d = (struct builtin_description *) bdesc_spe_evsel;
8131 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, d++)
8135 switch (insn_data[d->icode].operand[1].mode)
8138 type = v2si_ftype_4_v2si;
8141 type = v2sf_ftype_4_v2sf;
8147 def_builtin (d->mask, d->name, type, d->code);
8152 altivec_init_builtins (void)
8154 struct builtin_description *d;
8155 struct builtin_description_predicates *dp;
8159 tree pfloat_type_node = build_pointer_type (float_type_node);
8160 tree pint_type_node = build_pointer_type (integer_type_node);
8161 tree pshort_type_node = build_pointer_type (short_integer_type_node);
8162 tree pchar_type_node = build_pointer_type (char_type_node);
8164 tree pvoid_type_node = build_pointer_type (void_type_node);
8166 tree pcfloat_type_node = build_pointer_type (build_qualified_type (float_type_node, TYPE_QUAL_CONST));
8167 tree pcint_type_node = build_pointer_type (build_qualified_type (integer_type_node, TYPE_QUAL_CONST));
8168 tree pcshort_type_node = build_pointer_type (build_qualified_type (short_integer_type_node, TYPE_QUAL_CONST));
8169 tree pcchar_type_node = build_pointer_type (build_qualified_type (char_type_node, TYPE_QUAL_CONST));
8171 tree pcvoid_type_node = build_pointer_type (build_qualified_type (void_type_node, TYPE_QUAL_CONST));
8173 tree int_ftype_opaque
8174 = build_function_type_list (integer_type_node,
8175 opaque_V4SI_type_node, NULL_TREE);
8177 tree opaque_ftype_opaque_int
8178 = build_function_type_list (opaque_V4SI_type_node,
8179 opaque_V4SI_type_node, integer_type_node, NULL_TREE);
8180 tree opaque_ftype_opaque_opaque_int
8181 = build_function_type_list (opaque_V4SI_type_node,
8182 opaque_V4SI_type_node, opaque_V4SI_type_node,
8183 integer_type_node, NULL_TREE);
8184 tree int_ftype_int_opaque_opaque
8185 = build_function_type_list (integer_type_node,
8186 integer_type_node, opaque_V4SI_type_node,
8187 opaque_V4SI_type_node, NULL_TREE);
8188 tree int_ftype_int_v4si_v4si
8189 = build_function_type_list (integer_type_node,
8190 integer_type_node, V4SI_type_node,
8191 V4SI_type_node, NULL_TREE);
8192 tree v4sf_ftype_pcfloat
8193 = build_function_type_list (V4SF_type_node, pcfloat_type_node, NULL_TREE);
8194 tree void_ftype_pfloat_v4sf
8195 = build_function_type_list (void_type_node,
8196 pfloat_type_node, V4SF_type_node, NULL_TREE);
8197 tree v4si_ftype_pcint
8198 = build_function_type_list (V4SI_type_node, pcint_type_node, NULL_TREE);
8199 tree void_ftype_pint_v4si
8200 = build_function_type_list (void_type_node,
8201 pint_type_node, V4SI_type_node, NULL_TREE);
8202 tree v8hi_ftype_pcshort
8203 = build_function_type_list (V8HI_type_node, pcshort_type_node, NULL_TREE);
8204 tree void_ftype_pshort_v8hi
8205 = build_function_type_list (void_type_node,
8206 pshort_type_node, V8HI_type_node, NULL_TREE);
8207 tree v16qi_ftype_pcchar
8208 = build_function_type_list (V16QI_type_node, pcchar_type_node, NULL_TREE);
8209 tree void_ftype_pchar_v16qi
8210 = build_function_type_list (void_type_node,
8211 pchar_type_node, V16QI_type_node, NULL_TREE);
8212 tree void_ftype_v4si
8213 = build_function_type_list (void_type_node, V4SI_type_node, NULL_TREE);
8214 tree v8hi_ftype_void
8215 = build_function_type (V8HI_type_node, void_list_node);
8216 tree void_ftype_void
8217 = build_function_type (void_type_node, void_list_node);
8219 = build_function_type_list (void_type_node, integer_type_node, NULL_TREE);
8221 tree opaque_ftype_long_pcvoid
8222 = build_function_type_list (opaque_V4SI_type_node,
8223 long_integer_type_node, pcvoid_type_node, NULL_TREE);
8224 tree v16qi_ftype_long_pcvoid
8225 = build_function_type_list (V16QI_type_node,
8226 long_integer_type_node, pcvoid_type_node, NULL_TREE);
8227 tree v8hi_ftype_long_pcvoid
8228 = build_function_type_list (V8HI_type_node,
8229 long_integer_type_node, pcvoid_type_node, NULL_TREE);
8230 tree v4si_ftype_long_pcvoid
8231 = build_function_type_list (V4SI_type_node,
8232 long_integer_type_node, pcvoid_type_node, NULL_TREE);
8234 tree void_ftype_opaque_long_pvoid
8235 = build_function_type_list (void_type_node,
8236 opaque_V4SI_type_node, long_integer_type_node,
8237 pvoid_type_node, NULL_TREE);
8238 tree void_ftype_v4si_long_pvoid
8239 = build_function_type_list (void_type_node,
8240 V4SI_type_node, long_integer_type_node,
8241 pvoid_type_node, NULL_TREE);
8242 tree void_ftype_v16qi_long_pvoid
8243 = build_function_type_list (void_type_node,
8244 V16QI_type_node, long_integer_type_node,
8245 pvoid_type_node, NULL_TREE);
8246 tree void_ftype_v8hi_long_pvoid
8247 = build_function_type_list (void_type_node,
8248 V8HI_type_node, long_integer_type_node,
8249 pvoid_type_node, NULL_TREE);
8250 tree int_ftype_int_v8hi_v8hi
8251 = build_function_type_list (integer_type_node,
8252 integer_type_node, V8HI_type_node,
8253 V8HI_type_node, NULL_TREE);
8254 tree int_ftype_int_v16qi_v16qi
8255 = build_function_type_list (integer_type_node,
8256 integer_type_node, V16QI_type_node,
8257 V16QI_type_node, NULL_TREE);
8258 tree int_ftype_int_v4sf_v4sf
8259 = build_function_type_list (integer_type_node,
8260 integer_type_node, V4SF_type_node,
8261 V4SF_type_node, NULL_TREE);
8262 tree v4si_ftype_v4si
8263 = build_function_type_list (V4SI_type_node, V4SI_type_node, NULL_TREE);
8264 tree v8hi_ftype_v8hi
8265 = build_function_type_list (V8HI_type_node, V8HI_type_node, NULL_TREE);
8266 tree v16qi_ftype_v16qi
8267 = build_function_type_list (V16QI_type_node, V16QI_type_node, NULL_TREE);
8268 tree v4sf_ftype_v4sf
8269 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
8270 tree void_ftype_pcvoid_int_int
8271 = build_function_type_list (void_type_node,
8272 pcvoid_type_node, integer_type_node,
8273 integer_type_node, NULL_TREE);
8275 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pcfloat,
8276 ALTIVEC_BUILTIN_LD_INTERNAL_4sf);
8277 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf,
8278 ALTIVEC_BUILTIN_ST_INTERNAL_4sf);
8279 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4si", v4si_ftype_pcint,
8280 ALTIVEC_BUILTIN_LD_INTERNAL_4si);
8281 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si,
8282 ALTIVEC_BUILTIN_ST_INTERNAL_4si);
8283 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pcshort,
8284 ALTIVEC_BUILTIN_LD_INTERNAL_8hi);
8285 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi,
8286 ALTIVEC_BUILTIN_ST_INTERNAL_8hi);
8287 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pcchar,
8288 ALTIVEC_BUILTIN_LD_INTERNAL_16qi);
8289 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi,
8290 ALTIVEC_BUILTIN_ST_INTERNAL_16qi);
8291 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mtvscr", void_ftype_v4si, ALTIVEC_BUILTIN_MTVSCR);
8292 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mfvscr", v8hi_ftype_void, ALTIVEC_BUILTIN_MFVSCR);
8293 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dssall", void_ftype_void, ALTIVEC_BUILTIN_DSSALL);
8294 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dss", void_ftype_int, ALTIVEC_BUILTIN_DSS);
8295 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVSL);
8296 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsr", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVSR);
8297 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvebx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEBX);
8298 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvehx", v8hi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEHX);
8299 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvewx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEWX);
8300 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvxl", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVXL);
8301 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVX);
8302 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvx", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVX);
8303 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvewx", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVEWX);
8304 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvxl", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVXL);
8305 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvebx", void_ftype_v16qi_long_pvoid, ALTIVEC_BUILTIN_STVEBX);
8306 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvehx", void_ftype_v8hi_long_pvoid, ALTIVEC_BUILTIN_STVEHX);
8307 def_builtin (MASK_ALTIVEC, "__builtin_vec_ld", opaque_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LD);
8308 def_builtin (MASK_ALTIVEC, "__builtin_vec_lde", opaque_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LDE);
8309 def_builtin (MASK_ALTIVEC, "__builtin_vec_ldl", opaque_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LDL);
8310 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvsl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVSL);
8311 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvsr", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVSR);
8312 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvebx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVEBX);
8313 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvehx", v8hi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVEHX);
8314 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvewx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVEWX);
8315 def_builtin (MASK_ALTIVEC, "__builtin_vec_st", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_ST);
8316 def_builtin (MASK_ALTIVEC, "__builtin_vec_ste", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STE);
8317 def_builtin (MASK_ALTIVEC, "__builtin_vec_stl", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STL);
8318 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvewx", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STVEWX);
8319 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvebx", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STVEBX);
8320 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvehx", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STVEHX);
8322 def_builtin (MASK_ALTIVEC, "__builtin_vec_step", int_ftype_opaque, ALTIVEC_BUILTIN_VEC_STEP);
8324 def_builtin (MASK_ALTIVEC, "__builtin_vec_sld", opaque_ftype_opaque_opaque_int, ALTIVEC_BUILTIN_VEC_SLD);
8325 def_builtin (MASK_ALTIVEC, "__builtin_vec_splat", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_SPLAT);
8326 def_builtin (MASK_ALTIVEC, "__builtin_vec_vspltw", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VSPLTW);
8327 def_builtin (MASK_ALTIVEC, "__builtin_vec_vsplth", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VSPLTH);
8328 def_builtin (MASK_ALTIVEC, "__builtin_vec_vspltb", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VSPLTB);
8329 def_builtin (MASK_ALTIVEC, "__builtin_vec_ctf", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_CTF);
8330 def_builtin (MASK_ALTIVEC, "__builtin_vec_vcfsx", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VCFSX);
8331 def_builtin (MASK_ALTIVEC, "__builtin_vec_vcfux", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VCFUX);
8332 def_builtin (MASK_ALTIVEC, "__builtin_vec_cts", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_CTS);
8333 def_builtin (MASK_ALTIVEC, "__builtin_vec_ctu", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_CTU);
8335 /* Add the DST variants. */
8336 d = (struct builtin_description *) bdesc_dst;
8337 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
8338 def_builtin (d->mask, d->name, void_ftype_pcvoid_int_int, d->code);
8340 /* Initialize the predicates. */
8341 dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
8342 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
8344 enum machine_mode mode1;
8346 bool is_overloaded = dp->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
8347 && dp->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
8352 mode1 = insn_data[dp->icode].operand[1].mode;
8357 type = int_ftype_int_opaque_opaque;
8360 type = int_ftype_int_v4si_v4si;
8363 type = int_ftype_int_v8hi_v8hi;
8366 type = int_ftype_int_v16qi_v16qi;
8369 type = int_ftype_int_v4sf_v4sf;
8375 def_builtin (dp->mask, dp->name, type, dp->code);
8378 /* Initialize the abs* operators. */
8379 d = (struct builtin_description *) bdesc_abs;
8380 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
8382 enum machine_mode mode0;
8385 mode0 = insn_data[d->icode].operand[0].mode;
8390 type = v4si_ftype_v4si;
8393 type = v8hi_ftype_v8hi;
8396 type = v16qi_ftype_v16qi;
8399 type = v4sf_ftype_v4sf;
8405 def_builtin (d->mask, d->name, type, d->code);
8412 /* Initialize target builtin that implements
8413 targetm.vectorize.builtin_mask_for_load. */
8415 decl = lang_hooks.builtin_function ("__builtin_altivec_mask_for_load",
8416 v16qi_ftype_long_pcvoid,
8417 ALTIVEC_BUILTIN_MASK_FOR_LOAD,
8419 tree_cons (get_identifier ("const"),
8420 NULL_TREE, NULL_TREE));
8421 /* Record the decl. Will be used by rs6000_builtin_mask_for_load. */
8422 altivec_builtin_mask_for_load = decl;
8425 /* Access to the vec_init patterns. */
8426 ftype = build_function_type_list (V4SI_type_node, integer_type_node,
8427 integer_type_node, integer_type_node,
8428 integer_type_node, NULL_TREE);
8429 def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v4si", ftype,
8430 ALTIVEC_BUILTIN_VEC_INIT_V4SI);
8432 ftype = build_function_type_list (V8HI_type_node, short_integer_type_node,
8433 short_integer_type_node,
8434 short_integer_type_node,
8435 short_integer_type_node,
8436 short_integer_type_node,
8437 short_integer_type_node,
8438 short_integer_type_node,
8439 short_integer_type_node, NULL_TREE);
8440 def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v8hi", ftype,
8441 ALTIVEC_BUILTIN_VEC_INIT_V8HI);
8443 ftype = build_function_type_list (V16QI_type_node, char_type_node,
8444 char_type_node, char_type_node,
8445 char_type_node, char_type_node,
8446 char_type_node, char_type_node,
8447 char_type_node, char_type_node,
8448 char_type_node, char_type_node,
8449 char_type_node, char_type_node,
8450 char_type_node, char_type_node,
8451 char_type_node, NULL_TREE);
8452 def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v16qi", ftype,
8453 ALTIVEC_BUILTIN_VEC_INIT_V16QI);
8455 ftype = build_function_type_list (V4SF_type_node, float_type_node,
8456 float_type_node, float_type_node,
8457 float_type_node, NULL_TREE);
8458 def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v4sf", ftype,
8459 ALTIVEC_BUILTIN_VEC_INIT_V4SF);
8461 /* Access to the vec_set patterns. */
8462 ftype = build_function_type_list (V4SI_type_node, V4SI_type_node,
8464 integer_type_node, NULL_TREE);
8465 def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v4si", ftype,
8466 ALTIVEC_BUILTIN_VEC_SET_V4SI);
8468 ftype = build_function_type_list (V8HI_type_node, V8HI_type_node,
8470 integer_type_node, NULL_TREE);
8471 def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v8hi", ftype,
8472 ALTIVEC_BUILTIN_VEC_SET_V8HI);
8474 ftype = build_function_type_list (V8HI_type_node, V16QI_type_node,
8476 integer_type_node, NULL_TREE);
8477 def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v16qi", ftype,
8478 ALTIVEC_BUILTIN_VEC_SET_V16QI);
8480 ftype = build_function_type_list (V4SF_type_node, V4SF_type_node,
8482 integer_type_node, NULL_TREE);
8483 def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v4sf", ftype,
8484 ALTIVEC_BUILTIN_VEC_SET_V4SF);
8486 /* Access to the vec_extract patterns. */
8487 ftype = build_function_type_list (intSI_type_node, V4SI_type_node,
8488 integer_type_node, NULL_TREE);
8489 def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v4si", ftype,
8490 ALTIVEC_BUILTIN_VEC_EXT_V4SI);
8492 ftype = build_function_type_list (intHI_type_node, V8HI_type_node,
8493 integer_type_node, NULL_TREE);
8494 def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v8hi", ftype,
8495 ALTIVEC_BUILTIN_VEC_EXT_V8HI);
8497 ftype = build_function_type_list (intQI_type_node, V16QI_type_node,
8498 integer_type_node, NULL_TREE);
8499 def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v16qi", ftype,
8500 ALTIVEC_BUILTIN_VEC_EXT_V16QI);
8502 ftype = build_function_type_list (float_type_node, V4SF_type_node,
8503 integer_type_node, NULL_TREE);
8504 def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v4sf", ftype,
8505 ALTIVEC_BUILTIN_VEC_EXT_V4SF);
8509 rs6000_common_init_builtins (void)
8511 struct builtin_description *d;
8514 tree v4sf_ftype_v4sf_v4sf_v16qi
8515 = build_function_type_list (V4SF_type_node,
8516 V4SF_type_node, V4SF_type_node,
8517 V16QI_type_node, NULL_TREE);
8518 tree v4si_ftype_v4si_v4si_v16qi
8519 = build_function_type_list (V4SI_type_node,
8520 V4SI_type_node, V4SI_type_node,
8521 V16QI_type_node, NULL_TREE);
8522 tree v8hi_ftype_v8hi_v8hi_v16qi
8523 = build_function_type_list (V8HI_type_node,
8524 V8HI_type_node, V8HI_type_node,
8525 V16QI_type_node, NULL_TREE);
8526 tree v16qi_ftype_v16qi_v16qi_v16qi
8527 = build_function_type_list (V16QI_type_node,
8528 V16QI_type_node, V16QI_type_node,
8529 V16QI_type_node, NULL_TREE);
8531 = build_function_type_list (V4SI_type_node, integer_type_node, NULL_TREE);
8533 = build_function_type_list (V8HI_type_node, integer_type_node, NULL_TREE);
8534 tree v16qi_ftype_int
8535 = build_function_type_list (V16QI_type_node, integer_type_node, NULL_TREE);
8536 tree v8hi_ftype_v16qi
8537 = build_function_type_list (V8HI_type_node, V16QI_type_node, NULL_TREE);
8538 tree v4sf_ftype_v4sf
8539 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
8541 tree v2si_ftype_v2si_v2si
8542 = build_function_type_list (opaque_V2SI_type_node,
8543 opaque_V2SI_type_node,
8544 opaque_V2SI_type_node, NULL_TREE);
8546 tree v2sf_ftype_v2sf_v2sf
8547 = build_function_type_list (opaque_V2SF_type_node,
8548 opaque_V2SF_type_node,
8549 opaque_V2SF_type_node, NULL_TREE);
8551 tree v2si_ftype_int_int
8552 = build_function_type_list (opaque_V2SI_type_node,
8553 integer_type_node, integer_type_node,
8556 tree opaque_ftype_opaque
8557 = build_function_type_list (opaque_V4SI_type_node,
8558 opaque_V4SI_type_node, NULL_TREE);
8560 tree v2si_ftype_v2si
8561 = build_function_type_list (opaque_V2SI_type_node,
8562 opaque_V2SI_type_node, NULL_TREE);
8564 tree v2sf_ftype_v2sf
8565 = build_function_type_list (opaque_V2SF_type_node,
8566 opaque_V2SF_type_node, NULL_TREE);
8568 tree v2sf_ftype_v2si
8569 = build_function_type_list (opaque_V2SF_type_node,
8570 opaque_V2SI_type_node, NULL_TREE);
8572 tree v2si_ftype_v2sf
8573 = build_function_type_list (opaque_V2SI_type_node,
8574 opaque_V2SF_type_node, NULL_TREE);
8576 tree v2si_ftype_v2si_char
8577 = build_function_type_list (opaque_V2SI_type_node,
8578 opaque_V2SI_type_node,
8579 char_type_node, NULL_TREE);
8581 tree v2si_ftype_int_char
8582 = build_function_type_list (opaque_V2SI_type_node,
8583 integer_type_node, char_type_node, NULL_TREE);
8585 tree v2si_ftype_char
8586 = build_function_type_list (opaque_V2SI_type_node,
8587 char_type_node, NULL_TREE);
8589 tree int_ftype_int_int
8590 = build_function_type_list (integer_type_node,
8591 integer_type_node, integer_type_node,
8594 tree opaque_ftype_opaque_opaque
8595 = build_function_type_list (opaque_V4SI_type_node,
8596 opaque_V4SI_type_node, opaque_V4SI_type_node, NULL_TREE);
8597 tree v4si_ftype_v4si_v4si
8598 = build_function_type_list (V4SI_type_node,
8599 V4SI_type_node, V4SI_type_node, NULL_TREE);
8600 tree v4sf_ftype_v4si_int
8601 = build_function_type_list (V4SF_type_node,
8602 V4SI_type_node, integer_type_node, NULL_TREE);
8603 tree v4si_ftype_v4sf_int
8604 = build_function_type_list (V4SI_type_node,
8605 V4SF_type_node, integer_type_node, NULL_TREE);
8606 tree v4si_ftype_v4si_int
8607 = build_function_type_list (V4SI_type_node,
8608 V4SI_type_node, integer_type_node, NULL_TREE);
8609 tree v8hi_ftype_v8hi_int
8610 = build_function_type_list (V8HI_type_node,
8611 V8HI_type_node, integer_type_node, NULL_TREE);
8612 tree v16qi_ftype_v16qi_int
8613 = build_function_type_list (V16QI_type_node,
8614 V16QI_type_node, integer_type_node, NULL_TREE);
8615 tree v16qi_ftype_v16qi_v16qi_int
8616 = build_function_type_list (V16QI_type_node,
8617 V16QI_type_node, V16QI_type_node,
8618 integer_type_node, NULL_TREE);
8619 tree v8hi_ftype_v8hi_v8hi_int
8620 = build_function_type_list (V8HI_type_node,
8621 V8HI_type_node, V8HI_type_node,
8622 integer_type_node, NULL_TREE);
8623 tree v4si_ftype_v4si_v4si_int
8624 = build_function_type_list (V4SI_type_node,
8625 V4SI_type_node, V4SI_type_node,
8626 integer_type_node, NULL_TREE);
8627 tree v4sf_ftype_v4sf_v4sf_int
8628 = build_function_type_list (V4SF_type_node,
8629 V4SF_type_node, V4SF_type_node,
8630 integer_type_node, NULL_TREE);
8631 tree v4sf_ftype_v4sf_v4sf
8632 = build_function_type_list (V4SF_type_node,
8633 V4SF_type_node, V4SF_type_node, NULL_TREE);
8634 tree opaque_ftype_opaque_opaque_opaque
8635 = build_function_type_list (opaque_V4SI_type_node,
8636 opaque_V4SI_type_node, opaque_V4SI_type_node,
8637 opaque_V4SI_type_node, NULL_TREE);
8638 tree v4sf_ftype_v4sf_v4sf_v4si
8639 = build_function_type_list (V4SF_type_node,
8640 V4SF_type_node, V4SF_type_node,
8641 V4SI_type_node, NULL_TREE);
8642 tree v4sf_ftype_v4sf_v4sf_v4sf
8643 = build_function_type_list (V4SF_type_node,
8644 V4SF_type_node, V4SF_type_node,
8645 V4SF_type_node, NULL_TREE);
8646 tree v4si_ftype_v4si_v4si_v4si
8647 = build_function_type_list (V4SI_type_node,
8648 V4SI_type_node, V4SI_type_node,
8649 V4SI_type_node, NULL_TREE);
8650 tree v8hi_ftype_v8hi_v8hi
8651 = build_function_type_list (V8HI_type_node,
8652 V8HI_type_node, V8HI_type_node, NULL_TREE);
8653 tree v8hi_ftype_v8hi_v8hi_v8hi
8654 = build_function_type_list (V8HI_type_node,
8655 V8HI_type_node, V8HI_type_node,
8656 V8HI_type_node, NULL_TREE);
8657 tree v4si_ftype_v8hi_v8hi_v4si
8658 = build_function_type_list (V4SI_type_node,
8659 V8HI_type_node, V8HI_type_node,
8660 V4SI_type_node, NULL_TREE);
8661 tree v4si_ftype_v16qi_v16qi_v4si
8662 = build_function_type_list (V4SI_type_node,
8663 V16QI_type_node, V16QI_type_node,
8664 V4SI_type_node, NULL_TREE);
8665 tree v16qi_ftype_v16qi_v16qi
8666 = build_function_type_list (V16QI_type_node,
8667 V16QI_type_node, V16QI_type_node, NULL_TREE);
8668 tree v4si_ftype_v4sf_v4sf
8669 = build_function_type_list (V4SI_type_node,
8670 V4SF_type_node, V4SF_type_node, NULL_TREE);
8671 tree v8hi_ftype_v16qi_v16qi
8672 = build_function_type_list (V8HI_type_node,
8673 V16QI_type_node, V16QI_type_node, NULL_TREE);
8674 tree v4si_ftype_v8hi_v8hi
8675 = build_function_type_list (V4SI_type_node,
8676 V8HI_type_node, V8HI_type_node, NULL_TREE);
8677 tree v8hi_ftype_v4si_v4si
8678 = build_function_type_list (V8HI_type_node,
8679 V4SI_type_node, V4SI_type_node, NULL_TREE);
8680 tree v16qi_ftype_v8hi_v8hi
8681 = build_function_type_list (V16QI_type_node,
8682 V8HI_type_node, V8HI_type_node, NULL_TREE);
8683 tree v4si_ftype_v16qi_v4si
8684 = build_function_type_list (V4SI_type_node,
8685 V16QI_type_node, V4SI_type_node, NULL_TREE);
8686 tree v4si_ftype_v16qi_v16qi
8687 = build_function_type_list (V4SI_type_node,
8688 V16QI_type_node, V16QI_type_node, NULL_TREE);
8689 tree v4si_ftype_v8hi_v4si
8690 = build_function_type_list (V4SI_type_node,
8691 V8HI_type_node, V4SI_type_node, NULL_TREE);
8692 tree v4si_ftype_v8hi
8693 = build_function_type_list (V4SI_type_node, V8HI_type_node, NULL_TREE);
8694 tree int_ftype_v4si_v4si
8695 = build_function_type_list (integer_type_node,
8696 V4SI_type_node, V4SI_type_node, NULL_TREE);
8697 tree int_ftype_v4sf_v4sf
8698 = build_function_type_list (integer_type_node,
8699 V4SF_type_node, V4SF_type_node, NULL_TREE);
8700 tree int_ftype_v16qi_v16qi
8701 = build_function_type_list (integer_type_node,
8702 V16QI_type_node, V16QI_type_node, NULL_TREE);
8703 tree int_ftype_v8hi_v8hi
8704 = build_function_type_list (integer_type_node,
8705 V8HI_type_node, V8HI_type_node, NULL_TREE);
8707 /* Add the simple ternary operators. */
8708 d = (struct builtin_description *) bdesc_3arg;
8709 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
8711 enum machine_mode mode0, mode1, mode2, mode3;
8713 bool is_overloaded = d->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
8714 && d->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
8725 if (d->name == 0 || d->icode == CODE_FOR_nothing)
8728 mode0 = insn_data[d->icode].operand[0].mode;
8729 mode1 = insn_data[d->icode].operand[1].mode;
8730 mode2 = insn_data[d->icode].operand[2].mode;
8731 mode3 = insn_data[d->icode].operand[3].mode;
8734 /* When all four are of the same mode. */
8735 if (mode0 == mode1 && mode1 == mode2 && mode2 == mode3)
8740 type = opaque_ftype_opaque_opaque_opaque;
8743 type = v4si_ftype_v4si_v4si_v4si;
8746 type = v4sf_ftype_v4sf_v4sf_v4sf;
8749 type = v8hi_ftype_v8hi_v8hi_v8hi;
8752 type = v16qi_ftype_v16qi_v16qi_v16qi;
8758 else if (mode0 == mode1 && mode1 == mode2 && mode3 == V16QImode)
8763 type = v4si_ftype_v4si_v4si_v16qi;
8766 type = v4sf_ftype_v4sf_v4sf_v16qi;
8769 type = v8hi_ftype_v8hi_v8hi_v16qi;
8772 type = v16qi_ftype_v16qi_v16qi_v16qi;
8778 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode
8779 && mode3 == V4SImode)
8780 type = v4si_ftype_v16qi_v16qi_v4si;
8781 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode
8782 && mode3 == V4SImode)
8783 type = v4si_ftype_v8hi_v8hi_v4si;
8784 else if (mode0 == V4SFmode && mode1 == V4SFmode && mode2 == V4SFmode
8785 && mode3 == V4SImode)
8786 type = v4sf_ftype_v4sf_v4sf_v4si;
8788 /* vchar, vchar, vchar, 4 bit literal. */
8789 else if (mode0 == V16QImode && mode1 == mode0 && mode2 == mode0
8791 type = v16qi_ftype_v16qi_v16qi_int;
8793 /* vshort, vshort, vshort, 4 bit literal. */
8794 else if (mode0 == V8HImode && mode1 == mode0 && mode2 == mode0
8796 type = v8hi_ftype_v8hi_v8hi_int;
8798 /* vint, vint, vint, 4 bit literal. */
8799 else if (mode0 == V4SImode && mode1 == mode0 && mode2 == mode0
8801 type = v4si_ftype_v4si_v4si_int;
8803 /* vfloat, vfloat, vfloat, 4 bit literal. */
8804 else if (mode0 == V4SFmode && mode1 == mode0 && mode2 == mode0
8806 type = v4sf_ftype_v4sf_v4sf_int;
8811 def_builtin (d->mask, d->name, type, d->code);
8814 /* Add the simple binary operators. */
8815 d = (struct builtin_description *) bdesc_2arg;
8816 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
8818 enum machine_mode mode0, mode1, mode2;
8820 bool is_overloaded = d->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
8821 && d->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
8831 if (d->name == 0 || d->icode == CODE_FOR_nothing)
8834 mode0 = insn_data[d->icode].operand[0].mode;
8835 mode1 = insn_data[d->icode].operand[1].mode;
8836 mode2 = insn_data[d->icode].operand[2].mode;
8839 /* When all three operands are of the same mode. */
8840 if (mode0 == mode1 && mode1 == mode2)
8845 type = opaque_ftype_opaque_opaque;
8848 type = v4sf_ftype_v4sf_v4sf;
8851 type = v4si_ftype_v4si_v4si;
8854 type = v16qi_ftype_v16qi_v16qi;
8857 type = v8hi_ftype_v8hi_v8hi;
8860 type = v2si_ftype_v2si_v2si;
8863 type = v2sf_ftype_v2sf_v2sf;
8866 type = int_ftype_int_int;
8873 /* A few other combos we really don't want to do manually. */
8875 /* vint, vfloat, vfloat. */
8876 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == V4SFmode)
8877 type = v4si_ftype_v4sf_v4sf;
8879 /* vshort, vchar, vchar. */
8880 else if (mode0 == V8HImode && mode1 == V16QImode && mode2 == V16QImode)
8881 type = v8hi_ftype_v16qi_v16qi;
8883 /* vint, vshort, vshort. */
8884 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode)
8885 type = v4si_ftype_v8hi_v8hi;
8887 /* vshort, vint, vint. */
8888 else if (mode0 == V8HImode && mode1 == V4SImode && mode2 == V4SImode)
8889 type = v8hi_ftype_v4si_v4si;
8891 /* vchar, vshort, vshort. */
8892 else if (mode0 == V16QImode && mode1 == V8HImode && mode2 == V8HImode)
8893 type = v16qi_ftype_v8hi_v8hi;
8895 /* vint, vchar, vint. */
8896 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V4SImode)
8897 type = v4si_ftype_v16qi_v4si;
8899 /* vint, vchar, vchar. */
8900 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode)
8901 type = v4si_ftype_v16qi_v16qi;
8903 /* vint, vshort, vint. */
8904 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V4SImode)
8905 type = v4si_ftype_v8hi_v4si;
8907 /* vint, vint, 5 bit literal. */
8908 else if (mode0 == V4SImode && mode1 == V4SImode && mode2 == QImode)
8909 type = v4si_ftype_v4si_int;
8911 /* vshort, vshort, 5 bit literal. */
8912 else if (mode0 == V8HImode && mode1 == V8HImode && mode2 == QImode)
8913 type = v8hi_ftype_v8hi_int;
8915 /* vchar, vchar, 5 bit literal. */
8916 else if (mode0 == V16QImode && mode1 == V16QImode && mode2 == QImode)
8917 type = v16qi_ftype_v16qi_int;
8919 /* vfloat, vint, 5 bit literal. */
8920 else if (mode0 == V4SFmode && mode1 == V4SImode && mode2 == QImode)
8921 type = v4sf_ftype_v4si_int;
8923 /* vint, vfloat, 5 bit literal. */
8924 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == QImode)
8925 type = v4si_ftype_v4sf_int;
8927 else if (mode0 == V2SImode && mode1 == SImode && mode2 == SImode)
8928 type = v2si_ftype_int_int;
8930 else if (mode0 == V2SImode && mode1 == V2SImode && mode2 == QImode)
8931 type = v2si_ftype_v2si_char;
8933 else if (mode0 == V2SImode && mode1 == SImode && mode2 == QImode)
8934 type = v2si_ftype_int_char;
8939 gcc_assert (mode0 == SImode);
8943 type = int_ftype_v4si_v4si;
8946 type = int_ftype_v4sf_v4sf;
8949 type = int_ftype_v16qi_v16qi;
8952 type = int_ftype_v8hi_v8hi;
8959 def_builtin (d->mask, d->name, type, d->code);
8962 /* Add the simple unary operators. */
8963 d = (struct builtin_description *) bdesc_1arg;
8964 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
8966 enum machine_mode mode0, mode1;
8968 bool is_overloaded = d->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
8969 && d->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
8978 if (d->name == 0 || d->icode == CODE_FOR_nothing)
8981 mode0 = insn_data[d->icode].operand[0].mode;
8982 mode1 = insn_data[d->icode].operand[1].mode;
8985 if (mode0 == V4SImode && mode1 == QImode)
8986 type = v4si_ftype_int;
8987 else if (mode0 == V8HImode && mode1 == QImode)
8988 type = v8hi_ftype_int;
8989 else if (mode0 == V16QImode && mode1 == QImode)
8990 type = v16qi_ftype_int;
8991 else if (mode0 == VOIDmode && mode1 == VOIDmode)
8992 type = opaque_ftype_opaque;
8993 else if (mode0 == V4SFmode && mode1 == V4SFmode)
8994 type = v4sf_ftype_v4sf;
8995 else if (mode0 == V8HImode && mode1 == V16QImode)
8996 type = v8hi_ftype_v16qi;
8997 else if (mode0 == V4SImode && mode1 == V8HImode)
8998 type = v4si_ftype_v8hi;
8999 else if (mode0 == V2SImode && mode1 == V2SImode)
9000 type = v2si_ftype_v2si;
9001 else if (mode0 == V2SFmode && mode1 == V2SFmode)
9002 type = v2sf_ftype_v2sf;
9003 else if (mode0 == V2SFmode && mode1 == V2SImode)
9004 type = v2sf_ftype_v2si;
9005 else if (mode0 == V2SImode && mode1 == V2SFmode)
9006 type = v2si_ftype_v2sf;
9007 else if (mode0 == V2SImode && mode1 == QImode)
9008 type = v2si_ftype_char;
9012 def_builtin (d->mask, d->name, type, d->code);
9017 rs6000_init_libfuncs (void)
9019 if (!TARGET_HARD_FLOAT)
9022 if (DEFAULT_ABI != ABI_V4)
9024 if (TARGET_XCOFF && ! TARGET_POWER2 && ! TARGET_POWERPC)
9026 /* AIX library routines for float->int conversion. */
9027 set_conv_libfunc (sfix_optab, SImode, DFmode, "__itrunc");
9028 set_conv_libfunc (ufix_optab, SImode, DFmode, "__uitrunc");
9029 set_conv_libfunc (sfix_optab, SImode, TFmode, "_qitrunc");
9030 set_conv_libfunc (ufix_optab, SImode, TFmode, "_quitrunc");
9033 /* AIX/Darwin/64-bit Linux quad floating point routines. */
9034 if (!TARGET_XL_COMPAT)
9036 set_optab_libfunc (add_optab, TFmode, "__gcc_qadd");
9037 set_optab_libfunc (sub_optab, TFmode, "__gcc_qsub");
9038 set_optab_libfunc (smul_optab, TFmode, "__gcc_qmul");
9039 set_optab_libfunc (sdiv_optab, TFmode, "__gcc_qdiv");
9043 set_optab_libfunc (add_optab, TFmode, "_xlqadd");
9044 set_optab_libfunc (sub_optab, TFmode, "_xlqsub");
9045 set_optab_libfunc (smul_optab, TFmode, "_xlqmul");
9046 set_optab_libfunc (sdiv_optab, TFmode, "_xlqdiv");
9051 /* 32-bit SVR4 quad floating point routines. */
9053 set_optab_libfunc (add_optab, TFmode, "_q_add");
9054 set_optab_libfunc (sub_optab, TFmode, "_q_sub");
9055 set_optab_libfunc (neg_optab, TFmode, "_q_neg");
9056 set_optab_libfunc (smul_optab, TFmode, "_q_mul");
9057 set_optab_libfunc (sdiv_optab, TFmode, "_q_div");
9058 if (TARGET_PPC_GPOPT || TARGET_POWER2)
9059 set_optab_libfunc (sqrt_optab, TFmode, "_q_sqrt");
9061 set_optab_libfunc (eq_optab, TFmode, "_q_feq");
9062 set_optab_libfunc (ne_optab, TFmode, "_q_fne");
9063 set_optab_libfunc (gt_optab, TFmode, "_q_fgt");
9064 set_optab_libfunc (ge_optab, TFmode, "_q_fge");
9065 set_optab_libfunc (lt_optab, TFmode, "_q_flt");
9066 set_optab_libfunc (le_optab, TFmode, "_q_fle");
9068 set_conv_libfunc (sext_optab, TFmode, SFmode, "_q_stoq");
9069 set_conv_libfunc (sext_optab, TFmode, DFmode, "_q_dtoq");
9070 set_conv_libfunc (trunc_optab, SFmode, TFmode, "_q_qtos");
9071 set_conv_libfunc (trunc_optab, DFmode, TFmode, "_q_qtod");
9072 set_conv_libfunc (sfix_optab, SImode, TFmode, "_q_qtoi");
9073 set_conv_libfunc (ufix_optab, SImode, TFmode, "_q_qtou");
9074 set_conv_libfunc (sfloat_optab, TFmode, SImode, "_q_itoq");
9075 set_conv_libfunc (ufloat_optab, TFmode, SImode, "_q_utoq");
9080 /* Expand a block clear operation, and return 1 if successful. Return 0
9081 if we should let the compiler generate normal code.
9083 operands[0] is the destination
9084 operands[1] is the length
9085 operands[3] is the alignment */
9088 expand_block_clear (rtx operands[])
9090 rtx orig_dest = operands[0];
9091 rtx bytes_rtx = operands[1];
9092 rtx align_rtx = operands[3];
9093 bool constp = (GET_CODE (bytes_rtx) == CONST_INT);
9094 HOST_WIDE_INT align;
9095 HOST_WIDE_INT bytes;
9100 /* If this is not a fixed size move, just call memcpy */
9104 /* This must be a fixed size alignment */
9105 gcc_assert (GET_CODE (align_rtx) == CONST_INT);
9106 align = INTVAL (align_rtx) * BITS_PER_UNIT;
9108 /* Anything to clear? */
9109 bytes = INTVAL (bytes_rtx);
9113 /* Use the builtin memset after a point, to avoid huge code bloat.
9114 When optimize_size, avoid any significant code bloat; calling
9115 memset is about 4 instructions, so allow for one instruction to
9116 load zero and three to do clearing. */
9117 if (TARGET_ALTIVEC && align >= 128)
9119 else if (TARGET_POWERPC64 && align >= 32)
9124 if (optimize_size && bytes > 3 * clear_step)
9126 if (! optimize_size && bytes > 8 * clear_step)
9129 for (offset = 0; bytes > 0; offset += clear_bytes, bytes -= clear_bytes)
9131 enum machine_mode mode = BLKmode;
9134 if (bytes >= 16 && TARGET_ALTIVEC && align >= 128)
9139 else if (bytes >= 8 && TARGET_POWERPC64
9140 /* 64-bit loads and stores require word-aligned
9142 && (align >= 64 || (!STRICT_ALIGNMENT && align >= 32)))
9147 else if (bytes >= 4 && (align >= 32 || !STRICT_ALIGNMENT))
9148 { /* move 4 bytes */
9152 else if (bytes >= 2 && (align >= 16 || !STRICT_ALIGNMENT))
9153 { /* move 2 bytes */
9157 else /* move 1 byte at a time */
9163 dest = adjust_address (orig_dest, mode, offset);
9165 emit_move_insn (dest, CONST0_RTX (mode));
9172 /* Expand a block move operation, and return 1 if successful. Return 0
9173 if we should let the compiler generate normal code.
9175 operands[0] is the destination
9176 operands[1] is the source
9177 operands[2] is the length
9178 operands[3] is the alignment */
9180 #define MAX_MOVE_REG 4
9183 expand_block_move (rtx operands[])
9185 rtx orig_dest = operands[0];
9186 rtx orig_src = operands[1];
9187 rtx bytes_rtx = operands[2];
9188 rtx align_rtx = operands[3];
9189 int constp = (GET_CODE (bytes_rtx) == CONST_INT);
9194 rtx stores[MAX_MOVE_REG];
9197 /* If this is not a fixed size move, just call memcpy */
9201 /* This must be a fixed size alignment */
9202 gcc_assert (GET_CODE (align_rtx) == CONST_INT);
9203 align = INTVAL (align_rtx) * BITS_PER_UNIT;
9205 /* Anything to move? */
9206 bytes = INTVAL (bytes_rtx);
9210 /* store_one_arg depends on expand_block_move to handle at least the size of
9211 reg_parm_stack_space. */
9212 if (bytes > (TARGET_POWERPC64 ? 64 : 32))
9215 for (offset = 0; bytes > 0; offset += move_bytes, bytes -= move_bytes)
9218 rtx (*movmemsi) (rtx, rtx, rtx, rtx);
9219 rtx (*mov) (rtx, rtx);
9221 enum machine_mode mode = BLKmode;
9224 /* Altivec first, since it will be faster than a string move
9225 when it applies, and usually not significantly larger. */
9226 if (TARGET_ALTIVEC && bytes >= 16 && align >= 128)
9230 gen_func.mov = gen_movv4si;
9232 else if (TARGET_STRING
9233 && bytes > 24 /* move up to 32 bytes at a time */
9241 && ! fixed_regs[12])
9243 move_bytes = (bytes > 32) ? 32 : bytes;
9244 gen_func.movmemsi = gen_movmemsi_8reg;
9246 else if (TARGET_STRING
9247 && bytes > 16 /* move up to 24 bytes at a time */
9253 && ! fixed_regs[10])
9255 move_bytes = (bytes > 24) ? 24 : bytes;
9256 gen_func.movmemsi = gen_movmemsi_6reg;
9258 else if (TARGET_STRING
9259 && bytes > 8 /* move up to 16 bytes at a time */
9265 move_bytes = (bytes > 16) ? 16 : bytes;
9266 gen_func.movmemsi = gen_movmemsi_4reg;
9268 else if (bytes >= 8 && TARGET_POWERPC64
9269 /* 64-bit loads and stores require word-aligned
9271 && (align >= 64 || (!STRICT_ALIGNMENT && align >= 32)))
9275 gen_func.mov = gen_movdi;
9277 else if (TARGET_STRING && bytes > 4 && !TARGET_POWERPC64)
9278 { /* move up to 8 bytes at a time */
9279 move_bytes = (bytes > 8) ? 8 : bytes;
9280 gen_func.movmemsi = gen_movmemsi_2reg;
9282 else if (bytes >= 4 && (align >= 32 || !STRICT_ALIGNMENT))
9283 { /* move 4 bytes */
9286 gen_func.mov = gen_movsi;
9288 else if (bytes >= 2 && (align >= 16 || !STRICT_ALIGNMENT))
9289 { /* move 2 bytes */
9292 gen_func.mov = gen_movhi;
9294 else if (TARGET_STRING && bytes > 1)
9295 { /* move up to 4 bytes at a time */
9296 move_bytes = (bytes > 4) ? 4 : bytes;
9297 gen_func.movmemsi = gen_movmemsi_1reg;
9299 else /* move 1 byte at a time */
9303 gen_func.mov = gen_movqi;
9306 src = adjust_address (orig_src, mode, offset);
9307 dest = adjust_address (orig_dest, mode, offset);
9309 if (mode != BLKmode)
9311 rtx tmp_reg = gen_reg_rtx (mode);
9313 emit_insn ((*gen_func.mov) (tmp_reg, src));
9314 stores[num_reg++] = (*gen_func.mov) (dest, tmp_reg);
9317 if (mode == BLKmode || num_reg >= MAX_MOVE_REG || bytes == move_bytes)
9320 for (i = 0; i < num_reg; i++)
9321 emit_insn (stores[i]);
9325 if (mode == BLKmode)
9327 /* Move the address into scratch registers. The movmemsi
9328 patterns require zero offset. */
9329 if (!REG_P (XEXP (src, 0)))
9331 rtx src_reg = copy_addr_to_reg (XEXP (src, 0));
9332 src = replace_equiv_address (src, src_reg);
9334 set_mem_size (src, GEN_INT (move_bytes));
9336 if (!REG_P (XEXP (dest, 0)))
9338 rtx dest_reg = copy_addr_to_reg (XEXP (dest, 0));
9339 dest = replace_equiv_address (dest, dest_reg);
9341 set_mem_size (dest, GEN_INT (move_bytes));
9343 emit_insn ((*gen_func.movmemsi) (dest, src,
9344 GEN_INT (move_bytes & 31),
9353 /* Return a string to perform a load_multiple operation.
9354 operands[0] is the vector.
9355 operands[1] is the source address.
9356 operands[2] is the first destination register. */
9359 rs6000_output_load_multiple (rtx operands[3])
9361 /* We have to handle the case where the pseudo used to contain the address
9362 is assigned to one of the output registers. */
9364 int words = XVECLEN (operands[0], 0);
9367 if (XVECLEN (operands[0], 0) == 1)
9368 return "{l|lwz} %2,0(%1)";
9370 for (i = 0; i < words; i++)
9371 if (refers_to_regno_p (REGNO (operands[2]) + i,
9372 REGNO (operands[2]) + i + 1, operands[1], 0))
9376 xop[0] = GEN_INT (4 * (words-1));
9377 xop[1] = operands[1];
9378 xop[2] = operands[2];
9379 output_asm_insn ("{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,%0(%1)", xop);
9384 xop[0] = GEN_INT (4 * (words-1));
9385 xop[1] = operands[1];
9386 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
9387 output_asm_insn ("{cal %1,4(%1)|addi %1,%1,4}\n\t{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,-4(%1)", xop);
9392 for (j = 0; j < words; j++)
9395 xop[0] = GEN_INT (j * 4);
9396 xop[1] = operands[1];
9397 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + j);
9398 output_asm_insn ("{l|lwz} %2,%0(%1)", xop);
9400 xop[0] = GEN_INT (i * 4);
9401 xop[1] = operands[1];
9402 output_asm_insn ("{l|lwz} %1,%0(%1)", xop);
9407 return "{lsi|lswi} %2,%1,%N0";
9411 /* A validation routine: say whether CODE, a condition code, and MODE
9412 match. The other alternatives either don't make sense or should
9413 never be generated. */
9416 validate_condition_mode (enum rtx_code code, enum machine_mode mode)
9418 gcc_assert ((GET_RTX_CLASS (code) == RTX_COMPARE
9419 || GET_RTX_CLASS (code) == RTX_COMM_COMPARE)
9420 && GET_MODE_CLASS (mode) == MODE_CC);
9422 /* These don't make sense. */
9423 gcc_assert ((code != GT && code != LT && code != GE && code != LE)
9424 || mode != CCUNSmode);
9426 gcc_assert ((code != GTU && code != LTU && code != GEU && code != LEU)
9427 || mode == CCUNSmode);
9429 gcc_assert (mode == CCFPmode
9430 || (code != ORDERED && code != UNORDERED
9431 && code != UNEQ && code != LTGT
9432 && code != UNGT && code != UNLT
9433 && code != UNGE && code != UNLE));
9435 /* These should never be generated except for
9436 flag_finite_math_only. */
9437 gcc_assert (mode != CCFPmode
9438 || flag_finite_math_only
9439 || (code != LE && code != GE
9440 && code != UNEQ && code != LTGT
9441 && code != UNGT && code != UNLT));
9443 /* These are invalid; the information is not there. */
9444 gcc_assert (mode != CCEQmode || code == EQ || code == NE);
9448 /* Return 1 if ANDOP is a mask that has no bits on that are not in the
9449 mask required to convert the result of a rotate insn into a shift
9450 left insn of SHIFTOP bits. Both are known to be SImode CONST_INT. */
9453 includes_lshift_p (rtx shiftop, rtx andop)
9455 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
9457 shift_mask <<= INTVAL (shiftop);
9459 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
9462 /* Similar, but for right shift. */
9465 includes_rshift_p (rtx shiftop, rtx andop)
9467 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
9469 shift_mask >>= INTVAL (shiftop);
9471 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
9474 /* Return 1 if ANDOP is a mask suitable for use with an rldic insn
9475 to perform a left shift. It must have exactly SHIFTOP least
9476 significant 0's, then one or more 1's, then zero or more 0's. */
9479 includes_rldic_lshift_p (rtx shiftop, rtx andop)
9481 if (GET_CODE (andop) == CONST_INT)
9483 HOST_WIDE_INT c, lsb, shift_mask;
9486 if (c == 0 || c == ~0)
9490 shift_mask <<= INTVAL (shiftop);
9492 /* Find the least significant one bit. */
9495 /* It must coincide with the LSB of the shift mask. */
9496 if (-lsb != shift_mask)
9499 /* Invert to look for the next transition (if any). */
9502 /* Remove the low group of ones (originally low group of zeros). */
9505 /* Again find the lsb, and check we have all 1's above. */
9509 else if (GET_CODE (andop) == CONST_DOUBLE
9510 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
9512 HOST_WIDE_INT low, high, lsb;
9513 HOST_WIDE_INT shift_mask_low, shift_mask_high;
9515 low = CONST_DOUBLE_LOW (andop);
9516 if (HOST_BITS_PER_WIDE_INT < 64)
9517 high = CONST_DOUBLE_HIGH (andop);
9519 if ((low == 0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == 0))
9520 || (low == ~0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0)))
9523 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
9525 shift_mask_high = ~0;
9526 if (INTVAL (shiftop) > 32)
9527 shift_mask_high <<= INTVAL (shiftop) - 32;
9531 if (-lsb != shift_mask_high || INTVAL (shiftop) < 32)
9538 return high == -lsb;
9541 shift_mask_low = ~0;
9542 shift_mask_low <<= INTVAL (shiftop);
9546 if (-lsb != shift_mask_low)
9549 if (HOST_BITS_PER_WIDE_INT < 64)
9554 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
9557 return high == -lsb;
9561 return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
9567 /* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
9568 to perform a left shift. It must have SHIFTOP or more least
9569 significant 0's, with the remainder of the word 1's. */
9572 includes_rldicr_lshift_p (rtx shiftop, rtx andop)
9574 if (GET_CODE (andop) == CONST_INT)
9576 HOST_WIDE_INT c, lsb, shift_mask;
9579 shift_mask <<= INTVAL (shiftop);
9582 /* Find the least significant one bit. */
9585 /* It must be covered by the shift mask.
9586 This test also rejects c == 0. */
9587 if ((lsb & shift_mask) == 0)
9590 /* Check we have all 1's above the transition, and reject all 1's. */
9591 return c == -lsb && lsb != 1;
9593 else if (GET_CODE (andop) == CONST_DOUBLE
9594 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
9596 HOST_WIDE_INT low, lsb, shift_mask_low;
9598 low = CONST_DOUBLE_LOW (andop);
9600 if (HOST_BITS_PER_WIDE_INT < 64)
9602 HOST_WIDE_INT high, shift_mask_high;
9604 high = CONST_DOUBLE_HIGH (andop);
9608 shift_mask_high = ~0;
9609 if (INTVAL (shiftop) > 32)
9610 shift_mask_high <<= INTVAL (shiftop) - 32;
9614 if ((lsb & shift_mask_high) == 0)
9617 return high == -lsb;
9623 shift_mask_low = ~0;
9624 shift_mask_low <<= INTVAL (shiftop);
9628 if ((lsb & shift_mask_low) == 0)
9631 return low == -lsb && lsb != 1;
9637 /* Return 1 if operands will generate a valid arguments to rlwimi
9638 instruction for insert with right shift in 64-bit mode. The mask may
9639 not start on the first bit or stop on the last bit because wrap-around
9640 effects of instruction do not correspond to semantics of RTL insn. */
9643 insvdi_rshift_rlwimi_p (rtx sizeop, rtx startop, rtx shiftop)
9645 if (INTVAL (startop) < 64
9646 && INTVAL (startop) > 32
9647 && (INTVAL (sizeop) + INTVAL (startop) < 64)
9648 && (INTVAL (sizeop) + INTVAL (startop) > 33)
9649 && (INTVAL (sizeop) + INTVAL (startop) + INTVAL (shiftop) < 96)
9650 && (INTVAL (sizeop) + INTVAL (startop) + INTVAL (shiftop) >= 64)
9651 && (64 - (INTVAL (shiftop) & 63)) >= INTVAL (sizeop))
9657 /* Return 1 if REGNO (reg1) == REGNO (reg2) - 1 making them candidates
9658 for lfq and stfq insns iff the registers are hard registers. */
9661 registers_ok_for_quad_peep (rtx reg1, rtx reg2)
9663 /* We might have been passed a SUBREG. */
9664 if (GET_CODE (reg1) != REG || GET_CODE (reg2) != REG)
9667 /* We might have been passed non floating point registers. */
9668 if (!FP_REGNO_P (REGNO (reg1))
9669 || !FP_REGNO_P (REGNO (reg2)))
9672 return (REGNO (reg1) == REGNO (reg2) - 1);
9675 /* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
9676 addr1 and addr2 must be in consecutive memory locations
9677 (addr2 == addr1 + 8). */
9680 mems_ok_for_quad_peep (rtx mem1, rtx mem2)
9683 unsigned int reg1, reg2;
9684 int offset1, offset2;
9686 /* The mems cannot be volatile. */
9687 if (MEM_VOLATILE_P (mem1) || MEM_VOLATILE_P (mem2))
9690 addr1 = XEXP (mem1, 0);
9691 addr2 = XEXP (mem2, 0);
9693 /* Extract an offset (if used) from the first addr. */
9694 if (GET_CODE (addr1) == PLUS)
9696 /* If not a REG, return zero. */
9697 if (GET_CODE (XEXP (addr1, 0)) != REG)
9701 reg1 = REGNO (XEXP (addr1, 0));
9702 /* The offset must be constant! */
9703 if (GET_CODE (XEXP (addr1, 1)) != CONST_INT)
9705 offset1 = INTVAL (XEXP (addr1, 1));
9708 else if (GET_CODE (addr1) != REG)
9712 reg1 = REGNO (addr1);
9713 /* This was a simple (mem (reg)) expression. Offset is 0. */
9717 /* And now for the second addr. */
9718 if (GET_CODE (addr2) == PLUS)
9720 /* If not a REG, return zero. */
9721 if (GET_CODE (XEXP (addr2, 0)) != REG)
9725 reg2 = REGNO (XEXP (addr2, 0));
9726 /* The offset must be constant. */
9727 if (GET_CODE (XEXP (addr2, 1)) != CONST_INT)
9729 offset2 = INTVAL (XEXP (addr2, 1));
9732 else if (GET_CODE (addr2) != REG)
9736 reg2 = REGNO (addr2);
9737 /* This was a simple (mem (reg)) expression. Offset is 0. */
9741 /* Both of these must have the same base register. */
9745 /* The offset for the second addr must be 8 more than the first addr. */
9746 if (offset2 != offset1 + 8)
9749 /* All the tests passed. addr1 and addr2 are valid for lfq or stfq
9754 /* Return the register class of a scratch register needed to copy IN into
9755 or out of a register in CLASS in MODE. If it can be done directly,
9756 NO_REGS is returned. */
9759 rs6000_secondary_reload_class (enum reg_class class,
9760 enum machine_mode mode ATTRIBUTE_UNUSED,
9765 if (TARGET_ELF || (DEFAULT_ABI == ABI_DARWIN
9767 && MACHOPIC_INDIRECT
9771 /* We cannot copy a symbolic operand directly into anything
9772 other than BASE_REGS for TARGET_ELF. So indicate that a
9773 register from BASE_REGS is needed as an intermediate
9776 On Darwin, pic addresses require a load from memory, which
9777 needs a base register. */
9778 if (class != BASE_REGS
9779 && (GET_CODE (in) == SYMBOL_REF
9780 || GET_CODE (in) == HIGH
9781 || GET_CODE (in) == LABEL_REF
9782 || GET_CODE (in) == CONST))
9786 if (GET_CODE (in) == REG)
9789 if (regno >= FIRST_PSEUDO_REGISTER)
9791 regno = true_regnum (in);
9792 if (regno >= FIRST_PSEUDO_REGISTER)
9796 else if (GET_CODE (in) == SUBREG)
9798 regno = true_regnum (in);
9799 if (regno >= FIRST_PSEUDO_REGISTER)
9805 /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
9807 if (class == GENERAL_REGS || class == BASE_REGS
9808 || (regno >= 0 && INT_REGNO_P (regno)))
9811 /* Constants, memory, and FP registers can go into FP registers. */
9812 if ((regno == -1 || FP_REGNO_P (regno))
9813 && (class == FLOAT_REGS || class == NON_SPECIAL_REGS))
9816 /* Memory, and AltiVec registers can go into AltiVec registers. */
9817 if ((regno == -1 || ALTIVEC_REGNO_P (regno))
9818 && class == ALTIVEC_REGS)
9821 /* We can copy among the CR registers. */
9822 if ((class == CR_REGS || class == CR0_REGS)
9823 && regno >= 0 && CR_REGNO_P (regno))
9826 /* Otherwise, we need GENERAL_REGS. */
9827 return GENERAL_REGS;
9830 /* Given a comparison operation, return the bit number in CCR to test. We
9831 know this is a valid comparison.
9833 SCC_P is 1 if this is for an scc. That means that %D will have been
9834 used instead of %C, so the bits will be in different places.
9836 Return -1 if OP isn't a valid comparison for some reason. */
9839 ccr_bit (rtx op, int scc_p)
9841 enum rtx_code code = GET_CODE (op);
9842 enum machine_mode cc_mode;
9847 if (!COMPARISON_P (op))
9852 gcc_assert (GET_CODE (reg) == REG && CR_REGNO_P (REGNO (reg)));
9854 cc_mode = GET_MODE (reg);
9855 cc_regnum = REGNO (reg);
9856 base_bit = 4 * (cc_regnum - CR0_REGNO);
9858 validate_condition_mode (code, cc_mode);
9860 /* When generating a sCOND operation, only positive conditions are
9863 || code == EQ || code == GT || code == LT || code == UNORDERED
9864 || code == GTU || code == LTU);
9869 return scc_p ? base_bit + 3 : base_bit + 2;
9871 return base_bit + 2;
9872 case GT: case GTU: case UNLE:
9873 return base_bit + 1;
9874 case LT: case LTU: case UNGE:
9876 case ORDERED: case UNORDERED:
9877 return base_bit + 3;
9880 /* If scc, we will have done a cror to put the bit in the
9881 unordered position. So test that bit. For integer, this is ! LT
9882 unless this is an scc insn. */
9883 return scc_p ? base_bit + 3 : base_bit;
9886 return scc_p ? base_bit + 3 : base_bit + 1;
9893 /* Return the GOT register. */
9896 rs6000_got_register (rtx value ATTRIBUTE_UNUSED)
9898 /* The second flow pass currently (June 1999) can't update
9899 regs_ever_live without disturbing other parts of the compiler, so
9900 update it here to make the prolog/epilogue code happy. */
9901 if (no_new_pseudos && ! regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM])
9902 regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
9904 current_function_uses_pic_offset_table = 1;
9906 return pic_offset_table_rtx;
9909 /* Function to init struct machine_function.
9910 This will be called, via a pointer variable,
9911 from push_function_context. */
9913 static struct machine_function *
9914 rs6000_init_machine_status (void)
9916 return ggc_alloc_cleared (sizeof (machine_function));
9919 /* These macros test for integers and extract the low-order bits. */
9921 ((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE) \
9922 && GET_MODE (X) == VOIDmode)
9924 #define INT_LOWPART(X) \
9925 (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
9931 unsigned long val = INT_LOWPART (op);
9933 /* If the high bit is zero, the value is the first 1 bit we find
9935 if ((val & 0x80000000) == 0)
9937 gcc_assert (val & 0xffffffff);
9940 while (((val <<= 1) & 0x80000000) == 0)
9945 /* If the high bit is set and the low bit is not, or the mask is all
9946 1's, the value is zero. */
9947 if ((val & 1) == 0 || (val & 0xffffffff) == 0xffffffff)
9950 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
9953 while (((val >>= 1) & 1) != 0)
9963 unsigned long val = INT_LOWPART (op);
9965 /* If the low bit is zero, the value is the first 1 bit we find from
9969 gcc_assert (val & 0xffffffff);
9972 while (((val >>= 1) & 1) == 0)
9978 /* If the low bit is set and the high bit is not, or the mask is all
9979 1's, the value is 31. */
9980 if ((val & 0x80000000) == 0 || (val & 0xffffffff) == 0xffffffff)
9983 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
9986 while (((val <<= 1) & 0x80000000) != 0)
9992 /* Locate some local-dynamic symbol still in use by this function
9993 so that we can print its name in some tls_ld pattern. */
9996 rs6000_get_some_local_dynamic_name (void)
10000 if (cfun->machine->some_ld_name)
10001 return cfun->machine->some_ld_name;
10003 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
10005 && for_each_rtx (&PATTERN (insn),
10006 rs6000_get_some_local_dynamic_name_1, 0))
10007 return cfun->machine->some_ld_name;
10009 gcc_unreachable ();
10012 /* Helper function for rs6000_get_some_local_dynamic_name. */
10015 rs6000_get_some_local_dynamic_name_1 (rtx *px, void *data ATTRIBUTE_UNUSED)
10019 if (GET_CODE (x) == SYMBOL_REF)
10021 const char *str = XSTR (x, 0);
10022 if (SYMBOL_REF_TLS_MODEL (x) == TLS_MODEL_LOCAL_DYNAMIC)
10024 cfun->machine->some_ld_name = str;
10032 /* Write out a function code label. */
10035 rs6000_output_function_entry (FILE *file, const char *fname)
10037 if (fname[0] != '.')
10039 switch (DEFAULT_ABI)
10042 gcc_unreachable ();
10048 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "L.");
10057 RS6000_OUTPUT_BASENAME (file, fname);
10059 assemble_name (file, fname);
10062 /* Print an operand. Recognize special options, documented below. */
10065 #define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
10066 #define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
10068 #define SMALL_DATA_RELOC "sda21"
10069 #define SMALL_DATA_REG 0
10073 print_operand (FILE *file, rtx x, int code)
10077 unsigned HOST_WIDE_INT uval;
10082 /* Write out an instruction after the call which may be replaced
10083 with glue code by the loader. This depends on the AIX version. */
10084 asm_fprintf (file, RS6000_CALL_GLUE);
10087 /* %a is output_address. */
10090 /* If X is a constant integer whose low-order 5 bits are zero,
10091 write 'l'. Otherwise, write 'r'. This is a kludge to fix a bug
10092 in the AIX assembler where "sri" with a zero shift count
10093 writes a trash instruction. */
10094 if (GET_CODE (x) == CONST_INT && (INTVAL (x) & 31) == 0)
10101 /* If constant, low-order 16 bits of constant, unsigned.
10102 Otherwise, write normally. */
10104 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 0xffff);
10106 print_operand (file, x, 0);
10110 /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
10111 for 64-bit mask direction. */
10112 putc (((INT_LOWPART (x) & 1) == 0 ? 'r' : 'l'), file);
10115 /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
10119 /* X is a CR register. Print the number of the GT bit of the CR. */
10120 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
10121 output_operand_lossage ("invalid %%E value");
10123 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 1);
10127 /* Like 'J' but get to the EQ bit. */
10128 gcc_assert (GET_CODE (x) == REG);
10130 /* Bit 1 is EQ bit. */
10131 i = 4 * (REGNO (x) - CR0_REGNO) + 2;
10133 fprintf (file, "%d", i);
10137 /* X is a CR register. Print the number of the EQ bit of the CR */
10138 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
10139 output_operand_lossage ("invalid %%E value");
10141 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 2);
10145 /* X is a CR register. Print the shift count needed to move it
10146 to the high-order four bits. */
10147 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
10148 output_operand_lossage ("invalid %%f value");
10150 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO));
10154 /* Similar, but print the count for the rotate in the opposite
10156 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
10157 output_operand_lossage ("invalid %%F value");
10159 fprintf (file, "%d", 32 - 4 * (REGNO (x) - CR0_REGNO));
10163 /* X is a constant integer. If it is negative, print "m",
10164 otherwise print "z". This is to make an aze or ame insn. */
10165 if (GET_CODE (x) != CONST_INT)
10166 output_operand_lossage ("invalid %%G value");
10167 else if (INTVAL (x) >= 0)
10174 /* If constant, output low-order five bits. Otherwise, write
10177 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 31);
10179 print_operand (file, x, 0);
10183 /* If constant, output low-order six bits. Otherwise, write
10186 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 63);
10188 print_operand (file, x, 0);
10192 /* Print `i' if this is a constant, else nothing. */
10198 /* Write the bit number in CCR for jump. */
10199 i = ccr_bit (x, 0);
10201 output_operand_lossage ("invalid %%j code");
10203 fprintf (file, "%d", i);
10207 /* Similar, but add one for shift count in rlinm for scc and pass
10208 scc flag to `ccr_bit'. */
10209 i = ccr_bit (x, 1);
10211 output_operand_lossage ("invalid %%J code");
10213 /* If we want bit 31, write a shift count of zero, not 32. */
10214 fprintf (file, "%d", i == 31 ? 0 : i + 1);
10218 /* X must be a constant. Write the 1's complement of the
10221 output_operand_lossage ("invalid %%k value");
10223 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~ INT_LOWPART (x));
10227 /* X must be a symbolic constant on ELF. Write an
10228 expression suitable for an 'addi' that adds in the low 16
10229 bits of the MEM. */
10230 if (GET_CODE (x) != CONST)
10232 print_operand_address (file, x);
10233 fputs ("@l", file);
10237 if (GET_CODE (XEXP (x, 0)) != PLUS
10238 || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
10239 && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
10240 || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
10241 output_operand_lossage ("invalid %%K value");
10242 print_operand_address (file, XEXP (XEXP (x, 0), 0));
10243 fputs ("@l", file);
10244 /* For GNU as, there must be a non-alphanumeric character
10245 between 'l' and the number. The '-' is added by
10246 print_operand() already. */
10247 if (INTVAL (XEXP (XEXP (x, 0), 1)) >= 0)
10249 print_operand (file, XEXP (XEXP (x, 0), 1), 0);
10253 /* %l is output_asm_label. */
10256 /* Write second word of DImode or DFmode reference. Works on register
10257 or non-indexed memory only. */
10258 if (GET_CODE (x) == REG)
10259 fputs (reg_names[REGNO (x) + 1], file);
10260 else if (GET_CODE (x) == MEM)
10262 /* Handle possible auto-increment. Since it is pre-increment and
10263 we have already done it, we can just use an offset of word. */
10264 if (GET_CODE (XEXP (x, 0)) == PRE_INC
10265 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
10266 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
10269 output_address (XEXP (adjust_address_nv (x, SImode,
10273 if (small_data_operand (x, GET_MODE (x)))
10274 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
10275 reg_names[SMALL_DATA_REG]);
10280 /* MB value for a mask operand. */
10281 if (! mask_operand (x, SImode))
10282 output_operand_lossage ("invalid %%m value");
10284 fprintf (file, "%d", extract_MB (x));
10288 /* ME value for a mask operand. */
10289 if (! mask_operand (x, SImode))
10290 output_operand_lossage ("invalid %%M value");
10292 fprintf (file, "%d", extract_ME (x));
10295 /* %n outputs the negative of its operand. */
10298 /* Write the number of elements in the vector times 4. */
10299 if (GET_CODE (x) != PARALLEL)
10300 output_operand_lossage ("invalid %%N value");
10302 fprintf (file, "%d", XVECLEN (x, 0) * 4);
10306 /* Similar, but subtract 1 first. */
10307 if (GET_CODE (x) != PARALLEL)
10308 output_operand_lossage ("invalid %%O value");
10310 fprintf (file, "%d", (XVECLEN (x, 0) - 1) * 4);
10314 /* X is a CONST_INT that is a power of two. Output the logarithm. */
10316 || INT_LOWPART (x) < 0
10317 || (i = exact_log2 (INT_LOWPART (x))) < 0)
10318 output_operand_lossage ("invalid %%p value");
10320 fprintf (file, "%d", i);
10324 /* The operand must be an indirect memory reference. The result
10325 is the register name. */
10326 if (GET_CODE (x) != MEM || GET_CODE (XEXP (x, 0)) != REG
10327 || REGNO (XEXP (x, 0)) >= 32)
10328 output_operand_lossage ("invalid %%P value");
10330 fputs (reg_names[REGNO (XEXP (x, 0))], file);
10334 /* This outputs the logical code corresponding to a boolean
10335 expression. The expression may have one or both operands
10336 negated (if one, only the first one). For condition register
10337 logical operations, it will also treat the negated
10338 CR codes as NOTs, but not handle NOTs of them. */
10340 const char *const *t = 0;
10342 enum rtx_code code = GET_CODE (x);
10343 static const char * const tbl[3][3] = {
10344 { "and", "andc", "nor" },
10345 { "or", "orc", "nand" },
10346 { "xor", "eqv", "xor" } };
10350 else if (code == IOR)
10352 else if (code == XOR)
10355 output_operand_lossage ("invalid %%q value");
10357 if (GET_CODE (XEXP (x, 0)) != NOT)
10361 if (GET_CODE (XEXP (x, 1)) == NOT)
10379 /* X is a CR register. Print the mask for `mtcrf'. */
10380 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
10381 output_operand_lossage ("invalid %%R value");
10383 fprintf (file, "%d", 128 >> (REGNO (x) - CR0_REGNO));
10387 /* Low 5 bits of 32 - value */
10389 output_operand_lossage ("invalid %%s value");
10391 fprintf (file, HOST_WIDE_INT_PRINT_DEC, (32 - INT_LOWPART (x)) & 31);
10395 /* PowerPC64 mask position. All 0's is excluded.
10396 CONST_INT 32-bit mask is considered sign-extended so any
10397 transition must occur within the CONST_INT, not on the boundary. */
10398 if (! mask64_operand (x, DImode))
10399 output_operand_lossage ("invalid %%S value");
10401 uval = INT_LOWPART (x);
10403 if (uval & 1) /* Clear Left */
10405 #if HOST_BITS_PER_WIDE_INT > 64
10406 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
10410 else /* Clear Right */
10413 #if HOST_BITS_PER_WIDE_INT > 64
10414 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
10420 gcc_assert (i >= 0);
10421 fprintf (file, "%d", i);
10425 /* Like 'J' but get to the OVERFLOW/UNORDERED bit. */
10426 gcc_assert (GET_CODE (x) == REG && GET_MODE (x) == CCmode);
10428 /* Bit 3 is OV bit. */
10429 i = 4 * (REGNO (x) - CR0_REGNO) + 3;
10431 /* If we want bit 31, write a shift count of zero, not 32. */
10432 fprintf (file, "%d", i == 31 ? 0 : i + 1);
10436 /* Print the symbolic name of a branch target register. */
10437 if (GET_CODE (x) != REG || (REGNO (x) != LINK_REGISTER_REGNUM
10438 && REGNO (x) != COUNT_REGISTER_REGNUM))
10439 output_operand_lossage ("invalid %%T value");
10440 else if (REGNO (x) == LINK_REGISTER_REGNUM)
10441 fputs (TARGET_NEW_MNEMONICS ? "lr" : "r", file);
10443 fputs ("ctr", file);
10447 /* High-order 16 bits of constant for use in unsigned operand. */
10449 output_operand_lossage ("invalid %%u value");
10451 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
10452 (INT_LOWPART (x) >> 16) & 0xffff);
10456 /* High-order 16 bits of constant for use in signed operand. */
10458 output_operand_lossage ("invalid %%v value");
10460 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
10461 (INT_LOWPART (x) >> 16) & 0xffff);
10465 /* Print `u' if this has an auto-increment or auto-decrement. */
10466 if (GET_CODE (x) == MEM
10467 && (GET_CODE (XEXP (x, 0)) == PRE_INC
10468 || GET_CODE (XEXP (x, 0)) == PRE_DEC))
10473 /* Print the trap code for this operand. */
10474 switch (GET_CODE (x))
10477 fputs ("eq", file); /* 4 */
10480 fputs ("ne", file); /* 24 */
10483 fputs ("lt", file); /* 16 */
10486 fputs ("le", file); /* 20 */
10489 fputs ("gt", file); /* 8 */
10492 fputs ("ge", file); /* 12 */
10495 fputs ("llt", file); /* 2 */
10498 fputs ("lle", file); /* 6 */
10501 fputs ("lgt", file); /* 1 */
10504 fputs ("lge", file); /* 5 */
10507 gcc_unreachable ();
10512 /* If constant, low-order 16 bits of constant, signed. Otherwise, write
10515 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
10516 ((INT_LOWPART (x) & 0xffff) ^ 0x8000) - 0x8000);
10518 print_operand (file, x, 0);
10522 /* MB value for a PowerPC64 rldic operand. */
10523 val = (GET_CODE (x) == CONST_INT
10524 ? INTVAL (x) : CONST_DOUBLE_HIGH (x));
10529 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
10530 if ((val <<= 1) < 0)
10533 #if HOST_BITS_PER_WIDE_INT == 32
10534 if (GET_CODE (x) == CONST_INT && i >= 0)
10535 i += 32; /* zero-extend high-part was all 0's */
10536 else if (GET_CODE (x) == CONST_DOUBLE && i == 32)
10538 val = CONST_DOUBLE_LOW (x);
10544 for ( ; i < 64; i++)
10545 if ((val <<= 1) < 0)
10550 fprintf (file, "%d", i + 1);
10554 if (GET_CODE (x) == MEM
10555 && legitimate_indexed_address_p (XEXP (x, 0), 0))
10560 /* Like 'L', for third word of TImode */
10561 if (GET_CODE (x) == REG)
10562 fputs (reg_names[REGNO (x) + 2], file);
10563 else if (GET_CODE (x) == MEM)
10565 if (GET_CODE (XEXP (x, 0)) == PRE_INC
10566 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
10567 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
10569 output_address (XEXP (adjust_address_nv (x, SImode, 8), 0));
10570 if (small_data_operand (x, GET_MODE (x)))
10571 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
10572 reg_names[SMALL_DATA_REG]);
10577 /* X is a SYMBOL_REF. Write out the name preceded by a
10578 period and without any trailing data in brackets. Used for function
10579 names. If we are configured for System V (or the embedded ABI) on
10580 the PowerPC, do not emit the period, since those systems do not use
10581 TOCs and the like. */
10582 gcc_assert (GET_CODE (x) == SYMBOL_REF);
10584 /* Mark the decl as referenced so that cgraph will output the
10586 if (SYMBOL_REF_DECL (x))
10587 mark_decl_referenced (SYMBOL_REF_DECL (x));
10589 /* For macho, check to see if we need a stub. */
10592 const char *name = XSTR (x, 0);
10594 if (MACHOPIC_INDIRECT
10595 && machopic_classify_symbol (x) == MACHOPIC_UNDEFINED_FUNCTION)
10596 name = machopic_indirection_name (x, /*stub_p=*/true);
10598 assemble_name (file, name);
10600 else if (!DOT_SYMBOLS)
10601 assemble_name (file, XSTR (x, 0));
10603 rs6000_output_function_entry (file, XSTR (x, 0));
10607 /* Like 'L', for last word of TImode. */
10608 if (GET_CODE (x) == REG)
10609 fputs (reg_names[REGNO (x) + 3], file);
10610 else if (GET_CODE (x) == MEM)
10612 if (GET_CODE (XEXP (x, 0)) == PRE_INC
10613 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
10614 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
10616 output_address (XEXP (adjust_address_nv (x, SImode, 12), 0));
10617 if (small_data_operand (x, GET_MODE (x)))
10618 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
10619 reg_names[SMALL_DATA_REG]);
10623 /* Print AltiVec or SPE memory operand. */
10628 gcc_assert (GET_CODE (x) == MEM);
10634 /* Handle [reg]. */
10635 if (GET_CODE (tmp) == REG)
10637 fprintf (file, "0(%s)", reg_names[REGNO (tmp)]);
10640 /* Handle [reg+UIMM]. */
10641 else if (GET_CODE (tmp) == PLUS &&
10642 GET_CODE (XEXP (tmp, 1)) == CONST_INT)
10646 gcc_assert (GET_CODE (XEXP (tmp, 0)) == REG);
10648 x = INTVAL (XEXP (tmp, 1));
10649 fprintf (file, "%d(%s)", x, reg_names[REGNO (XEXP (tmp, 0))]);
10653 /* Fall through. Must be [reg+reg]. */
10656 && GET_CODE (tmp) == AND
10657 && GET_CODE (XEXP (tmp, 1)) == CONST_INT
10658 && INTVAL (XEXP (tmp, 1)) == -16)
10659 tmp = XEXP (tmp, 0);
10660 if (GET_CODE (tmp) == REG)
10661 fprintf (file, "0,%s", reg_names[REGNO (tmp)]);
10664 gcc_assert (GET_CODE (tmp) == PLUS
10665 && GET_CODE (XEXP (tmp, 1)) == REG);
10667 if (REGNO (XEXP (tmp, 0)) == 0)
10668 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 1)) ],
10669 reg_names[ REGNO (XEXP (tmp, 0)) ]);
10671 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 0)) ],
10672 reg_names[ REGNO (XEXP (tmp, 1)) ]);
10678 if (GET_CODE (x) == REG)
10679 fprintf (file, "%s", reg_names[REGNO (x)]);
10680 else if (GET_CODE (x) == MEM)
10682 /* We need to handle PRE_INC and PRE_DEC here, since we need to
10683 know the width from the mode. */
10684 if (GET_CODE (XEXP (x, 0)) == PRE_INC)
10685 fprintf (file, "%d(%s)", GET_MODE_SIZE (GET_MODE (x)),
10686 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
10687 else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
10688 fprintf (file, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x)),
10689 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
10691 output_address (XEXP (x, 0));
10694 output_addr_const (file, x);
10698 assemble_name (file, rs6000_get_some_local_dynamic_name ());
10702 output_operand_lossage ("invalid %%xn code");
10706 /* Print the address of an operand. */
10709 print_operand_address (FILE *file, rtx x)
10711 if (GET_CODE (x) == REG)
10712 fprintf (file, "0(%s)", reg_names[ REGNO (x) ]);
10713 else if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST
10714 || GET_CODE (x) == LABEL_REF)
10716 output_addr_const (file, x);
10717 if (small_data_operand (x, GET_MODE (x)))
10718 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
10719 reg_names[SMALL_DATA_REG]);
10721 gcc_assert (!TARGET_TOC);
10723 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG)
10725 if (REGNO (XEXP (x, 0)) == 0)
10726 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 1)) ],
10727 reg_names[ REGNO (XEXP (x, 0)) ]);
10729 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 0)) ],
10730 reg_names[ REGNO (XEXP (x, 1)) ]);
10732 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
10733 fprintf (file, HOST_WIDE_INT_PRINT_DEC "(%s)",
10734 INTVAL (XEXP (x, 1)), reg_names[ REGNO (XEXP (x, 0)) ]);
10736 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
10737 && CONSTANT_P (XEXP (x, 1)))
10739 output_addr_const (file, XEXP (x, 1));
10740 fprintf (file, "@l(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
10744 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
10745 && CONSTANT_P (XEXP (x, 1)))
10747 fprintf (file, "lo16(");
10748 output_addr_const (file, XEXP (x, 1));
10749 fprintf (file, ")(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
10752 else if (legitimate_constant_pool_address_p (x))
10754 if (TARGET_AIX && (!TARGET_ELF || !TARGET_MINIMAL_TOC))
10756 rtx contains_minus = XEXP (x, 1);
10760 /* Find the (minus (sym) (toc)) buried in X, and temporarily
10761 turn it into (sym) for output_addr_const. */
10762 while (GET_CODE (XEXP (contains_minus, 0)) != MINUS)
10763 contains_minus = XEXP (contains_minus, 0);
10765 minus = XEXP (contains_minus, 0);
10766 symref = XEXP (minus, 0);
10767 XEXP (contains_minus, 0) = symref;
10772 name = XSTR (symref, 0);
10773 newname = alloca (strlen (name) + sizeof ("@toc"));
10774 strcpy (newname, name);
10775 strcat (newname, "@toc");
10776 XSTR (symref, 0) = newname;
10778 output_addr_const (file, XEXP (x, 1));
10780 XSTR (symref, 0) = name;
10781 XEXP (contains_minus, 0) = minus;
10784 output_addr_const (file, XEXP (x, 1));
10786 fprintf (file, "(%s)", reg_names[REGNO (XEXP (x, 0))]);
10789 gcc_unreachable ();
10792 /* Target hook for assembling integer objects. The PowerPC version has
10793 to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
10794 is defined. It also needs to handle DI-mode objects on 64-bit
10798 rs6000_assemble_integer (rtx x, unsigned int size, int aligned_p)
10800 #ifdef RELOCATABLE_NEEDS_FIXUP
10801 /* Special handling for SI values. */
10802 if (RELOCATABLE_NEEDS_FIXUP && size == 4 && aligned_p)
10804 extern int in_toc_section (void);
10805 static int recurse = 0;
10807 /* For -mrelocatable, we mark all addresses that need to be fixed up
10808 in the .fixup section. */
10809 if (TARGET_RELOCATABLE
10810 && !in_toc_section ()
10811 && !in_text_section ()
10812 && !in_unlikely_text_section ()
10814 && GET_CODE (x) != CONST_INT
10815 && GET_CODE (x) != CONST_DOUBLE
10821 ASM_GENERATE_INTERNAL_LABEL (buf, "LCP", fixuplabelno);
10823 ASM_OUTPUT_LABEL (asm_out_file, buf);
10824 fprintf (asm_out_file, "\t.long\t(");
10825 output_addr_const (asm_out_file, x);
10826 fprintf (asm_out_file, ")@fixup\n");
10827 fprintf (asm_out_file, "\t.section\t\".fixup\",\"aw\"\n");
10828 ASM_OUTPUT_ALIGN (asm_out_file, 2);
10829 fprintf (asm_out_file, "\t.long\t");
10830 assemble_name (asm_out_file, buf);
10831 fprintf (asm_out_file, "\n\t.previous\n");
10835 /* Remove initial .'s to turn a -mcall-aixdesc function
10836 address into the address of the descriptor, not the function
10838 else if (GET_CODE (x) == SYMBOL_REF
10839 && XSTR (x, 0)[0] == '.'
10840 && DEFAULT_ABI == ABI_AIX)
10842 const char *name = XSTR (x, 0);
10843 while (*name == '.')
10846 fprintf (asm_out_file, "\t.long\t%s\n", name);
10850 #endif /* RELOCATABLE_NEEDS_FIXUP */
10851 return default_assemble_integer (x, size, aligned_p);
10854 #ifdef HAVE_GAS_HIDDEN
10855 /* Emit an assembler directive to set symbol visibility for DECL to
10856 VISIBILITY_TYPE. */
10859 rs6000_assemble_visibility (tree decl, int vis)
10861 /* Functions need to have their entry point symbol visibility set as
10862 well as their descriptor symbol visibility. */
10863 if (DEFAULT_ABI == ABI_AIX
10865 && TREE_CODE (decl) == FUNCTION_DECL)
10867 static const char * const visibility_types[] = {
10868 NULL, "internal", "hidden", "protected"
10871 const char *name, *type;
10873 name = ((* targetm.strip_name_encoding)
10874 (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl))));
10875 type = visibility_types[vis];
10877 fprintf (asm_out_file, "\t.%s\t%s\n", type, name);
10878 fprintf (asm_out_file, "\t.%s\t.%s\n", type, name);
10881 default_assemble_visibility (decl, vis);
10886 rs6000_reverse_condition (enum machine_mode mode, enum rtx_code code)
10888 /* Reversal of FP compares takes care -- an ordered compare
10889 becomes an unordered compare and vice versa. */
10890 if (mode == CCFPmode
10891 && (!flag_finite_math_only
10892 || code == UNLT || code == UNLE || code == UNGT || code == UNGE
10893 || code == UNEQ || code == LTGT))
10894 return reverse_condition_maybe_unordered (code);
10896 return reverse_condition (code);
10899 /* Generate a compare for CODE. Return a brand-new rtx that
10900 represents the result of the compare. */
10903 rs6000_generate_compare (enum rtx_code code)
10905 enum machine_mode comp_mode;
10906 rtx compare_result;
10908 if (rs6000_compare_fp_p)
10909 comp_mode = CCFPmode;
10910 else if (code == GTU || code == LTU
10911 || code == GEU || code == LEU)
10912 comp_mode = CCUNSmode;
10913 else if ((code == EQ || code == NE)
10914 && GET_CODE (rs6000_compare_op0) == SUBREG
10915 && GET_CODE (rs6000_compare_op1) == SUBREG
10916 && SUBREG_PROMOTED_UNSIGNED_P (rs6000_compare_op0)
10917 && SUBREG_PROMOTED_UNSIGNED_P (rs6000_compare_op1))
10918 /* These are unsigned values, perhaps there will be a later
10919 ordering compare that can be shared with this one.
10920 Unfortunately we cannot detect the signedness of the operands
10921 for non-subregs. */
10922 comp_mode = CCUNSmode;
10924 comp_mode = CCmode;
10926 /* First, the compare. */
10927 compare_result = gen_reg_rtx (comp_mode);
10929 /* SPE FP compare instructions on the GPRs. Yuck! */
10930 if ((TARGET_E500 && !TARGET_FPRS && TARGET_HARD_FLOAT)
10931 && rs6000_compare_fp_p)
10933 rtx cmp, or_result, compare_result2;
10934 enum machine_mode op_mode = GET_MODE (rs6000_compare_op0);
10936 if (op_mode == VOIDmode)
10937 op_mode = GET_MODE (rs6000_compare_op1);
10939 /* Note: The E500 comparison instructions set the GT bit (x +
10940 1), on success. This explains the mess. */
10944 case EQ: case UNEQ: case NE: case LTGT:
10948 cmp = flag_unsafe_math_optimizations
10949 ? gen_tstsfeq_gpr (compare_result, rs6000_compare_op0,
10950 rs6000_compare_op1)
10951 : gen_cmpsfeq_gpr (compare_result, rs6000_compare_op0,
10952 rs6000_compare_op1);
10956 cmp = flag_unsafe_math_optimizations
10957 ? gen_tstdfeq_gpr (compare_result, rs6000_compare_op0,
10958 rs6000_compare_op1)
10959 : gen_cmpdfeq_gpr (compare_result, rs6000_compare_op0,
10960 rs6000_compare_op1);
10964 gcc_unreachable ();
10968 case GT: case GTU: case UNGT: case UNGE: case GE: case GEU:
10972 cmp = flag_unsafe_math_optimizations
10973 ? gen_tstsfgt_gpr (compare_result, rs6000_compare_op0,
10974 rs6000_compare_op1)
10975 : gen_cmpsfgt_gpr (compare_result, rs6000_compare_op0,
10976 rs6000_compare_op1);
10980 cmp = flag_unsafe_math_optimizations
10981 ? gen_tstdfgt_gpr (compare_result, rs6000_compare_op0,
10982 rs6000_compare_op1)
10983 : gen_cmpdfgt_gpr (compare_result, rs6000_compare_op0,
10984 rs6000_compare_op1);
10988 gcc_unreachable ();
10992 case LT: case LTU: case UNLT: case UNLE: case LE: case LEU:
10996 cmp = flag_unsafe_math_optimizations
10997 ? gen_tstsflt_gpr (compare_result, rs6000_compare_op0,
10998 rs6000_compare_op1)
10999 : gen_cmpsflt_gpr (compare_result, rs6000_compare_op0,
11000 rs6000_compare_op1);
11004 cmp = flag_unsafe_math_optimizations
11005 ? gen_tstdflt_gpr (compare_result, rs6000_compare_op0,
11006 rs6000_compare_op1)
11007 : gen_cmpdflt_gpr (compare_result, rs6000_compare_op0,
11008 rs6000_compare_op1);
11012 gcc_unreachable ();
11016 gcc_unreachable ();
11019 /* Synthesize LE and GE from LT/GT || EQ. */
11020 if (code == LE || code == GE || code == LEU || code == GEU)
11026 case LE: code = LT; break;
11027 case GE: code = GT; break;
11028 case LEU: code = LT; break;
11029 case GEU: code = GT; break;
11030 default: gcc_unreachable ();
11033 compare_result2 = gen_reg_rtx (CCFPmode);
11039 cmp = flag_unsafe_math_optimizations
11040 ? gen_tstsfeq_gpr (compare_result2, rs6000_compare_op0,
11041 rs6000_compare_op1)
11042 : gen_cmpsfeq_gpr (compare_result2, rs6000_compare_op0,
11043 rs6000_compare_op1);
11047 cmp = flag_unsafe_math_optimizations
11048 ? gen_tstdfeq_gpr (compare_result2, rs6000_compare_op0,
11049 rs6000_compare_op1)
11050 : gen_cmpdfeq_gpr (compare_result2, rs6000_compare_op0,
11051 rs6000_compare_op1);
11055 gcc_unreachable ();
11059 /* OR them together. */
11060 or_result = gen_reg_rtx (CCFPmode);
11061 cmp = gen_e500_cr_ior_compare (or_result, compare_result,
11063 compare_result = or_result;
11068 if (code == NE || code == LTGT)
11078 /* Generate XLC-compatible TFmode compare as PARALLEL with extra
11079 CLOBBERs to match cmptf_internal2 pattern. */
11080 if (comp_mode == CCFPmode && TARGET_XL_COMPAT
11081 && GET_MODE (rs6000_compare_op0) == TFmode
11082 && (DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_DARWIN)
11083 && TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_LONG_DOUBLE_128)
11084 emit_insn (gen_rtx_PARALLEL (VOIDmode,
11086 gen_rtx_SET (VOIDmode,
11088 gen_rtx_COMPARE (comp_mode,
11089 rs6000_compare_op0,
11090 rs6000_compare_op1)),
11091 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
11092 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
11093 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
11094 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
11095 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
11096 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
11097 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
11098 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)))));
11099 else if (GET_CODE (rs6000_compare_op1) == UNSPEC
11100 && XINT (rs6000_compare_op1, 1) == UNSPEC_SP_TEST)
11102 rtx op1 = XVECEXP (rs6000_compare_op1, 0, 0);
11103 comp_mode = CCEQmode;
11104 compare_result = gen_reg_rtx (CCEQmode);
11106 emit_insn (gen_stack_protect_testdi (compare_result,
11107 rs6000_compare_op0, op1));
11109 emit_insn (gen_stack_protect_testsi (compare_result,
11110 rs6000_compare_op0, op1));
11113 emit_insn (gen_rtx_SET (VOIDmode, compare_result,
11114 gen_rtx_COMPARE (comp_mode,
11115 rs6000_compare_op0,
11116 rs6000_compare_op1)));
11119 /* Some kinds of FP comparisons need an OR operation;
11120 under flag_finite_math_only we don't bother. */
11121 if (rs6000_compare_fp_p
11122 && !flag_finite_math_only
11123 && !(TARGET_HARD_FLOAT && TARGET_E500 && !TARGET_FPRS)
11124 && (code == LE || code == GE
11125 || code == UNEQ || code == LTGT
11126 || code == UNGT || code == UNLT))
11128 enum rtx_code or1, or2;
11129 rtx or1_rtx, or2_rtx, compare2_rtx;
11130 rtx or_result = gen_reg_rtx (CCEQmode);
11134 case LE: or1 = LT; or2 = EQ; break;
11135 case GE: or1 = GT; or2 = EQ; break;
11136 case UNEQ: or1 = UNORDERED; or2 = EQ; break;
11137 case LTGT: or1 = LT; or2 = GT; break;
11138 case UNGT: or1 = UNORDERED; or2 = GT; break;
11139 case UNLT: or1 = UNORDERED; or2 = LT; break;
11140 default: gcc_unreachable ();
11142 validate_condition_mode (or1, comp_mode);
11143 validate_condition_mode (or2, comp_mode);
11144 or1_rtx = gen_rtx_fmt_ee (or1, SImode, compare_result, const0_rtx);
11145 or2_rtx = gen_rtx_fmt_ee (or2, SImode, compare_result, const0_rtx);
11146 compare2_rtx = gen_rtx_COMPARE (CCEQmode,
11147 gen_rtx_IOR (SImode, or1_rtx, or2_rtx),
11149 emit_insn (gen_rtx_SET (VOIDmode, or_result, compare2_rtx));
11151 compare_result = or_result;
11155 validate_condition_mode (code, GET_MODE (compare_result));
11157 return gen_rtx_fmt_ee (code, VOIDmode, compare_result, const0_rtx);
11161 /* Emit the RTL for an sCOND pattern. */
11164 rs6000_emit_sCOND (enum rtx_code code, rtx result)
11167 enum machine_mode op_mode;
11168 enum rtx_code cond_code;
11170 condition_rtx = rs6000_generate_compare (code);
11171 cond_code = GET_CODE (condition_rtx);
11173 if (TARGET_E500 && rs6000_compare_fp_p
11174 && !TARGET_FPRS && TARGET_HARD_FLOAT)
11178 PUT_MODE (condition_rtx, SImode);
11179 t = XEXP (condition_rtx, 0);
11181 gcc_assert (cond_code == NE || cond_code == EQ);
11183 if (cond_code == NE)
11184 emit_insn (gen_e500_flip_gt_bit (t, t));
11186 emit_insn (gen_move_from_CR_gt_bit (result, t));
11190 if (cond_code == NE
11191 || cond_code == GE || cond_code == LE
11192 || cond_code == GEU || cond_code == LEU
11193 || cond_code == ORDERED || cond_code == UNGE || cond_code == UNLE)
11195 rtx not_result = gen_reg_rtx (CCEQmode);
11196 rtx not_op, rev_cond_rtx;
11197 enum machine_mode cc_mode;
11199 cc_mode = GET_MODE (XEXP (condition_rtx, 0));
11201 rev_cond_rtx = gen_rtx_fmt_ee (rs6000_reverse_condition (cc_mode, cond_code),
11202 SImode, XEXP (condition_rtx, 0), const0_rtx);
11203 not_op = gen_rtx_COMPARE (CCEQmode, rev_cond_rtx, const0_rtx);
11204 emit_insn (gen_rtx_SET (VOIDmode, not_result, not_op));
11205 condition_rtx = gen_rtx_EQ (VOIDmode, not_result, const0_rtx);
11208 op_mode = GET_MODE (rs6000_compare_op0);
11209 if (op_mode == VOIDmode)
11210 op_mode = GET_MODE (rs6000_compare_op1);
11212 if (TARGET_POWERPC64 && (op_mode == DImode || rs6000_compare_fp_p))
11214 PUT_MODE (condition_rtx, DImode);
11215 convert_move (result, condition_rtx, 0);
11219 PUT_MODE (condition_rtx, SImode);
11220 emit_insn (gen_rtx_SET (VOIDmode, result, condition_rtx));
11224 /* Emit a branch of kind CODE to location LOC. */
11227 rs6000_emit_cbranch (enum rtx_code code, rtx loc)
11229 rtx condition_rtx, loc_ref;
11231 condition_rtx = rs6000_generate_compare (code);
11232 loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
11233 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
11234 gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
11235 loc_ref, pc_rtx)));
11238 /* Return the string to output a conditional branch to LABEL, which is
11239 the operand number of the label, or -1 if the branch is really a
11240 conditional return.
11242 OP is the conditional expression. XEXP (OP, 0) is assumed to be a
11243 condition code register and its mode specifies what kind of
11244 comparison we made.
11246 REVERSED is nonzero if we should reverse the sense of the comparison.
11248 INSN is the insn. */
11251 output_cbranch (rtx op, const char *label, int reversed, rtx insn)
11253 static char string[64];
11254 enum rtx_code code = GET_CODE (op);
11255 rtx cc_reg = XEXP (op, 0);
11256 enum machine_mode mode = GET_MODE (cc_reg);
11257 int cc_regno = REGNO (cc_reg) - CR0_REGNO;
11258 int need_longbranch = label != NULL && get_attr_length (insn) == 8;
11259 int really_reversed = reversed ^ need_longbranch;
11265 validate_condition_mode (code, mode);
11267 /* Work out which way this really branches. We could use
11268 reverse_condition_maybe_unordered here always but this
11269 makes the resulting assembler clearer. */
11270 if (really_reversed)
11272 /* Reversal of FP compares takes care -- an ordered compare
11273 becomes an unordered compare and vice versa. */
11274 if (mode == CCFPmode)
11275 code = reverse_condition_maybe_unordered (code);
11277 code = reverse_condition (code);
11280 if ((TARGET_E500 && !TARGET_FPRS && TARGET_HARD_FLOAT) && mode == CCFPmode)
11282 /* The efscmp/tst* instructions twiddle bit 2, which maps nicely
11287 /* Opposite of GT. */
11296 gcc_unreachable ();
11302 /* Not all of these are actually distinct opcodes, but
11303 we distinguish them for clarity of the resulting assembler. */
11304 case NE: case LTGT:
11305 ccode = "ne"; break;
11306 case EQ: case UNEQ:
11307 ccode = "eq"; break;
11309 ccode = "ge"; break;
11310 case GT: case GTU: case UNGT:
11311 ccode = "gt"; break;
11313 ccode = "le"; break;
11314 case LT: case LTU: case UNLT:
11315 ccode = "lt"; break;
11316 case UNORDERED: ccode = "un"; break;
11317 case ORDERED: ccode = "nu"; break;
11318 case UNGE: ccode = "nl"; break;
11319 case UNLE: ccode = "ng"; break;
11321 gcc_unreachable ();
11324 /* Maybe we have a guess as to how likely the branch is.
11325 The old mnemonics don't have a way to specify this information. */
11327 note = find_reg_note (insn, REG_BR_PROB, NULL_RTX);
11328 if (note != NULL_RTX)
11330 /* PROB is the difference from 50%. */
11331 int prob = INTVAL (XEXP (note, 0)) - REG_BR_PROB_BASE / 2;
11333 /* Only hint for highly probable/improbable branches on newer
11334 cpus as static prediction overrides processor dynamic
11335 prediction. For older cpus we may as well always hint, but
11336 assume not taken for branches that are very close to 50% as a
11337 mispredicted taken branch is more expensive than a
11338 mispredicted not-taken branch. */
11339 if (rs6000_always_hint
11340 || abs (prob) > REG_BR_PROB_BASE / 100 * 48)
11342 if (abs (prob) > REG_BR_PROB_BASE / 20
11343 && ((prob > 0) ^ need_longbranch))
11351 s += sprintf (s, "{b%sr|b%slr%s} ", ccode, ccode, pred);
11353 s += sprintf (s, "{b%s|b%s%s} ", ccode, ccode, pred);
11355 /* We need to escape any '%' characters in the reg_names string.
11356 Assume they'd only be the first character.... */
11357 if (reg_names[cc_regno + CR0_REGNO][0] == '%')
11359 s += sprintf (s, "%s", reg_names[cc_regno + CR0_REGNO]);
11363 /* If the branch distance was too far, we may have to use an
11364 unconditional branch to go the distance. */
11365 if (need_longbranch)
11366 s += sprintf (s, ",$+8\n\tb %s", label);
11368 s += sprintf (s, ",%s", label);
11374 /* Return the string to flip the GT bit on a CR. */
11376 output_e500_flip_gt_bit (rtx dst, rtx src)
11378 static char string[64];
11381 gcc_assert (GET_CODE (dst) == REG && CR_REGNO_P (REGNO (dst))
11382 && GET_CODE (src) == REG && CR_REGNO_P (REGNO (src)));
11385 a = 4 * (REGNO (dst) - CR0_REGNO) + 1;
11386 b = 4 * (REGNO (src) - CR0_REGNO) + 1;
11388 sprintf (string, "crnot %d,%d", a, b);
11392 /* Return insn index for the vector compare instruction for given CODE,
11393 and DEST_MODE, OP_MODE. Return INSN_NOT_AVAILABLE if valid insn is
11397 get_vec_cmp_insn (enum rtx_code code,
11398 enum machine_mode dest_mode,
11399 enum machine_mode op_mode)
11401 if (!TARGET_ALTIVEC)
11402 return INSN_NOT_AVAILABLE;
11407 if (dest_mode == V16QImode && op_mode == V16QImode)
11408 return UNSPEC_VCMPEQUB;
11409 if (dest_mode == V8HImode && op_mode == V8HImode)
11410 return UNSPEC_VCMPEQUH;
11411 if (dest_mode == V4SImode && op_mode == V4SImode)
11412 return UNSPEC_VCMPEQUW;
11413 if (dest_mode == V4SImode && op_mode == V4SFmode)
11414 return UNSPEC_VCMPEQFP;
11417 if (dest_mode == V4SImode && op_mode == V4SFmode)
11418 return UNSPEC_VCMPGEFP;
11420 if (dest_mode == V16QImode && op_mode == V16QImode)
11421 return UNSPEC_VCMPGTSB;
11422 if (dest_mode == V8HImode && op_mode == V8HImode)
11423 return UNSPEC_VCMPGTSH;
11424 if (dest_mode == V4SImode && op_mode == V4SImode)
11425 return UNSPEC_VCMPGTSW;
11426 if (dest_mode == V4SImode && op_mode == V4SFmode)
11427 return UNSPEC_VCMPGTFP;
11430 if (dest_mode == V16QImode && op_mode == V16QImode)
11431 return UNSPEC_VCMPGTUB;
11432 if (dest_mode == V8HImode && op_mode == V8HImode)
11433 return UNSPEC_VCMPGTUH;
11434 if (dest_mode == V4SImode && op_mode == V4SImode)
11435 return UNSPEC_VCMPGTUW;
11440 return INSN_NOT_AVAILABLE;
11443 /* Emit vector compare for operands OP0 and OP1 using code RCODE.
11444 DMODE is expected destination mode. This is a recursive function. */
11447 rs6000_emit_vector_compare (enum rtx_code rcode,
11449 enum machine_mode dmode)
11453 enum machine_mode dest_mode;
11454 enum machine_mode op_mode = GET_MODE (op1);
11456 gcc_assert (TARGET_ALTIVEC);
11457 gcc_assert (GET_MODE (op0) == GET_MODE (op1));
11459 /* Floating point vector compare instructions uses destination V4SImode.
11460 Move destination to appropriate mode later. */
11461 if (dmode == V4SFmode)
11462 dest_mode = V4SImode;
11466 mask = gen_reg_rtx (dest_mode);
11467 vec_cmp_insn = get_vec_cmp_insn (rcode, dest_mode, op_mode);
11469 if (vec_cmp_insn == INSN_NOT_AVAILABLE)
11471 bool swap_operands = false;
11472 bool try_again = false;
11477 swap_operands = true;
11482 swap_operands = true;
11486 /* Treat A != B as ~(A==B). */
11488 enum insn_code nor_code;
11489 rtx eq_rtx = rs6000_emit_vector_compare (EQ, op0, op1,
11492 nor_code = one_cmpl_optab->handlers[(int)dest_mode].insn_code;
11493 gcc_assert (nor_code != CODE_FOR_nothing);
11494 emit_insn (GEN_FCN (nor_code) (mask, eq_rtx));
11496 if (dmode != dest_mode)
11498 rtx temp = gen_reg_rtx (dest_mode);
11499 convert_move (temp, mask, 0);
11509 /* Try GT/GTU/LT/LTU OR EQ */
11512 enum insn_code ior_code;
11513 enum rtx_code new_code;
11534 gcc_unreachable ();
11537 c_rtx = rs6000_emit_vector_compare (new_code,
11538 op0, op1, dest_mode);
11539 eq_rtx = rs6000_emit_vector_compare (EQ, op0, op1,
11542 ior_code = ior_optab->handlers[(int)dest_mode].insn_code;
11543 gcc_assert (ior_code != CODE_FOR_nothing);
11544 emit_insn (GEN_FCN (ior_code) (mask, c_rtx, eq_rtx));
11545 if (dmode != dest_mode)
11547 rtx temp = gen_reg_rtx (dest_mode);
11548 convert_move (temp, mask, 0);
11555 gcc_unreachable ();
11560 vec_cmp_insn = get_vec_cmp_insn (rcode, dest_mode, op_mode);
11561 /* You only get two chances. */
11562 gcc_assert (vec_cmp_insn != INSN_NOT_AVAILABLE);
11574 emit_insn (gen_rtx_SET (VOIDmode, mask,
11575 gen_rtx_UNSPEC (dest_mode,
11576 gen_rtvec (2, op0, op1),
11578 if (dmode != dest_mode)
11580 rtx temp = gen_reg_rtx (dest_mode);
11581 convert_move (temp, mask, 0);
11587 /* Return vector select instruction for MODE. Return INSN_NOT_AVAILABLE, if
11588 valid insn doesn exist for given mode. */
11591 get_vsel_insn (enum machine_mode mode)
11596 return UNSPEC_VSEL4SI;
11599 return UNSPEC_VSEL4SF;
11602 return UNSPEC_VSEL8HI;
11605 return UNSPEC_VSEL16QI;
11608 return INSN_NOT_AVAILABLE;
11611 return INSN_NOT_AVAILABLE;
11614 /* Emit vector select insn where DEST is destination using
11615 operands OP1, OP2 and MASK. */
11618 rs6000_emit_vector_select (rtx dest, rtx op1, rtx op2, rtx mask)
11621 enum machine_mode dest_mode = GET_MODE (dest);
11622 int vsel_insn_index = get_vsel_insn (GET_MODE (dest));
11624 temp = gen_reg_rtx (dest_mode);
11626 /* For each vector element, select op1 when mask is 1 otherwise
11628 t = gen_rtx_SET (VOIDmode, temp,
11629 gen_rtx_UNSPEC (dest_mode,
11630 gen_rtvec (3, op2, op1, mask),
11633 emit_move_insn (dest, temp);
11637 /* Emit vector conditional expression.
11638 DEST is destination. OP1 and OP2 are two VEC_COND_EXPR operands.
11639 CC_OP0 and CC_OP1 are the two operands for the relation operation COND. */
11642 rs6000_emit_vector_cond_expr (rtx dest, rtx op1, rtx op2,
11643 rtx cond, rtx cc_op0, rtx cc_op1)
11645 enum machine_mode dest_mode = GET_MODE (dest);
11646 enum rtx_code rcode = GET_CODE (cond);
11649 if (!TARGET_ALTIVEC)
11652 /* Get the vector mask for the given relational operations. */
11653 mask = rs6000_emit_vector_compare (rcode, cc_op0, cc_op1, dest_mode);
11655 rs6000_emit_vector_select (dest, op1, op2, mask);
11660 /* Emit a conditional move: move TRUE_COND to DEST if OP of the
11661 operands of the last comparison is nonzero/true, FALSE_COND if it
11662 is zero/false. Return 0 if the hardware has no such operation. */
11665 rs6000_emit_cmove (rtx dest, rtx op, rtx true_cond, rtx false_cond)
11667 enum rtx_code code = GET_CODE (op);
11668 rtx op0 = rs6000_compare_op0;
11669 rtx op1 = rs6000_compare_op1;
11670 REAL_VALUE_TYPE c1;
11671 enum machine_mode compare_mode = GET_MODE (op0);
11672 enum machine_mode result_mode = GET_MODE (dest);
11674 bool is_against_zero;
11676 /* These modes should always match. */
11677 if (GET_MODE (op1) != compare_mode
11678 /* In the isel case however, we can use a compare immediate, so
11679 op1 may be a small constant. */
11680 && (!TARGET_ISEL || !short_cint_operand (op1, VOIDmode)))
11682 if (GET_MODE (true_cond) != result_mode)
11684 if (GET_MODE (false_cond) != result_mode)
11687 /* First, work out if the hardware can do this at all, or
11688 if it's too slow.... */
11689 if (! rs6000_compare_fp_p)
11692 return rs6000_emit_int_cmove (dest, op, true_cond, false_cond);
11695 else if (TARGET_E500 && TARGET_HARD_FLOAT && !TARGET_FPRS
11696 && SCALAR_FLOAT_MODE_P (compare_mode))
11699 is_against_zero = op1 == CONST0_RTX (compare_mode);
11701 /* A floating-point subtract might overflow, underflow, or produce
11702 an inexact result, thus changing the floating-point flags, so it
11703 can't be generated if we care about that. It's safe if one side
11704 of the construct is zero, since then no subtract will be
11706 if (SCALAR_FLOAT_MODE_P (compare_mode)
11707 && flag_trapping_math && ! is_against_zero)
11710 /* Eliminate half of the comparisons by switching operands, this
11711 makes the remaining code simpler. */
11712 if (code == UNLT || code == UNGT || code == UNORDERED || code == NE
11713 || code == LTGT || code == LT || code == UNLE)
11715 code = reverse_condition_maybe_unordered (code);
11717 true_cond = false_cond;
11721 /* UNEQ and LTGT take four instructions for a comparison with zero,
11722 it'll probably be faster to use a branch here too. */
11723 if (code == UNEQ && HONOR_NANS (compare_mode))
11726 if (GET_CODE (op1) == CONST_DOUBLE)
11727 REAL_VALUE_FROM_CONST_DOUBLE (c1, op1);
11729 /* We're going to try to implement comparisons by performing
11730 a subtract, then comparing against zero. Unfortunately,
11731 Inf - Inf is NaN which is not zero, and so if we don't
11732 know that the operand is finite and the comparison
11733 would treat EQ different to UNORDERED, we can't do it. */
11734 if (HONOR_INFINITIES (compare_mode)
11735 && code != GT && code != UNGE
11736 && (GET_CODE (op1) != CONST_DOUBLE || real_isinf (&c1))
11737 /* Constructs of the form (a OP b ? a : b) are safe. */
11738 && ((! rtx_equal_p (op0, false_cond) && ! rtx_equal_p (op1, false_cond))
11739 || (! rtx_equal_p (op0, true_cond)
11740 && ! rtx_equal_p (op1, true_cond))))
11743 /* At this point we know we can use fsel. */
11745 /* Reduce the comparison to a comparison against zero. */
11746 if (! is_against_zero)
11748 temp = gen_reg_rtx (compare_mode);
11749 emit_insn (gen_rtx_SET (VOIDmode, temp,
11750 gen_rtx_MINUS (compare_mode, op0, op1)));
11752 op1 = CONST0_RTX (compare_mode);
11755 /* If we don't care about NaNs we can reduce some of the comparisons
11756 down to faster ones. */
11757 if (! HONOR_NANS (compare_mode))
11763 true_cond = false_cond;
11776 /* Now, reduce everything down to a GE. */
11783 temp = gen_reg_rtx (compare_mode);
11784 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
11789 temp = gen_reg_rtx (compare_mode);
11790 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_ABS (compare_mode, op0)));
11795 temp = gen_reg_rtx (compare_mode);
11796 emit_insn (gen_rtx_SET (VOIDmode, temp,
11797 gen_rtx_NEG (compare_mode,
11798 gen_rtx_ABS (compare_mode, op0))));
11803 /* a UNGE 0 <-> (a GE 0 || -a UNLT 0) */
11804 temp = gen_reg_rtx (result_mode);
11805 emit_insn (gen_rtx_SET (VOIDmode, temp,
11806 gen_rtx_IF_THEN_ELSE (result_mode,
11807 gen_rtx_GE (VOIDmode,
11809 true_cond, false_cond)));
11810 false_cond = true_cond;
11813 temp = gen_reg_rtx (compare_mode);
11814 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
11819 /* a GT 0 <-> (a GE 0 && -a UNLT 0) */
11820 temp = gen_reg_rtx (result_mode);
11821 emit_insn (gen_rtx_SET (VOIDmode, temp,
11822 gen_rtx_IF_THEN_ELSE (result_mode,
11823 gen_rtx_GE (VOIDmode,
11825 true_cond, false_cond)));
11826 true_cond = false_cond;
11829 temp = gen_reg_rtx (compare_mode);
11830 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
11835 gcc_unreachable ();
11838 emit_insn (gen_rtx_SET (VOIDmode, dest,
11839 gen_rtx_IF_THEN_ELSE (result_mode,
11840 gen_rtx_GE (VOIDmode,
11842 true_cond, false_cond)));
11846 /* Same as above, but for ints (isel). */
11849 rs6000_emit_int_cmove (rtx dest, rtx op, rtx true_cond, rtx false_cond)
11851 rtx condition_rtx, cr;
11853 /* All isel implementations thus far are 32-bits. */
11854 if (GET_MODE (rs6000_compare_op0) != SImode)
11857 /* We still have to do the compare, because isel doesn't do a
11858 compare, it just looks at the CRx bits set by a previous compare
11860 condition_rtx = rs6000_generate_compare (GET_CODE (op));
11861 cr = XEXP (condition_rtx, 0);
11863 if (GET_MODE (cr) == CCmode)
11864 emit_insn (gen_isel_signed (dest, condition_rtx,
11865 true_cond, false_cond, cr));
11867 emit_insn (gen_isel_unsigned (dest, condition_rtx,
11868 true_cond, false_cond, cr));
11874 output_isel (rtx *operands)
11876 enum rtx_code code;
11878 code = GET_CODE (operands[1]);
11879 if (code == GE || code == GEU || code == LE || code == LEU || code == NE)
11881 PUT_CODE (operands[1], reverse_condition (code));
11882 return "isel %0,%3,%2,%j1";
11885 return "isel %0,%2,%3,%j1";
11889 rs6000_emit_minmax (rtx dest, enum rtx_code code, rtx op0, rtx op1)
11891 enum machine_mode mode = GET_MODE (op0);
11895 if (code == SMAX || code == SMIN)
11900 if (code == SMAX || code == UMAX)
11901 target = emit_conditional_move (dest, c, op0, op1, mode,
11902 op0, op1, mode, 0);
11904 target = emit_conditional_move (dest, c, op0, op1, mode,
11905 op1, op0, mode, 0);
11906 gcc_assert (target);
11907 if (target != dest)
11908 emit_move_insn (dest, target);
11911 /* Emit instructions to perform a load-reserved/store-conditional operation.
11912 The operation performed is an atomic
11913 (set M (CODE:MODE M OP))
11914 If not NULL, BEFORE is atomically set to M before the operation, and
11915 AFTER is set to M after the operation (that is, (CODE:MODE M OP)).
11916 If SYNC_P then a memory barrier is emitted before the operation.
11917 Either OP or M may be wrapped in a NOT operation. */
11920 rs6000_emit_sync (enum rtx_code code, enum machine_mode mode,
11921 rtx m, rtx op, rtx before_param, rtx after_param,
11924 enum machine_mode used_mode;
11925 rtx the_op, set_before, set_after, set_atomic, cc_scratch, before, after;
11928 HOST_WIDE_INT imask = GET_MODE_MASK (mode);
11929 rtx shift = NULL_RTX;
11932 emit_insn (gen_memory_barrier ());
11934 if (GET_CODE (m) == NOT)
11935 used_m = XEXP (m, 0);
11939 /* If this is smaller than SImode, we'll have to use SImode with
11941 if (mode == QImode || mode == HImode)
11945 if (MEM_ALIGN (used_m) >= 32)
11948 if (BYTES_BIG_ENDIAN)
11949 ishift = GET_MODE_BITSIZE (SImode) - GET_MODE_BITSIZE (mode);
11951 shift = GEN_INT (ishift);
11955 rtx addrSI, aligned_addr;
11956 int shift_mask = mode == QImode ? 0x18 : 0x10;
11958 addrSI = force_reg (SImode, gen_lowpart_common (SImode,
11959 XEXP (used_m, 0)));
11960 shift = gen_reg_rtx (SImode);
11962 emit_insn (gen_rlwinm (shift, addrSI, GEN_INT (3),
11963 GEN_INT (shift_mask)));
11964 emit_insn (gen_xorsi3 (shift, shift, GEN_INT (shift_mask)));
11966 aligned_addr = expand_binop (Pmode, and_optab,
11968 GEN_INT (-4), NULL_RTX,
11969 1, OPTAB_LIB_WIDEN);
11970 used_m = change_address (used_m, SImode, aligned_addr);
11971 set_mem_align (used_m, 32);
11972 /* It's safe to keep the old alias set of USED_M, because
11973 the operation is atomic and only affects the original
11975 if (GET_CODE (m) == NOT)
11976 m = gen_rtx_NOT (SImode, used_m);
11981 if (GET_CODE (op) == NOT)
11983 oldop = lowpart_subreg (SImode, XEXP (op, 0), mode);
11984 oldop = gen_rtx_NOT (SImode, oldop);
11987 oldop = lowpart_subreg (SImode, op, mode);
11993 newop = expand_binop (SImode, and_optab,
11994 oldop, GEN_INT (imask), NULL_RTX,
11995 1, OPTAB_LIB_WIDEN);
11996 emit_insn (gen_ashlsi3 (newop, newop, shift));
12000 newop = expand_binop (SImode, ior_optab,
12001 oldop, GEN_INT (~imask), NULL_RTX,
12002 1, OPTAB_LIB_WIDEN);
12003 emit_insn (gen_rotlsi3 (newop, newop, shift));
12011 newop = expand_binop (SImode, and_optab,
12012 oldop, GEN_INT (imask), NULL_RTX,
12013 1, OPTAB_LIB_WIDEN);
12014 emit_insn (gen_ashlsi3 (newop, newop, shift));
12016 mask = gen_reg_rtx (SImode);
12017 emit_move_insn (mask, GEN_INT (imask));
12018 emit_insn (gen_ashlsi3 (mask, mask, shift));
12021 newop = gen_rtx_PLUS (SImode, m, newop);
12023 newop = gen_rtx_MINUS (SImode, m, newop);
12024 newop = gen_rtx_AND (SImode, newop, mask);
12025 newop = gen_rtx_IOR (SImode, newop,
12026 gen_rtx_AND (SImode,
12027 gen_rtx_NOT (SImode, mask),
12033 gcc_unreachable ();
12036 if (GET_CODE (m) == NOT)
12040 mask = gen_reg_rtx (SImode);
12041 emit_move_insn (mask, GEN_INT (imask));
12042 emit_insn (gen_ashlsi3 (mask, mask, shift));
12044 xorm = gen_rtx_XOR (SImode, used_m, mask);
12045 /* Depending on the value of 'op', the XOR or the operation might
12046 be able to be simplified away. */
12047 newop = simplify_gen_binary (code, SImode, xorm, newop);
12050 used_mode = SImode;
12051 before = gen_reg_rtx (used_mode);
12052 after = gen_reg_rtx (used_mode);
12057 before = before_param;
12058 after = after_param;
12060 if (before == NULL_RTX)
12061 before = gen_reg_rtx (used_mode);
12062 if (after == NULL_RTX)
12063 after = gen_reg_rtx (used_mode);
12066 if ((code == PLUS || code == MINUS || GET_CODE (m) == NOT)
12067 && used_mode != mode)
12068 the_op = op; /* Computed above. */
12069 else if (GET_CODE (op) == NOT && GET_CODE (m) != NOT)
12070 the_op = gen_rtx_fmt_ee (code, used_mode, op, m);
12072 the_op = gen_rtx_fmt_ee (code, used_mode, m, op);
12074 set_after = gen_rtx_SET (VOIDmode, after, the_op);
12075 set_before = gen_rtx_SET (VOIDmode, before, used_m);
12076 set_atomic = gen_rtx_SET (VOIDmode, used_m,
12077 gen_rtx_UNSPEC (used_mode,
12078 gen_rtvec (1, the_op),
12080 cc_scratch = gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (CCmode));
12082 if ((code == PLUS || code == MINUS) && used_mode != mode)
12083 vec = gen_rtvec (5, set_after, set_before, set_atomic, cc_scratch,
12084 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (SImode)));
12086 vec = gen_rtvec (4, set_after, set_before, set_atomic, cc_scratch);
12087 emit_insn (gen_rtx_PARALLEL (VOIDmode, vec));
12089 /* Shift and mask the return values properly. */
12090 if (used_mode != mode && before_param)
12092 emit_insn (gen_lshrsi3 (before, before, shift));
12093 convert_move (before_param, before, 1);
12096 if (used_mode != mode && after_param)
12098 emit_insn (gen_lshrsi3 (after, after, shift));
12099 convert_move (after_param, after, 1);
12102 /* The previous sequence will end with a branch that's dependent on
12103 the conditional store, so placing an isync will ensure that no
12104 other instructions (especially, no load or store instructions)
12105 can start before the atomic operation completes. */
12107 emit_insn (gen_isync ());
12110 /* A subroutine of the atomic operation splitters. Jump to LABEL if
12111 COND is true. Mark the jump as unlikely to be taken. */
12114 emit_unlikely_jump (rtx cond, rtx label)
12116 rtx very_unlikely = GEN_INT (REG_BR_PROB_BASE / 100 - 1);
12119 x = gen_rtx_IF_THEN_ELSE (VOIDmode, cond, label, pc_rtx);
12120 x = emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx, x));
12121 REG_NOTES (x) = gen_rtx_EXPR_LIST (REG_BR_PROB, very_unlikely, NULL_RTX);
12124 /* A subroutine of the atomic operation splitters. Emit a load-locked
12125 instruction in MODE. */
12128 emit_load_locked (enum machine_mode mode, rtx reg, rtx mem)
12130 rtx (*fn) (rtx, rtx) = NULL;
12131 if (mode == SImode)
12132 fn = gen_load_locked_si;
12133 else if (mode == DImode)
12134 fn = gen_load_locked_di;
12135 emit_insn (fn (reg, mem));
12138 /* A subroutine of the atomic operation splitters. Emit a store-conditional
12139 instruction in MODE. */
12142 emit_store_conditional (enum machine_mode mode, rtx res, rtx mem, rtx val)
12144 rtx (*fn) (rtx, rtx, rtx) = NULL;
12145 if (mode == SImode)
12146 fn = gen_store_conditional_si;
12147 else if (mode == DImode)
12148 fn = gen_store_conditional_di;
12150 /* Emit sync before stwcx. to address PPC405 Erratum. */
12151 if (PPC405_ERRATUM77)
12152 emit_insn (gen_memory_barrier ());
12154 emit_insn (fn (res, mem, val));
12157 /* Expand an an atomic fetch-and-operate pattern. CODE is the binary operation
12158 to perform. MEM is the memory on which to operate. VAL is the second
12159 operand of the binary operator. BEFORE and AFTER are optional locations to
12160 return the value of MEM either before of after the operation. SCRATCH is
12161 a scratch register. */
12164 rs6000_split_atomic_op (enum rtx_code code, rtx mem, rtx val,
12165 rtx before, rtx after, rtx scratch)
12167 enum machine_mode mode = GET_MODE (mem);
12168 rtx label, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);
12170 emit_insn (gen_memory_barrier ());
12172 label = gen_label_rtx ();
12173 emit_label (label);
12174 label = gen_rtx_LABEL_REF (VOIDmode, label);
12176 if (before == NULL_RTX)
12178 emit_load_locked (mode, before, mem);
12181 x = gen_rtx_AND (mode, gen_rtx_NOT (mode, before), val);
12182 else if (code == AND)
12183 x = gen_rtx_UNSPEC (mode, gen_rtvec (2, before, val), UNSPEC_AND);
12185 x = gen_rtx_fmt_ee (code, mode, before, val);
12187 if (after != NULL_RTX)
12188 emit_insn (gen_rtx_SET (VOIDmode, after, copy_rtx (x)));
12189 emit_insn (gen_rtx_SET (VOIDmode, scratch, x));
12191 emit_store_conditional (mode, cond, mem, scratch);
12193 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
12194 emit_unlikely_jump (x, label);
12196 emit_insn (gen_isync ());
12199 /* Expand an atomic compare and swap operation. MEM is the memory on which
12200 to operate. OLDVAL is the old value to be compared. NEWVAL is the new
12201 value to be stored. SCRATCH is a scratch GPR. */
12204 rs6000_split_compare_and_swap (rtx retval, rtx mem, rtx oldval, rtx newval,
12207 enum machine_mode mode = GET_MODE (mem);
12208 rtx label1, label2, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);
12210 emit_insn (gen_memory_barrier ());
12212 label1 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
12213 label2 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
12214 emit_label (XEXP (label1, 0));
12216 emit_load_locked (mode, retval, mem);
12218 x = gen_rtx_COMPARE (CCmode, retval, oldval);
12219 emit_insn (gen_rtx_SET (VOIDmode, cond, x));
12221 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
12222 emit_unlikely_jump (x, label2);
12224 emit_move_insn (scratch, newval);
12225 emit_store_conditional (mode, cond, mem, scratch);
12227 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
12228 emit_unlikely_jump (x, label1);
12230 emit_insn (gen_isync ());
12231 emit_label (XEXP (label2, 0));
12234 /* Expand an atomic test and set operation. MEM is the memory on which
12235 to operate. VAL is the value set. SCRATCH is a scratch GPR. */
12238 rs6000_split_lock_test_and_set (rtx retval, rtx mem, rtx val, rtx scratch)
12240 enum machine_mode mode = GET_MODE (mem);
12241 rtx label, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);
12243 emit_insn (gen_memory_barrier ());
12245 label = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
12246 emit_label (XEXP (label, 0));
12248 emit_load_locked (mode, retval, mem);
12249 emit_move_insn (scratch, val);
12250 emit_store_conditional (mode, cond, mem, scratch);
12252 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
12253 emit_unlikely_jump (x, label);
12255 emit_insn (gen_isync ());
12258 /* Emit instructions to move SRC to DST. Called by splitters for
12259 multi-register moves. It will emit at most one instruction for
12260 each register that is accessed; that is, it won't emit li/lis pairs
12261 (or equivalent for 64-bit code). One of SRC or DST must be a hard
12265 rs6000_split_multireg_move (rtx dst, rtx src)
12267 /* The register number of the first register being moved. */
12269 /* The mode that is to be moved. */
12270 enum machine_mode mode;
12271 /* The mode that the move is being done in, and its size. */
12272 enum machine_mode reg_mode;
12274 /* The number of registers that will be moved. */
12277 reg = REG_P (dst) ? REGNO (dst) : REGNO (src);
12278 mode = GET_MODE (dst);
12279 nregs = hard_regno_nregs[reg][mode];
12280 if (FP_REGNO_P (reg))
12282 else if (ALTIVEC_REGNO_P (reg))
12283 reg_mode = V16QImode;
12285 reg_mode = word_mode;
12286 reg_mode_size = GET_MODE_SIZE (reg_mode);
12288 gcc_assert (reg_mode_size * nregs == GET_MODE_SIZE (mode));
12290 if (REG_P (src) && REG_P (dst) && (REGNO (src) < REGNO (dst)))
12292 /* Move register range backwards, if we might have destructive
12295 for (i = nregs - 1; i >= 0; i--)
12296 emit_insn (gen_rtx_SET (VOIDmode,
12297 simplify_gen_subreg (reg_mode, dst, mode,
12298 i * reg_mode_size),
12299 simplify_gen_subreg (reg_mode, src, mode,
12300 i * reg_mode_size)));
12306 bool used_update = false;
12308 if (MEM_P (src) && INT_REGNO_P (reg))
12312 if (GET_CODE (XEXP (src, 0)) == PRE_INC
12313 || GET_CODE (XEXP (src, 0)) == PRE_DEC)
12316 breg = XEXP (XEXP (src, 0), 0);
12317 delta_rtx = (GET_CODE (XEXP (src, 0)) == PRE_INC
12318 ? GEN_INT (GET_MODE_SIZE (GET_MODE (src)))
12319 : GEN_INT (-GET_MODE_SIZE (GET_MODE (src))));
12320 emit_insn (TARGET_32BIT
12321 ? gen_addsi3 (breg, breg, delta_rtx)
12322 : gen_adddi3 (breg, breg, delta_rtx));
12323 src = replace_equiv_address (src, breg);
12325 else if (! offsettable_memref_p (src))
12328 basereg = gen_rtx_REG (Pmode, reg);
12329 emit_insn (gen_rtx_SET (VOIDmode, basereg, XEXP (src, 0)));
12330 src = replace_equiv_address (src, basereg);
12333 breg = XEXP (src, 0);
12334 if (GET_CODE (breg) == PLUS || GET_CODE (breg) == LO_SUM)
12335 breg = XEXP (breg, 0);
12337 /* If the base register we are using to address memory is
12338 also a destination reg, then change that register last. */
12340 && REGNO (breg) >= REGNO (dst)
12341 && REGNO (breg) < REGNO (dst) + nregs)
12342 j = REGNO (breg) - REGNO (dst);
12345 if (GET_CODE (dst) == MEM && INT_REGNO_P (reg))
12349 if (GET_CODE (XEXP (dst, 0)) == PRE_INC
12350 || GET_CODE (XEXP (dst, 0)) == PRE_DEC)
12353 breg = XEXP (XEXP (dst, 0), 0);
12354 delta_rtx = (GET_CODE (XEXP (dst, 0)) == PRE_INC
12355 ? GEN_INT (GET_MODE_SIZE (GET_MODE (dst)))
12356 : GEN_INT (-GET_MODE_SIZE (GET_MODE (dst))));
12358 /* We have to update the breg before doing the store.
12359 Use store with update, if available. */
12363 rtx nsrc = simplify_gen_subreg (reg_mode, src, mode, 0);
12364 emit_insn (TARGET_32BIT
12365 ? (TARGET_POWERPC64
12366 ? gen_movdi_si_update (breg, breg, delta_rtx, nsrc)
12367 : gen_movsi_update (breg, breg, delta_rtx, nsrc))
12368 : gen_movdi_di_update (breg, breg, delta_rtx, nsrc));
12369 used_update = true;
12372 emit_insn (TARGET_32BIT
12373 ? gen_addsi3 (breg, breg, delta_rtx)
12374 : gen_adddi3 (breg, breg, delta_rtx));
12375 dst = replace_equiv_address (dst, breg);
12378 gcc_assert (offsettable_memref_p (dst));
12381 for (i = 0; i < nregs; i++)
12383 /* Calculate index to next subword. */
12388 /* If compiler already emitted move of first word by
12389 store with update, no need to do anything. */
12390 if (j == 0 && used_update)
12393 emit_insn (gen_rtx_SET (VOIDmode,
12394 simplify_gen_subreg (reg_mode, dst, mode,
12395 j * reg_mode_size),
12396 simplify_gen_subreg (reg_mode, src, mode,
12397 j * reg_mode_size)));
12403 /* This page contains routines that are used to determine what the
12404 function prologue and epilogue code will do and write them out. */
12406 /* Return the first fixed-point register that is required to be
12407 saved. 32 if none. */
12410 first_reg_to_save (void)
12414 /* Find lowest numbered live register. */
12415 for (first_reg = 13; first_reg <= 31; first_reg++)
12416 if (regs_ever_live[first_reg]
12417 && (! call_used_regs[first_reg]
12418 || (first_reg == RS6000_PIC_OFFSET_TABLE_REGNUM
12419 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
12420 || (DEFAULT_ABI == ABI_DARWIN && flag_pic)
12421 || (TARGET_TOC && TARGET_MINIMAL_TOC)))))
12426 && current_function_uses_pic_offset_table
12427 && first_reg > RS6000_PIC_OFFSET_TABLE_REGNUM)
12428 return RS6000_PIC_OFFSET_TABLE_REGNUM;
12434 /* Similar, for FP regs. */
12437 first_fp_reg_to_save (void)
12441 /* Find lowest numbered live register. */
12442 for (first_reg = 14 + 32; first_reg <= 63; first_reg++)
12443 if (regs_ever_live[first_reg])
12449 /* Similar, for AltiVec regs. */
12452 first_altivec_reg_to_save (void)
12456 /* Stack frame remains as is unless we are in AltiVec ABI. */
12457 if (! TARGET_ALTIVEC_ABI)
12458 return LAST_ALTIVEC_REGNO + 1;
12460 /* Find lowest numbered live register. */
12461 for (i = FIRST_ALTIVEC_REGNO + 20; i <= LAST_ALTIVEC_REGNO; ++i)
12462 if (regs_ever_live[i])
12468 /* Return a 32-bit mask of the AltiVec registers we need to set in
12469 VRSAVE. Bit n of the return value is 1 if Vn is live. The MSB in
12470 the 32-bit word is 0. */
12472 static unsigned int
12473 compute_vrsave_mask (void)
12475 unsigned int i, mask = 0;
12477 /* First, find out if we use _any_ altivec registers. */
12478 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
12479 if (regs_ever_live[i])
12480 mask |= ALTIVEC_REG_BIT (i);
12485 /* Next, remove the argument registers from the set. These must
12486 be in the VRSAVE mask set by the caller, so we don't need to add
12487 them in again. More importantly, the mask we compute here is
12488 used to generate CLOBBERs in the set_vrsave insn, and we do not
12489 wish the argument registers to die. */
12490 for (i = cfun->args_info.vregno - 1; i >= ALTIVEC_ARG_MIN_REG; --i)
12491 mask &= ~ALTIVEC_REG_BIT (i);
12493 /* Similarly, remove the return value from the set. */
12496 diddle_return_value (is_altivec_return_reg, &yes);
12498 mask &= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN);
12504 /* For a very restricted set of circumstances, we can cut down the
12505 size of prologues/epilogues by calling our own save/restore-the-world
12509 compute_save_world_info (rs6000_stack_t *info_ptr)
12511 info_ptr->world_save_p = 1;
12512 info_ptr->world_save_p
12513 = (WORLD_SAVE_P (info_ptr)
12514 && DEFAULT_ABI == ABI_DARWIN
12515 && ! (current_function_calls_setjmp && flag_exceptions)
12516 && info_ptr->first_fp_reg_save == FIRST_SAVED_FP_REGNO
12517 && info_ptr->first_gp_reg_save == FIRST_SAVED_GP_REGNO
12518 && info_ptr->first_altivec_reg_save == FIRST_SAVED_ALTIVEC_REGNO
12519 && info_ptr->cr_save_p);
12521 /* This will not work in conjunction with sibcalls. Make sure there
12522 are none. (This check is expensive, but seldom executed.) */
12523 if (WORLD_SAVE_P (info_ptr))
12526 for ( insn = get_last_insn_anywhere (); insn; insn = PREV_INSN (insn))
12527 if ( GET_CODE (insn) == CALL_INSN
12528 && SIBLING_CALL_P (insn))
12530 info_ptr->world_save_p = 0;
12535 if (WORLD_SAVE_P (info_ptr))
12537 /* Even if we're not touching VRsave, make sure there's room on the
12538 stack for it, if it looks like we're calling SAVE_WORLD, which
12539 will attempt to save it. */
12540 info_ptr->vrsave_size = 4;
12542 /* "Save" the VRsave register too if we're saving the world. */
12543 if (info_ptr->vrsave_mask == 0)
12544 info_ptr->vrsave_mask = compute_vrsave_mask ();
12546 /* Because the Darwin register save/restore routines only handle
12547 F14 .. F31 and V20 .. V31 as per the ABI, perform a consistency
12549 gcc_assert (info_ptr->first_fp_reg_save >= FIRST_SAVED_FP_REGNO
12550 && (info_ptr->first_altivec_reg_save
12551 >= FIRST_SAVED_ALTIVEC_REGNO));
12558 is_altivec_return_reg (rtx reg, void *xyes)
12560 bool *yes = (bool *) xyes;
12561 if (REGNO (reg) == ALTIVEC_ARG_RETURN)
12566 /* Calculate the stack information for the current function. This is
12567 complicated by having two separate calling sequences, the AIX calling
12568 sequence and the V.4 calling sequence.
12570 AIX (and Darwin/Mac OS X) stack frames look like:
12572 SP----> +---------------------------------------+
12573 | back chain to caller | 0 0
12574 +---------------------------------------+
12575 | saved CR | 4 8 (8-11)
12576 +---------------------------------------+
12578 +---------------------------------------+
12579 | reserved for compilers | 12 24
12580 +---------------------------------------+
12581 | reserved for binders | 16 32
12582 +---------------------------------------+
12583 | saved TOC pointer | 20 40
12584 +---------------------------------------+
12585 | Parameter save area (P) | 24 48
12586 +---------------------------------------+
12587 | Alloca space (A) | 24+P etc.
12588 +---------------------------------------+
12589 | Local variable space (L) | 24+P+A
12590 +---------------------------------------+
12591 | Float/int conversion temporary (X) | 24+P+A+L
12592 +---------------------------------------+
12593 | Save area for AltiVec registers (W) | 24+P+A+L+X
12594 +---------------------------------------+
12595 | AltiVec alignment padding (Y) | 24+P+A+L+X+W
12596 +---------------------------------------+
12597 | Save area for VRSAVE register (Z) | 24+P+A+L+X+W+Y
12598 +---------------------------------------+
12599 | Save area for GP registers (G) | 24+P+A+X+L+X+W+Y+Z
12600 +---------------------------------------+
12601 | Save area for FP registers (F) | 24+P+A+X+L+X+W+Y+Z+G
12602 +---------------------------------------+
12603 old SP->| back chain to caller's caller |
12604 +---------------------------------------+
12606 The required alignment for AIX configurations is two words (i.e., 8
12610 V.4 stack frames look like:
12612 SP----> +---------------------------------------+
12613 | back chain to caller | 0
12614 +---------------------------------------+
12615 | caller's saved LR | 4
12616 +---------------------------------------+
12617 | Parameter save area (P) | 8
12618 +---------------------------------------+
12619 | Alloca space (A) | 8+P
12620 +---------------------------------------+
12621 | Varargs save area (V) | 8+P+A
12622 +---------------------------------------+
12623 | Local variable space (L) | 8+P+A+V
12624 +---------------------------------------+
12625 | Float/int conversion temporary (X) | 8+P+A+V+L
12626 +---------------------------------------+
12627 | Save area for AltiVec registers (W) | 8+P+A+V+L+X
12628 +---------------------------------------+
12629 | AltiVec alignment padding (Y) | 8+P+A+V+L+X+W
12630 +---------------------------------------+
12631 | Save area for VRSAVE register (Z) | 8+P+A+V+L+X+W+Y
12632 +---------------------------------------+
12633 | SPE: area for 64-bit GP registers |
12634 +---------------------------------------+
12635 | SPE alignment padding |
12636 +---------------------------------------+
12637 | saved CR (C) | 8+P+A+V+L+X+W+Y+Z
12638 +---------------------------------------+
12639 | Save area for GP registers (G) | 8+P+A+V+L+X+W+Y+Z+C
12640 +---------------------------------------+
12641 | Save area for FP registers (F) | 8+P+A+V+L+X+W+Y+Z+C+G
12642 +---------------------------------------+
12643 old SP->| back chain to caller's caller |
12644 +---------------------------------------+
12646 The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
12647 given. (But note below and in sysv4.h that we require only 8 and
12648 may round up the size of our stack frame anyways. The historical
12649 reason is early versions of powerpc-linux which didn't properly
12650 align the stack at program startup. A happy side-effect is that
12651 -mno-eabi libraries can be used with -meabi programs.)
12653 The EABI configuration defaults to the V.4 layout. However,
12654 the stack alignment requirements may differ. If -mno-eabi is not
12655 given, the required stack alignment is 8 bytes; if -mno-eabi is
12656 given, the required alignment is 16 bytes. (But see V.4 comment
12659 #ifndef ABI_STACK_BOUNDARY
12660 #define ABI_STACK_BOUNDARY STACK_BOUNDARY
12663 static rs6000_stack_t *
12664 rs6000_stack_info (void)
12666 static rs6000_stack_t info, zero_info;
12667 rs6000_stack_t *info_ptr = &info;
12668 int reg_size = TARGET_32BIT ? 4 : 8;
12671 HOST_WIDE_INT non_fixed_size;
12673 /* Zero all fields portably. */
12678 /* Cache value so we don't rescan instruction chain over and over. */
12679 if (cfun->machine->insn_chain_scanned_p == 0)
12680 cfun->machine->insn_chain_scanned_p
12681 = spe_func_has_64bit_regs_p () + 1;
12682 info_ptr->spe_64bit_regs_used = cfun->machine->insn_chain_scanned_p - 1;
12685 /* Select which calling sequence. */
12686 info_ptr->abi = DEFAULT_ABI;
12688 /* Calculate which registers need to be saved & save area size. */
12689 info_ptr->first_gp_reg_save = first_reg_to_save ();
12690 /* Assume that we will have to save RS6000_PIC_OFFSET_TABLE_REGNUM,
12691 even if it currently looks like we won't. */
12692 if (((TARGET_TOC && TARGET_MINIMAL_TOC)
12693 || (flag_pic == 1 && DEFAULT_ABI == ABI_V4)
12694 || (flag_pic && DEFAULT_ABI == ABI_DARWIN))
12695 && info_ptr->first_gp_reg_save > RS6000_PIC_OFFSET_TABLE_REGNUM)
12696 info_ptr->gp_size = reg_size * (32 - RS6000_PIC_OFFSET_TABLE_REGNUM);
12698 info_ptr->gp_size = reg_size * (32 - info_ptr->first_gp_reg_save);
12700 /* For the SPE, we have an additional upper 32-bits on each GPR.
12701 Ideally we should save the entire 64-bits only when the upper
12702 half is used in SIMD instructions. Since we only record
12703 registers live (not the size they are used in), this proves
12704 difficult because we'd have to traverse the instruction chain at
12705 the right time, taking reload into account. This is a real pain,
12706 so we opt to save the GPRs in 64-bits always if but one register
12707 gets used in 64-bits. Otherwise, all the registers in the frame
12708 get saved in 32-bits.
12710 So... since when we save all GPRs (except the SP) in 64-bits, the
12711 traditional GP save area will be empty. */
12712 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
12713 info_ptr->gp_size = 0;
12715 info_ptr->first_fp_reg_save = first_fp_reg_to_save ();
12716 info_ptr->fp_size = 8 * (64 - info_ptr->first_fp_reg_save);
12718 info_ptr->first_altivec_reg_save = first_altivec_reg_to_save ();
12719 info_ptr->altivec_size = 16 * (LAST_ALTIVEC_REGNO + 1
12720 - info_ptr->first_altivec_reg_save);
12722 /* Does this function call anything? */
12723 info_ptr->calls_p = (! current_function_is_leaf
12724 || cfun->machine->ra_needs_full_frame);
12726 /* Determine if we need to save the link register. */
12727 if (rs6000_ra_ever_killed ()
12728 || (DEFAULT_ABI == ABI_AIX
12729 && current_function_profile
12730 && !TARGET_PROFILE_KERNEL)
12731 #ifdef TARGET_RELOCATABLE
12732 || (TARGET_RELOCATABLE && (get_pool_size () != 0))
12734 || (info_ptr->first_fp_reg_save != 64
12735 && !FP_SAVE_INLINE (info_ptr->first_fp_reg_save))
12736 || info_ptr->first_altivec_reg_save <= LAST_ALTIVEC_REGNO
12737 || (DEFAULT_ABI == ABI_V4 && current_function_calls_alloca)
12738 || info_ptr->calls_p)
12740 info_ptr->lr_save_p = 1;
12741 regs_ever_live[LINK_REGISTER_REGNUM] = 1;
12744 /* Determine if we need to save the condition code registers. */
12745 if (regs_ever_live[CR2_REGNO]
12746 || regs_ever_live[CR3_REGNO]
12747 || regs_ever_live[CR4_REGNO])
12749 info_ptr->cr_save_p = 1;
12750 if (DEFAULT_ABI == ABI_V4)
12751 info_ptr->cr_size = reg_size;
12754 /* If the current function calls __builtin_eh_return, then we need
12755 to allocate stack space for registers that will hold data for
12756 the exception handler. */
12757 if (current_function_calls_eh_return)
12760 for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM; ++i)
12763 /* SPE saves EH registers in 64-bits. */
12764 ehrd_size = i * (TARGET_SPE_ABI
12765 && info_ptr->spe_64bit_regs_used != 0
12766 ? UNITS_PER_SPE_WORD : UNITS_PER_WORD);
12771 /* Determine various sizes. */
12772 info_ptr->reg_size = reg_size;
12773 info_ptr->fixed_size = RS6000_SAVE_AREA;
12774 info_ptr->vars_size = RS6000_ALIGN (get_frame_size (), 8);
12775 info_ptr->parm_size = RS6000_ALIGN (current_function_outgoing_args_size,
12776 TARGET_ALTIVEC ? 16 : 8);
12777 if (FRAME_GROWS_DOWNWARD)
12778 info_ptr->vars_size
12779 += RS6000_ALIGN (info_ptr->fixed_size + info_ptr->vars_size
12780 + info_ptr->parm_size,
12781 ABI_STACK_BOUNDARY / BITS_PER_UNIT)
12782 - (info_ptr->fixed_size + info_ptr->vars_size
12783 + info_ptr->parm_size);
12785 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
12786 info_ptr->spe_gp_size = 8 * (32 - info_ptr->first_gp_reg_save);
12788 info_ptr->spe_gp_size = 0;
12790 if (TARGET_ALTIVEC_ABI)
12791 info_ptr->vrsave_mask = compute_vrsave_mask ();
12793 info_ptr->vrsave_mask = 0;
12795 if (TARGET_ALTIVEC_VRSAVE && info_ptr->vrsave_mask)
12796 info_ptr->vrsave_size = 4;
12798 info_ptr->vrsave_size = 0;
12800 compute_save_world_info (info_ptr);
12802 /* Calculate the offsets. */
12803 switch (DEFAULT_ABI)
12807 gcc_unreachable ();
12811 info_ptr->fp_save_offset = - info_ptr->fp_size;
12812 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
12814 if (TARGET_ALTIVEC_ABI)
12816 info_ptr->vrsave_save_offset
12817 = info_ptr->gp_save_offset - info_ptr->vrsave_size;
12819 /* Align stack so vector save area is on a quadword boundary. */
12820 if (info_ptr->altivec_size != 0)
12821 info_ptr->altivec_padding_size
12822 = 16 - (-info_ptr->vrsave_save_offset % 16);
12824 info_ptr->altivec_padding_size = 0;
12826 info_ptr->altivec_save_offset
12827 = info_ptr->vrsave_save_offset
12828 - info_ptr->altivec_padding_size
12829 - info_ptr->altivec_size;
12831 /* Adjust for AltiVec case. */
12832 info_ptr->ehrd_offset = info_ptr->altivec_save_offset - ehrd_size;
12835 info_ptr->ehrd_offset = info_ptr->gp_save_offset - ehrd_size;
12836 info_ptr->cr_save_offset = reg_size; /* first word when 64-bit. */
12837 info_ptr->lr_save_offset = 2*reg_size;
12841 info_ptr->fp_save_offset = - info_ptr->fp_size;
12842 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
12843 info_ptr->cr_save_offset = info_ptr->gp_save_offset - info_ptr->cr_size;
12845 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
12847 /* Align stack so SPE GPR save area is aligned on a
12848 double-word boundary. */
12849 if (info_ptr->spe_gp_size != 0)
12850 info_ptr->spe_padding_size
12851 = 8 - (-info_ptr->cr_save_offset % 8);
12853 info_ptr->spe_padding_size = 0;
12855 info_ptr->spe_gp_save_offset
12856 = info_ptr->cr_save_offset
12857 - info_ptr->spe_padding_size
12858 - info_ptr->spe_gp_size;
12860 /* Adjust for SPE case. */
12861 info_ptr->toc_save_offset
12862 = info_ptr->spe_gp_save_offset - info_ptr->toc_size;
12864 else if (TARGET_ALTIVEC_ABI)
12866 info_ptr->vrsave_save_offset
12867 = info_ptr->cr_save_offset - info_ptr->vrsave_size;
12869 /* Align stack so vector save area is on a quadword boundary. */
12870 if (info_ptr->altivec_size != 0)
12871 info_ptr->altivec_padding_size
12872 = 16 - (-info_ptr->vrsave_save_offset % 16);
12874 info_ptr->altivec_padding_size = 0;
12876 info_ptr->altivec_save_offset
12877 = info_ptr->vrsave_save_offset
12878 - info_ptr->altivec_padding_size
12879 - info_ptr->altivec_size;
12881 /* Adjust for AltiVec case. */
12882 info_ptr->toc_save_offset
12883 = info_ptr->altivec_save_offset - info_ptr->toc_size;
12886 info_ptr->toc_save_offset = info_ptr->cr_save_offset - info_ptr->toc_size;
12887 info_ptr->ehrd_offset = info_ptr->toc_save_offset - ehrd_size;
12888 info_ptr->lr_save_offset = reg_size;
12892 save_align = (TARGET_ALTIVEC_ABI || DEFAULT_ABI == ABI_DARWIN) ? 16 : 8;
12893 info_ptr->save_size = RS6000_ALIGN (info_ptr->fp_size
12894 + info_ptr->gp_size
12895 + info_ptr->altivec_size
12896 + info_ptr->altivec_padding_size
12897 + info_ptr->spe_gp_size
12898 + info_ptr->spe_padding_size
12900 + info_ptr->cr_size
12901 + info_ptr->lr_size
12902 + info_ptr->vrsave_size
12903 + info_ptr->toc_size,
12906 non_fixed_size = (info_ptr->vars_size
12907 + info_ptr->parm_size
12908 + info_ptr->save_size);
12910 info_ptr->total_size = RS6000_ALIGN (non_fixed_size + info_ptr->fixed_size,
12911 ABI_STACK_BOUNDARY / BITS_PER_UNIT);
12913 /* Determine if we need to allocate any stack frame:
12915 For AIX we need to push the stack if a frame pointer is needed
12916 (because the stack might be dynamically adjusted), if we are
12917 debugging, if we make calls, or if the sum of fp_save, gp_save,
12918 and local variables are more than the space needed to save all
12919 non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
12920 + 18*8 = 288 (GPR13 reserved).
12922 For V.4 we don't have the stack cushion that AIX uses, but assume
12923 that the debugger can handle stackless frames. */
12925 if (info_ptr->calls_p)
12926 info_ptr->push_p = 1;
12928 else if (DEFAULT_ABI == ABI_V4)
12929 info_ptr->push_p = non_fixed_size != 0;
12931 else if (frame_pointer_needed)
12932 info_ptr->push_p = 1;
12934 else if (TARGET_XCOFF && write_symbols != NO_DEBUG)
12935 info_ptr->push_p = 1;
12938 info_ptr->push_p = non_fixed_size > (TARGET_32BIT ? 220 : 288);
12940 /* Zero offsets if we're not saving those registers. */
12941 if (info_ptr->fp_size == 0)
12942 info_ptr->fp_save_offset = 0;
12944 if (info_ptr->gp_size == 0)
12945 info_ptr->gp_save_offset = 0;
12947 if (! TARGET_ALTIVEC_ABI || info_ptr->altivec_size == 0)
12948 info_ptr->altivec_save_offset = 0;
12950 if (! TARGET_ALTIVEC_ABI || info_ptr->vrsave_mask == 0)
12951 info_ptr->vrsave_save_offset = 0;
12953 if (! TARGET_SPE_ABI
12954 || info_ptr->spe_64bit_regs_used == 0
12955 || info_ptr->spe_gp_size == 0)
12956 info_ptr->spe_gp_save_offset = 0;
12958 if (! info_ptr->lr_save_p)
12959 info_ptr->lr_save_offset = 0;
12961 if (! info_ptr->cr_save_p)
12962 info_ptr->cr_save_offset = 0;
12964 if (! info_ptr->toc_save_p)
12965 info_ptr->toc_save_offset = 0;
12970 /* Return true if the current function uses any GPRs in 64-bit SIMD
12974 spe_func_has_64bit_regs_p (void)
12978 /* Functions that save and restore all the call-saved registers will
12979 need to save/restore the registers in 64-bits. */
12980 if (current_function_calls_eh_return
12981 || current_function_calls_setjmp
12982 || current_function_has_nonlocal_goto)
12985 insns = get_insns ();
12987 for (insn = NEXT_INSN (insns); insn != NULL_RTX; insn = NEXT_INSN (insn))
12993 /* FIXME: This should be implemented with attributes...
12995 (set_attr "spe64" "true")....then,
12996 if (get_spe64(insn)) return true;
12998 It's the only reliable way to do the stuff below. */
13000 i = PATTERN (insn);
13001 if (GET_CODE (i) == SET)
13003 enum machine_mode mode = GET_MODE (SET_SRC (i));
13005 if (SPE_VECTOR_MODE (mode))
13007 if (TARGET_E500_DOUBLE && mode == DFmode)
13017 debug_stack_info (rs6000_stack_t *info)
13019 const char *abi_string;
13022 info = rs6000_stack_info ();
13024 fprintf (stderr, "\nStack information for function %s:\n",
13025 ((current_function_decl && DECL_NAME (current_function_decl))
13026 ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
13031 default: abi_string = "Unknown"; break;
13032 case ABI_NONE: abi_string = "NONE"; break;
13033 case ABI_AIX: abi_string = "AIX"; break;
13034 case ABI_DARWIN: abi_string = "Darwin"; break;
13035 case ABI_V4: abi_string = "V.4"; break;
13038 fprintf (stderr, "\tABI = %5s\n", abi_string);
13040 if (TARGET_ALTIVEC_ABI)
13041 fprintf (stderr, "\tALTIVEC ABI extensions enabled.\n");
13043 if (TARGET_SPE_ABI)
13044 fprintf (stderr, "\tSPE ABI extensions enabled.\n");
13046 if (info->first_gp_reg_save != 32)
13047 fprintf (stderr, "\tfirst_gp_reg_save = %5d\n", info->first_gp_reg_save);
13049 if (info->first_fp_reg_save != 64)
13050 fprintf (stderr, "\tfirst_fp_reg_save = %5d\n", info->first_fp_reg_save);
13052 if (info->first_altivec_reg_save <= LAST_ALTIVEC_REGNO)
13053 fprintf (stderr, "\tfirst_altivec_reg_save = %5d\n",
13054 info->first_altivec_reg_save);
13056 if (info->lr_save_p)
13057 fprintf (stderr, "\tlr_save_p = %5d\n", info->lr_save_p);
13059 if (info->cr_save_p)
13060 fprintf (stderr, "\tcr_save_p = %5d\n", info->cr_save_p);
13062 if (info->toc_save_p)
13063 fprintf (stderr, "\ttoc_save_p = %5d\n", info->toc_save_p);
13065 if (info->vrsave_mask)
13066 fprintf (stderr, "\tvrsave_mask = 0x%x\n", info->vrsave_mask);
13069 fprintf (stderr, "\tpush_p = %5d\n", info->push_p);
13072 fprintf (stderr, "\tcalls_p = %5d\n", info->calls_p);
13074 if (info->gp_save_offset)
13075 fprintf (stderr, "\tgp_save_offset = %5d\n", info->gp_save_offset);
13077 if (info->fp_save_offset)
13078 fprintf (stderr, "\tfp_save_offset = %5d\n", info->fp_save_offset);
13080 if (info->altivec_save_offset)
13081 fprintf (stderr, "\taltivec_save_offset = %5d\n",
13082 info->altivec_save_offset);
13084 if (info->spe_gp_save_offset)
13085 fprintf (stderr, "\tspe_gp_save_offset = %5d\n",
13086 info->spe_gp_save_offset);
13088 if (info->vrsave_save_offset)
13089 fprintf (stderr, "\tvrsave_save_offset = %5d\n",
13090 info->vrsave_save_offset);
13092 if (info->lr_save_offset)
13093 fprintf (stderr, "\tlr_save_offset = %5d\n", info->lr_save_offset);
13095 if (info->cr_save_offset)
13096 fprintf (stderr, "\tcr_save_offset = %5d\n", info->cr_save_offset);
13098 if (info->toc_save_offset)
13099 fprintf (stderr, "\ttoc_save_offset = %5d\n", info->toc_save_offset);
13101 if (info->varargs_save_offset)
13102 fprintf (stderr, "\tvarargs_save_offset = %5d\n", info->varargs_save_offset);
13104 if (info->total_size)
13105 fprintf (stderr, "\ttotal_size = "HOST_WIDE_INT_PRINT_DEC"\n",
13108 if (info->vars_size)
13109 fprintf (stderr, "\tvars_size = "HOST_WIDE_INT_PRINT_DEC"\n",
13112 if (info->parm_size)
13113 fprintf (stderr, "\tparm_size = %5d\n", info->parm_size);
13115 if (info->fixed_size)
13116 fprintf (stderr, "\tfixed_size = %5d\n", info->fixed_size);
13119 fprintf (stderr, "\tgp_size = %5d\n", info->gp_size);
13121 if (info->spe_gp_size)
13122 fprintf (stderr, "\tspe_gp_size = %5d\n", info->spe_gp_size);
13125 fprintf (stderr, "\tfp_size = %5d\n", info->fp_size);
13127 if (info->altivec_size)
13128 fprintf (stderr, "\taltivec_size = %5d\n", info->altivec_size);
13130 if (info->vrsave_size)
13131 fprintf (stderr, "\tvrsave_size = %5d\n", info->vrsave_size);
13133 if (info->altivec_padding_size)
13134 fprintf (stderr, "\taltivec_padding_size= %5d\n",
13135 info->altivec_padding_size);
13137 if (info->spe_padding_size)
13138 fprintf (stderr, "\tspe_padding_size = %5d\n",
13139 info->spe_padding_size);
13142 fprintf (stderr, "\tlr_size = %5d\n", info->lr_size);
13145 fprintf (stderr, "\tcr_size = %5d\n", info->cr_size);
13147 if (info->toc_size)
13148 fprintf (stderr, "\ttoc_size = %5d\n", info->toc_size);
13150 if (info->save_size)
13151 fprintf (stderr, "\tsave_size = %5d\n", info->save_size);
13153 if (info->reg_size != 4)
13154 fprintf (stderr, "\treg_size = %5d\n", info->reg_size);
13156 fprintf (stderr, "\n");
13160 rs6000_return_addr (int count, rtx frame)
13162 /* Currently we don't optimize very well between prolog and body
13163 code and for PIC code the code can be actually quite bad, so
13164 don't try to be too clever here. */
13165 if (count != 0 || (DEFAULT_ABI != ABI_AIX && flag_pic))
13167 cfun->machine->ra_needs_full_frame = 1;
13174 plus_constant (copy_to_reg
13175 (gen_rtx_MEM (Pmode,
13176 memory_address (Pmode, frame))),
13177 RETURN_ADDRESS_OFFSET)));
13180 cfun->machine->ra_need_lr = 1;
13181 return get_hard_reg_initial_val (Pmode, LINK_REGISTER_REGNUM);
13184 /* Say whether a function is a candidate for sibcall handling or not.
13185 We do not allow indirect calls to be optimized into sibling calls.
13186 Also, we can't do it if there are any vector parameters; there's
13187 nowhere to put the VRsave code so it works; note that functions with
13188 vector parameters are required to have a prototype, so the argument
13189 type info must be available here. (The tail recursion case can work
13190 with vector parameters, but there's no way to distinguish here.) */
13192 rs6000_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
13197 if (TARGET_ALTIVEC_VRSAVE)
13199 for (type = TYPE_ARG_TYPES (TREE_TYPE (decl));
13200 type; type = TREE_CHAIN (type))
13202 if (TREE_CODE (TREE_VALUE (type)) == VECTOR_TYPE)
13206 if (DEFAULT_ABI == ABI_DARWIN
13207 || (*targetm.binds_local_p) (decl))
13209 tree attr_list = TYPE_ATTRIBUTES (TREE_TYPE (decl));
13211 if (!lookup_attribute ("longcall", attr_list)
13212 || lookup_attribute ("shortcall", attr_list))
13219 /* NULL if INSN insn is valid within a low-overhead loop.
13220 Otherwise return why doloop cannot be applied.
13221 PowerPC uses the COUNT register for branch on table instructions. */
13223 static const char *
13224 rs6000_invalid_within_doloop (rtx insn)
13227 return "Function call in the loop.";
13230 && (GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
13231 || GET_CODE (PATTERN (insn)) == ADDR_VEC))
13232 return "Computed branch in the loop.";
13238 rs6000_ra_ever_killed (void)
13244 if (current_function_is_thunk)
13247 /* regs_ever_live has LR marked as used if any sibcalls are present,
13248 but this should not force saving and restoring in the
13249 pro/epilogue. Likewise, reg_set_between_p thinks a sibcall
13250 clobbers LR, so that is inappropriate. */
13252 /* Also, the prologue can generate a store into LR that
13253 doesn't really count, like this:
13256 bcl to set PIC register
13260 When we're called from the epilogue, we need to avoid counting
13261 this as a store. */
13263 push_topmost_sequence ();
13264 top = get_insns ();
13265 pop_topmost_sequence ();
13266 reg = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
13268 for (insn = NEXT_INSN (top); insn != NULL_RTX; insn = NEXT_INSN (insn))
13272 if (FIND_REG_INC_NOTE (insn, reg))
13274 else if (GET_CODE (insn) == CALL_INSN
13275 && !SIBLING_CALL_P (insn))
13277 else if (set_of (reg, insn) != NULL_RTX
13278 && !prologue_epilogue_contains (insn))
13285 /* Add a REG_MAYBE_DEAD note to the insn. */
13287 rs6000_maybe_dead (rtx insn)
13289 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD,
13294 /* Emit instructions needed to load the TOC register.
13295 This is only needed when TARGET_TOC, TARGET_MINIMAL_TOC, and there is
13296 a constant pool; or for SVR4 -fpic. */
13299 rs6000_emit_load_toc_table (int fromprolog)
13302 dest = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
13304 if (TARGET_ELF && TARGET_SECURE_PLT && DEFAULT_ABI != ABI_AIX && flag_pic)
13307 rtx lab, tmp1, tmp2, got, tempLR;
13309 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
13310 lab = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
13312 got = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
13314 got = rs6000_got_sym ();
13315 tmp1 = tmp2 = dest;
13318 tmp1 = gen_reg_rtx (Pmode);
13319 tmp2 = gen_reg_rtx (Pmode);
13321 tempLR = (fromprolog
13322 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
13323 : gen_reg_rtx (Pmode));
13324 insn = emit_insn (gen_load_toc_v4_PIC_1 (tempLR, lab));
13326 rs6000_maybe_dead (insn);
13327 insn = emit_move_insn (tmp1, tempLR);
13329 rs6000_maybe_dead (insn);
13330 insn = emit_insn (gen_load_toc_v4_PIC_3b (tmp2, tmp1, got, lab));
13332 rs6000_maybe_dead (insn);
13333 insn = emit_insn (gen_load_toc_v4_PIC_3c (dest, tmp2, got, lab));
13335 rs6000_maybe_dead (insn);
13337 else if (TARGET_ELF && DEFAULT_ABI == ABI_V4 && flag_pic == 1)
13339 rtx tempLR = (fromprolog
13340 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
13341 : gen_reg_rtx (Pmode));
13343 insn = emit_insn (gen_load_toc_v4_pic_si (tempLR));
13345 rs6000_maybe_dead (insn);
13346 insn = emit_move_insn (dest, tempLR);
13348 rs6000_maybe_dead (insn);
13350 else if (TARGET_ELF && DEFAULT_ABI != ABI_AIX && flag_pic == 2)
13353 rtx tempLR = (fromprolog
13354 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
13355 : gen_reg_rtx (Pmode));
13356 rtx temp0 = (fromprolog
13357 ? gen_rtx_REG (Pmode, 0)
13358 : gen_reg_rtx (Pmode));
13364 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
13365 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
13367 ASM_GENERATE_INTERNAL_LABEL (buf, "LCL", rs6000_pic_labelno);
13368 symL = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
13370 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1 (tempLR,
13372 rs6000_maybe_dead (emit_move_insn (dest, tempLR));
13373 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_2 (temp0, dest,
13381 tocsym = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
13382 emit_insn (gen_load_toc_v4_PIC_1b (tempLR, tocsym));
13383 emit_move_insn (dest, tempLR);
13384 emit_move_insn (temp0, gen_rtx_MEM (Pmode, dest));
13386 insn = emit_insn (gen_addsi3 (dest, temp0, dest));
13388 rs6000_maybe_dead (insn);
13390 else if (TARGET_ELF && !TARGET_AIX && flag_pic == 0 && TARGET_MINIMAL_TOC)
13392 /* This is for AIX code running in non-PIC ELF32. */
13395 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
13396 realsym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
13398 insn = emit_insn (gen_elf_high (dest, realsym));
13400 rs6000_maybe_dead (insn);
13401 insn = emit_insn (gen_elf_low (dest, dest, realsym));
13403 rs6000_maybe_dead (insn);
13407 gcc_assert (DEFAULT_ABI == ABI_AIX);
13410 insn = emit_insn (gen_load_toc_aix_si (dest));
13412 insn = emit_insn (gen_load_toc_aix_di (dest));
13414 rs6000_maybe_dead (insn);
13418 /* Emit instructions to restore the link register after determining where
13419 its value has been stored. */
13422 rs6000_emit_eh_reg_restore (rtx source, rtx scratch)
13424 rs6000_stack_t *info = rs6000_stack_info ();
13427 operands[0] = source;
13428 operands[1] = scratch;
13430 if (info->lr_save_p)
13432 rtx frame_rtx = stack_pointer_rtx;
13433 HOST_WIDE_INT sp_offset = 0;
13436 if (frame_pointer_needed
13437 || current_function_calls_alloca
13438 || info->total_size > 32767)
13440 tmp = gen_frame_mem (Pmode, frame_rtx);
13441 emit_move_insn (operands[1], tmp);
13442 frame_rtx = operands[1];
13444 else if (info->push_p)
13445 sp_offset = info->total_size;
13447 tmp = plus_constant (frame_rtx, info->lr_save_offset + sp_offset);
13448 tmp = gen_frame_mem (Pmode, tmp);
13449 emit_move_insn (tmp, operands[0]);
13452 emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM), operands[0]);
13455 static GTY(()) int set = -1;
13458 get_TOC_alias_set (void)
13461 set = new_alias_set ();
13465 /* This returns nonzero if the current function uses the TOC. This is
13466 determined by the presence of (use (unspec ... UNSPEC_TOC)), which
13467 is generated by the ABI_V4 load_toc_* patterns. */
13474 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
13477 rtx pat = PATTERN (insn);
13480 if (GET_CODE (pat) == PARALLEL)
13481 for (i = 0; i < XVECLEN (pat, 0); i++)
13483 rtx sub = XVECEXP (pat, 0, i);
13484 if (GET_CODE (sub) == USE)
13486 sub = XEXP (sub, 0);
13487 if (GET_CODE (sub) == UNSPEC
13488 && XINT (sub, 1) == UNSPEC_TOC)
13498 create_TOC_reference (rtx symbol)
13500 return gen_rtx_PLUS (Pmode,
13501 gen_rtx_REG (Pmode, TOC_REGISTER),
13502 gen_rtx_CONST (Pmode,
13503 gen_rtx_MINUS (Pmode, symbol,
13504 gen_rtx_SYMBOL_REF (Pmode, toc_label_name))));
13507 /* If _Unwind_* has been called from within the same module,
13508 toc register is not guaranteed to be saved to 40(1) on function
13509 entry. Save it there in that case. */
13512 rs6000_aix_emit_builtin_unwind_init (void)
13515 rtx stack_top = gen_reg_rtx (Pmode);
13516 rtx opcode_addr = gen_reg_rtx (Pmode);
13517 rtx opcode = gen_reg_rtx (SImode);
13518 rtx tocompare = gen_reg_rtx (SImode);
13519 rtx no_toc_save_needed = gen_label_rtx ();
13521 mem = gen_frame_mem (Pmode, hard_frame_pointer_rtx);
13522 emit_move_insn (stack_top, mem);
13524 mem = gen_frame_mem (Pmode,
13525 gen_rtx_PLUS (Pmode, stack_top,
13526 GEN_INT (2 * GET_MODE_SIZE (Pmode))));
13527 emit_move_insn (opcode_addr, mem);
13528 emit_move_insn (opcode, gen_rtx_MEM (SImode, opcode_addr));
13529 emit_move_insn (tocompare, gen_int_mode (TARGET_32BIT ? 0x80410014
13530 : 0xE8410028, SImode));
13532 do_compare_rtx_and_jump (opcode, tocompare, EQ, 1,
13533 SImode, NULL_RTX, NULL_RTX,
13534 no_toc_save_needed);
13536 mem = gen_frame_mem (Pmode,
13537 gen_rtx_PLUS (Pmode, stack_top,
13538 GEN_INT (5 * GET_MODE_SIZE (Pmode))));
13539 emit_move_insn (mem, gen_rtx_REG (Pmode, 2));
13540 emit_label (no_toc_save_needed);
13543 /* This ties together stack memory (MEM with an alias set of frame_alias_set)
13544 and the change to the stack pointer. */
13547 rs6000_emit_stack_tie (void)
13549 rtx mem = gen_frame_mem (BLKmode,
13550 gen_rtx_REG (Pmode, STACK_POINTER_REGNUM));
13552 emit_insn (gen_stack_tie (mem));
13555 /* Emit the correct code for allocating stack space, as insns.
13556 If COPY_R12, make sure a copy of the old frame is left in r12.
13557 The generated code may use hard register 0 as a temporary. */
13560 rs6000_emit_allocate_stack (HOST_WIDE_INT size, int copy_r12)
13563 rtx stack_reg = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
13564 rtx tmp_reg = gen_rtx_REG (Pmode, 0);
13565 rtx todec = gen_int_mode (-size, Pmode);
13567 if (INTVAL (todec) != -size)
13569 warning (0, "stack frame too large");
13570 emit_insn (gen_trap ());
13574 if (current_function_limit_stack)
13576 if (REG_P (stack_limit_rtx)
13577 && REGNO (stack_limit_rtx) > 1
13578 && REGNO (stack_limit_rtx) <= 31)
13580 emit_insn (TARGET_32BIT
13581 ? gen_addsi3 (tmp_reg,
13584 : gen_adddi3 (tmp_reg,
13588 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
13591 else if (GET_CODE (stack_limit_rtx) == SYMBOL_REF
13593 && DEFAULT_ABI == ABI_V4)
13595 rtx toload = gen_rtx_CONST (VOIDmode,
13596 gen_rtx_PLUS (Pmode,
13600 emit_insn (gen_elf_high (tmp_reg, toload));
13601 emit_insn (gen_elf_low (tmp_reg, tmp_reg, toload));
13602 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
13606 warning (0, "stack limit expression is not supported");
13609 if (copy_r12 || ! TARGET_UPDATE)
13610 emit_move_insn (gen_rtx_REG (Pmode, 12), stack_reg);
13616 /* Need a note here so that try_split doesn't get confused. */
13617 if (get_last_insn () == NULL_RTX)
13618 emit_note (NOTE_INSN_DELETED);
13619 insn = emit_move_insn (tmp_reg, todec);
13620 try_split (PATTERN (insn), insn, 0);
13624 insn = emit_insn (TARGET_32BIT
13625 ? gen_movsi_update (stack_reg, stack_reg,
13627 : gen_movdi_di_update (stack_reg, stack_reg,
13628 todec, stack_reg));
13632 insn = emit_insn (TARGET_32BIT
13633 ? gen_addsi3 (stack_reg, stack_reg, todec)
13634 : gen_adddi3 (stack_reg, stack_reg, todec));
13635 emit_move_insn (gen_rtx_MEM (Pmode, stack_reg),
13636 gen_rtx_REG (Pmode, 12));
13639 RTX_FRAME_RELATED_P (insn) = 1;
13641 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
13642 gen_rtx_SET (VOIDmode, stack_reg,
13643 gen_rtx_PLUS (Pmode, stack_reg,
13648 /* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
13649 with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
13650 is not NULL. It would be nice if dwarf2out_frame_debug_expr could
13651 deduce these equivalences by itself so it wasn't necessary to hold
13652 its hand so much. */
13655 rs6000_frame_related (rtx insn, rtx reg, HOST_WIDE_INT val,
13656 rtx reg2, rtx rreg)
13660 /* copy_rtx will not make unique copies of registers, so we need to
13661 ensure we don't have unwanted sharing here. */
13663 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
13666 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
13668 real = copy_rtx (PATTERN (insn));
13670 if (reg2 != NULL_RTX)
13671 real = replace_rtx (real, reg2, rreg);
13673 real = replace_rtx (real, reg,
13674 gen_rtx_PLUS (Pmode, gen_rtx_REG (Pmode,
13675 STACK_POINTER_REGNUM),
13678 /* We expect that 'real' is either a SET or a PARALLEL containing
13679 SETs (and possibly other stuff). In a PARALLEL, all the SETs
13680 are important so they all have to be marked RTX_FRAME_RELATED_P. */
13682 if (GET_CODE (real) == SET)
13686 temp = simplify_rtx (SET_SRC (set));
13688 SET_SRC (set) = temp;
13689 temp = simplify_rtx (SET_DEST (set));
13691 SET_DEST (set) = temp;
13692 if (GET_CODE (SET_DEST (set)) == MEM)
13694 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
13696 XEXP (SET_DEST (set), 0) = temp;
13703 gcc_assert (GET_CODE (real) == PARALLEL);
13704 for (i = 0; i < XVECLEN (real, 0); i++)
13705 if (GET_CODE (XVECEXP (real, 0, i)) == SET)
13707 rtx set = XVECEXP (real, 0, i);
13709 temp = simplify_rtx (SET_SRC (set));
13711 SET_SRC (set) = temp;
13712 temp = simplify_rtx (SET_DEST (set));
13714 SET_DEST (set) = temp;
13715 if (GET_CODE (SET_DEST (set)) == MEM)
13717 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
13719 XEXP (SET_DEST (set), 0) = temp;
13721 RTX_FRAME_RELATED_P (set) = 1;
13726 real = spe_synthesize_frame_save (real);
13728 RTX_FRAME_RELATED_P (insn) = 1;
13729 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
13734 /* Given an SPE frame note, return a PARALLEL of SETs with the
13735 original note, plus a synthetic register save. */
13738 spe_synthesize_frame_save (rtx real)
13740 rtx synth, offset, reg, real2;
13742 if (GET_CODE (real) != SET
13743 || GET_MODE (SET_SRC (real)) != V2SImode)
13746 /* For the SPE, registers saved in 64-bits, get a PARALLEL for their
13747 frame related note. The parallel contains a set of the register
13748 being saved, and another set to a synthetic register (n+1200).
13749 This is so we can differentiate between 64-bit and 32-bit saves.
13750 Words cannot describe this nastiness. */
13752 gcc_assert (GET_CODE (SET_DEST (real)) == MEM
13753 && GET_CODE (XEXP (SET_DEST (real), 0)) == PLUS
13754 && GET_CODE (SET_SRC (real)) == REG);
13757 (set (mem (plus (reg x) (const y)))
13760 (set (mem (plus (reg x) (const y+4)))
13764 real2 = copy_rtx (real);
13765 PUT_MODE (SET_DEST (real2), SImode);
13766 reg = SET_SRC (real2);
13767 real2 = replace_rtx (real2, reg, gen_rtx_REG (SImode, REGNO (reg)));
13768 synth = copy_rtx (real2);
13770 if (BYTES_BIG_ENDIAN)
13772 offset = XEXP (XEXP (SET_DEST (real2), 0), 1);
13773 real2 = replace_rtx (real2, offset, GEN_INT (INTVAL (offset) + 4));
13776 reg = SET_SRC (synth);
13778 synth = replace_rtx (synth, reg,
13779 gen_rtx_REG (SImode, REGNO (reg) + 1200));
13781 offset = XEXP (XEXP (SET_DEST (synth), 0), 1);
13782 synth = replace_rtx (synth, offset,
13783 GEN_INT (INTVAL (offset)
13784 + (BYTES_BIG_ENDIAN ? 0 : 4)));
13786 RTX_FRAME_RELATED_P (synth) = 1;
13787 RTX_FRAME_RELATED_P (real2) = 1;
13788 if (BYTES_BIG_ENDIAN)
13789 real = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, synth, real2));
13791 real = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, real2, synth));
13796 /* Returns an insn that has a vrsave set operation with the
13797 appropriate CLOBBERs. */
13800 generate_set_vrsave (rtx reg, rs6000_stack_t *info, int epiloguep)
13803 rtx insn, clobs[TOTAL_ALTIVEC_REGS + 1];
13804 rtx vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
13807 = gen_rtx_SET (VOIDmode,
13809 gen_rtx_UNSPEC_VOLATILE (SImode,
13810 gen_rtvec (2, reg, vrsave),
13811 UNSPECV_SET_VRSAVE));
13815 /* We need to clobber the registers in the mask so the scheduler
13816 does not move sets to VRSAVE before sets of AltiVec registers.
13818 However, if the function receives nonlocal gotos, reload will set
13819 all call saved registers live. We will end up with:
13821 (set (reg 999) (mem))
13822 (parallel [ (set (reg vrsave) (unspec blah))
13823 (clobber (reg 999))])
13825 The clobber will cause the store into reg 999 to be dead, and
13826 flow will attempt to delete an epilogue insn. In this case, we
13827 need an unspec use/set of the register. */
13829 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
13830 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
13832 if (!epiloguep || call_used_regs [i])
13833 clobs[nclobs++] = gen_rtx_CLOBBER (VOIDmode,
13834 gen_rtx_REG (V4SImode, i));
13837 rtx reg = gen_rtx_REG (V4SImode, i);
13840 = gen_rtx_SET (VOIDmode,
13842 gen_rtx_UNSPEC (V4SImode,
13843 gen_rtvec (1, reg), 27));
13847 insn = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nclobs));
13849 for (i = 0; i < nclobs; ++i)
13850 XVECEXP (insn, 0, i) = clobs[i];
13855 /* Save a register into the frame, and emit RTX_FRAME_RELATED_P notes.
13856 Save REGNO into [FRAME_REG + OFFSET] in mode MODE. */
13859 emit_frame_save (rtx frame_reg, rtx frame_ptr, enum machine_mode mode,
13860 unsigned int regno, int offset, HOST_WIDE_INT total_size)
13862 rtx reg, offset_rtx, insn, mem, addr, int_rtx;
13863 rtx replacea, replaceb;
13865 int_rtx = GEN_INT (offset);
13867 /* Some cases that need register indexed addressing. */
13868 if ((TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
13869 || (TARGET_E500_DOUBLE && mode == DFmode)
13871 && SPE_VECTOR_MODE (mode)
13872 && !SPE_CONST_OFFSET_OK (offset)))
13874 /* Whomever calls us must make sure r11 is available in the
13875 flow path of instructions in the prologue. */
13876 offset_rtx = gen_rtx_REG (Pmode, 11);
13877 emit_move_insn (offset_rtx, int_rtx);
13879 replacea = offset_rtx;
13880 replaceb = int_rtx;
13884 offset_rtx = int_rtx;
13885 replacea = NULL_RTX;
13886 replaceb = NULL_RTX;
13889 reg = gen_rtx_REG (mode, regno);
13890 addr = gen_rtx_PLUS (Pmode, frame_reg, offset_rtx);
13891 mem = gen_frame_mem (mode, addr);
13893 insn = emit_move_insn (mem, reg);
13895 rs6000_frame_related (insn, frame_ptr, total_size, replacea, replaceb);
13898 /* Emit an offset memory reference suitable for a frame store, while
13899 converting to a valid addressing mode. */
13902 gen_frame_mem_offset (enum machine_mode mode, rtx reg, int offset)
13904 rtx int_rtx, offset_rtx;
13906 int_rtx = GEN_INT (offset);
13908 if ((TARGET_SPE_ABI && SPE_VECTOR_MODE (mode))
13909 || (TARGET_E500_DOUBLE && mode == DFmode))
13911 offset_rtx = gen_rtx_REG (Pmode, FIXED_SCRATCH);
13912 emit_move_insn (offset_rtx, int_rtx);
13915 offset_rtx = int_rtx;
13917 return gen_frame_mem (mode, gen_rtx_PLUS (Pmode, reg, offset_rtx));
13920 /* Look for user-defined global regs. We should not save and restore these,
13921 and cannot use stmw/lmw if there are any in its range. */
13924 no_global_regs_above (int first_greg)
13927 for (i = 0; i < 32 - first_greg; i++)
13928 if (global_regs[first_greg + i])
13933 #ifndef TARGET_FIX_AND_CONTINUE
13934 #define TARGET_FIX_AND_CONTINUE 0
13937 /* Emit function prologue as insns. */
13940 rs6000_emit_prologue (void)
13942 rs6000_stack_t *info = rs6000_stack_info ();
13943 enum machine_mode reg_mode = Pmode;
13944 int reg_size = TARGET_32BIT ? 4 : 8;
13945 rtx sp_reg_rtx = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
13946 rtx frame_ptr_rtx = gen_rtx_REG (Pmode, 12);
13947 rtx frame_reg_rtx = sp_reg_rtx;
13948 rtx cr_save_rtx = NULL_RTX;
13950 int saving_FPRs_inline;
13951 int using_store_multiple;
13952 HOST_WIDE_INT sp_offset = 0;
13954 if (TARGET_FIX_AND_CONTINUE)
13956 /* gdb on darwin arranges to forward a function from the old
13957 address by modifying the first 5 instructions of the function
13958 to branch to the overriding function. This is necessary to
13959 permit function pointers that point to the old function to
13960 actually forward to the new function. */
13961 emit_insn (gen_nop ());
13962 emit_insn (gen_nop ());
13963 emit_insn (gen_nop ());
13964 emit_insn (gen_nop ());
13965 emit_insn (gen_nop ());
13968 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
13970 reg_mode = V2SImode;
13974 using_store_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
13975 && (!TARGET_SPE_ABI
13976 || info->spe_64bit_regs_used == 0)
13977 && info->first_gp_reg_save < 31
13978 && no_global_regs_above (info->first_gp_reg_save));
13979 saving_FPRs_inline = (info->first_fp_reg_save == 64
13980 || FP_SAVE_INLINE (info->first_fp_reg_save)
13981 || current_function_calls_eh_return
13982 || cfun->machine->ra_need_lr);
13984 /* For V.4, update stack before we do any saving and set back pointer. */
13986 && (DEFAULT_ABI == ABI_V4
13987 || current_function_calls_eh_return))
13989 if (info->total_size < 32767)
13990 sp_offset = info->total_size;
13992 frame_reg_rtx = frame_ptr_rtx;
13993 rs6000_emit_allocate_stack (info->total_size,
13994 (frame_reg_rtx != sp_reg_rtx
13995 && (info->cr_save_p
13997 || info->first_fp_reg_save < 64
13998 || info->first_gp_reg_save < 32
14000 if (frame_reg_rtx != sp_reg_rtx)
14001 rs6000_emit_stack_tie ();
14004 /* Handle world saves specially here. */
14005 if (WORLD_SAVE_P (info))
14011 /* save_world expects lr in r0. */
14012 if (info->lr_save_p)
14014 insn = emit_move_insn (gen_rtx_REG (Pmode, 0),
14015 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
14016 RTX_FRAME_RELATED_P (insn) = 1;
14019 /* The SAVE_WORLD and RESTORE_WORLD routines make a number of
14020 assumptions about the offsets of various bits of the stack
14022 gcc_assert (info->gp_save_offset == -220
14023 && info->fp_save_offset == -144
14024 && info->lr_save_offset == 8
14025 && info->cr_save_offset == 4
14028 && (!current_function_calls_eh_return
14029 || info->ehrd_offset == -432)
14030 && info->vrsave_save_offset == -224
14031 && info->altivec_save_offset == (-224 -16 -192));
14033 treg = gen_rtx_REG (SImode, 11);
14034 emit_move_insn (treg, GEN_INT (-info->total_size));
14036 /* SAVE_WORLD takes the caller's LR in R0 and the frame size
14037 in R11. It also clobbers R12, so beware! */
14039 /* Preserve CR2 for save_world prologues */
14041 sz += 32 - info->first_gp_reg_save;
14042 sz += 64 - info->first_fp_reg_save;
14043 sz += LAST_ALTIVEC_REGNO - info->first_altivec_reg_save + 1;
14044 p = rtvec_alloc (sz);
14046 RTVEC_ELT (p, j++) = gen_rtx_CLOBBER (VOIDmode,
14047 gen_rtx_REG (Pmode,
14048 LINK_REGISTER_REGNUM));
14049 RTVEC_ELT (p, j++) = gen_rtx_USE (VOIDmode,
14050 gen_rtx_SYMBOL_REF (Pmode,
14052 /* We do floats first so that the instruction pattern matches
14054 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
14056 rtx reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
14057 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14058 GEN_INT (info->fp_save_offset
14059 + sp_offset + 8 * i));
14060 rtx mem = gen_frame_mem (DFmode, addr);
14062 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
14064 for (i = 0; info->first_altivec_reg_save + i <= LAST_ALTIVEC_REGNO; i++)
14066 rtx reg = gen_rtx_REG (V4SImode, info->first_altivec_reg_save + i);
14067 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14068 GEN_INT (info->altivec_save_offset
14069 + sp_offset + 16 * i));
14070 rtx mem = gen_frame_mem (V4SImode, addr);
14072 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
14074 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
14076 rtx reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
14077 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14078 GEN_INT (info->gp_save_offset
14079 + sp_offset + reg_size * i));
14080 rtx mem = gen_frame_mem (reg_mode, addr);
14082 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
14086 /* CR register traditionally saved as CR2. */
14087 rtx reg = gen_rtx_REG (reg_mode, CR2_REGNO);
14088 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14089 GEN_INT (info->cr_save_offset
14091 rtx mem = gen_frame_mem (reg_mode, addr);
14093 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
14095 /* Prevent any attempt to delete the setting of r0 and treg! */
14096 RTVEC_ELT (p, j++) = gen_rtx_USE (VOIDmode, gen_rtx_REG (Pmode, 0));
14097 RTVEC_ELT (p, j++) = gen_rtx_USE (VOIDmode, treg);
14098 RTVEC_ELT (p, j++) = gen_rtx_CLOBBER (VOIDmode, sp_reg_rtx);
14100 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
14101 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
14102 NULL_RTX, NULL_RTX);
14104 if (current_function_calls_eh_return)
14109 unsigned int regno = EH_RETURN_DATA_REGNO (i);
14110 if (regno == INVALID_REGNUM)
14112 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, reg_mode, regno,
14113 info->ehrd_offset + sp_offset
14114 + reg_size * (int) i,
14120 /* Save AltiVec registers if needed. */
14121 if (!WORLD_SAVE_P (info) && TARGET_ALTIVEC_ABI && info->altivec_size != 0)
14125 /* There should be a non inline version of this, for when we
14126 are saving lots of vector registers. */
14127 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
14128 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
14130 rtx areg, savereg, mem;
14133 offset = info->altivec_save_offset + sp_offset
14134 + 16 * (i - info->first_altivec_reg_save);
14136 savereg = gen_rtx_REG (V4SImode, i);
14138 areg = gen_rtx_REG (Pmode, 0);
14139 emit_move_insn (areg, GEN_INT (offset));
14141 /* AltiVec addressing mode is [reg+reg]. */
14142 mem = gen_frame_mem (V4SImode,
14143 gen_rtx_PLUS (Pmode, frame_reg_rtx, areg));
14145 insn = emit_move_insn (mem, savereg);
14147 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
14148 areg, GEN_INT (offset));
14152 /* VRSAVE is a bit vector representing which AltiVec registers
14153 are used. The OS uses this to determine which vector
14154 registers to save on a context switch. We need to save
14155 VRSAVE on the stack frame, add whatever AltiVec registers we
14156 used in this function, and do the corresponding magic in the
14159 if (TARGET_ALTIVEC && TARGET_ALTIVEC_VRSAVE
14160 && info->vrsave_mask != 0)
14162 rtx reg, mem, vrsave;
14165 /* Get VRSAVE onto a GPR. Note that ABI_V4 might be using r12
14166 as frame_reg_rtx and r11 as the static chain pointer for
14167 nested functions. */
14168 reg = gen_rtx_REG (SImode, 0);
14169 vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
14171 emit_insn (gen_get_vrsave_internal (reg));
14173 emit_insn (gen_rtx_SET (VOIDmode, reg, vrsave));
14175 if (!WORLD_SAVE_P (info))
14178 offset = info->vrsave_save_offset + sp_offset;
14179 mem = gen_frame_mem (SImode,
14180 gen_rtx_PLUS (Pmode, frame_reg_rtx,
14181 GEN_INT (offset)));
14182 insn = emit_move_insn (mem, reg);
14185 /* Include the registers in the mask. */
14186 emit_insn (gen_iorsi3 (reg, reg, GEN_INT ((int) info->vrsave_mask)));
14188 insn = emit_insn (generate_set_vrsave (reg, info, 0));
14191 /* If we use the link register, get it into r0. */
14192 if (!WORLD_SAVE_P (info) && info->lr_save_p)
14194 insn = emit_move_insn (gen_rtx_REG (Pmode, 0),
14195 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
14196 RTX_FRAME_RELATED_P (insn) = 1;
14199 /* If we need to save CR, put it into r12. */
14200 if (!WORLD_SAVE_P (info) && info->cr_save_p && frame_reg_rtx != frame_ptr_rtx)
14204 cr_save_rtx = gen_rtx_REG (SImode, 12);
14205 insn = emit_insn (gen_movesi_from_cr (cr_save_rtx));
14206 RTX_FRAME_RELATED_P (insn) = 1;
14207 /* Now, there's no way that dwarf2out_frame_debug_expr is going
14208 to understand '(unspec:SI [(reg:CC 68) ...] UNSPEC_MOVESI_FROM_CR)'.
14209 But that's OK. All we have to do is specify that _one_ condition
14210 code register is saved in this stack slot. The thrower's epilogue
14211 will then restore all the call-saved registers.
14212 We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux. */
14213 set = gen_rtx_SET (VOIDmode, cr_save_rtx,
14214 gen_rtx_REG (SImode, CR2_REGNO));
14215 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
14220 /* Do any required saving of fpr's. If only one or two to save, do
14221 it ourselves. Otherwise, call function. */
14222 if (!WORLD_SAVE_P (info) && saving_FPRs_inline)
14225 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
14226 if ((regs_ever_live[info->first_fp_reg_save+i]
14227 && ! call_used_regs[info->first_fp_reg_save+i]))
14228 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, DFmode,
14229 info->first_fp_reg_save + i,
14230 info->fp_save_offset + sp_offset + 8 * i,
14233 else if (!WORLD_SAVE_P (info) && info->first_fp_reg_save != 64)
14237 const char *alloc_rname;
14239 p = rtvec_alloc (2 + 64 - info->first_fp_reg_save);
14241 RTVEC_ELT (p, 0) = gen_rtx_CLOBBER (VOIDmode,
14242 gen_rtx_REG (Pmode,
14243 LINK_REGISTER_REGNUM));
14244 sprintf (rname, "%s%d%s", SAVE_FP_PREFIX,
14245 info->first_fp_reg_save - 32, SAVE_FP_SUFFIX);
14246 alloc_rname = ggc_strdup (rname);
14247 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
14248 gen_rtx_SYMBOL_REF (Pmode,
14250 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
14252 rtx addr, reg, mem;
14253 reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
14254 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14255 GEN_INT (info->fp_save_offset
14256 + sp_offset + 8*i));
14257 mem = gen_frame_mem (DFmode, addr);
14259 RTVEC_ELT (p, i + 2) = gen_rtx_SET (VOIDmode, mem, reg);
14261 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
14262 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
14263 NULL_RTX, NULL_RTX);
14266 /* Save GPRs. This is done as a PARALLEL if we are using
14267 the store-multiple instructions. */
14268 if (!WORLD_SAVE_P (info) && using_store_multiple)
14272 p = rtvec_alloc (32 - info->first_gp_reg_save);
14273 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
14275 rtx addr, reg, mem;
14276 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
14277 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14278 GEN_INT (info->gp_save_offset
14281 mem = gen_frame_mem (reg_mode, addr);
14283 RTVEC_ELT (p, i) = gen_rtx_SET (VOIDmode, mem, reg);
14285 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
14286 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
14287 NULL_RTX, NULL_RTX);
14289 else if (!WORLD_SAVE_P (info))
14292 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
14293 if ((regs_ever_live[info->first_gp_reg_save + i]
14294 && (!call_used_regs[info->first_gp_reg_save + i]
14295 || (i + info->first_gp_reg_save
14296 == RS6000_PIC_OFFSET_TABLE_REGNUM
14297 && TARGET_TOC && TARGET_MINIMAL_TOC)))
14298 || (i + info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
14299 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
14300 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
14302 rtx addr, reg, mem;
14303 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
14305 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
14307 int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
14310 if (!SPE_CONST_OFFSET_OK (offset))
14312 b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
14313 emit_move_insn (b, GEN_INT (offset));
14316 b = GEN_INT (offset);
14318 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
14319 mem = gen_frame_mem (V2SImode, addr);
14320 insn = emit_move_insn (mem, reg);
14322 if (GET_CODE (b) == CONST_INT)
14323 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
14324 NULL_RTX, NULL_RTX);
14326 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
14327 b, GEN_INT (offset));
14331 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14332 GEN_INT (info->gp_save_offset
14335 mem = gen_frame_mem (reg_mode, addr);
14337 insn = emit_move_insn (mem, reg);
14338 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
14339 NULL_RTX, NULL_RTX);
14344 /* ??? There's no need to emit actual instructions here, but it's the
14345 easiest way to get the frame unwind information emitted. */
14346 if (!WORLD_SAVE_P (info) && current_function_calls_eh_return)
14348 unsigned int i, regno;
14350 /* In AIX ABI we need to pretend we save r2 here. */
14353 rtx addr, reg, mem;
14355 reg = gen_rtx_REG (reg_mode, 2);
14356 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14357 GEN_INT (sp_offset + 5 * reg_size));
14358 mem = gen_frame_mem (reg_mode, addr);
14360 insn = emit_move_insn (mem, reg);
14361 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
14362 NULL_RTX, NULL_RTX);
14363 PATTERN (insn) = gen_blockage ();
14368 regno = EH_RETURN_DATA_REGNO (i);
14369 if (regno == INVALID_REGNUM)
14372 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, reg_mode, regno,
14373 info->ehrd_offset + sp_offset
14374 + reg_size * (int) i,
14379 /* Save lr if we used it. */
14380 if (!WORLD_SAVE_P (info) && info->lr_save_p)
14382 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14383 GEN_INT (info->lr_save_offset + sp_offset));
14384 rtx reg = gen_rtx_REG (Pmode, 0);
14385 rtx mem = gen_rtx_MEM (Pmode, addr);
14386 /* This should not be of frame_alias_set, because of
14387 __builtin_return_address. */
14389 insn = emit_move_insn (mem, reg);
14390 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
14391 NULL_RTX, NULL_RTX);
14394 /* Save CR if we use any that must be preserved. */
14395 if (!WORLD_SAVE_P (info) && info->cr_save_p)
14397 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14398 GEN_INT (info->cr_save_offset + sp_offset));
14399 rtx mem = gen_frame_mem (SImode, addr);
14400 /* See the large comment above about why CR2_REGNO is used. */
14401 rtx magic_eh_cr_reg = gen_rtx_REG (SImode, CR2_REGNO);
14403 /* If r12 was used to hold the original sp, copy cr into r0 now
14405 if (REGNO (frame_reg_rtx) == 12)
14409 cr_save_rtx = gen_rtx_REG (SImode, 0);
14410 insn = emit_insn (gen_movesi_from_cr (cr_save_rtx));
14411 RTX_FRAME_RELATED_P (insn) = 1;
14412 set = gen_rtx_SET (VOIDmode, cr_save_rtx, magic_eh_cr_reg);
14413 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
14418 insn = emit_move_insn (mem, cr_save_rtx);
14420 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
14421 NULL_RTX, NULL_RTX);
14424 /* Update stack and set back pointer unless this is V.4,
14425 for which it was done previously. */
14426 if (!WORLD_SAVE_P (info) && info->push_p
14427 && !(DEFAULT_ABI == ABI_V4 || current_function_calls_eh_return))
14428 rs6000_emit_allocate_stack (info->total_size, FALSE);
14430 /* Set frame pointer, if needed. */
14431 if (frame_pointer_needed)
14433 insn = emit_move_insn (gen_rtx_REG (Pmode, HARD_FRAME_POINTER_REGNUM),
14435 RTX_FRAME_RELATED_P (insn) = 1;
14438 /* If we are using RS6000_PIC_OFFSET_TABLE_REGNUM, we need to set it up. */
14439 if ((TARGET_TOC && TARGET_MINIMAL_TOC && get_pool_size () != 0)
14440 || (DEFAULT_ABI == ABI_V4
14441 && (flag_pic == 1 || (flag_pic && TARGET_SECURE_PLT))
14442 && regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM]))
14444 /* If emit_load_toc_table will use the link register, we need to save
14445 it. We use R12 for this purpose because emit_load_toc_table
14446 can use register 0. This allows us to use a plain 'blr' to return
14447 from the procedure more often. */
14448 int save_LR_around_toc_setup = (TARGET_ELF
14449 && DEFAULT_ABI != ABI_AIX
14451 && ! info->lr_save_p
14452 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0);
14453 if (save_LR_around_toc_setup)
14455 rtx lr = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
14457 insn = emit_move_insn (frame_ptr_rtx, lr);
14458 rs6000_maybe_dead (insn);
14459 RTX_FRAME_RELATED_P (insn) = 1;
14461 rs6000_emit_load_toc_table (TRUE);
14463 insn = emit_move_insn (lr, frame_ptr_rtx);
14464 rs6000_maybe_dead (insn);
14465 RTX_FRAME_RELATED_P (insn) = 1;
14468 rs6000_emit_load_toc_table (TRUE);
14472 if (DEFAULT_ABI == ABI_DARWIN
14473 && flag_pic && current_function_uses_pic_offset_table)
14475 rtx lr = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
14476 rtx src = machopic_function_base_sym ();
14478 /* Save and restore LR locally around this call (in R0). */
14479 if (!info->lr_save_p)
14480 rs6000_maybe_dead (emit_move_insn (gen_rtx_REG (Pmode, 0), lr));
14482 rs6000_maybe_dead (emit_insn (gen_load_macho_picbase (lr, src)));
14484 insn = emit_move_insn (gen_rtx_REG (Pmode,
14485 RS6000_PIC_OFFSET_TABLE_REGNUM),
14487 rs6000_maybe_dead (insn);
14489 if (!info->lr_save_p)
14490 rs6000_maybe_dead (emit_move_insn (lr, gen_rtx_REG (Pmode, 0)));
14495 /* Write function prologue. */
14498 rs6000_output_function_prologue (FILE *file,
14499 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
14501 rs6000_stack_t *info = rs6000_stack_info ();
14503 if (TARGET_DEBUG_STACK)
14504 debug_stack_info (info);
14506 /* Write .extern for any function we will call to save and restore
14508 if (info->first_fp_reg_save < 64
14509 && !FP_SAVE_INLINE (info->first_fp_reg_save))
14510 fprintf (file, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
14511 SAVE_FP_PREFIX, info->first_fp_reg_save - 32, SAVE_FP_SUFFIX,
14512 RESTORE_FP_PREFIX, info->first_fp_reg_save - 32,
14513 RESTORE_FP_SUFFIX);
14515 /* Write .extern for AIX common mode routines, if needed. */
14516 if (! TARGET_POWER && ! TARGET_POWERPC && ! common_mode_defined)
14518 fputs ("\t.extern __mulh\n", file);
14519 fputs ("\t.extern __mull\n", file);
14520 fputs ("\t.extern __divss\n", file);
14521 fputs ("\t.extern __divus\n", file);
14522 fputs ("\t.extern __quoss\n", file);
14523 fputs ("\t.extern __quous\n", file);
14524 common_mode_defined = 1;
14527 if (! HAVE_prologue)
14531 /* A NOTE_INSN_DELETED is supposed to be at the start and end of
14532 the "toplevel" insn chain. */
14533 emit_note (NOTE_INSN_DELETED);
14534 rs6000_emit_prologue ();
14535 emit_note (NOTE_INSN_DELETED);
14537 /* Expand INSN_ADDRESSES so final() doesn't crash. */
14541 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
14543 INSN_ADDRESSES_NEW (insn, addr);
14548 if (TARGET_DEBUG_STACK)
14549 debug_rtx_list (get_insns (), 100);
14550 final (get_insns (), file, FALSE);
14554 rs6000_pic_labelno++;
14557 /* Emit function epilogue as insns.
14559 At present, dwarf2out_frame_debug_expr doesn't understand
14560 register restores, so we don't bother setting RTX_FRAME_RELATED_P
14561 anywhere in the epilogue. Most of the insns below would in any case
14562 need special notes to explain where r11 is in relation to the stack. */
14565 rs6000_emit_epilogue (int sibcall)
14567 rs6000_stack_t *info;
14568 int restoring_FPRs_inline;
14569 int using_load_multiple;
14570 int using_mfcr_multiple;
14571 int use_backchain_to_restore_sp;
14573 rtx sp_reg_rtx = gen_rtx_REG (Pmode, 1);
14574 rtx frame_reg_rtx = sp_reg_rtx;
14575 enum machine_mode reg_mode = Pmode;
14576 int reg_size = TARGET_32BIT ? 4 : 8;
14579 info = rs6000_stack_info ();
14581 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
14583 reg_mode = V2SImode;
14587 using_load_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
14588 && (!TARGET_SPE_ABI
14589 || info->spe_64bit_regs_used == 0)
14590 && info->first_gp_reg_save < 31
14591 && no_global_regs_above (info->first_gp_reg_save));
14592 restoring_FPRs_inline = (sibcall
14593 || current_function_calls_eh_return
14594 || info->first_fp_reg_save == 64
14595 || FP_SAVE_INLINE (info->first_fp_reg_save));
14596 use_backchain_to_restore_sp = (frame_pointer_needed
14597 || current_function_calls_alloca
14598 || info->total_size > 32767);
14599 using_mfcr_multiple = (rs6000_cpu == PROCESSOR_PPC601
14600 || rs6000_cpu == PROCESSOR_PPC603
14601 || rs6000_cpu == PROCESSOR_PPC750
14604 if (WORLD_SAVE_P (info))
14608 const char *alloc_rname;
14611 /* eh_rest_world_r10 will return to the location saved in the LR
14612 stack slot (which is not likely to be our caller.)
14613 Input: R10 -- stack adjustment. Clobbers R0, R11, R12, R7, R8.
14614 rest_world is similar, except any R10 parameter is ignored.
14615 The exception-handling stuff that was here in 2.95 is no
14616 longer necessary. */
14620 + 32 - info->first_gp_reg_save
14621 + LAST_ALTIVEC_REGNO + 1 - info->first_altivec_reg_save
14622 + 63 + 1 - info->first_fp_reg_save);
14624 strcpy (rname, ((current_function_calls_eh_return) ?
14625 "*eh_rest_world_r10" : "*rest_world"));
14626 alloc_rname = ggc_strdup (rname);
14629 RTVEC_ELT (p, j++) = gen_rtx_RETURN (VOIDmode);
14630 RTVEC_ELT (p, j++) = gen_rtx_USE (VOIDmode,
14631 gen_rtx_REG (Pmode,
14632 LINK_REGISTER_REGNUM));
14634 = gen_rtx_USE (VOIDmode, gen_rtx_SYMBOL_REF (Pmode, alloc_rname));
14635 /* The instruction pattern requires a clobber here;
14636 it is shared with the restVEC helper. */
14638 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 11));
14641 /* CR register traditionally saved as CR2. */
14642 rtx reg = gen_rtx_REG (reg_mode, CR2_REGNO);
14643 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14644 GEN_INT (info->cr_save_offset));
14645 rtx mem = gen_frame_mem (reg_mode, addr);
14647 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
14650 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
14652 rtx reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
14653 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14654 GEN_INT (info->gp_save_offset
14656 rtx mem = gen_frame_mem (reg_mode, addr);
14658 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
14660 for (i = 0; info->first_altivec_reg_save + i <= LAST_ALTIVEC_REGNO; i++)
14662 rtx reg = gen_rtx_REG (V4SImode, info->first_altivec_reg_save + i);
14663 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14664 GEN_INT (info->altivec_save_offset
14666 rtx mem = gen_frame_mem (V4SImode, addr);
14668 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
14670 for (i = 0; info->first_fp_reg_save + i <= 63; i++)
14672 rtx reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
14673 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14674 GEN_INT (info->fp_save_offset
14676 rtx mem = gen_frame_mem (DFmode, addr);
14678 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
14681 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 0));
14683 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, 12));
14685 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, 7));
14687 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, 8));
14689 = gen_rtx_USE (VOIDmode, gen_rtx_REG (SImode, 10));
14690 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
14695 /* If we have a frame pointer, a call to alloca, or a large stack
14696 frame, restore the old stack pointer using the backchain. Otherwise,
14697 we know what size to update it with. */
14698 if (use_backchain_to_restore_sp)
14700 /* Under V.4, don't reset the stack pointer until after we're done
14701 loading the saved registers. */
14702 if (DEFAULT_ABI == ABI_V4)
14703 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
14705 emit_move_insn (frame_reg_rtx,
14706 gen_rtx_MEM (Pmode, sp_reg_rtx));
14709 else if (info->push_p)
14711 if (DEFAULT_ABI == ABI_V4
14712 || current_function_calls_eh_return)
14713 sp_offset = info->total_size;
14716 emit_insn (TARGET_32BIT
14717 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
14718 GEN_INT (info->total_size))
14719 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
14720 GEN_INT (info->total_size)));
14724 /* Restore AltiVec registers if needed. */
14725 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
14729 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
14730 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
14732 rtx addr, areg, mem;
14734 areg = gen_rtx_REG (Pmode, 0);
14736 (areg, GEN_INT (info->altivec_save_offset
14738 + 16 * (i - info->first_altivec_reg_save)));
14740 /* AltiVec addressing mode is [reg+reg]. */
14741 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
14742 mem = gen_frame_mem (V4SImode, addr);
14744 emit_move_insn (gen_rtx_REG (V4SImode, i), mem);
14748 /* Restore VRSAVE if needed. */
14749 if (TARGET_ALTIVEC && TARGET_ALTIVEC_VRSAVE
14750 && info->vrsave_mask != 0)
14752 rtx addr, mem, reg;
14754 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14755 GEN_INT (info->vrsave_save_offset + sp_offset));
14756 mem = gen_frame_mem (SImode, addr);
14757 reg = gen_rtx_REG (SImode, 12);
14758 emit_move_insn (reg, mem);
14760 emit_insn (generate_set_vrsave (reg, info, 1));
14763 /* Get the old lr if we saved it. */
14764 if (info->lr_save_p)
14766 rtx mem = gen_frame_mem_offset (Pmode, frame_reg_rtx,
14767 info->lr_save_offset + sp_offset);
14769 emit_move_insn (gen_rtx_REG (Pmode, 0), mem);
14772 /* Get the old cr if we saved it. */
14773 if (info->cr_save_p)
14775 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14776 GEN_INT (info->cr_save_offset + sp_offset));
14777 rtx mem = gen_frame_mem (SImode, addr);
14779 emit_move_insn (gen_rtx_REG (SImode, 12), mem);
14782 /* Set LR here to try to overlap restores below. */
14783 if (info->lr_save_p)
14784 emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
14785 gen_rtx_REG (Pmode, 0));
14787 /* Load exception handler data registers, if needed. */
14788 if (current_function_calls_eh_return)
14790 unsigned int i, regno;
14794 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14795 GEN_INT (sp_offset + 5 * reg_size));
14796 rtx mem = gen_frame_mem (reg_mode, addr);
14798 emit_move_insn (gen_rtx_REG (reg_mode, 2), mem);
14805 regno = EH_RETURN_DATA_REGNO (i);
14806 if (regno == INVALID_REGNUM)
14809 mem = gen_frame_mem_offset (reg_mode, frame_reg_rtx,
14810 info->ehrd_offset + sp_offset
14811 + reg_size * (int) i);
14813 emit_move_insn (gen_rtx_REG (reg_mode, regno), mem);
14817 /* Restore GPRs. This is done as a PARALLEL if we are using
14818 the load-multiple instructions. */
14819 if (using_load_multiple)
14822 p = rtvec_alloc (32 - info->first_gp_reg_save);
14823 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
14825 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14826 GEN_INT (info->gp_save_offset
14829 rtx mem = gen_frame_mem (reg_mode, addr);
14832 gen_rtx_SET (VOIDmode,
14833 gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
14836 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
14839 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
14840 if ((regs_ever_live[info->first_gp_reg_save + i]
14841 && (!call_used_regs[info->first_gp_reg_save + i]
14842 || (i + info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
14843 && TARGET_TOC && TARGET_MINIMAL_TOC)))
14844 || (i + info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
14845 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
14846 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
14848 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14849 GEN_INT (info->gp_save_offset
14852 rtx mem = gen_frame_mem (reg_mode, addr);
14854 /* Restore 64-bit quantities for SPE. */
14855 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
14857 int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
14860 if (!SPE_CONST_OFFSET_OK (offset))
14862 b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
14863 emit_move_insn (b, GEN_INT (offset));
14866 b = GEN_INT (offset);
14868 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
14869 mem = gen_frame_mem (V2SImode, addr);
14872 emit_move_insn (gen_rtx_REG (reg_mode,
14873 info->first_gp_reg_save + i), mem);
14876 /* Restore fpr's if we need to do it without calling a function. */
14877 if (restoring_FPRs_inline)
14878 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
14879 if ((regs_ever_live[info->first_fp_reg_save+i]
14880 && ! call_used_regs[info->first_fp_reg_save+i]))
14883 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14884 GEN_INT (info->fp_save_offset
14887 mem = gen_frame_mem (DFmode, addr);
14889 emit_move_insn (gen_rtx_REG (DFmode,
14890 info->first_fp_reg_save + i),
14894 /* If we saved cr, restore it here. Just those that were used. */
14895 if (info->cr_save_p)
14897 rtx r12_rtx = gen_rtx_REG (SImode, 12);
14900 if (using_mfcr_multiple)
14902 for (i = 0; i < 8; i++)
14903 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
14905 gcc_assert (count);
14908 if (using_mfcr_multiple && count > 1)
14913 p = rtvec_alloc (count);
14916 for (i = 0; i < 8; i++)
14917 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
14919 rtvec r = rtvec_alloc (2);
14920 RTVEC_ELT (r, 0) = r12_rtx;
14921 RTVEC_ELT (r, 1) = GEN_INT (1 << (7-i));
14922 RTVEC_ELT (p, ndx) =
14923 gen_rtx_SET (VOIDmode, gen_rtx_REG (CCmode, CR0_REGNO+i),
14924 gen_rtx_UNSPEC (CCmode, r, UNSPEC_MOVESI_TO_CR));
14927 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
14928 gcc_assert (ndx == count);
14931 for (i = 0; i < 8; i++)
14932 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
14934 emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode,
14940 /* If this is V.4, unwind the stack pointer after all of the loads
14941 have been done. We need to emit a block here so that sched
14942 doesn't decide to move the sp change before the register restores
14943 (which may not have any obvious dependency on the stack). This
14944 doesn't hurt performance, because there is no scheduling that can
14945 be done after this point. */
14946 if (DEFAULT_ABI == ABI_V4
14947 || current_function_calls_eh_return)
14949 if (frame_reg_rtx != sp_reg_rtx)
14950 rs6000_emit_stack_tie ();
14952 if (use_backchain_to_restore_sp)
14954 emit_move_insn (sp_reg_rtx, frame_reg_rtx);
14956 else if (sp_offset != 0)
14958 emit_insn (TARGET_32BIT
14959 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
14960 GEN_INT (sp_offset))
14961 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
14962 GEN_INT (sp_offset)));
14966 if (current_function_calls_eh_return)
14968 rtx sa = EH_RETURN_STACKADJ_RTX;
14969 emit_insn (TARGET_32BIT
14970 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx, sa)
14971 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx, sa));
14977 if (! restoring_FPRs_inline)
14978 p = rtvec_alloc (3 + 64 - info->first_fp_reg_save);
14980 p = rtvec_alloc (2);
14982 RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
14983 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
14984 gen_rtx_REG (Pmode,
14985 LINK_REGISTER_REGNUM));
14987 /* If we have to restore more than two FP registers, branch to the
14988 restore function. It will return to our caller. */
14989 if (! restoring_FPRs_inline)
14993 const char *alloc_rname;
14995 sprintf (rname, "%s%d%s", RESTORE_FP_PREFIX,
14996 info->first_fp_reg_save - 32, RESTORE_FP_SUFFIX);
14997 alloc_rname = ggc_strdup (rname);
14998 RTVEC_ELT (p, 2) = gen_rtx_USE (VOIDmode,
14999 gen_rtx_SYMBOL_REF (Pmode,
15002 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
15005 addr = gen_rtx_PLUS (Pmode, sp_reg_rtx,
15006 GEN_INT (info->fp_save_offset + 8*i));
15007 mem = gen_frame_mem (DFmode, addr);
15009 RTVEC_ELT (p, i+3) =
15010 gen_rtx_SET (VOIDmode,
15011 gen_rtx_REG (DFmode, info->first_fp_reg_save + i),
15016 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
15020 /* Write function epilogue. */
15023 rs6000_output_function_epilogue (FILE *file,
15024 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
15026 rs6000_stack_t *info = rs6000_stack_info ();
15028 if (! HAVE_epilogue)
15030 rtx insn = get_last_insn ();
15031 /* If the last insn was a BARRIER, we don't have to write anything except
15032 the trace table. */
15033 if (GET_CODE (insn) == NOTE)
15034 insn = prev_nonnote_insn (insn);
15035 if (insn == 0 || GET_CODE (insn) != BARRIER)
15037 /* This is slightly ugly, but at least we don't have two
15038 copies of the epilogue-emitting code. */
15041 /* A NOTE_INSN_DELETED is supposed to be at the start
15042 and end of the "toplevel" insn chain. */
15043 emit_note (NOTE_INSN_DELETED);
15044 rs6000_emit_epilogue (FALSE);
15045 emit_note (NOTE_INSN_DELETED);
15047 /* Expand INSN_ADDRESSES so final() doesn't crash. */
15051 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
15053 INSN_ADDRESSES_NEW (insn, addr);
15058 if (TARGET_DEBUG_STACK)
15059 debug_rtx_list (get_insns (), 100);
15060 final (get_insns (), file, FALSE);
15066 macho_branch_islands ();
15067 /* Mach-O doesn't support labels at the end of objects, so if
15068 it looks like we might want one, insert a NOP. */
15070 rtx insn = get_last_insn ();
15073 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_DELETED_LABEL)
15074 insn = PREV_INSN (insn);
15078 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_DELETED_LABEL)))
15079 fputs ("\tnop\n", file);
15083 /* Output a traceback table here. See /usr/include/sys/debug.h for info
15086 We don't output a traceback table if -finhibit-size-directive was
15087 used. The documentation for -finhibit-size-directive reads
15088 ``don't output a @code{.size} assembler directive, or anything
15089 else that would cause trouble if the function is split in the
15090 middle, and the two halves are placed at locations far apart in
15091 memory.'' The traceback table has this property, since it
15092 includes the offset from the start of the function to the
15093 traceback table itself.
15095 System V.4 Powerpc's (and the embedded ABI derived from it) use a
15096 different traceback table. */
15097 if (DEFAULT_ABI == ABI_AIX && ! flag_inhibit_size_directive
15098 && rs6000_traceback != traceback_none)
15100 const char *fname = NULL;
15101 const char *language_string = lang_hooks.name;
15102 int fixed_parms = 0, float_parms = 0, parm_info = 0;
15104 int optional_tbtab;
15106 if (rs6000_traceback == traceback_full)
15107 optional_tbtab = 1;
15108 else if (rs6000_traceback == traceback_part)
15109 optional_tbtab = 0;
15111 optional_tbtab = !optimize_size && !TARGET_ELF;
15113 if (optional_tbtab)
15115 fname = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
15116 while (*fname == '.') /* V.4 encodes . in the name */
15119 /* Need label immediately before tbtab, so we can compute
15120 its offset from the function start. */
15121 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
15122 ASM_OUTPUT_LABEL (file, fname);
15125 /* The .tbtab pseudo-op can only be used for the first eight
15126 expressions, since it can't handle the possibly variable
15127 length fields that follow. However, if you omit the optional
15128 fields, the assembler outputs zeros for all optional fields
15129 anyways, giving each variable length field is minimum length
15130 (as defined in sys/debug.h). Thus we can not use the .tbtab
15131 pseudo-op at all. */
15133 /* An all-zero word flags the start of the tbtab, for debuggers
15134 that have to find it by searching forward from the entry
15135 point or from the current pc. */
15136 fputs ("\t.long 0\n", file);
15138 /* Tbtab format type. Use format type 0. */
15139 fputs ("\t.byte 0,", file);
15141 /* Language type. Unfortunately, there does not seem to be any
15142 official way to discover the language being compiled, so we
15143 use language_string.
15144 C is 0. Fortran is 1. Pascal is 2. Ada is 3. C++ is 9.
15145 Java is 13. Objective-C is 14. */
15146 if (! strcmp (language_string, "GNU C"))
15148 else if (! strcmp (language_string, "GNU F77")
15149 || ! strcmp (language_string, "GNU F95"))
15151 else if (! strcmp (language_string, "GNU Pascal"))
15153 else if (! strcmp (language_string, "GNU Ada"))
15155 else if (! strcmp (language_string, "GNU C++"))
15157 else if (! strcmp (language_string, "GNU Java"))
15159 else if (! strcmp (language_string, "GNU Objective-C"))
15162 gcc_unreachable ();
15163 fprintf (file, "%d,", i);
15165 /* 8 single bit fields: global linkage (not set for C extern linkage,
15166 apparently a PL/I convention?), out-of-line epilogue/prologue, offset
15167 from start of procedure stored in tbtab, internal function, function
15168 has controlled storage, function has no toc, function uses fp,
15169 function logs/aborts fp operations. */
15170 /* Assume that fp operations are used if any fp reg must be saved. */
15171 fprintf (file, "%d,",
15172 (optional_tbtab << 5) | ((info->first_fp_reg_save != 64) << 1));
15174 /* 6 bitfields: function is interrupt handler, name present in
15175 proc table, function calls alloca, on condition directives
15176 (controls stack walks, 3 bits), saves condition reg, saves
15178 /* The `function calls alloca' bit seems to be set whenever reg 31 is
15179 set up as a frame pointer, even when there is no alloca call. */
15180 fprintf (file, "%d,",
15181 ((optional_tbtab << 6)
15182 | ((optional_tbtab & frame_pointer_needed) << 5)
15183 | (info->cr_save_p << 1)
15184 | (info->lr_save_p)));
15186 /* 3 bitfields: saves backchain, fixup code, number of fpr saved
15188 fprintf (file, "%d,",
15189 (info->push_p << 7) | (64 - info->first_fp_reg_save));
15191 /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits). */
15192 fprintf (file, "%d,", (32 - first_reg_to_save ()));
15194 if (optional_tbtab)
15196 /* Compute the parameter info from the function decl argument
15199 int next_parm_info_bit = 31;
15201 for (decl = DECL_ARGUMENTS (current_function_decl);
15202 decl; decl = TREE_CHAIN (decl))
15204 rtx parameter = DECL_INCOMING_RTL (decl);
15205 enum machine_mode mode = GET_MODE (parameter);
15207 if (GET_CODE (parameter) == REG)
15209 if (SCALAR_FLOAT_MODE_P (mode))
15227 gcc_unreachable ();
15230 /* If only one bit will fit, don't or in this entry. */
15231 if (next_parm_info_bit > 0)
15232 parm_info |= (bits << (next_parm_info_bit - 1));
15233 next_parm_info_bit -= 2;
15237 fixed_parms += ((GET_MODE_SIZE (mode)
15238 + (UNITS_PER_WORD - 1))
15240 next_parm_info_bit -= 1;
15246 /* Number of fixed point parameters. */
15247 /* This is actually the number of words of fixed point parameters; thus
15248 an 8 byte struct counts as 2; and thus the maximum value is 8. */
15249 fprintf (file, "%d,", fixed_parms);
15251 /* 2 bitfields: number of floating point parameters (7 bits), parameters
15253 /* This is actually the number of fp registers that hold parameters;
15254 and thus the maximum value is 13. */
15255 /* Set parameters on stack bit if parameters are not in their original
15256 registers, regardless of whether they are on the stack? Xlc
15257 seems to set the bit when not optimizing. */
15258 fprintf (file, "%d\n", ((float_parms << 1) | (! optimize)));
15260 if (! optional_tbtab)
15263 /* Optional fields follow. Some are variable length. */
15265 /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
15266 11 double float. */
15267 /* There is an entry for each parameter in a register, in the order that
15268 they occur in the parameter list. Any intervening arguments on the
15269 stack are ignored. If the list overflows a long (max possible length
15270 34 bits) then completely leave off all elements that don't fit. */
15271 /* Only emit this long if there was at least one parameter. */
15272 if (fixed_parms || float_parms)
15273 fprintf (file, "\t.long %d\n", parm_info);
15275 /* Offset from start of code to tb table. */
15276 fputs ("\t.long ", file);
15277 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
15279 RS6000_OUTPUT_BASENAME (file, fname);
15281 assemble_name (file, fname);
15283 rs6000_output_function_entry (file, fname);
15286 /* Interrupt handler mask. */
15287 /* Omit this long, since we never set the interrupt handler bit
15290 /* Number of CTL (controlled storage) anchors. */
15291 /* Omit this long, since the has_ctl bit is never set above. */
15293 /* Displacement into stack of each CTL anchor. */
15294 /* Omit this list of longs, because there are no CTL anchors. */
15296 /* Length of function name. */
15299 fprintf (file, "\t.short %d\n", (int) strlen (fname));
15301 /* Function name. */
15302 assemble_string (fname, strlen (fname));
15304 /* Register for alloca automatic storage; this is always reg 31.
15305 Only emit this if the alloca bit was set above. */
15306 if (frame_pointer_needed)
15307 fputs ("\t.byte 31\n", file);
15309 fputs ("\t.align 2\n", file);
15313 /* A C compound statement that outputs the assembler code for a thunk
15314 function, used to implement C++ virtual function calls with
15315 multiple inheritance. The thunk acts as a wrapper around a virtual
15316 function, adjusting the implicit object parameter before handing
15317 control off to the real function.
15319 First, emit code to add the integer DELTA to the location that
15320 contains the incoming first argument. Assume that this argument
15321 contains a pointer, and is the one used to pass the `this' pointer
15322 in C++. This is the incoming argument *before* the function
15323 prologue, e.g. `%o0' on a sparc. The addition must preserve the
15324 values of all other incoming arguments.
15326 After the addition, emit code to jump to FUNCTION, which is a
15327 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does
15328 not touch the return address. Hence returning from FUNCTION will
15329 return to whoever called the current `thunk'.
15331 The effect must be as if FUNCTION had been called directly with the
15332 adjusted first argument. This macro is responsible for emitting
15333 all of the code for a thunk function; output_function_prologue()
15334 and output_function_epilogue() are not invoked.
15336 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already
15337 been extracted from it.) It might possibly be useful on some
15338 targets, but probably not.
15340 If you do not define this macro, the target-independent code in the
15341 C++ frontend will generate a less efficient heavyweight thunk that
15342 calls FUNCTION instead of jumping to it. The generic approach does
15343 not support varargs. */
15346 rs6000_output_mi_thunk (FILE *file, tree thunk_fndecl ATTRIBUTE_UNUSED,
15347 HOST_WIDE_INT delta, HOST_WIDE_INT vcall_offset,
15350 rtx this, insn, funexp;
15352 reload_completed = 1;
15353 epilogue_completed = 1;
15354 no_new_pseudos = 1;
15355 reset_block_changes ();
15357 /* Mark the end of the (empty) prologue. */
15358 emit_note (NOTE_INSN_PROLOGUE_END);
15360 /* Find the "this" pointer. If the function returns a structure,
15361 the structure return pointer is in r3. */
15362 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
15363 this = gen_rtx_REG (Pmode, 4);
15365 this = gen_rtx_REG (Pmode, 3);
15367 /* Apply the constant offset, if required. */
15370 rtx delta_rtx = GEN_INT (delta);
15371 emit_insn (TARGET_32BIT
15372 ? gen_addsi3 (this, this, delta_rtx)
15373 : gen_adddi3 (this, this, delta_rtx));
15376 /* Apply the offset from the vtable, if required. */
15379 rtx vcall_offset_rtx = GEN_INT (vcall_offset);
15380 rtx tmp = gen_rtx_REG (Pmode, 12);
15382 emit_move_insn (tmp, gen_rtx_MEM (Pmode, this));
15383 if (((unsigned HOST_WIDE_INT) vcall_offset) + 0x8000 >= 0x10000)
15385 emit_insn (TARGET_32BIT
15386 ? gen_addsi3 (tmp, tmp, vcall_offset_rtx)
15387 : gen_adddi3 (tmp, tmp, vcall_offset_rtx));
15388 emit_move_insn (tmp, gen_rtx_MEM (Pmode, tmp));
15392 rtx loc = gen_rtx_PLUS (Pmode, tmp, vcall_offset_rtx);
15394 emit_move_insn (tmp, gen_rtx_MEM (Pmode, loc));
15396 emit_insn (TARGET_32BIT
15397 ? gen_addsi3 (this, this, tmp)
15398 : gen_adddi3 (this, this, tmp));
15401 /* Generate a tail call to the target function. */
15402 if (!TREE_USED (function))
15404 assemble_external (function);
15405 TREE_USED (function) = 1;
15407 funexp = XEXP (DECL_RTL (function), 0);
15408 funexp = gen_rtx_MEM (FUNCTION_MODE, funexp);
15411 if (MACHOPIC_INDIRECT)
15412 funexp = machopic_indirect_call_target (funexp);
15415 /* gen_sibcall expects reload to convert scratch pseudo to LR so we must
15416 generate sibcall RTL explicitly. */
15417 insn = emit_call_insn (
15418 gen_rtx_PARALLEL (VOIDmode,
15420 gen_rtx_CALL (VOIDmode,
15421 funexp, const0_rtx),
15422 gen_rtx_USE (VOIDmode, const0_rtx),
15423 gen_rtx_USE (VOIDmode,
15424 gen_rtx_REG (SImode,
15425 LINK_REGISTER_REGNUM)),
15426 gen_rtx_RETURN (VOIDmode))));
15427 SIBLING_CALL_P (insn) = 1;
15430 /* Run just enough of rest_of_compilation to get the insns emitted.
15431 There's not really enough bulk here to make other passes such as
15432 instruction scheduling worth while. Note that use_thunk calls
15433 assemble_start_function and assemble_end_function. */
15434 insn = get_insns ();
15435 insn_locators_initialize ();
15436 shorten_branches (insn);
15437 final_start_function (insn, file, 1);
15438 final (insn, file, 1);
15439 final_end_function ();
15441 reload_completed = 0;
15442 epilogue_completed = 0;
15443 no_new_pseudos = 0;
15446 /* A quick summary of the various types of 'constant-pool tables'
15449 Target Flags Name One table per
15450 AIX (none) AIX TOC object file
15451 AIX -mfull-toc AIX TOC object file
15452 AIX -mminimal-toc AIX minimal TOC translation unit
15453 SVR4/EABI (none) SVR4 SDATA object file
15454 SVR4/EABI -fpic SVR4 pic object file
15455 SVR4/EABI -fPIC SVR4 PIC translation unit
15456 SVR4/EABI -mrelocatable EABI TOC function
15457 SVR4/EABI -maix AIX TOC object file
15458 SVR4/EABI -maix -mminimal-toc
15459 AIX minimal TOC translation unit
15461 Name Reg. Set by entries contains:
15462 made by addrs? fp? sum?
15464 AIX TOC 2 crt0 as Y option option
15465 AIX minimal TOC 30 prolog gcc Y Y option
15466 SVR4 SDATA 13 crt0 gcc N Y N
15467 SVR4 pic 30 prolog ld Y not yet N
15468 SVR4 PIC 30 prolog gcc Y option option
15469 EABI TOC 30 prolog gcc Y option option
15473 /* Hash functions for the hash table. */
15476 rs6000_hash_constant (rtx k)
15478 enum rtx_code code = GET_CODE (k);
15479 enum machine_mode mode = GET_MODE (k);
15480 unsigned result = (code << 3) ^ mode;
15481 const char *format;
15484 format = GET_RTX_FORMAT (code);
15485 flen = strlen (format);
15491 return result * 1231 + (unsigned) INSN_UID (XEXP (k, 0));
15494 if (mode != VOIDmode)
15495 return real_hash (CONST_DOUBLE_REAL_VALUE (k)) * result;
15507 for (; fidx < flen; fidx++)
15508 switch (format[fidx])
15513 const char *str = XSTR (k, fidx);
15514 len = strlen (str);
15515 result = result * 613 + len;
15516 for (i = 0; i < len; i++)
15517 result = result * 613 + (unsigned) str[i];
15522 result = result * 1231 + rs6000_hash_constant (XEXP (k, fidx));
15526 result = result * 613 + (unsigned) XINT (k, fidx);
15529 if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT))
15530 result = result * 613 + (unsigned) XWINT (k, fidx);
15534 for (i = 0; i < sizeof (HOST_WIDE_INT) / sizeof (unsigned); i++)
15535 result = result * 613 + (unsigned) (XWINT (k, fidx)
15542 gcc_unreachable ();
15549 toc_hash_function (const void *hash_entry)
15551 const struct toc_hash_struct *thc =
15552 (const struct toc_hash_struct *) hash_entry;
15553 return rs6000_hash_constant (thc->key) ^ thc->key_mode;
15556 /* Compare H1 and H2 for equivalence. */
15559 toc_hash_eq (const void *h1, const void *h2)
15561 rtx r1 = ((const struct toc_hash_struct *) h1)->key;
15562 rtx r2 = ((const struct toc_hash_struct *) h2)->key;
15564 if (((const struct toc_hash_struct *) h1)->key_mode
15565 != ((const struct toc_hash_struct *) h2)->key_mode)
15568 return rtx_equal_p (r1, r2);
15571 /* These are the names given by the C++ front-end to vtables, and
15572 vtable-like objects. Ideally, this logic should not be here;
15573 instead, there should be some programmatic way of inquiring as
15574 to whether or not an object is a vtable. */
15576 #define VTABLE_NAME_P(NAME) \
15577 (strncmp ("_vt.", name, strlen ("_vt.")) == 0 \
15578 || strncmp ("_ZTV", name, strlen ("_ZTV")) == 0 \
15579 || strncmp ("_ZTT", name, strlen ("_ZTT")) == 0 \
15580 || strncmp ("_ZTI", name, strlen ("_ZTI")) == 0 \
15581 || strncmp ("_ZTC", name, strlen ("_ZTC")) == 0)
15584 rs6000_output_symbol_ref (FILE *file, rtx x)
15586 /* Currently C++ toc references to vtables can be emitted before it
15587 is decided whether the vtable is public or private. If this is
15588 the case, then the linker will eventually complain that there is
15589 a reference to an unknown section. Thus, for vtables only,
15590 we emit the TOC reference to reference the symbol and not the
15592 const char *name = XSTR (x, 0);
15594 if (VTABLE_NAME_P (name))
15596 RS6000_OUTPUT_BASENAME (file, name);
15599 assemble_name (file, name);
15602 /* Output a TOC entry. We derive the entry name from what is being
15606 output_toc (FILE *file, rtx x, int labelno, enum machine_mode mode)
15609 const char *name = buf;
15610 const char *real_name;
15612 HOST_WIDE_INT offset = 0;
15614 gcc_assert (!TARGET_NO_TOC);
15616 /* When the linker won't eliminate them, don't output duplicate
15617 TOC entries (this happens on AIX if there is any kind of TOC,
15618 and on SVR4 under -fPIC or -mrelocatable). Don't do this for
15620 if (TARGET_TOC && GET_CODE (x) != LABEL_REF)
15622 struct toc_hash_struct *h;
15625 /* Create toc_hash_table. This can't be done at OVERRIDE_OPTIONS
15626 time because GGC is not initialized at that point. */
15627 if (toc_hash_table == NULL)
15628 toc_hash_table = htab_create_ggc (1021, toc_hash_function,
15629 toc_hash_eq, NULL);
15631 h = ggc_alloc (sizeof (*h));
15633 h->key_mode = mode;
15634 h->labelno = labelno;
15636 found = htab_find_slot (toc_hash_table, h, 1);
15637 if (*found == NULL)
15639 else /* This is indeed a duplicate.
15640 Set this label equal to that label. */
15642 fputs ("\t.set ", file);
15643 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
15644 fprintf (file, "%d,", labelno);
15645 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
15646 fprintf (file, "%d\n", ((*(const struct toc_hash_struct **)
15652 /* If we're going to put a double constant in the TOC, make sure it's
15653 aligned properly when strict alignment is on. */
15654 if (GET_CODE (x) == CONST_DOUBLE
15655 && STRICT_ALIGNMENT
15656 && GET_MODE_BITSIZE (mode) >= 64
15657 && ! (TARGET_NO_FP_IN_TOC && ! TARGET_MINIMAL_TOC)) {
15658 ASM_OUTPUT_ALIGN (file, 3);
15661 (*targetm.asm_out.internal_label) (file, "LC", labelno);
15663 /* Handle FP constants specially. Note that if we have a minimal
15664 TOC, things we put here aren't actually in the TOC, so we can allow
15666 if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == TFmode)
15668 REAL_VALUE_TYPE rv;
15671 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
15672 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);
15676 if (TARGET_MINIMAL_TOC)
15677 fputs (DOUBLE_INT_ASM_OP, file);
15679 fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
15680 k[0] & 0xffffffff, k[1] & 0xffffffff,
15681 k[2] & 0xffffffff, k[3] & 0xffffffff);
15682 fprintf (file, "0x%lx%08lx,0x%lx%08lx\n",
15683 k[0] & 0xffffffff, k[1] & 0xffffffff,
15684 k[2] & 0xffffffff, k[3] & 0xffffffff);
15689 if (TARGET_MINIMAL_TOC)
15690 fputs ("\t.long ", file);
15692 fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
15693 k[0] & 0xffffffff, k[1] & 0xffffffff,
15694 k[2] & 0xffffffff, k[3] & 0xffffffff);
15695 fprintf (file, "0x%lx,0x%lx,0x%lx,0x%lx\n",
15696 k[0] & 0xffffffff, k[1] & 0xffffffff,
15697 k[2] & 0xffffffff, k[3] & 0xffffffff);
15701 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
15703 REAL_VALUE_TYPE rv;
15706 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
15707 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
15711 if (TARGET_MINIMAL_TOC)
15712 fputs (DOUBLE_INT_ASM_OP, file);
15714 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
15715 k[0] & 0xffffffff, k[1] & 0xffffffff);
15716 fprintf (file, "0x%lx%08lx\n",
15717 k[0] & 0xffffffff, k[1] & 0xffffffff);
15722 if (TARGET_MINIMAL_TOC)
15723 fputs ("\t.long ", file);
15725 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
15726 k[0] & 0xffffffff, k[1] & 0xffffffff);
15727 fprintf (file, "0x%lx,0x%lx\n",
15728 k[0] & 0xffffffff, k[1] & 0xffffffff);
15732 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
15734 REAL_VALUE_TYPE rv;
15737 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
15738 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
15742 if (TARGET_MINIMAL_TOC)
15743 fputs (DOUBLE_INT_ASM_OP, file);
15745 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
15746 fprintf (file, "0x%lx00000000\n", l & 0xffffffff);
15751 if (TARGET_MINIMAL_TOC)
15752 fputs ("\t.long ", file);
15754 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
15755 fprintf (file, "0x%lx\n", l & 0xffffffff);
15759 else if (GET_MODE (x) == VOIDmode
15760 && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
15762 unsigned HOST_WIDE_INT low;
15763 HOST_WIDE_INT high;
15765 if (GET_CODE (x) == CONST_DOUBLE)
15767 low = CONST_DOUBLE_LOW (x);
15768 high = CONST_DOUBLE_HIGH (x);
15771 #if HOST_BITS_PER_WIDE_INT == 32
15774 high = (low & 0x80000000) ? ~0 : 0;
15778 low = INTVAL (x) & 0xffffffff;
15779 high = (HOST_WIDE_INT) INTVAL (x) >> 32;
15783 /* TOC entries are always Pmode-sized, but since this
15784 is a bigendian machine then if we're putting smaller
15785 integer constants in the TOC we have to pad them.
15786 (This is still a win over putting the constants in
15787 a separate constant pool, because then we'd have
15788 to have both a TOC entry _and_ the actual constant.)
15790 For a 32-bit target, CONST_INT values are loaded and shifted
15791 entirely within `low' and can be stored in one TOC entry. */
15793 /* It would be easy to make this work, but it doesn't now. */
15794 gcc_assert (!TARGET_64BIT || POINTER_SIZE >= GET_MODE_BITSIZE (mode));
15796 if (POINTER_SIZE > GET_MODE_BITSIZE (mode))
15798 #if HOST_BITS_PER_WIDE_INT == 32
15799 lshift_double (low, high, POINTER_SIZE - GET_MODE_BITSIZE (mode),
15800 POINTER_SIZE, &low, &high, 0);
15803 low <<= POINTER_SIZE - GET_MODE_BITSIZE (mode);
15804 high = (HOST_WIDE_INT) low >> 32;
15811 if (TARGET_MINIMAL_TOC)
15812 fputs (DOUBLE_INT_ASM_OP, file);
15814 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
15815 (long) high & 0xffffffff, (long) low & 0xffffffff);
15816 fprintf (file, "0x%lx%08lx\n",
15817 (long) high & 0xffffffff, (long) low & 0xffffffff);
15822 if (POINTER_SIZE < GET_MODE_BITSIZE (mode))
15824 if (TARGET_MINIMAL_TOC)
15825 fputs ("\t.long ", file);
15827 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
15828 (long) high & 0xffffffff, (long) low & 0xffffffff);
15829 fprintf (file, "0x%lx,0x%lx\n",
15830 (long) high & 0xffffffff, (long) low & 0xffffffff);
15834 if (TARGET_MINIMAL_TOC)
15835 fputs ("\t.long ", file);
15837 fprintf (file, "\t.tc IS_%lx[TC],", (long) low & 0xffffffff);
15838 fprintf (file, "0x%lx\n", (long) low & 0xffffffff);
15844 if (GET_CODE (x) == CONST)
15846 gcc_assert (GET_CODE (XEXP (x, 0)) == PLUS);
15848 base = XEXP (XEXP (x, 0), 0);
15849 offset = INTVAL (XEXP (XEXP (x, 0), 1));
15852 switch (GET_CODE (base))
15855 name = XSTR (base, 0);
15859 ASM_GENERATE_INTERNAL_LABEL (buf, "L",
15860 CODE_LABEL_NUMBER (XEXP (base, 0)));
15864 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (base));
15868 gcc_unreachable ();
15871 real_name = (*targetm.strip_name_encoding) (name);
15872 if (TARGET_MINIMAL_TOC)
15873 fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
15876 fprintf (file, "\t.tc %s", real_name);
15879 fprintf (file, ".N" HOST_WIDE_INT_PRINT_UNSIGNED, - offset);
15881 fprintf (file, ".P" HOST_WIDE_INT_PRINT_UNSIGNED, offset);
15883 fputs ("[TC],", file);
15886 /* Currently C++ toc references to vtables can be emitted before it
15887 is decided whether the vtable is public or private. If this is
15888 the case, then the linker will eventually complain that there is
15889 a TOC reference to an unknown section. Thus, for vtables only,
15890 we emit the TOC reference to reference the symbol and not the
15892 if (VTABLE_NAME_P (name))
15894 RS6000_OUTPUT_BASENAME (file, name);
15896 fprintf (file, HOST_WIDE_INT_PRINT_DEC, offset);
15897 else if (offset > 0)
15898 fprintf (file, "+" HOST_WIDE_INT_PRINT_DEC, offset);
15901 output_addr_const (file, x);
15905 /* Output an assembler pseudo-op to write an ASCII string of N characters
15906 starting at P to FILE.
15908 On the RS/6000, we have to do this using the .byte operation and
15909 write out special characters outside the quoted string.
15910 Also, the assembler is broken; very long strings are truncated,
15911 so we must artificially break them up early. */
15914 output_ascii (FILE *file, const char *p, int n)
15917 int i, count_string;
15918 const char *for_string = "\t.byte \"";
15919 const char *for_decimal = "\t.byte ";
15920 const char *to_close = NULL;
15923 for (i = 0; i < n; i++)
15926 if (c >= ' ' && c < 0177)
15929 fputs (for_string, file);
15932 /* Write two quotes to get one. */
15940 for_decimal = "\"\n\t.byte ";
15944 if (count_string >= 512)
15946 fputs (to_close, file);
15948 for_string = "\t.byte \"";
15949 for_decimal = "\t.byte ";
15957 fputs (for_decimal, file);
15958 fprintf (file, "%d", c);
15960 for_string = "\n\t.byte \"";
15961 for_decimal = ", ";
15967 /* Now close the string if we have written one. Then end the line. */
15969 fputs (to_close, file);
15972 /* Generate a unique section name for FILENAME for a section type
15973 represented by SECTION_DESC. Output goes into BUF.
15975 SECTION_DESC can be any string, as long as it is different for each
15976 possible section type.
15978 We name the section in the same manner as xlc. The name begins with an
15979 underscore followed by the filename (after stripping any leading directory
15980 names) with the last period replaced by the string SECTION_DESC. If
15981 FILENAME does not contain a period, SECTION_DESC is appended to the end of
15985 rs6000_gen_section_name (char **buf, const char *filename,
15986 const char *section_desc)
15988 const char *q, *after_last_slash, *last_period = 0;
15992 after_last_slash = filename;
15993 for (q = filename; *q; q++)
15996 after_last_slash = q + 1;
15997 else if (*q == '.')
16001 len = strlen (after_last_slash) + strlen (section_desc) + 2;
16002 *buf = (char *) xmalloc (len);
16007 for (q = after_last_slash; *q; q++)
16009 if (q == last_period)
16011 strcpy (p, section_desc);
16012 p += strlen (section_desc);
16016 else if (ISALNUM (*q))
16020 if (last_period == 0)
16021 strcpy (p, section_desc);
16026 /* Emit profile function. */
16029 output_profile_hook (int labelno ATTRIBUTE_UNUSED)
16031 /* Non-standard profiling for kernels, which just saves LR then calls
16032 _mcount without worrying about arg saves. The idea is to change
16033 the function prologue as little as possible as it isn't easy to
16034 account for arg save/restore code added just for _mcount. */
16035 if (TARGET_PROFILE_KERNEL)
16038 if (DEFAULT_ABI == ABI_AIX)
16040 #ifndef NO_PROFILE_COUNTERS
16041 # define NO_PROFILE_COUNTERS 0
16043 if (NO_PROFILE_COUNTERS)
16044 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 0);
16048 const char *label_name;
16051 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
16052 label_name = (*targetm.strip_name_encoding) (ggc_strdup (buf));
16053 fun = gen_rtx_SYMBOL_REF (Pmode, label_name);
16055 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 1,
16059 else if (DEFAULT_ABI == ABI_DARWIN)
16061 const char *mcount_name = RS6000_MCOUNT;
16062 int caller_addr_regno = LINK_REGISTER_REGNUM;
16064 /* Be conservative and always set this, at least for now. */
16065 current_function_uses_pic_offset_table = 1;
16068 /* For PIC code, set up a stub and collect the caller's address
16069 from r0, which is where the prologue puts it. */
16070 if (MACHOPIC_INDIRECT
16071 && current_function_uses_pic_offset_table)
16072 caller_addr_regno = 0;
16074 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, mcount_name),
16076 gen_rtx_REG (Pmode, caller_addr_regno), Pmode);
16080 /* Write function profiler code. */
16083 output_function_profiler (FILE *file, int labelno)
16087 switch (DEFAULT_ABI)
16090 gcc_unreachable ();
16095 warning (0, "no profiling of 64-bit code for this ABI");
16098 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
16099 fprintf (file, "\tmflr %s\n", reg_names[0]);
16100 if (NO_PROFILE_COUNTERS)
16102 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
16103 reg_names[0], reg_names[1]);
16105 else if (TARGET_SECURE_PLT && flag_pic)
16107 asm_fprintf (file, "\tbcl 20,31,1f\n1:\n\t{st|stw} %s,4(%s)\n",
16108 reg_names[0], reg_names[1]);
16109 asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
16110 asm_fprintf (file, "\t{cau|addis} %s,%s,",
16111 reg_names[12], reg_names[12]);
16112 assemble_name (file, buf);
16113 asm_fprintf (file, "-1b@ha\n\t{cal|la} %s,", reg_names[0]);
16114 assemble_name (file, buf);
16115 asm_fprintf (file, "-1b@l(%s)\n", reg_names[12]);
16117 else if (flag_pic == 1)
16119 fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file);
16120 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
16121 reg_names[0], reg_names[1]);
16122 asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
16123 asm_fprintf (file, "\t{l|lwz} %s,", reg_names[0]);
16124 assemble_name (file, buf);
16125 asm_fprintf (file, "@got(%s)\n", reg_names[12]);
16127 else if (flag_pic > 1)
16129 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
16130 reg_names[0], reg_names[1]);
16131 /* Now, we need to get the address of the label. */
16132 fputs ("\tbcl 20,31,1f\n\t.long ", file);
16133 assemble_name (file, buf);
16134 fputs ("-.\n1:", file);
16135 asm_fprintf (file, "\tmflr %s\n", reg_names[11]);
16136 asm_fprintf (file, "\t{l|lwz} %s,0(%s)\n",
16137 reg_names[0], reg_names[11]);
16138 asm_fprintf (file, "\t{cax|add} %s,%s,%s\n",
16139 reg_names[0], reg_names[0], reg_names[11]);
16143 asm_fprintf (file, "\t{liu|lis} %s,", reg_names[12]);
16144 assemble_name (file, buf);
16145 fputs ("@ha\n", file);
16146 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
16147 reg_names[0], reg_names[1]);
16148 asm_fprintf (file, "\t{cal|la} %s,", reg_names[0]);
16149 assemble_name (file, buf);
16150 asm_fprintf (file, "@l(%s)\n", reg_names[12]);
16153 /* ABI_V4 saves the static chain reg with ASM_OUTPUT_REG_PUSH. */
16154 fprintf (file, "\tbl %s%s\n",
16155 RS6000_MCOUNT, flag_pic ? "@plt" : "");
16160 if (!TARGET_PROFILE_KERNEL)
16162 /* Don't do anything, done in output_profile_hook (). */
16166 gcc_assert (!TARGET_32BIT);
16168 asm_fprintf (file, "\tmflr %s\n", reg_names[0]);
16169 asm_fprintf (file, "\tstd %s,16(%s)\n", reg_names[0], reg_names[1]);
16171 if (cfun->static_chain_decl != NULL)
16173 asm_fprintf (file, "\tstd %s,24(%s)\n",
16174 reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
16175 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
16176 asm_fprintf (file, "\tld %s,24(%s)\n",
16177 reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
16180 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
16187 /* Power4 load update and store update instructions are cracked into a
16188 load or store and an integer insn which are executed in the same cycle.
16189 Branches have their own dispatch slot which does not count against the
16190 GCC issue rate, but it changes the program flow so there are no other
16191 instructions to issue in this cycle. */
16194 rs6000_variable_issue (FILE *stream ATTRIBUTE_UNUSED,
16195 int verbose ATTRIBUTE_UNUSED,
16196 rtx insn, int more)
16198 if (GET_CODE (PATTERN (insn)) == USE
16199 || GET_CODE (PATTERN (insn)) == CLOBBER)
16202 if (rs6000_sched_groups)
16204 if (is_microcoded_insn (insn))
16206 else if (is_cracked_insn (insn))
16207 return more > 2 ? more - 2 : 0;
16213 /* Adjust the cost of a scheduling dependency. Return the new cost of
16214 a dependency LINK or INSN on DEP_INSN. COST is the current cost. */
16217 rs6000_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost)
16219 if (! recog_memoized (insn))
16222 if (REG_NOTE_KIND (link) != 0)
16225 if (REG_NOTE_KIND (link) == 0)
16227 /* Data dependency; DEP_INSN writes a register that INSN reads
16228 some cycles later. */
16230 /* Separate a load from a narrower, dependent store. */
16231 if (rs6000_sched_groups
16232 && GET_CODE (PATTERN (insn)) == SET
16233 && GET_CODE (PATTERN (dep_insn)) == SET
16234 && GET_CODE (XEXP (PATTERN (insn), 1)) == MEM
16235 && GET_CODE (XEXP (PATTERN (dep_insn), 0)) == MEM
16236 && (GET_MODE_SIZE (GET_MODE (XEXP (PATTERN (insn), 1)))
16237 > GET_MODE_SIZE (GET_MODE (XEXP (PATTERN (dep_insn), 0)))))
16240 switch (get_attr_type (insn))
16243 /* Tell the first scheduling pass about the latency between
16244 a mtctr and bctr (and mtlr and br/blr). The first
16245 scheduling pass will not know about this latency since
16246 the mtctr instruction, which has the latency associated
16247 to it, will be generated by reload. */
16248 return TARGET_POWER ? 5 : 4;
16250 /* Leave some extra cycles between a compare and its
16251 dependent branch, to inhibit expensive mispredicts. */
16252 if ((rs6000_cpu_attr == CPU_PPC603
16253 || rs6000_cpu_attr == CPU_PPC604
16254 || rs6000_cpu_attr == CPU_PPC604E
16255 || rs6000_cpu_attr == CPU_PPC620
16256 || rs6000_cpu_attr == CPU_PPC630
16257 || rs6000_cpu_attr == CPU_PPC750
16258 || rs6000_cpu_attr == CPU_PPC7400
16259 || rs6000_cpu_attr == CPU_PPC7450
16260 || rs6000_cpu_attr == CPU_POWER4
16261 || rs6000_cpu_attr == CPU_POWER5)
16262 && recog_memoized (dep_insn)
16263 && (INSN_CODE (dep_insn) >= 0)
16264 && (get_attr_type (dep_insn) == TYPE_CMP
16265 || get_attr_type (dep_insn) == TYPE_COMPARE
16266 || get_attr_type (dep_insn) == TYPE_DELAYED_COMPARE
16267 || get_attr_type (dep_insn) == TYPE_IMUL_COMPARE
16268 || get_attr_type (dep_insn) == TYPE_LMUL_COMPARE
16269 || get_attr_type (dep_insn) == TYPE_FPCOMPARE
16270 || get_attr_type (dep_insn) == TYPE_CR_LOGICAL
16271 || get_attr_type (dep_insn) == TYPE_DELAYED_CR))
16276 /* Fall out to return default cost. */
16282 /* The function returns a true if INSN is microcoded.
16283 Return false otherwise. */
16286 is_microcoded_insn (rtx insn)
16288 if (!insn || !INSN_P (insn)
16289 || GET_CODE (PATTERN (insn)) == USE
16290 || GET_CODE (PATTERN (insn)) == CLOBBER)
16293 if (rs6000_sched_groups)
16295 enum attr_type type = get_attr_type (insn);
16296 if (type == TYPE_LOAD_EXT_U
16297 || type == TYPE_LOAD_EXT_UX
16298 || type == TYPE_LOAD_UX
16299 || type == TYPE_STORE_UX
16300 || type == TYPE_MFCR)
16307 /* The function returns a nonzero value if INSN can be scheduled only
16308 as the first insn in a dispatch group ("dispatch-slot restricted").
16309 In this case, the returned value indicates how many dispatch slots
16310 the insn occupies (at the beginning of the group).
16311 Return 0 otherwise. */
16314 is_dispatch_slot_restricted (rtx insn)
16316 enum attr_type type;
16318 if (!rs6000_sched_groups)
16322 || insn == NULL_RTX
16323 || GET_CODE (insn) == NOTE
16324 || GET_CODE (PATTERN (insn)) == USE
16325 || GET_CODE (PATTERN (insn)) == CLOBBER)
16328 type = get_attr_type (insn);
16335 case TYPE_DELAYED_CR:
16336 case TYPE_CR_LOGICAL:
16349 if (rs6000_cpu == PROCESSOR_POWER5
16350 && is_cracked_insn (insn))
16356 /* The function returns true if INSN is cracked into 2 instructions
16357 by the processor (and therefore occupies 2 issue slots). */
16360 is_cracked_insn (rtx insn)
16362 if (!insn || !INSN_P (insn)
16363 || GET_CODE (PATTERN (insn)) == USE
16364 || GET_CODE (PATTERN (insn)) == CLOBBER)
16367 if (rs6000_sched_groups)
16369 enum attr_type type = get_attr_type (insn);
16370 if (type == TYPE_LOAD_U || type == TYPE_STORE_U
16371 || type == TYPE_FPLOAD_U || type == TYPE_FPSTORE_U
16372 || type == TYPE_FPLOAD_UX || type == TYPE_FPSTORE_UX
16373 || type == TYPE_LOAD_EXT || type == TYPE_DELAYED_CR
16374 || type == TYPE_COMPARE || type == TYPE_DELAYED_COMPARE
16375 || type == TYPE_IMUL_COMPARE || type == TYPE_LMUL_COMPARE
16376 || type == TYPE_IDIV || type == TYPE_LDIV
16377 || type == TYPE_INSERT_WORD)
16384 /* The function returns true if INSN can be issued only from
16385 the branch slot. */
16388 is_branch_slot_insn (rtx insn)
16390 if (!insn || !INSN_P (insn)
16391 || GET_CODE (PATTERN (insn)) == USE
16392 || GET_CODE (PATTERN (insn)) == CLOBBER)
16395 if (rs6000_sched_groups)
16397 enum attr_type type = get_attr_type (insn);
16398 if (type == TYPE_BRANCH || type == TYPE_JMPREG)
16406 /* A C statement (sans semicolon) to update the integer scheduling
16407 priority INSN_PRIORITY (INSN). Increase the priority to execute the
16408 INSN earlier, reduce the priority to execute INSN later. Do not
16409 define this macro if you do not need to adjust the scheduling
16410 priorities of insns. */
16413 rs6000_adjust_priority (rtx insn ATTRIBUTE_UNUSED, int priority)
16415 /* On machines (like the 750) which have asymmetric integer units,
16416 where one integer unit can do multiply and divides and the other
16417 can't, reduce the priority of multiply/divide so it is scheduled
16418 before other integer operations. */
16421 if (! INSN_P (insn))
16424 if (GET_CODE (PATTERN (insn)) == USE)
16427 switch (rs6000_cpu_attr) {
16429 switch (get_attr_type (insn))
16436 fprintf (stderr, "priority was %#x (%d) before adjustment\n",
16437 priority, priority);
16438 if (priority >= 0 && priority < 0x01000000)
16445 if (is_dispatch_slot_restricted (insn)
16446 && reload_completed
16447 && current_sched_info->sched_max_insns_priority
16448 && rs6000_sched_restricted_insns_priority)
16451 /* Prioritize insns that can be dispatched only in the first
16453 if (rs6000_sched_restricted_insns_priority == 1)
16454 /* Attach highest priority to insn. This means that in
16455 haifa-sched.c:ready_sort(), dispatch-slot restriction considerations
16456 precede 'priority' (critical path) considerations. */
16457 return current_sched_info->sched_max_insns_priority;
16458 else if (rs6000_sched_restricted_insns_priority == 2)
16459 /* Increase priority of insn by a minimal amount. This means that in
16460 haifa-sched.c:ready_sort(), only 'priority' (critical path)
16461 considerations precede dispatch-slot restriction considerations. */
16462 return (priority + 1);
16468 /* Return how many instructions the machine can issue per cycle. */
16471 rs6000_issue_rate (void)
16473 /* Use issue rate of 1 for first scheduling pass to decrease degradation. */
16474 if (!reload_completed)
16477 switch (rs6000_cpu_attr) {
16478 case CPU_RIOS1: /* ? */
16480 case CPU_PPC601: /* ? */
16503 /* Return how many instructions to look ahead for better insn
16507 rs6000_use_sched_lookahead (void)
16509 if (rs6000_cpu_attr == CPU_PPC8540)
16514 /* Determine is PAT refers to memory. */
16517 is_mem_ref (rtx pat)
16523 if (GET_CODE (pat) == MEM)
16526 /* Recursively process the pattern. */
16527 fmt = GET_RTX_FORMAT (GET_CODE (pat));
16529 for (i = GET_RTX_LENGTH (GET_CODE (pat)) - 1; i >= 0 && !ret; i--)
16532 ret |= is_mem_ref (XEXP (pat, i));
16533 else if (fmt[i] == 'E')
16534 for (j = XVECLEN (pat, i) - 1; j >= 0; j--)
16535 ret |= is_mem_ref (XVECEXP (pat, i, j));
16541 /* Determine if PAT is a PATTERN of a load insn. */
16544 is_load_insn1 (rtx pat)
16546 if (!pat || pat == NULL_RTX)
16549 if (GET_CODE (pat) == SET)
16550 return is_mem_ref (SET_SRC (pat));
16552 if (GET_CODE (pat) == PARALLEL)
16556 for (i = 0; i < XVECLEN (pat, 0); i++)
16557 if (is_load_insn1 (XVECEXP (pat, 0, i)))
16564 /* Determine if INSN loads from memory. */
16567 is_load_insn (rtx insn)
16569 if (!insn || !INSN_P (insn))
16572 if (GET_CODE (insn) == CALL_INSN)
16575 return is_load_insn1 (PATTERN (insn));
16578 /* Determine if PAT is a PATTERN of a store insn. */
16581 is_store_insn1 (rtx pat)
16583 if (!pat || pat == NULL_RTX)
16586 if (GET_CODE (pat) == SET)
16587 return is_mem_ref (SET_DEST (pat));
16589 if (GET_CODE (pat) == PARALLEL)
16593 for (i = 0; i < XVECLEN (pat, 0); i++)
16594 if (is_store_insn1 (XVECEXP (pat, 0, i)))
16601 /* Determine if INSN stores to memory. */
16604 is_store_insn (rtx insn)
16606 if (!insn || !INSN_P (insn))
16609 return is_store_insn1 (PATTERN (insn));
16612 /* Returns whether the dependence between INSN and NEXT is considered
16613 costly by the given target. */
16616 rs6000_is_costly_dependence (rtx insn, rtx next, rtx link, int cost,
16619 /* If the flag is not enabled - no dependence is considered costly;
16620 allow all dependent insns in the same group.
16621 This is the most aggressive option. */
16622 if (rs6000_sched_costly_dep == no_dep_costly)
16625 /* If the flag is set to 1 - a dependence is always considered costly;
16626 do not allow dependent instructions in the same group.
16627 This is the most conservative option. */
16628 if (rs6000_sched_costly_dep == all_deps_costly)
16631 if (rs6000_sched_costly_dep == store_to_load_dep_costly
16632 && is_load_insn (next)
16633 && is_store_insn (insn))
16634 /* Prevent load after store in the same group. */
16637 if (rs6000_sched_costly_dep == true_store_to_load_dep_costly
16638 && is_load_insn (next)
16639 && is_store_insn (insn)
16640 && (!link || (int) REG_NOTE_KIND (link) == 0))
16641 /* Prevent load after store in the same group if it is a true
16645 /* The flag is set to X; dependences with latency >= X are considered costly,
16646 and will not be scheduled in the same group. */
16647 if (rs6000_sched_costly_dep <= max_dep_latency
16648 && ((cost - distance) >= (int)rs6000_sched_costly_dep))
16654 /* Return the next insn after INSN that is found before TAIL is reached,
16655 skipping any "non-active" insns - insns that will not actually occupy
16656 an issue slot. Return NULL_RTX if such an insn is not found. */
16659 get_next_active_insn (rtx insn, rtx tail)
16661 if (insn == NULL_RTX || insn == tail)
16666 insn = NEXT_INSN (insn);
16667 if (insn == NULL_RTX || insn == tail)
16672 || (NONJUMP_INSN_P (insn)
16673 && GET_CODE (PATTERN (insn)) != USE
16674 && GET_CODE (PATTERN (insn)) != CLOBBER
16675 && INSN_CODE (insn) != CODE_FOR_stack_tie))
16681 /* Return whether the presence of INSN causes a dispatch group termination
16682 of group WHICH_GROUP.
16684 If WHICH_GROUP == current_group, this function will return true if INSN
16685 causes the termination of the current group (i.e, the dispatch group to
16686 which INSN belongs). This means that INSN will be the last insn in the
16687 group it belongs to.
16689 If WHICH_GROUP == previous_group, this function will return true if INSN
16690 causes the termination of the previous group (i.e, the dispatch group that
16691 precedes the group to which INSN belongs). This means that INSN will be
16692 the first insn in the group it belongs to). */
16695 insn_terminates_group_p (rtx insn, enum group_termination which_group)
16697 enum attr_type type;
16702 type = get_attr_type (insn);
16704 if (is_microcoded_insn (insn))
16707 if (which_group == current_group)
16709 if (is_branch_slot_insn (insn))
16713 else if (which_group == previous_group)
16715 if (is_dispatch_slot_restricted (insn))
16723 /* Return true if it is recommended to keep NEXT_INSN "far" (in a separate
16724 dispatch group) from the insns in GROUP_INSNS. Return false otherwise. */
16727 is_costly_group (rtx *group_insns, rtx next_insn)
16732 int issue_rate = rs6000_issue_rate ();
16734 for (i = 0; i < issue_rate; i++)
16736 rtx insn = group_insns[i];
16739 for (link = INSN_DEPEND (insn); link != 0; link = XEXP (link, 1))
16741 rtx next = XEXP (link, 0);
16742 if (next == next_insn)
16744 cost = insn_cost (insn, link, next_insn);
16745 if (rs6000_is_costly_dependence (insn, next_insn, link, cost, 0))
16754 /* Utility of the function redefine_groups.
16755 Check if it is too costly to schedule NEXT_INSN together with GROUP_INSNS
16756 in the same dispatch group. If so, insert nops before NEXT_INSN, in order
16757 to keep it "far" (in a separate group) from GROUP_INSNS, following
16758 one of the following schemes, depending on the value of the flag
16759 -minsert_sched_nops = X:
16760 (1) X == sched_finish_regroup_exact: insert exactly as many nops as needed
16761 in order to force NEXT_INSN into a separate group.
16762 (2) X < sched_finish_regroup_exact: insert exactly X nops.
16763 GROUP_END, CAN_ISSUE_MORE and GROUP_COUNT record the state after nop
16764 insertion (has a group just ended, how many vacant issue slots remain in the
16765 last group, and how many dispatch groups were encountered so far). */
16768 force_new_group (int sched_verbose, FILE *dump, rtx *group_insns,
16769 rtx next_insn, bool *group_end, int can_issue_more,
16774 int issue_rate = rs6000_issue_rate ();
16775 bool end = *group_end;
16778 if (next_insn == NULL_RTX)
16779 return can_issue_more;
16781 if (rs6000_sched_insert_nops > sched_finish_regroup_exact)
16782 return can_issue_more;
16784 force = is_costly_group (group_insns, next_insn);
16786 return can_issue_more;
16788 if (sched_verbose > 6)
16789 fprintf (dump,"force: group count = %d, can_issue_more = %d\n",
16790 *group_count ,can_issue_more);
16792 if (rs6000_sched_insert_nops == sched_finish_regroup_exact)
16795 can_issue_more = 0;
16797 /* Since only a branch can be issued in the last issue_slot, it is
16798 sufficient to insert 'can_issue_more - 1' nops if next_insn is not
16799 a branch. If next_insn is a branch, we insert 'can_issue_more' nops;
16800 in this case the last nop will start a new group and the branch
16801 will be forced to the new group. */
16802 if (can_issue_more && !is_branch_slot_insn (next_insn))
16805 while (can_issue_more > 0)
16808 emit_insn_before (nop, next_insn);
16816 if (rs6000_sched_insert_nops < sched_finish_regroup_exact)
16818 int n_nops = rs6000_sched_insert_nops;
16820 /* Nops can't be issued from the branch slot, so the effective
16821 issue_rate for nops is 'issue_rate - 1'. */
16822 if (can_issue_more == 0)
16823 can_issue_more = issue_rate;
16825 if (can_issue_more == 0)
16827 can_issue_more = issue_rate - 1;
16830 for (i = 0; i < issue_rate; i++)
16832 group_insns[i] = 0;
16839 emit_insn_before (nop, next_insn);
16840 if (can_issue_more == issue_rate - 1) /* new group begins */
16843 if (can_issue_more == 0)
16845 can_issue_more = issue_rate - 1;
16848 for (i = 0; i < issue_rate; i++)
16850 group_insns[i] = 0;
16856 /* Scale back relative to 'issue_rate' (instead of 'issue_rate - 1'). */
16859 /* Is next_insn going to start a new group? */
16862 || (can_issue_more == 1 && !is_branch_slot_insn (next_insn))
16863 || (can_issue_more <= 2 && is_cracked_insn (next_insn))
16864 || (can_issue_more < issue_rate &&
16865 insn_terminates_group_p (next_insn, previous_group)));
16866 if (*group_end && end)
16869 if (sched_verbose > 6)
16870 fprintf (dump, "done force: group count = %d, can_issue_more = %d\n",
16871 *group_count, can_issue_more);
16872 return can_issue_more;
16875 return can_issue_more;
16878 /* This function tries to synch the dispatch groups that the compiler "sees"
16879 with the dispatch groups that the processor dispatcher is expected to
16880 form in practice. It tries to achieve this synchronization by forcing the
16881 estimated processor grouping on the compiler (as opposed to the function
16882 'pad_goups' which tries to force the scheduler's grouping on the processor).
16884 The function scans the insn sequence between PREV_HEAD_INSN and TAIL and
16885 examines the (estimated) dispatch groups that will be formed by the processor
16886 dispatcher. It marks these group boundaries to reflect the estimated
16887 processor grouping, overriding the grouping that the scheduler had marked.
16888 Depending on the value of the flag '-minsert-sched-nops' this function can
16889 force certain insns into separate groups or force a certain distance between
16890 them by inserting nops, for example, if there exists a "costly dependence"
16893 The function estimates the group boundaries that the processor will form as
16894 follows: It keeps track of how many vacant issue slots are available after
16895 each insn. A subsequent insn will start a new group if one of the following
16897 - no more vacant issue slots remain in the current dispatch group.
16898 - only the last issue slot, which is the branch slot, is vacant, but the next
16899 insn is not a branch.
16900 - only the last 2 or less issue slots, including the branch slot, are vacant,
16901 which means that a cracked insn (which occupies two issue slots) can't be
16902 issued in this group.
16903 - less than 'issue_rate' slots are vacant, and the next insn always needs to
16904 start a new group. */
16907 redefine_groups (FILE *dump, int sched_verbose, rtx prev_head_insn, rtx tail)
16909 rtx insn, next_insn;
16911 int can_issue_more;
16914 int group_count = 0;
16918 issue_rate = rs6000_issue_rate ();
16919 group_insns = alloca (issue_rate * sizeof (rtx));
16920 for (i = 0; i < issue_rate; i++)
16922 group_insns[i] = 0;
16924 can_issue_more = issue_rate;
16926 insn = get_next_active_insn (prev_head_insn, tail);
16929 while (insn != NULL_RTX)
16931 slot = (issue_rate - can_issue_more);
16932 group_insns[slot] = insn;
16934 rs6000_variable_issue (dump, sched_verbose, insn, can_issue_more);
16935 if (insn_terminates_group_p (insn, current_group))
16936 can_issue_more = 0;
16938 next_insn = get_next_active_insn (insn, tail);
16939 if (next_insn == NULL_RTX)
16940 return group_count + 1;
16942 /* Is next_insn going to start a new group? */
16944 = (can_issue_more == 0
16945 || (can_issue_more == 1 && !is_branch_slot_insn (next_insn))
16946 || (can_issue_more <= 2 && is_cracked_insn (next_insn))
16947 || (can_issue_more < issue_rate &&
16948 insn_terminates_group_p (next_insn, previous_group)));
16950 can_issue_more = force_new_group (sched_verbose, dump, group_insns,
16951 next_insn, &group_end, can_issue_more,
16957 can_issue_more = 0;
16958 for (i = 0; i < issue_rate; i++)
16960 group_insns[i] = 0;
16964 if (GET_MODE (next_insn) == TImode && can_issue_more)
16965 PUT_MODE (next_insn, VOIDmode);
16966 else if (!can_issue_more && GET_MODE (next_insn) != TImode)
16967 PUT_MODE (next_insn, TImode);
16970 if (can_issue_more == 0)
16971 can_issue_more = issue_rate;
16974 return group_count;
16977 /* Scan the insn sequence between PREV_HEAD_INSN and TAIL and examine the
16978 dispatch group boundaries that the scheduler had marked. Pad with nops
16979 any dispatch groups which have vacant issue slots, in order to force the
16980 scheduler's grouping on the processor dispatcher. The function
16981 returns the number of dispatch groups found. */
16984 pad_groups (FILE *dump, int sched_verbose, rtx prev_head_insn, rtx tail)
16986 rtx insn, next_insn;
16989 int can_issue_more;
16991 int group_count = 0;
16993 /* Initialize issue_rate. */
16994 issue_rate = rs6000_issue_rate ();
16995 can_issue_more = issue_rate;
16997 insn = get_next_active_insn (prev_head_insn, tail);
16998 next_insn = get_next_active_insn (insn, tail);
17000 while (insn != NULL_RTX)
17003 rs6000_variable_issue (dump, sched_verbose, insn, can_issue_more);
17005 group_end = (next_insn == NULL_RTX || GET_MODE (next_insn) == TImode);
17007 if (next_insn == NULL_RTX)
17012 /* If the scheduler had marked group termination at this location
17013 (between insn and next_indn), and neither insn nor next_insn will
17014 force group termination, pad the group with nops to force group
17017 && (rs6000_sched_insert_nops == sched_finish_pad_groups)
17018 && !insn_terminates_group_p (insn, current_group)
17019 && !insn_terminates_group_p (next_insn, previous_group))
17021 if (!is_branch_slot_insn (next_insn))
17024 while (can_issue_more)
17027 emit_insn_before (nop, next_insn);
17032 can_issue_more = issue_rate;
17037 next_insn = get_next_active_insn (insn, tail);
17040 return group_count;
17043 /* The following function is called at the end of scheduling BB.
17044 After reload, it inserts nops at insn group bundling. */
17047 rs6000_sched_finish (FILE *dump, int sched_verbose)
17052 fprintf (dump, "=== Finishing schedule.\n");
17054 if (reload_completed && rs6000_sched_groups)
17056 if (rs6000_sched_insert_nops == sched_finish_none)
17059 if (rs6000_sched_insert_nops == sched_finish_pad_groups)
17060 n_groups = pad_groups (dump, sched_verbose,
17061 current_sched_info->prev_head,
17062 current_sched_info->next_tail);
17064 n_groups = redefine_groups (dump, sched_verbose,
17065 current_sched_info->prev_head,
17066 current_sched_info->next_tail);
17068 if (sched_verbose >= 6)
17070 fprintf (dump, "ngroups = %d\n", n_groups);
17071 print_rtl (dump, current_sched_info->prev_head);
17072 fprintf (dump, "Done finish_sched\n");
17077 /* Length in units of the trampoline for entering a nested function. */
17080 rs6000_trampoline_size (void)
17084 switch (DEFAULT_ABI)
17087 gcc_unreachable ();
17090 ret = (TARGET_32BIT) ? 12 : 24;
17095 ret = (TARGET_32BIT) ? 40 : 48;
17102 /* Emit RTL insns to initialize the variable parts of a trampoline.
17103 FNADDR is an RTX for the address of the function's pure code.
17104 CXT is an RTX for the static chain value for the function. */
17107 rs6000_initialize_trampoline (rtx addr, rtx fnaddr, rtx cxt)
17109 enum machine_mode pmode = Pmode;
17110 int regsize = (TARGET_32BIT) ? 4 : 8;
17111 rtx ctx_reg = force_reg (pmode, cxt);
17113 switch (DEFAULT_ABI)
17116 gcc_unreachable ();
17118 /* Macros to shorten the code expansions below. */
17119 #define MEM_DEREF(addr) gen_rtx_MEM (pmode, memory_address (pmode, addr))
17120 #define MEM_PLUS(addr,offset) \
17121 gen_rtx_MEM (pmode, memory_address (pmode, plus_constant (addr, offset)))
17123 /* Under AIX, just build the 3 word function descriptor */
17126 rtx fn_reg = gen_reg_rtx (pmode);
17127 rtx toc_reg = gen_reg_rtx (pmode);
17128 emit_move_insn (fn_reg, MEM_DEREF (fnaddr));
17129 emit_move_insn (toc_reg, MEM_PLUS (fnaddr, regsize));
17130 emit_move_insn (MEM_DEREF (addr), fn_reg);
17131 emit_move_insn (MEM_PLUS (addr, regsize), toc_reg);
17132 emit_move_insn (MEM_PLUS (addr, 2*regsize), ctx_reg);
17136 /* Under V.4/eabi/darwin, __trampoline_setup does the real work. */
17139 emit_library_call (gen_rtx_SYMBOL_REF (SImode, "__trampoline_setup"),
17140 FALSE, VOIDmode, 4,
17142 GEN_INT (rs6000_trampoline_size ()), SImode,
17152 /* Table of valid machine attributes. */
17154 const struct attribute_spec rs6000_attribute_table[] =
17156 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
17157 { "altivec", 1, 1, false, true, false, rs6000_handle_altivec_attribute },
17158 { "longcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
17159 { "shortcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
17160 #ifdef SUBTARGET_ATTRIBUTE_TABLE
17161 SUBTARGET_ATTRIBUTE_TABLE,
17163 { NULL, 0, 0, false, false, false, NULL }
17166 /* Handle the "altivec" attribute. The attribute may have
17167 arguments as follows:
17169 __attribute__((altivec(vector__)))
17170 __attribute__((altivec(pixel__))) (always followed by 'unsigned short')
17171 __attribute__((altivec(bool__))) (always followed by 'unsigned')
17173 and may appear more than once (e.g., 'vector bool char') in a
17174 given declaration. */
17177 rs6000_handle_altivec_attribute (tree *node,
17178 tree name ATTRIBUTE_UNUSED,
17180 int flags ATTRIBUTE_UNUSED,
17181 bool *no_add_attrs)
17183 tree type = *node, result = NULL_TREE;
17184 enum machine_mode mode;
17187 = ((args && TREE_CODE (args) == TREE_LIST && TREE_VALUE (args)
17188 && TREE_CODE (TREE_VALUE (args)) == IDENTIFIER_NODE)
17189 ? *IDENTIFIER_POINTER (TREE_VALUE (args))
17192 while (POINTER_TYPE_P (type)
17193 || TREE_CODE (type) == FUNCTION_TYPE
17194 || TREE_CODE (type) == METHOD_TYPE
17195 || TREE_CODE (type) == ARRAY_TYPE)
17196 type = TREE_TYPE (type);
17198 mode = TYPE_MODE (type);
17200 /* Check for invalid AltiVec type qualifiers. */
17201 if (type == long_unsigned_type_node || type == long_integer_type_node)
17204 error ("use of %<long%> in AltiVec types is invalid for 64-bit code");
17205 else if (rs6000_warn_altivec_long)
17206 warning (0, "use of %<long%> in AltiVec types is deprecated; use %<int%>");
17208 else if (type == long_long_unsigned_type_node
17209 || type == long_long_integer_type_node)
17210 error ("use of %<long long%> in AltiVec types is invalid");
17211 else if (type == double_type_node)
17212 error ("use of %<double%> in AltiVec types is invalid");
17213 else if (type == long_double_type_node)
17214 error ("use of %<long double%> in AltiVec types is invalid");
17215 else if (type == boolean_type_node)
17216 error ("use of boolean types in AltiVec types is invalid");
17217 else if (TREE_CODE (type) == COMPLEX_TYPE)
17218 error ("use of %<complex%> in AltiVec types is invalid");
17220 switch (altivec_type)
17223 unsigned_p = TYPE_UNSIGNED (type);
17227 result = (unsigned_p ? unsigned_V4SI_type_node : V4SI_type_node);
17230 result = (unsigned_p ? unsigned_V8HI_type_node : V8HI_type_node);
17233 result = (unsigned_p ? unsigned_V16QI_type_node : V16QI_type_node);
17235 case SFmode: result = V4SF_type_node; break;
17236 /* If the user says 'vector int bool', we may be handed the 'bool'
17237 attribute _before_ the 'vector' attribute, and so select the
17238 proper type in the 'b' case below. */
17239 case V4SImode: case V8HImode: case V16QImode: case V4SFmode:
17247 case SImode: case V4SImode: result = bool_V4SI_type_node; break;
17248 case HImode: case V8HImode: result = bool_V8HI_type_node; break;
17249 case QImode: case V16QImode: result = bool_V16QI_type_node;
17256 case V8HImode: result = pixel_V8HI_type_node;
17262 if (result && result != type && TYPE_READONLY (type))
17263 result = build_qualified_type (result, TYPE_QUAL_CONST);
17265 *no_add_attrs = true; /* No need to hang on to the attribute. */
17268 *node = reconstruct_complex_type (*node, result);
17273 /* AltiVec defines four built-in scalar types that serve as vector
17274 elements; we must teach the compiler how to mangle them. */
17276 static const char *
17277 rs6000_mangle_fundamental_type (tree type)
17279 if (type == bool_char_type_node) return "U6__boolc";
17280 if (type == bool_short_type_node) return "U6__bools";
17281 if (type == pixel_type_node) return "u7__pixel";
17282 if (type == bool_int_type_node) return "U6__booli";
17284 /* For all other types, use normal C++ mangling. */
17288 /* Handle a "longcall" or "shortcall" attribute; arguments as in
17289 struct attribute_spec.handler. */
17292 rs6000_handle_longcall_attribute (tree *node, tree name,
17293 tree args ATTRIBUTE_UNUSED,
17294 int flags ATTRIBUTE_UNUSED,
17295 bool *no_add_attrs)
17297 if (TREE_CODE (*node) != FUNCTION_TYPE
17298 && TREE_CODE (*node) != FIELD_DECL
17299 && TREE_CODE (*node) != TYPE_DECL)
17301 warning (OPT_Wattributes, "%qs attribute only applies to functions",
17302 IDENTIFIER_POINTER (name));
17303 *no_add_attrs = true;
17309 /* Set longcall attributes on all functions declared when
17310 rs6000_default_long_calls is true. */
17312 rs6000_set_default_type_attributes (tree type)
17314 if (rs6000_default_long_calls
17315 && (TREE_CODE (type) == FUNCTION_TYPE
17316 || TREE_CODE (type) == METHOD_TYPE))
17317 TYPE_ATTRIBUTES (type) = tree_cons (get_identifier ("longcall"),
17319 TYPE_ATTRIBUTES (type));
17322 /* Return a reference suitable for calling a function with the
17323 longcall attribute. */
17326 rs6000_longcall_ref (rtx call_ref)
17328 const char *call_name;
17331 if (GET_CODE (call_ref) != SYMBOL_REF)
17334 /* System V adds '.' to the internal name, so skip them. */
17335 call_name = XSTR (call_ref, 0);
17336 if (*call_name == '.')
17338 while (*call_name == '.')
17341 node = get_identifier (call_name);
17342 call_ref = gen_rtx_SYMBOL_REF (VOIDmode, IDENTIFIER_POINTER (node));
17345 return force_reg (Pmode, call_ref);
17348 #ifdef USING_ELFOS_H
17350 /* A C statement or statements to switch to the appropriate section
17351 for output of RTX in mode MODE. You can assume that RTX is some
17352 kind of constant in RTL. The argument MODE is redundant except in
17353 the case of a `const_int' rtx. Select the section by calling
17354 `text_section' or one of the alternatives for other sections.
17356 Do not define this macro if you put all constants in the read-only
17360 rs6000_elf_select_rtx_section (enum machine_mode mode, rtx x,
17361 unsigned HOST_WIDE_INT align)
17363 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
17366 default_elf_select_rtx_section (mode, x, align);
17369 /* A C statement or statements to switch to the appropriate
17370 section for output of DECL. DECL is either a `VAR_DECL' node
17371 or a constant of some sort. RELOC indicates whether forming
17372 the initial value of DECL requires link-time relocations. */
17375 rs6000_elf_select_section (tree decl, int reloc,
17376 unsigned HOST_WIDE_INT align)
17378 /* Pretend that we're always building for a shared library when
17379 ABI_AIX, because otherwise we end up with dynamic relocations
17380 in read-only sections. This happens for function pointers,
17381 references to vtables in typeinfo, and probably other cases. */
17382 default_elf_select_section_1 (decl, reloc, align,
17383 flag_pic || DEFAULT_ABI == ABI_AIX);
17386 /* A C statement to build up a unique section name, expressed as a
17387 STRING_CST node, and assign it to DECL_SECTION_NAME (decl).
17388 RELOC indicates whether the initial value of EXP requires
17389 link-time relocations. If you do not define this macro, GCC will use
17390 the symbol name prefixed by `.' as the section name. Note - this
17391 macro can now be called for uninitialized data items as well as
17392 initialized data and functions. */
17395 rs6000_elf_unique_section (tree decl, int reloc)
17397 /* As above, pretend that we're always building for a shared library
17398 when ABI_AIX, to avoid dynamic relocations in read-only sections. */
17399 default_unique_section_1 (decl, reloc,
17400 flag_pic || DEFAULT_ABI == ABI_AIX);
17403 /* For a SYMBOL_REF, set generic flags and then perform some
17404 target-specific processing.
17406 When the AIX ABI is requested on a non-AIX system, replace the
17407 function name with the real name (with a leading .) rather than the
17408 function descriptor name. This saves a lot of overriding code to
17409 read the prefixes. */
17412 rs6000_elf_encode_section_info (tree decl, rtx rtl, int first)
17414 default_encode_section_info (decl, rtl, first);
17417 && TREE_CODE (decl) == FUNCTION_DECL
17419 && DEFAULT_ABI == ABI_AIX)
17421 rtx sym_ref = XEXP (rtl, 0);
17422 size_t len = strlen (XSTR (sym_ref, 0));
17423 char *str = alloca (len + 2);
17425 memcpy (str + 1, XSTR (sym_ref, 0), len + 1);
17426 XSTR (sym_ref, 0) = ggc_alloc_string (str, len + 1);
17431 rs6000_elf_in_small_data_p (tree decl)
17433 if (rs6000_sdata == SDATA_NONE)
17436 /* We want to merge strings, so we never consider them small data. */
17437 if (TREE_CODE (decl) == STRING_CST)
17440 /* Functions are never in the small data area. */
17441 if (TREE_CODE (decl) == FUNCTION_DECL)
17444 if (TREE_CODE (decl) == VAR_DECL && DECL_SECTION_NAME (decl))
17446 const char *section = TREE_STRING_POINTER (DECL_SECTION_NAME (decl));
17447 if (strcmp (section, ".sdata") == 0
17448 || strcmp (section, ".sdata2") == 0
17449 || strcmp (section, ".sbss") == 0
17450 || strcmp (section, ".sbss2") == 0
17451 || strcmp (section, ".PPC.EMB.sdata0") == 0
17452 || strcmp (section, ".PPC.EMB.sbss0") == 0)
17457 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
17460 && (unsigned HOST_WIDE_INT) size <= g_switch_value
17461 /* If it's not public, and we're not going to reference it there,
17462 there's no need to put it in the small data section. */
17463 && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)))
17470 #endif /* USING_ELFOS_H */
17473 /* Return a REG that occurs in ADDR with coefficient 1.
17474 ADDR can be effectively incremented by incrementing REG.
17476 r0 is special and we must not select it as an address
17477 register by this routine since our caller will try to
17478 increment the returned register via an "la" instruction. */
17481 find_addr_reg (rtx addr)
17483 while (GET_CODE (addr) == PLUS)
17485 if (GET_CODE (XEXP (addr, 0)) == REG
17486 && REGNO (XEXP (addr, 0)) != 0)
17487 addr = XEXP (addr, 0);
17488 else if (GET_CODE (XEXP (addr, 1)) == REG
17489 && REGNO (XEXP (addr, 1)) != 0)
17490 addr = XEXP (addr, 1);
17491 else if (CONSTANT_P (XEXP (addr, 0)))
17492 addr = XEXP (addr, 1);
17493 else if (CONSTANT_P (XEXP (addr, 1)))
17494 addr = XEXP (addr, 0);
17496 gcc_unreachable ();
17498 gcc_assert (GET_CODE (addr) == REG && REGNO (addr) != 0);
17503 rs6000_fatal_bad_address (rtx op)
17505 fatal_insn ("bad address", op);
17510 static tree branch_island_list = 0;
17512 /* Remember to generate a branch island for far calls to the given
17516 add_compiler_branch_island (tree label_name, tree function_name,
17519 tree branch_island = build_tree_list (function_name, label_name);
17520 TREE_TYPE (branch_island) = build_int_cst (NULL_TREE, line_number);
17521 TREE_CHAIN (branch_island) = branch_island_list;
17522 branch_island_list = branch_island;
17525 #define BRANCH_ISLAND_LABEL_NAME(BRANCH_ISLAND) TREE_VALUE (BRANCH_ISLAND)
17526 #define BRANCH_ISLAND_FUNCTION_NAME(BRANCH_ISLAND) TREE_PURPOSE (BRANCH_ISLAND)
17527 #define BRANCH_ISLAND_LINE_NUMBER(BRANCH_ISLAND) \
17528 TREE_INT_CST_LOW (TREE_TYPE (BRANCH_ISLAND))
17530 /* Generate far-jump branch islands for everything on the
17531 branch_island_list. Invoked immediately after the last instruction
17532 of the epilogue has been emitted; the branch-islands must be
17533 appended to, and contiguous with, the function body. Mach-O stubs
17534 are generated in machopic_output_stub(). */
17537 macho_branch_islands (void)
17540 tree branch_island;
17542 for (branch_island = branch_island_list;
17544 branch_island = TREE_CHAIN (branch_island))
17546 const char *label =
17547 IDENTIFIER_POINTER (BRANCH_ISLAND_LABEL_NAME (branch_island));
17549 IDENTIFIER_POINTER (BRANCH_ISLAND_FUNCTION_NAME (branch_island));
17550 char name_buf[512];
17551 /* Cheap copy of the details from the Darwin ASM_OUTPUT_LABELREF(). */
17552 if (name[0] == '*' || name[0] == '&')
17553 strcpy (name_buf, name+1);
17557 strcpy (name_buf+1, name);
17559 strcpy (tmp_buf, "\n");
17560 strcat (tmp_buf, label);
17561 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
17562 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
17563 dbxout_stabd (N_SLINE, BRANCH_ISLAND_LINE_NUMBER (branch_island));
17564 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
17567 strcat (tmp_buf, ":\n\tmflr r0\n\tbcl 20,31,");
17568 strcat (tmp_buf, label);
17569 strcat (tmp_buf, "_pic\n");
17570 strcat (tmp_buf, label);
17571 strcat (tmp_buf, "_pic:\n\tmflr r11\n");
17573 strcat (tmp_buf, "\taddis r11,r11,ha16(");
17574 strcat (tmp_buf, name_buf);
17575 strcat (tmp_buf, " - ");
17576 strcat (tmp_buf, label);
17577 strcat (tmp_buf, "_pic)\n");
17579 strcat (tmp_buf, "\tmtlr r0\n");
17581 strcat (tmp_buf, "\taddi r12,r11,lo16(");
17582 strcat (tmp_buf, name_buf);
17583 strcat (tmp_buf, " - ");
17584 strcat (tmp_buf, label);
17585 strcat (tmp_buf, "_pic)\n");
17587 strcat (tmp_buf, "\tmtctr r12\n\tbctr\n");
17591 strcat (tmp_buf, ":\nlis r12,hi16(");
17592 strcat (tmp_buf, name_buf);
17593 strcat (tmp_buf, ")\n\tori r12,r12,lo16(");
17594 strcat (tmp_buf, name_buf);
17595 strcat (tmp_buf, ")\n\tmtctr r12\n\tbctr");
17597 output_asm_insn (tmp_buf, 0);
17598 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
17599 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
17600 dbxout_stabd (N_SLINE, BRANCH_ISLAND_LINE_NUMBER (branch_island));
17601 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
17604 branch_island_list = 0;
17607 /* NO_PREVIOUS_DEF checks in the link list whether the function name is
17608 already there or not. */
17611 no_previous_def (tree function_name)
17613 tree branch_island;
17614 for (branch_island = branch_island_list;
17616 branch_island = TREE_CHAIN (branch_island))
17617 if (function_name == BRANCH_ISLAND_FUNCTION_NAME (branch_island))
17622 /* GET_PREV_LABEL gets the label name from the previous definition of
17626 get_prev_label (tree function_name)
17628 tree branch_island;
17629 for (branch_island = branch_island_list;
17631 branch_island = TREE_CHAIN (branch_island))
17632 if (function_name == BRANCH_ISLAND_FUNCTION_NAME (branch_island))
17633 return BRANCH_ISLAND_LABEL_NAME (branch_island);
17637 /* INSN is either a function call or a millicode call. It may have an
17638 unconditional jump in its delay slot.
17640 CALL_DEST is the routine we are calling. */
17643 output_call (rtx insn, rtx *operands, int dest_operand_number,
17644 int cookie_operand_number)
17646 static char buf[256];
17647 if (GET_CODE (operands[dest_operand_number]) == SYMBOL_REF
17648 && (INTVAL (operands[cookie_operand_number]) & CALL_LONG))
17651 tree funname = get_identifier (XSTR (operands[dest_operand_number], 0));
17653 if (no_previous_def (funname))
17655 int line_number = 0;
17656 rtx label_rtx = gen_label_rtx ();
17657 char *label_buf, temp_buf[256];
17658 ASM_GENERATE_INTERNAL_LABEL (temp_buf, "L",
17659 CODE_LABEL_NUMBER (label_rtx));
17660 label_buf = temp_buf[0] == '*' ? temp_buf + 1 : temp_buf;
17661 labelname = get_identifier (label_buf);
17662 for (; insn && GET_CODE (insn) != NOTE; insn = PREV_INSN (insn));
17664 line_number = NOTE_LINE_NUMBER (insn);
17665 add_compiler_branch_island (labelname, funname, line_number);
17668 labelname = get_prev_label (funname);
17670 /* "jbsr foo, L42" is Mach-O for "Link as 'bl foo' if a 'bl'
17671 instruction will reach 'foo', otherwise link as 'bl L42'".
17672 "L42" should be a 'branch island', that will do a far jump to
17673 'foo'. Branch islands are generated in
17674 macho_branch_islands(). */
17675 sprintf (buf, "jbsr %%z%d,%.246s",
17676 dest_operand_number, IDENTIFIER_POINTER (labelname));
17679 sprintf (buf, "bl %%z%d", dest_operand_number);
17683 /* Generate PIC and indirect symbol stubs. */
17686 machopic_output_stub (FILE *file, const char *symb, const char *stub)
17688 unsigned int length;
17689 char *symbol_name, *lazy_ptr_name;
17690 char *local_label_0;
17691 static int label = 0;
17693 /* Lose our funky encoding stuff so it doesn't contaminate the stub. */
17694 symb = (*targetm.strip_name_encoding) (symb);
17697 length = strlen (symb);
17698 symbol_name = alloca (length + 32);
17699 GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name, symb, length);
17701 lazy_ptr_name = alloca (length + 32);
17702 GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name, symb, length);
17705 machopic_picsymbol_stub1_section ();
17707 machopic_symbol_stub1_section ();
17711 fprintf (file, "\t.align 5\n");
17713 fprintf (file, "%s:\n", stub);
17714 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
17717 local_label_0 = alloca (sizeof ("\"L00000000000$spb\""));
17718 sprintf (local_label_0, "\"L%011d$spb\"", label);
17720 fprintf (file, "\tmflr r0\n");
17721 fprintf (file, "\tbcl 20,31,%s\n", local_label_0);
17722 fprintf (file, "%s:\n\tmflr r11\n", local_label_0);
17723 fprintf (file, "\taddis r11,r11,ha16(%s-%s)\n",
17724 lazy_ptr_name, local_label_0);
17725 fprintf (file, "\tmtlr r0\n");
17726 fprintf (file, "\t%s r12,lo16(%s-%s)(r11)\n",
17727 (TARGET_64BIT ? "ldu" : "lwzu"),
17728 lazy_ptr_name, local_label_0);
17729 fprintf (file, "\tmtctr r12\n");
17730 fprintf (file, "\tbctr\n");
17734 fprintf (file, "\t.align 4\n");
17736 fprintf (file, "%s:\n", stub);
17737 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
17739 fprintf (file, "\tlis r11,ha16(%s)\n", lazy_ptr_name);
17740 fprintf (file, "\t%s r12,lo16(%s)(r11)\n",
17741 (TARGET_64BIT ? "ldu" : "lwzu"),
17743 fprintf (file, "\tmtctr r12\n");
17744 fprintf (file, "\tbctr\n");
17747 machopic_lazy_symbol_ptr_section ();
17748 fprintf (file, "%s:\n", lazy_ptr_name);
17749 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
17750 fprintf (file, "%sdyld_stub_binding_helper\n",
17751 (TARGET_64BIT ? DOUBLE_INT_ASM_OP : "\t.long\t"));
17754 /* Legitimize PIC addresses. If the address is already
17755 position-independent, we return ORIG. Newly generated
17756 position-independent addresses go into a reg. This is REG if non
17757 zero, otherwise we allocate register(s) as necessary. */
17759 #define SMALL_INT(X) ((unsigned) (INTVAL (X) + 0x8000) < 0x10000)
17762 rs6000_machopic_legitimize_pic_address (rtx orig, enum machine_mode mode,
17767 if (reg == NULL && ! reload_in_progress && ! reload_completed)
17768 reg = gen_reg_rtx (Pmode);
17770 if (GET_CODE (orig) == CONST)
17774 if (GET_CODE (XEXP (orig, 0)) == PLUS
17775 && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
17778 gcc_assert (GET_CODE (XEXP (orig, 0)) == PLUS);
17780 /* Use a different reg for the intermediate value, as
17781 it will be marked UNCHANGING. */
17782 reg_temp = no_new_pseudos ? reg : gen_reg_rtx (Pmode);
17783 base = rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 0),
17786 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 1),
17789 if (GET_CODE (offset) == CONST_INT)
17791 if (SMALL_INT (offset))
17792 return plus_constant (base, INTVAL (offset));
17793 else if (! reload_in_progress && ! reload_completed)
17794 offset = force_reg (Pmode, offset);
17797 rtx mem = force_const_mem (Pmode, orig);
17798 return machopic_legitimize_pic_address (mem, Pmode, reg);
17801 return gen_rtx_PLUS (Pmode, base, offset);
17804 /* Fall back on generic machopic code. */
17805 return machopic_legitimize_pic_address (orig, mode, reg);
17808 /* This is just a placeholder to make linking work without having to
17809 add this to the generic Darwin EXTRA_SECTIONS. If -mcall-aix is
17810 ever needed for Darwin (not too likely!) this would have to get a
17811 real definition. */
17818 /* Output a .machine directive for the Darwin assembler, and call
17819 the generic start_file routine. */
17822 rs6000_darwin_file_start (void)
17824 static const struct
17830 { "ppc64", "ppc64", MASK_64BIT },
17831 { "970", "ppc970", MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64 },
17832 { "power4", "ppc970", 0 },
17833 { "G5", "ppc970", 0 },
17834 { "7450", "ppc7450", 0 },
17835 { "7400", "ppc7400", MASK_ALTIVEC },
17836 { "G4", "ppc7400", 0 },
17837 { "750", "ppc750", 0 },
17838 { "740", "ppc750", 0 },
17839 { "G3", "ppc750", 0 },
17840 { "604e", "ppc604e", 0 },
17841 { "604", "ppc604", 0 },
17842 { "603e", "ppc603", 0 },
17843 { "603", "ppc603", 0 },
17844 { "601", "ppc601", 0 },
17845 { NULL, "ppc", 0 } };
17846 const char *cpu_id = "";
17849 rs6000_file_start ();
17851 /* Determine the argument to -mcpu=. Default to G3 if not specified. */
17852 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
17853 if (rs6000_select[i].set_arch_p && rs6000_select[i].string
17854 && rs6000_select[i].string[0] != '\0')
17855 cpu_id = rs6000_select[i].string;
17857 /* Look through the mapping array. Pick the first name that either
17858 matches the argument, has a bit set in IF_SET that is also set
17859 in the target flags, or has a NULL name. */
17862 while (mapping[i].arg != NULL
17863 && strcmp (mapping[i].arg, cpu_id) != 0
17864 && (mapping[i].if_set & target_flags) == 0)
17867 fprintf (asm_out_file, "\t.machine %s\n", mapping[i].name);
17870 #endif /* TARGET_MACHO */
17873 static unsigned int
17874 rs6000_elf_section_type_flags (tree decl, const char *name, int reloc)
17876 return default_section_type_flags_1 (decl, name, reloc,
17877 flag_pic || DEFAULT_ABI == ABI_AIX);
17880 /* Record an element in the table of global constructors. SYMBOL is
17881 a SYMBOL_REF of the function to be called; PRIORITY is a number
17882 between 0 and MAX_INIT_PRIORITY.
17884 This differs from default_named_section_asm_out_constructor in
17885 that we have special handling for -mrelocatable. */
17888 rs6000_elf_asm_out_constructor (rtx symbol, int priority)
17890 const char *section = ".ctors";
17893 if (priority != DEFAULT_INIT_PRIORITY)
17895 sprintf (buf, ".ctors.%.5u",
17896 /* Invert the numbering so the linker puts us in the proper
17897 order; constructors are run from right to left, and the
17898 linker sorts in increasing order. */
17899 MAX_INIT_PRIORITY - priority);
17903 named_section_flags (section, SECTION_WRITE);
17904 assemble_align (POINTER_SIZE);
17906 if (TARGET_RELOCATABLE)
17908 fputs ("\t.long (", asm_out_file);
17909 output_addr_const (asm_out_file, symbol);
17910 fputs (")@fixup\n", asm_out_file);
17913 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
17917 rs6000_elf_asm_out_destructor (rtx symbol, int priority)
17919 const char *section = ".dtors";
17922 if (priority != DEFAULT_INIT_PRIORITY)
17924 sprintf (buf, ".dtors.%.5u",
17925 /* Invert the numbering so the linker puts us in the proper
17926 order; constructors are run from right to left, and the
17927 linker sorts in increasing order. */
17928 MAX_INIT_PRIORITY - priority);
17932 named_section_flags (section, SECTION_WRITE);
17933 assemble_align (POINTER_SIZE);
17935 if (TARGET_RELOCATABLE)
17937 fputs ("\t.long (", asm_out_file);
17938 output_addr_const (asm_out_file, symbol);
17939 fputs (")@fixup\n", asm_out_file);
17942 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
17946 rs6000_elf_declare_function_name (FILE *file, const char *name, tree decl)
17950 fputs ("\t.section\t\".opd\",\"aw\"\n\t.align 3\n", file);
17951 ASM_OUTPUT_LABEL (file, name);
17952 fputs (DOUBLE_INT_ASM_OP, file);
17953 rs6000_output_function_entry (file, name);
17954 fputs (",.TOC.@tocbase,0\n\t.previous\n", file);
17957 fputs ("\t.size\t", file);
17958 assemble_name (file, name);
17959 fputs (",24\n\t.type\t.", file);
17960 assemble_name (file, name);
17961 fputs (",@function\n", file);
17962 if (TREE_PUBLIC (decl) && ! DECL_WEAK (decl))
17964 fputs ("\t.globl\t.", file);
17965 assemble_name (file, name);
17970 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
17971 ASM_DECLARE_RESULT (file, DECL_RESULT (decl));
17972 rs6000_output_function_entry (file, name);
17973 fputs (":\n", file);
17977 if (TARGET_RELOCATABLE
17978 && !TARGET_SECURE_PLT
17979 && (get_pool_size () != 0 || current_function_profile)
17984 (*targetm.asm_out.internal_label) (file, "LCL", rs6000_pic_labelno);
17986 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
17987 fprintf (file, "\t.long ");
17988 assemble_name (file, buf);
17990 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
17991 assemble_name (file, buf);
17995 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
17996 ASM_DECLARE_RESULT (file, DECL_RESULT (decl));
17998 if (DEFAULT_ABI == ABI_AIX)
18000 const char *desc_name, *orig_name;
18002 orig_name = (*targetm.strip_name_encoding) (name);
18003 desc_name = orig_name;
18004 while (*desc_name == '.')
18007 if (TREE_PUBLIC (decl))
18008 fprintf (file, "\t.globl %s\n", desc_name);
18010 fprintf (file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
18011 fprintf (file, "%s:\n", desc_name);
18012 fprintf (file, "\t.long %s\n", orig_name);
18013 fputs ("\t.long _GLOBAL_OFFSET_TABLE_\n", file);
18014 if (DEFAULT_ABI == ABI_AIX)
18015 fputs ("\t.long 0\n", file);
18016 fprintf (file, "\t.previous\n");
18018 ASM_OUTPUT_LABEL (file, name);
18022 rs6000_elf_end_indicate_exec_stack (void)
18025 file_end_indicate_exec_stack ();
18031 rs6000_xcoff_asm_globalize_label (FILE *stream, const char *name)
18033 fputs (GLOBAL_ASM_OP, stream);
18034 RS6000_OUTPUT_BASENAME (stream, name);
18035 putc ('\n', stream);
18039 rs6000_xcoff_asm_named_section (const char *name, unsigned int flags,
18040 tree decl ATTRIBUTE_UNUSED)
18043 static const char * const suffix[3] = { "PR", "RO", "RW" };
18045 if (flags & SECTION_CODE)
18047 else if (flags & SECTION_WRITE)
18052 fprintf (asm_out_file, "\t.csect %s%s[%s],%u\n",
18053 (flags & SECTION_CODE) ? "." : "",
18054 name, suffix[smclass], flags & SECTION_ENTSIZE);
18058 rs6000_xcoff_select_section (tree decl, int reloc,
18059 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
18061 if (decl_readonly_section_1 (decl, reloc, 1))
18063 if (TREE_PUBLIC (decl))
18064 read_only_data_section ();
18066 read_only_private_data_section ();
18070 if (TREE_PUBLIC (decl))
18073 private_data_section ();
18078 rs6000_xcoff_unique_section (tree decl, int reloc ATTRIBUTE_UNUSED)
18082 /* Use select_section for private and uninitialized data. */
18083 if (!TREE_PUBLIC (decl)
18084 || DECL_COMMON (decl)
18085 || DECL_INITIAL (decl) == NULL_TREE
18086 || DECL_INITIAL (decl) == error_mark_node
18087 || (flag_zero_initialized_in_bss
18088 && initializer_zerop (DECL_INITIAL (decl))))
18091 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
18092 name = (*targetm.strip_name_encoding) (name);
18093 DECL_SECTION_NAME (decl) = build_string (strlen (name), name);
18096 /* Select section for constant in constant pool.
18098 On RS/6000, all constants are in the private read-only data area.
18099 However, if this is being placed in the TOC it must be output as a
18103 rs6000_xcoff_select_rtx_section (enum machine_mode mode, rtx x,
18104 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
18106 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
18109 read_only_private_data_section ();
18112 /* Remove any trailing [DS] or the like from the symbol name. */
18114 static const char *
18115 rs6000_xcoff_strip_name_encoding (const char *name)
18120 len = strlen (name);
18121 if (name[len - 1] == ']')
18122 return ggc_alloc_string (name, len - 4);
18127 /* Section attributes. AIX is always PIC. */
18129 static unsigned int
18130 rs6000_xcoff_section_type_flags (tree decl, const char *name, int reloc)
18132 unsigned int align;
18133 unsigned int flags = default_section_type_flags_1 (decl, name, reloc, 1);
18135 /* Align to at least UNIT size. */
18136 if (flags & SECTION_CODE)
18137 align = MIN_UNITS_PER_WORD;
18139 /* Increase alignment of large objects if not already stricter. */
18140 align = MAX ((DECL_ALIGN (decl) / BITS_PER_UNIT),
18141 int_size_in_bytes (TREE_TYPE (decl)) > MIN_UNITS_PER_WORD
18142 ? UNITS_PER_FP_WORD : MIN_UNITS_PER_WORD);
18144 return flags | (exact_log2 (align) & SECTION_ENTSIZE);
18147 /* Output at beginning of assembler file.
18149 Initialize the section names for the RS/6000 at this point.
18151 Specify filename, including full path, to assembler.
18153 We want to go into the TOC section so at least one .toc will be emitted.
18154 Also, in order to output proper .bs/.es pairs, we need at least one static
18155 [RW] section emitted.
18157 Finally, declare mcount when profiling to make the assembler happy. */
18160 rs6000_xcoff_file_start (void)
18162 rs6000_gen_section_name (&xcoff_bss_section_name,
18163 main_input_filename, ".bss_");
18164 rs6000_gen_section_name (&xcoff_private_data_section_name,
18165 main_input_filename, ".rw_");
18166 rs6000_gen_section_name (&xcoff_read_only_section_name,
18167 main_input_filename, ".ro_");
18169 fputs ("\t.file\t", asm_out_file);
18170 output_quoted_string (asm_out_file, main_input_filename);
18171 fputc ('\n', asm_out_file);
18172 if (write_symbols != NO_DEBUG)
18173 private_data_section ();
18176 fprintf (asm_out_file, "\t.extern %s\n", RS6000_MCOUNT);
18177 rs6000_file_start ();
18180 /* Output at end of assembler file.
18181 On the RS/6000, referencing data should automatically pull in text. */
18184 rs6000_xcoff_file_end (void)
18187 fputs ("_section_.text:\n", asm_out_file);
18189 fputs (TARGET_32BIT
18190 ? "\t.long _section_.text\n" : "\t.llong _section_.text\n",
18193 #endif /* TARGET_XCOFF */
18195 /* Compute a (partial) cost for rtx X. Return true if the complete
18196 cost has been computed, and false if subexpressions should be
18197 scanned. In either case, *TOTAL contains the cost result. */
18200 rs6000_rtx_costs (rtx x, int code, int outer_code, int *total)
18202 enum machine_mode mode = GET_MODE (x);
18206 /* On the RS/6000, if it is valid in the insn, it is free. */
18208 if (((outer_code == SET
18209 || outer_code == PLUS
18210 || outer_code == MINUS)
18211 && (CONST_OK_FOR_LETTER_P (INTVAL (x), 'I')
18212 || CONST_OK_FOR_LETTER_P (INTVAL (x), 'L')))
18213 || (outer_code == AND
18214 && (CONST_OK_FOR_LETTER_P (INTVAL (x), 'K')
18215 || (CONST_OK_FOR_LETTER_P (INTVAL (x),
18216 mode == SImode ? 'L' : 'J'))
18217 || mask_operand (x, mode)
18219 && mask64_operand (x, DImode))))
18220 || ((outer_code == IOR || outer_code == XOR)
18221 && (CONST_OK_FOR_LETTER_P (INTVAL (x), 'K')
18222 || (CONST_OK_FOR_LETTER_P (INTVAL (x),
18223 mode == SImode ? 'L' : 'J'))))
18224 || outer_code == ASHIFT
18225 || outer_code == ASHIFTRT
18226 || outer_code == LSHIFTRT
18227 || outer_code == ROTATE
18228 || outer_code == ROTATERT
18229 || outer_code == ZERO_EXTRACT
18230 || (outer_code == MULT
18231 && CONST_OK_FOR_LETTER_P (INTVAL (x), 'I'))
18232 || ((outer_code == DIV || outer_code == UDIV
18233 || outer_code == MOD || outer_code == UMOD)
18234 && exact_log2 (INTVAL (x)) >= 0)
18235 || (outer_code == COMPARE
18236 && (CONST_OK_FOR_LETTER_P (INTVAL (x), 'I')
18237 || CONST_OK_FOR_LETTER_P (INTVAL (x), 'K')))
18238 || (outer_code == EQ
18239 && (CONST_OK_FOR_LETTER_P (INTVAL (x), 'I')
18240 || CONST_OK_FOR_LETTER_P (INTVAL (x), 'K')
18241 || (CONST_OK_FOR_LETTER_P (INTVAL (x),
18242 mode == SImode ? 'L' : 'J'))))
18243 || (outer_code == GTU
18244 && CONST_OK_FOR_LETTER_P (INTVAL (x), 'I'))
18245 || (outer_code == LTU
18246 && CONST_OK_FOR_LETTER_P (INTVAL (x), 'P')))
18251 else if ((outer_code == PLUS
18252 && reg_or_add_cint_operand (x, VOIDmode))
18253 || (outer_code == MINUS
18254 && reg_or_sub_cint_operand (x, VOIDmode))
18255 || ((outer_code == SET
18256 || outer_code == IOR
18257 || outer_code == XOR)
18259 & ~ (unsigned HOST_WIDE_INT) 0xffffffff) == 0))
18261 *total = COSTS_N_INSNS (1);
18268 && ((outer_code == AND
18269 && (CONST_OK_FOR_LETTER_P (INTVAL (x), 'K')
18270 || CONST_OK_FOR_LETTER_P (INTVAL (x), 'L')
18271 || mask_operand (x, DImode)
18272 || mask64_operand (x, DImode)))
18273 || ((outer_code == IOR || outer_code == XOR)
18274 && CONST_DOUBLE_HIGH (x) == 0
18275 && (CONST_DOUBLE_LOW (x)
18276 & ~ (unsigned HOST_WIDE_INT) 0xffff) == 0)))
18281 else if (mode == DImode
18282 && (outer_code == SET
18283 || outer_code == IOR
18284 || outer_code == XOR)
18285 && CONST_DOUBLE_HIGH (x) == 0)
18287 *total = COSTS_N_INSNS (1);
18296 /* When optimizing for size, MEM should be slightly more expensive
18297 than generating address, e.g., (plus (reg) (const)).
18298 L1 cache latency is about two instructions. */
18299 *total = optimize_size ? COSTS_N_INSNS (1) + 1 : COSTS_N_INSNS (2);
18307 if (mode == DFmode)
18309 if (GET_CODE (XEXP (x, 0)) == MULT)
18311 /* FNMA accounted in outer NEG. */
18312 if (outer_code == NEG)
18313 *total = rs6000_cost->dmul - rs6000_cost->fp;
18315 *total = rs6000_cost->dmul;
18318 *total = rs6000_cost->fp;
18320 else if (mode == SFmode)
18322 /* FNMA accounted in outer NEG. */
18323 if (outer_code == NEG && GET_CODE (XEXP (x, 0)) == MULT)
18326 *total = rs6000_cost->fp;
18329 *total = COSTS_N_INSNS (1);
18333 if (mode == DFmode)
18335 if (GET_CODE (XEXP (x, 0)) == MULT)
18337 /* FNMA accounted in outer NEG. */
18338 if (outer_code == NEG)
18341 *total = rs6000_cost->dmul;
18344 *total = rs6000_cost->fp;
18346 else if (mode == SFmode)
18348 /* FNMA accounted in outer NEG. */
18349 if (outer_code == NEG && GET_CODE (XEXP (x, 0)) == MULT)
18352 *total = rs6000_cost->fp;
18355 *total = COSTS_N_INSNS (1);
18359 if (GET_CODE (XEXP (x, 1)) == CONST_INT
18360 && CONST_OK_FOR_LETTER_P (INTVAL (XEXP (x, 1)), 'I'))
18362 if (INTVAL (XEXP (x, 1)) >= -256
18363 && INTVAL (XEXP (x, 1)) <= 255)
18364 *total = rs6000_cost->mulsi_const9;
18366 *total = rs6000_cost->mulsi_const;
18368 /* FMA accounted in outer PLUS/MINUS. */
18369 else if ((mode == DFmode || mode == SFmode)
18370 && (outer_code == PLUS || outer_code == MINUS))
18372 else if (mode == DFmode)
18373 *total = rs6000_cost->dmul;
18374 else if (mode == SFmode)
18375 *total = rs6000_cost->fp;
18376 else if (mode == DImode)
18377 *total = rs6000_cost->muldi;
18379 *total = rs6000_cost->mulsi;
18384 if (FLOAT_MODE_P (mode))
18386 *total = mode == DFmode ? rs6000_cost->ddiv
18387 : rs6000_cost->sdiv;
18394 if (GET_CODE (XEXP (x, 1)) == CONST_INT
18395 && exact_log2 (INTVAL (XEXP (x, 1))) >= 0)
18397 if (code == DIV || code == MOD)
18399 *total = COSTS_N_INSNS (2);
18402 *total = COSTS_N_INSNS (1);
18406 if (GET_MODE (XEXP (x, 1)) == DImode)
18407 *total = rs6000_cost->divdi;
18409 *total = rs6000_cost->divsi;
18411 /* Add in shift and subtract for MOD. */
18412 if (code == MOD || code == UMOD)
18413 *total += COSTS_N_INSNS (2);
18417 *total = COSTS_N_INSNS (4);
18421 if (outer_code == AND || outer_code == IOR || outer_code == XOR)
18432 *total = COSTS_N_INSNS (1);
18440 /* Handle mul_highpart. */
18441 if (outer_code == TRUNCATE
18442 && GET_CODE (XEXP (x, 0)) == MULT)
18444 if (mode == DImode)
18445 *total = rs6000_cost->muldi;
18447 *total = rs6000_cost->mulsi;
18450 else if (outer_code == AND)
18453 *total = COSTS_N_INSNS (1);
18458 if (GET_CODE (XEXP (x, 0)) == MEM)
18461 *total = COSTS_N_INSNS (1);
18467 if (!FLOAT_MODE_P (mode))
18469 *total = COSTS_N_INSNS (1);
18475 case UNSIGNED_FLOAT:
18478 case FLOAT_TRUNCATE:
18479 *total = rs6000_cost->fp;
18483 if (mode == DFmode)
18486 *total = rs6000_cost->fp;
18490 switch (XINT (x, 1))
18493 *total = rs6000_cost->fp;
18505 *total = COSTS_N_INSNS (1);
18508 else if (FLOAT_MODE_P (mode)
18509 && TARGET_PPC_GFXOPT && TARGET_HARD_FLOAT && TARGET_FPRS)
18511 *total = rs6000_cost->fp;
18519 /* Carry bit requires mode == Pmode.
18520 NEG or PLUS already counted so only add one. */
18522 && (outer_code == NEG || outer_code == PLUS))
18524 *total = COSTS_N_INSNS (1);
18527 if (outer_code == SET)
18529 if (XEXP (x, 1) == const0_rtx)
18531 *total = COSTS_N_INSNS (2);
18534 else if (mode == Pmode)
18536 *total = COSTS_N_INSNS (3);
18545 if (outer_code == SET && (XEXP (x, 1) == const0_rtx))
18547 *total = COSTS_N_INSNS (2);
18551 if (outer_code == COMPARE)
18565 /* A C expression returning the cost of moving data from a register of class
18566 CLASS1 to one of CLASS2. */
18569 rs6000_register_move_cost (enum machine_mode mode,
18570 enum reg_class from, enum reg_class to)
18572 /* Moves from/to GENERAL_REGS. */
18573 if (reg_classes_intersect_p (to, GENERAL_REGS)
18574 || reg_classes_intersect_p (from, GENERAL_REGS))
18576 if (! reg_classes_intersect_p (to, GENERAL_REGS))
18579 if (from == FLOAT_REGS || from == ALTIVEC_REGS)
18580 return (rs6000_memory_move_cost (mode, from, 0)
18581 + rs6000_memory_move_cost (mode, GENERAL_REGS, 0));
18583 /* It's more expensive to move CR_REGS than CR0_REGS because of the
18585 else if (from == CR_REGS)
18589 /* A move will cost one instruction per GPR moved. */
18590 return 2 * hard_regno_nregs[0][mode];
18593 /* Moving between two similar registers is just one instruction. */
18594 else if (reg_classes_intersect_p (to, from))
18595 return mode == TFmode ? 4 : 2;
18597 /* Everything else has to go through GENERAL_REGS. */
18599 return (rs6000_register_move_cost (mode, GENERAL_REGS, to)
18600 + rs6000_register_move_cost (mode, from, GENERAL_REGS));
18603 /* A C expressions returning the cost of moving data of MODE from a register to
18607 rs6000_memory_move_cost (enum machine_mode mode, enum reg_class class,
18608 int in ATTRIBUTE_UNUSED)
18610 if (reg_classes_intersect_p (class, GENERAL_REGS))
18611 return 4 * hard_regno_nregs[0][mode];
18612 else if (reg_classes_intersect_p (class, FLOAT_REGS))
18613 return 4 * hard_regno_nregs[32][mode];
18614 else if (reg_classes_intersect_p (class, ALTIVEC_REGS))
18615 return 4 * hard_regno_nregs[FIRST_ALTIVEC_REGNO][mode];
18617 return 4 + rs6000_register_move_cost (mode, class, GENERAL_REGS);
18620 /* Newton-Raphson approximation of single-precision floating point divide n/d.
18621 Assumes no trapping math and finite arguments. */
18624 rs6000_emit_swdivsf (rtx res, rtx n, rtx d)
18626 rtx x0, e0, e1, y1, u0, v0, one;
18628 x0 = gen_reg_rtx (SFmode);
18629 e0 = gen_reg_rtx (SFmode);
18630 e1 = gen_reg_rtx (SFmode);
18631 y1 = gen_reg_rtx (SFmode);
18632 u0 = gen_reg_rtx (SFmode);
18633 v0 = gen_reg_rtx (SFmode);
18634 one = force_reg (SFmode, CONST_DOUBLE_FROM_REAL_VALUE (dconst1, SFmode));
18636 /* x0 = 1./d estimate */
18637 emit_insn (gen_rtx_SET (VOIDmode, x0,
18638 gen_rtx_UNSPEC (SFmode, gen_rtvec (1, d),
18640 /* e0 = 1. - d * x0 */
18641 emit_insn (gen_rtx_SET (VOIDmode, e0,
18642 gen_rtx_MINUS (SFmode, one,
18643 gen_rtx_MULT (SFmode, d, x0))));
18644 /* e1 = e0 + e0 * e0 */
18645 emit_insn (gen_rtx_SET (VOIDmode, e1,
18646 gen_rtx_PLUS (SFmode,
18647 gen_rtx_MULT (SFmode, e0, e0), e0)));
18648 /* y1 = x0 + e1 * x0 */
18649 emit_insn (gen_rtx_SET (VOIDmode, y1,
18650 gen_rtx_PLUS (SFmode,
18651 gen_rtx_MULT (SFmode, e1, x0), x0)));
18653 emit_insn (gen_rtx_SET (VOIDmode, u0,
18654 gen_rtx_MULT (SFmode, n, y1)));
18655 /* v0 = n - d * u0 */
18656 emit_insn (gen_rtx_SET (VOIDmode, v0,
18657 gen_rtx_MINUS (SFmode, n,
18658 gen_rtx_MULT (SFmode, d, u0))));
18659 /* res = u0 + v0 * y1 */
18660 emit_insn (gen_rtx_SET (VOIDmode, res,
18661 gen_rtx_PLUS (SFmode,
18662 gen_rtx_MULT (SFmode, v0, y1), u0)));
18665 /* Newton-Raphson approximation of double-precision floating point divide n/d.
18666 Assumes no trapping math and finite arguments. */
18669 rs6000_emit_swdivdf (rtx res, rtx n, rtx d)
18671 rtx x0, e0, e1, e2, y1, y2, y3, u0, v0, one;
18673 x0 = gen_reg_rtx (DFmode);
18674 e0 = gen_reg_rtx (DFmode);
18675 e1 = gen_reg_rtx (DFmode);
18676 e2 = gen_reg_rtx (DFmode);
18677 y1 = gen_reg_rtx (DFmode);
18678 y2 = gen_reg_rtx (DFmode);
18679 y3 = gen_reg_rtx (DFmode);
18680 u0 = gen_reg_rtx (DFmode);
18681 v0 = gen_reg_rtx (DFmode);
18682 one = force_reg (DFmode, CONST_DOUBLE_FROM_REAL_VALUE (dconst1, DFmode));
18684 /* x0 = 1./d estimate */
18685 emit_insn (gen_rtx_SET (VOIDmode, x0,
18686 gen_rtx_UNSPEC (DFmode, gen_rtvec (1, d),
18688 /* e0 = 1. - d * x0 */
18689 emit_insn (gen_rtx_SET (VOIDmode, e0,
18690 gen_rtx_MINUS (DFmode, one,
18691 gen_rtx_MULT (SFmode, d, x0))));
18692 /* y1 = x0 + e0 * x0 */
18693 emit_insn (gen_rtx_SET (VOIDmode, y1,
18694 gen_rtx_PLUS (DFmode,
18695 gen_rtx_MULT (DFmode, e0, x0), x0)));
18697 emit_insn (gen_rtx_SET (VOIDmode, e1,
18698 gen_rtx_MULT (DFmode, e0, e0)));
18699 /* y2 = y1 + e1 * y1 */
18700 emit_insn (gen_rtx_SET (VOIDmode, y2,
18701 gen_rtx_PLUS (DFmode,
18702 gen_rtx_MULT (DFmode, e1, y1), y1)));
18704 emit_insn (gen_rtx_SET (VOIDmode, e2,
18705 gen_rtx_MULT (DFmode, e1, e1)));
18706 /* y3 = y2 + e2 * y2 */
18707 emit_insn (gen_rtx_SET (VOIDmode, y3,
18708 gen_rtx_PLUS (DFmode,
18709 gen_rtx_MULT (DFmode, e2, y2), y2)));
18711 emit_insn (gen_rtx_SET (VOIDmode, u0,
18712 gen_rtx_MULT (DFmode, n, y3)));
18713 /* v0 = n - d * u0 */
18714 emit_insn (gen_rtx_SET (VOIDmode, v0,
18715 gen_rtx_MINUS (DFmode, n,
18716 gen_rtx_MULT (DFmode, d, u0))));
18717 /* res = u0 + v0 * y3 */
18718 emit_insn (gen_rtx_SET (VOIDmode, res,
18719 gen_rtx_PLUS (DFmode,
18720 gen_rtx_MULT (DFmode, v0, y3), u0)));
18723 /* Return an RTX representing where to find the function value of a
18724 function returning MODE. */
18726 rs6000_complex_function_value (enum machine_mode mode)
18728 unsigned int regno;
18730 enum machine_mode inner = GET_MODE_INNER (mode);
18731 unsigned int inner_bytes = GET_MODE_SIZE (inner);
18733 if (FLOAT_MODE_P (mode) && TARGET_HARD_FLOAT && TARGET_FPRS)
18734 regno = FP_ARG_RETURN;
18737 regno = GP_ARG_RETURN;
18739 /* 32-bit is OK since it'll go in r3/r4. */
18740 if (TARGET_32BIT && inner_bytes >= 4)
18741 return gen_rtx_REG (mode, regno);
18744 if (inner_bytes >= 8)
18745 return gen_rtx_REG (mode, regno);
18747 r1 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno),
18749 r2 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno + 1),
18750 GEN_INT (inner_bytes));
18751 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
18754 /* Define how to find the value returned by a function.
18755 VALTYPE is the data type of the value (as a tree).
18756 If the precise function being called is known, FUNC is its FUNCTION_DECL;
18757 otherwise, FUNC is 0.
18759 On the SPE, both FPs and vectors are returned in r3.
18761 On RS/6000 an integer value is in r3 and a floating-point value is in
18762 fp1, unless -msoft-float. */
18765 rs6000_function_value (tree valtype, tree func ATTRIBUTE_UNUSED)
18767 enum machine_mode mode;
18768 unsigned int regno;
18770 /* Special handling for structs in darwin64. */
18771 if (rs6000_darwin64_abi
18772 && TYPE_MODE (valtype) == BLKmode
18773 && TREE_CODE (valtype) == RECORD_TYPE
18774 && int_size_in_bytes (valtype) > 0)
18776 CUMULATIVE_ARGS valcum;
18780 valcum.fregno = FP_ARG_MIN_REG;
18781 valcum.vregno = ALTIVEC_ARG_MIN_REG;
18782 /* Do a trial code generation as if this were going to be passed as
18783 an argument; if any part goes in memory, we return NULL. */
18784 valret = rs6000_darwin64_record_arg (&valcum, valtype, 1, true);
18787 /* Otherwise fall through to standard ABI rules. */
18790 if (TARGET_32BIT && TARGET_POWERPC64 && TYPE_MODE (valtype) == DImode)
18792 /* Long long return value need be split in -mpowerpc64, 32bit ABI. */
18793 return gen_rtx_PARALLEL (DImode,
18795 gen_rtx_EXPR_LIST (VOIDmode,
18796 gen_rtx_REG (SImode, GP_ARG_RETURN),
18798 gen_rtx_EXPR_LIST (VOIDmode,
18799 gen_rtx_REG (SImode,
18800 GP_ARG_RETURN + 1),
18803 if (TARGET_32BIT && TARGET_POWERPC64 && TYPE_MODE (valtype) == DCmode)
18805 return gen_rtx_PARALLEL (DCmode,
18807 gen_rtx_EXPR_LIST (VOIDmode,
18808 gen_rtx_REG (SImode, GP_ARG_RETURN),
18810 gen_rtx_EXPR_LIST (VOIDmode,
18811 gen_rtx_REG (SImode,
18812 GP_ARG_RETURN + 1),
18814 gen_rtx_EXPR_LIST (VOIDmode,
18815 gen_rtx_REG (SImode,
18816 GP_ARG_RETURN + 2),
18818 gen_rtx_EXPR_LIST (VOIDmode,
18819 gen_rtx_REG (SImode,
18820 GP_ARG_RETURN + 3),
18823 if ((INTEGRAL_TYPE_P (valtype)
18824 && TYPE_PRECISION (valtype) < BITS_PER_WORD)
18825 || POINTER_TYPE_P (valtype))
18826 mode = TARGET_32BIT ? SImode : DImode;
18828 mode = TYPE_MODE (valtype);
18830 if (SCALAR_FLOAT_TYPE_P (valtype) && TARGET_HARD_FLOAT && TARGET_FPRS)
18831 regno = FP_ARG_RETURN;
18832 else if (TREE_CODE (valtype) == COMPLEX_TYPE
18833 && targetm.calls.split_complex_arg)
18834 return rs6000_complex_function_value (mode);
18835 else if (TREE_CODE (valtype) == VECTOR_TYPE
18836 && TARGET_ALTIVEC && TARGET_ALTIVEC_ABI
18837 && ALTIVEC_VECTOR_MODE (mode))
18838 regno = ALTIVEC_ARG_RETURN;
18839 else if (TARGET_E500_DOUBLE && TARGET_HARD_FLOAT
18840 && (mode == DFmode || mode == DCmode))
18841 return spe_build_register_parallel (mode, GP_ARG_RETURN);
18843 regno = GP_ARG_RETURN;
18845 return gen_rtx_REG (mode, regno);
18848 /* Define how to find the value returned by a library function
18849 assuming the value has mode MODE. */
18851 rs6000_libcall_value (enum machine_mode mode)
18853 unsigned int regno;
18855 if (TARGET_32BIT && TARGET_POWERPC64 && mode == DImode)
18857 /* Long long return value need be split in -mpowerpc64, 32bit ABI. */
18858 return gen_rtx_PARALLEL (DImode,
18860 gen_rtx_EXPR_LIST (VOIDmode,
18861 gen_rtx_REG (SImode, GP_ARG_RETURN),
18863 gen_rtx_EXPR_LIST (VOIDmode,
18864 gen_rtx_REG (SImode,
18865 GP_ARG_RETURN + 1),
18869 if (SCALAR_FLOAT_MODE_P (mode)
18870 && TARGET_HARD_FLOAT && TARGET_FPRS)
18871 regno = FP_ARG_RETURN;
18872 else if (ALTIVEC_VECTOR_MODE (mode)
18873 && TARGET_ALTIVEC && TARGET_ALTIVEC_ABI)
18874 regno = ALTIVEC_ARG_RETURN;
18875 else if (COMPLEX_MODE_P (mode) && targetm.calls.split_complex_arg)
18876 return rs6000_complex_function_value (mode);
18877 else if (TARGET_E500_DOUBLE && TARGET_HARD_FLOAT
18878 && (mode == DFmode || mode == DCmode))
18879 return spe_build_register_parallel (mode, GP_ARG_RETURN);
18881 regno = GP_ARG_RETURN;
18883 return gen_rtx_REG (mode, regno);
18886 /* Define the offset between two registers, FROM to be eliminated and its
18887 replacement TO, at the start of a routine. */
18889 rs6000_initial_elimination_offset (int from, int to)
18891 rs6000_stack_t *info = rs6000_stack_info ();
18892 HOST_WIDE_INT offset;
18894 if (from == HARD_FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
18895 offset = info->push_p ? 0 : -info->total_size;
18896 else if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
18898 offset = info->push_p ? 0 : -info->total_size;
18899 if (FRAME_GROWS_DOWNWARD)
18900 offset += info->fixed_size + info->vars_size + info->parm_size;
18902 else if (from == FRAME_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
18903 offset = FRAME_GROWS_DOWNWARD
18904 ? info->fixed_size + info->vars_size + info->parm_size
18906 else if (from == ARG_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
18907 offset = info->total_size;
18908 else if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
18909 offset = info->push_p ? info->total_size : 0;
18910 else if (from == RS6000_PIC_OFFSET_TABLE_REGNUM)
18913 gcc_unreachable ();
18918 /* Return true if TYPE is a SPE or AltiVec opaque type. */
18921 rs6000_is_opaque_type (tree type)
18923 return (type == opaque_V2SI_type_node
18924 || type == opaque_V2SF_type_node
18925 || type == opaque_p_V2SI_type_node
18926 || type == opaque_V4SI_type_node);
18930 rs6000_dwarf_register_span (rtx reg)
18935 && (SPE_VECTOR_MODE (GET_MODE (reg))
18936 || (TARGET_E500_DOUBLE && GET_MODE (reg) == DFmode)))
18941 regno = REGNO (reg);
18943 /* The duality of the SPE register size wreaks all kinds of havoc.
18944 This is a way of distinguishing r0 in 32-bits from r0 in
18947 gen_rtx_PARALLEL (VOIDmode,
18950 gen_rtx_REG (SImode, regno + 1200),
18951 gen_rtx_REG (SImode, regno))
18953 gen_rtx_REG (SImode, regno),
18954 gen_rtx_REG (SImode, regno + 1200)));
18957 /* Map internal gcc register numbers to DWARF2 register numbers. */
18960 rs6000_dbx_register_number (unsigned int regno)
18962 if (regno <= 63 || write_symbols != DWARF2_DEBUG)
18964 if (regno == MQ_REGNO)
18966 if (regno == LINK_REGISTER_REGNUM)
18968 if (regno == COUNT_REGISTER_REGNUM)
18970 if (CR_REGNO_P (regno))
18971 return regno - CR0_REGNO + 86;
18972 if (regno == XER_REGNO)
18974 if (ALTIVEC_REGNO_P (regno))
18975 return regno - FIRST_ALTIVEC_REGNO + 1124;
18976 if (regno == VRSAVE_REGNO)
18978 if (regno == VSCR_REGNO)
18980 if (regno == SPE_ACC_REGNO)
18982 if (regno == SPEFSCR_REGNO)
18984 /* SPE high reg number. We get these values of regno from
18985 rs6000_dwarf_register_span. */
18986 gcc_assert (regno >= 1200 && regno < 1232);
18990 /* target hook eh_return_filter_mode */
18991 static enum machine_mode
18992 rs6000_eh_return_filter_mode (void)
18994 return TARGET_32BIT ? SImode : word_mode;
18997 /* Target hook for vector_mode_supported_p. */
18999 rs6000_vector_mode_supported_p (enum machine_mode mode)
19002 if (TARGET_SPE && SPE_VECTOR_MODE (mode))
19005 else if (TARGET_ALTIVEC && ALTIVEC_VECTOR_MODE (mode))
19012 /* Target hook for invalid_arg_for_unprototyped_fn. */
19013 static const char *
19014 invalid_arg_for_unprototyped_fn (tree typelist, tree funcdecl, tree val)
19016 return (!rs6000_darwin64_abi
19018 && TREE_CODE (TREE_TYPE (val)) == VECTOR_TYPE
19019 && (funcdecl == NULL_TREE
19020 || (TREE_CODE (funcdecl) == FUNCTION_DECL
19021 && DECL_BUILT_IN_CLASS (funcdecl) != BUILT_IN_MD)))
19022 ? N_("AltiVec argument passed to unprototyped function")
19026 /* For TARGET_SECURE_PLT 32-bit PIC code we can save PIC register
19027 setup by using __stack_chk_fail_local hidden function instead of
19028 calling __stack_chk_fail directly. Otherwise it is better to call
19029 __stack_chk_fail directly. */
19032 rs6000_stack_protect_fail (void)
19034 return (DEFAULT_ABI == ABI_V4 && TARGET_SECURE_PLT && flag_pic)
19035 ? default_hidden_stack_protect_fail ()
19036 : default_external_stack_protect_fail ();
19039 #include "gt-rs6000.h"