1 /* Subroutines used for code generation on IBM RS/6000.
2 Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
4 Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it
9 under the terms of the GNU General Public License as published
10 by the Free Software Foundation; either version 2, or (at your
11 option) any later version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT
14 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
15 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
16 License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the
20 Free Software Foundation, 51 Franklin Street, Fifth Floor, Boston,
21 MA 02110-1301, USA. */
25 #include "coretypes.h"
29 #include "hard-reg-set.h"
31 #include "insn-config.h"
32 #include "conditions.h"
33 #include "insn-attr.h"
43 #include "basic-block.h"
44 #include "integrate.h"
50 #include "target-def.h"
51 #include "langhooks.h"
53 #include "cfglayout.h"
54 #include "sched-int.h"
55 #include "tree-gimple.h"
58 #include "xcoffout.h" /* get declarations of xcoff_*_section_name */
61 #include "gstab.h" /* for N_SLINE */
64 #ifndef TARGET_NO_PROTOTYPE
65 #define TARGET_NO_PROTOTYPE 0
68 #define min(A,B) ((A) < (B) ? (A) : (B))
69 #define max(A,B) ((A) > (B) ? (A) : (B))
71 /* Structure used to define the rs6000 stack */
72 typedef struct rs6000_stack {
73 int first_gp_reg_save; /* first callee saved GP register used */
74 int first_fp_reg_save; /* first callee saved FP register used */
75 int first_altivec_reg_save; /* first callee saved AltiVec register used */
76 int lr_save_p; /* true if the link reg needs to be saved */
77 int cr_save_p; /* true if the CR reg needs to be saved */
78 unsigned int vrsave_mask; /* mask of vec registers to save */
79 int toc_save_p; /* true if the TOC needs to be saved */
80 int push_p; /* true if we need to allocate stack space */
81 int calls_p; /* true if the function makes any calls */
82 int world_save_p; /* true if we're saving *everything*:
83 r13-r31, cr, f14-f31, vrsave, v20-v31 */
84 enum rs6000_abi abi; /* which ABI to use */
85 int gp_save_offset; /* offset to save GP regs from initial SP */
86 int fp_save_offset; /* offset to save FP regs from initial SP */
87 int altivec_save_offset; /* offset to save AltiVec regs from initial SP */
88 int lr_save_offset; /* offset to save LR from initial SP */
89 int cr_save_offset; /* offset to save CR from initial SP */
90 int vrsave_save_offset; /* offset to save VRSAVE from initial SP */
91 int spe_gp_save_offset; /* offset to save spe 64-bit gprs */
92 int toc_save_offset; /* offset to save the TOC pointer */
93 int varargs_save_offset; /* offset to save the varargs registers */
94 int ehrd_offset; /* offset to EH return data */
95 int reg_size; /* register size (4 or 8) */
96 int varargs_size; /* size to hold V.4 args passed in regs */
97 HOST_WIDE_INT vars_size; /* variable save area size */
98 int parm_size; /* outgoing parameter size */
99 int save_size; /* save area size */
100 int fixed_size; /* fixed size of stack frame */
101 int gp_size; /* size of saved GP registers */
102 int fp_size; /* size of saved FP registers */
103 int altivec_size; /* size of saved AltiVec registers */
104 int cr_size; /* size to hold CR if not in save_size */
105 int lr_size; /* size to hold LR if not in save_size */
106 int vrsave_size; /* size to hold VRSAVE if not in save_size */
107 int altivec_padding_size; /* size of altivec alignment padding if
109 int spe_gp_size; /* size of 64-bit GPR save size for SPE */
110 int spe_padding_size;
111 int toc_size; /* size to hold TOC if not in save_size */
112 HOST_WIDE_INT total_size; /* total bytes allocated for stack */
113 int spe_64bit_regs_used;
116 /* Target cpu type */
118 enum processor_type rs6000_cpu;
119 struct rs6000_cpu_select rs6000_select[3] =
121 /* switch name, tune arch */
122 { (const char *)0, "--with-cpu=", 1, 1 },
123 { (const char *)0, "-mcpu=", 1, 1 },
124 { (const char *)0, "-mtune=", 1, 0 },
127 /* Always emit branch hint bits. */
128 static GTY(()) bool rs6000_always_hint;
130 /* Schedule instructions for group formation. */
131 static GTY(()) bool rs6000_sched_groups;
133 /* Support for -msched-costly-dep option. */
134 const char *rs6000_sched_costly_dep_str;
135 enum rs6000_dependence_cost rs6000_sched_costly_dep;
137 /* Support for -minsert-sched-nops option. */
138 const char *rs6000_sched_insert_nops_str;
139 enum rs6000_nop_insertion rs6000_sched_insert_nops;
141 /* Support targetm.vectorize.builtin_mask_for_load. */
142 static GTY(()) tree altivec_builtin_mask_for_load;
144 /* Size of long double */
145 int rs6000_long_double_type_size;
147 /* Whether -mabi=altivec has appeared */
148 int rs6000_altivec_abi;
150 /* Nonzero if we want SPE ABI extensions. */
153 /* Nonzero if floating point operations are done in the GPRs. */
154 int rs6000_float_gprs = 0;
156 /* Nonzero if we want Darwin's struct-by-value-in-regs ABI. */
157 int rs6000_darwin64_abi;
159 /* Set to nonzero once AIX common-mode calls have been defined. */
160 static GTY(()) int common_mode_defined;
162 /* Save information from a "cmpxx" operation until the branch or scc is
164 rtx rs6000_compare_op0, rs6000_compare_op1;
165 int rs6000_compare_fp_p;
167 /* Label number of label created for -mrelocatable, to call to so we can
168 get the address of the GOT section */
169 int rs6000_pic_labelno;
172 /* Which abi to adhere to */
173 const char *rs6000_abi_name;
175 /* Semantics of the small data area */
176 enum rs6000_sdata_type rs6000_sdata = SDATA_DATA;
178 /* Which small data model to use */
179 const char *rs6000_sdata_name = (char *)0;
181 /* Counter for labels which are to be placed in .fixup. */
182 int fixuplabelno = 0;
185 /* Bit size of immediate TLS offsets and string from which it is decoded. */
186 int rs6000_tls_size = 32;
187 const char *rs6000_tls_size_string;
189 /* ABI enumeration available for subtarget to use. */
190 enum rs6000_abi rs6000_current_abi;
192 /* Whether to use variant of AIX ABI for PowerPC64 Linux. */
196 const char *rs6000_debug_name;
197 int rs6000_debug_stack; /* debug stack applications */
198 int rs6000_debug_arg; /* debug argument handling */
200 /* Value is TRUE if register/mode pair is acceptable. */
201 bool rs6000_hard_regno_mode_ok_p[NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
203 /* Built in types. */
205 tree rs6000_builtin_types[RS6000_BTI_MAX];
206 tree rs6000_builtin_decls[RS6000_BUILTIN_COUNT];
208 const char *rs6000_traceback_name;
210 traceback_default = 0,
216 /* Flag to say the TOC is initialized */
218 char toc_label_name[10];
220 /* Alias set for saves and restores from the rs6000 stack. */
221 static GTY(()) int rs6000_sr_alias_set;
223 /* Control alignment for fields within structures. */
224 /* String from -malign-XXXXX. */
225 int rs6000_alignment_flags;
227 /* True for any options that were explicitly set. */
229 bool aix_struct_ret; /* True if -maix-struct-ret was used. */
230 bool alignment; /* True if -malign- was used. */
231 bool abi; /* True if -mabi= was used. */
232 bool spe; /* True if -mspe= was used. */
233 bool float_gprs; /* True if -mfloat-gprs= was used. */
234 bool isel; /* True if -misel was used. */
235 bool long_double; /* True if -mlong-double- was used. */
236 } rs6000_explicit_options;
238 struct builtin_description
240 /* mask is not const because we're going to alter it below. This
241 nonsense will go away when we rewrite the -march infrastructure
242 to give us more target flag bits. */
244 const enum insn_code icode;
245 const char *const name;
246 const enum rs6000_builtins code;
249 /* Target cpu costs. */
251 struct processor_costs {
252 const int mulsi; /* cost of SImode multiplication. */
253 const int mulsi_const; /* cost of SImode multiplication by constant. */
254 const int mulsi_const9; /* cost of SImode mult by short constant. */
255 const int muldi; /* cost of DImode multiplication. */
256 const int divsi; /* cost of SImode division. */
257 const int divdi; /* cost of DImode division. */
258 const int fp; /* cost of simple SFmode and DFmode insns. */
259 const int dmul; /* cost of DFmode multiplication (and fmadd). */
260 const int sdiv; /* cost of SFmode division (fdivs). */
261 const int ddiv; /* cost of DFmode division (fdiv). */
264 const struct processor_costs *rs6000_cost;
266 /* Processor costs (relative to an add) */
268 /* Instruction size costs on 32bit processors. */
270 struct processor_costs size32_cost = {
271 COSTS_N_INSNS (1), /* mulsi */
272 COSTS_N_INSNS (1), /* mulsi_const */
273 COSTS_N_INSNS (1), /* mulsi_const9 */
274 COSTS_N_INSNS (1), /* muldi */
275 COSTS_N_INSNS (1), /* divsi */
276 COSTS_N_INSNS (1), /* divdi */
277 COSTS_N_INSNS (1), /* fp */
278 COSTS_N_INSNS (1), /* dmul */
279 COSTS_N_INSNS (1), /* sdiv */
280 COSTS_N_INSNS (1), /* ddiv */
283 /* Instruction size costs on 64bit processors. */
285 struct processor_costs size64_cost = {
286 COSTS_N_INSNS (1), /* mulsi */
287 COSTS_N_INSNS (1), /* mulsi_const */
288 COSTS_N_INSNS (1), /* mulsi_const9 */
289 COSTS_N_INSNS (1), /* muldi */
290 COSTS_N_INSNS (1), /* divsi */
291 COSTS_N_INSNS (1), /* divdi */
292 COSTS_N_INSNS (1), /* fp */
293 COSTS_N_INSNS (1), /* dmul */
294 COSTS_N_INSNS (1), /* sdiv */
295 COSTS_N_INSNS (1), /* ddiv */
298 /* Instruction costs on RIOS1 processors. */
300 struct processor_costs rios1_cost = {
301 COSTS_N_INSNS (5), /* mulsi */
302 COSTS_N_INSNS (4), /* mulsi_const */
303 COSTS_N_INSNS (3), /* mulsi_const9 */
304 COSTS_N_INSNS (5), /* muldi */
305 COSTS_N_INSNS (19), /* divsi */
306 COSTS_N_INSNS (19), /* divdi */
307 COSTS_N_INSNS (2), /* fp */
308 COSTS_N_INSNS (2), /* dmul */
309 COSTS_N_INSNS (19), /* sdiv */
310 COSTS_N_INSNS (19), /* ddiv */
313 /* Instruction costs on RIOS2 processors. */
315 struct processor_costs rios2_cost = {
316 COSTS_N_INSNS (2), /* mulsi */
317 COSTS_N_INSNS (2), /* mulsi_const */
318 COSTS_N_INSNS (2), /* mulsi_const9 */
319 COSTS_N_INSNS (2), /* muldi */
320 COSTS_N_INSNS (13), /* divsi */
321 COSTS_N_INSNS (13), /* divdi */
322 COSTS_N_INSNS (2), /* fp */
323 COSTS_N_INSNS (2), /* dmul */
324 COSTS_N_INSNS (17), /* sdiv */
325 COSTS_N_INSNS (17), /* ddiv */
328 /* Instruction costs on RS64A processors. */
330 struct processor_costs rs64a_cost = {
331 COSTS_N_INSNS (20), /* mulsi */
332 COSTS_N_INSNS (12), /* mulsi_const */
333 COSTS_N_INSNS (8), /* mulsi_const9 */
334 COSTS_N_INSNS (34), /* muldi */
335 COSTS_N_INSNS (65), /* divsi */
336 COSTS_N_INSNS (67), /* divdi */
337 COSTS_N_INSNS (4), /* fp */
338 COSTS_N_INSNS (4), /* dmul */
339 COSTS_N_INSNS (31), /* sdiv */
340 COSTS_N_INSNS (31), /* ddiv */
343 /* Instruction costs on MPCCORE processors. */
345 struct processor_costs mpccore_cost = {
346 COSTS_N_INSNS (2), /* mulsi */
347 COSTS_N_INSNS (2), /* mulsi_const */
348 COSTS_N_INSNS (2), /* mulsi_const9 */
349 COSTS_N_INSNS (2), /* muldi */
350 COSTS_N_INSNS (6), /* divsi */
351 COSTS_N_INSNS (6), /* divdi */
352 COSTS_N_INSNS (4), /* fp */
353 COSTS_N_INSNS (5), /* dmul */
354 COSTS_N_INSNS (10), /* sdiv */
355 COSTS_N_INSNS (17), /* ddiv */
358 /* Instruction costs on PPC403 processors. */
360 struct processor_costs ppc403_cost = {
361 COSTS_N_INSNS (4), /* mulsi */
362 COSTS_N_INSNS (4), /* mulsi_const */
363 COSTS_N_INSNS (4), /* mulsi_const9 */
364 COSTS_N_INSNS (4), /* muldi */
365 COSTS_N_INSNS (33), /* divsi */
366 COSTS_N_INSNS (33), /* divdi */
367 COSTS_N_INSNS (11), /* fp */
368 COSTS_N_INSNS (11), /* dmul */
369 COSTS_N_INSNS (11), /* sdiv */
370 COSTS_N_INSNS (11), /* ddiv */
373 /* Instruction costs on PPC405 processors. */
375 struct processor_costs ppc405_cost = {
376 COSTS_N_INSNS (5), /* mulsi */
377 COSTS_N_INSNS (4), /* mulsi_const */
378 COSTS_N_INSNS (3), /* mulsi_const9 */
379 COSTS_N_INSNS (5), /* muldi */
380 COSTS_N_INSNS (35), /* divsi */
381 COSTS_N_INSNS (35), /* divdi */
382 COSTS_N_INSNS (11), /* fp */
383 COSTS_N_INSNS (11), /* dmul */
384 COSTS_N_INSNS (11), /* sdiv */
385 COSTS_N_INSNS (11), /* ddiv */
388 /* Instruction costs on PPC440 processors. */
390 struct processor_costs ppc440_cost = {
391 COSTS_N_INSNS (3), /* mulsi */
392 COSTS_N_INSNS (2), /* mulsi_const */
393 COSTS_N_INSNS (2), /* mulsi_const9 */
394 COSTS_N_INSNS (3), /* muldi */
395 COSTS_N_INSNS (34), /* divsi */
396 COSTS_N_INSNS (34), /* divdi */
397 COSTS_N_INSNS (5), /* fp */
398 COSTS_N_INSNS (5), /* dmul */
399 COSTS_N_INSNS (19), /* sdiv */
400 COSTS_N_INSNS (33), /* ddiv */
403 /* Instruction costs on PPC601 processors. */
405 struct processor_costs ppc601_cost = {
406 COSTS_N_INSNS (5), /* mulsi */
407 COSTS_N_INSNS (5), /* mulsi_const */
408 COSTS_N_INSNS (5), /* mulsi_const9 */
409 COSTS_N_INSNS (5), /* muldi */
410 COSTS_N_INSNS (36), /* divsi */
411 COSTS_N_INSNS (36), /* divdi */
412 COSTS_N_INSNS (4), /* fp */
413 COSTS_N_INSNS (5), /* dmul */
414 COSTS_N_INSNS (17), /* sdiv */
415 COSTS_N_INSNS (31), /* ddiv */
418 /* Instruction costs on PPC603 processors. */
420 struct processor_costs ppc603_cost = {
421 COSTS_N_INSNS (5), /* mulsi */
422 COSTS_N_INSNS (3), /* mulsi_const */
423 COSTS_N_INSNS (2), /* mulsi_const9 */
424 COSTS_N_INSNS (5), /* muldi */
425 COSTS_N_INSNS (37), /* divsi */
426 COSTS_N_INSNS (37), /* divdi */
427 COSTS_N_INSNS (3), /* fp */
428 COSTS_N_INSNS (4), /* dmul */
429 COSTS_N_INSNS (18), /* sdiv */
430 COSTS_N_INSNS (33), /* ddiv */
433 /* Instruction costs on PPC604 processors. */
435 struct processor_costs ppc604_cost = {
436 COSTS_N_INSNS (4), /* mulsi */
437 COSTS_N_INSNS (4), /* mulsi_const */
438 COSTS_N_INSNS (4), /* mulsi_const9 */
439 COSTS_N_INSNS (4), /* muldi */
440 COSTS_N_INSNS (20), /* divsi */
441 COSTS_N_INSNS (20), /* divdi */
442 COSTS_N_INSNS (3), /* fp */
443 COSTS_N_INSNS (3), /* dmul */
444 COSTS_N_INSNS (18), /* sdiv */
445 COSTS_N_INSNS (32), /* ddiv */
448 /* Instruction costs on PPC604e processors. */
450 struct processor_costs ppc604e_cost = {
451 COSTS_N_INSNS (2), /* mulsi */
452 COSTS_N_INSNS (2), /* mulsi_const */
453 COSTS_N_INSNS (2), /* mulsi_const9 */
454 COSTS_N_INSNS (2), /* muldi */
455 COSTS_N_INSNS (20), /* divsi */
456 COSTS_N_INSNS (20), /* divdi */
457 COSTS_N_INSNS (3), /* fp */
458 COSTS_N_INSNS (3), /* dmul */
459 COSTS_N_INSNS (18), /* sdiv */
460 COSTS_N_INSNS (32), /* ddiv */
463 /* Instruction costs on PPC620 processors. */
465 struct processor_costs ppc620_cost = {
466 COSTS_N_INSNS (5), /* mulsi */
467 COSTS_N_INSNS (4), /* mulsi_const */
468 COSTS_N_INSNS (3), /* mulsi_const9 */
469 COSTS_N_INSNS (7), /* muldi */
470 COSTS_N_INSNS (21), /* divsi */
471 COSTS_N_INSNS (37), /* divdi */
472 COSTS_N_INSNS (3), /* fp */
473 COSTS_N_INSNS (3), /* dmul */
474 COSTS_N_INSNS (18), /* sdiv */
475 COSTS_N_INSNS (32), /* ddiv */
478 /* Instruction costs on PPC630 processors. */
480 struct processor_costs ppc630_cost = {
481 COSTS_N_INSNS (5), /* mulsi */
482 COSTS_N_INSNS (4), /* mulsi_const */
483 COSTS_N_INSNS (3), /* mulsi_const9 */
484 COSTS_N_INSNS (7), /* muldi */
485 COSTS_N_INSNS (21), /* divsi */
486 COSTS_N_INSNS (37), /* divdi */
487 COSTS_N_INSNS (3), /* fp */
488 COSTS_N_INSNS (3), /* dmul */
489 COSTS_N_INSNS (17), /* sdiv */
490 COSTS_N_INSNS (21), /* ddiv */
493 /* Instruction costs on PPC750 and PPC7400 processors. */
495 struct processor_costs ppc750_cost = {
496 COSTS_N_INSNS (5), /* mulsi */
497 COSTS_N_INSNS (3), /* mulsi_const */
498 COSTS_N_INSNS (2), /* mulsi_const9 */
499 COSTS_N_INSNS (5), /* muldi */
500 COSTS_N_INSNS (17), /* divsi */
501 COSTS_N_INSNS (17), /* divdi */
502 COSTS_N_INSNS (3), /* fp */
503 COSTS_N_INSNS (3), /* dmul */
504 COSTS_N_INSNS (17), /* sdiv */
505 COSTS_N_INSNS (31), /* ddiv */
508 /* Instruction costs on PPC7450 processors. */
510 struct processor_costs ppc7450_cost = {
511 COSTS_N_INSNS (4), /* mulsi */
512 COSTS_N_INSNS (3), /* mulsi_const */
513 COSTS_N_INSNS (3), /* mulsi_const9 */
514 COSTS_N_INSNS (4), /* muldi */
515 COSTS_N_INSNS (23), /* divsi */
516 COSTS_N_INSNS (23), /* divdi */
517 COSTS_N_INSNS (5), /* fp */
518 COSTS_N_INSNS (5), /* dmul */
519 COSTS_N_INSNS (21), /* sdiv */
520 COSTS_N_INSNS (35), /* ddiv */
523 /* Instruction costs on PPC8540 processors. */
525 struct processor_costs ppc8540_cost = {
526 COSTS_N_INSNS (4), /* mulsi */
527 COSTS_N_INSNS (4), /* mulsi_const */
528 COSTS_N_INSNS (4), /* mulsi_const9 */
529 COSTS_N_INSNS (4), /* muldi */
530 COSTS_N_INSNS (19), /* divsi */
531 COSTS_N_INSNS (19), /* divdi */
532 COSTS_N_INSNS (4), /* fp */
533 COSTS_N_INSNS (4), /* dmul */
534 COSTS_N_INSNS (29), /* sdiv */
535 COSTS_N_INSNS (29), /* ddiv */
538 /* Instruction costs on POWER4 and POWER5 processors. */
540 struct processor_costs power4_cost = {
541 COSTS_N_INSNS (3), /* mulsi */
542 COSTS_N_INSNS (2), /* mulsi_const */
543 COSTS_N_INSNS (2), /* mulsi_const9 */
544 COSTS_N_INSNS (4), /* muldi */
545 COSTS_N_INSNS (18), /* divsi */
546 COSTS_N_INSNS (34), /* divdi */
547 COSTS_N_INSNS (3), /* fp */
548 COSTS_N_INSNS (3), /* dmul */
549 COSTS_N_INSNS (17), /* sdiv */
550 COSTS_N_INSNS (17), /* ddiv */
554 static bool rs6000_function_ok_for_sibcall (tree, tree);
555 static const char *rs6000_invalid_within_doloop (rtx);
556 static rtx rs6000_generate_compare (enum rtx_code);
557 static void rs6000_maybe_dead (rtx);
558 static void rs6000_emit_stack_tie (void);
559 static void rs6000_frame_related (rtx, rtx, HOST_WIDE_INT, rtx, rtx);
560 static rtx spe_synthesize_frame_save (rtx);
561 static bool spe_func_has_64bit_regs_p (void);
562 static void emit_frame_save (rtx, rtx, enum machine_mode, unsigned int,
564 static rtx gen_frame_mem_offset (enum machine_mode, rtx, int);
565 static void rs6000_emit_allocate_stack (HOST_WIDE_INT, int);
566 static unsigned rs6000_hash_constant (rtx);
567 static unsigned toc_hash_function (const void *);
568 static int toc_hash_eq (const void *, const void *);
569 static int constant_pool_expr_1 (rtx, int *, int *);
570 static bool constant_pool_expr_p (rtx);
571 static bool legitimate_small_data_p (enum machine_mode, rtx);
572 static bool legitimate_indexed_address_p (rtx, int);
573 static bool legitimate_lo_sum_address_p (enum machine_mode, rtx, int);
574 static struct machine_function * rs6000_init_machine_status (void);
575 static bool rs6000_assemble_integer (rtx, unsigned int, int);
576 static bool no_global_regs_above (int);
577 #ifdef HAVE_GAS_HIDDEN
578 static void rs6000_assemble_visibility (tree, int);
580 static int rs6000_ra_ever_killed (void);
581 static tree rs6000_handle_longcall_attribute (tree *, tree, tree, int, bool *);
582 static tree rs6000_handle_altivec_attribute (tree *, tree, tree, int, bool *);
583 static void rs6000_eliminate_indexed_memrefs (rtx operands[2]);
584 static const char *rs6000_mangle_fundamental_type (tree);
585 extern const struct attribute_spec rs6000_attribute_table[];
586 static void rs6000_set_default_type_attributes (tree);
587 static void rs6000_output_function_prologue (FILE *, HOST_WIDE_INT);
588 static void rs6000_output_function_epilogue (FILE *, HOST_WIDE_INT);
589 static void rs6000_output_mi_thunk (FILE *, tree, HOST_WIDE_INT, HOST_WIDE_INT,
591 static rtx rs6000_emit_set_long_const (rtx, HOST_WIDE_INT, HOST_WIDE_INT);
592 static bool rs6000_return_in_memory (tree, tree);
593 static void rs6000_file_start (void);
595 static unsigned int rs6000_elf_section_type_flags (tree, const char *, int);
596 static void rs6000_elf_asm_out_constructor (rtx, int);
597 static void rs6000_elf_asm_out_destructor (rtx, int);
598 static void rs6000_elf_end_indicate_exec_stack (void) ATTRIBUTE_UNUSED;
599 static void rs6000_elf_select_section (tree, int, unsigned HOST_WIDE_INT);
600 static void rs6000_elf_unique_section (tree, int);
601 static void rs6000_elf_select_rtx_section (enum machine_mode, rtx,
602 unsigned HOST_WIDE_INT);
603 static void rs6000_elf_encode_section_info (tree, rtx, int)
605 static bool rs6000_elf_in_small_data_p (tree);
608 static void rs6000_xcoff_asm_globalize_label (FILE *, const char *);
609 static void rs6000_xcoff_asm_named_section (const char *, unsigned int, tree);
610 static void rs6000_xcoff_select_section (tree, int, unsigned HOST_WIDE_INT);
611 static void rs6000_xcoff_unique_section (tree, int);
612 static void rs6000_xcoff_select_rtx_section (enum machine_mode, rtx,
613 unsigned HOST_WIDE_INT);
614 static const char * rs6000_xcoff_strip_name_encoding (const char *);
615 static unsigned int rs6000_xcoff_section_type_flags (tree, const char *, int);
616 static void rs6000_xcoff_file_start (void);
617 static void rs6000_xcoff_file_end (void);
619 static int rs6000_variable_issue (FILE *, int, rtx, int);
620 static bool rs6000_rtx_costs (rtx, int, int, int *);
621 static int rs6000_adjust_cost (rtx, rtx, rtx, int);
622 static bool is_microcoded_insn (rtx);
623 static int is_dispatch_slot_restricted (rtx);
624 static bool is_cracked_insn (rtx);
625 static bool is_branch_slot_insn (rtx);
626 static int rs6000_adjust_priority (rtx, int);
627 static int rs6000_issue_rate (void);
628 static bool rs6000_is_costly_dependence (rtx, rtx, rtx, int, int);
629 static rtx get_next_active_insn (rtx, rtx);
630 static bool insn_terminates_group_p (rtx , enum group_termination);
631 static bool is_costly_group (rtx *, rtx);
632 static int force_new_group (int, FILE *, rtx *, rtx, bool *, int, int *);
633 static int redefine_groups (FILE *, int, rtx, rtx);
634 static int pad_groups (FILE *, int, rtx, rtx);
635 static void rs6000_sched_finish (FILE *, int);
636 static int rs6000_use_sched_lookahead (void);
637 static tree rs6000_builtin_mask_for_load (void);
639 static void def_builtin (int, const char *, tree, int);
640 static void rs6000_init_builtins (void);
641 static rtx rs6000_expand_unop_builtin (enum insn_code, tree, rtx);
642 static rtx rs6000_expand_binop_builtin (enum insn_code, tree, rtx);
643 static rtx rs6000_expand_ternop_builtin (enum insn_code, tree, rtx);
644 static rtx rs6000_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
645 static void altivec_init_builtins (void);
646 static void rs6000_common_init_builtins (void);
647 static void rs6000_init_libfuncs (void);
649 static void enable_mask_for_builtins (struct builtin_description *, int,
650 enum rs6000_builtins,
651 enum rs6000_builtins);
652 static tree build_opaque_vector_type (tree, int);
653 static void spe_init_builtins (void);
654 static rtx spe_expand_builtin (tree, rtx, bool *);
655 static rtx spe_expand_stv_builtin (enum insn_code, tree);
656 static rtx spe_expand_predicate_builtin (enum insn_code, tree, rtx);
657 static rtx spe_expand_evsel_builtin (enum insn_code, tree, rtx);
658 static int rs6000_emit_int_cmove (rtx, rtx, rtx, rtx);
659 static rs6000_stack_t *rs6000_stack_info (void);
660 static void debug_stack_info (rs6000_stack_t *);
662 static rtx altivec_expand_builtin (tree, rtx, bool *);
663 static rtx altivec_expand_ld_builtin (tree, rtx, bool *);
664 static rtx altivec_expand_st_builtin (tree, rtx, bool *);
665 static rtx altivec_expand_dst_builtin (tree, rtx, bool *);
666 static rtx altivec_expand_abs_builtin (enum insn_code, tree, rtx);
667 static rtx altivec_expand_predicate_builtin (enum insn_code,
668 const char *, tree, rtx);
669 static rtx altivec_expand_lv_builtin (enum insn_code, tree, rtx);
670 static rtx altivec_expand_stv_builtin (enum insn_code, tree);
671 static bool rs6000_handle_option (size_t, const char *, int);
672 static void rs6000_parse_tls_size_option (void);
673 static void rs6000_parse_yes_no_option (const char *, const char *, int *);
674 static int first_altivec_reg_to_save (void);
675 static unsigned int compute_vrsave_mask (void);
676 static void compute_save_world_info (rs6000_stack_t *info_ptr);
677 static void is_altivec_return_reg (rtx, void *);
678 static rtx generate_set_vrsave (rtx, rs6000_stack_t *, int);
679 int easy_vector_constant (rtx, enum machine_mode);
680 static bool rs6000_is_opaque_type (tree);
681 static rtx rs6000_dwarf_register_span (rtx);
682 static rtx rs6000_legitimize_tls_address (rtx, enum tls_model);
683 static void rs6000_output_dwarf_dtprel (FILE *, int, rtx) ATTRIBUTE_UNUSED;
684 static rtx rs6000_tls_get_addr (void);
685 static rtx rs6000_got_sym (void);
686 static int rs6000_tls_symbol_ref_1 (rtx *, void *);
687 static const char *rs6000_get_some_local_dynamic_name (void);
688 static int rs6000_get_some_local_dynamic_name_1 (rtx *, void *);
689 static rtx rs6000_complex_function_value (enum machine_mode);
690 static rtx rs6000_spe_function_arg (CUMULATIVE_ARGS *,
691 enum machine_mode, tree);
692 static void rs6000_darwin64_record_arg_advance_flush (CUMULATIVE_ARGS *,
694 static void rs6000_darwin64_record_arg_advance_recurse (CUMULATIVE_ARGS *,
695 tree, HOST_WIDE_INT);
696 static void rs6000_darwin64_record_arg_flush (CUMULATIVE_ARGS *,
699 static void rs6000_darwin64_record_arg_recurse (CUMULATIVE_ARGS *,
702 static rtx rs6000_darwin64_record_arg (CUMULATIVE_ARGS *, tree, int, bool);
703 static rtx rs6000_mixed_function_arg (enum machine_mode, tree, int);
704 static void rs6000_move_block_from_reg (int regno, rtx x, int nregs);
705 static void setup_incoming_varargs (CUMULATIVE_ARGS *,
706 enum machine_mode, tree,
708 static bool rs6000_pass_by_reference (CUMULATIVE_ARGS *, enum machine_mode,
710 static int rs6000_arg_partial_bytes (CUMULATIVE_ARGS *, enum machine_mode,
712 static const char *invalid_arg_for_unprototyped_fn (tree, tree, tree);
714 static void macho_branch_islands (void);
715 static void add_compiler_branch_island (tree, tree, int);
716 static int no_previous_def (tree function_name);
717 static tree get_prev_label (tree function_name);
718 static void rs6000_darwin_file_start (void);
721 static tree rs6000_build_builtin_va_list (void);
722 static tree rs6000_gimplify_va_arg (tree, tree, tree *, tree *);
723 static bool rs6000_must_pass_in_stack (enum machine_mode, tree);
724 static bool rs6000_vector_mode_supported_p (enum machine_mode);
725 static int get_vec_cmp_insn (enum rtx_code, enum machine_mode,
727 static rtx rs6000_emit_vector_compare (enum rtx_code, rtx, rtx,
729 static int get_vsel_insn (enum machine_mode);
730 static void rs6000_emit_vector_select (rtx, rtx, rtx, rtx);
733 const int INSN_NOT_AVAILABLE = -1;
734 static enum machine_mode rs6000_eh_return_filter_mode (void);
736 /* Hash table stuff for keeping track of TOC entries. */
738 struct toc_hash_struct GTY(())
740 /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
741 ASM_OUTPUT_SPECIAL_POOL_ENTRY_P. */
743 enum machine_mode key_mode;
747 static GTY ((param_is (struct toc_hash_struct))) htab_t toc_hash_table;
749 /* Default register names. */
750 char rs6000_reg_names[][8] =
752 "0", "1", "2", "3", "4", "5", "6", "7",
753 "8", "9", "10", "11", "12", "13", "14", "15",
754 "16", "17", "18", "19", "20", "21", "22", "23",
755 "24", "25", "26", "27", "28", "29", "30", "31",
756 "0", "1", "2", "3", "4", "5", "6", "7",
757 "8", "9", "10", "11", "12", "13", "14", "15",
758 "16", "17", "18", "19", "20", "21", "22", "23",
759 "24", "25", "26", "27", "28", "29", "30", "31",
760 "mq", "lr", "ctr","ap",
761 "0", "1", "2", "3", "4", "5", "6", "7",
763 /* AltiVec registers. */
764 "0", "1", "2", "3", "4", "5", "6", "7",
765 "8", "9", "10", "11", "12", "13", "14", "15",
766 "16", "17", "18", "19", "20", "21", "22", "23",
767 "24", "25", "26", "27", "28", "29", "30", "31",
773 #ifdef TARGET_REGNAMES
774 static const char alt_reg_names[][8] =
776 "%r0", "%r1", "%r2", "%r3", "%r4", "%r5", "%r6", "%r7",
777 "%r8", "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
778 "%r16", "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
779 "%r24", "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
780 "%f0", "%f1", "%f2", "%f3", "%f4", "%f5", "%f6", "%f7",
781 "%f8", "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
782 "%f16", "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
783 "%f24", "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
784 "mq", "lr", "ctr", "ap",
785 "%cr0", "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
787 /* AltiVec registers. */
788 "%v0", "%v1", "%v2", "%v3", "%v4", "%v5", "%v6", "%v7",
789 "%v8", "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
790 "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
791 "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
798 #ifndef MASK_STRICT_ALIGN
799 #define MASK_STRICT_ALIGN 0
801 #ifndef TARGET_PROFILE_KERNEL
802 #define TARGET_PROFILE_KERNEL 0
805 /* The VRSAVE bitmask puts bit %v0 as the most significant bit. */
806 #define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))
808 /* Initialize the GCC target structure. */
809 #undef TARGET_ATTRIBUTE_TABLE
810 #define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
811 #undef TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
812 #define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES rs6000_set_default_type_attributes
814 #undef TARGET_ASM_ALIGNED_DI_OP
815 #define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP
817 /* Default unaligned ops are only provided for ELF. Find the ops needed
818 for non-ELF systems. */
819 #ifndef OBJECT_FORMAT_ELF
821 /* For XCOFF. rs6000_assemble_integer will handle unaligned DIs on
823 #undef TARGET_ASM_UNALIGNED_HI_OP
824 #define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
825 #undef TARGET_ASM_UNALIGNED_SI_OP
826 #define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
827 #undef TARGET_ASM_UNALIGNED_DI_OP
828 #define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
831 #undef TARGET_ASM_UNALIGNED_HI_OP
832 #define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
833 #undef TARGET_ASM_UNALIGNED_SI_OP
834 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
835 #undef TARGET_ASM_UNALIGNED_DI_OP
836 #define TARGET_ASM_UNALIGNED_DI_OP "\t.quad\t"
837 #undef TARGET_ASM_ALIGNED_DI_OP
838 #define TARGET_ASM_ALIGNED_DI_OP "\t.quad\t"
842 /* This hook deals with fixups for relocatable code and DI-mode objects
844 #undef TARGET_ASM_INTEGER
845 #define TARGET_ASM_INTEGER rs6000_assemble_integer
847 #ifdef HAVE_GAS_HIDDEN
848 #undef TARGET_ASM_ASSEMBLE_VISIBILITY
849 #define TARGET_ASM_ASSEMBLE_VISIBILITY rs6000_assemble_visibility
852 #undef TARGET_HAVE_TLS
853 #define TARGET_HAVE_TLS HAVE_AS_TLS
855 #undef TARGET_CANNOT_FORCE_CONST_MEM
856 #define TARGET_CANNOT_FORCE_CONST_MEM rs6000_tls_referenced_p
858 #undef TARGET_ASM_FUNCTION_PROLOGUE
859 #define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
860 #undef TARGET_ASM_FUNCTION_EPILOGUE
861 #define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue
863 #undef TARGET_SCHED_VARIABLE_ISSUE
864 #define TARGET_SCHED_VARIABLE_ISSUE rs6000_variable_issue
866 #undef TARGET_SCHED_ISSUE_RATE
867 #define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
868 #undef TARGET_SCHED_ADJUST_COST
869 #define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
870 #undef TARGET_SCHED_ADJUST_PRIORITY
871 #define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority
872 #undef TARGET_SCHED_IS_COSTLY_DEPENDENCE
873 #define TARGET_SCHED_IS_COSTLY_DEPENDENCE rs6000_is_costly_dependence
874 #undef TARGET_SCHED_FINISH
875 #define TARGET_SCHED_FINISH rs6000_sched_finish
877 #undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
878 #define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD rs6000_use_sched_lookahead
880 #undef TARGET_VECTORIZE_BUILTIN_MASK_FOR_LOAD
881 #define TARGET_VECTORIZE_BUILTIN_MASK_FOR_LOAD rs6000_builtin_mask_for_load
883 #undef TARGET_INIT_BUILTINS
884 #define TARGET_INIT_BUILTINS rs6000_init_builtins
886 #undef TARGET_EXPAND_BUILTIN
887 #define TARGET_EXPAND_BUILTIN rs6000_expand_builtin
889 #undef TARGET_MANGLE_FUNDAMENTAL_TYPE
890 #define TARGET_MANGLE_FUNDAMENTAL_TYPE rs6000_mangle_fundamental_type
892 #undef TARGET_INIT_LIBFUNCS
893 #define TARGET_INIT_LIBFUNCS rs6000_init_libfuncs
896 #undef TARGET_BINDS_LOCAL_P
897 #define TARGET_BINDS_LOCAL_P darwin_binds_local_p
900 #undef TARGET_ASM_OUTPUT_MI_THUNK
901 #define TARGET_ASM_OUTPUT_MI_THUNK rs6000_output_mi_thunk
903 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
904 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_tree_hwi_hwi_tree_true
906 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
907 #define TARGET_FUNCTION_OK_FOR_SIBCALL rs6000_function_ok_for_sibcall
909 #undef TARGET_INVALID_WITHIN_DOLOOP
910 #define TARGET_INVALID_WITHIN_DOLOOP rs6000_invalid_within_doloop
912 #undef TARGET_RTX_COSTS
913 #define TARGET_RTX_COSTS rs6000_rtx_costs
914 #undef TARGET_ADDRESS_COST
915 #define TARGET_ADDRESS_COST hook_int_rtx_0
917 #undef TARGET_VECTOR_OPAQUE_P
918 #define TARGET_VECTOR_OPAQUE_P rs6000_is_opaque_type
920 #undef TARGET_DWARF_REGISTER_SPAN
921 #define TARGET_DWARF_REGISTER_SPAN rs6000_dwarf_register_span
923 /* On rs6000, function arguments are promoted, as are function return
925 #undef TARGET_PROMOTE_FUNCTION_ARGS
926 #define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_tree_true
927 #undef TARGET_PROMOTE_FUNCTION_RETURN
928 #define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_tree_true
930 #undef TARGET_RETURN_IN_MEMORY
931 #define TARGET_RETURN_IN_MEMORY rs6000_return_in_memory
933 #undef TARGET_SETUP_INCOMING_VARARGS
934 #define TARGET_SETUP_INCOMING_VARARGS setup_incoming_varargs
936 /* Always strict argument naming on rs6000. */
937 #undef TARGET_STRICT_ARGUMENT_NAMING
938 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
939 #undef TARGET_PRETEND_OUTGOING_VARARGS_NAMED
940 #define TARGET_PRETEND_OUTGOING_VARARGS_NAMED hook_bool_CUMULATIVE_ARGS_true
941 #undef TARGET_SPLIT_COMPLEX_ARG
942 #define TARGET_SPLIT_COMPLEX_ARG hook_bool_tree_true
943 #undef TARGET_MUST_PASS_IN_STACK
944 #define TARGET_MUST_PASS_IN_STACK rs6000_must_pass_in_stack
945 #undef TARGET_PASS_BY_REFERENCE
946 #define TARGET_PASS_BY_REFERENCE rs6000_pass_by_reference
947 #undef TARGET_ARG_PARTIAL_BYTES
948 #define TARGET_ARG_PARTIAL_BYTES rs6000_arg_partial_bytes
950 #undef TARGET_BUILD_BUILTIN_VA_LIST
951 #define TARGET_BUILD_BUILTIN_VA_LIST rs6000_build_builtin_va_list
953 #undef TARGET_GIMPLIFY_VA_ARG_EXPR
954 #define TARGET_GIMPLIFY_VA_ARG_EXPR rs6000_gimplify_va_arg
956 #undef TARGET_EH_RETURN_FILTER_MODE
957 #define TARGET_EH_RETURN_FILTER_MODE rs6000_eh_return_filter_mode
959 #undef TARGET_VECTOR_MODE_SUPPORTED_P
960 #define TARGET_VECTOR_MODE_SUPPORTED_P rs6000_vector_mode_supported_p
962 #undef TARGET_INVALID_ARG_FOR_UNPROTOTYPED_FN
963 #define TARGET_INVALID_ARG_FOR_UNPROTOTYPED_FN invalid_arg_for_unprototyped_fn
965 #undef TARGET_HANDLE_OPTION
966 #define TARGET_HANDLE_OPTION rs6000_handle_option
968 #undef TARGET_DEFAULT_TARGET_FLAGS
969 #define TARGET_DEFAULT_TARGET_FLAGS \
970 (TARGET_DEFAULT | MASK_SCHED_PROLOG)
972 /* MPC604EUM 3.5.2 Weak Consistency between Multiple Processors
973 The PowerPC architecture requires only weak consistency among
974 processors--that is, memory accesses between processors need not be
975 sequentially consistent and memory accesses among processors can occur
976 in any order. The ability to order memory accesses weakly provides
977 opportunities for more efficient use of the system bus. Unless a
978 dependency exists, the 604e allows read operations to precede store
980 #undef TARGET_RELAXED_ORDERING
981 #define TARGET_RELAXED_ORDERING true
984 #undef TARGET_ASM_OUTPUT_DWARF_DTPREL
985 #define TARGET_ASM_OUTPUT_DWARF_DTPREL rs6000_output_dwarf_dtprel
988 struct gcc_target targetm = TARGET_INITIALIZER;
991 /* Value is 1 if hard register REGNO can hold a value of machine-mode
994 rs6000_hard_regno_mode_ok (int regno, enum machine_mode mode)
996 /* The GPRs can hold any mode, but values bigger than one register
997 cannot go past R31. */
998 if (INT_REGNO_P (regno))
999 return INT_REGNO_P (regno + HARD_REGNO_NREGS (regno, mode) - 1);
1001 /* The float registers can only hold floating modes and DImode. */
1002 if (FP_REGNO_P (regno))
1004 (GET_MODE_CLASS (mode) == MODE_FLOAT
1005 && FP_REGNO_P (regno + HARD_REGNO_NREGS (regno, mode) - 1))
1006 || (GET_MODE_CLASS (mode) == MODE_INT
1007 && GET_MODE_SIZE (mode) == UNITS_PER_FP_WORD);
1009 /* The CR register can only hold CC modes. */
1010 if (CR_REGNO_P (regno))
1011 return GET_MODE_CLASS (mode) == MODE_CC;
1013 if (XER_REGNO_P (regno))
1014 return mode == PSImode;
1016 /* AltiVec only in AldyVec registers. */
1017 if (ALTIVEC_REGNO_P (regno))
1018 return ALTIVEC_VECTOR_MODE (mode);
1020 /* ...but GPRs can hold SIMD data on the SPE in one register. */
1021 if (SPE_SIMD_REGNO_P (regno) && TARGET_SPE && SPE_VECTOR_MODE (mode))
1024 /* We cannot put TImode anywhere except general register and it must be
1025 able to fit within the register set. */
1027 return GET_MODE_SIZE (mode) <= UNITS_PER_WORD;
1030 /* Initialize rs6000_hard_regno_mode_ok_p table. */
1032 rs6000_init_hard_regno_mode_ok (void)
1036 for (r = 0; r < FIRST_PSEUDO_REGISTER; ++r)
1037 for (m = 0; m < NUM_MACHINE_MODES; ++m)
1038 if (rs6000_hard_regno_mode_ok (r, m))
1039 rs6000_hard_regno_mode_ok_p[m][r] = true;
1042 /* If not otherwise specified by a target, make 'long double' equivalent to
1045 #ifndef RS6000_DEFAULT_LONG_DOUBLE_SIZE
1046 #define RS6000_DEFAULT_LONG_DOUBLE_SIZE 64
1049 /* Override command line options. Mostly we process the processor
1050 type and sometimes adjust other TARGET_ options. */
1053 rs6000_override_options (const char *default_cpu)
1056 struct rs6000_cpu_select *ptr;
1059 /* Simplifications for entries below. */
1062 POWERPC_BASE_MASK = MASK_POWERPC | MASK_NEW_MNEMONICS,
1063 POWERPC_7400_MASK = POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_ALTIVEC
1066 /* This table occasionally claims that a processor does not support
1067 a particular feature even though it does, but the feature is slower
1068 than the alternative. Thus, it shouldn't be relied on as a
1069 complete description of the processor's support.
1071 Please keep this list in order, and don't forget to update the
1072 documentation in invoke.texi when adding a new processor or
1076 const char *const name; /* Canonical processor name. */
1077 const enum processor_type processor; /* Processor type enum value. */
1078 const int target_enable; /* Target flags to enable. */
1079 } const processor_target_table[]
1080 = {{"401", PROCESSOR_PPC403, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1081 {"403", PROCESSOR_PPC403,
1082 POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_STRICT_ALIGN},
1083 {"405", PROCESSOR_PPC405, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1084 {"405fp", PROCESSOR_PPC405, POWERPC_BASE_MASK},
1085 {"440", PROCESSOR_PPC440, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1086 {"440fp", PROCESSOR_PPC440, POWERPC_BASE_MASK},
1087 {"505", PROCESSOR_MPCCORE, POWERPC_BASE_MASK},
1088 {"601", PROCESSOR_PPC601,
1089 MASK_POWER | POWERPC_BASE_MASK | MASK_MULTIPLE | MASK_STRING},
1090 {"602", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1091 {"603", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1092 {"603e", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1093 {"604", PROCESSOR_PPC604, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1094 {"604e", PROCESSOR_PPC604e, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1095 {"620", PROCESSOR_PPC620,
1096 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
1097 {"630", PROCESSOR_PPC630,
1098 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
1099 {"740", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1100 {"7400", PROCESSOR_PPC7400, POWERPC_7400_MASK},
1101 {"7450", PROCESSOR_PPC7450, POWERPC_7400_MASK},
1102 {"750", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1103 {"801", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1104 {"821", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1105 {"823", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1106 {"8540", PROCESSOR_PPC8540, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1107 /* 8548 has a dummy entry for now. */
1108 {"8548", PROCESSOR_PPC8540, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1109 {"860", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1110 {"970", PROCESSOR_POWER4,
1111 POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
1112 {"common", PROCESSOR_COMMON, MASK_NEW_MNEMONICS},
1113 {"ec603e", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1114 {"G3", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1115 {"G4", PROCESSOR_PPC7450, POWERPC_7400_MASK},
1116 {"G5", PROCESSOR_POWER4,
1117 POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
1118 {"power", PROCESSOR_POWER, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1119 {"power2", PROCESSOR_POWER,
1120 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING},
1121 {"power3", PROCESSOR_PPC630,
1122 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
1123 {"power4", PROCESSOR_POWER4,
1124 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_MFCRF | MASK_POWERPC64},
1125 {"power5", PROCESSOR_POWER5,
1126 POWERPC_BASE_MASK | MASK_POWERPC64 | MASK_PPC_GFXOPT
1127 | MASK_MFCRF | MASK_POPCNTB},
1128 {"powerpc", PROCESSOR_POWERPC, POWERPC_BASE_MASK},
1129 {"powerpc64", PROCESSOR_POWERPC64,
1130 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
1131 {"rios", PROCESSOR_RIOS1, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1132 {"rios1", PROCESSOR_RIOS1, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1133 {"rios2", PROCESSOR_RIOS2,
1134 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING},
1135 {"rsc", PROCESSOR_PPC601, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1136 {"rsc1", PROCESSOR_PPC601, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1137 {"rs64", PROCESSOR_RS64A,
1138 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64}
1141 const size_t ptt_size = ARRAY_SIZE (processor_target_table);
1143 /* Some OSs don't support saving the high part of 64-bit registers on
1144 context switch. Other OSs don't support saving Altivec registers.
1145 On those OSs, we don't touch the MASK_POWERPC64 or MASK_ALTIVEC
1146 settings; if the user wants either, the user must explicitly specify
1147 them and we won't interfere with the user's specification. */
1150 POWER_MASKS = MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING,
1151 POWERPC_MASKS = (POWERPC_BASE_MASK | MASK_PPC_GPOPT
1152 | MASK_PPC_GFXOPT | MASK_POWERPC64 | MASK_ALTIVEC
1156 rs6000_init_hard_regno_mode_ok ();
1158 set_masks = POWER_MASKS | POWERPC_MASKS | MASK_SOFT_FLOAT;
1159 #ifdef OS_MISSING_POWERPC64
1160 if (OS_MISSING_POWERPC64)
1161 set_masks &= ~MASK_POWERPC64;
1163 #ifdef OS_MISSING_ALTIVEC
1164 if (OS_MISSING_ALTIVEC)
1165 set_masks &= ~MASK_ALTIVEC;
1168 /* Don't override by the processor default if given explicitly. */
1169 set_masks &= ~target_flags_explicit;
1171 /* Identify the processor type. */
1172 rs6000_select[0].string = default_cpu;
1173 rs6000_cpu = TARGET_POWERPC64 ? PROCESSOR_DEFAULT64 : PROCESSOR_DEFAULT;
1175 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
1177 ptr = &rs6000_select[i];
1178 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
1180 for (j = 0; j < ptt_size; j++)
1181 if (! strcmp (ptr->string, processor_target_table[j].name))
1183 if (ptr->set_tune_p)
1184 rs6000_cpu = processor_target_table[j].processor;
1186 if (ptr->set_arch_p)
1188 target_flags &= ~set_masks;
1189 target_flags |= (processor_target_table[j].target_enable
1196 error ("bad value (%s) for %s switch", ptr->string, ptr->name);
1203 /* If we are optimizing big endian systems for space, use the load/store
1204 multiple and string instructions. */
1205 if (BYTES_BIG_ENDIAN && optimize_size)
1206 target_flags |= ~target_flags_explicit & (MASK_MULTIPLE | MASK_STRING);
1208 /* Don't allow -mmultiple or -mstring on little endian systems
1209 unless the cpu is a 750, because the hardware doesn't support the
1210 instructions used in little endian mode, and causes an alignment
1211 trap. The 750 does not cause an alignment trap (except when the
1212 target is unaligned). */
1214 if (!BYTES_BIG_ENDIAN && rs6000_cpu != PROCESSOR_PPC750)
1216 if (TARGET_MULTIPLE)
1218 target_flags &= ~MASK_MULTIPLE;
1219 if ((target_flags_explicit & MASK_MULTIPLE) != 0)
1220 warning (0, "-mmultiple is not supported on little endian systems");
1225 target_flags &= ~MASK_STRING;
1226 if ((target_flags_explicit & MASK_STRING) != 0)
1227 warning (0, "-mstring is not supported on little endian systems");
1231 /* Set debug flags */
1232 if (rs6000_debug_name)
1234 if (! strcmp (rs6000_debug_name, "all"))
1235 rs6000_debug_stack = rs6000_debug_arg = 1;
1236 else if (! strcmp (rs6000_debug_name, "stack"))
1237 rs6000_debug_stack = 1;
1238 else if (! strcmp (rs6000_debug_name, "arg"))
1239 rs6000_debug_arg = 1;
1241 error ("unknown -mdebug-%s switch", rs6000_debug_name);
1244 if (rs6000_traceback_name)
1246 if (! strncmp (rs6000_traceback_name, "full", 4))
1247 rs6000_traceback = traceback_full;
1248 else if (! strncmp (rs6000_traceback_name, "part", 4))
1249 rs6000_traceback = traceback_part;
1250 else if (! strncmp (rs6000_traceback_name, "no", 2))
1251 rs6000_traceback = traceback_none;
1253 error ("unknown -mtraceback arg %qs; expecting %<full%>, %<partial%> or %<none%>",
1254 rs6000_traceback_name);
1257 if (!rs6000_explicit_options.long_double)
1258 rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
1260 /* Set Altivec ABI as default for powerpc64 linux. */
1261 if (TARGET_ELF && TARGET_64BIT)
1263 rs6000_altivec_abi = 1;
1264 TARGET_ALTIVEC_VRSAVE = 1;
1267 /* Set the Darwin64 ABI as default for 64-bit Darwin. */
1268 if (DEFAULT_ABI == ABI_DARWIN && TARGET_64BIT)
1270 rs6000_darwin64_abi = 1;
1272 darwin_one_byte_bool = 1;
1274 /* Default to natural alignment, for better performance. */
1275 rs6000_alignment_flags = MASK_ALIGN_NATURAL;
1278 /* Handle -mtls-size option. */
1279 rs6000_parse_tls_size_option ();
1281 #ifdef SUBTARGET_OVERRIDE_OPTIONS
1282 SUBTARGET_OVERRIDE_OPTIONS;
1284 #ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
1285 SUBSUBTARGET_OVERRIDE_OPTIONS;
1287 #ifdef SUB3TARGET_OVERRIDE_OPTIONS
1288 SUB3TARGET_OVERRIDE_OPTIONS;
1294 error ("AltiVec and E500 instructions cannot coexist");
1296 /* The e500 does not have string instructions, and we set
1297 MASK_STRING above when optimizing for size. */
1298 if ((target_flags & MASK_STRING) != 0)
1299 target_flags = target_flags & ~MASK_STRING;
1301 else if (rs6000_select[1].string != NULL)
1303 /* For the powerpc-eabispe configuration, we set all these by
1304 default, so let's unset them if we manually set another
1305 CPU that is not the E500. */
1306 if (!rs6000_explicit_options.abi)
1308 if (!rs6000_explicit_options.spe)
1310 if (!rs6000_explicit_options.float_gprs)
1311 rs6000_float_gprs = 0;
1312 if (!rs6000_explicit_options.isel)
1314 if (!rs6000_explicit_options.long_double)
1315 rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
1318 rs6000_always_hint = (rs6000_cpu != PROCESSOR_POWER4
1319 && rs6000_cpu != PROCESSOR_POWER5);
1320 rs6000_sched_groups = (rs6000_cpu == PROCESSOR_POWER4
1321 || rs6000_cpu == PROCESSOR_POWER5);
1323 rs6000_sched_restricted_insns_priority
1324 = (rs6000_sched_groups ? 1 : 0);
1326 /* Handle -msched-costly-dep option. */
1327 rs6000_sched_costly_dep
1328 = (rs6000_sched_groups ? store_to_load_dep_costly : no_dep_costly);
1330 if (rs6000_sched_costly_dep_str)
1332 if (! strcmp (rs6000_sched_costly_dep_str, "no"))
1333 rs6000_sched_costly_dep = no_dep_costly;
1334 else if (! strcmp (rs6000_sched_costly_dep_str, "all"))
1335 rs6000_sched_costly_dep = all_deps_costly;
1336 else if (! strcmp (rs6000_sched_costly_dep_str, "true_store_to_load"))
1337 rs6000_sched_costly_dep = true_store_to_load_dep_costly;
1338 else if (! strcmp (rs6000_sched_costly_dep_str, "store_to_load"))
1339 rs6000_sched_costly_dep = store_to_load_dep_costly;
1341 rs6000_sched_costly_dep = atoi (rs6000_sched_costly_dep_str);
1344 /* Handle -minsert-sched-nops option. */
1345 rs6000_sched_insert_nops
1346 = (rs6000_sched_groups ? sched_finish_regroup_exact : sched_finish_none);
1348 if (rs6000_sched_insert_nops_str)
1350 if (! strcmp (rs6000_sched_insert_nops_str, "no"))
1351 rs6000_sched_insert_nops = sched_finish_none;
1352 else if (! strcmp (rs6000_sched_insert_nops_str, "pad"))
1353 rs6000_sched_insert_nops = sched_finish_pad_groups;
1354 else if (! strcmp (rs6000_sched_insert_nops_str, "regroup_exact"))
1355 rs6000_sched_insert_nops = sched_finish_regroup_exact;
1357 rs6000_sched_insert_nops = atoi (rs6000_sched_insert_nops_str);
1360 #ifdef TARGET_REGNAMES
1361 /* If the user desires alternate register names, copy in the
1362 alternate names now. */
1363 if (TARGET_REGNAMES)
1364 memcpy (rs6000_reg_names, alt_reg_names, sizeof (rs6000_reg_names));
1367 /* Set aix_struct_return last, after the ABI is determined.
1368 If -maix-struct-return or -msvr4-struct-return was explicitly
1369 used, don't override with the ABI default. */
1370 if (!rs6000_explicit_options.aix_struct_ret)
1371 aix_struct_return = (DEFAULT_ABI != ABI_V4 || DRAFT_V4_STRUCT_RET);
1373 if (TARGET_LONG_DOUBLE_128
1374 && (DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_DARWIN))
1375 REAL_MODE_FORMAT (TFmode) = &ibm_extended_format;
1377 /* Allocate an alias set for register saves & restores from stack. */
1378 rs6000_sr_alias_set = new_alias_set ();
1381 ASM_GENERATE_INTERNAL_LABEL (toc_label_name, "LCTOC", 1);
1383 /* We can only guarantee the availability of DI pseudo-ops when
1384 assembling for 64-bit targets. */
1387 targetm.asm_out.aligned_op.di = NULL;
1388 targetm.asm_out.unaligned_op.di = NULL;
1391 /* Set branch target alignment, if not optimizing for size. */
1394 if (rs6000_sched_groups)
1396 if (align_functions <= 0)
1397 align_functions = 16;
1398 if (align_jumps <= 0)
1400 if (align_loops <= 0)
1403 if (align_jumps_max_skip <= 0)
1404 align_jumps_max_skip = 15;
1405 if (align_loops_max_skip <= 0)
1406 align_loops_max_skip = 15;
1409 /* Arrange to save and restore machine status around nested functions. */
1410 init_machine_status = rs6000_init_machine_status;
1412 /* We should always be splitting complex arguments, but we can't break
1413 Linux and Darwin ABIs at the moment. For now, only AIX is fixed. */
1414 if (DEFAULT_ABI != ABI_AIX)
1415 targetm.calls.split_complex_arg = NULL;
1417 /* Initialize rs6000_cost with the appropriate target costs. */
1419 rs6000_cost = TARGET_POWERPC64 ? &size64_cost : &size32_cost;
1423 case PROCESSOR_RIOS1:
1424 rs6000_cost = &rios1_cost;
1427 case PROCESSOR_RIOS2:
1428 rs6000_cost = &rios2_cost;
1431 case PROCESSOR_RS64A:
1432 rs6000_cost = &rs64a_cost;
1435 case PROCESSOR_MPCCORE:
1436 rs6000_cost = &mpccore_cost;
1439 case PROCESSOR_PPC403:
1440 rs6000_cost = &ppc403_cost;
1443 case PROCESSOR_PPC405:
1444 rs6000_cost = &ppc405_cost;
1447 case PROCESSOR_PPC440:
1448 rs6000_cost = &ppc440_cost;
1451 case PROCESSOR_PPC601:
1452 rs6000_cost = &ppc601_cost;
1455 case PROCESSOR_PPC603:
1456 rs6000_cost = &ppc603_cost;
1459 case PROCESSOR_PPC604:
1460 rs6000_cost = &ppc604_cost;
1463 case PROCESSOR_PPC604e:
1464 rs6000_cost = &ppc604e_cost;
1467 case PROCESSOR_PPC620:
1468 rs6000_cost = &ppc620_cost;
1471 case PROCESSOR_PPC630:
1472 rs6000_cost = &ppc630_cost;
1475 case PROCESSOR_PPC750:
1476 case PROCESSOR_PPC7400:
1477 rs6000_cost = &ppc750_cost;
1480 case PROCESSOR_PPC7450:
1481 rs6000_cost = &ppc7450_cost;
1484 case PROCESSOR_PPC8540:
1485 rs6000_cost = &ppc8540_cost;
1488 case PROCESSOR_POWER4:
1489 case PROCESSOR_POWER5:
1490 rs6000_cost = &power4_cost;
1498 /* Implement targetm.vectorize.builtin_mask_for_load. */
1500 rs6000_builtin_mask_for_load (void)
1503 return altivec_builtin_mask_for_load;
1508 /* Handle generic options of the form -mfoo=yes/no.
1509 NAME is the option name.
1510 VALUE is the option value.
1511 FLAG is the pointer to the flag where to store a 1 or 0, depending on
1512 whether the option value is 'yes' or 'no' respectively. */
1514 rs6000_parse_yes_no_option (const char *name, const char *value, int *flag)
1518 else if (!strcmp (value, "yes"))
1520 else if (!strcmp (value, "no"))
1523 error ("unknown -m%s= option specified: '%s'", name, value);
1526 /* Validate and record the size specified with the -mtls-size option. */
1529 rs6000_parse_tls_size_option (void)
1531 if (rs6000_tls_size_string == 0)
1533 else if (strcmp (rs6000_tls_size_string, "16") == 0)
1534 rs6000_tls_size = 16;
1535 else if (strcmp (rs6000_tls_size_string, "32") == 0)
1536 rs6000_tls_size = 32;
1537 else if (strcmp (rs6000_tls_size_string, "64") == 0)
1538 rs6000_tls_size = 64;
1540 error ("bad value %qs for -mtls-size switch", rs6000_tls_size_string);
1544 optimization_options (int level ATTRIBUTE_UNUSED, int size ATTRIBUTE_UNUSED)
1546 if (DEFAULT_ABI == ABI_DARWIN)
1547 /* The Darwin libraries never set errno, so we might as well
1548 avoid calling them when that's the only reason we would. */
1549 flag_errno_math = 0;
1552 /* Implement TARGET_HANDLE_OPTION. */
1555 rs6000_handle_option (size_t code, const char *arg, int value)
1560 target_flags &= ~(MASK_POWER | MASK_POWER2
1561 | MASK_MULTIPLE | MASK_STRING);
1562 target_flags_explicit |= (MASK_POWER | MASK_POWER2
1563 | MASK_MULTIPLE | MASK_STRING);
1565 case OPT_mno_powerpc:
1566 target_flags &= ~(MASK_POWERPC | MASK_PPC_GPOPT
1567 | MASK_PPC_GFXOPT | MASK_POWERPC64);
1568 target_flags_explicit |= (MASK_POWERPC | MASK_PPC_GPOPT
1569 | MASK_PPC_GFXOPT | MASK_POWERPC64);
1572 target_flags &= ~(MASK_MINIMAL_TOC | MASK_NO_FP_IN_TOC
1573 | MASK_NO_SUM_IN_TOC);
1574 target_flags_explicit |= (MASK_MINIMAL_TOC | MASK_NO_FP_IN_TOC
1575 | MASK_NO_SUM_IN_TOC);
1576 #ifdef TARGET_USES_SYSV4_OPT
1577 /* Note, V.4 no longer uses a normal TOC, so make -mfull-toc, be
1578 just the same as -mminimal-toc. */
1579 target_flags |= MASK_MINIMAL_TOC;
1580 target_flags_explicit |= MASK_MINIMAL_TOC;
1584 #ifdef TARGET_USES_SYSV4_OPT
1586 /* Make -mtoc behave like -mminimal-toc. */
1587 target_flags |= MASK_MINIMAL_TOC;
1588 target_flags_explicit |= MASK_MINIMAL_TOC;
1592 #ifdef TARGET_USES_AIX64_OPT
1597 target_flags |= MASK_POWERPC64 | MASK_POWERPC | MASK_PPC_GFXOPT;
1598 target_flags_explicit |= MASK_POWERPC64 | MASK_POWERPC
1602 #ifdef TARGET_USES_AIX64_OPT
1607 target_flags &= ~MASK_POWERPC64;
1608 target_flags_explicit |= MASK_POWERPC64;
1611 case OPT_minsert_sched_nops_:
1612 rs6000_sched_insert_nops_str = arg;
1615 case OPT_mminimal_toc:
1618 target_flags &= ~(MASK_NO_FP_IN_TOC | MASK_NO_SUM_IN_TOC);
1619 target_flags_explicit |= (MASK_NO_FP_IN_TOC | MASK_NO_SUM_IN_TOC);
1626 target_flags |= (MASK_MULTIPLE | MASK_STRING);
1627 target_flags_explicit |= (MASK_MULTIPLE | MASK_STRING);
1634 target_flags |= (MASK_POWER | MASK_MULTIPLE | MASK_STRING);
1635 target_flags_explicit |= (MASK_POWER | MASK_MULTIPLE | MASK_STRING);
1639 case OPT_mpowerpc_gpopt:
1640 case OPT_mpowerpc_gfxopt:
1643 target_flags |= MASK_POWERPC;
1644 target_flags_explicit |= MASK_POWERPC;
1648 case OPT_maix_struct_return:
1649 case OPT_msvr4_struct_return:
1650 rs6000_explicit_options.aix_struct_ret = true;
1654 rs6000_parse_yes_no_option ("vrsave", arg, &(TARGET_ALTIVEC_VRSAVE));
1658 rs6000_explicit_options.isel = true;
1659 rs6000_parse_yes_no_option ("isel", arg, &(rs6000_isel));
1663 rs6000_explicit_options.spe = true;
1664 rs6000_parse_yes_no_option ("spe", arg, &(rs6000_spe));
1665 /* No SPE means 64-bit long doubles, even if an E500. */
1667 rs6000_long_double_type_size = 64;
1671 rs6000_debug_name = arg;
1674 #ifdef TARGET_USES_SYSV4_OPT
1676 rs6000_abi_name = arg;
1680 rs6000_sdata_name = arg;
1683 case OPT_mtls_size_:
1684 rs6000_tls_size_string = arg;
1687 case OPT_mrelocatable:
1690 target_flags |= MASK_MINIMAL_TOC | MASK_NO_FP_IN_TOC;
1691 target_flags_explicit |= MASK_MINIMAL_TOC | MASK_NO_FP_IN_TOC;
1695 case OPT_mrelocatable_lib:
1698 target_flags |= MASK_RELOCATABLE | MASK_MINIMAL_TOC
1699 | MASK_NO_FP_IN_TOC;
1700 target_flags_explicit |= MASK_RELOCATABLE | MASK_MINIMAL_TOC
1701 | MASK_NO_FP_IN_TOC;
1705 target_flags &= ~MASK_RELOCATABLE;
1706 target_flags_explicit |= MASK_RELOCATABLE;
1712 rs6000_explicit_options.abi = true;
1713 if (!strcmp (arg, "altivec"))
1715 rs6000_altivec_abi = 1;
1718 else if (! strcmp (arg, "no-altivec"))
1719 rs6000_altivec_abi = 0;
1720 else if (! strcmp (arg, "spe"))
1723 rs6000_altivec_abi = 0;
1724 if (!TARGET_SPE_ABI)
1725 error ("not configured for ABI: '%s'", arg);
1727 else if (! strcmp (arg, "no-spe"))
1730 /* These are here for testing during development only, do not
1731 document in the manual please. */
1732 else if (! strcmp (arg, "d64"))
1734 rs6000_darwin64_abi = 1;
1735 warning (0, "Using darwin64 ABI");
1737 else if (! strcmp (arg, "d32"))
1739 rs6000_darwin64_abi = 0;
1740 warning (0, "Using old darwin ABI");
1745 error ("unknown ABI specified: '%s'", arg);
1751 rs6000_select[1].string = arg;
1755 rs6000_select[2].string = arg;
1758 case OPT_mtraceback_:
1759 rs6000_traceback_name = arg;
1762 case OPT_mfloat_gprs_:
1763 rs6000_explicit_options.float_gprs = true;
1764 if (! strcmp (arg, "yes") || ! strcmp (arg, "single"))
1765 rs6000_float_gprs = 1;
1766 else if (! strcmp (arg, "double"))
1767 rs6000_float_gprs = 2;
1768 else if (! strcmp (arg, "no"))
1769 rs6000_float_gprs = 0;
1772 error ("invalid option for -mfloat-gprs: '%s'", arg);
1777 case OPT_mlong_double_:
1778 rs6000_explicit_options.long_double = true;
1779 rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
1780 if (value != 64 && value != 128)
1782 error ("Unknown switch -mlong-double-%s", arg);
1783 rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
1787 rs6000_long_double_type_size = value;
1790 case OPT_msched_costly_dep_:
1791 rs6000_sched_costly_dep_str = arg;
1795 rs6000_explicit_options.alignment = true;
1796 if (! strcmp (arg, "power"))
1798 /* On 64-bit Darwin, power alignment is ABI-incompatible with
1799 some C library functions, so warn about it. The flag may be
1800 useful for performance studies from time to time though, so
1801 don't disable it entirely. */
1802 if (DEFAULT_ABI == ABI_DARWIN && TARGET_64BIT)
1803 warning (0, "-malign-power is not supported for 64-bit Darwin;"
1804 " it is incompatible with the installed C and C++ libraries");
1805 rs6000_alignment_flags = MASK_ALIGN_POWER;
1807 else if (! strcmp (arg, "natural"))
1808 rs6000_alignment_flags = MASK_ALIGN_NATURAL;
1811 error ("unknown -malign-XXXXX option specified: '%s'", arg);
1819 /* Do anything needed at the start of the asm file. */
1822 rs6000_file_start (void)
1826 const char *start = buffer;
1827 struct rs6000_cpu_select *ptr;
1828 const char *default_cpu = TARGET_CPU_DEFAULT;
1829 FILE *file = asm_out_file;
1831 default_file_start ();
1833 #ifdef TARGET_BI_ARCH
1834 if ((TARGET_DEFAULT ^ target_flags) & MASK_64BIT)
1838 if (flag_verbose_asm)
1840 sprintf (buffer, "\n%s rs6000/powerpc options:", ASM_COMMENT_START);
1841 rs6000_select[0].string = default_cpu;
1843 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
1845 ptr = &rs6000_select[i];
1846 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
1848 fprintf (file, "%s %s%s", start, ptr->name, ptr->string);
1853 #ifdef USING_ELFOS_H
1854 switch (rs6000_sdata)
1856 case SDATA_NONE: fprintf (file, "%s -msdata=none", start); start = ""; break;
1857 case SDATA_DATA: fprintf (file, "%s -msdata=data", start); start = ""; break;
1858 case SDATA_SYSV: fprintf (file, "%s -msdata=sysv", start); start = ""; break;
1859 case SDATA_EABI: fprintf (file, "%s -msdata=eabi", start); start = ""; break;
1862 if (rs6000_sdata && g_switch_value)
1864 fprintf (file, "%s -G " HOST_WIDE_INT_PRINT_UNSIGNED, start,
1874 if (DEFAULT_ABI == ABI_AIX || (TARGET_ELF && flag_pic == 2))
1882 /* Return nonzero if this function is known to have a null epilogue. */
1885 direct_return (void)
1887 if (reload_completed)
1889 rs6000_stack_t *info = rs6000_stack_info ();
1891 if (info->first_gp_reg_save == 32
1892 && info->first_fp_reg_save == 64
1893 && info->first_altivec_reg_save == LAST_ALTIVEC_REGNO + 1
1894 && ! info->lr_save_p
1895 && ! info->cr_save_p
1896 && info->vrsave_mask == 0
1904 /* Return the number of instructions it takes to form a constant in an
1905 integer register. */
1908 num_insns_constant_wide (HOST_WIDE_INT value)
1910 /* signed constant loadable with {cal|addi} */
1911 if (CONST_OK_FOR_LETTER_P (value, 'I'))
1914 /* constant loadable with {cau|addis} */
1915 else if (CONST_OK_FOR_LETTER_P (value, 'L'))
1918 #if HOST_BITS_PER_WIDE_INT == 64
1919 else if (TARGET_POWERPC64)
1921 HOST_WIDE_INT low = ((value & 0xffffffff) ^ 0x80000000) - 0x80000000;
1922 HOST_WIDE_INT high = value >> 31;
1924 if (high == 0 || high == -1)
1930 return num_insns_constant_wide (high) + 1;
1932 return (num_insns_constant_wide (high)
1933 + num_insns_constant_wide (low) + 1);
1942 num_insns_constant (rtx op, enum machine_mode mode)
1944 HOST_WIDE_INT low, high;
1946 switch (GET_CODE (op))
1949 #if HOST_BITS_PER_WIDE_INT == 64
1950 if ((INTVAL (op) >> 31) != 0 && (INTVAL (op) >> 31) != -1
1951 && mask_operand (op, mode))
1955 return num_insns_constant_wide (INTVAL (op));
1963 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1964 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
1965 return num_insns_constant_wide ((HOST_WIDE_INT) l);
1968 if (mode == VOIDmode || mode == DImode)
1970 high = CONST_DOUBLE_HIGH (op);
1971 low = CONST_DOUBLE_LOW (op);
1978 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1979 REAL_VALUE_TO_TARGET_DOUBLE (rv, l);
1980 high = l[WORDS_BIG_ENDIAN == 0];
1981 low = l[WORDS_BIG_ENDIAN != 0];
1985 return (num_insns_constant_wide (low)
1986 + num_insns_constant_wide (high));
1989 if ((high == 0 && low >= 0)
1990 || (high == -1 && low < 0))
1991 return num_insns_constant_wide (low);
1993 else if (mask_operand (op, mode))
1997 return num_insns_constant_wide (high) + 1;
2000 return (num_insns_constant_wide (high)
2001 + num_insns_constant_wide (low) + 1);
2009 /* Returns the constant for the splat instruction, if exists. */
2012 easy_vector_splat_const (int cst, enum machine_mode mode)
2017 if (EASY_VECTOR_15 (cst)
2018 || EASY_VECTOR_15_ADD_SELF (cst))
2020 if ((cst & 0xffff) != ((cst >> 16) & 0xffff))
2026 if (EASY_VECTOR_15 (cst)
2027 || EASY_VECTOR_15_ADD_SELF (cst))
2029 if ((cst & 0xff) != ((cst >> 8) & 0xff))
2035 if (EASY_VECTOR_15 (cst)
2036 || EASY_VECTOR_15_ADD_SELF (cst))
2044 /* Return nonzero if all elements of a vector have the same value. */
2047 easy_vector_same (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2051 units = CONST_VECTOR_NUNITS (op);
2053 cst = INTVAL (CONST_VECTOR_ELT (op, 0));
2054 for (i = 1; i < units; ++i)
2055 if (INTVAL (CONST_VECTOR_ELT (op, i)) != cst)
2057 if (i == units && easy_vector_splat_const (cst, mode))
2062 /* Generate easy_vector_constant out of a easy_vector_constant_add_self. */
2065 gen_easy_vector_constant_add_self (rtx op)
2069 units = GET_MODE_NUNITS (GET_MODE (op));
2070 v = rtvec_alloc (units);
2072 for (i = 0; i < units; i++)
2074 GEN_INT (INTVAL (CONST_VECTOR_ELT (op, i)) >> 1);
2075 return gen_rtx_raw_CONST_VECTOR (GET_MODE (op), v);
2079 output_vec_const_move (rtx *operands)
2082 enum machine_mode mode;
2088 cst = INTVAL (CONST_VECTOR_ELT (vec, 0));
2089 cst2 = INTVAL (CONST_VECTOR_ELT (vec, 1));
2090 mode = GET_MODE (dest);
2094 if (zero_constant (vec, mode))
2095 return "vxor %0,%0,%0";
2097 gcc_assert (easy_vector_constant (vec, mode));
2099 operands[1] = GEN_INT (cst);
2103 if (EASY_VECTOR_15 (cst))
2105 operands[1] = GEN_INT (cst);
2106 return "vspltisw %0,%1";
2108 else if (EASY_VECTOR_15_ADD_SELF (cst))
2114 if (EASY_VECTOR_15 (cst))
2116 operands[1] = GEN_INT (cst);
2117 return "vspltish %0,%1";
2119 else if (EASY_VECTOR_15_ADD_SELF (cst))
2125 if (EASY_VECTOR_15 (cst))
2127 operands[1] = GEN_INT (cst);
2128 return "vspltisb %0,%1";
2130 else if (EASY_VECTOR_15_ADD_SELF (cst))
2138 gcc_assert (TARGET_SPE);
2140 /* Vector constant 0 is handled as a splitter of V2SI, and in the
2141 pattern of V1DI, V4HI, and V2SF.
2143 FIXME: We should probably return # and add post reload
2144 splitters for these, but this way is so easy ;-). */
2145 operands[1] = GEN_INT (cst);
2146 operands[2] = GEN_INT (cst2);
2148 return "li %0,%1\n\tevmergelo %0,%0,%0";
2150 return "li %0,%1\n\tevmergelo %0,%0,%0\n\tli %0,%2";
2154 mask64_1or2_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED,
2157 if (GET_CODE (op) == CONST_INT)
2159 HOST_WIDE_INT c, lsb;
2164 /* Disallow all zeros. */
2168 /* We can use a single rlwinm insn if no upper bits of C are set
2169 AND there are zero, one or two transitions in the _whole_ of
2171 one_ok = !(c & ~(HOST_WIDE_INT)0xffffffff);
2173 /* We don't change the number of transitions by inverting,
2174 so make sure we start with the LS bit zero. */
2178 /* Find the first transition. */
2181 /* Invert to look for a second transition. */
2184 /* Erase first transition. */
2187 /* Find the second transition. */
2190 /* Invert to look for a third transition. */
2193 /* Erase second transition. */
2196 if (one_ok && !(allow_one || c))
2199 /* Find the third transition (if any). */
2202 /* Match if all the bits above are 1's (or c is zero). */
2208 /* Generates shifts and masks for a pair of rldicl or rldicr insns to
2209 implement ANDing by the mask IN. */
2211 build_mask64_2_operands (rtx in, rtx *out)
2213 #if HOST_BITS_PER_WIDE_INT >= 64
2214 unsigned HOST_WIDE_INT c, lsb, m1, m2;
2217 gcc_assert (GET_CODE (in) == CONST_INT);
2222 /* Assume c initially something like 0x00fff000000fffff. The idea
2223 is to rotate the word so that the middle ^^^^^^ group of zeros
2224 is at the MS end and can be cleared with an rldicl mask. We then
2225 rotate back and clear off the MS ^^ group of zeros with a
2227 c = ~c; /* c == 0xff000ffffff00000 */
2228 lsb = c & -c; /* lsb == 0x0000000000100000 */
2229 m1 = -lsb; /* m1 == 0xfffffffffff00000 */
2230 c = ~c; /* c == 0x00fff000000fffff */
2231 c &= -lsb; /* c == 0x00fff00000000000 */
2232 lsb = c & -c; /* lsb == 0x0000100000000000 */
2233 c = ~c; /* c == 0xff000fffffffffff */
2234 c &= -lsb; /* c == 0xff00000000000000 */
2236 while ((lsb >>= 1) != 0)
2237 shift++; /* shift == 44 on exit from loop */
2238 m1 <<= 64 - shift; /* m1 == 0xffffff0000000000 */
2239 m1 = ~m1; /* m1 == 0x000000ffffffffff */
2240 m2 = ~c; /* m2 == 0x00ffffffffffffff */
2244 /* Assume c initially something like 0xff000f0000000000. The idea
2245 is to rotate the word so that the ^^^ middle group of zeros
2246 is at the LS end and can be cleared with an rldicr mask. We then
2247 rotate back and clear off the LS group of ^^^^^^^^^^ zeros with
2249 lsb = c & -c; /* lsb == 0x0000010000000000 */
2250 m2 = -lsb; /* m2 == 0xffffff0000000000 */
2251 c = ~c; /* c == 0x00fff0ffffffffff */
2252 c &= -lsb; /* c == 0x00fff00000000000 */
2253 lsb = c & -c; /* lsb == 0x0000100000000000 */
2254 c = ~c; /* c == 0xff000fffffffffff */
2255 c &= -lsb; /* c == 0xff00000000000000 */
2257 while ((lsb >>= 1) != 0)
2258 shift++; /* shift == 44 on exit from loop */
2259 m1 = ~c; /* m1 == 0x00ffffffffffffff */
2260 m1 >>= shift; /* m1 == 0x0000000000000fff */
2261 m1 = ~m1; /* m1 == 0xfffffffffffff000 */
2264 /* Note that when we only have two 0->1 and 1->0 transitions, one of the
2265 masks will be all 1's. We are guaranteed more than one transition. */
2266 out[0] = GEN_INT (64 - shift);
2267 out[1] = GEN_INT (m1);
2268 out[2] = GEN_INT (shift);
2269 out[3] = GEN_INT (m2);
2277 /* Return TRUE if OP is an invalid SUBREG operation on the e500. */
2280 invalid_e500_subreg (rtx op, enum machine_mode mode)
2282 /* Reject (subreg:SI (reg:DF)). */
2283 if (GET_CODE (op) == SUBREG
2285 && REG_P (SUBREG_REG (op))
2286 && GET_MODE (SUBREG_REG (op)) == DFmode)
2289 /* Reject (subreg:DF (reg:DI)). */
2290 if (GET_CODE (op) == SUBREG
2292 && REG_P (SUBREG_REG (op))
2293 && GET_MODE (SUBREG_REG (op)) == DImode)
2299 /* Darwin, AIX increases natural record alignment to doubleword if the first
2300 field is an FP double while the FP fields remain word aligned. */
2303 rs6000_special_round_type_align (tree type, int computed, int specified)
2305 tree field = TYPE_FIELDS (type);
2307 /* Skip all non field decls */
2308 while (field != NULL && TREE_CODE (field) != FIELD_DECL)
2309 field = TREE_CHAIN (field);
2311 if (field == NULL || field == type || DECL_MODE (field) != DFmode)
2312 return MAX (computed, specified);
2314 return MAX (MAX (computed, specified), 64);
2317 /* Return 1 for an operand in small memory on V.4/eabi. */
2320 small_data_operand (rtx op ATTRIBUTE_UNUSED,
2321 enum machine_mode mode ATTRIBUTE_UNUSED)
2326 if (rs6000_sdata == SDATA_NONE || rs6000_sdata == SDATA_DATA)
2329 if (DEFAULT_ABI != ABI_V4)
2332 if (GET_CODE (op) == SYMBOL_REF)
2335 else if (GET_CODE (op) != CONST
2336 || GET_CODE (XEXP (op, 0)) != PLUS
2337 || GET_CODE (XEXP (XEXP (op, 0), 0)) != SYMBOL_REF
2338 || GET_CODE (XEXP (XEXP (op, 0), 1)) != CONST_INT)
2343 rtx sum = XEXP (op, 0);
2344 HOST_WIDE_INT summand;
2346 /* We have to be careful here, because it is the referenced address
2347 that must be 32k from _SDA_BASE_, not just the symbol. */
2348 summand = INTVAL (XEXP (sum, 1));
2349 if (summand < 0 || (unsigned HOST_WIDE_INT) summand > g_switch_value)
2352 sym_ref = XEXP (sum, 0);
2355 return SYMBOL_REF_SMALL_P (sym_ref);
2361 /* Return true if either operand is a general purpose register. */
2364 gpr_or_gpr_p (rtx op0, rtx op1)
2366 return ((REG_P (op0) && INT_REGNO_P (REGNO (op0)))
2367 || (REG_P (op1) && INT_REGNO_P (REGNO (op1))));
2371 /* Subroutines of rs6000_legitimize_address and rs6000_legitimate_address. */
2374 constant_pool_expr_1 (rtx op, int *have_sym, int *have_toc)
2376 switch (GET_CODE (op))
2379 if (RS6000_SYMBOL_REF_TLS_P (op))
2381 else if (CONSTANT_POOL_ADDRESS_P (op))
2383 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (op), Pmode))
2391 else if (! strcmp (XSTR (op, 0), toc_label_name))
2400 return (constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc)
2401 && constant_pool_expr_1 (XEXP (op, 1), have_sym, have_toc));
2403 return constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc);
2412 constant_pool_expr_p (rtx op)
2416 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_sym;
2420 toc_relative_expr_p (rtx op)
2424 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_toc;
2428 legitimate_constant_pool_address_p (rtx x)
2431 && GET_CODE (x) == PLUS
2432 && GET_CODE (XEXP (x, 0)) == REG
2433 && (TARGET_MINIMAL_TOC || REGNO (XEXP (x, 0)) == TOC_REGISTER)
2434 && constant_pool_expr_p (XEXP (x, 1)));
2438 legitimate_small_data_p (enum machine_mode mode, rtx x)
2440 return (DEFAULT_ABI == ABI_V4
2441 && !flag_pic && !TARGET_TOC
2442 && (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST)
2443 && small_data_operand (x, mode));
2446 /* SPE offset addressing is limited to 5-bits worth of double words. */
2447 #define SPE_CONST_OFFSET_OK(x) (((x) & ~0xf8) == 0)
2450 rs6000_legitimate_offset_address_p (enum machine_mode mode, rtx x, int strict)
2452 unsigned HOST_WIDE_INT offset, extra;
2454 if (GET_CODE (x) != PLUS)
2456 if (GET_CODE (XEXP (x, 0)) != REG)
2458 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
2460 if (legitimate_constant_pool_address_p (x))
2462 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
2465 offset = INTVAL (XEXP (x, 1));
2473 /* AltiVec vector modes. Only reg+reg addressing is valid here,
2474 which leaves the only valid constant offset of zero, which by
2475 canonicalization rules is also invalid. */
2482 /* SPE vector modes. */
2483 return SPE_CONST_OFFSET_OK (offset);
2486 if (TARGET_E500_DOUBLE)
2487 return SPE_CONST_OFFSET_OK (offset);
2490 /* On e500v2, we may have:
2492 (subreg:DF (mem:DI (plus (reg) (const_int))) 0).
2494 Which gets addressed with evldd instructions. */
2495 if (TARGET_E500_DOUBLE)
2496 return SPE_CONST_OFFSET_OK (offset);
2498 if (mode == DFmode || !TARGET_POWERPC64)
2500 else if (offset & 3)
2506 if (mode == TFmode || !TARGET_POWERPC64)
2508 else if (offset & 3)
2519 return (offset < 0x10000) && (offset + extra < 0x10000);
2523 legitimate_indexed_address_p (rtx x, int strict)
2527 if (GET_CODE (x) != PLUS)
2533 if (!REG_P (op0) || !REG_P (op1))
2536 return ((INT_REG_OK_FOR_BASE_P (op0, strict)
2537 && INT_REG_OK_FOR_INDEX_P (op1, strict))
2538 || (INT_REG_OK_FOR_BASE_P (op1, strict)
2539 && INT_REG_OK_FOR_INDEX_P (op0, strict)));
2543 legitimate_indirect_address_p (rtx x, int strict)
2545 return GET_CODE (x) == REG && INT_REG_OK_FOR_BASE_P (x, strict);
2549 macho_lo_sum_memory_operand (rtx x, enum machine_mode mode)
2551 if (!TARGET_MACHO || !flag_pic
2552 || mode != SImode || GET_CODE (x) != MEM)
2556 if (GET_CODE (x) != LO_SUM)
2558 if (GET_CODE (XEXP (x, 0)) != REG)
2560 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), 0))
2564 return CONSTANT_P (x);
2568 legitimate_lo_sum_address_p (enum machine_mode mode, rtx x, int strict)
2570 if (GET_CODE (x) != LO_SUM)
2572 if (GET_CODE (XEXP (x, 0)) != REG)
2574 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
2576 /* Restrict addressing for DI because of our SUBREG hackery. */
2577 if (TARGET_E500_DOUBLE && (mode == DFmode || mode == DImode))
2581 if (TARGET_ELF || TARGET_MACHO)
2583 if (DEFAULT_ABI != ABI_AIX && DEFAULT_ABI != ABI_DARWIN && flag_pic)
2587 if (GET_MODE_NUNITS (mode) != 1)
2589 if (GET_MODE_BITSIZE (mode) > 64
2590 || (GET_MODE_BITSIZE (mode) > 32 && !TARGET_POWERPC64
2591 && !(TARGET_HARD_FLOAT && TARGET_FPRS && mode == DFmode)))
2594 return CONSTANT_P (x);
2601 /* Try machine-dependent ways of modifying an illegitimate address
2602 to be legitimate. If we find one, return the new, valid address.
2603 This is used from only one place: `memory_address' in explow.c.
2605 OLDX is the address as it was before break_out_memory_refs was
2606 called. In some cases it is useful to look at this to decide what
2609 MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.
2611 It is always safe for this function to do nothing. It exists to
2612 recognize opportunities to optimize the output.
2614 On RS/6000, first check for the sum of a register with a constant
2615 integer that is out of range. If so, generate code to add the
2616 constant with the low-order 16 bits masked to the register and force
2617 this result into another register (this can be done with `cau').
2618 Then generate an address of REG+(CONST&0xffff), allowing for the
2619 possibility of bit 16 being a one.
2621 Then check for the sum of a register and something not constant, try to
2622 load the other things into a register and return the sum. */
2625 rs6000_legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
2626 enum machine_mode mode)
2628 if (GET_CODE (x) == SYMBOL_REF)
2630 enum tls_model model = SYMBOL_REF_TLS_MODEL (x);
2632 return rs6000_legitimize_tls_address (x, model);
2635 if (GET_CODE (x) == PLUS
2636 && GET_CODE (XEXP (x, 0)) == REG
2637 && GET_CODE (XEXP (x, 1)) == CONST_INT
2638 && (unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1)) + 0x8000) >= 0x10000)
2640 HOST_WIDE_INT high_int, low_int;
2642 low_int = ((INTVAL (XEXP (x, 1)) & 0xffff) ^ 0x8000) - 0x8000;
2643 high_int = INTVAL (XEXP (x, 1)) - low_int;
2644 sum = force_operand (gen_rtx_PLUS (Pmode, XEXP (x, 0),
2645 GEN_INT (high_int)), 0);
2646 return gen_rtx_PLUS (Pmode, sum, GEN_INT (low_int));
2648 else if (GET_CODE (x) == PLUS
2649 && GET_CODE (XEXP (x, 0)) == REG
2650 && GET_CODE (XEXP (x, 1)) != CONST_INT
2651 && GET_MODE_NUNITS (mode) == 1
2652 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
2654 || (((mode != DImode && mode != DFmode) || TARGET_E500_DOUBLE)
2656 && (TARGET_POWERPC64 || mode != DImode)
2659 return gen_rtx_PLUS (Pmode, XEXP (x, 0),
2660 force_reg (Pmode, force_operand (XEXP (x, 1), 0)));
2662 else if (ALTIVEC_VECTOR_MODE (mode))
2666 /* Make sure both operands are registers. */
2667 if (GET_CODE (x) == PLUS)
2668 return gen_rtx_PLUS (Pmode, force_reg (Pmode, XEXP (x, 0)),
2669 force_reg (Pmode, XEXP (x, 1)));
2671 reg = force_reg (Pmode, x);
2674 else if (SPE_VECTOR_MODE (mode)
2675 || (TARGET_E500_DOUBLE && (mode == DFmode
2676 || mode == DImode)))
2680 /* We accept [reg + reg] and [reg + OFFSET]. */
2682 if (GET_CODE (x) == PLUS)
2684 rtx op1 = XEXP (x, 0);
2685 rtx op2 = XEXP (x, 1);
2687 op1 = force_reg (Pmode, op1);
2689 if (GET_CODE (op2) != REG
2690 && (GET_CODE (op2) != CONST_INT
2691 || !SPE_CONST_OFFSET_OK (INTVAL (op2))))
2692 op2 = force_reg (Pmode, op2);
2694 return gen_rtx_PLUS (Pmode, op1, op2);
2697 return force_reg (Pmode, x);
2703 && GET_CODE (x) != CONST_INT
2704 && GET_CODE (x) != CONST_DOUBLE
2706 && GET_MODE_NUNITS (mode) == 1
2707 && (GET_MODE_BITSIZE (mode) <= 32
2708 || ((TARGET_HARD_FLOAT && TARGET_FPRS) && mode == DFmode)))
2710 rtx reg = gen_reg_rtx (Pmode);
2711 emit_insn (gen_elf_high (reg, x));
2712 return gen_rtx_LO_SUM (Pmode, reg, x);
2714 else if (TARGET_MACHO && TARGET_32BIT && TARGET_NO_TOC
2717 && ! MACHO_DYNAMIC_NO_PIC_P
2719 && GET_CODE (x) != CONST_INT
2720 && GET_CODE (x) != CONST_DOUBLE
2722 && ((TARGET_HARD_FLOAT && TARGET_FPRS) || mode != DFmode)
2726 rtx reg = gen_reg_rtx (Pmode);
2727 emit_insn (gen_macho_high (reg, x));
2728 return gen_rtx_LO_SUM (Pmode, reg, x);
2731 && constant_pool_expr_p (x)
2732 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), Pmode))
2734 return create_TOC_reference (x);
2740 /* This is called from dwarf2out.c via TARGET_ASM_OUTPUT_DWARF_DTPREL.
2741 We need to emit DTP-relative relocations. */
2744 rs6000_output_dwarf_dtprel (FILE *file, int size, rtx x)
2749 fputs ("\t.long\t", file);
2752 fputs (DOUBLE_INT_ASM_OP, file);
2757 output_addr_const (file, x);
2758 fputs ("@dtprel+0x8000", file);
2761 /* Construct the SYMBOL_REF for the tls_get_addr function. */
2763 static GTY(()) rtx rs6000_tls_symbol;
2765 rs6000_tls_get_addr (void)
2767 if (!rs6000_tls_symbol)
2768 rs6000_tls_symbol = init_one_libfunc ("__tls_get_addr");
2770 return rs6000_tls_symbol;
2773 /* Construct the SYMBOL_REF for TLS GOT references. */
2775 static GTY(()) rtx rs6000_got_symbol;
2777 rs6000_got_sym (void)
2779 if (!rs6000_got_symbol)
2781 rs6000_got_symbol = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
2782 SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_LOCAL;
2783 SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_EXTERNAL;
2786 return rs6000_got_symbol;
2789 /* ADDR contains a thread-local SYMBOL_REF. Generate code to compute
2790 this (thread-local) address. */
2793 rs6000_legitimize_tls_address (rtx addr, enum tls_model model)
2797 dest = gen_reg_rtx (Pmode);
2798 if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 16)
2804 tlsreg = gen_rtx_REG (Pmode, 13);
2805 insn = gen_tls_tprel_64 (dest, tlsreg, addr);
2809 tlsreg = gen_rtx_REG (Pmode, 2);
2810 insn = gen_tls_tprel_32 (dest, tlsreg, addr);
2814 else if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 32)
2818 tmp = gen_reg_rtx (Pmode);
2821 tlsreg = gen_rtx_REG (Pmode, 13);
2822 insn = gen_tls_tprel_ha_64 (tmp, tlsreg, addr);
2826 tlsreg = gen_rtx_REG (Pmode, 2);
2827 insn = gen_tls_tprel_ha_32 (tmp, tlsreg, addr);
2831 insn = gen_tls_tprel_lo_64 (dest, tmp, addr);
2833 insn = gen_tls_tprel_lo_32 (dest, tmp, addr);
2838 rtx r3, got, tga, tmp1, tmp2, eqv;
2841 got = gen_rtx_REG (Pmode, TOC_REGISTER);
2845 got = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
2848 rtx gsym = rs6000_got_sym ();
2849 got = gen_reg_rtx (Pmode);
2851 rs6000_emit_move (got, gsym, Pmode);
2854 rtx tempLR, tmp3, mem;
2857 tempLR = gen_reg_rtx (Pmode);
2858 tmp1 = gen_reg_rtx (Pmode);
2859 tmp2 = gen_reg_rtx (Pmode);
2860 tmp3 = gen_reg_rtx (Pmode);
2861 mem = gen_const_mem (Pmode, tmp1);
2863 first = emit_insn (gen_load_toc_v4_PIC_1b (tempLR, gsym));
2864 emit_move_insn (tmp1, tempLR);
2865 emit_move_insn (tmp2, mem);
2866 emit_insn (gen_addsi3 (tmp3, tmp1, tmp2));
2867 last = emit_move_insn (got, tmp3);
2868 REG_NOTES (last) = gen_rtx_EXPR_LIST (REG_EQUAL, gsym,
2870 REG_NOTES (first) = gen_rtx_INSN_LIST (REG_LIBCALL, last,
2872 REG_NOTES (last) = gen_rtx_INSN_LIST (REG_RETVAL, first,
2878 if (model == TLS_MODEL_GLOBAL_DYNAMIC)
2880 r3 = gen_rtx_REG (Pmode, 3);
2882 insn = gen_tls_gd_64 (r3, got, addr);
2884 insn = gen_tls_gd_32 (r3, got, addr);
2887 tga = gen_rtx_MEM (Pmode, rs6000_tls_get_addr ());
2888 insn = gen_call_value (r3, tga, const0_rtx, const0_rtx);
2889 insn = emit_call_insn (insn);
2890 CONST_OR_PURE_CALL_P (insn) = 1;
2891 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
2892 insn = get_insns ();
2894 emit_libcall_block (insn, dest, r3, addr);
2896 else if (model == TLS_MODEL_LOCAL_DYNAMIC)
2898 r3 = gen_rtx_REG (Pmode, 3);
2900 insn = gen_tls_ld_64 (r3, got);
2902 insn = gen_tls_ld_32 (r3, got);
2905 tga = gen_rtx_MEM (Pmode, rs6000_tls_get_addr ());
2906 insn = gen_call_value (r3, tga, const0_rtx, const0_rtx);
2907 insn = emit_call_insn (insn);
2908 CONST_OR_PURE_CALL_P (insn) = 1;
2909 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
2910 insn = get_insns ();
2912 tmp1 = gen_reg_rtx (Pmode);
2913 eqv = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx),
2915 emit_libcall_block (insn, tmp1, r3, eqv);
2916 if (rs6000_tls_size == 16)
2919 insn = gen_tls_dtprel_64 (dest, tmp1, addr);
2921 insn = gen_tls_dtprel_32 (dest, tmp1, addr);
2923 else if (rs6000_tls_size == 32)
2925 tmp2 = gen_reg_rtx (Pmode);
2927 insn = gen_tls_dtprel_ha_64 (tmp2, tmp1, addr);
2929 insn = gen_tls_dtprel_ha_32 (tmp2, tmp1, addr);
2932 insn = gen_tls_dtprel_lo_64 (dest, tmp2, addr);
2934 insn = gen_tls_dtprel_lo_32 (dest, tmp2, addr);
2938 tmp2 = gen_reg_rtx (Pmode);
2940 insn = gen_tls_got_dtprel_64 (tmp2, got, addr);
2942 insn = gen_tls_got_dtprel_32 (tmp2, got, addr);
2944 insn = gen_rtx_SET (Pmode, dest,
2945 gen_rtx_PLUS (Pmode, tmp2, tmp1));
2951 /* IE, or 64 bit offset LE. */
2952 tmp2 = gen_reg_rtx (Pmode);
2954 insn = gen_tls_got_tprel_64 (tmp2, got, addr);
2956 insn = gen_tls_got_tprel_32 (tmp2, got, addr);
2959 insn = gen_tls_tls_64 (dest, tmp2, addr);
2961 insn = gen_tls_tls_32 (dest, tmp2, addr);
2969 /* Return 1 if X contains a thread-local symbol. */
2972 rs6000_tls_referenced_p (rtx x)
2974 if (! TARGET_HAVE_TLS)
2977 return for_each_rtx (&x, &rs6000_tls_symbol_ref_1, 0);
2980 /* Return 1 if *X is a thread-local symbol. This is the same as
2981 rs6000_tls_symbol_ref except for the type of the unused argument. */
2984 rs6000_tls_symbol_ref_1 (rtx *x, void *data ATTRIBUTE_UNUSED)
2986 return RS6000_SYMBOL_REF_TLS_P (*x);
2989 /* The convention appears to be to define this wherever it is used.
2990 With legitimize_reload_address now defined here, REG_MODE_OK_FOR_BASE_P
2991 is now used here. */
2992 #ifndef REG_MODE_OK_FOR_BASE_P
2993 #define REG_MODE_OK_FOR_BASE_P(REGNO, MODE) REG_OK_FOR_BASE_P (REGNO)
2996 /* Our implementation of LEGITIMIZE_RELOAD_ADDRESS. Returns a value to
2997 replace the input X, or the original X if no replacement is called for.
2998 The output parameter *WIN is 1 if the calling macro should goto WIN,
3001 For RS/6000, we wish to handle large displacements off a base
3002 register by splitting the addend across an addiu/addis and the mem insn.
3003 This cuts number of extra insns needed from 3 to 1.
3005 On Darwin, we use this to generate code for floating point constants.
3006 A movsf_low is generated so we wind up with 2 instructions rather than 3.
3007 The Darwin code is inside #if TARGET_MACHO because only then is
3008 machopic_function_base_name() defined. */
3010 rs6000_legitimize_reload_address (rtx x, enum machine_mode mode,
3011 int opnum, int type,
3012 int ind_levels ATTRIBUTE_UNUSED, int *win)
3014 /* We must recognize output that we have already generated ourselves. */
3015 if (GET_CODE (x) == PLUS
3016 && GET_CODE (XEXP (x, 0)) == PLUS
3017 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
3018 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3019 && GET_CODE (XEXP (x, 1)) == CONST_INT)
3021 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
3022 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
3023 opnum, (enum reload_type)type);
3029 if (DEFAULT_ABI == ABI_DARWIN && flag_pic
3030 && GET_CODE (x) == LO_SUM
3031 && GET_CODE (XEXP (x, 0)) == PLUS
3032 && XEXP (XEXP (x, 0), 0) == pic_offset_table_rtx
3033 && GET_CODE (XEXP (XEXP (x, 0), 1)) == HIGH
3034 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 1), 0)) == CONST
3035 && XEXP (XEXP (XEXP (x, 0), 1), 0) == XEXP (x, 1)
3036 && GET_CODE (XEXP (XEXP (x, 1), 0)) == MINUS
3037 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == SYMBOL_REF
3038 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == SYMBOL_REF)
3040 /* Result of previous invocation of this function on Darwin
3041 floating point constant. */
3042 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
3043 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
3044 opnum, (enum reload_type)type);
3050 /* Force ld/std non-word aligned offset into base register by wrapping
3052 if (GET_CODE (x) == PLUS
3053 && GET_CODE (XEXP (x, 0)) == REG
3054 && REGNO (XEXP (x, 0)) < 32
3055 && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
3056 && GET_CODE (XEXP (x, 1)) == CONST_INT
3057 && (INTVAL (XEXP (x, 1)) & 3) != 0
3058 && !ALTIVEC_VECTOR_MODE (mode)
3059 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD
3060 && TARGET_POWERPC64)
3062 x = gen_rtx_PLUS (GET_MODE (x), x, GEN_INT (0));
3063 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
3064 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
3065 opnum, (enum reload_type) type);
3070 if (GET_CODE (x) == PLUS
3071 && GET_CODE (XEXP (x, 0)) == REG
3072 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
3073 && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
3074 && GET_CODE (XEXP (x, 1)) == CONST_INT
3075 && !SPE_VECTOR_MODE (mode)
3076 && !(TARGET_E500_DOUBLE && (mode == DFmode
3078 && !ALTIVEC_VECTOR_MODE (mode))
3080 HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
3081 HOST_WIDE_INT low = ((val & 0xffff) ^ 0x8000) - 0x8000;
3083 = (((val - low) & 0xffffffff) ^ 0x80000000) - 0x80000000;
3085 /* Check for 32-bit overflow. */
3086 if (high + low != val)
3092 /* Reload the high part into a base reg; leave the low part
3093 in the mem directly. */
3095 x = gen_rtx_PLUS (GET_MODE (x),
3096 gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0),
3100 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
3101 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
3102 opnum, (enum reload_type)type);
3108 if (GET_CODE (x) == SYMBOL_REF
3109 && DEFAULT_ABI == ABI_DARWIN
3110 && !ALTIVEC_VECTOR_MODE (mode)
3111 && (flag_pic || MACHO_DYNAMIC_NO_PIC_P)
3112 /* Don't do this for TFmode, since the result isn't offsettable.
3113 The same goes for DImode without 64-bit gprs. */
3115 && (mode != DImode || TARGET_POWERPC64))
3119 rtx offset = gen_rtx_CONST (Pmode,
3120 gen_rtx_MINUS (Pmode, x,
3121 machopic_function_base_sym ()));
3122 x = gen_rtx_LO_SUM (GET_MODE (x),
3123 gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
3124 gen_rtx_HIGH (Pmode, offset)), offset);
3127 x = gen_rtx_LO_SUM (GET_MODE (x),
3128 gen_rtx_HIGH (Pmode, x), x);
3130 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
3131 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
3132 opnum, (enum reload_type)type);
3139 && constant_pool_expr_p (x)
3140 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), mode))
3142 (x) = create_TOC_reference (x);
3150 /* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
3151 that is a valid memory address for an instruction.
3152 The MODE argument is the machine mode for the MEM expression
3153 that wants to use this address.
3155 On the RS/6000, there are four valid address: a SYMBOL_REF that
3156 refers to a constant pool entry of an address (or the sum of it
3157 plus a constant), a short (16-bit signed) constant plus a register,
3158 the sum of two registers, or a register indirect, possibly with an
3159 auto-increment. For DFmode and DImode with a constant plus register,
3160 we must ensure that both words are addressable or PowerPC64 with offset
3163 For modes spanning multiple registers (DFmode in 32-bit GPRs,
3164 32-bit DImode, TImode, TFmode), indexed addressing cannot be used because
3165 adjacent memory cells are accessed by adding word-sized offsets
3166 during assembly output. */
3168 rs6000_legitimate_address (enum machine_mode mode, rtx x, int reg_ok_strict)
3170 /* If this is an unaligned stvx/ldvx type address, discard the outer AND. */
3172 && ALTIVEC_VECTOR_MODE (mode)
3173 && GET_CODE (x) == AND
3174 && GET_CODE (XEXP (x, 1)) == CONST_INT
3175 && INTVAL (XEXP (x, 1)) == -16)
3178 if (RS6000_SYMBOL_REF_TLS_P (x))
3180 if (legitimate_indirect_address_p (x, reg_ok_strict))
3182 if ((GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
3183 && !ALTIVEC_VECTOR_MODE (mode)
3184 && !SPE_VECTOR_MODE (mode)
3185 /* Restrict addressing for DI because of our SUBREG hackery. */
3186 && !(TARGET_E500_DOUBLE && (mode == DFmode || mode == DImode))
3188 && legitimate_indirect_address_p (XEXP (x, 0), reg_ok_strict))
3190 if (legitimate_small_data_p (mode, x))
3192 if (legitimate_constant_pool_address_p (x))
3194 /* If not REG_OK_STRICT (before reload) let pass any stack offset. */
3196 && GET_CODE (x) == PLUS
3197 && GET_CODE (XEXP (x, 0)) == REG
3198 && (XEXP (x, 0) == virtual_stack_vars_rtx
3199 || XEXP (x, 0) == arg_pointer_rtx)
3200 && GET_CODE (XEXP (x, 1)) == CONST_INT)
3202 if (rs6000_legitimate_offset_address_p (mode, x, reg_ok_strict))
3206 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
3208 || ((mode != DFmode || TARGET_E500_DOUBLE) && mode != TFmode))
3209 && (TARGET_POWERPC64 || mode != DImode)
3210 && legitimate_indexed_address_p (x, reg_ok_strict))
3212 if (legitimate_lo_sum_address_p (mode, x, reg_ok_strict))
3217 /* Go to LABEL if ADDR (a legitimate address expression)
3218 has an effect that depends on the machine mode it is used for.
3220 On the RS/6000 this is true of all integral offsets (since AltiVec
3221 modes don't allow them) or is a pre-increment or decrement.
3223 ??? Except that due to conceptual problems in offsettable_address_p
3224 we can't really report the problems of integral offsets. So leave
3225 this assuming that the adjustable offset must be valid for the
3226 sub-words of a TFmode operand, which is what we had before. */
3229 rs6000_mode_dependent_address (rtx addr)
3231 switch (GET_CODE (addr))
3234 if (GET_CODE (XEXP (addr, 1)) == CONST_INT)
3236 unsigned HOST_WIDE_INT val = INTVAL (XEXP (addr, 1));
3237 return val + 12 + 0x8000 >= 0x10000;
3246 return TARGET_UPDATE;
3255 /* Return number of consecutive hard regs needed starting at reg REGNO
3256 to hold something of mode MODE.
3257 This is ordinarily the length in words of a value of mode MODE
3258 but can be less for certain modes in special long registers.
3260 For the SPE, GPRs are 64 bits but only 32 bits are visible in
3261 scalar instructions. The upper 32 bits are only available to the
3264 POWER and PowerPC GPRs hold 32 bits worth;
3265 PowerPC64 GPRs and FPRs point register holds 64 bits worth. */
3268 rs6000_hard_regno_nregs (int regno, enum machine_mode mode)
3270 if (FP_REGNO_P (regno))
3271 return (GET_MODE_SIZE (mode) + UNITS_PER_FP_WORD - 1) / UNITS_PER_FP_WORD;
3273 if (TARGET_E500_DOUBLE && mode == DFmode)
3276 if (SPE_SIMD_REGNO_P (regno) && TARGET_SPE && SPE_VECTOR_MODE (mode))
3277 return (GET_MODE_SIZE (mode) + UNITS_PER_SPE_WORD - 1) / UNITS_PER_SPE_WORD;
3279 if (ALTIVEC_REGNO_P (regno))
3281 (GET_MODE_SIZE (mode) + UNITS_PER_ALTIVEC_WORD - 1) / UNITS_PER_ALTIVEC_WORD;
3283 return (GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
3286 /* Change register usage conditional on target flags. */
3288 rs6000_conditional_register_usage (void)
3292 /* Set MQ register fixed (already call_used) if not POWER
3293 architecture (RIOS1, RIOS2, RSC, and PPC601) so that it will not
3298 /* 64-bit AIX and Linux reserve GPR13 for thread-private data. */
3300 fixed_regs[13] = call_used_regs[13]
3301 = call_really_used_regs[13] = 1;
3303 /* Conditionally disable FPRs. */
3304 if (TARGET_SOFT_FLOAT || !TARGET_FPRS)
3305 for (i = 32; i < 64; i++)
3306 fixed_regs[i] = call_used_regs[i]
3307 = call_really_used_regs[i] = 1;
3309 /* The TOC register is not killed across calls in a way that is
3310 visible to the compiler. */
3311 if (DEFAULT_ABI == ABI_AIX)
3312 call_really_used_regs[2] = 0;
3314 if (DEFAULT_ABI == ABI_V4
3315 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
3317 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
3319 if (DEFAULT_ABI == ABI_V4
3320 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
3322 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
3323 = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
3324 = call_really_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
3326 if (DEFAULT_ABI == ABI_DARWIN
3327 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
3328 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
3329 = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
3330 = call_really_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
3332 if (TARGET_TOC && TARGET_MINIMAL_TOC)
3333 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
3334 = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
3337 global_regs[VSCR_REGNO] = 1;
3341 global_regs[SPEFSCR_REGNO] = 1;
3342 fixed_regs[FIXED_SCRATCH]
3343 = call_used_regs[FIXED_SCRATCH]
3344 = call_really_used_regs[FIXED_SCRATCH] = 1;
3347 if (! TARGET_ALTIVEC)
3349 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
3350 fixed_regs[i] = call_used_regs[i] = call_really_used_regs[i] = 1;
3351 call_really_used_regs[VRSAVE_REGNO] = 1;
3354 if (TARGET_ALTIVEC_ABI)
3355 for (i = FIRST_ALTIVEC_REGNO; i < FIRST_ALTIVEC_REGNO + 20; ++i)
3356 call_used_regs[i] = call_really_used_regs[i] = 1;
3359 /* Try to output insns to set TARGET equal to the constant C if it can
3360 be done in less than N insns. Do all computations in MODE.
3361 Returns the place where the output has been placed if it can be
3362 done and the insns have been emitted. If it would take more than N
3363 insns, zero is returned and no insns and emitted. */
3366 rs6000_emit_set_const (rtx dest, enum machine_mode mode,
3367 rtx source, int n ATTRIBUTE_UNUSED)
3369 rtx result, insn, set;
3370 HOST_WIDE_INT c0, c1;
3377 dest = gen_reg_rtx (mode);
3378 emit_insn (gen_rtx_SET (VOIDmode, dest, source));
3382 result = no_new_pseudos ? dest : gen_reg_rtx (SImode);
3384 emit_insn (gen_rtx_SET (VOIDmode, result,
3385 GEN_INT (INTVAL (source)
3386 & (~ (HOST_WIDE_INT) 0xffff))));
3387 emit_insn (gen_rtx_SET (VOIDmode, dest,
3388 gen_rtx_IOR (SImode, result,
3389 GEN_INT (INTVAL (source) & 0xffff))));
3394 switch (GET_CODE (source))
3397 c0 = INTVAL (source);
3402 #if HOST_BITS_PER_WIDE_INT >= 64
3403 c0 = CONST_DOUBLE_LOW (source);
3406 c0 = CONST_DOUBLE_LOW (source);
3407 c1 = CONST_DOUBLE_HIGH (source);
3415 result = rs6000_emit_set_long_const (dest, c0, c1);
3422 insn = get_last_insn ();
3423 set = single_set (insn);
3424 if (! CONSTANT_P (SET_SRC (set)))
3425 set_unique_reg_note (insn, REG_EQUAL, source);
3430 /* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
3431 fall back to a straight forward decomposition. We do this to avoid
3432 exponential run times encountered when looking for longer sequences
3433 with rs6000_emit_set_const. */
3435 rs6000_emit_set_long_const (rtx dest, HOST_WIDE_INT c1, HOST_WIDE_INT c2)
3437 if (!TARGET_POWERPC64)
3439 rtx operand1, operand2;
3441 operand1 = operand_subword_force (dest, WORDS_BIG_ENDIAN == 0,
3443 operand2 = operand_subword_force (dest, WORDS_BIG_ENDIAN != 0,
3445 emit_move_insn (operand1, GEN_INT (c1));
3446 emit_move_insn (operand2, GEN_INT (c2));
3450 HOST_WIDE_INT ud1, ud2, ud3, ud4;
3453 ud2 = (c1 & 0xffff0000) >> 16;
3454 #if HOST_BITS_PER_WIDE_INT >= 64
3458 ud4 = (c2 & 0xffff0000) >> 16;
3460 if ((ud4 == 0xffff && ud3 == 0xffff && ud2 == 0xffff && (ud1 & 0x8000))
3461 || (ud4 == 0 && ud3 == 0 && ud2 == 0 && ! (ud1 & 0x8000)))
3464 emit_move_insn (dest, GEN_INT (((ud1 ^ 0x8000) - 0x8000)));
3466 emit_move_insn (dest, GEN_INT (ud1));
3469 else if ((ud4 == 0xffff && ud3 == 0xffff && (ud2 & 0x8000))
3470 || (ud4 == 0 && ud3 == 0 && ! (ud2 & 0x8000)))
3473 emit_move_insn (dest, GEN_INT (((ud2 << 16) ^ 0x80000000)
3476 emit_move_insn (dest, GEN_INT (ud2 << 16));
3478 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
3480 else if ((ud4 == 0xffff && (ud3 & 0x8000))
3481 || (ud4 == 0 && ! (ud3 & 0x8000)))
3484 emit_move_insn (dest, GEN_INT (((ud3 << 16) ^ 0x80000000)
3487 emit_move_insn (dest, GEN_INT (ud3 << 16));
3490 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud2)));
3491 emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (16)));
3493 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
3498 emit_move_insn (dest, GEN_INT (((ud4 << 16) ^ 0x80000000)
3501 emit_move_insn (dest, GEN_INT (ud4 << 16));
3504 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud3)));
3506 emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (32)));
3508 emit_move_insn (dest, gen_rtx_IOR (DImode, dest,
3509 GEN_INT (ud2 << 16)));
3511 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
3517 /* Helper for the following. Get rid of [r+r] memory refs
3518 in cases where it won't work (TImode, TFmode). */
3521 rs6000_eliminate_indexed_memrefs (rtx operands[2])
3523 if (GET_CODE (operands[0]) == MEM
3524 && GET_CODE (XEXP (operands[0], 0)) != REG
3525 && ! legitimate_constant_pool_address_p (XEXP (operands[0], 0))
3526 && ! reload_in_progress)
3528 = replace_equiv_address (operands[0],
3529 copy_addr_to_reg (XEXP (operands[0], 0)));
3531 if (GET_CODE (operands[1]) == MEM
3532 && GET_CODE (XEXP (operands[1], 0)) != REG
3533 && ! legitimate_constant_pool_address_p (XEXP (operands[1], 0))
3534 && ! reload_in_progress)
3536 = replace_equiv_address (operands[1],
3537 copy_addr_to_reg (XEXP (operands[1], 0)));
3540 /* Emit a move from SOURCE to DEST in mode MODE. */
3542 rs6000_emit_move (rtx dest, rtx source, enum machine_mode mode)
3546 operands[1] = source;
3548 /* Sanity checks. Check that we get CONST_DOUBLE only when we should. */
3549 if (GET_CODE (operands[1]) == CONST_DOUBLE
3550 && ! FLOAT_MODE_P (mode)
3551 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3553 /* FIXME. This should never happen. */
3554 /* Since it seems that it does, do the safe thing and convert
3556 operands[1] = gen_int_mode (CONST_DOUBLE_LOW (operands[1]), mode);
3558 gcc_assert (GET_CODE (operands[1]) != CONST_DOUBLE
3559 || FLOAT_MODE_P (mode)
3560 || ((CONST_DOUBLE_HIGH (operands[1]) != 0
3561 || CONST_DOUBLE_LOW (operands[1]) < 0)
3562 && (CONST_DOUBLE_HIGH (operands[1]) != -1
3563 || CONST_DOUBLE_LOW (operands[1]) >= 0)));
3565 /* Check if GCC is setting up a block move that will end up using FP
3566 registers as temporaries. We must make sure this is acceptable. */
3567 if (GET_CODE (operands[0]) == MEM
3568 && GET_CODE (operands[1]) == MEM
3570 && (SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[0]))
3571 || SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[1])))
3572 && ! (SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[0]) > 32
3573 ? 32 : MEM_ALIGN (operands[0])))
3574 || SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[1]) > 32
3576 : MEM_ALIGN (operands[1]))))
3577 && ! MEM_VOLATILE_P (operands [0])
3578 && ! MEM_VOLATILE_P (operands [1]))
3580 emit_move_insn (adjust_address (operands[0], SImode, 0),
3581 adjust_address (operands[1], SImode, 0));
3582 emit_move_insn (adjust_address (operands[0], SImode, 4),
3583 adjust_address (operands[1], SImode, 4));
3587 if (!no_new_pseudos && GET_CODE (operands[0]) == MEM
3588 && !gpc_reg_operand (operands[1], mode))
3589 operands[1] = force_reg (mode, operands[1]);
3591 if (mode == SFmode && ! TARGET_POWERPC
3592 && TARGET_HARD_FLOAT && TARGET_FPRS
3593 && GET_CODE (operands[0]) == MEM)
3597 if (reload_in_progress || reload_completed)
3598 regnum = true_regnum (operands[1]);
3599 else if (GET_CODE (operands[1]) == REG)
3600 regnum = REGNO (operands[1]);
3604 /* If operands[1] is a register, on POWER it may have
3605 double-precision data in it, so truncate it to single
3607 if (FP_REGNO_P (regnum) || regnum >= FIRST_PSEUDO_REGISTER)
3610 newreg = (no_new_pseudos ? operands[1] : gen_reg_rtx (mode));
3611 emit_insn (gen_aux_truncdfsf2 (newreg, operands[1]));
3612 operands[1] = newreg;
3616 /* Recognize the case where operand[1] is a reference to thread-local
3617 data and load its address to a register. */
3618 if (rs6000_tls_referenced_p (operands[1]))
3620 enum tls_model model;
3621 rtx tmp = operands[1];
3624 if (GET_CODE (tmp) == CONST && GET_CODE (XEXP (tmp, 0)) == PLUS)
3626 addend = XEXP (XEXP (tmp, 0), 1);
3627 tmp = XEXP (XEXP (tmp, 0), 0);
3630 gcc_assert (GET_CODE (tmp) == SYMBOL_REF);
3631 model = SYMBOL_REF_TLS_MODEL (tmp);
3632 gcc_assert (model != 0);
3634 tmp = rs6000_legitimize_tls_address (tmp, model);
3637 tmp = gen_rtx_PLUS (mode, tmp, addend);
3638 tmp = force_operand (tmp, operands[0]);
3643 /* Handle the case where reload calls us with an invalid address. */
3644 if (reload_in_progress && mode == Pmode
3645 && (! general_operand (operands[1], mode)
3646 || ! nonimmediate_operand (operands[0], mode)))
3649 /* 128-bit constant floating-point values on Darwin should really be
3650 loaded as two parts. */
3651 if ((DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_DARWIN)
3652 && TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_LONG_DOUBLE_128
3653 && mode == TFmode && GET_CODE (operands[1]) == CONST_DOUBLE)
3655 /* DImode is used, not DFmode, because simplify_gen_subreg doesn't
3656 know how to get a DFmode SUBREG of a TFmode. */
3657 rs6000_emit_move (simplify_gen_subreg (DImode, operands[0], mode, 0),
3658 simplify_gen_subreg (DImode, operands[1], mode, 0),
3660 rs6000_emit_move (simplify_gen_subreg (DImode, operands[0], mode,
3661 GET_MODE_SIZE (DImode)),
3662 simplify_gen_subreg (DImode, operands[1], mode,
3663 GET_MODE_SIZE (DImode)),
3668 /* FIXME: In the long term, this switch statement should go away
3669 and be replaced by a sequence of tests based on things like
3675 if (CONSTANT_P (operands[1])
3676 && GET_CODE (operands[1]) != CONST_INT)
3677 operands[1] = force_const_mem (mode, operands[1]);
3681 rs6000_eliminate_indexed_memrefs (operands);
3686 if (CONSTANT_P (operands[1])
3687 && ! easy_fp_constant (operands[1], mode))
3688 operands[1] = force_const_mem (mode, operands[1]);
3699 if (CONSTANT_P (operands[1])
3700 && !easy_vector_constant (operands[1], mode))
3701 operands[1] = force_const_mem (mode, operands[1]);
3706 /* Use default pattern for address of ELF small data */
3709 && DEFAULT_ABI == ABI_V4
3710 && (GET_CODE (operands[1]) == SYMBOL_REF
3711 || GET_CODE (operands[1]) == CONST)
3712 && small_data_operand (operands[1], mode))
3714 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
3718 if (DEFAULT_ABI == ABI_V4
3719 && mode == Pmode && mode == SImode
3720 && flag_pic == 1 && got_operand (operands[1], mode))
3722 emit_insn (gen_movsi_got (operands[0], operands[1]));
3726 if ((TARGET_ELF || DEFAULT_ABI == ABI_DARWIN)
3730 && CONSTANT_P (operands[1])
3731 && GET_CODE (operands[1]) != HIGH
3732 && GET_CODE (operands[1]) != CONST_INT)
3734 rtx target = (no_new_pseudos ? operands[0] : gen_reg_rtx (mode));
3736 /* If this is a function address on -mcall-aixdesc,
3737 convert it to the address of the descriptor. */
3738 if (DEFAULT_ABI == ABI_AIX
3739 && GET_CODE (operands[1]) == SYMBOL_REF
3740 && XSTR (operands[1], 0)[0] == '.')
3742 const char *name = XSTR (operands[1], 0);
3744 while (*name == '.')
3746 new_ref = gen_rtx_SYMBOL_REF (Pmode, name);
3747 CONSTANT_POOL_ADDRESS_P (new_ref)
3748 = CONSTANT_POOL_ADDRESS_P (operands[1]);
3749 SYMBOL_REF_FLAGS (new_ref) = SYMBOL_REF_FLAGS (operands[1]);
3750 SYMBOL_REF_USED (new_ref) = SYMBOL_REF_USED (operands[1]);
3751 SYMBOL_REF_DECL (new_ref) = SYMBOL_REF_DECL (operands[1]);
3752 operands[1] = new_ref;
3755 if (DEFAULT_ABI == ABI_DARWIN)
3758 if (MACHO_DYNAMIC_NO_PIC_P)
3760 /* Take care of any required data indirection. */
3761 operands[1] = rs6000_machopic_legitimize_pic_address (
3762 operands[1], mode, operands[0]);
3763 if (operands[0] != operands[1])
3764 emit_insn (gen_rtx_SET (VOIDmode,
3765 operands[0], operands[1]));
3769 emit_insn (gen_macho_high (target, operands[1]));
3770 emit_insn (gen_macho_low (operands[0], target, operands[1]));
3774 emit_insn (gen_elf_high (target, operands[1]));
3775 emit_insn (gen_elf_low (operands[0], target, operands[1]));
3779 /* If this is a SYMBOL_REF that refers to a constant pool entry,
3780 and we have put it in the TOC, we just need to make a TOC-relative
3783 && GET_CODE (operands[1]) == SYMBOL_REF
3784 && constant_pool_expr_p (operands[1])
3785 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands[1]),
3786 get_pool_mode (operands[1])))
3788 operands[1] = create_TOC_reference (operands[1]);
3790 else if (mode == Pmode
3791 && CONSTANT_P (operands[1])
3792 && ((GET_CODE (operands[1]) != CONST_INT
3793 && ! easy_fp_constant (operands[1], mode))
3794 || (GET_CODE (operands[1]) == CONST_INT
3795 && num_insns_constant (operands[1], mode) > 2)
3796 || (GET_CODE (operands[0]) == REG
3797 && FP_REGNO_P (REGNO (operands[0]))))
3798 && GET_CODE (operands[1]) != HIGH
3799 && ! legitimate_constant_pool_address_p (operands[1])
3800 && ! toc_relative_expr_p (operands[1]))
3802 /* Emit a USE operation so that the constant isn't deleted if
3803 expensive optimizations are turned on because nobody
3804 references it. This should only be done for operands that
3805 contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
3806 This should not be done for operands that contain LABEL_REFs.
3807 For now, we just handle the obvious case. */
3808 if (GET_CODE (operands[1]) != LABEL_REF)
3809 emit_insn (gen_rtx_USE (VOIDmode, operands[1]));
3812 /* Darwin uses a special PIC legitimizer. */
3813 if (DEFAULT_ABI == ABI_DARWIN && MACHOPIC_INDIRECT)
3816 rs6000_machopic_legitimize_pic_address (operands[1], mode,
3818 if (operands[0] != operands[1])
3819 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
3824 /* If we are to limit the number of things we put in the TOC and
3825 this is a symbol plus a constant we can add in one insn,
3826 just put the symbol in the TOC and add the constant. Don't do
3827 this if reload is in progress. */
3828 if (GET_CODE (operands[1]) == CONST
3829 && TARGET_NO_SUM_IN_TOC && ! reload_in_progress
3830 && GET_CODE (XEXP (operands[1], 0)) == PLUS
3831 && add_operand (XEXP (XEXP (operands[1], 0), 1), mode)
3832 && (GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == LABEL_REF
3833 || GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF)
3834 && ! side_effects_p (operands[0]))
3837 force_const_mem (mode, XEXP (XEXP (operands[1], 0), 0));
3838 rtx other = XEXP (XEXP (operands[1], 0), 1);
3840 sym = force_reg (mode, sym);
3842 emit_insn (gen_addsi3 (operands[0], sym, other));
3844 emit_insn (gen_adddi3 (operands[0], sym, other));
3848 operands[1] = force_const_mem (mode, operands[1]);
3851 && constant_pool_expr_p (XEXP (operands[1], 0))
3852 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
3853 get_pool_constant (XEXP (operands[1], 0)),
3854 get_pool_mode (XEXP (operands[1], 0))))
3857 = gen_const_mem (mode,
3858 create_TOC_reference (XEXP (operands[1], 0)));
3859 set_mem_alias_set (operands[1], get_TOC_alias_set ());
3865 rs6000_eliminate_indexed_memrefs (operands);
3869 emit_insn (gen_rtx_PARALLEL (VOIDmode,
3871 gen_rtx_SET (VOIDmode,
3872 operands[0], operands[1]),
3873 gen_rtx_CLOBBER (VOIDmode,
3874 gen_rtx_SCRATCH (SImode)))));
3883 /* Above, we may have called force_const_mem which may have returned
3884 an invalid address. If we can, fix this up; otherwise, reload will
3885 have to deal with it. */
3886 if (GET_CODE (operands[1]) == MEM && ! reload_in_progress)
3887 operands[1] = validize_mem (operands[1]);
3890 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
3893 /* Nonzero if we can use a floating-point register to pass this arg. */
3894 #define USE_FP_FOR_ARG_P(CUM,MODE,TYPE) \
3895 (GET_MODE_CLASS (MODE) == MODE_FLOAT \
3896 && (CUM)->fregno <= FP_ARG_MAX_REG \
3897 && TARGET_HARD_FLOAT && TARGET_FPRS)
3899 /* Nonzero if we can use an AltiVec register to pass this arg. */
3900 #define USE_ALTIVEC_FOR_ARG_P(CUM,MODE,TYPE,NAMED) \
3901 (ALTIVEC_VECTOR_MODE (MODE) \
3902 && (CUM)->vregno <= ALTIVEC_ARG_MAX_REG \
3903 && TARGET_ALTIVEC_ABI \
3906 /* Return a nonzero value to say to return the function value in
3907 memory, just as large structures are always returned. TYPE will be
3908 the data type of the value, and FNTYPE will be the type of the
3909 function doing the returning, or @code{NULL} for libcalls.
3911 The AIX ABI for the RS/6000 specifies that all structures are
3912 returned in memory. The Darwin ABI does the same. The SVR4 ABI
3913 specifies that structures <= 8 bytes are returned in r3/r4, but a
3914 draft put them in memory, and GCC used to implement the draft
3915 instead of the final standard. Therefore, aix_struct_return
3916 controls this instead of DEFAULT_ABI; V.4 targets needing backward
3917 compatibility can change DRAFT_V4_STRUCT_RET to override the
3918 default, and -m switches get the final word. See
3919 rs6000_override_options for more details.
3921 The PPC32 SVR4 ABI uses IEEE double extended for long double, if 128-bit
3922 long double support is enabled. These values are returned in memory.
3924 int_size_in_bytes returns -1 for variable size objects, which go in
3925 memory always. The cast to unsigned makes -1 > 8. */
3928 rs6000_return_in_memory (tree type, tree fntype ATTRIBUTE_UNUSED)
3930 /* In the darwin64 abi, try to use registers for larger structs
3932 if (rs6000_darwin64_abi
3933 && TREE_CODE (type) == RECORD_TYPE
3934 && int_size_in_bytes (type) > 0)
3936 CUMULATIVE_ARGS valcum;
3940 valcum.fregno = FP_ARG_MIN_REG;
3941 valcum.vregno = ALTIVEC_ARG_MIN_REG;
3942 /* Do a trial code generation as if this were going to be passed
3943 as an argument; if any part goes in memory, we return NULL. */
3944 valret = rs6000_darwin64_record_arg (&valcum, type, 1, true);
3947 /* Otherwise fall through to more conventional ABI rules. */
3950 if (AGGREGATE_TYPE_P (type)
3951 && (aix_struct_return
3952 || (unsigned HOST_WIDE_INT) int_size_in_bytes (type) > 8))
3955 /* Allow -maltivec -mabi=no-altivec without warning. Altivec vector
3956 modes only exist for GCC vector types if -maltivec. */
3957 if (TARGET_32BIT && !TARGET_ALTIVEC_ABI
3958 && ALTIVEC_VECTOR_MODE (TYPE_MODE (type)))
3961 /* Return synthetic vectors in memory. */
3962 if (TREE_CODE (type) == VECTOR_TYPE
3963 && int_size_in_bytes (type) > (TARGET_ALTIVEC_ABI ? 16 : 8))
3965 static bool warned_for_return_big_vectors = false;
3966 if (!warned_for_return_big_vectors)
3968 warning (0, "GCC vector returned by reference: "
3969 "non-standard ABI extension with no compatibility guarantee");
3970 warned_for_return_big_vectors = true;
3975 if (DEFAULT_ABI == ABI_V4 && TYPE_MODE (type) == TFmode)
3981 /* Initialize a variable CUM of type CUMULATIVE_ARGS
3982 for a call to a function whose data type is FNTYPE.
3983 For a library call, FNTYPE is 0.
3985 For incoming args we set the number of arguments in the prototype large
3986 so we never return a PARALLEL. */
3989 init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype,
3990 rtx libname ATTRIBUTE_UNUSED, int incoming,
3991 int libcall, int n_named_args)
3993 static CUMULATIVE_ARGS zero_cumulative;
3995 *cum = zero_cumulative;
3997 cum->fregno = FP_ARG_MIN_REG;
3998 cum->vregno = ALTIVEC_ARG_MIN_REG;
3999 cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
4000 cum->call_cookie = ((DEFAULT_ABI == ABI_V4 && libcall)
4001 ? CALL_LIBCALL : CALL_NORMAL);
4002 cum->sysv_gregno = GP_ARG_MIN_REG;
4003 cum->stdarg = fntype
4004 && (TYPE_ARG_TYPES (fntype) != 0
4005 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
4006 != void_type_node));
4008 cum->nargs_prototype = 0;
4009 if (incoming || cum->prototype)
4010 cum->nargs_prototype = n_named_args;
4012 /* Check for a longcall attribute. */
4013 if ((!fntype && rs6000_default_long_calls)
4015 && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype))
4016 && !lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype))))
4017 cum->call_cookie |= CALL_LONG;
4019 if (TARGET_DEBUG_ARG)
4021 fprintf (stderr, "\ninit_cumulative_args:");
4024 tree ret_type = TREE_TYPE (fntype);
4025 fprintf (stderr, " ret code = %s,",
4026 tree_code_name[ (int)TREE_CODE (ret_type) ]);
4029 if (cum->call_cookie & CALL_LONG)
4030 fprintf (stderr, " longcall,");
4032 fprintf (stderr, " proto = %d, nargs = %d\n",
4033 cum->prototype, cum->nargs_prototype);
4038 && TARGET_ALTIVEC_ABI
4039 && ALTIVEC_VECTOR_MODE (TYPE_MODE (TREE_TYPE (fntype))))
4041 error ("Cannot return value in vector register because"
4042 " altivec instructions are disabled, use -maltivec"
4043 " to enable them.");
4047 /* Return true if TYPE must be passed on the stack and not in registers. */
4050 rs6000_must_pass_in_stack (enum machine_mode mode, tree type)
4052 if (DEFAULT_ABI == ABI_AIX || TARGET_64BIT)
4053 return must_pass_in_stack_var_size (mode, type);
4055 return must_pass_in_stack_var_size_or_pad (mode, type);
4058 /* If defined, a C expression which determines whether, and in which
4059 direction, to pad out an argument with extra space. The value
4060 should be of type `enum direction': either `upward' to pad above
4061 the argument, `downward' to pad below, or `none' to inhibit
4064 For the AIX ABI structs are always stored left shifted in their
4068 function_arg_padding (enum machine_mode mode, tree type)
4070 #ifndef AGGREGATE_PADDING_FIXED
4071 #define AGGREGATE_PADDING_FIXED 0
4073 #ifndef AGGREGATES_PAD_UPWARD_ALWAYS
4074 #define AGGREGATES_PAD_UPWARD_ALWAYS 0
4077 if (!AGGREGATE_PADDING_FIXED)
4079 /* GCC used to pass structures of the same size as integer types as
4080 if they were in fact integers, ignoring FUNCTION_ARG_PADDING.
4081 i.e. Structures of size 1 or 2 (or 4 when TARGET_64BIT) were
4082 passed padded downward, except that -mstrict-align further
4083 muddied the water in that multi-component structures of 2 and 4
4084 bytes in size were passed padded upward.
4086 The following arranges for best compatibility with previous
4087 versions of gcc, but removes the -mstrict-align dependency. */
4088 if (BYTES_BIG_ENDIAN)
4090 HOST_WIDE_INT size = 0;
4092 if (mode == BLKmode)
4094 if (type && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
4095 size = int_size_in_bytes (type);
4098 size = GET_MODE_SIZE (mode);
4100 if (size == 1 || size == 2 || size == 4)
4106 if (AGGREGATES_PAD_UPWARD_ALWAYS)
4108 if (type != 0 && AGGREGATE_TYPE_P (type))
4112 /* Fall back to the default. */
4113 return DEFAULT_FUNCTION_ARG_PADDING (mode, type);
4116 /* If defined, a C expression that gives the alignment boundary, in bits,
4117 of an argument with the specified mode and type. If it is not defined,
4118 PARM_BOUNDARY is used for all arguments.
4120 V.4 wants long longs to be double word aligned.
4121 Doubleword align SPE vectors.
4122 Quadword align Altivec vectors.
4123 Quadword align large synthetic vector types. */
4126 function_arg_boundary (enum machine_mode mode, tree type)
4128 if (DEFAULT_ABI == ABI_V4 && GET_MODE_SIZE (mode) == 8)
4130 else if (SPE_VECTOR_MODE (mode)
4131 || (type && TREE_CODE (type) == VECTOR_TYPE
4132 && int_size_in_bytes (type) >= 8
4133 && int_size_in_bytes (type) < 16))
4135 else if (ALTIVEC_VECTOR_MODE (mode)
4136 || (type && TREE_CODE (type) == VECTOR_TYPE
4137 && int_size_in_bytes (type) >= 16))
4139 else if (rs6000_darwin64_abi && mode == BLKmode
4140 && type && TYPE_ALIGN (type) > 64)
4143 return PARM_BOUNDARY;
4146 /* For a function parm of MODE and TYPE, return the starting word in
4147 the parameter area. NWORDS of the parameter area are already used. */
4150 rs6000_parm_start (enum machine_mode mode, tree type, unsigned int nwords)
4153 unsigned int parm_offset;
4155 align = function_arg_boundary (mode, type) / PARM_BOUNDARY - 1;
4156 parm_offset = DEFAULT_ABI == ABI_V4 ? 2 : 6;
4157 return nwords + (-(parm_offset + nwords) & align);
4160 /* Compute the size (in words) of a function argument. */
4162 static unsigned long
4163 rs6000_arg_size (enum machine_mode mode, tree type)
4167 if (mode != BLKmode)
4168 size = GET_MODE_SIZE (mode);
4170 size = int_size_in_bytes (type);
4173 return (size + 3) >> 2;
4175 return (size + 7) >> 3;
4178 /* Use this to flush pending int fields. */
4181 rs6000_darwin64_record_arg_advance_flush (CUMULATIVE_ARGS *cum,
4182 HOST_WIDE_INT bitpos)
4184 unsigned int startbit, endbit;
4185 int intregs, intoffset;
4186 enum machine_mode mode;
4188 if (cum->intoffset == -1)
4191 intoffset = cum->intoffset;
4192 cum->intoffset = -1;
4194 if (intoffset % BITS_PER_WORD != 0)
4196 mode = mode_for_size (BITS_PER_WORD - intoffset % BITS_PER_WORD,
4198 if (mode == BLKmode)
4200 /* We couldn't find an appropriate mode, which happens,
4201 e.g., in packed structs when there are 3 bytes to load.
4202 Back intoffset back to the beginning of the word in this
4204 intoffset = intoffset & -BITS_PER_WORD;
4208 startbit = intoffset & -BITS_PER_WORD;
4209 endbit = (bitpos + BITS_PER_WORD - 1) & -BITS_PER_WORD;
4210 intregs = (endbit - startbit) / BITS_PER_WORD;
4211 cum->words += intregs;
4214 /* The darwin64 ABI calls for us to recurse down through structs,
4215 looking for elements passed in registers. Unfortunately, we have
4216 to track int register count here also because of misalignments
4217 in powerpc alignment mode. */
4220 rs6000_darwin64_record_arg_advance_recurse (CUMULATIVE_ARGS *cum,
4222 HOST_WIDE_INT startbitpos)
4226 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
4227 if (TREE_CODE (f) == FIELD_DECL)
4229 HOST_WIDE_INT bitpos = startbitpos;
4230 tree ftype = TREE_TYPE (f);
4231 enum machine_mode mode = TYPE_MODE (ftype);
4233 if (DECL_SIZE (f) != 0
4234 && host_integerp (bit_position (f), 1))
4235 bitpos += int_bit_position (f);
4237 /* ??? FIXME: else assume zero offset. */
4239 if (TREE_CODE (ftype) == RECORD_TYPE)
4240 rs6000_darwin64_record_arg_advance_recurse (cum, ftype, bitpos);
4241 else if (USE_FP_FOR_ARG_P (cum, mode, ftype))
4243 rs6000_darwin64_record_arg_advance_flush (cum, bitpos);
4244 cum->fregno += (GET_MODE_SIZE (mode) + 7) >> 3;
4245 cum->words += (GET_MODE_SIZE (mode) + 7) >> 3;
4247 else if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, 1))
4249 rs6000_darwin64_record_arg_advance_flush (cum, bitpos);
4253 else if (cum->intoffset == -1)
4254 cum->intoffset = bitpos;
4258 /* Update the data in CUM to advance over an argument
4259 of mode MODE and data type TYPE.
4260 (TYPE is null for libcalls where that information may not be available.)
4262 Note that for args passed by reference, function_arg will be called
4263 with MODE and TYPE set to that of the pointer to the arg, not the arg
4267 function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
4268 tree type, int named, int depth)
4272 /* Only tick off an argument if we're not recursing. */
4274 cum->nargs_prototype--;
4276 if (TARGET_ALTIVEC_ABI
4277 && (ALTIVEC_VECTOR_MODE (mode)
4278 || (type && TREE_CODE (type) == VECTOR_TYPE
4279 && int_size_in_bytes (type) == 16)))
4283 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named))
4286 if (!TARGET_ALTIVEC)
4287 error ("Cannot pass argument in vector register because"
4288 " altivec instructions are disabled, use -maltivec"
4289 " to enable them.");
4291 /* PowerPC64 Linux and AIX allocate GPRs for a vector argument
4292 even if it is going to be passed in a vector register.
4293 Darwin does the same for variable-argument functions. */
4294 if ((DEFAULT_ABI == ABI_AIX && TARGET_64BIT)
4295 || (cum->stdarg && DEFAULT_ABI != ABI_V4))
4305 /* Vector parameters must be 16-byte aligned. This places
4306 them at 2 mod 4 in terms of words in 32-bit mode, since
4307 the parameter save area starts at offset 24 from the
4308 stack. In 64-bit mode, they just have to start on an
4309 even word, since the parameter save area is 16-byte
4310 aligned. Space for GPRs is reserved even if the argument
4311 will be passed in memory. */
4313 align = (2 - cum->words) & 3;
4315 align = cum->words & 1;
4316 cum->words += align + rs6000_arg_size (mode, type);
4318 if (TARGET_DEBUG_ARG)
4320 fprintf (stderr, "function_adv: words = %2d, align=%d, ",
4322 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s\n",
4323 cum->nargs_prototype, cum->prototype,
4324 GET_MODE_NAME (mode));
4328 else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode)
4330 && cum->sysv_gregno <= GP_ARG_MAX_REG)
4333 else if (rs6000_darwin64_abi
4335 && TREE_CODE (type) == RECORD_TYPE
4336 && (size = int_size_in_bytes (type)) > 0)
4338 /* Variable sized types have size == -1 and are
4339 treated as if consisting entirely of ints.
4340 Pad to 16 byte boundary if needed. */
4341 if (TYPE_ALIGN (type) >= 2 * BITS_PER_WORD
4342 && (cum->words % 2) != 0)
4344 /* For varargs, we can just go up by the size of the struct. */
4346 cum->words += (size + 7) / 8;
4349 /* It is tempting to say int register count just goes up by
4350 sizeof(type)/8, but this is wrong in a case such as
4351 { int; double; int; } [powerpc alignment]. We have to
4352 grovel through the fields for these too. */
4354 rs6000_darwin64_record_arg_advance_recurse (cum, type, 0);
4355 rs6000_darwin64_record_arg_advance_flush (cum,
4356 size * BITS_PER_UNIT);
4359 else if (DEFAULT_ABI == ABI_V4)
4361 if (TARGET_HARD_FLOAT && TARGET_FPRS
4362 && (mode == SFmode || mode == DFmode))
4364 if (cum->fregno <= FP_ARG_V4_MAX_REG)
4369 cum->words += cum->words & 1;
4370 cum->words += rs6000_arg_size (mode, type);
4375 int n_words = rs6000_arg_size (mode, type);
4376 int gregno = cum->sysv_gregno;
4378 /* Long long and SPE vectors are put in (r3,r4), (r5,r6),
4379 (r7,r8) or (r9,r10). As does any other 2 word item such
4380 as complex int due to a historical mistake. */
4382 gregno += (1 - gregno) & 1;
4384 /* Multi-reg args are not split between registers and stack. */
4385 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
4387 /* Long long and SPE vectors are aligned on the stack.
4388 So are other 2 word items such as complex int due to
4389 a historical mistake. */
4391 cum->words += cum->words & 1;
4392 cum->words += n_words;
4395 /* Note: continuing to accumulate gregno past when we've started
4396 spilling to the stack indicates the fact that we've started
4397 spilling to the stack to expand_builtin_saveregs. */
4398 cum->sysv_gregno = gregno + n_words;
4401 if (TARGET_DEBUG_ARG)
4403 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
4404 cum->words, cum->fregno);
4405 fprintf (stderr, "gregno = %2d, nargs = %4d, proto = %d, ",
4406 cum->sysv_gregno, cum->nargs_prototype, cum->prototype);
4407 fprintf (stderr, "mode = %4s, named = %d\n",
4408 GET_MODE_NAME (mode), named);
4413 int n_words = rs6000_arg_size (mode, type);
4414 int start_words = cum->words;
4415 int align_words = rs6000_parm_start (mode, type, start_words);
4417 cum->words = align_words + n_words;
4419 if (GET_MODE_CLASS (mode) == MODE_FLOAT
4420 && TARGET_HARD_FLOAT && TARGET_FPRS)
4421 cum->fregno += (GET_MODE_SIZE (mode) + 7) >> 3;
4423 if (TARGET_DEBUG_ARG)
4425 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
4426 cum->words, cum->fregno);
4427 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s, ",
4428 cum->nargs_prototype, cum->prototype, GET_MODE_NAME (mode));
4429 fprintf (stderr, "named = %d, align = %d, depth = %d\n",
4430 named, align_words - start_words, depth);
4436 spe_build_register_parallel (enum machine_mode mode, int gregno)
4443 r1 = gen_rtx_REG (DImode, gregno);
4444 r1 = gen_rtx_EXPR_LIST (VOIDmode, r1, const0_rtx);
4445 return gen_rtx_PARALLEL (mode, gen_rtvec (1, r1));
4448 r1 = gen_rtx_REG (DImode, gregno);
4449 r1 = gen_rtx_EXPR_LIST (VOIDmode, r1, const0_rtx);
4450 r3 = gen_rtx_REG (DImode, gregno + 2);
4451 r3 = gen_rtx_EXPR_LIST (VOIDmode, r3, GEN_INT (8));
4452 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r3));
4459 /* Determine where to put a SIMD argument on the SPE. */
4461 rs6000_spe_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
4464 int gregno = cum->sysv_gregno;
4466 /* On E500 v2, double arithmetic is done on the full 64-bit GPR, but
4467 are passed and returned in a pair of GPRs for ABI compatibility. */
4468 if (TARGET_E500_DOUBLE && (mode == DFmode || mode == DCmode))
4470 int n_words = rs6000_arg_size (mode, type);
4472 /* Doubles go in an odd/even register pair (r5/r6, etc). */
4474 gregno += (1 - gregno) & 1;
4476 /* Multi-reg args are not split between registers and stack. */
4477 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
4480 return spe_build_register_parallel (mode, gregno);
4484 int n_words = rs6000_arg_size (mode, type);
4486 /* SPE vectors are put in odd registers. */
4487 if (n_words == 2 && (gregno & 1) == 0)
4490 if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
4493 enum machine_mode m = SImode;
4495 r1 = gen_rtx_REG (m, gregno);
4496 r1 = gen_rtx_EXPR_LIST (m, r1, const0_rtx);
4497 r2 = gen_rtx_REG (m, gregno + 1);
4498 r2 = gen_rtx_EXPR_LIST (m, r2, GEN_INT (4));
4499 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
4506 if (gregno <= GP_ARG_MAX_REG)
4507 return gen_rtx_REG (mode, gregno);
4513 /* A subroutine of rs6000_darwin64_record_arg. Assign the bits of the
4514 structure between cum->intoffset and bitpos to integer registers. */
4517 rs6000_darwin64_record_arg_flush (CUMULATIVE_ARGS *cum,
4518 HOST_WIDE_INT bitpos, rtx rvec[], int *k)
4520 enum machine_mode mode;
4522 unsigned int startbit, endbit;
4523 int this_regno, intregs, intoffset;
4526 if (cum->intoffset == -1)
4529 intoffset = cum->intoffset;
4530 cum->intoffset = -1;
4532 /* If this is the trailing part of a word, try to only load that
4533 much into the register. Otherwise load the whole register. Note
4534 that in the latter case we may pick up unwanted bits. It's not a
4535 problem at the moment but may wish to revisit. */
4537 if (intoffset % BITS_PER_WORD != 0)
4539 mode = mode_for_size (BITS_PER_WORD - intoffset % BITS_PER_WORD,
4541 if (mode == BLKmode)
4543 /* We couldn't find an appropriate mode, which happens,
4544 e.g., in packed structs when there are 3 bytes to load.
4545 Back intoffset back to the beginning of the word in this
4547 intoffset = intoffset & -BITS_PER_WORD;
4554 startbit = intoffset & -BITS_PER_WORD;
4555 endbit = (bitpos + BITS_PER_WORD - 1) & -BITS_PER_WORD;
4556 intregs = (endbit - startbit) / BITS_PER_WORD;
4557 this_regno = cum->words + intoffset / BITS_PER_WORD;
4559 if (intregs > 0 && intregs > GP_ARG_NUM_REG - this_regno)
4562 intregs = MIN (intregs, GP_ARG_NUM_REG - this_regno);
4566 intoffset /= BITS_PER_UNIT;
4569 regno = GP_ARG_MIN_REG + this_regno;
4570 reg = gen_rtx_REG (mode, regno);
4572 gen_rtx_EXPR_LIST (VOIDmode, reg, GEN_INT (intoffset));
4575 intoffset = (intoffset | (UNITS_PER_WORD-1)) + 1;
4579 while (intregs > 0);
4582 /* Recursive workhorse for the following. */
4585 rs6000_darwin64_record_arg_recurse (CUMULATIVE_ARGS *cum, tree type,
4586 HOST_WIDE_INT startbitpos, rtx rvec[],
4591 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
4592 if (TREE_CODE (f) == FIELD_DECL)
4594 HOST_WIDE_INT bitpos = startbitpos;
4595 tree ftype = TREE_TYPE (f);
4596 enum machine_mode mode = TYPE_MODE (ftype);
4598 if (DECL_SIZE (f) != 0
4599 && host_integerp (bit_position (f), 1))
4600 bitpos += int_bit_position (f);
4602 /* ??? FIXME: else assume zero offset. */
4604 if (TREE_CODE (ftype) == RECORD_TYPE)
4605 rs6000_darwin64_record_arg_recurse (cum, ftype, bitpos, rvec, k);
4606 else if (cum->named && USE_FP_FOR_ARG_P (cum, mode, ftype))
4611 case SCmode: mode = SFmode; break;
4612 case DCmode: mode = DFmode; break;
4613 case TCmode: mode = TFmode; break;
4617 rs6000_darwin64_record_arg_flush (cum, bitpos, rvec, k);
4619 = gen_rtx_EXPR_LIST (VOIDmode,
4620 gen_rtx_REG (mode, cum->fregno++),
4621 GEN_INT (bitpos / BITS_PER_UNIT));
4625 else if (cum->named && USE_ALTIVEC_FOR_ARG_P (cum, mode, ftype, 1))
4627 rs6000_darwin64_record_arg_flush (cum, bitpos, rvec, k);
4629 = gen_rtx_EXPR_LIST (VOIDmode,
4630 gen_rtx_REG (mode, cum->vregno++),
4631 GEN_INT (bitpos / BITS_PER_UNIT));
4633 else if (cum->intoffset == -1)
4634 cum->intoffset = bitpos;
4638 /* For the darwin64 ABI, we want to construct a PARALLEL consisting of
4639 the register(s) to be used for each field and subfield of a struct
4640 being passed by value, along with the offset of where the
4641 register's value may be found in the block. FP fields go in FP
4642 register, vector fields go in vector registers, and everything
4643 else goes in int registers, packed as in memory.
4645 This code is also used for function return values. RETVAL indicates
4646 whether this is the case.
4648 Much of this is taken from the Sparc V9 port, which has a similar
4649 calling convention. */
4652 rs6000_darwin64_record_arg (CUMULATIVE_ARGS *orig_cum, tree type,
4653 int named, bool retval)
4655 rtx rvec[FIRST_PSEUDO_REGISTER];
4656 int k = 1, kbase = 1;
4657 HOST_WIDE_INT typesize = int_size_in_bytes (type);
4658 /* This is a copy; modifications are not visible to our caller. */
4659 CUMULATIVE_ARGS copy_cum = *orig_cum;
4660 CUMULATIVE_ARGS *cum = ©_cum;
4662 /* Pad to 16 byte boundary if needed. */
4663 if (!retval && TYPE_ALIGN (type) >= 2 * BITS_PER_WORD
4664 && (cum->words % 2) != 0)
4671 /* Put entries into rvec[] for individual FP and vector fields, and
4672 for the chunks of memory that go in int regs. Note we start at
4673 element 1; 0 is reserved for an indication of using memory, and
4674 may or may not be filled in below. */
4675 rs6000_darwin64_record_arg_recurse (cum, type, 0, rvec, &k);
4676 rs6000_darwin64_record_arg_flush (cum, typesize * BITS_PER_UNIT, rvec, &k);
4678 /* If any part of the struct went on the stack put all of it there.
4679 This hack is because the generic code for
4680 FUNCTION_ARG_PARTIAL_NREGS cannot handle cases where the register
4681 parts of the struct are not at the beginning. */
4685 return NULL_RTX; /* doesn't go in registers at all */
4687 rvec[0] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
4689 if (k > 1 || cum->use_stack)
4690 return gen_rtx_PARALLEL (BLKmode, gen_rtvec_v (k - kbase, &rvec[kbase]));
4695 /* Determine where to place an argument in 64-bit mode with 32-bit ABI. */
4698 rs6000_mixed_function_arg (enum machine_mode mode, tree type, int align_words)
4702 rtx rvec[GP_ARG_NUM_REG + 1];
4704 if (align_words >= GP_ARG_NUM_REG)
4707 n_units = rs6000_arg_size (mode, type);
4709 /* Optimize the simple case where the arg fits in one gpr, except in
4710 the case of BLKmode due to assign_parms assuming that registers are
4711 BITS_PER_WORD wide. */
4713 || (n_units == 1 && mode != BLKmode))
4714 return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
4717 if (align_words + n_units > GP_ARG_NUM_REG)
4718 /* Not all of the arg fits in gprs. Say that it goes in memory too,
4719 using a magic NULL_RTX component.
4720 FIXME: This is not strictly correct. Only some of the arg
4721 belongs in memory, not all of it. However, there isn't any way
4722 to do this currently, apart from building rtx descriptions for
4723 the pieces of memory we want stored. Due to bugs in the generic
4724 code we can't use the normal function_arg_partial_nregs scheme
4725 with the PARALLEL arg description we emit here.
4726 In any case, the code to store the whole arg to memory is often
4727 more efficient than code to store pieces, and we know that space
4728 is available in the right place for the whole arg. */
4729 /* FIXME: This should be fixed since the conversion to
4730 TARGET_ARG_PARTIAL_BYTES. */
4731 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
4736 rtx r = gen_rtx_REG (SImode, GP_ARG_MIN_REG + align_words);
4737 rtx off = GEN_INT (i++ * 4);
4738 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, off);
4740 while (++align_words < GP_ARG_NUM_REG && --n_units != 0);
4742 return gen_rtx_PARALLEL (mode, gen_rtvec_v (k, rvec));
4745 /* Determine where to put an argument to a function.
4746 Value is zero to push the argument on the stack,
4747 or a hard register in which to store the argument.
4749 MODE is the argument's machine mode.
4750 TYPE is the data type of the argument (as a tree).
4751 This is null for libcalls where that information may
4753 CUM is a variable of type CUMULATIVE_ARGS which gives info about
4754 the preceding args and about the function being called. It is
4755 not modified in this routine.
4756 NAMED is nonzero if this argument is a named parameter
4757 (otherwise it is an extra parameter matching an ellipsis).
4759 On RS/6000 the first eight words of non-FP are normally in registers
4760 and the rest are pushed. Under AIX, the first 13 FP args are in registers.
4761 Under V.4, the first 8 FP args are in registers.
4763 If this is floating-point and no prototype is specified, we use
4764 both an FP and integer register (or possibly FP reg and stack). Library
4765 functions (when CALL_LIBCALL is set) always have the proper types for args,
4766 so we can pass the FP value just in one register. emit_library_function
4767 doesn't support PARALLEL anyway.
4769 Note that for args passed by reference, function_arg will be called
4770 with MODE and TYPE set to that of the pointer to the arg, not the arg
4774 function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
4775 tree type, int named)
4777 enum rs6000_abi abi = DEFAULT_ABI;
4779 /* Return a marker to indicate whether CR1 needs to set or clear the
4780 bit that V.4 uses to say fp args were passed in registers.
4781 Assume that we don't need the marker for software floating point,
4782 or compiler generated library calls. */
4783 if (mode == VOIDmode)
4786 && cum->nargs_prototype < 0
4787 && (cum->call_cookie & CALL_LIBCALL) == 0
4788 && (cum->prototype || TARGET_NO_PROTOTYPE))
4790 /* For the SPE, we need to crxor CR6 always. */
4792 return GEN_INT (cum->call_cookie | CALL_V4_SET_FP_ARGS);
4793 else if (TARGET_HARD_FLOAT && TARGET_FPRS)
4794 return GEN_INT (cum->call_cookie
4795 | ((cum->fregno == FP_ARG_MIN_REG)
4796 ? CALL_V4_SET_FP_ARGS
4797 : CALL_V4_CLEAR_FP_ARGS));
4800 return GEN_INT (cum->call_cookie);
4803 if (rs6000_darwin64_abi && mode == BLKmode
4804 && TREE_CODE (type) == RECORD_TYPE)
4806 rtx rslt = rs6000_darwin64_record_arg (cum, type, named, false);
4807 if (rslt != NULL_RTX)
4809 /* Else fall through to usual handling. */
4812 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named))
4813 if (TARGET_64BIT && ! cum->prototype)
4815 /* Vector parameters get passed in vector register
4816 and also in GPRs or memory, in absence of prototype. */
4819 align_words = (cum->words + 1) & ~1;
4821 if (align_words >= GP_ARG_NUM_REG)
4827 slot = gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
4829 return gen_rtx_PARALLEL (mode,
4831 gen_rtx_EXPR_LIST (VOIDmode,
4833 gen_rtx_EXPR_LIST (VOIDmode,
4834 gen_rtx_REG (mode, cum->vregno),
4838 return gen_rtx_REG (mode, cum->vregno);
4839 else if (TARGET_ALTIVEC_ABI
4840 && (ALTIVEC_VECTOR_MODE (mode)
4841 || (type && TREE_CODE (type) == VECTOR_TYPE
4842 && int_size_in_bytes (type) == 16)))
4844 if (named || abi == ABI_V4)
4848 /* Vector parameters to varargs functions under AIX or Darwin
4849 get passed in memory and possibly also in GPRs. */
4850 int align, align_words, n_words;
4851 enum machine_mode part_mode;
4853 /* Vector parameters must be 16-byte aligned. This places them at
4854 2 mod 4 in terms of words in 32-bit mode, since the parameter
4855 save area starts at offset 24 from the stack. In 64-bit mode,
4856 they just have to start on an even word, since the parameter
4857 save area is 16-byte aligned. */
4859 align = (2 - cum->words) & 3;
4861 align = cum->words & 1;
4862 align_words = cum->words + align;
4864 /* Out of registers? Memory, then. */
4865 if (align_words >= GP_ARG_NUM_REG)
4868 if (TARGET_32BIT && TARGET_POWERPC64)
4869 return rs6000_mixed_function_arg (mode, type, align_words);
4871 /* The vector value goes in GPRs. Only the part of the
4872 value in GPRs is reported here. */
4874 n_words = rs6000_arg_size (mode, type);
4875 if (align_words + n_words > GP_ARG_NUM_REG)
4876 /* Fortunately, there are only two possibilities, the value
4877 is either wholly in GPRs or half in GPRs and half not. */
4880 return gen_rtx_REG (part_mode, GP_ARG_MIN_REG + align_words);
4883 else if (TARGET_SPE_ABI && TARGET_SPE
4884 && (SPE_VECTOR_MODE (mode)
4885 || (TARGET_E500_DOUBLE && (mode == DFmode
4886 || mode == DCmode))))
4887 return rs6000_spe_function_arg (cum, mode, type);
4889 else if (abi == ABI_V4)
4891 if (TARGET_HARD_FLOAT && TARGET_FPRS
4892 && (mode == SFmode || mode == DFmode))
4894 if (cum->fregno <= FP_ARG_V4_MAX_REG)
4895 return gen_rtx_REG (mode, cum->fregno);
4901 int n_words = rs6000_arg_size (mode, type);
4902 int gregno = cum->sysv_gregno;
4904 /* Long long and SPE vectors are put in (r3,r4), (r5,r6),
4905 (r7,r8) or (r9,r10). As does any other 2 word item such
4906 as complex int due to a historical mistake. */
4908 gregno += (1 - gregno) & 1;
4910 /* Multi-reg args are not split between registers and stack. */
4911 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
4914 if (TARGET_32BIT && TARGET_POWERPC64)
4915 return rs6000_mixed_function_arg (mode, type,
4916 gregno - GP_ARG_MIN_REG);
4917 return gen_rtx_REG (mode, gregno);
4922 int align_words = rs6000_parm_start (mode, type, cum->words);
4924 if (USE_FP_FOR_ARG_P (cum, mode, type))
4926 rtx rvec[GP_ARG_NUM_REG + 1];
4930 enum machine_mode fmode = mode;
4931 unsigned long n_fpreg = (GET_MODE_SIZE (mode) + 7) >> 3;
4933 if (cum->fregno + n_fpreg > FP_ARG_MAX_REG + 1)
4935 /* Currently, we only ever need one reg here because complex
4936 doubles are split. */
4937 gcc_assert (cum->fregno == FP_ARG_MAX_REG && fmode == TFmode);
4939 /* Long double split over regs and memory. */
4943 /* Do we also need to pass this arg in the parameter save
4946 && (cum->nargs_prototype <= 0
4947 || (DEFAULT_ABI == ABI_AIX
4949 && align_words >= GP_ARG_NUM_REG)));
4951 if (!needs_psave && mode == fmode)
4952 return gen_rtx_REG (fmode, cum->fregno);
4957 /* Describe the part that goes in gprs or the stack.
4958 This piece must come first, before the fprs. */
4959 if (align_words < GP_ARG_NUM_REG)
4961 unsigned long n_words = rs6000_arg_size (mode, type);
4963 if (align_words + n_words > GP_ARG_NUM_REG
4964 || (TARGET_32BIT && TARGET_POWERPC64))
4966 /* If this is partially on the stack, then we only
4967 include the portion actually in registers here. */
4968 enum machine_mode rmode = TARGET_32BIT ? SImode : DImode;
4971 if (align_words + n_words > GP_ARG_NUM_REG
4972 && (TARGET_32BIT && TARGET_POWERPC64))
4973 /* Not all of the arg fits in gprs. Say that it
4974 goes in memory too, using a magic NULL_RTX
4975 component. Also see comment in
4976 rs6000_mixed_function_arg for why the normal
4977 function_arg_partial_nregs scheme doesn't work
4979 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX,
4983 r = gen_rtx_REG (rmode,
4984 GP_ARG_MIN_REG + align_words);
4985 off = GEN_INT (i++ * GET_MODE_SIZE (rmode));
4986 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, off);
4988 while (++align_words < GP_ARG_NUM_REG && --n_words != 0);
4992 /* The whole arg fits in gprs. */
4993 r = gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
4994 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, const0_rtx);
4998 /* It's entirely in memory. */
4999 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
5002 /* Describe where this piece goes in the fprs. */
5003 r = gen_rtx_REG (fmode, cum->fregno);
5004 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, const0_rtx);
5006 return gen_rtx_PARALLEL (mode, gen_rtvec_v (k, rvec));
5008 else if (align_words < GP_ARG_NUM_REG)
5010 if (TARGET_32BIT && TARGET_POWERPC64)
5011 return rs6000_mixed_function_arg (mode, type, align_words);
5013 if (mode == BLKmode)
5016 return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
5023 /* For an arg passed partly in registers and partly in memory, this is
5024 the number of bytes passed in registers. For args passed entirely in
5025 registers or entirely in memory, zero. When an arg is described by a
5026 PARALLEL, perhaps using more than one register type, this function
5027 returns the number of bytes used by the first element of the PARALLEL. */
5030 rs6000_arg_partial_bytes (CUMULATIVE_ARGS *cum, enum machine_mode mode,
5031 tree type, bool named)
5036 if (DEFAULT_ABI == ABI_V4)
5039 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named)
5040 && cum->nargs_prototype >= 0)
5043 /* In this complicated case we just disable the partial_nregs code. */
5044 if (rs6000_darwin64_abi && mode == BLKmode
5045 && TREE_CODE (type) == RECORD_TYPE
5046 && int_size_in_bytes (type) > 0)
5049 align_words = rs6000_parm_start (mode, type, cum->words);
5051 if (USE_FP_FOR_ARG_P (cum, mode, type)
5052 /* If we are passing this arg in the fixed parameter save area
5053 (gprs or memory) as well as fprs, then this function should
5054 return the number of bytes passed in the parameter save area
5055 rather than bytes passed in fprs. */
5057 && (cum->nargs_prototype <= 0
5058 || (DEFAULT_ABI == ABI_AIX
5060 && align_words >= GP_ARG_NUM_REG))))
5062 if (cum->fregno + ((GET_MODE_SIZE (mode) + 7) >> 3) > FP_ARG_MAX_REG + 1)
5063 ret = (FP_ARG_MAX_REG + 1 - cum->fregno) * 8;
5064 else if (cum->nargs_prototype >= 0)
5068 if (align_words < GP_ARG_NUM_REG
5069 && GP_ARG_NUM_REG < align_words + rs6000_arg_size (mode, type))
5070 ret = (GP_ARG_NUM_REG - align_words) * (TARGET_32BIT ? 4 : 8);
5072 if (ret != 0 && TARGET_DEBUG_ARG)
5073 fprintf (stderr, "rs6000_arg_partial_bytes: %d\n", ret);
5078 /* A C expression that indicates when an argument must be passed by
5079 reference. If nonzero for an argument, a copy of that argument is
5080 made in memory and a pointer to the argument is passed instead of
5081 the argument itself. The pointer is passed in whatever way is
5082 appropriate for passing a pointer to that type.
5084 Under V.4, aggregates and long double are passed by reference.
5086 As an extension to all 32-bit ABIs, AltiVec vectors are passed by
5087 reference unless the AltiVec vector extension ABI is in force.
5089 As an extension to all ABIs, variable sized types are passed by
5093 rs6000_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
5094 enum machine_mode mode, tree type,
5095 bool named ATTRIBUTE_UNUSED)
5097 if (DEFAULT_ABI == ABI_V4 && mode == TFmode)
5099 if (TARGET_DEBUG_ARG)
5100 fprintf (stderr, "function_arg_pass_by_reference: V4 long double\n");
5107 if (DEFAULT_ABI == ABI_V4 && AGGREGATE_TYPE_P (type))
5109 if (TARGET_DEBUG_ARG)
5110 fprintf (stderr, "function_arg_pass_by_reference: V4 aggregate\n");
5114 if (int_size_in_bytes (type) < 0)
5116 if (TARGET_DEBUG_ARG)
5117 fprintf (stderr, "function_arg_pass_by_reference: variable size\n");
5121 /* Allow -maltivec -mabi=no-altivec without warning. Altivec vector
5122 modes only exist for GCC vector types if -maltivec. */
5123 if (TARGET_32BIT && !TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
5125 if (TARGET_DEBUG_ARG)
5126 fprintf (stderr, "function_arg_pass_by_reference: AltiVec\n");
5130 /* Pass synthetic vectors in memory. */
5131 if (TREE_CODE (type) == VECTOR_TYPE
5132 && int_size_in_bytes (type) > (TARGET_ALTIVEC_ABI ? 16 : 8))
5134 static bool warned_for_pass_big_vectors = false;
5135 if (TARGET_DEBUG_ARG)
5136 fprintf (stderr, "function_arg_pass_by_reference: synthetic vector\n");
5137 if (!warned_for_pass_big_vectors)
5139 warning (0, "GCC vector passed by reference: "
5140 "non-standard ABI extension with no compatibility guarantee");
5141 warned_for_pass_big_vectors = true;
5150 rs6000_move_block_from_reg (int regno, rtx x, int nregs)
5153 enum machine_mode reg_mode = TARGET_32BIT ? SImode : DImode;
5158 for (i = 0; i < nregs; i++)
5160 rtx tem = adjust_address_nv (x, reg_mode, i * GET_MODE_SIZE (reg_mode));
5161 if (reload_completed)
5163 if (! strict_memory_address_p (reg_mode, XEXP (tem, 0)))
5166 tem = simplify_gen_subreg (reg_mode, x, BLKmode,
5167 i * GET_MODE_SIZE (reg_mode));
5170 tem = replace_equiv_address (tem, XEXP (tem, 0));
5174 emit_move_insn (tem, gen_rtx_REG (reg_mode, regno + i));
5178 /* Perform any needed actions needed for a function that is receiving a
5179 variable number of arguments.
5183 MODE and TYPE are the mode and type of the current parameter.
5185 PRETEND_SIZE is a variable that should be set to the amount of stack
5186 that must be pushed by the prolog to pretend that our caller pushed
5189 Normally, this macro will push all remaining incoming registers on the
5190 stack and set PRETEND_SIZE to the length of the registers pushed. */
5193 setup_incoming_varargs (CUMULATIVE_ARGS *cum, enum machine_mode mode,
5194 tree type, int *pretend_size ATTRIBUTE_UNUSED,
5197 CUMULATIVE_ARGS next_cum;
5198 int reg_size = TARGET_32BIT ? 4 : 8;
5199 rtx save_area = NULL_RTX, mem;
5200 int first_reg_offset, set;
5202 /* Skip the last named argument. */
5204 function_arg_advance (&next_cum, mode, type, 1, 0);
5206 if (DEFAULT_ABI == ABI_V4)
5209 save_area = plus_constant (virtual_stack_vars_rtx,
5210 - RS6000_VARARGS_SIZE);
5212 first_reg_offset = next_cum.sysv_gregno - GP_ARG_MIN_REG;
5216 first_reg_offset = next_cum.words;
5217 save_area = virtual_incoming_args_rtx;
5219 if (targetm.calls.must_pass_in_stack (mode, type))
5220 first_reg_offset += rs6000_arg_size (TYPE_MODE (type), type);
5223 set = get_varargs_alias_set ();
5224 if (! no_rtl && first_reg_offset < GP_ARG_NUM_REG
5225 && cfun->va_list_gpr_size)
5227 int nregs = GP_ARG_NUM_REG - first_reg_offset;
5229 if (va_list_gpr_counter_field)
5231 /* V4 va_list_gpr_size counts number of registers needed. */
5232 if (nregs > cfun->va_list_gpr_size)
5233 nregs = cfun->va_list_gpr_size;
5237 /* char * va_list instead counts number of bytes needed. */
5238 if (nregs > cfun->va_list_gpr_size / reg_size)
5239 nregs = cfun->va_list_gpr_size / reg_size;
5242 mem = gen_rtx_MEM (BLKmode,
5243 plus_constant (save_area,
5244 first_reg_offset * reg_size)),
5245 set_mem_alias_set (mem, set);
5246 set_mem_align (mem, BITS_PER_WORD);
5248 rs6000_move_block_from_reg (GP_ARG_MIN_REG + first_reg_offset, mem,
5252 /* Save FP registers if needed. */
5253 if (DEFAULT_ABI == ABI_V4
5254 && TARGET_HARD_FLOAT && TARGET_FPRS
5256 && next_cum.fregno <= FP_ARG_V4_MAX_REG
5257 && cfun->va_list_fpr_size)
5259 int fregno = next_cum.fregno, nregs;
5260 rtx cr1 = gen_rtx_REG (CCmode, CR1_REGNO);
5261 rtx lab = gen_label_rtx ();
5262 int off = (GP_ARG_NUM_REG * reg_size) + ((fregno - FP_ARG_MIN_REG) * 8);
5265 (gen_rtx_SET (VOIDmode,
5267 gen_rtx_IF_THEN_ELSE (VOIDmode,
5268 gen_rtx_NE (VOIDmode, cr1,
5270 gen_rtx_LABEL_REF (VOIDmode, lab),
5274 fregno <= FP_ARG_V4_MAX_REG && nregs < cfun->va_list_fpr_size;
5275 fregno++, off += 8, nregs++)
5277 mem = gen_rtx_MEM (DFmode, plus_constant (save_area, off));
5278 set_mem_alias_set (mem, set);
5279 set_mem_align (mem, GET_MODE_ALIGNMENT (DFmode));
5280 emit_move_insn (mem, gen_rtx_REG (DFmode, fregno));
5287 /* Create the va_list data type. */
5290 rs6000_build_builtin_va_list (void)
5292 tree f_gpr, f_fpr, f_res, f_ovf, f_sav, record, type_decl;
5294 /* For AIX, prefer 'char *' because that's what the system
5295 header files like. */
5296 if (DEFAULT_ABI != ABI_V4)
5297 return build_pointer_type (char_type_node);
5299 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
5300 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
5302 f_gpr = build_decl (FIELD_DECL, get_identifier ("gpr"),
5303 unsigned_char_type_node);
5304 f_fpr = build_decl (FIELD_DECL, get_identifier ("fpr"),
5305 unsigned_char_type_node);
5306 /* Give the two bytes of padding a name, so that -Wpadded won't warn on
5308 f_res = build_decl (FIELD_DECL, get_identifier ("reserved"),
5309 short_unsigned_type_node);
5310 f_ovf = build_decl (FIELD_DECL, get_identifier ("overflow_arg_area"),
5312 f_sav = build_decl (FIELD_DECL, get_identifier ("reg_save_area"),
5315 va_list_gpr_counter_field = f_gpr;
5316 va_list_fpr_counter_field = f_fpr;
5318 DECL_FIELD_CONTEXT (f_gpr) = record;
5319 DECL_FIELD_CONTEXT (f_fpr) = record;
5320 DECL_FIELD_CONTEXT (f_res) = record;
5321 DECL_FIELD_CONTEXT (f_ovf) = record;
5322 DECL_FIELD_CONTEXT (f_sav) = record;
5324 TREE_CHAIN (record) = type_decl;
5325 TYPE_NAME (record) = type_decl;
5326 TYPE_FIELDS (record) = f_gpr;
5327 TREE_CHAIN (f_gpr) = f_fpr;
5328 TREE_CHAIN (f_fpr) = f_res;
5329 TREE_CHAIN (f_res) = f_ovf;
5330 TREE_CHAIN (f_ovf) = f_sav;
5332 layout_type (record);
5334 /* The correct type is an array type of one element. */
5335 return build_array_type (record, build_index_type (size_zero_node));
5338 /* Implement va_start. */
5341 rs6000_va_start (tree valist, rtx nextarg)
5343 HOST_WIDE_INT words, n_gpr, n_fpr;
5344 tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
5345 tree gpr, fpr, ovf, sav, t;
5347 /* Only SVR4 needs something special. */
5348 if (DEFAULT_ABI != ABI_V4)
5350 std_expand_builtin_va_start (valist, nextarg);
5354 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
5355 f_fpr = TREE_CHAIN (f_gpr);
5356 f_res = TREE_CHAIN (f_fpr);
5357 f_ovf = TREE_CHAIN (f_res);
5358 f_sav = TREE_CHAIN (f_ovf);
5360 valist = build_va_arg_indirect_ref (valist);
5361 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
5362 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr, NULL_TREE);
5363 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf, NULL_TREE);
5364 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav, NULL_TREE);
5366 /* Count number of gp and fp argument registers used. */
5367 words = current_function_args_info.words;
5368 n_gpr = MIN (current_function_args_info.sysv_gregno - GP_ARG_MIN_REG,
5370 n_fpr = MIN (current_function_args_info.fregno - FP_ARG_MIN_REG,
5373 if (TARGET_DEBUG_ARG)
5374 fprintf (stderr, "va_start: words = "HOST_WIDE_INT_PRINT_DEC", n_gpr = "
5375 HOST_WIDE_INT_PRINT_DEC", n_fpr = "HOST_WIDE_INT_PRINT_DEC"\n",
5376 words, n_gpr, n_fpr);
5378 if (cfun->va_list_gpr_size)
5380 t = build (MODIFY_EXPR, TREE_TYPE (gpr), gpr,
5381 build_int_cst (NULL_TREE, n_gpr));
5382 TREE_SIDE_EFFECTS (t) = 1;
5383 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5386 if (cfun->va_list_fpr_size)
5388 t = build (MODIFY_EXPR, TREE_TYPE (fpr), fpr,
5389 build_int_cst (NULL_TREE, n_fpr));
5390 TREE_SIDE_EFFECTS (t) = 1;
5391 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5394 /* Find the overflow area. */
5395 t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
5397 t = build (PLUS_EXPR, TREE_TYPE (ovf), t,
5398 build_int_cst (NULL_TREE, words * UNITS_PER_WORD));
5399 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
5400 TREE_SIDE_EFFECTS (t) = 1;
5401 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5403 /* If there were no va_arg invocations, don't set up the register
5405 if (!cfun->va_list_gpr_size
5406 && !cfun->va_list_fpr_size
5407 && n_gpr < GP_ARG_NUM_REG
5408 && n_fpr < FP_ARG_V4_MAX_REG)
5411 /* Find the register save area. */
5412 t = make_tree (TREE_TYPE (sav), virtual_stack_vars_rtx);
5413 t = build (PLUS_EXPR, TREE_TYPE (sav), t,
5414 build_int_cst (NULL_TREE, -RS6000_VARARGS_SIZE));
5415 t = build (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
5416 TREE_SIDE_EFFECTS (t) = 1;
5417 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5420 /* Implement va_arg. */
5423 rs6000_gimplify_va_arg (tree valist, tree type, tree *pre_p, tree *post_p)
5425 tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
5426 tree gpr, fpr, ovf, sav, reg, t, u;
5427 int size, rsize, n_reg, sav_ofs, sav_scale;
5428 tree lab_false, lab_over, addr;
5430 tree ptrtype = build_pointer_type (type);
5432 if (pass_by_reference (NULL, TYPE_MODE (type), type, false))
5434 t = rs6000_gimplify_va_arg (valist, ptrtype, pre_p, post_p);
5435 return build_va_arg_indirect_ref (t);
5438 if (DEFAULT_ABI != ABI_V4)
5440 if (targetm.calls.split_complex_arg && TREE_CODE (type) == COMPLEX_TYPE)
5442 tree elem_type = TREE_TYPE (type);
5443 enum machine_mode elem_mode = TYPE_MODE (elem_type);
5444 int elem_size = GET_MODE_SIZE (elem_mode);
5446 if (elem_size < UNITS_PER_WORD)
5448 tree real_part, imag_part;
5449 tree post = NULL_TREE;
5451 real_part = rs6000_gimplify_va_arg (valist, elem_type, pre_p,
5453 /* Copy the value into a temporary, lest the formal temporary
5454 be reused out from under us. */
5455 real_part = get_initialized_tmp_var (real_part, pre_p, &post);
5456 append_to_statement_list (post, pre_p);
5458 imag_part = rs6000_gimplify_va_arg (valist, elem_type, pre_p,
5461 return build (COMPLEX_EXPR, type, real_part, imag_part);
5465 return std_gimplify_va_arg_expr (valist, type, pre_p, post_p);
5468 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
5469 f_fpr = TREE_CHAIN (f_gpr);
5470 f_res = TREE_CHAIN (f_fpr);
5471 f_ovf = TREE_CHAIN (f_res);
5472 f_sav = TREE_CHAIN (f_ovf);
5474 valist = build_va_arg_indirect_ref (valist);
5475 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
5476 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr, NULL_TREE);
5477 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf, NULL_TREE);
5478 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav, NULL_TREE);
5480 size = int_size_in_bytes (type);
5481 rsize = (size + 3) / 4;
5484 if (TARGET_HARD_FLOAT && TARGET_FPRS
5485 && (TYPE_MODE (type) == SFmode || TYPE_MODE (type) == DFmode))
5487 /* FP args go in FP registers, if present. */
5492 if (TYPE_MODE (type) == DFmode)
5497 /* Otherwise into GP registers. */
5506 /* Pull the value out of the saved registers.... */
5509 addr = create_tmp_var (ptr_type_node, "addr");
5510 DECL_POINTER_ALIAS_SET (addr) = get_varargs_alias_set ();
5512 /* AltiVec vectors never go in registers when -mabi=altivec. */
5513 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (TYPE_MODE (type)))
5517 lab_false = create_artificial_label ();
5518 lab_over = create_artificial_label ();
5520 /* Long long and SPE vectors are aligned in the registers.
5521 As are any other 2 gpr item such as complex int due to a
5522 historical mistake. */
5526 u = build2 (BIT_AND_EXPR, TREE_TYPE (reg), reg,
5527 size_int (n_reg - 1));
5528 u = build2 (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg, u);
5531 t = fold_convert (TREE_TYPE (reg), size_int (8 - n_reg + 1));
5532 t = build2 (GE_EXPR, boolean_type_node, u, t);
5533 u = build1 (GOTO_EXPR, void_type_node, lab_false);
5534 t = build3 (COND_EXPR, void_type_node, t, u, NULL_TREE);
5535 gimplify_and_add (t, pre_p);
5539 t = build2 (PLUS_EXPR, ptr_type_node, sav, size_int (sav_ofs));
5541 u = build2 (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg, size_int (n_reg));
5542 u = build1 (CONVERT_EXPR, integer_type_node, u);
5543 u = build2 (MULT_EXPR, integer_type_node, u, size_int (sav_scale));
5544 t = build2 (PLUS_EXPR, ptr_type_node, t, u);
5546 t = build2 (MODIFY_EXPR, void_type_node, addr, t);
5547 gimplify_and_add (t, pre_p);
5549 t = build1 (GOTO_EXPR, void_type_node, lab_over);
5550 gimplify_and_add (t, pre_p);
5552 t = build1 (LABEL_EXPR, void_type_node, lab_false);
5553 append_to_statement_list (t, pre_p);
5557 /* Ensure that we don't find any more args in regs.
5558 Alignment has taken care of the n_reg == 2 case. */
5559 t = build (MODIFY_EXPR, TREE_TYPE (reg), reg, size_int (8));
5560 gimplify_and_add (t, pre_p);
5564 /* ... otherwise out of the overflow area. */
5566 /* Care for on-stack alignment if needed. */
5570 t = build2 (PLUS_EXPR, TREE_TYPE (t), t, size_int (align - 1));
5571 t = build2 (BIT_AND_EXPR, TREE_TYPE (t), t,
5572 build_int_cst (NULL_TREE, -align));
5574 gimplify_expr (&t, pre_p, NULL, is_gimple_val, fb_rvalue);
5576 u = build2 (MODIFY_EXPR, void_type_node, addr, t);
5577 gimplify_and_add (u, pre_p);
5579 t = build2 (PLUS_EXPR, TREE_TYPE (t), t, size_int (size));
5580 t = build2 (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
5581 gimplify_and_add (t, pre_p);
5585 t = build1 (LABEL_EXPR, void_type_node, lab_over);
5586 append_to_statement_list (t, pre_p);
5589 addr = fold_convert (ptrtype, addr);
5590 return build_va_arg_indirect_ref (addr);
5596 def_builtin (int mask, const char *name, tree type, int code)
5598 if (mask & target_flags)
5600 if (rs6000_builtin_decls[code])
5603 rs6000_builtin_decls[code] =
5604 lang_hooks.builtin_function (name, type, code, BUILT_IN_MD,
5609 /* Simple ternary operations: VECd = foo (VECa, VECb, VECc). */
5611 static const struct builtin_description bdesc_3arg[] =
5613 { MASK_ALTIVEC, CODE_FOR_altivec_vmaddfp, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP },
5614 { MASK_ALTIVEC, CODE_FOR_altivec_vmhaddshs, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS },
5615 { MASK_ALTIVEC, CODE_FOR_altivec_vmhraddshs, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS },
5616 { MASK_ALTIVEC, CODE_FOR_altivec_vmladduhm, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM},
5617 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumubm, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM },
5618 { MASK_ALTIVEC, CODE_FOR_altivec_vmsummbm, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM },
5619 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhm, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM },
5620 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshm, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM },
5621 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhs, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS },
5622 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshs, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS },
5623 { MASK_ALTIVEC, CODE_FOR_altivec_vnmsubfp, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP },
5624 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v4sf, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF },
5625 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v4si, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI },
5626 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v8hi, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI },
5627 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v16qi, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI },
5628 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v4sf, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF },
5629 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v4si, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI },
5630 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v8hi, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI },
5631 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v16qi, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI },
5632 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v16qi, "__builtin_altivec_vsldoi_16qi", ALTIVEC_BUILTIN_VSLDOI_16QI },
5633 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v8hi, "__builtin_altivec_vsldoi_8hi", ALTIVEC_BUILTIN_VSLDOI_8HI },
5634 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v4si, "__builtin_altivec_vsldoi_4si", ALTIVEC_BUILTIN_VSLDOI_4SI },
5635 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v4sf, "__builtin_altivec_vsldoi_4sf", ALTIVEC_BUILTIN_VSLDOI_4SF },
5637 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_madd", ALTIVEC_BUILTIN_VEC_MADD },
5638 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_madds", ALTIVEC_BUILTIN_VEC_MADDS },
5639 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mladd", ALTIVEC_BUILTIN_VEC_MLADD },
5640 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mradds", ALTIVEC_BUILTIN_VEC_MRADDS },
5641 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_msum", ALTIVEC_BUILTIN_VEC_MSUM },
5642 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumshm", ALTIVEC_BUILTIN_VEC_VMSUMSHM },
5643 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumuhm", ALTIVEC_BUILTIN_VEC_VMSUMUHM },
5644 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsummbm", ALTIVEC_BUILTIN_VEC_VMSUMMBM },
5645 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumubm", ALTIVEC_BUILTIN_VEC_VMSUMUBM },
5646 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_msums", ALTIVEC_BUILTIN_VEC_MSUMS },
5647 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumshs", ALTIVEC_BUILTIN_VEC_VMSUMSHS },
5648 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumuhs", ALTIVEC_BUILTIN_VEC_VMSUMUHS },
5649 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_nmsub", ALTIVEC_BUILTIN_VEC_NMSUB },
5650 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_perm", ALTIVEC_BUILTIN_VEC_PERM },
5651 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sel", ALTIVEC_BUILTIN_VEC_SEL },
5654 /* DST operations: void foo (void *, const int, const char). */
5656 static const struct builtin_description bdesc_dst[] =
5658 { MASK_ALTIVEC, CODE_FOR_altivec_dst, "__builtin_altivec_dst", ALTIVEC_BUILTIN_DST },
5659 { MASK_ALTIVEC, CODE_FOR_altivec_dstt, "__builtin_altivec_dstt", ALTIVEC_BUILTIN_DSTT },
5660 { MASK_ALTIVEC, CODE_FOR_altivec_dstst, "__builtin_altivec_dstst", ALTIVEC_BUILTIN_DSTST },
5661 { MASK_ALTIVEC, CODE_FOR_altivec_dststt, "__builtin_altivec_dststt", ALTIVEC_BUILTIN_DSTSTT },
5663 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dst", ALTIVEC_BUILTIN_VEC_DST },
5664 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dstt", ALTIVEC_BUILTIN_VEC_DSTT },
5665 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dstst", ALTIVEC_BUILTIN_VEC_DSTST },
5666 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dststt", ALTIVEC_BUILTIN_VEC_DSTSTT }
5669 /* Simple binary operations: VECc = foo (VECa, VECb). */
5671 static struct builtin_description bdesc_2arg[] =
5673 { MASK_ALTIVEC, CODE_FOR_addv16qi3, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM },
5674 { MASK_ALTIVEC, CODE_FOR_addv8hi3, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM },
5675 { MASK_ALTIVEC, CODE_FOR_addv4si3, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM },
5676 { MASK_ALTIVEC, CODE_FOR_addv4sf3, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP },
5677 { MASK_ALTIVEC, CODE_FOR_altivec_vaddcuw, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW },
5678 { MASK_ALTIVEC, CODE_FOR_altivec_vaddubs, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS },
5679 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsbs, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS },
5680 { MASK_ALTIVEC, CODE_FOR_altivec_vadduhs, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS },
5681 { MASK_ALTIVEC, CODE_FOR_altivec_vaddshs, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS },
5682 { MASK_ALTIVEC, CODE_FOR_altivec_vadduws, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS },
5683 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsws, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS },
5684 { MASK_ALTIVEC, CODE_FOR_andv4si3, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND },
5685 { MASK_ALTIVEC, CODE_FOR_andcv4si3, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC },
5686 { MASK_ALTIVEC, CODE_FOR_altivec_vavgub, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB },
5687 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsb, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB },
5688 { MASK_ALTIVEC, CODE_FOR_altivec_vavguh, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH },
5689 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsh, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH },
5690 { MASK_ALTIVEC, CODE_FOR_altivec_vavguw, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW },
5691 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsw, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW },
5692 { MASK_ALTIVEC, CODE_FOR_altivec_vcfux, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX },
5693 { MASK_ALTIVEC, CODE_FOR_altivec_vcfsx, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX },
5694 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpbfp, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP },
5695 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequb, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB },
5696 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequh, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH },
5697 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequw, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW },
5698 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpeqfp, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP },
5699 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgefp, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP },
5700 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtub, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB },
5701 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsb, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB },
5702 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuh, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH },
5703 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsh, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH },
5704 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuw, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW },
5705 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsw, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW },
5706 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtfp, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP },
5707 { MASK_ALTIVEC, CODE_FOR_altivec_vctsxs, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS },
5708 { MASK_ALTIVEC, CODE_FOR_altivec_vctuxs, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS },
5709 { MASK_ALTIVEC, CODE_FOR_umaxv16qi3, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB },
5710 { MASK_ALTIVEC, CODE_FOR_smaxv16qi3, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB },
5711 { MASK_ALTIVEC, CODE_FOR_umaxv8hi3, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH },
5712 { MASK_ALTIVEC, CODE_FOR_smaxv8hi3, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH },
5713 { MASK_ALTIVEC, CODE_FOR_umaxv4si3, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW },
5714 { MASK_ALTIVEC, CODE_FOR_smaxv4si3, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW },
5715 { MASK_ALTIVEC, CODE_FOR_smaxv4sf3, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP },
5716 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghb, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB },
5717 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghh, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH },
5718 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghw, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW },
5719 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglb, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB },
5720 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglh, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH },
5721 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglw, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW },
5722 { MASK_ALTIVEC, CODE_FOR_uminv16qi3, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB },
5723 { MASK_ALTIVEC, CODE_FOR_sminv16qi3, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB },
5724 { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH },
5725 { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH },
5726 { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW },
5727 { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW },
5728 { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP },
5729 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleub, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB },
5730 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesb, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB },
5731 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleuh, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH },
5732 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesh, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH },
5733 { MASK_ALTIVEC, CODE_FOR_altivec_vmuloub, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB },
5734 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosb, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB },
5735 { MASK_ALTIVEC, CODE_FOR_altivec_vmulouh, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH },
5736 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosh, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH },
5737 { MASK_ALTIVEC, CODE_FOR_altivec_norv4si3, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR },
5738 { MASK_ALTIVEC, CODE_FOR_iorv4si3, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR },
5739 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhum, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM },
5740 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwum, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM },
5741 { MASK_ALTIVEC, CODE_FOR_altivec_vpkpx, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX },
5742 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhss, "__builtin_altivec_vpkuhss", ALTIVEC_BUILTIN_VPKUHSS },
5743 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshss, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS },
5744 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwss, "__builtin_altivec_vpkuwss", ALTIVEC_BUILTIN_VPKUWSS },
5745 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswss, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS },
5746 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhus, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS },
5747 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshus, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS },
5748 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwus, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS },
5749 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswus, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS },
5750 { MASK_ALTIVEC, CODE_FOR_altivec_vrlb, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB },
5751 { MASK_ALTIVEC, CODE_FOR_altivec_vrlh, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH },
5752 { MASK_ALTIVEC, CODE_FOR_altivec_vrlw, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW },
5753 { MASK_ALTIVEC, CODE_FOR_altivec_vslb, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB },
5754 { MASK_ALTIVEC, CODE_FOR_altivec_vslh, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH },
5755 { MASK_ALTIVEC, CODE_FOR_altivec_vslw, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW },
5756 { MASK_ALTIVEC, CODE_FOR_altivec_vsl, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL },
5757 { MASK_ALTIVEC, CODE_FOR_altivec_vslo, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO },
5758 { MASK_ALTIVEC, CODE_FOR_altivec_vspltb, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB },
5759 { MASK_ALTIVEC, CODE_FOR_altivec_vsplth, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH },
5760 { MASK_ALTIVEC, CODE_FOR_altivec_vspltw, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW },
5761 { MASK_ALTIVEC, CODE_FOR_lshrv16qi3, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB },
5762 { MASK_ALTIVEC, CODE_FOR_lshrv8hi3, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH },
5763 { MASK_ALTIVEC, CODE_FOR_lshrv4si3, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW },
5764 { MASK_ALTIVEC, CODE_FOR_ashrv16qi3, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB },
5765 { MASK_ALTIVEC, CODE_FOR_ashrv8hi3, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH },
5766 { MASK_ALTIVEC, CODE_FOR_ashrv4si3, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW },
5767 { MASK_ALTIVEC, CODE_FOR_altivec_vsr, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR },
5768 { MASK_ALTIVEC, CODE_FOR_altivec_vsro, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO },
5769 { MASK_ALTIVEC, CODE_FOR_subv16qi3, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM },
5770 { MASK_ALTIVEC, CODE_FOR_subv8hi3, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM },
5771 { MASK_ALTIVEC, CODE_FOR_subv4si3, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM },
5772 { MASK_ALTIVEC, CODE_FOR_subv4sf3, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP },
5773 { MASK_ALTIVEC, CODE_FOR_altivec_vsubcuw, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW },
5774 { MASK_ALTIVEC, CODE_FOR_altivec_vsububs, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS },
5775 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsbs, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS },
5776 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuhs, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS },
5777 { MASK_ALTIVEC, CODE_FOR_altivec_vsubshs, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS },
5778 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuws, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS },
5779 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsws, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS },
5780 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4ubs, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS },
5781 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4sbs, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS },
5782 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4shs, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS },
5783 { MASK_ALTIVEC, CODE_FOR_altivec_vsum2sws, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS },
5784 { MASK_ALTIVEC, CODE_FOR_altivec_vsumsws, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS },
5785 { MASK_ALTIVEC, CODE_FOR_xorv4si3, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR },
5787 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_add", ALTIVEC_BUILTIN_VEC_ADD },
5788 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddfp", ALTIVEC_BUILTIN_VEC_VADDFP },
5789 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduwm", ALTIVEC_BUILTIN_VEC_VADDUWM },
5790 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduhm", ALTIVEC_BUILTIN_VEC_VADDUHM },
5791 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddubm", ALTIVEC_BUILTIN_VEC_VADDUBM },
5792 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_addc", ALTIVEC_BUILTIN_VEC_ADDC },
5793 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_adds", ALTIVEC_BUILTIN_VEC_ADDS },
5794 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddsws", ALTIVEC_BUILTIN_VEC_VADDSWS },
5795 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduws", ALTIVEC_BUILTIN_VEC_VADDUWS },
5796 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddshs", ALTIVEC_BUILTIN_VEC_VADDSHS },
5797 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduhs", ALTIVEC_BUILTIN_VEC_VADDUHS },
5798 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddsbs", ALTIVEC_BUILTIN_VEC_VADDSBS },
5799 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddubs", ALTIVEC_BUILTIN_VEC_VADDUBS },
5800 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_and", ALTIVEC_BUILTIN_VEC_AND },
5801 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_andc", ALTIVEC_BUILTIN_VEC_ANDC },
5802 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_avg", ALTIVEC_BUILTIN_VEC_AVG },
5803 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgsw", ALTIVEC_BUILTIN_VEC_VAVGSW },
5804 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavguw", ALTIVEC_BUILTIN_VEC_VAVGUW },
5805 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgsh", ALTIVEC_BUILTIN_VEC_VAVGSH },
5806 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavguh", ALTIVEC_BUILTIN_VEC_VAVGUH },
5807 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgsb", ALTIVEC_BUILTIN_VEC_VAVGSB },
5808 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgub", ALTIVEC_BUILTIN_VEC_VAVGUB },
5809 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpb", ALTIVEC_BUILTIN_VEC_CMPB },
5810 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpeq", ALTIVEC_BUILTIN_VEC_CMPEQ },
5811 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpeqfp", ALTIVEC_BUILTIN_VEC_VCMPEQFP },
5812 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpequw", ALTIVEC_BUILTIN_VEC_VCMPEQUW },
5813 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpequh", ALTIVEC_BUILTIN_VEC_VCMPEQUH },
5814 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpequb", ALTIVEC_BUILTIN_VEC_VCMPEQUB },
5815 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpge", ALTIVEC_BUILTIN_VEC_CMPGE },
5816 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpgt", ALTIVEC_BUILTIN_VEC_CMPGT },
5817 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtfp", ALTIVEC_BUILTIN_VEC_VCMPGTFP },
5818 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtsw", ALTIVEC_BUILTIN_VEC_VCMPGTSW },
5819 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtuw", ALTIVEC_BUILTIN_VEC_VCMPGTUW },
5820 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtsh", ALTIVEC_BUILTIN_VEC_VCMPGTSH },
5821 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtuh", ALTIVEC_BUILTIN_VEC_VCMPGTUH },
5822 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtsb", ALTIVEC_BUILTIN_VEC_VCMPGTSB },
5823 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtub", ALTIVEC_BUILTIN_VEC_VCMPGTUB },
5824 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmple", ALTIVEC_BUILTIN_VEC_CMPLE },
5825 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmplt", ALTIVEC_BUILTIN_VEC_CMPLT },
5826 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_max", ALTIVEC_BUILTIN_VEC_MAX },
5827 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxfp", ALTIVEC_BUILTIN_VEC_VMAXFP },
5828 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxsw", ALTIVEC_BUILTIN_VEC_VMAXSW },
5829 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxuw", ALTIVEC_BUILTIN_VEC_VMAXUW },
5830 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxsh", ALTIVEC_BUILTIN_VEC_VMAXSH },
5831 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxuh", ALTIVEC_BUILTIN_VEC_VMAXUH },
5832 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxsb", ALTIVEC_BUILTIN_VEC_VMAXSB },
5833 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxub", ALTIVEC_BUILTIN_VEC_VMAXUB },
5834 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mergeh", ALTIVEC_BUILTIN_VEC_MERGEH },
5835 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrghw", ALTIVEC_BUILTIN_VEC_VMRGHW },
5836 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrghh", ALTIVEC_BUILTIN_VEC_VMRGHH },
5837 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrghb", ALTIVEC_BUILTIN_VEC_VMRGHB },
5838 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mergel", ALTIVEC_BUILTIN_VEC_MERGEL },
5839 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrglw", ALTIVEC_BUILTIN_VEC_VMRGLW },
5840 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrglh", ALTIVEC_BUILTIN_VEC_VMRGLH },
5841 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrglb", ALTIVEC_BUILTIN_VEC_VMRGLB },
5842 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_min", ALTIVEC_BUILTIN_VEC_MIN },
5843 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminfp", ALTIVEC_BUILTIN_VEC_VMINFP },
5844 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminsw", ALTIVEC_BUILTIN_VEC_VMINSW },
5845 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminuw", ALTIVEC_BUILTIN_VEC_VMINUW },
5846 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminsh", ALTIVEC_BUILTIN_VEC_VMINSH },
5847 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminuh", ALTIVEC_BUILTIN_VEC_VMINUH },
5848 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminsb", ALTIVEC_BUILTIN_VEC_VMINSB },
5849 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminub", ALTIVEC_BUILTIN_VEC_VMINUB },
5850 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mule", ALTIVEC_BUILTIN_VEC_MULE },
5851 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmuleub", ALTIVEC_BUILTIN_VEC_VMULEUB },
5852 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulesb", ALTIVEC_BUILTIN_VEC_VMULESB },
5853 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmuleuh", ALTIVEC_BUILTIN_VEC_VMULEUH },
5854 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulesh", ALTIVEC_BUILTIN_VEC_VMULESH },
5855 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mulo", ALTIVEC_BUILTIN_VEC_MULO },
5856 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulosh", ALTIVEC_BUILTIN_VEC_VMULOSH },
5857 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulouh", ALTIVEC_BUILTIN_VEC_VMULOUH },
5858 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulosb", ALTIVEC_BUILTIN_VEC_VMULOSB },
5859 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmuloub", ALTIVEC_BUILTIN_VEC_VMULOUB },
5860 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_nor", ALTIVEC_BUILTIN_VEC_NOR },
5861 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_or", ALTIVEC_BUILTIN_VEC_OR },
5862 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_pack", ALTIVEC_BUILTIN_VEC_PACK },
5863 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuwum", ALTIVEC_BUILTIN_VEC_VPKUWUM },
5864 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuhum", ALTIVEC_BUILTIN_VEC_VPKUHUM },
5865 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_packpx", ALTIVEC_BUILTIN_VEC_PACKPX },
5866 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_packs", ALTIVEC_BUILTIN_VEC_PACKS },
5867 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkswss", ALTIVEC_BUILTIN_VEC_VPKSWSS },
5868 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuwus", ALTIVEC_BUILTIN_VEC_VPKUWUS },
5869 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkshss", ALTIVEC_BUILTIN_VEC_VPKSHSS },
5870 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuhus", ALTIVEC_BUILTIN_VEC_VPKUHUS },
5871 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_packsu", ALTIVEC_BUILTIN_VEC_PACKSU },
5872 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkswus", ALTIVEC_BUILTIN_VEC_VPKSWUS },
5873 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkshus", ALTIVEC_BUILTIN_VEC_VPKSHUS },
5874 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_rl", ALTIVEC_BUILTIN_VEC_RL },
5875 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vrlw", ALTIVEC_BUILTIN_VEC_VRLW },
5876 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vrlh", ALTIVEC_BUILTIN_VEC_VRLH },
5877 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vrlb", ALTIVEC_BUILTIN_VEC_VRLB },
5878 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sl", ALTIVEC_BUILTIN_VEC_SL },
5879 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vslw", ALTIVEC_BUILTIN_VEC_VSLW },
5880 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vslh", ALTIVEC_BUILTIN_VEC_VSLH },
5881 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vslb", ALTIVEC_BUILTIN_VEC_VSLB },
5882 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sll", ALTIVEC_BUILTIN_VEC_SLL },
5883 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_slo", ALTIVEC_BUILTIN_VEC_SLO },
5884 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sr", ALTIVEC_BUILTIN_VEC_SR },
5885 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrw", ALTIVEC_BUILTIN_VEC_VSRW },
5886 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrh", ALTIVEC_BUILTIN_VEC_VSRH },
5887 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrb", ALTIVEC_BUILTIN_VEC_VSRB },
5888 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sra", ALTIVEC_BUILTIN_VEC_SRA },
5889 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsraw", ALTIVEC_BUILTIN_VEC_VSRAW },
5890 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrah", ALTIVEC_BUILTIN_VEC_VSRAH },
5891 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrab", ALTIVEC_BUILTIN_VEC_VSRAB },
5892 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_srl", ALTIVEC_BUILTIN_VEC_SRL },
5893 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sro", ALTIVEC_BUILTIN_VEC_SRO },
5894 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sub", ALTIVEC_BUILTIN_VEC_SUB },
5895 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubfp", ALTIVEC_BUILTIN_VEC_VSUBFP },
5896 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuwm", ALTIVEC_BUILTIN_VEC_VSUBUWM },
5897 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuhm", ALTIVEC_BUILTIN_VEC_VSUBUHM },
5898 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsububm", ALTIVEC_BUILTIN_VEC_VSUBUBM },
5899 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_subc", ALTIVEC_BUILTIN_VEC_SUBC },
5900 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_subs", ALTIVEC_BUILTIN_VEC_SUBS },
5901 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubsws", ALTIVEC_BUILTIN_VEC_VSUBSWS },
5902 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuws", ALTIVEC_BUILTIN_VEC_VSUBUWS },
5903 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubshs", ALTIVEC_BUILTIN_VEC_VSUBSHS },
5904 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuhs", ALTIVEC_BUILTIN_VEC_VSUBUHS },
5905 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubsbs", ALTIVEC_BUILTIN_VEC_VSUBSBS },
5906 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsububs", ALTIVEC_BUILTIN_VEC_VSUBUBS },
5907 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sum4s", ALTIVEC_BUILTIN_VEC_SUM4S },
5908 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsum4shs", ALTIVEC_BUILTIN_VEC_VSUM4SHS },
5909 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsum4sbs", ALTIVEC_BUILTIN_VEC_VSUM4SBS },
5910 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsum4ubs", ALTIVEC_BUILTIN_VEC_VSUM4UBS },
5911 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sum2s", ALTIVEC_BUILTIN_VEC_SUM2S },
5912 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sums", ALTIVEC_BUILTIN_VEC_SUMS },
5913 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_xor", ALTIVEC_BUILTIN_VEC_XOR },
5915 /* Place holder, leave as first spe builtin. */
5916 { 0, CODE_FOR_spe_evaddw, "__builtin_spe_evaddw", SPE_BUILTIN_EVADDW },
5917 { 0, CODE_FOR_spe_evand, "__builtin_spe_evand", SPE_BUILTIN_EVAND },
5918 { 0, CODE_FOR_spe_evandc, "__builtin_spe_evandc", SPE_BUILTIN_EVANDC },
5919 { 0, CODE_FOR_spe_evdivws, "__builtin_spe_evdivws", SPE_BUILTIN_EVDIVWS },
5920 { 0, CODE_FOR_spe_evdivwu, "__builtin_spe_evdivwu", SPE_BUILTIN_EVDIVWU },
5921 { 0, CODE_FOR_spe_eveqv, "__builtin_spe_eveqv", SPE_BUILTIN_EVEQV },
5922 { 0, CODE_FOR_spe_evfsadd, "__builtin_spe_evfsadd", SPE_BUILTIN_EVFSADD },
5923 { 0, CODE_FOR_spe_evfsdiv, "__builtin_spe_evfsdiv", SPE_BUILTIN_EVFSDIV },
5924 { 0, CODE_FOR_spe_evfsmul, "__builtin_spe_evfsmul", SPE_BUILTIN_EVFSMUL },
5925 { 0, CODE_FOR_spe_evfssub, "__builtin_spe_evfssub", SPE_BUILTIN_EVFSSUB },
5926 { 0, CODE_FOR_spe_evmergehi, "__builtin_spe_evmergehi", SPE_BUILTIN_EVMERGEHI },
5927 { 0, CODE_FOR_spe_evmergehilo, "__builtin_spe_evmergehilo", SPE_BUILTIN_EVMERGEHILO },
5928 { 0, CODE_FOR_spe_evmergelo, "__builtin_spe_evmergelo", SPE_BUILTIN_EVMERGELO },
5929 { 0, CODE_FOR_spe_evmergelohi, "__builtin_spe_evmergelohi", SPE_BUILTIN_EVMERGELOHI },
5930 { 0, CODE_FOR_spe_evmhegsmfaa, "__builtin_spe_evmhegsmfaa", SPE_BUILTIN_EVMHEGSMFAA },
5931 { 0, CODE_FOR_spe_evmhegsmfan, "__builtin_spe_evmhegsmfan", SPE_BUILTIN_EVMHEGSMFAN },
5932 { 0, CODE_FOR_spe_evmhegsmiaa, "__builtin_spe_evmhegsmiaa", SPE_BUILTIN_EVMHEGSMIAA },
5933 { 0, CODE_FOR_spe_evmhegsmian, "__builtin_spe_evmhegsmian", SPE_BUILTIN_EVMHEGSMIAN },
5934 { 0, CODE_FOR_spe_evmhegumiaa, "__builtin_spe_evmhegumiaa", SPE_BUILTIN_EVMHEGUMIAA },
5935 { 0, CODE_FOR_spe_evmhegumian, "__builtin_spe_evmhegumian", SPE_BUILTIN_EVMHEGUMIAN },
5936 { 0, CODE_FOR_spe_evmhesmf, "__builtin_spe_evmhesmf", SPE_BUILTIN_EVMHESMF },
5937 { 0, CODE_FOR_spe_evmhesmfa, "__builtin_spe_evmhesmfa", SPE_BUILTIN_EVMHESMFA },
5938 { 0, CODE_FOR_spe_evmhesmfaaw, "__builtin_spe_evmhesmfaaw", SPE_BUILTIN_EVMHESMFAAW },
5939 { 0, CODE_FOR_spe_evmhesmfanw, "__builtin_spe_evmhesmfanw", SPE_BUILTIN_EVMHESMFANW },
5940 { 0, CODE_FOR_spe_evmhesmi, "__builtin_spe_evmhesmi", SPE_BUILTIN_EVMHESMI },
5941 { 0, CODE_FOR_spe_evmhesmia, "__builtin_spe_evmhesmia", SPE_BUILTIN_EVMHESMIA },
5942 { 0, CODE_FOR_spe_evmhesmiaaw, "__builtin_spe_evmhesmiaaw", SPE_BUILTIN_EVMHESMIAAW },
5943 { 0, CODE_FOR_spe_evmhesmianw, "__builtin_spe_evmhesmianw", SPE_BUILTIN_EVMHESMIANW },
5944 { 0, CODE_FOR_spe_evmhessf, "__builtin_spe_evmhessf", SPE_BUILTIN_EVMHESSF },
5945 { 0, CODE_FOR_spe_evmhessfa, "__builtin_spe_evmhessfa", SPE_BUILTIN_EVMHESSFA },
5946 { 0, CODE_FOR_spe_evmhessfaaw, "__builtin_spe_evmhessfaaw", SPE_BUILTIN_EVMHESSFAAW },
5947 { 0, CODE_FOR_spe_evmhessfanw, "__builtin_spe_evmhessfanw", SPE_BUILTIN_EVMHESSFANW },
5948 { 0, CODE_FOR_spe_evmhessiaaw, "__builtin_spe_evmhessiaaw", SPE_BUILTIN_EVMHESSIAAW },
5949 { 0, CODE_FOR_spe_evmhessianw, "__builtin_spe_evmhessianw", SPE_BUILTIN_EVMHESSIANW },
5950 { 0, CODE_FOR_spe_evmheumi, "__builtin_spe_evmheumi", SPE_BUILTIN_EVMHEUMI },
5951 { 0, CODE_FOR_spe_evmheumia, "__builtin_spe_evmheumia", SPE_BUILTIN_EVMHEUMIA },
5952 { 0, CODE_FOR_spe_evmheumiaaw, "__builtin_spe_evmheumiaaw", SPE_BUILTIN_EVMHEUMIAAW },
5953 { 0, CODE_FOR_spe_evmheumianw, "__builtin_spe_evmheumianw", SPE_BUILTIN_EVMHEUMIANW },
5954 { 0, CODE_FOR_spe_evmheusiaaw, "__builtin_spe_evmheusiaaw", SPE_BUILTIN_EVMHEUSIAAW },
5955 { 0, CODE_FOR_spe_evmheusianw, "__builtin_spe_evmheusianw", SPE_BUILTIN_EVMHEUSIANW },
5956 { 0, CODE_FOR_spe_evmhogsmfaa, "__builtin_spe_evmhogsmfaa", SPE_BUILTIN_EVMHOGSMFAA },
5957 { 0, CODE_FOR_spe_evmhogsmfan, "__builtin_spe_evmhogsmfan", SPE_BUILTIN_EVMHOGSMFAN },
5958 { 0, CODE_FOR_spe_evmhogsmiaa, "__builtin_spe_evmhogsmiaa", SPE_BUILTIN_EVMHOGSMIAA },
5959 { 0, CODE_FOR_spe_evmhogsmian, "__builtin_spe_evmhogsmian", SPE_BUILTIN_EVMHOGSMIAN },
5960 { 0, CODE_FOR_spe_evmhogumiaa, "__builtin_spe_evmhogumiaa", SPE_BUILTIN_EVMHOGUMIAA },
5961 { 0, CODE_FOR_spe_evmhogumian, "__builtin_spe_evmhogumian", SPE_BUILTIN_EVMHOGUMIAN },
5962 { 0, CODE_FOR_spe_evmhosmf, "__builtin_spe_evmhosmf", SPE_BUILTIN_EVMHOSMF },
5963 { 0, CODE_FOR_spe_evmhosmfa, "__builtin_spe_evmhosmfa", SPE_BUILTIN_EVMHOSMFA },
5964 { 0, CODE_FOR_spe_evmhosmfaaw, "__builtin_spe_evmhosmfaaw", SPE_BUILTIN_EVMHOSMFAAW },
5965 { 0, CODE_FOR_spe_evmhosmfanw, "__builtin_spe_evmhosmfanw", SPE_BUILTIN_EVMHOSMFANW },
5966 { 0, CODE_FOR_spe_evmhosmi, "__builtin_spe_evmhosmi", SPE_BUILTIN_EVMHOSMI },
5967 { 0, CODE_FOR_spe_evmhosmia, "__builtin_spe_evmhosmia", SPE_BUILTIN_EVMHOSMIA },
5968 { 0, CODE_FOR_spe_evmhosmiaaw, "__builtin_spe_evmhosmiaaw", SPE_BUILTIN_EVMHOSMIAAW },
5969 { 0, CODE_FOR_spe_evmhosmianw, "__builtin_spe_evmhosmianw", SPE_BUILTIN_EVMHOSMIANW },
5970 { 0, CODE_FOR_spe_evmhossf, "__builtin_spe_evmhossf", SPE_BUILTIN_EVMHOSSF },
5971 { 0, CODE_FOR_spe_evmhossfa, "__builtin_spe_evmhossfa", SPE_BUILTIN_EVMHOSSFA },
5972 { 0, CODE_FOR_spe_evmhossfaaw, "__builtin_spe_evmhossfaaw", SPE_BUILTIN_EVMHOSSFAAW },
5973 { 0, CODE_FOR_spe_evmhossfanw, "__builtin_spe_evmhossfanw", SPE_BUILTIN_EVMHOSSFANW },
5974 { 0, CODE_FOR_spe_evmhossiaaw, "__builtin_spe_evmhossiaaw", SPE_BUILTIN_EVMHOSSIAAW },
5975 { 0, CODE_FOR_spe_evmhossianw, "__builtin_spe_evmhossianw", SPE_BUILTIN_EVMHOSSIANW },
5976 { 0, CODE_FOR_spe_evmhoumi, "__builtin_spe_evmhoumi", SPE_BUILTIN_EVMHOUMI },
5977 { 0, CODE_FOR_spe_evmhoumia, "__builtin_spe_evmhoumia", SPE_BUILTIN_EVMHOUMIA },
5978 { 0, CODE_FOR_spe_evmhoumiaaw, "__builtin_spe_evmhoumiaaw", SPE_BUILTIN_EVMHOUMIAAW },
5979 { 0, CODE_FOR_spe_evmhoumianw, "__builtin_spe_evmhoumianw", SPE_BUILTIN_EVMHOUMIANW },
5980 { 0, CODE_FOR_spe_evmhousiaaw, "__builtin_spe_evmhousiaaw", SPE_BUILTIN_EVMHOUSIAAW },
5981 { 0, CODE_FOR_spe_evmhousianw, "__builtin_spe_evmhousianw", SPE_BUILTIN_EVMHOUSIANW },
5982 { 0, CODE_FOR_spe_evmwhsmf, "__builtin_spe_evmwhsmf", SPE_BUILTIN_EVMWHSMF },
5983 { 0, CODE_FOR_spe_evmwhsmfa, "__builtin_spe_evmwhsmfa", SPE_BUILTIN_EVMWHSMFA },
5984 { 0, CODE_FOR_spe_evmwhsmi, "__builtin_spe_evmwhsmi", SPE_BUILTIN_EVMWHSMI },
5985 { 0, CODE_FOR_spe_evmwhsmia, "__builtin_spe_evmwhsmia", SPE_BUILTIN_EVMWHSMIA },
5986 { 0, CODE_FOR_spe_evmwhssf, "__builtin_spe_evmwhssf", SPE_BUILTIN_EVMWHSSF },
5987 { 0, CODE_FOR_spe_evmwhssfa, "__builtin_spe_evmwhssfa", SPE_BUILTIN_EVMWHSSFA },
5988 { 0, CODE_FOR_spe_evmwhumi, "__builtin_spe_evmwhumi", SPE_BUILTIN_EVMWHUMI },
5989 { 0, CODE_FOR_spe_evmwhumia, "__builtin_spe_evmwhumia", SPE_BUILTIN_EVMWHUMIA },
5990 { 0, CODE_FOR_spe_evmwlsmiaaw, "__builtin_spe_evmwlsmiaaw", SPE_BUILTIN_EVMWLSMIAAW },
5991 { 0, CODE_FOR_spe_evmwlsmianw, "__builtin_spe_evmwlsmianw", SPE_BUILTIN_EVMWLSMIANW },
5992 { 0, CODE_FOR_spe_evmwlssiaaw, "__builtin_spe_evmwlssiaaw", SPE_BUILTIN_EVMWLSSIAAW },
5993 { 0, CODE_FOR_spe_evmwlssianw, "__builtin_spe_evmwlssianw", SPE_BUILTIN_EVMWLSSIANW },
5994 { 0, CODE_FOR_spe_evmwlumi, "__builtin_spe_evmwlumi", SPE_BUILTIN_EVMWLUMI },
5995 { 0, CODE_FOR_spe_evmwlumia, "__builtin_spe_evmwlumia", SPE_BUILTIN_EVMWLUMIA },
5996 { 0, CODE_FOR_spe_evmwlumiaaw, "__builtin_spe_evmwlumiaaw", SPE_BUILTIN_EVMWLUMIAAW },
5997 { 0, CODE_FOR_spe_evmwlumianw, "__builtin_spe_evmwlumianw", SPE_BUILTIN_EVMWLUMIANW },
5998 { 0, CODE_FOR_spe_evmwlusiaaw, "__builtin_spe_evmwlusiaaw", SPE_BUILTIN_EVMWLUSIAAW },
5999 { 0, CODE_FOR_spe_evmwlusianw, "__builtin_spe_evmwlusianw", SPE_BUILTIN_EVMWLUSIANW },
6000 { 0, CODE_FOR_spe_evmwsmf, "__builtin_spe_evmwsmf", SPE_BUILTIN_EVMWSMF },
6001 { 0, CODE_FOR_spe_evmwsmfa, "__builtin_spe_evmwsmfa", SPE_BUILTIN_EVMWSMFA },
6002 { 0, CODE_FOR_spe_evmwsmfaa, "__builtin_spe_evmwsmfaa", SPE_BUILTIN_EVMWSMFAA },
6003 { 0, CODE_FOR_spe_evmwsmfan, "__builtin_spe_evmwsmfan", SPE_BUILTIN_EVMWSMFAN },
6004 { 0, CODE_FOR_spe_evmwsmi, "__builtin_spe_evmwsmi", SPE_BUILTIN_EVMWSMI },
6005 { 0, CODE_FOR_spe_evmwsmia, "__builtin_spe_evmwsmia", SPE_BUILTIN_EVMWSMIA },
6006 { 0, CODE_FOR_spe_evmwsmiaa, "__builtin_spe_evmwsmiaa", SPE_BUILTIN_EVMWSMIAA },
6007 { 0, CODE_FOR_spe_evmwsmian, "__builtin_spe_evmwsmian", SPE_BUILTIN_EVMWSMIAN },
6008 { 0, CODE_FOR_spe_evmwssf, "__builtin_spe_evmwssf", SPE_BUILTIN_EVMWSSF },
6009 { 0, CODE_FOR_spe_evmwssfa, "__builtin_spe_evmwssfa", SPE_BUILTIN_EVMWSSFA },
6010 { 0, CODE_FOR_spe_evmwssfaa, "__builtin_spe_evmwssfaa", SPE_BUILTIN_EVMWSSFAA },
6011 { 0, CODE_FOR_spe_evmwssfan, "__builtin_spe_evmwssfan", SPE_BUILTIN_EVMWSSFAN },
6012 { 0, CODE_FOR_spe_evmwumi, "__builtin_spe_evmwumi", SPE_BUILTIN_EVMWUMI },
6013 { 0, CODE_FOR_spe_evmwumia, "__builtin_spe_evmwumia", SPE_BUILTIN_EVMWUMIA },
6014 { 0, CODE_FOR_spe_evmwumiaa, "__builtin_spe_evmwumiaa", SPE_BUILTIN_EVMWUMIAA },
6015 { 0, CODE_FOR_spe_evmwumian, "__builtin_spe_evmwumian", SPE_BUILTIN_EVMWUMIAN },
6016 { 0, CODE_FOR_spe_evnand, "__builtin_spe_evnand", SPE_BUILTIN_EVNAND },
6017 { 0, CODE_FOR_spe_evnor, "__builtin_spe_evnor", SPE_BUILTIN_EVNOR },
6018 { 0, CODE_FOR_spe_evor, "__builtin_spe_evor", SPE_BUILTIN_EVOR },
6019 { 0, CODE_FOR_spe_evorc, "__builtin_spe_evorc", SPE_BUILTIN_EVORC },
6020 { 0, CODE_FOR_spe_evrlw, "__builtin_spe_evrlw", SPE_BUILTIN_EVRLW },
6021 { 0, CODE_FOR_spe_evslw, "__builtin_spe_evslw", SPE_BUILTIN_EVSLW },
6022 { 0, CODE_FOR_spe_evsrws, "__builtin_spe_evsrws", SPE_BUILTIN_EVSRWS },
6023 { 0, CODE_FOR_spe_evsrwu, "__builtin_spe_evsrwu", SPE_BUILTIN_EVSRWU },
6024 { 0, CODE_FOR_spe_evsubfw, "__builtin_spe_evsubfw", SPE_BUILTIN_EVSUBFW },
6026 /* SPE binary operations expecting a 5-bit unsigned literal. */
6027 { 0, CODE_FOR_spe_evaddiw, "__builtin_spe_evaddiw", SPE_BUILTIN_EVADDIW },
6029 { 0, CODE_FOR_spe_evrlwi, "__builtin_spe_evrlwi", SPE_BUILTIN_EVRLWI },
6030 { 0, CODE_FOR_spe_evslwi, "__builtin_spe_evslwi", SPE_BUILTIN_EVSLWI },
6031 { 0, CODE_FOR_spe_evsrwis, "__builtin_spe_evsrwis", SPE_BUILTIN_EVSRWIS },
6032 { 0, CODE_FOR_spe_evsrwiu, "__builtin_spe_evsrwiu", SPE_BUILTIN_EVSRWIU },
6033 { 0, CODE_FOR_spe_evsubifw, "__builtin_spe_evsubifw", SPE_BUILTIN_EVSUBIFW },
6034 { 0, CODE_FOR_spe_evmwhssfaa, "__builtin_spe_evmwhssfaa", SPE_BUILTIN_EVMWHSSFAA },
6035 { 0, CODE_FOR_spe_evmwhssmaa, "__builtin_spe_evmwhssmaa", SPE_BUILTIN_EVMWHSSMAA },
6036 { 0, CODE_FOR_spe_evmwhsmfaa, "__builtin_spe_evmwhsmfaa", SPE_BUILTIN_EVMWHSMFAA },
6037 { 0, CODE_FOR_spe_evmwhsmiaa, "__builtin_spe_evmwhsmiaa", SPE_BUILTIN_EVMWHSMIAA },
6038 { 0, CODE_FOR_spe_evmwhusiaa, "__builtin_spe_evmwhusiaa", SPE_BUILTIN_EVMWHUSIAA },
6039 { 0, CODE_FOR_spe_evmwhumiaa, "__builtin_spe_evmwhumiaa", SPE_BUILTIN_EVMWHUMIAA },
6040 { 0, CODE_FOR_spe_evmwhssfan, "__builtin_spe_evmwhssfan", SPE_BUILTIN_EVMWHSSFAN },
6041 { 0, CODE_FOR_spe_evmwhssian, "__builtin_spe_evmwhssian", SPE_BUILTIN_EVMWHSSIAN },
6042 { 0, CODE_FOR_spe_evmwhsmfan, "__builtin_spe_evmwhsmfan", SPE_BUILTIN_EVMWHSMFAN },
6043 { 0, CODE_FOR_spe_evmwhsmian, "__builtin_spe_evmwhsmian", SPE_BUILTIN_EVMWHSMIAN },
6044 { 0, CODE_FOR_spe_evmwhusian, "__builtin_spe_evmwhusian", SPE_BUILTIN_EVMWHUSIAN },
6045 { 0, CODE_FOR_spe_evmwhumian, "__builtin_spe_evmwhumian", SPE_BUILTIN_EVMWHUMIAN },
6046 { 0, CODE_FOR_spe_evmwhgssfaa, "__builtin_spe_evmwhgssfaa", SPE_BUILTIN_EVMWHGSSFAA },
6047 { 0, CODE_FOR_spe_evmwhgsmfaa, "__builtin_spe_evmwhgsmfaa", SPE_BUILTIN_EVMWHGSMFAA },
6048 { 0, CODE_FOR_spe_evmwhgsmiaa, "__builtin_spe_evmwhgsmiaa", SPE_BUILTIN_EVMWHGSMIAA },
6049 { 0, CODE_FOR_spe_evmwhgumiaa, "__builtin_spe_evmwhgumiaa", SPE_BUILTIN_EVMWHGUMIAA },
6050 { 0, CODE_FOR_spe_evmwhgssfan, "__builtin_spe_evmwhgssfan", SPE_BUILTIN_EVMWHGSSFAN },
6051 { 0, CODE_FOR_spe_evmwhgsmfan, "__builtin_spe_evmwhgsmfan", SPE_BUILTIN_EVMWHGSMFAN },
6052 { 0, CODE_FOR_spe_evmwhgsmian, "__builtin_spe_evmwhgsmian", SPE_BUILTIN_EVMWHGSMIAN },
6053 { 0, CODE_FOR_spe_evmwhgumian, "__builtin_spe_evmwhgumian", SPE_BUILTIN_EVMWHGUMIAN },
6054 { 0, CODE_FOR_spe_brinc, "__builtin_spe_brinc", SPE_BUILTIN_BRINC },
6056 /* Place-holder. Leave as last binary SPE builtin. */
6057 { 0, CODE_FOR_xorv2si3, "__builtin_spe_evxor", SPE_BUILTIN_EVXOR }
6060 /* AltiVec predicates. */
6062 struct builtin_description_predicates
6064 const unsigned int mask;
6065 const enum insn_code icode;
6067 const char *const name;
6068 const enum rs6000_builtins code;
6071 static const struct builtin_description_predicates bdesc_altivec_preds[] =
6073 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpbfp.", "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P },
6074 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpeqfp.", "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P },
6075 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgefp.", "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P },
6076 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgtfp.", "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P },
6077 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpequw.", "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P },
6078 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtsw.", "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P },
6079 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtuw.", "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P },
6080 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtuh.", "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P },
6081 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtsh.", "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P },
6082 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpequh.", "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P },
6083 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpequb.", "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P },
6084 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtsb.", "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P },
6085 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P },
6087 { MASK_ALTIVEC, 0, NULL, "__builtin_vec_vcmpeq_p", ALTIVEC_BUILTIN_VCMPEQ_P },
6088 { MASK_ALTIVEC, 0, NULL, "__builtin_vec_vcmpgt_p", ALTIVEC_BUILTIN_VCMPGT_P },
6089 { MASK_ALTIVEC, 0, NULL, "__builtin_vec_vcmpge_p", ALTIVEC_BUILTIN_VCMPGE_P }
6092 /* SPE predicates. */
6093 static struct builtin_description bdesc_spe_predicates[] =
6095 /* Place-holder. Leave as first. */
6096 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evcmpeq", SPE_BUILTIN_EVCMPEQ },
6097 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evcmpgts", SPE_BUILTIN_EVCMPGTS },
6098 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evcmpgtu", SPE_BUILTIN_EVCMPGTU },
6099 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evcmplts", SPE_BUILTIN_EVCMPLTS },
6100 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evcmpltu", SPE_BUILTIN_EVCMPLTU },
6101 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evfscmpeq", SPE_BUILTIN_EVFSCMPEQ },
6102 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evfscmpgt", SPE_BUILTIN_EVFSCMPGT },
6103 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evfscmplt", SPE_BUILTIN_EVFSCMPLT },
6104 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evfststeq", SPE_BUILTIN_EVFSTSTEQ },
6105 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evfststgt", SPE_BUILTIN_EVFSTSTGT },
6106 /* Place-holder. Leave as last. */
6107 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evfststlt", SPE_BUILTIN_EVFSTSTLT },
6110 /* SPE evsel predicates. */
6111 static struct builtin_description bdesc_spe_evsel[] =
6113 /* Place-holder. Leave as first. */
6114 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evsel_gts", SPE_BUILTIN_EVSEL_CMPGTS },
6115 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evsel_gtu", SPE_BUILTIN_EVSEL_CMPGTU },
6116 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evsel_lts", SPE_BUILTIN_EVSEL_CMPLTS },
6117 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evsel_ltu", SPE_BUILTIN_EVSEL_CMPLTU },
6118 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evsel_eq", SPE_BUILTIN_EVSEL_CMPEQ },
6119 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evsel_fsgt", SPE_BUILTIN_EVSEL_FSCMPGT },
6120 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evsel_fslt", SPE_BUILTIN_EVSEL_FSCMPLT },
6121 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evsel_fseq", SPE_BUILTIN_EVSEL_FSCMPEQ },
6122 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evsel_fststgt", SPE_BUILTIN_EVSEL_FSTSTGT },
6123 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evsel_fststlt", SPE_BUILTIN_EVSEL_FSTSTLT },
6124 /* Place-holder. Leave as last. */
6125 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evsel_fststeq", SPE_BUILTIN_EVSEL_FSTSTEQ },
6128 /* ABS* operations. */
6130 static const struct builtin_description bdesc_abs[] =
6132 { MASK_ALTIVEC, CODE_FOR_absv4si2, "__builtin_altivec_abs_v4si", ALTIVEC_BUILTIN_ABS_V4SI },
6133 { MASK_ALTIVEC, CODE_FOR_absv8hi2, "__builtin_altivec_abs_v8hi", ALTIVEC_BUILTIN_ABS_V8HI },
6134 { MASK_ALTIVEC, CODE_FOR_absv4sf2, "__builtin_altivec_abs_v4sf", ALTIVEC_BUILTIN_ABS_V4SF },
6135 { MASK_ALTIVEC, CODE_FOR_absv16qi2, "__builtin_altivec_abs_v16qi", ALTIVEC_BUILTIN_ABS_V16QI },
6136 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v4si, "__builtin_altivec_abss_v4si", ALTIVEC_BUILTIN_ABSS_V4SI },
6137 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v8hi, "__builtin_altivec_abss_v8hi", ALTIVEC_BUILTIN_ABSS_V8HI },
6138 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v16qi, "__builtin_altivec_abss_v16qi", ALTIVEC_BUILTIN_ABSS_V16QI }
6141 /* Simple unary operations: VECb = foo (unsigned literal) or VECb =
6144 static struct builtin_description bdesc_1arg[] =
6146 { MASK_ALTIVEC, CODE_FOR_altivec_vexptefp, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP },
6147 { MASK_ALTIVEC, CODE_FOR_altivec_vlogefp, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP },
6148 { MASK_ALTIVEC, CODE_FOR_altivec_vrefp, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP },
6149 { MASK_ALTIVEC, CODE_FOR_altivec_vrfim, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM },
6150 { MASK_ALTIVEC, CODE_FOR_altivec_vrfin, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN },
6151 { MASK_ALTIVEC, CODE_FOR_altivec_vrfip, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP },
6152 { MASK_ALTIVEC, CODE_FOR_ftruncv4sf2, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ },
6153 { MASK_ALTIVEC, CODE_FOR_altivec_vrsqrtefp, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP },
6154 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisb, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB },
6155 { MASK_ALTIVEC, CODE_FOR_altivec_vspltish, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH },
6156 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisw, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW },
6157 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsb, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB },
6158 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhpx, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX },
6159 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsh, "__builtin_altivec_vupkhsh", ALTIVEC_BUILTIN_VUPKHSH },
6160 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsb, "__builtin_altivec_vupklsb", ALTIVEC_BUILTIN_VUPKLSB },
6161 { MASK_ALTIVEC, CODE_FOR_altivec_vupklpx, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX },
6162 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsh, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH },
6164 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_abs", ALTIVEC_BUILTIN_VEC_ABS },
6165 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_abss", ALTIVEC_BUILTIN_VEC_ABSS },
6166 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_ceil", ALTIVEC_BUILTIN_VEC_CEIL },
6167 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_expte", ALTIVEC_BUILTIN_VEC_EXPTE },
6168 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_floor", ALTIVEC_BUILTIN_VEC_FLOOR },
6169 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_loge", ALTIVEC_BUILTIN_VEC_LOGE },
6170 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mtvscr", ALTIVEC_BUILTIN_VEC_MTVSCR },
6171 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_re", ALTIVEC_BUILTIN_VEC_RE },
6172 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_round", ALTIVEC_BUILTIN_VEC_ROUND },
6173 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_rsqrte", ALTIVEC_BUILTIN_VEC_RSQRTE },
6174 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_trunc", ALTIVEC_BUILTIN_VEC_TRUNC },
6175 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_unpackh", ALTIVEC_BUILTIN_VEC_UNPACKH },
6176 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupkhsh", ALTIVEC_BUILTIN_VEC_VUPKHSH },
6177 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupkhpx", ALTIVEC_BUILTIN_VEC_VUPKHPX },
6178 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupkhsb", ALTIVEC_BUILTIN_VEC_VUPKHSB },
6179 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_unpackl", ALTIVEC_BUILTIN_VEC_UNPACKL },
6180 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupklpx", ALTIVEC_BUILTIN_VEC_VUPKLPX },
6181 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupklsh", ALTIVEC_BUILTIN_VEC_VUPKLSH },
6182 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupklsb", ALTIVEC_BUILTIN_VEC_VUPKLSB },
6184 /* The SPE unary builtins must start with SPE_BUILTIN_EVABS and
6185 end with SPE_BUILTIN_EVSUBFUSIAAW. */
6186 { 0, CODE_FOR_spe_evabs, "__builtin_spe_evabs", SPE_BUILTIN_EVABS },
6187 { 0, CODE_FOR_spe_evaddsmiaaw, "__builtin_spe_evaddsmiaaw", SPE_BUILTIN_EVADDSMIAAW },
6188 { 0, CODE_FOR_spe_evaddssiaaw, "__builtin_spe_evaddssiaaw", SPE_BUILTIN_EVADDSSIAAW },
6189 { 0, CODE_FOR_spe_evaddumiaaw, "__builtin_spe_evaddumiaaw", SPE_BUILTIN_EVADDUMIAAW },
6190 { 0, CODE_FOR_spe_evaddusiaaw, "__builtin_spe_evaddusiaaw", SPE_BUILTIN_EVADDUSIAAW },
6191 { 0, CODE_FOR_spe_evcntlsw, "__builtin_spe_evcntlsw", SPE_BUILTIN_EVCNTLSW },
6192 { 0, CODE_FOR_spe_evcntlzw, "__builtin_spe_evcntlzw", SPE_BUILTIN_EVCNTLZW },
6193 { 0, CODE_FOR_spe_evextsb, "__builtin_spe_evextsb", SPE_BUILTIN_EVEXTSB },
6194 { 0, CODE_FOR_spe_evextsh, "__builtin_spe_evextsh", SPE_BUILTIN_EVEXTSH },
6195 { 0, CODE_FOR_spe_evfsabs, "__builtin_spe_evfsabs", SPE_BUILTIN_EVFSABS },
6196 { 0, CODE_FOR_spe_evfscfsf, "__builtin_spe_evfscfsf", SPE_BUILTIN_EVFSCFSF },
6197 { 0, CODE_FOR_spe_evfscfsi, "__builtin_spe_evfscfsi", SPE_BUILTIN_EVFSCFSI },
6198 { 0, CODE_FOR_spe_evfscfuf, "__builtin_spe_evfscfuf", SPE_BUILTIN_EVFSCFUF },
6199 { 0, CODE_FOR_spe_evfscfui, "__builtin_spe_evfscfui", SPE_BUILTIN_EVFSCFUI },
6200 { 0, CODE_FOR_spe_evfsctsf, "__builtin_spe_evfsctsf", SPE_BUILTIN_EVFSCTSF },
6201 { 0, CODE_FOR_spe_evfsctsi, "__builtin_spe_evfsctsi", SPE_BUILTIN_EVFSCTSI },
6202 { 0, CODE_FOR_spe_evfsctsiz, "__builtin_spe_evfsctsiz", SPE_BUILTIN_EVFSCTSIZ },
6203 { 0, CODE_FOR_spe_evfsctuf, "__builtin_spe_evfsctuf", SPE_BUILTIN_EVFSCTUF },
6204 { 0, CODE_FOR_spe_evfsctui, "__builtin_spe_evfsctui", SPE_BUILTIN_EVFSCTUI },
6205 { 0, CODE_FOR_spe_evfsctuiz, "__builtin_spe_evfsctuiz", SPE_BUILTIN_EVFSCTUIZ },
6206 { 0, CODE_FOR_spe_evfsnabs, "__builtin_spe_evfsnabs", SPE_BUILTIN_EVFSNABS },
6207 { 0, CODE_FOR_spe_evfsneg, "__builtin_spe_evfsneg", SPE_BUILTIN_EVFSNEG },
6208 { 0, CODE_FOR_spe_evmra, "__builtin_spe_evmra", SPE_BUILTIN_EVMRA },
6209 { 0, CODE_FOR_negv2si2, "__builtin_spe_evneg", SPE_BUILTIN_EVNEG },
6210 { 0, CODE_FOR_spe_evrndw, "__builtin_spe_evrndw", SPE_BUILTIN_EVRNDW },
6211 { 0, CODE_FOR_spe_evsubfsmiaaw, "__builtin_spe_evsubfsmiaaw", SPE_BUILTIN_EVSUBFSMIAAW },
6212 { 0, CODE_FOR_spe_evsubfssiaaw, "__builtin_spe_evsubfssiaaw", SPE_BUILTIN_EVSUBFSSIAAW },
6213 { 0, CODE_FOR_spe_evsubfumiaaw, "__builtin_spe_evsubfumiaaw", SPE_BUILTIN_EVSUBFUMIAAW },
6215 /* Place-holder. Leave as last unary SPE builtin. */
6216 { 0, CODE_FOR_spe_evsubfusiaaw, "__builtin_spe_evsubfusiaaw", SPE_BUILTIN_EVSUBFUSIAAW }
6220 rs6000_expand_unop_builtin (enum insn_code icode, tree arglist, rtx target)
6223 tree arg0 = TREE_VALUE (arglist);
6224 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6225 enum machine_mode tmode = insn_data[icode].operand[0].mode;
6226 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
6228 if (icode == CODE_FOR_nothing)
6229 /* Builtin not supported on this processor. */
6232 /* If we got invalid arguments bail out before generating bad rtl. */
6233 if (arg0 == error_mark_node)
6236 if (icode == CODE_FOR_altivec_vspltisb
6237 || icode == CODE_FOR_altivec_vspltish
6238 || icode == CODE_FOR_altivec_vspltisw
6239 || icode == CODE_FOR_spe_evsplatfi
6240 || icode == CODE_FOR_spe_evsplati)
6242 /* Only allow 5-bit *signed* literals. */
6243 if (GET_CODE (op0) != CONST_INT
6244 || INTVAL (op0) > 15
6245 || INTVAL (op0) < -16)
6247 error ("argument 1 must be a 5-bit signed literal");
6253 || GET_MODE (target) != tmode
6254 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6255 target = gen_reg_rtx (tmode);
6257 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6258 op0 = copy_to_mode_reg (mode0, op0);
6260 pat = GEN_FCN (icode) (target, op0);
6269 altivec_expand_abs_builtin (enum insn_code icode, tree arglist, rtx target)
6271 rtx pat, scratch1, scratch2;
6272 tree arg0 = TREE_VALUE (arglist);
6273 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6274 enum machine_mode tmode = insn_data[icode].operand[0].mode;
6275 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
6277 /* If we have invalid arguments, bail out before generating bad rtl. */
6278 if (arg0 == error_mark_node)
6282 || GET_MODE (target) != tmode
6283 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6284 target = gen_reg_rtx (tmode);
6286 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6287 op0 = copy_to_mode_reg (mode0, op0);
6289 scratch1 = gen_reg_rtx (mode0);
6290 scratch2 = gen_reg_rtx (mode0);
6292 pat = GEN_FCN (icode) (target, op0, scratch1, scratch2);
6301 rs6000_expand_binop_builtin (enum insn_code icode, tree arglist, rtx target)
6304 tree arg0 = TREE_VALUE (arglist);
6305 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
6306 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6307 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6308 enum machine_mode tmode = insn_data[icode].operand[0].mode;
6309 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
6310 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
6312 if (icode == CODE_FOR_nothing)
6313 /* Builtin not supported on this processor. */
6316 /* If we got invalid arguments bail out before generating bad rtl. */
6317 if (arg0 == error_mark_node || arg1 == error_mark_node)
6320 if (icode == CODE_FOR_altivec_vcfux
6321 || icode == CODE_FOR_altivec_vcfsx
6322 || icode == CODE_FOR_altivec_vctsxs
6323 || icode == CODE_FOR_altivec_vctuxs
6324 || icode == CODE_FOR_altivec_vspltb
6325 || icode == CODE_FOR_altivec_vsplth
6326 || icode == CODE_FOR_altivec_vspltw
6327 || icode == CODE_FOR_spe_evaddiw
6328 || icode == CODE_FOR_spe_evldd
6329 || icode == CODE_FOR_spe_evldh
6330 || icode == CODE_FOR_spe_evldw
6331 || icode == CODE_FOR_spe_evlhhesplat
6332 || icode == CODE_FOR_spe_evlhhossplat
6333 || icode == CODE_FOR_spe_evlhhousplat
6334 || icode == CODE_FOR_spe_evlwhe
6335 || icode == CODE_FOR_spe_evlwhos
6336 || icode == CODE_FOR_spe_evlwhou
6337 || icode == CODE_FOR_spe_evlwhsplat
6338 || icode == CODE_FOR_spe_evlwwsplat
6339 || icode == CODE_FOR_spe_evrlwi
6340 || icode == CODE_FOR_spe_evslwi
6341 || icode == CODE_FOR_spe_evsrwis
6342 || icode == CODE_FOR_spe_evsubifw
6343 || icode == CODE_FOR_spe_evsrwiu)
6345 /* Only allow 5-bit unsigned literals. */
6347 if (TREE_CODE (arg1) != INTEGER_CST
6348 || TREE_INT_CST_LOW (arg1) & ~0x1f)
6350 error ("argument 2 must be a 5-bit unsigned literal");
6356 || GET_MODE (target) != tmode
6357 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6358 target = gen_reg_rtx (tmode);
6360 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6361 op0 = copy_to_mode_reg (mode0, op0);
6362 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
6363 op1 = copy_to_mode_reg (mode1, op1);
6365 pat = GEN_FCN (icode) (target, op0, op1);
6374 altivec_expand_predicate_builtin (enum insn_code icode, const char *opcode,
6375 tree arglist, rtx target)
6378 tree cr6_form = TREE_VALUE (arglist);
6379 tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
6380 tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6381 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6382 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6383 enum machine_mode tmode = SImode;
6384 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
6385 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
6388 if (TREE_CODE (cr6_form) != INTEGER_CST)
6390 error ("argument 1 of __builtin_altivec_predicate must be a constant");
6394 cr6_form_int = TREE_INT_CST_LOW (cr6_form);
6396 gcc_assert (mode0 == mode1);
6398 /* If we have invalid arguments, bail out before generating bad rtl. */
6399 if (arg0 == error_mark_node || arg1 == error_mark_node)
6403 || GET_MODE (target) != tmode
6404 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6405 target = gen_reg_rtx (tmode);
6407 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6408 op0 = copy_to_mode_reg (mode0, op0);
6409 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
6410 op1 = copy_to_mode_reg (mode1, op1);
6412 scratch = gen_reg_rtx (mode0);
6414 pat = GEN_FCN (icode) (scratch, op0, op1,
6415 gen_rtx_SYMBOL_REF (Pmode, opcode));
6420 /* The vec_any* and vec_all* predicates use the same opcodes for two
6421 different operations, but the bits in CR6 will be different
6422 depending on what information we want. So we have to play tricks
6423 with CR6 to get the right bits out.
6425 If you think this is disgusting, look at the specs for the
6426 AltiVec predicates. */
6428 switch (cr6_form_int)
6431 emit_insn (gen_cr6_test_for_zero (target));
6434 emit_insn (gen_cr6_test_for_zero_reverse (target));
6437 emit_insn (gen_cr6_test_for_lt (target));
6440 emit_insn (gen_cr6_test_for_lt_reverse (target));
6443 error ("argument 1 of __builtin_altivec_predicate is out of range");
6451 altivec_expand_lv_builtin (enum insn_code icode, tree arglist, rtx target)
6454 tree arg0 = TREE_VALUE (arglist);
6455 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
6456 enum machine_mode tmode = insn_data[icode].operand[0].mode;
6457 enum machine_mode mode0 = Pmode;
6458 enum machine_mode mode1 = Pmode;
6459 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6460 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6462 if (icode == CODE_FOR_nothing)
6463 /* Builtin not supported on this processor. */
6466 /* If we got invalid arguments bail out before generating bad rtl. */
6467 if (arg0 == error_mark_node || arg1 == error_mark_node)
6471 || GET_MODE (target) != tmode
6472 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6473 target = gen_reg_rtx (tmode);
6475 op1 = copy_to_mode_reg (mode1, op1);
6477 if (op0 == const0_rtx)
6479 addr = gen_rtx_MEM (tmode, op1);
6483 op0 = copy_to_mode_reg (mode0, op0);
6484 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op0, op1));
6487 pat = GEN_FCN (icode) (target, addr);
6497 spe_expand_stv_builtin (enum insn_code icode, tree arglist)
6499 tree arg0 = TREE_VALUE (arglist);
6500 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
6501 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6502 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6503 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6504 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
6506 enum machine_mode mode0 = insn_data[icode].operand[0].mode;
6507 enum machine_mode mode1 = insn_data[icode].operand[1].mode;
6508 enum machine_mode mode2 = insn_data[icode].operand[2].mode;
6510 /* Invalid arguments. Bail before doing anything stoopid! */
6511 if (arg0 == error_mark_node
6512 || arg1 == error_mark_node
6513 || arg2 == error_mark_node)
6516 if (! (*insn_data[icode].operand[2].predicate) (op0, mode2))
6517 op0 = copy_to_mode_reg (mode2, op0);
6518 if (! (*insn_data[icode].operand[0].predicate) (op1, mode0))
6519 op1 = copy_to_mode_reg (mode0, op1);
6520 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
6521 op2 = copy_to_mode_reg (mode1, op2);
6523 pat = GEN_FCN (icode) (op1, op2, op0);
6530 altivec_expand_stv_builtin (enum insn_code icode, tree arglist)
6532 tree arg0 = TREE_VALUE (arglist);
6533 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
6534 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6535 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6536 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6537 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
6539 enum machine_mode tmode = insn_data[icode].operand[0].mode;
6540 enum machine_mode mode1 = Pmode;
6541 enum machine_mode mode2 = Pmode;
6543 /* Invalid arguments. Bail before doing anything stoopid! */
6544 if (arg0 == error_mark_node
6545 || arg1 == error_mark_node
6546 || arg2 == error_mark_node)
6549 if (! (*insn_data[icode].operand[1].predicate) (op0, tmode))
6550 op0 = copy_to_mode_reg (tmode, op0);
6552 op2 = copy_to_mode_reg (mode2, op2);
6554 if (op1 == const0_rtx)
6556 addr = gen_rtx_MEM (tmode, op2);
6560 op1 = copy_to_mode_reg (mode1, op1);
6561 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op1, op2));
6564 pat = GEN_FCN (icode) (addr, op0);
6571 rs6000_expand_ternop_builtin (enum insn_code icode, tree arglist, rtx target)
6574 tree arg0 = TREE_VALUE (arglist);
6575 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
6576 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6577 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6578 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6579 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
6580 enum machine_mode tmode = insn_data[icode].operand[0].mode;
6581 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
6582 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
6583 enum machine_mode mode2 = insn_data[icode].operand[3].mode;
6585 if (icode == CODE_FOR_nothing)
6586 /* Builtin not supported on this processor. */
6589 /* If we got invalid arguments bail out before generating bad rtl. */
6590 if (arg0 == error_mark_node
6591 || arg1 == error_mark_node
6592 || arg2 == error_mark_node)
6595 if (icode == CODE_FOR_altivec_vsldoi_v4sf
6596 || icode == CODE_FOR_altivec_vsldoi_v4si
6597 || icode == CODE_FOR_altivec_vsldoi_v8hi
6598 || icode == CODE_FOR_altivec_vsldoi_v16qi)
6600 /* Only allow 4-bit unsigned literals. */
6602 if (TREE_CODE (arg2) != INTEGER_CST
6603 || TREE_INT_CST_LOW (arg2) & ~0xf)
6605 error ("argument 3 must be a 4-bit unsigned literal");
6611 || GET_MODE (target) != tmode
6612 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6613 target = gen_reg_rtx (tmode);
6615 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6616 op0 = copy_to_mode_reg (mode0, op0);
6617 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
6618 op1 = copy_to_mode_reg (mode1, op1);
6619 if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
6620 op2 = copy_to_mode_reg (mode2, op2);
6622 pat = GEN_FCN (icode) (target, op0, op1, op2);
6630 /* Expand the lvx builtins. */
6632 altivec_expand_ld_builtin (tree exp, rtx target, bool *expandedp)
6634 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6635 tree arglist = TREE_OPERAND (exp, 1);
6636 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
6638 enum machine_mode tmode, mode0;
6640 enum insn_code icode;
6644 case ALTIVEC_BUILTIN_LD_INTERNAL_16qi:
6645 icode = CODE_FOR_altivec_lvx_v16qi;
6647 case ALTIVEC_BUILTIN_LD_INTERNAL_8hi:
6648 icode = CODE_FOR_altivec_lvx_v8hi;
6650 case ALTIVEC_BUILTIN_LD_INTERNAL_4si:
6651 icode = CODE_FOR_altivec_lvx_v4si;
6653 case ALTIVEC_BUILTIN_LD_INTERNAL_4sf:
6654 icode = CODE_FOR_altivec_lvx_v4sf;
6663 arg0 = TREE_VALUE (arglist);
6664 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6665 tmode = insn_data[icode].operand[0].mode;
6666 mode0 = insn_data[icode].operand[1].mode;
6669 || GET_MODE (target) != tmode
6670 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6671 target = gen_reg_rtx (tmode);
6673 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6674 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
6676 pat = GEN_FCN (icode) (target, op0);
6683 /* Expand the stvx builtins. */
6685 altivec_expand_st_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
6688 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6689 tree arglist = TREE_OPERAND (exp, 1);
6690 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
6692 enum machine_mode mode0, mode1;
6694 enum insn_code icode;
6698 case ALTIVEC_BUILTIN_ST_INTERNAL_16qi:
6699 icode = CODE_FOR_altivec_stvx_v16qi;
6701 case ALTIVEC_BUILTIN_ST_INTERNAL_8hi:
6702 icode = CODE_FOR_altivec_stvx_v8hi;
6704 case ALTIVEC_BUILTIN_ST_INTERNAL_4si:
6705 icode = CODE_FOR_altivec_stvx_v4si;
6707 case ALTIVEC_BUILTIN_ST_INTERNAL_4sf:
6708 icode = CODE_FOR_altivec_stvx_v4sf;
6715 arg0 = TREE_VALUE (arglist);
6716 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
6717 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6718 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6719 mode0 = insn_data[icode].operand[0].mode;
6720 mode1 = insn_data[icode].operand[1].mode;
6722 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
6723 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
6724 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
6725 op1 = copy_to_mode_reg (mode1, op1);
6727 pat = GEN_FCN (icode) (op0, op1);
6735 /* Expand the dst builtins. */
6737 altivec_expand_dst_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
6740 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6741 tree arglist = TREE_OPERAND (exp, 1);
6742 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
6743 tree arg0, arg1, arg2;
6744 enum machine_mode mode0, mode1, mode2;
6745 rtx pat, op0, op1, op2;
6746 struct builtin_description *d;
6751 /* Handle DST variants. */
6752 d = (struct builtin_description *) bdesc_dst;
6753 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
6754 if (d->code == fcode)
6756 arg0 = TREE_VALUE (arglist);
6757 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
6758 arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6759 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6760 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6761 op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
6762 mode0 = insn_data[d->icode].operand[0].mode;
6763 mode1 = insn_data[d->icode].operand[1].mode;
6764 mode2 = insn_data[d->icode].operand[2].mode;
6766 /* Invalid arguments, bail out before generating bad rtl. */
6767 if (arg0 == error_mark_node
6768 || arg1 == error_mark_node
6769 || arg2 == error_mark_node)
6774 if (TREE_CODE (arg2) != INTEGER_CST
6775 || TREE_INT_CST_LOW (arg2) & ~0x3)
6777 error ("argument to %qs must be a 2-bit unsigned literal", d->name);
6781 if (! (*insn_data[d->icode].operand[0].predicate) (op0, mode0))
6782 op0 = copy_to_mode_reg (Pmode, op0);
6783 if (! (*insn_data[d->icode].operand[1].predicate) (op1, mode1))
6784 op1 = copy_to_mode_reg (mode1, op1);
6786 pat = GEN_FCN (d->icode) (op0, op1, op2);
6796 /* Expand the builtin in EXP and store the result in TARGET. Store
6797 true in *EXPANDEDP if we found a builtin to expand. */
6799 altivec_expand_builtin (tree exp, rtx target, bool *expandedp)
6801 struct builtin_description *d;
6802 struct builtin_description_predicates *dp;
6804 enum insn_code icode;
6805 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6806 tree arglist = TREE_OPERAND (exp, 1);
6809 enum machine_mode tmode, mode0;
6810 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
6812 if (fcode >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
6813 && fcode <= ALTIVEC_BUILTIN_OVERLOADED_LAST)
6816 error ("unresolved overload for Altivec builtin %qE", fndecl);
6820 target = altivec_expand_ld_builtin (exp, target, expandedp);
6824 target = altivec_expand_st_builtin (exp, target, expandedp);
6828 target = altivec_expand_dst_builtin (exp, target, expandedp);
6836 case ALTIVEC_BUILTIN_STVX:
6837 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx, arglist);
6838 case ALTIVEC_BUILTIN_STVEBX:
6839 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx, arglist);
6840 case ALTIVEC_BUILTIN_STVEHX:
6841 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx, arglist);
6842 case ALTIVEC_BUILTIN_STVEWX:
6843 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvewx, arglist);
6844 case ALTIVEC_BUILTIN_STVXL:
6845 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvxl, arglist);
6847 case ALTIVEC_BUILTIN_MFVSCR:
6848 icode = CODE_FOR_altivec_mfvscr;
6849 tmode = insn_data[icode].operand[0].mode;
6852 || GET_MODE (target) != tmode
6853 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6854 target = gen_reg_rtx (tmode);
6856 pat = GEN_FCN (icode) (target);
6862 case ALTIVEC_BUILTIN_MTVSCR:
6863 icode = CODE_FOR_altivec_mtvscr;
6864 arg0 = TREE_VALUE (arglist);
6865 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6866 mode0 = insn_data[icode].operand[0].mode;
6868 /* If we got invalid arguments bail out before generating bad rtl. */
6869 if (arg0 == error_mark_node)
6872 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
6873 op0 = copy_to_mode_reg (mode0, op0);
6875 pat = GEN_FCN (icode) (op0);
6880 case ALTIVEC_BUILTIN_DSSALL:
6881 emit_insn (gen_altivec_dssall ());
6884 case ALTIVEC_BUILTIN_DSS:
6885 icode = CODE_FOR_altivec_dss;
6886 arg0 = TREE_VALUE (arglist);
6888 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6889 mode0 = insn_data[icode].operand[0].mode;
6891 /* If we got invalid arguments bail out before generating bad rtl. */
6892 if (arg0 == error_mark_node)
6895 if (TREE_CODE (arg0) != INTEGER_CST
6896 || TREE_INT_CST_LOW (arg0) & ~0x3)
6898 error ("argument to dss must be a 2-bit unsigned literal");
6902 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
6903 op0 = copy_to_mode_reg (mode0, op0);
6905 emit_insn (gen_altivec_dss (op0));
6909 /* Expand abs* operations. */
6910 d = (struct builtin_description *) bdesc_abs;
6911 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
6912 if (d->code == fcode)
6913 return altivec_expand_abs_builtin (d->icode, arglist, target);
6915 /* Expand the AltiVec predicates. */
6916 dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
6917 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
6918 if (dp->code == fcode)
6919 return altivec_expand_predicate_builtin (dp->icode, dp->opcode,
6922 /* LV* are funky. We initialized them differently. */
6925 case ALTIVEC_BUILTIN_LVSL:
6926 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvsl,
6928 case ALTIVEC_BUILTIN_LVSR:
6929 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvsr,
6931 case ALTIVEC_BUILTIN_LVEBX:
6932 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvebx,
6934 case ALTIVEC_BUILTIN_LVEHX:
6935 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvehx,
6937 case ALTIVEC_BUILTIN_LVEWX:
6938 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvewx,
6940 case ALTIVEC_BUILTIN_LVXL:
6941 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvxl,
6943 case ALTIVEC_BUILTIN_LVX:
6944 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvx,
6955 /* Binops that need to be initialized manually, but can be expanded
6956 automagically by rs6000_expand_binop_builtin. */
6957 static struct builtin_description bdesc_2arg_spe[] =
6959 { 0, CODE_FOR_spe_evlddx, "__builtin_spe_evlddx", SPE_BUILTIN_EVLDDX },
6960 { 0, CODE_FOR_spe_evldwx, "__builtin_spe_evldwx", SPE_BUILTIN_EVLDWX },
6961 { 0, CODE_FOR_spe_evldhx, "__builtin_spe_evldhx", SPE_BUILTIN_EVLDHX },
6962 { 0, CODE_FOR_spe_evlwhex, "__builtin_spe_evlwhex", SPE_BUILTIN_EVLWHEX },
6963 { 0, CODE_FOR_spe_evlwhoux, "__builtin_spe_evlwhoux", SPE_BUILTIN_EVLWHOUX },
6964 { 0, CODE_FOR_spe_evlwhosx, "__builtin_spe_evlwhosx", SPE_BUILTIN_EVLWHOSX },
6965 { 0, CODE_FOR_spe_evlwwsplatx, "__builtin_spe_evlwwsplatx", SPE_BUILTIN_EVLWWSPLATX },
6966 { 0, CODE_FOR_spe_evlwhsplatx, "__builtin_spe_evlwhsplatx", SPE_BUILTIN_EVLWHSPLATX },
6967 { 0, CODE_FOR_spe_evlhhesplatx, "__builtin_spe_evlhhesplatx", SPE_BUILTIN_EVLHHESPLATX },
6968 { 0, CODE_FOR_spe_evlhhousplatx, "__builtin_spe_evlhhousplatx", SPE_BUILTIN_EVLHHOUSPLATX },
6969 { 0, CODE_FOR_spe_evlhhossplatx, "__builtin_spe_evlhhossplatx", SPE_BUILTIN_EVLHHOSSPLATX },
6970 { 0, CODE_FOR_spe_evldd, "__builtin_spe_evldd", SPE_BUILTIN_EVLDD },
6971 { 0, CODE_FOR_spe_evldw, "__builtin_spe_evldw", SPE_BUILTIN_EVLDW },
6972 { 0, CODE_FOR_spe_evldh, "__builtin_spe_evldh", SPE_BUILTIN_EVLDH },
6973 { 0, CODE_FOR_spe_evlwhe, "__builtin_spe_evlwhe", SPE_BUILTIN_EVLWHE },
6974 { 0, CODE_FOR_spe_evlwhou, "__builtin_spe_evlwhou", SPE_BUILTIN_EVLWHOU },
6975 { 0, CODE_FOR_spe_evlwhos, "__builtin_spe_evlwhos", SPE_BUILTIN_EVLWHOS },
6976 { 0, CODE_FOR_spe_evlwwsplat, "__builtin_spe_evlwwsplat", SPE_BUILTIN_EVLWWSPLAT },
6977 { 0, CODE_FOR_spe_evlwhsplat, "__builtin_spe_evlwhsplat", SPE_BUILTIN_EVLWHSPLAT },
6978 { 0, CODE_FOR_spe_evlhhesplat, "__builtin_spe_evlhhesplat", SPE_BUILTIN_EVLHHESPLAT },
6979 { 0, CODE_FOR_spe_evlhhousplat, "__builtin_spe_evlhhousplat", SPE_BUILTIN_EVLHHOUSPLAT },
6980 { 0, CODE_FOR_spe_evlhhossplat, "__builtin_spe_evlhhossplat", SPE_BUILTIN_EVLHHOSSPLAT }
6983 /* Expand the builtin in EXP and store the result in TARGET. Store
6984 true in *EXPANDEDP if we found a builtin to expand.
6986 This expands the SPE builtins that are not simple unary and binary
6989 spe_expand_builtin (tree exp, rtx target, bool *expandedp)
6991 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6992 tree arglist = TREE_OPERAND (exp, 1);
6994 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
6995 enum insn_code icode;
6996 enum machine_mode tmode, mode0;
6998 struct builtin_description *d;
7003 /* Syntax check for a 5-bit unsigned immediate. */
7006 case SPE_BUILTIN_EVSTDD:
7007 case SPE_BUILTIN_EVSTDH:
7008 case SPE_BUILTIN_EVSTDW:
7009 case SPE_BUILTIN_EVSTWHE:
7010 case SPE_BUILTIN_EVSTWHO:
7011 case SPE_BUILTIN_EVSTWWE:
7012 case SPE_BUILTIN_EVSTWWO:
7013 arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
7014 if (TREE_CODE (arg1) != INTEGER_CST
7015 || TREE_INT_CST_LOW (arg1) & ~0x1f)
7017 error ("argument 2 must be a 5-bit unsigned literal");
7025 /* The evsplat*i instructions are not quite generic. */
7028 case SPE_BUILTIN_EVSPLATFI:
7029 return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplatfi,
7031 case SPE_BUILTIN_EVSPLATI:
7032 return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplati,
7038 d = (struct builtin_description *) bdesc_2arg_spe;
7039 for (i = 0; i < ARRAY_SIZE (bdesc_2arg_spe); ++i, ++d)
7040 if (d->code == fcode)
7041 return rs6000_expand_binop_builtin (d->icode, arglist, target);
7043 d = (struct builtin_description *) bdesc_spe_predicates;
7044 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, ++d)
7045 if (d->code == fcode)
7046 return spe_expand_predicate_builtin (d->icode, arglist, target);
7048 d = (struct builtin_description *) bdesc_spe_evsel;
7049 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, ++d)
7050 if (d->code == fcode)
7051 return spe_expand_evsel_builtin (d->icode, arglist, target);
7055 case SPE_BUILTIN_EVSTDDX:
7056 return spe_expand_stv_builtin (CODE_FOR_spe_evstddx, arglist);
7057 case SPE_BUILTIN_EVSTDHX:
7058 return spe_expand_stv_builtin (CODE_FOR_spe_evstdhx, arglist);
7059 case SPE_BUILTIN_EVSTDWX:
7060 return spe_expand_stv_builtin (CODE_FOR_spe_evstdwx, arglist);
7061 case SPE_BUILTIN_EVSTWHEX:
7062 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhex, arglist);
7063 case SPE_BUILTIN_EVSTWHOX:
7064 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhox, arglist);
7065 case SPE_BUILTIN_EVSTWWEX:
7066 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwex, arglist);
7067 case SPE_BUILTIN_EVSTWWOX:
7068 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwox, arglist);
7069 case SPE_BUILTIN_EVSTDD:
7070 return spe_expand_stv_builtin (CODE_FOR_spe_evstdd, arglist);
7071 case SPE_BUILTIN_EVSTDH:
7072 return spe_expand_stv_builtin (CODE_FOR_spe_evstdh, arglist);
7073 case SPE_BUILTIN_EVSTDW:
7074 return spe_expand_stv_builtin (CODE_FOR_spe_evstdw, arglist);
7075 case SPE_BUILTIN_EVSTWHE:
7076 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhe, arglist);
7077 case SPE_BUILTIN_EVSTWHO:
7078 return spe_expand_stv_builtin (CODE_FOR_spe_evstwho, arglist);
7079 case SPE_BUILTIN_EVSTWWE:
7080 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwe, arglist);
7081 case SPE_BUILTIN_EVSTWWO:
7082 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwo, arglist);
7083 case SPE_BUILTIN_MFSPEFSCR:
7084 icode = CODE_FOR_spe_mfspefscr;
7085 tmode = insn_data[icode].operand[0].mode;
7088 || GET_MODE (target) != tmode
7089 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7090 target = gen_reg_rtx (tmode);
7092 pat = GEN_FCN (icode) (target);
7097 case SPE_BUILTIN_MTSPEFSCR:
7098 icode = CODE_FOR_spe_mtspefscr;
7099 arg0 = TREE_VALUE (arglist);
7100 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
7101 mode0 = insn_data[icode].operand[0].mode;
7103 if (arg0 == error_mark_node)
7106 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
7107 op0 = copy_to_mode_reg (mode0, op0);
7109 pat = GEN_FCN (icode) (op0);
7122 spe_expand_predicate_builtin (enum insn_code icode, tree arglist, rtx target)
7124 rtx pat, scratch, tmp;
7125 tree form = TREE_VALUE (arglist);
7126 tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
7127 tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
7128 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
7129 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
7130 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7131 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
7135 if (TREE_CODE (form) != INTEGER_CST)
7137 error ("argument 1 of __builtin_spe_predicate must be a constant");
7141 form_int = TREE_INT_CST_LOW (form);
7143 gcc_assert (mode0 == mode1);
7145 if (arg0 == error_mark_node || arg1 == error_mark_node)
7149 || GET_MODE (target) != SImode
7150 || ! (*insn_data[icode].operand[0].predicate) (target, SImode))
7151 target = gen_reg_rtx (SImode);
7153 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7154 op0 = copy_to_mode_reg (mode0, op0);
7155 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
7156 op1 = copy_to_mode_reg (mode1, op1);
7158 scratch = gen_reg_rtx (CCmode);
7160 pat = GEN_FCN (icode) (scratch, op0, op1);
7165 /* There are 4 variants for each predicate: _any_, _all_, _upper_,
7166 _lower_. We use one compare, but look in different bits of the
7167 CR for each variant.
7169 There are 2 elements in each SPE simd type (upper/lower). The CR
7170 bits are set as follows:
7172 BIT0 | BIT 1 | BIT 2 | BIT 3
7173 U | L | (U | L) | (U & L)
7175 So, for an "all" relationship, BIT 3 would be set.
7176 For an "any" relationship, BIT 2 would be set. Etc.
7178 Following traditional nomenclature, these bits map to:
7180 BIT0 | BIT 1 | BIT 2 | BIT 3
7183 Later, we will generate rtl to look in the LT/EQ/EQ/OV bits.
7188 /* All variant. OV bit. */
7190 /* We need to get to the OV bit, which is the ORDERED bit. We
7191 could generate (ordered:SI (reg:CC xx) (const_int 0)), but
7192 that's ugly and will make validate_condition_mode die.
7193 So let's just use another pattern. */
7194 emit_insn (gen_move_from_CR_ov_bit (target, scratch));
7196 /* Any variant. EQ bit. */
7200 /* Upper variant. LT bit. */
7204 /* Lower variant. GT bit. */
7209 error ("argument 1 of __builtin_spe_predicate is out of range");
7213 tmp = gen_rtx_fmt_ee (code, SImode, scratch, const0_rtx);
7214 emit_move_insn (target, tmp);
7219 /* The evsel builtins look like this:
7221 e = __builtin_spe_evsel_OP (a, b, c, d);
7225 e[upper] = a[upper] *OP* b[upper] ? c[upper] : d[upper];
7226 e[lower] = a[lower] *OP* b[lower] ? c[lower] : d[lower];
7230 spe_expand_evsel_builtin (enum insn_code icode, tree arglist, rtx target)
7233 tree arg0 = TREE_VALUE (arglist);
7234 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
7235 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
7236 tree arg3 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arglist))));
7237 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
7238 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
7239 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
7240 rtx op3 = expand_expr (arg3, NULL_RTX, VOIDmode, 0);
7241 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7242 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
7244 gcc_assert (mode0 == mode1);
7246 if (arg0 == error_mark_node || arg1 == error_mark_node
7247 || arg2 == error_mark_node || arg3 == error_mark_node)
7251 || GET_MODE (target) != mode0
7252 || ! (*insn_data[icode].operand[0].predicate) (target, mode0))
7253 target = gen_reg_rtx (mode0);
7255 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7256 op0 = copy_to_mode_reg (mode0, op0);
7257 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
7258 op1 = copy_to_mode_reg (mode0, op1);
7259 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
7260 op2 = copy_to_mode_reg (mode0, op2);
7261 if (! (*insn_data[icode].operand[1].predicate) (op3, mode1))
7262 op3 = copy_to_mode_reg (mode0, op3);
7264 /* Generate the compare. */
7265 scratch = gen_reg_rtx (CCmode);
7266 pat = GEN_FCN (icode) (scratch, op0, op1);
7271 if (mode0 == V2SImode)
7272 emit_insn (gen_spe_evsel (target, op2, op3, scratch));
7274 emit_insn (gen_spe_evsel_fs (target, op2, op3, scratch));
7279 /* Expand an expression EXP that calls a built-in function,
7280 with result going to TARGET if that's convenient
7281 (and in mode MODE if that's convenient).
7282 SUBTARGET may be used as the target for computing one of EXP's operands.
7283 IGNORE is nonzero if the value is to be ignored. */
7286 rs6000_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
7287 enum machine_mode mode ATTRIBUTE_UNUSED,
7288 int ignore ATTRIBUTE_UNUSED)
7290 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7291 tree arglist = TREE_OPERAND (exp, 1);
7292 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
7293 struct builtin_description *d;
7298 if (fcode == ALTIVEC_BUILTIN_MASK_FOR_LOAD
7299 || fcode == ALTIVEC_BUILTIN_MASK_FOR_STORE)
7301 int icode = (int) CODE_FOR_altivec_lvsr;
7302 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7303 enum machine_mode mode = insn_data[icode].operand[1].mode;
7307 gcc_assert (TARGET_ALTIVEC);
7309 arg = TREE_VALUE (arglist);
7310 gcc_assert (TREE_CODE (TREE_TYPE (arg)) == POINTER_TYPE);
7311 op = expand_expr (arg, NULL_RTX, Pmode, EXPAND_NORMAL);
7312 addr = memory_address (mode, op);
7313 if (fcode == ALTIVEC_BUILTIN_MASK_FOR_STORE)
7317 /* For the load case need to negate the address. */
7318 op = gen_reg_rtx (GET_MODE (addr));
7319 emit_insn (gen_rtx_SET (VOIDmode, op,
7320 gen_rtx_NEG (GET_MODE (addr), addr)));
7322 op = gen_rtx_MEM (mode, op);
7325 || GET_MODE (target) != tmode
7326 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7327 target = gen_reg_rtx (tmode);
7329 /*pat = gen_altivec_lvsr (target, op);*/
7330 pat = GEN_FCN (icode) (target, op);
7340 ret = altivec_expand_builtin (exp, target, &success);
7347 ret = spe_expand_builtin (exp, target, &success);
7353 gcc_assert (TARGET_ALTIVEC || TARGET_SPE);
7355 /* Handle simple unary operations. */
7356 d = (struct builtin_description *) bdesc_1arg;
7357 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
7358 if (d->code == fcode)
7359 return rs6000_expand_unop_builtin (d->icode, arglist, target);
7361 /* Handle simple binary operations. */
7362 d = (struct builtin_description *) bdesc_2arg;
7363 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
7364 if (d->code == fcode)
7365 return rs6000_expand_binop_builtin (d->icode, arglist, target);
7367 /* Handle simple ternary operations. */
7368 d = (struct builtin_description *) bdesc_3arg;
7369 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
7370 if (d->code == fcode)
7371 return rs6000_expand_ternop_builtin (d->icode, arglist, target);
7377 build_opaque_vector_type (tree node, int nunits)
7379 node = copy_node (node);
7380 TYPE_MAIN_VARIANT (node) = node;
7381 return build_vector_type (node, nunits);
7385 rs6000_init_builtins (void)
7387 V2SI_type_node = build_vector_type (intSI_type_node, 2);
7388 V2SF_type_node = build_vector_type (float_type_node, 2);
7389 V4HI_type_node = build_vector_type (intHI_type_node, 4);
7390 V4SI_type_node = build_vector_type (intSI_type_node, 4);
7391 V4SF_type_node = build_vector_type (float_type_node, 4);
7392 V8HI_type_node = build_vector_type (intHI_type_node, 8);
7393 V16QI_type_node = build_vector_type (intQI_type_node, 16);
7395 unsigned_V16QI_type_node = build_vector_type (unsigned_intQI_type_node, 16);
7396 unsigned_V8HI_type_node = build_vector_type (unsigned_intHI_type_node, 8);
7397 unsigned_V4SI_type_node = build_vector_type (unsigned_intSI_type_node, 4);
7399 opaque_V2SF_type_node = build_opaque_vector_type (float_type_node, 2);
7400 opaque_V2SI_type_node = build_opaque_vector_type (intSI_type_node, 2);
7401 opaque_p_V2SI_type_node = build_pointer_type (opaque_V2SI_type_node);
7402 opaque_V4SI_type_node = copy_node (V4SI_type_node);
7404 /* The 'vector bool ...' types must be kept distinct from 'vector unsigned ...'
7405 types, especially in C++ land. Similarly, 'vector pixel' is distinct from
7406 'vector unsigned short'. */
7408 bool_char_type_node = build_distinct_type_copy (unsigned_intQI_type_node);
7409 bool_short_type_node = build_distinct_type_copy (unsigned_intHI_type_node);
7410 bool_int_type_node = build_distinct_type_copy (unsigned_intSI_type_node);
7411 pixel_type_node = build_distinct_type_copy (unsigned_intHI_type_node);
7413 long_integer_type_internal_node = long_integer_type_node;
7414 long_unsigned_type_internal_node = long_unsigned_type_node;
7415 intQI_type_internal_node = intQI_type_node;
7416 uintQI_type_internal_node = unsigned_intQI_type_node;
7417 intHI_type_internal_node = intHI_type_node;
7418 uintHI_type_internal_node = unsigned_intHI_type_node;
7419 intSI_type_internal_node = intSI_type_node;
7420 uintSI_type_internal_node = unsigned_intSI_type_node;
7421 float_type_internal_node = float_type_node;
7422 void_type_internal_node = void_type_node;
7424 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7425 get_identifier ("__bool char"),
7426 bool_char_type_node));
7427 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7428 get_identifier ("__bool short"),
7429 bool_short_type_node));
7430 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7431 get_identifier ("__bool int"),
7432 bool_int_type_node));
7433 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7434 get_identifier ("__pixel"),
7437 bool_V16QI_type_node = build_vector_type (bool_char_type_node, 16);
7438 bool_V8HI_type_node = build_vector_type (bool_short_type_node, 8);
7439 bool_V4SI_type_node = build_vector_type (bool_int_type_node, 4);
7440 pixel_V8HI_type_node = build_vector_type (pixel_type_node, 8);
7442 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7443 get_identifier ("__vector unsigned char"),
7444 unsigned_V16QI_type_node));
7445 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7446 get_identifier ("__vector signed char"),
7448 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7449 get_identifier ("__vector __bool char"),
7450 bool_V16QI_type_node));
7452 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7453 get_identifier ("__vector unsigned short"),
7454 unsigned_V8HI_type_node));
7455 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7456 get_identifier ("__vector signed short"),
7458 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7459 get_identifier ("__vector __bool short"),
7460 bool_V8HI_type_node));
7462 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7463 get_identifier ("__vector unsigned int"),
7464 unsigned_V4SI_type_node));
7465 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7466 get_identifier ("__vector signed int"),
7468 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7469 get_identifier ("__vector __bool int"),
7470 bool_V4SI_type_node));
7472 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7473 get_identifier ("__vector float"),
7475 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7476 get_identifier ("__vector __pixel"),
7477 pixel_V8HI_type_node));
7480 spe_init_builtins ();
7482 altivec_init_builtins ();
7483 if (TARGET_ALTIVEC || TARGET_SPE)
7484 rs6000_common_init_builtins ();
7487 /* Search through a set of builtins and enable the mask bits.
7488 DESC is an array of builtins.
7489 SIZE is the total number of builtins.
7490 START is the builtin enum at which to start.
7491 END is the builtin enum at which to end. */
7493 enable_mask_for_builtins (struct builtin_description *desc, int size,
7494 enum rs6000_builtins start,
7495 enum rs6000_builtins end)
7499 for (i = 0; i < size; ++i)
7500 if (desc[i].code == start)
7506 for (; i < size; ++i)
7508 /* Flip all the bits on. */
7509 desc[i].mask = target_flags;
7510 if (desc[i].code == end)
7516 spe_init_builtins (void)
7518 tree endlink = void_list_node;
7519 tree puint_type_node = build_pointer_type (unsigned_type_node);
7520 tree pushort_type_node = build_pointer_type (short_unsigned_type_node);
7521 struct builtin_description *d;
7524 tree v2si_ftype_4_v2si
7525 = build_function_type
7526 (opaque_V2SI_type_node,
7527 tree_cons (NULL_TREE, opaque_V2SI_type_node,
7528 tree_cons (NULL_TREE, opaque_V2SI_type_node,
7529 tree_cons (NULL_TREE, opaque_V2SI_type_node,
7530 tree_cons (NULL_TREE, opaque_V2SI_type_node,
7533 tree v2sf_ftype_4_v2sf
7534 = build_function_type
7535 (opaque_V2SF_type_node,
7536 tree_cons (NULL_TREE, opaque_V2SF_type_node,
7537 tree_cons (NULL_TREE, opaque_V2SF_type_node,
7538 tree_cons (NULL_TREE, opaque_V2SF_type_node,
7539 tree_cons (NULL_TREE, opaque_V2SF_type_node,
7542 tree int_ftype_int_v2si_v2si
7543 = build_function_type
7545 tree_cons (NULL_TREE, integer_type_node,
7546 tree_cons (NULL_TREE, opaque_V2SI_type_node,
7547 tree_cons (NULL_TREE, opaque_V2SI_type_node,
7550 tree int_ftype_int_v2sf_v2sf
7551 = build_function_type
7553 tree_cons (NULL_TREE, integer_type_node,
7554 tree_cons (NULL_TREE, opaque_V2SF_type_node,
7555 tree_cons (NULL_TREE, opaque_V2SF_type_node,
7558 tree void_ftype_v2si_puint_int
7559 = build_function_type (void_type_node,
7560 tree_cons (NULL_TREE, opaque_V2SI_type_node,
7561 tree_cons (NULL_TREE, puint_type_node,
7562 tree_cons (NULL_TREE,
7566 tree void_ftype_v2si_puint_char
7567 = build_function_type (void_type_node,
7568 tree_cons (NULL_TREE, opaque_V2SI_type_node,
7569 tree_cons (NULL_TREE, puint_type_node,
7570 tree_cons (NULL_TREE,
7574 tree void_ftype_v2si_pv2si_int
7575 = build_function_type (void_type_node,
7576 tree_cons (NULL_TREE, opaque_V2SI_type_node,
7577 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
7578 tree_cons (NULL_TREE,
7582 tree void_ftype_v2si_pv2si_char
7583 = build_function_type (void_type_node,
7584 tree_cons (NULL_TREE, opaque_V2SI_type_node,
7585 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
7586 tree_cons (NULL_TREE,
7591 = build_function_type (void_type_node,
7592 tree_cons (NULL_TREE, integer_type_node, endlink));
7595 = build_function_type (integer_type_node, endlink);
7597 tree v2si_ftype_pv2si_int
7598 = build_function_type (opaque_V2SI_type_node,
7599 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
7600 tree_cons (NULL_TREE, integer_type_node,
7603 tree v2si_ftype_puint_int
7604 = build_function_type (opaque_V2SI_type_node,
7605 tree_cons (NULL_TREE, puint_type_node,
7606 tree_cons (NULL_TREE, integer_type_node,
7609 tree v2si_ftype_pushort_int
7610 = build_function_type (opaque_V2SI_type_node,
7611 tree_cons (NULL_TREE, pushort_type_node,
7612 tree_cons (NULL_TREE, integer_type_node,
7615 tree v2si_ftype_signed_char
7616 = build_function_type (opaque_V2SI_type_node,
7617 tree_cons (NULL_TREE, signed_char_type_node,
7620 /* The initialization of the simple binary and unary builtins is
7621 done in rs6000_common_init_builtins, but we have to enable the
7622 mask bits here manually because we have run out of `target_flags'
7623 bits. We really need to redesign this mask business. */
7625 enable_mask_for_builtins ((struct builtin_description *) bdesc_2arg,
7626 ARRAY_SIZE (bdesc_2arg),
7629 enable_mask_for_builtins ((struct builtin_description *) bdesc_1arg,
7630 ARRAY_SIZE (bdesc_1arg),
7632 SPE_BUILTIN_EVSUBFUSIAAW);
7633 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_predicates,
7634 ARRAY_SIZE (bdesc_spe_predicates),
7635 SPE_BUILTIN_EVCMPEQ,
7636 SPE_BUILTIN_EVFSTSTLT);
7637 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_evsel,
7638 ARRAY_SIZE (bdesc_spe_evsel),
7639 SPE_BUILTIN_EVSEL_CMPGTS,
7640 SPE_BUILTIN_EVSEL_FSTSTEQ);
7642 (*lang_hooks.decls.pushdecl)
7643 (build_decl (TYPE_DECL, get_identifier ("__ev64_opaque__"),
7644 opaque_V2SI_type_node));
7646 /* Initialize irregular SPE builtins. */
7648 def_builtin (target_flags, "__builtin_spe_mtspefscr", void_ftype_int, SPE_BUILTIN_MTSPEFSCR);
7649 def_builtin (target_flags, "__builtin_spe_mfspefscr", int_ftype_void, SPE_BUILTIN_MFSPEFSCR);
7650 def_builtin (target_flags, "__builtin_spe_evstddx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDDX);
7651 def_builtin (target_flags, "__builtin_spe_evstdhx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDHX);
7652 def_builtin (target_flags, "__builtin_spe_evstdwx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDWX);
7653 def_builtin (target_flags, "__builtin_spe_evstwhex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHEX);
7654 def_builtin (target_flags, "__builtin_spe_evstwhox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHOX);
7655 def_builtin (target_flags, "__builtin_spe_evstwwex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWEX);
7656 def_builtin (target_flags, "__builtin_spe_evstwwox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWOX);
7657 def_builtin (target_flags, "__builtin_spe_evstdd", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDD);
7658 def_builtin (target_flags, "__builtin_spe_evstdh", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDH);
7659 def_builtin (target_flags, "__builtin_spe_evstdw", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDW);
7660 def_builtin (target_flags, "__builtin_spe_evstwhe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHE);
7661 def_builtin (target_flags, "__builtin_spe_evstwho", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHO);
7662 def_builtin (target_flags, "__builtin_spe_evstwwe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWE);
7663 def_builtin (target_flags, "__builtin_spe_evstwwo", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWO);
7664 def_builtin (target_flags, "__builtin_spe_evsplatfi", v2si_ftype_signed_char, SPE_BUILTIN_EVSPLATFI);
7665 def_builtin (target_flags, "__builtin_spe_evsplati", v2si_ftype_signed_char, SPE_BUILTIN_EVSPLATI);
7668 def_builtin (target_flags, "__builtin_spe_evlddx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDDX);
7669 def_builtin (target_flags, "__builtin_spe_evldwx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDWX);
7670 def_builtin (target_flags, "__builtin_spe_evldhx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDHX);
7671 def_builtin (target_flags, "__builtin_spe_evlwhex", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHEX);
7672 def_builtin (target_flags, "__builtin_spe_evlwhoux", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOUX);
7673 def_builtin (target_flags, "__builtin_spe_evlwhosx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOSX);
7674 def_builtin (target_flags, "__builtin_spe_evlwwsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLATX);
7675 def_builtin (target_flags, "__builtin_spe_evlwhsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLATX);
7676 def_builtin (target_flags, "__builtin_spe_evlhhesplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLATX);
7677 def_builtin (target_flags, "__builtin_spe_evlhhousplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLATX);
7678 def_builtin (target_flags, "__builtin_spe_evlhhossplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLATX);
7679 def_builtin (target_flags, "__builtin_spe_evldd", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDD);
7680 def_builtin (target_flags, "__builtin_spe_evldw", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDW);
7681 def_builtin (target_flags, "__builtin_spe_evldh", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDH);
7682 def_builtin (target_flags, "__builtin_spe_evlhhesplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLAT);
7683 def_builtin (target_flags, "__builtin_spe_evlhhossplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLAT);
7684 def_builtin (target_flags, "__builtin_spe_evlhhousplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLAT);
7685 def_builtin (target_flags, "__builtin_spe_evlwhe", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHE);
7686 def_builtin (target_flags, "__builtin_spe_evlwhos", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOS);
7687 def_builtin (target_flags, "__builtin_spe_evlwhou", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOU);
7688 def_builtin (target_flags, "__builtin_spe_evlwhsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLAT);
7689 def_builtin (target_flags, "__builtin_spe_evlwwsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLAT);
7692 d = (struct builtin_description *) bdesc_spe_predicates;
7693 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, d++)
7697 switch (insn_data[d->icode].operand[1].mode)
7700 type = int_ftype_int_v2si_v2si;
7703 type = int_ftype_int_v2sf_v2sf;
7709 def_builtin (d->mask, d->name, type, d->code);
7712 /* Evsel predicates. */
7713 d = (struct builtin_description *) bdesc_spe_evsel;
7714 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, d++)
7718 switch (insn_data[d->icode].operand[1].mode)
7721 type = v2si_ftype_4_v2si;
7724 type = v2sf_ftype_4_v2sf;
7730 def_builtin (d->mask, d->name, type, d->code);
7735 altivec_init_builtins (void)
7737 struct builtin_description *d;
7738 struct builtin_description_predicates *dp;
7740 tree pfloat_type_node = build_pointer_type (float_type_node);
7741 tree pint_type_node = build_pointer_type (integer_type_node);
7742 tree pshort_type_node = build_pointer_type (short_integer_type_node);
7743 tree pchar_type_node = build_pointer_type (char_type_node);
7745 tree pvoid_type_node = build_pointer_type (void_type_node);
7747 tree pcfloat_type_node = build_pointer_type (build_qualified_type (float_type_node, TYPE_QUAL_CONST));
7748 tree pcint_type_node = build_pointer_type (build_qualified_type (integer_type_node, TYPE_QUAL_CONST));
7749 tree pcshort_type_node = build_pointer_type (build_qualified_type (short_integer_type_node, TYPE_QUAL_CONST));
7750 tree pcchar_type_node = build_pointer_type (build_qualified_type (char_type_node, TYPE_QUAL_CONST));
7752 tree pcvoid_type_node = build_pointer_type (build_qualified_type (void_type_node, TYPE_QUAL_CONST));
7754 tree int_ftype_opaque
7755 = build_function_type_list (integer_type_node,
7756 opaque_V4SI_type_node, NULL_TREE);
7758 tree opaque_ftype_opaque_int
7759 = build_function_type_list (opaque_V4SI_type_node,
7760 opaque_V4SI_type_node, integer_type_node, NULL_TREE);
7761 tree opaque_ftype_opaque_opaque_int
7762 = build_function_type_list (opaque_V4SI_type_node,
7763 opaque_V4SI_type_node, opaque_V4SI_type_node,
7764 integer_type_node, NULL_TREE);
7765 tree int_ftype_int_opaque_opaque
7766 = build_function_type_list (integer_type_node,
7767 integer_type_node, opaque_V4SI_type_node,
7768 opaque_V4SI_type_node, NULL_TREE);
7769 tree int_ftype_int_v4si_v4si
7770 = build_function_type_list (integer_type_node,
7771 integer_type_node, V4SI_type_node,
7772 V4SI_type_node, NULL_TREE);
7773 tree v4sf_ftype_pcfloat
7774 = build_function_type_list (V4SF_type_node, pcfloat_type_node, NULL_TREE);
7775 tree void_ftype_pfloat_v4sf
7776 = build_function_type_list (void_type_node,
7777 pfloat_type_node, V4SF_type_node, NULL_TREE);
7778 tree v4si_ftype_pcint
7779 = build_function_type_list (V4SI_type_node, pcint_type_node, NULL_TREE);
7780 tree void_ftype_pint_v4si
7781 = build_function_type_list (void_type_node,
7782 pint_type_node, V4SI_type_node, NULL_TREE);
7783 tree v8hi_ftype_pcshort
7784 = build_function_type_list (V8HI_type_node, pcshort_type_node, NULL_TREE);
7785 tree void_ftype_pshort_v8hi
7786 = build_function_type_list (void_type_node,
7787 pshort_type_node, V8HI_type_node, NULL_TREE);
7788 tree v16qi_ftype_pcchar
7789 = build_function_type_list (V16QI_type_node, pcchar_type_node, NULL_TREE);
7790 tree void_ftype_pchar_v16qi
7791 = build_function_type_list (void_type_node,
7792 pchar_type_node, V16QI_type_node, NULL_TREE);
7793 tree void_ftype_v4si
7794 = build_function_type_list (void_type_node, V4SI_type_node, NULL_TREE);
7795 tree v8hi_ftype_void
7796 = build_function_type (V8HI_type_node, void_list_node);
7797 tree void_ftype_void
7798 = build_function_type (void_type_node, void_list_node);
7800 = build_function_type_list (void_type_node, integer_type_node, NULL_TREE);
7802 tree opaque_ftype_long_pcvoid
7803 = build_function_type_list (opaque_V4SI_type_node,
7804 long_integer_type_node, pcvoid_type_node, NULL_TREE);
7805 tree v16qi_ftype_long_pcvoid
7806 = build_function_type_list (V16QI_type_node,
7807 long_integer_type_node, pcvoid_type_node, NULL_TREE);
7808 tree v8hi_ftype_long_pcvoid
7809 = build_function_type_list (V8HI_type_node,
7810 long_integer_type_node, pcvoid_type_node, NULL_TREE);
7811 tree v4si_ftype_long_pcvoid
7812 = build_function_type_list (V4SI_type_node,
7813 long_integer_type_node, pcvoid_type_node, NULL_TREE);
7815 tree void_ftype_opaque_long_pvoid
7816 = build_function_type_list (void_type_node,
7817 opaque_V4SI_type_node, long_integer_type_node,
7818 pvoid_type_node, NULL_TREE);
7819 tree void_ftype_v4si_long_pvoid
7820 = build_function_type_list (void_type_node,
7821 V4SI_type_node, long_integer_type_node,
7822 pvoid_type_node, NULL_TREE);
7823 tree void_ftype_v16qi_long_pvoid
7824 = build_function_type_list (void_type_node,
7825 V16QI_type_node, long_integer_type_node,
7826 pvoid_type_node, NULL_TREE);
7827 tree void_ftype_v8hi_long_pvoid
7828 = build_function_type_list (void_type_node,
7829 V8HI_type_node, long_integer_type_node,
7830 pvoid_type_node, NULL_TREE);
7831 tree int_ftype_int_v8hi_v8hi
7832 = build_function_type_list (integer_type_node,
7833 integer_type_node, V8HI_type_node,
7834 V8HI_type_node, NULL_TREE);
7835 tree int_ftype_int_v16qi_v16qi
7836 = build_function_type_list (integer_type_node,
7837 integer_type_node, V16QI_type_node,
7838 V16QI_type_node, NULL_TREE);
7839 tree int_ftype_int_v4sf_v4sf
7840 = build_function_type_list (integer_type_node,
7841 integer_type_node, V4SF_type_node,
7842 V4SF_type_node, NULL_TREE);
7843 tree v4si_ftype_v4si
7844 = build_function_type_list (V4SI_type_node, V4SI_type_node, NULL_TREE);
7845 tree v8hi_ftype_v8hi
7846 = build_function_type_list (V8HI_type_node, V8HI_type_node, NULL_TREE);
7847 tree v16qi_ftype_v16qi
7848 = build_function_type_list (V16QI_type_node, V16QI_type_node, NULL_TREE);
7849 tree v4sf_ftype_v4sf
7850 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
7851 tree void_ftype_pcvoid_int_int
7852 = build_function_type_list (void_type_node,
7853 pcvoid_type_node, integer_type_node,
7854 integer_type_node, NULL_TREE);
7856 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pcfloat,
7857 ALTIVEC_BUILTIN_LD_INTERNAL_4sf);
7858 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf,
7859 ALTIVEC_BUILTIN_ST_INTERNAL_4sf);
7860 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4si", v4si_ftype_pcint,
7861 ALTIVEC_BUILTIN_LD_INTERNAL_4si);
7862 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si,
7863 ALTIVEC_BUILTIN_ST_INTERNAL_4si);
7864 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pcshort,
7865 ALTIVEC_BUILTIN_LD_INTERNAL_8hi);
7866 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi,
7867 ALTIVEC_BUILTIN_ST_INTERNAL_8hi);
7868 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pcchar,
7869 ALTIVEC_BUILTIN_LD_INTERNAL_16qi);
7870 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi,
7871 ALTIVEC_BUILTIN_ST_INTERNAL_16qi);
7872 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mtvscr", void_ftype_v4si, ALTIVEC_BUILTIN_MTVSCR);
7873 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mfvscr", v8hi_ftype_void, ALTIVEC_BUILTIN_MFVSCR);
7874 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dssall", void_ftype_void, ALTIVEC_BUILTIN_DSSALL);
7875 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dss", void_ftype_int, ALTIVEC_BUILTIN_DSS);
7876 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVSL);
7877 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsr", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVSR);
7878 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvebx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEBX);
7879 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvehx", v8hi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEHX);
7880 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvewx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEWX);
7881 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvxl", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVXL);
7882 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVX);
7883 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvx", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVX);
7884 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvewx", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVEWX);
7885 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvxl", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVXL);
7886 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvebx", void_ftype_v16qi_long_pvoid, ALTIVEC_BUILTIN_STVEBX);
7887 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvehx", void_ftype_v8hi_long_pvoid, ALTIVEC_BUILTIN_STVEHX);
7888 def_builtin (MASK_ALTIVEC, "__builtin_vec_ld", opaque_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LD);
7889 def_builtin (MASK_ALTIVEC, "__builtin_vec_lde", opaque_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LDE);
7890 def_builtin (MASK_ALTIVEC, "__builtin_vec_ldl", opaque_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LDL);
7891 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvsl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVSL);
7892 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvsr", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVSR);
7893 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvebx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVEBX);
7894 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvehx", v8hi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVEHX);
7895 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvewx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVEWX);
7896 def_builtin (MASK_ALTIVEC, "__builtin_vec_st", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_ST);
7897 def_builtin (MASK_ALTIVEC, "__builtin_vec_ste", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STE);
7898 def_builtin (MASK_ALTIVEC, "__builtin_vec_stl", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STL);
7899 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvewx", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STVEWX);
7900 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvebx", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STVEBX);
7901 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvehx", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STVEHX);
7903 def_builtin (MASK_ALTIVEC, "__builtin_vec_step", int_ftype_opaque, ALTIVEC_BUILTIN_VEC_STEP);
7905 def_builtin (MASK_ALTIVEC, "__builtin_vec_sld", opaque_ftype_opaque_opaque_int, ALTIVEC_BUILTIN_VEC_SLD);
7906 def_builtin (MASK_ALTIVEC, "__builtin_vec_splat", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_SPLAT);
7907 def_builtin (MASK_ALTIVEC, "__builtin_vec_vspltw", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VSPLTW);
7908 def_builtin (MASK_ALTIVEC, "__builtin_vec_vsplth", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VSPLTH);
7909 def_builtin (MASK_ALTIVEC, "__builtin_vec_vspltb", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VSPLTB);
7910 def_builtin (MASK_ALTIVEC, "__builtin_vec_ctf", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_CTF);
7911 def_builtin (MASK_ALTIVEC, "__builtin_vec_vcfsx", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VCFSX);
7912 def_builtin (MASK_ALTIVEC, "__builtin_vec_vcfux", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VCFUX);
7913 def_builtin (MASK_ALTIVEC, "__builtin_vec_cts", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_CTS);
7914 def_builtin (MASK_ALTIVEC, "__builtin_vec_ctu", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_CTU);
7916 /* Add the DST variants. */
7917 d = (struct builtin_description *) bdesc_dst;
7918 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
7919 def_builtin (d->mask, d->name, void_ftype_pcvoid_int_int, d->code);
7921 /* Initialize the predicates. */
7922 dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
7923 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
7925 enum machine_mode mode1;
7927 bool is_overloaded = dp->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
7928 && dp->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
7933 mode1 = insn_data[dp->icode].operand[1].mode;
7938 type = int_ftype_int_opaque_opaque;
7941 type = int_ftype_int_v4si_v4si;
7944 type = int_ftype_int_v8hi_v8hi;
7947 type = int_ftype_int_v16qi_v16qi;
7950 type = int_ftype_int_v4sf_v4sf;
7956 def_builtin (dp->mask, dp->name, type, dp->code);
7959 /* Initialize the abs* operators. */
7960 d = (struct builtin_description *) bdesc_abs;
7961 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
7963 enum machine_mode mode0;
7966 mode0 = insn_data[d->icode].operand[0].mode;
7971 type = v4si_ftype_v4si;
7974 type = v8hi_ftype_v8hi;
7977 type = v16qi_ftype_v16qi;
7980 type = v4sf_ftype_v4sf;
7986 def_builtin (d->mask, d->name, type, d->code);
7993 /* Initialize target builtin that implements
7994 targetm.vectorize.builtin_mask_for_load. */
7996 decl = lang_hooks.builtin_function ("__builtin_altivec_mask_for_load",
7997 v16qi_ftype_long_pcvoid,
7998 ALTIVEC_BUILTIN_MASK_FOR_LOAD,
8000 tree_cons (get_identifier ("const"),
8001 NULL_TREE, NULL_TREE));
8002 /* Record the decl. Will be used by rs6000_builtin_mask_for_load. */
8003 altivec_builtin_mask_for_load = decl;
8008 rs6000_common_init_builtins (void)
8010 struct builtin_description *d;
8013 tree v4sf_ftype_v4sf_v4sf_v16qi
8014 = build_function_type_list (V4SF_type_node,
8015 V4SF_type_node, V4SF_type_node,
8016 V16QI_type_node, NULL_TREE);
8017 tree v4si_ftype_v4si_v4si_v16qi
8018 = build_function_type_list (V4SI_type_node,
8019 V4SI_type_node, V4SI_type_node,
8020 V16QI_type_node, NULL_TREE);
8021 tree v8hi_ftype_v8hi_v8hi_v16qi
8022 = build_function_type_list (V8HI_type_node,
8023 V8HI_type_node, V8HI_type_node,
8024 V16QI_type_node, NULL_TREE);
8025 tree v16qi_ftype_v16qi_v16qi_v16qi
8026 = build_function_type_list (V16QI_type_node,
8027 V16QI_type_node, V16QI_type_node,
8028 V16QI_type_node, NULL_TREE);
8030 = build_function_type_list (V4SI_type_node, integer_type_node, NULL_TREE);
8032 = build_function_type_list (V8HI_type_node, integer_type_node, NULL_TREE);
8033 tree v16qi_ftype_int
8034 = build_function_type_list (V16QI_type_node, integer_type_node, NULL_TREE);
8035 tree v8hi_ftype_v16qi
8036 = build_function_type_list (V8HI_type_node, V16QI_type_node, NULL_TREE);
8037 tree v4sf_ftype_v4sf
8038 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
8040 tree v2si_ftype_v2si_v2si
8041 = build_function_type_list (opaque_V2SI_type_node,
8042 opaque_V2SI_type_node,
8043 opaque_V2SI_type_node, NULL_TREE);
8045 tree v2sf_ftype_v2sf_v2sf
8046 = build_function_type_list (opaque_V2SF_type_node,
8047 opaque_V2SF_type_node,
8048 opaque_V2SF_type_node, NULL_TREE);
8050 tree v2si_ftype_int_int
8051 = build_function_type_list (opaque_V2SI_type_node,
8052 integer_type_node, integer_type_node,
8055 tree opaque_ftype_opaque
8056 = build_function_type_list (opaque_V4SI_type_node,
8057 opaque_V4SI_type_node, NULL_TREE);
8059 tree v2si_ftype_v2si
8060 = build_function_type_list (opaque_V2SI_type_node,
8061 opaque_V2SI_type_node, NULL_TREE);
8063 tree v2sf_ftype_v2sf
8064 = build_function_type_list (opaque_V2SF_type_node,
8065 opaque_V2SF_type_node, NULL_TREE);
8067 tree v2sf_ftype_v2si
8068 = build_function_type_list (opaque_V2SF_type_node,
8069 opaque_V2SI_type_node, NULL_TREE);
8071 tree v2si_ftype_v2sf
8072 = build_function_type_list (opaque_V2SI_type_node,
8073 opaque_V2SF_type_node, NULL_TREE);
8075 tree v2si_ftype_v2si_char
8076 = build_function_type_list (opaque_V2SI_type_node,
8077 opaque_V2SI_type_node,
8078 char_type_node, NULL_TREE);
8080 tree v2si_ftype_int_char
8081 = build_function_type_list (opaque_V2SI_type_node,
8082 integer_type_node, char_type_node, NULL_TREE);
8084 tree v2si_ftype_char
8085 = build_function_type_list (opaque_V2SI_type_node,
8086 char_type_node, NULL_TREE);
8088 tree int_ftype_int_int
8089 = build_function_type_list (integer_type_node,
8090 integer_type_node, integer_type_node,
8093 tree opaque_ftype_opaque_opaque
8094 = build_function_type_list (opaque_V4SI_type_node,
8095 opaque_V4SI_type_node, opaque_V4SI_type_node, NULL_TREE);
8096 tree v4si_ftype_v4si_v4si
8097 = build_function_type_list (V4SI_type_node,
8098 V4SI_type_node, V4SI_type_node, NULL_TREE);
8099 tree v4sf_ftype_v4si_int
8100 = build_function_type_list (V4SF_type_node,
8101 V4SI_type_node, integer_type_node, NULL_TREE);
8102 tree v4si_ftype_v4sf_int
8103 = build_function_type_list (V4SI_type_node,
8104 V4SF_type_node, integer_type_node, NULL_TREE);
8105 tree v4si_ftype_v4si_int
8106 = build_function_type_list (V4SI_type_node,
8107 V4SI_type_node, integer_type_node, NULL_TREE);
8108 tree v8hi_ftype_v8hi_int
8109 = build_function_type_list (V8HI_type_node,
8110 V8HI_type_node, integer_type_node, NULL_TREE);
8111 tree v16qi_ftype_v16qi_int
8112 = build_function_type_list (V16QI_type_node,
8113 V16QI_type_node, integer_type_node, NULL_TREE);
8114 tree v16qi_ftype_v16qi_v16qi_int
8115 = build_function_type_list (V16QI_type_node,
8116 V16QI_type_node, V16QI_type_node,
8117 integer_type_node, NULL_TREE);
8118 tree v8hi_ftype_v8hi_v8hi_int
8119 = build_function_type_list (V8HI_type_node,
8120 V8HI_type_node, V8HI_type_node,
8121 integer_type_node, NULL_TREE);
8122 tree v4si_ftype_v4si_v4si_int
8123 = build_function_type_list (V4SI_type_node,
8124 V4SI_type_node, V4SI_type_node,
8125 integer_type_node, NULL_TREE);
8126 tree v4sf_ftype_v4sf_v4sf_int
8127 = build_function_type_list (V4SF_type_node,
8128 V4SF_type_node, V4SF_type_node,
8129 integer_type_node, NULL_TREE);
8130 tree v4sf_ftype_v4sf_v4sf
8131 = build_function_type_list (V4SF_type_node,
8132 V4SF_type_node, V4SF_type_node, NULL_TREE);
8133 tree opaque_ftype_opaque_opaque_opaque
8134 = build_function_type_list (opaque_V4SI_type_node,
8135 opaque_V4SI_type_node, opaque_V4SI_type_node,
8136 opaque_V4SI_type_node, NULL_TREE);
8137 tree v4sf_ftype_v4sf_v4sf_v4si
8138 = build_function_type_list (V4SF_type_node,
8139 V4SF_type_node, V4SF_type_node,
8140 V4SI_type_node, NULL_TREE);
8141 tree v4sf_ftype_v4sf_v4sf_v4sf
8142 = build_function_type_list (V4SF_type_node,
8143 V4SF_type_node, V4SF_type_node,
8144 V4SF_type_node, NULL_TREE);
8145 tree v4si_ftype_v4si_v4si_v4si
8146 = build_function_type_list (V4SI_type_node,
8147 V4SI_type_node, V4SI_type_node,
8148 V4SI_type_node, NULL_TREE);
8149 tree v8hi_ftype_v8hi_v8hi
8150 = build_function_type_list (V8HI_type_node,
8151 V8HI_type_node, V8HI_type_node, NULL_TREE);
8152 tree v8hi_ftype_v8hi_v8hi_v8hi
8153 = build_function_type_list (V8HI_type_node,
8154 V8HI_type_node, V8HI_type_node,
8155 V8HI_type_node, NULL_TREE);
8156 tree v4si_ftype_v8hi_v8hi_v4si
8157 = build_function_type_list (V4SI_type_node,
8158 V8HI_type_node, V8HI_type_node,
8159 V4SI_type_node, NULL_TREE);
8160 tree v4si_ftype_v16qi_v16qi_v4si
8161 = build_function_type_list (V4SI_type_node,
8162 V16QI_type_node, V16QI_type_node,
8163 V4SI_type_node, NULL_TREE);
8164 tree v16qi_ftype_v16qi_v16qi
8165 = build_function_type_list (V16QI_type_node,
8166 V16QI_type_node, V16QI_type_node, NULL_TREE);
8167 tree v4si_ftype_v4sf_v4sf
8168 = build_function_type_list (V4SI_type_node,
8169 V4SF_type_node, V4SF_type_node, NULL_TREE);
8170 tree v8hi_ftype_v16qi_v16qi
8171 = build_function_type_list (V8HI_type_node,
8172 V16QI_type_node, V16QI_type_node, NULL_TREE);
8173 tree v4si_ftype_v8hi_v8hi
8174 = build_function_type_list (V4SI_type_node,
8175 V8HI_type_node, V8HI_type_node, NULL_TREE);
8176 tree v8hi_ftype_v4si_v4si
8177 = build_function_type_list (V8HI_type_node,
8178 V4SI_type_node, V4SI_type_node, NULL_TREE);
8179 tree v16qi_ftype_v8hi_v8hi
8180 = build_function_type_list (V16QI_type_node,
8181 V8HI_type_node, V8HI_type_node, NULL_TREE);
8182 tree v4si_ftype_v16qi_v4si
8183 = build_function_type_list (V4SI_type_node,
8184 V16QI_type_node, V4SI_type_node, NULL_TREE);
8185 tree v4si_ftype_v16qi_v16qi
8186 = build_function_type_list (V4SI_type_node,
8187 V16QI_type_node, V16QI_type_node, NULL_TREE);
8188 tree v4si_ftype_v8hi_v4si
8189 = build_function_type_list (V4SI_type_node,
8190 V8HI_type_node, V4SI_type_node, NULL_TREE);
8191 tree v4si_ftype_v8hi
8192 = build_function_type_list (V4SI_type_node, V8HI_type_node, NULL_TREE);
8193 tree int_ftype_v4si_v4si
8194 = build_function_type_list (integer_type_node,
8195 V4SI_type_node, V4SI_type_node, NULL_TREE);
8196 tree int_ftype_v4sf_v4sf
8197 = build_function_type_list (integer_type_node,
8198 V4SF_type_node, V4SF_type_node, NULL_TREE);
8199 tree int_ftype_v16qi_v16qi
8200 = build_function_type_list (integer_type_node,
8201 V16QI_type_node, V16QI_type_node, NULL_TREE);
8202 tree int_ftype_v8hi_v8hi
8203 = build_function_type_list (integer_type_node,
8204 V8HI_type_node, V8HI_type_node, NULL_TREE);
8206 /* Add the simple ternary operators. */
8207 d = (struct builtin_description *) bdesc_3arg;
8208 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
8210 enum machine_mode mode0, mode1, mode2, mode3;
8212 bool is_overloaded = d->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
8213 && d->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
8224 if (d->name == 0 || d->icode == CODE_FOR_nothing)
8227 mode0 = insn_data[d->icode].operand[0].mode;
8228 mode1 = insn_data[d->icode].operand[1].mode;
8229 mode2 = insn_data[d->icode].operand[2].mode;
8230 mode3 = insn_data[d->icode].operand[3].mode;
8233 /* When all four are of the same mode. */
8234 if (mode0 == mode1 && mode1 == mode2 && mode2 == mode3)
8239 type = opaque_ftype_opaque_opaque_opaque;
8242 type = v4si_ftype_v4si_v4si_v4si;
8245 type = v4sf_ftype_v4sf_v4sf_v4sf;
8248 type = v8hi_ftype_v8hi_v8hi_v8hi;
8251 type = v16qi_ftype_v16qi_v16qi_v16qi;
8257 else if (mode0 == mode1 && mode1 == mode2 && mode3 == V16QImode)
8262 type = v4si_ftype_v4si_v4si_v16qi;
8265 type = v4sf_ftype_v4sf_v4sf_v16qi;
8268 type = v8hi_ftype_v8hi_v8hi_v16qi;
8271 type = v16qi_ftype_v16qi_v16qi_v16qi;
8277 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode
8278 && mode3 == V4SImode)
8279 type = v4si_ftype_v16qi_v16qi_v4si;
8280 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode
8281 && mode3 == V4SImode)
8282 type = v4si_ftype_v8hi_v8hi_v4si;
8283 else if (mode0 == V4SFmode && mode1 == V4SFmode && mode2 == V4SFmode
8284 && mode3 == V4SImode)
8285 type = v4sf_ftype_v4sf_v4sf_v4si;
8287 /* vchar, vchar, vchar, 4 bit literal. */
8288 else if (mode0 == V16QImode && mode1 == mode0 && mode2 == mode0
8290 type = v16qi_ftype_v16qi_v16qi_int;
8292 /* vshort, vshort, vshort, 4 bit literal. */
8293 else if (mode0 == V8HImode && mode1 == mode0 && mode2 == mode0
8295 type = v8hi_ftype_v8hi_v8hi_int;
8297 /* vint, vint, vint, 4 bit literal. */
8298 else if (mode0 == V4SImode && mode1 == mode0 && mode2 == mode0
8300 type = v4si_ftype_v4si_v4si_int;
8302 /* vfloat, vfloat, vfloat, 4 bit literal. */
8303 else if (mode0 == V4SFmode && mode1 == mode0 && mode2 == mode0
8305 type = v4sf_ftype_v4sf_v4sf_int;
8310 def_builtin (d->mask, d->name, type, d->code);
8313 /* Add the simple binary operators. */
8314 d = (struct builtin_description *) bdesc_2arg;
8315 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
8317 enum machine_mode mode0, mode1, mode2;
8319 bool is_overloaded = d->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
8320 && d->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
8330 if (d->name == 0 || d->icode == CODE_FOR_nothing)
8333 mode0 = insn_data[d->icode].operand[0].mode;
8334 mode1 = insn_data[d->icode].operand[1].mode;
8335 mode2 = insn_data[d->icode].operand[2].mode;
8338 /* When all three operands are of the same mode. */
8339 if (mode0 == mode1 && mode1 == mode2)
8344 type = opaque_ftype_opaque_opaque;
8347 type = v4sf_ftype_v4sf_v4sf;
8350 type = v4si_ftype_v4si_v4si;
8353 type = v16qi_ftype_v16qi_v16qi;
8356 type = v8hi_ftype_v8hi_v8hi;
8359 type = v2si_ftype_v2si_v2si;
8362 type = v2sf_ftype_v2sf_v2sf;
8365 type = int_ftype_int_int;
8372 /* A few other combos we really don't want to do manually. */
8374 /* vint, vfloat, vfloat. */
8375 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == V4SFmode)
8376 type = v4si_ftype_v4sf_v4sf;
8378 /* vshort, vchar, vchar. */
8379 else if (mode0 == V8HImode && mode1 == V16QImode && mode2 == V16QImode)
8380 type = v8hi_ftype_v16qi_v16qi;
8382 /* vint, vshort, vshort. */
8383 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode)
8384 type = v4si_ftype_v8hi_v8hi;
8386 /* vshort, vint, vint. */
8387 else if (mode0 == V8HImode && mode1 == V4SImode && mode2 == V4SImode)
8388 type = v8hi_ftype_v4si_v4si;
8390 /* vchar, vshort, vshort. */
8391 else if (mode0 == V16QImode && mode1 == V8HImode && mode2 == V8HImode)
8392 type = v16qi_ftype_v8hi_v8hi;
8394 /* vint, vchar, vint. */
8395 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V4SImode)
8396 type = v4si_ftype_v16qi_v4si;
8398 /* vint, vchar, vchar. */
8399 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode)
8400 type = v4si_ftype_v16qi_v16qi;
8402 /* vint, vshort, vint. */
8403 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V4SImode)
8404 type = v4si_ftype_v8hi_v4si;
8406 /* vint, vint, 5 bit literal. */
8407 else if (mode0 == V4SImode && mode1 == V4SImode && mode2 == QImode)
8408 type = v4si_ftype_v4si_int;
8410 /* vshort, vshort, 5 bit literal. */
8411 else if (mode0 == V8HImode && mode1 == V8HImode && mode2 == QImode)
8412 type = v8hi_ftype_v8hi_int;
8414 /* vchar, vchar, 5 bit literal. */
8415 else if (mode0 == V16QImode && mode1 == V16QImode && mode2 == QImode)
8416 type = v16qi_ftype_v16qi_int;
8418 /* vfloat, vint, 5 bit literal. */
8419 else if (mode0 == V4SFmode && mode1 == V4SImode && mode2 == QImode)
8420 type = v4sf_ftype_v4si_int;
8422 /* vint, vfloat, 5 bit literal. */
8423 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == QImode)
8424 type = v4si_ftype_v4sf_int;
8426 else if (mode0 == V2SImode && mode1 == SImode && mode2 == SImode)
8427 type = v2si_ftype_int_int;
8429 else if (mode0 == V2SImode && mode1 == V2SImode && mode2 == QImode)
8430 type = v2si_ftype_v2si_char;
8432 else if (mode0 == V2SImode && mode1 == SImode && mode2 == QImode)
8433 type = v2si_ftype_int_char;
8438 gcc_assert (mode0 == SImode);
8442 type = int_ftype_v4si_v4si;
8445 type = int_ftype_v4sf_v4sf;
8448 type = int_ftype_v16qi_v16qi;
8451 type = int_ftype_v8hi_v8hi;
8458 def_builtin (d->mask, d->name, type, d->code);
8461 /* Add the simple unary operators. */
8462 d = (struct builtin_description *) bdesc_1arg;
8463 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
8465 enum machine_mode mode0, mode1;
8467 bool is_overloaded = d->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
8468 && d->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
8477 if (d->name == 0 || d->icode == CODE_FOR_nothing)
8480 mode0 = insn_data[d->icode].operand[0].mode;
8481 mode1 = insn_data[d->icode].operand[1].mode;
8484 if (mode0 == V4SImode && mode1 == QImode)
8485 type = v4si_ftype_int;
8486 else if (mode0 == V8HImode && mode1 == QImode)
8487 type = v8hi_ftype_int;
8488 else if (mode0 == V16QImode && mode1 == QImode)
8489 type = v16qi_ftype_int;
8490 else if (mode0 == VOIDmode && mode1 == VOIDmode)
8491 type = opaque_ftype_opaque;
8492 else if (mode0 == V4SFmode && mode1 == V4SFmode)
8493 type = v4sf_ftype_v4sf;
8494 else if (mode0 == V8HImode && mode1 == V16QImode)
8495 type = v8hi_ftype_v16qi;
8496 else if (mode0 == V4SImode && mode1 == V8HImode)
8497 type = v4si_ftype_v8hi;
8498 else if (mode0 == V2SImode && mode1 == V2SImode)
8499 type = v2si_ftype_v2si;
8500 else if (mode0 == V2SFmode && mode1 == V2SFmode)
8501 type = v2sf_ftype_v2sf;
8502 else if (mode0 == V2SFmode && mode1 == V2SImode)
8503 type = v2sf_ftype_v2si;
8504 else if (mode0 == V2SImode && mode1 == V2SFmode)
8505 type = v2si_ftype_v2sf;
8506 else if (mode0 == V2SImode && mode1 == QImode)
8507 type = v2si_ftype_char;
8511 def_builtin (d->mask, d->name, type, d->code);
8516 rs6000_init_libfuncs (void)
8518 if (!TARGET_HARD_FLOAT)
8521 if (DEFAULT_ABI != ABI_V4)
8523 if (TARGET_XCOFF && ! TARGET_POWER2 && ! TARGET_POWERPC)
8525 /* AIX library routines for float->int conversion. */
8526 set_conv_libfunc (sfix_optab, SImode, DFmode, "__itrunc");
8527 set_conv_libfunc (ufix_optab, SImode, DFmode, "__uitrunc");
8528 set_conv_libfunc (sfix_optab, SImode, TFmode, "_qitrunc");
8529 set_conv_libfunc (ufix_optab, SImode, TFmode, "_quitrunc");
8532 /* AIX/Darwin/64-bit Linux quad floating point routines. */
8533 if (!TARGET_XL_COMPAT)
8535 set_optab_libfunc (add_optab, TFmode, "__gcc_qadd");
8536 set_optab_libfunc (sub_optab, TFmode, "__gcc_qsub");
8537 set_optab_libfunc (smul_optab, TFmode, "__gcc_qmul");
8538 set_optab_libfunc (sdiv_optab, TFmode, "__gcc_qdiv");
8542 set_optab_libfunc (add_optab, TFmode, "_xlqadd");
8543 set_optab_libfunc (sub_optab, TFmode, "_xlqsub");
8544 set_optab_libfunc (smul_optab, TFmode, "_xlqmul");
8545 set_optab_libfunc (sdiv_optab, TFmode, "_xlqdiv");
8550 /* 32-bit SVR4 quad floating point routines. */
8552 set_optab_libfunc (add_optab, TFmode, "_q_add");
8553 set_optab_libfunc (sub_optab, TFmode, "_q_sub");
8554 set_optab_libfunc (neg_optab, TFmode, "_q_neg");
8555 set_optab_libfunc (smul_optab, TFmode, "_q_mul");
8556 set_optab_libfunc (sdiv_optab, TFmode, "_q_div");
8557 if (TARGET_PPC_GPOPT || TARGET_POWER2)
8558 set_optab_libfunc (sqrt_optab, TFmode, "_q_sqrt");
8560 set_optab_libfunc (eq_optab, TFmode, "_q_feq");
8561 set_optab_libfunc (ne_optab, TFmode, "_q_fne");
8562 set_optab_libfunc (gt_optab, TFmode, "_q_fgt");
8563 set_optab_libfunc (ge_optab, TFmode, "_q_fge");
8564 set_optab_libfunc (lt_optab, TFmode, "_q_flt");
8565 set_optab_libfunc (le_optab, TFmode, "_q_fle");
8567 set_conv_libfunc (sext_optab, TFmode, SFmode, "_q_stoq");
8568 set_conv_libfunc (sext_optab, TFmode, DFmode, "_q_dtoq");
8569 set_conv_libfunc (trunc_optab, SFmode, TFmode, "_q_qtos");
8570 set_conv_libfunc (trunc_optab, DFmode, TFmode, "_q_qtod");
8571 set_conv_libfunc (sfix_optab, SImode, TFmode, "_q_qtoi");
8572 set_conv_libfunc (ufix_optab, SImode, TFmode, "_q_qtou");
8573 set_conv_libfunc (sfloat_optab, TFmode, SImode, "_q_itoq");
8578 /* Expand a block clear operation, and return 1 if successful. Return 0
8579 if we should let the compiler generate normal code.
8581 operands[0] is the destination
8582 operands[1] is the length
8583 operands[2] is the alignment */
8586 expand_block_clear (rtx operands[])
8588 rtx orig_dest = operands[0];
8589 rtx bytes_rtx = operands[1];
8590 rtx align_rtx = operands[2];
8591 bool constp = (GET_CODE (bytes_rtx) == CONST_INT);
8592 HOST_WIDE_INT align;
8593 HOST_WIDE_INT bytes;
8598 /* If this is not a fixed size move, just call memcpy */
8602 /* This must be a fixed size alignment */
8603 gcc_assert (GET_CODE (align_rtx) == CONST_INT);
8604 align = INTVAL (align_rtx) * BITS_PER_UNIT;
8606 /* Anything to clear? */
8607 bytes = INTVAL (bytes_rtx);
8611 /* Use the builtin memset after a point, to avoid huge code bloat.
8612 When optimize_size, avoid any significant code bloat; calling
8613 memset is about 4 instructions, so allow for one instruction to
8614 load zero and three to do clearing. */
8615 if (TARGET_ALTIVEC && align >= 128)
8617 else if (TARGET_POWERPC64 && align >= 32)
8622 if (optimize_size && bytes > 3 * clear_step)
8624 if (! optimize_size && bytes > 8 * clear_step)
8627 for (offset = 0; bytes > 0; offset += clear_bytes, bytes -= clear_bytes)
8629 enum machine_mode mode = BLKmode;
8632 if (bytes >= 16 && TARGET_ALTIVEC && align >= 128)
8637 else if (bytes >= 8 && TARGET_POWERPC64
8638 /* 64-bit loads and stores require word-aligned
8640 && (align >= 64 || (!STRICT_ALIGNMENT && align >= 32)))
8645 else if (bytes >= 4 && (align >= 32 || !STRICT_ALIGNMENT))
8646 { /* move 4 bytes */
8650 else if (bytes == 2 && (align >= 16 || !STRICT_ALIGNMENT))
8651 { /* move 2 bytes */
8655 else /* move 1 byte at a time */
8661 dest = adjust_address (orig_dest, mode, offset);
8663 emit_move_insn (dest, CONST0_RTX (mode));
8670 /* Expand a block move operation, and return 1 if successful. Return 0
8671 if we should let the compiler generate normal code.
8673 operands[0] is the destination
8674 operands[1] is the source
8675 operands[2] is the length
8676 operands[3] is the alignment */
8678 #define MAX_MOVE_REG 4
8681 expand_block_move (rtx operands[])
8683 rtx orig_dest = operands[0];
8684 rtx orig_src = operands[1];
8685 rtx bytes_rtx = operands[2];
8686 rtx align_rtx = operands[3];
8687 int constp = (GET_CODE (bytes_rtx) == CONST_INT);
8692 rtx stores[MAX_MOVE_REG];
8695 /* If this is not a fixed size move, just call memcpy */
8699 /* This must be a fixed size alignment */
8700 gcc_assert (GET_CODE (align_rtx) == CONST_INT);
8701 align = INTVAL (align_rtx) * BITS_PER_UNIT;
8703 /* Anything to move? */
8704 bytes = INTVAL (bytes_rtx);
8708 /* store_one_arg depends on expand_block_move to handle at least the size of
8709 reg_parm_stack_space. */
8710 if (bytes > (TARGET_POWERPC64 ? 64 : 32))
8713 for (offset = 0; bytes > 0; offset += move_bytes, bytes -= move_bytes)
8716 rtx (*movmemsi) (rtx, rtx, rtx, rtx);
8717 rtx (*mov) (rtx, rtx);
8719 enum machine_mode mode = BLKmode;
8722 /* Altivec first, since it will be faster than a string move
8723 when it applies, and usually not significantly larger. */
8724 if (TARGET_ALTIVEC && bytes >= 16 && align >= 128)
8728 gen_func.mov = gen_movv4si;
8730 else if (TARGET_STRING
8731 && bytes > 24 /* move up to 32 bytes at a time */
8739 && ! fixed_regs[12])
8741 move_bytes = (bytes > 32) ? 32 : bytes;
8742 gen_func.movmemsi = gen_movmemsi_8reg;
8744 else if (TARGET_STRING
8745 && bytes > 16 /* move up to 24 bytes at a time */
8751 && ! fixed_regs[10])
8753 move_bytes = (bytes > 24) ? 24 : bytes;
8754 gen_func.movmemsi = gen_movmemsi_6reg;
8756 else if (TARGET_STRING
8757 && bytes > 8 /* move up to 16 bytes at a time */
8763 move_bytes = (bytes > 16) ? 16 : bytes;
8764 gen_func.movmemsi = gen_movmemsi_4reg;
8766 else if (bytes >= 8 && TARGET_POWERPC64
8767 /* 64-bit loads and stores require word-aligned
8769 && (align >= 64 || (!STRICT_ALIGNMENT && align >= 32)))
8773 gen_func.mov = gen_movdi;
8775 else if (TARGET_STRING && bytes > 4 && !TARGET_POWERPC64)
8776 { /* move up to 8 bytes at a time */
8777 move_bytes = (bytes > 8) ? 8 : bytes;
8778 gen_func.movmemsi = gen_movmemsi_2reg;
8780 else if (bytes >= 4 && (align >= 32 || !STRICT_ALIGNMENT))
8781 { /* move 4 bytes */
8784 gen_func.mov = gen_movsi;
8786 else if (bytes == 2 && (align >= 16 || !STRICT_ALIGNMENT))
8787 { /* move 2 bytes */
8790 gen_func.mov = gen_movhi;
8792 else if (TARGET_STRING && bytes > 1)
8793 { /* move up to 4 bytes at a time */
8794 move_bytes = (bytes > 4) ? 4 : bytes;
8795 gen_func.movmemsi = gen_movmemsi_1reg;
8797 else /* move 1 byte at a time */
8801 gen_func.mov = gen_movqi;
8804 src = adjust_address (orig_src, mode, offset);
8805 dest = adjust_address (orig_dest, mode, offset);
8807 if (mode != BLKmode)
8809 rtx tmp_reg = gen_reg_rtx (mode);
8811 emit_insn ((*gen_func.mov) (tmp_reg, src));
8812 stores[num_reg++] = (*gen_func.mov) (dest, tmp_reg);
8815 if (mode == BLKmode || num_reg >= MAX_MOVE_REG || bytes == move_bytes)
8818 for (i = 0; i < num_reg; i++)
8819 emit_insn (stores[i]);
8823 if (mode == BLKmode)
8825 /* Move the address into scratch registers. The movmemsi
8826 patterns require zero offset. */
8827 if (!REG_P (XEXP (src, 0)))
8829 rtx src_reg = copy_addr_to_reg (XEXP (src, 0));
8830 src = replace_equiv_address (src, src_reg);
8832 set_mem_size (src, GEN_INT (move_bytes));
8834 if (!REG_P (XEXP (dest, 0)))
8836 rtx dest_reg = copy_addr_to_reg (XEXP (dest, 0));
8837 dest = replace_equiv_address (dest, dest_reg);
8839 set_mem_size (dest, GEN_INT (move_bytes));
8841 emit_insn ((*gen_func.movmemsi) (dest, src,
8842 GEN_INT (move_bytes & 31),
8851 /* Return a string to perform a load_multiple operation.
8852 operands[0] is the vector.
8853 operands[1] is the source address.
8854 operands[2] is the first destination register. */
8857 rs6000_output_load_multiple (rtx operands[3])
8859 /* We have to handle the case where the pseudo used to contain the address
8860 is assigned to one of the output registers. */
8862 int words = XVECLEN (operands[0], 0);
8865 if (XVECLEN (operands[0], 0) == 1)
8866 return "{l|lwz} %2,0(%1)";
8868 for (i = 0; i < words; i++)
8869 if (refers_to_regno_p (REGNO (operands[2]) + i,
8870 REGNO (operands[2]) + i + 1, operands[1], 0))
8874 xop[0] = GEN_INT (4 * (words-1));
8875 xop[1] = operands[1];
8876 xop[2] = operands[2];
8877 output_asm_insn ("{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,%0(%1)", xop);
8882 xop[0] = GEN_INT (4 * (words-1));
8883 xop[1] = operands[1];
8884 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
8885 output_asm_insn ("{cal %1,4(%1)|addi %1,%1,4}\n\t{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,-4(%1)", xop);
8890 for (j = 0; j < words; j++)
8893 xop[0] = GEN_INT (j * 4);
8894 xop[1] = operands[1];
8895 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + j);
8896 output_asm_insn ("{l|lwz} %2,%0(%1)", xop);
8898 xop[0] = GEN_INT (i * 4);
8899 xop[1] = operands[1];
8900 output_asm_insn ("{l|lwz} %1,%0(%1)", xop);
8905 return "{lsi|lswi} %2,%1,%N0";
8909 /* A validation routine: say whether CODE, a condition code, and MODE
8910 match. The other alternatives either don't make sense or should
8911 never be generated. */
8914 validate_condition_mode (enum rtx_code code, enum machine_mode mode)
8916 gcc_assert ((GET_RTX_CLASS (code) == RTX_COMPARE
8917 || GET_RTX_CLASS (code) == RTX_COMM_COMPARE)
8918 && GET_MODE_CLASS (mode) == MODE_CC);
8920 /* These don't make sense. */
8921 gcc_assert ((code != GT && code != LT && code != GE && code != LE)
8922 || mode != CCUNSmode);
8924 gcc_assert ((code != GTU && code != LTU && code != GEU && code != LEU)
8925 || mode == CCUNSmode);
8927 gcc_assert (mode == CCFPmode
8928 || (code != ORDERED && code != UNORDERED
8929 && code != UNEQ && code != LTGT
8930 && code != UNGT && code != UNLT
8931 && code != UNGE && code != UNLE));
8933 /* These should never be generated except for
8934 flag_finite_math_only. */
8935 gcc_assert (mode != CCFPmode
8936 || flag_finite_math_only
8937 || (code != LE && code != GE
8938 && code != UNEQ && code != LTGT
8939 && code != UNGT && code != UNLT));
8941 /* These are invalid; the information is not there. */
8942 gcc_assert (mode != CCEQmode || code == EQ || code == NE);
8946 /* Return 1 if ANDOP is a mask that has no bits on that are not in the
8947 mask required to convert the result of a rotate insn into a shift
8948 left insn of SHIFTOP bits. Both are known to be SImode CONST_INT. */
8951 includes_lshift_p (rtx shiftop, rtx andop)
8953 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
8955 shift_mask <<= INTVAL (shiftop);
8957 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
8960 /* Similar, but for right shift. */
8963 includes_rshift_p (rtx shiftop, rtx andop)
8965 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
8967 shift_mask >>= INTVAL (shiftop);
8969 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
8972 /* Return 1 if ANDOP is a mask suitable for use with an rldic insn
8973 to perform a left shift. It must have exactly SHIFTOP least
8974 significant 0's, then one or more 1's, then zero or more 0's. */
8977 includes_rldic_lshift_p (rtx shiftop, rtx andop)
8979 if (GET_CODE (andop) == CONST_INT)
8981 HOST_WIDE_INT c, lsb, shift_mask;
8984 if (c == 0 || c == ~0)
8988 shift_mask <<= INTVAL (shiftop);
8990 /* Find the least significant one bit. */
8993 /* It must coincide with the LSB of the shift mask. */
8994 if (-lsb != shift_mask)
8997 /* Invert to look for the next transition (if any). */
9000 /* Remove the low group of ones (originally low group of zeros). */
9003 /* Again find the lsb, and check we have all 1's above. */
9007 else if (GET_CODE (andop) == CONST_DOUBLE
9008 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
9010 HOST_WIDE_INT low, high, lsb;
9011 HOST_WIDE_INT shift_mask_low, shift_mask_high;
9013 low = CONST_DOUBLE_LOW (andop);
9014 if (HOST_BITS_PER_WIDE_INT < 64)
9015 high = CONST_DOUBLE_HIGH (andop);
9017 if ((low == 0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == 0))
9018 || (low == ~0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0)))
9021 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
9023 shift_mask_high = ~0;
9024 if (INTVAL (shiftop) > 32)
9025 shift_mask_high <<= INTVAL (shiftop) - 32;
9029 if (-lsb != shift_mask_high || INTVAL (shiftop) < 32)
9036 return high == -lsb;
9039 shift_mask_low = ~0;
9040 shift_mask_low <<= INTVAL (shiftop);
9044 if (-lsb != shift_mask_low)
9047 if (HOST_BITS_PER_WIDE_INT < 64)
9052 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
9055 return high == -lsb;
9059 return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
9065 /* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
9066 to perform a left shift. It must have SHIFTOP or more least
9067 significant 0's, with the remainder of the word 1's. */
9070 includes_rldicr_lshift_p (rtx shiftop, rtx andop)
9072 if (GET_CODE (andop) == CONST_INT)
9074 HOST_WIDE_INT c, lsb, shift_mask;
9077 shift_mask <<= INTVAL (shiftop);
9080 /* Find the least significant one bit. */
9083 /* It must be covered by the shift mask.
9084 This test also rejects c == 0. */
9085 if ((lsb & shift_mask) == 0)
9088 /* Check we have all 1's above the transition, and reject all 1's. */
9089 return c == -lsb && lsb != 1;
9091 else if (GET_CODE (andop) == CONST_DOUBLE
9092 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
9094 HOST_WIDE_INT low, lsb, shift_mask_low;
9096 low = CONST_DOUBLE_LOW (andop);
9098 if (HOST_BITS_PER_WIDE_INT < 64)
9100 HOST_WIDE_INT high, shift_mask_high;
9102 high = CONST_DOUBLE_HIGH (andop);
9106 shift_mask_high = ~0;
9107 if (INTVAL (shiftop) > 32)
9108 shift_mask_high <<= INTVAL (shiftop) - 32;
9112 if ((lsb & shift_mask_high) == 0)
9115 return high == -lsb;
9121 shift_mask_low = ~0;
9122 shift_mask_low <<= INTVAL (shiftop);
9126 if ((lsb & shift_mask_low) == 0)
9129 return low == -lsb && lsb != 1;
9135 /* Return 1 if operands will generate a valid arguments to rlwimi
9136 instruction for insert with right shift in 64-bit mode. The mask may
9137 not start on the first bit or stop on the last bit because wrap-around
9138 effects of instruction do not correspond to semantics of RTL insn. */
9141 insvdi_rshift_rlwimi_p (rtx sizeop, rtx startop, rtx shiftop)
9143 if (INTVAL (startop) < 64
9144 && INTVAL (startop) > 32
9145 && (INTVAL (sizeop) + INTVAL (startop) < 64)
9146 && (INTVAL (sizeop) + INTVAL (startop) > 33)
9147 && (INTVAL (sizeop) + INTVAL (startop) + INTVAL (shiftop) < 96)
9148 && (INTVAL (sizeop) + INTVAL (startop) + INTVAL (shiftop) >= 64)
9149 && (64 - (INTVAL (shiftop) & 63)) >= INTVAL (sizeop))
9155 /* Return 1 if REGNO (reg1) == REGNO (reg2) - 1 making them candidates
9156 for lfq and stfq insns iff the registers are hard registers. */
9159 registers_ok_for_quad_peep (rtx reg1, rtx reg2)
9161 /* We might have been passed a SUBREG. */
9162 if (GET_CODE (reg1) != REG || GET_CODE (reg2) != REG)
9165 /* We might have been passed non floating point registers. */
9166 if (!FP_REGNO_P (REGNO (reg1))
9167 || !FP_REGNO_P (REGNO (reg2)))
9170 return (REGNO (reg1) == REGNO (reg2) - 1);
9173 /* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
9174 addr1 and addr2 must be in consecutive memory locations
9175 (addr2 == addr1 + 8). */
9178 mems_ok_for_quad_peep (rtx mem1, rtx mem2)
9184 /* The mems cannot be volatile. */
9185 if (MEM_VOLATILE_P (mem1) || MEM_VOLATILE_P (mem2))
9188 addr1 = XEXP (mem1, 0);
9189 addr2 = XEXP (mem2, 0);
9191 /* Extract an offset (if used) from the first addr. */
9192 if (GET_CODE (addr1) == PLUS)
9194 /* If not a REG, return zero. */
9195 if (GET_CODE (XEXP (addr1, 0)) != REG)
9199 reg1 = REGNO (XEXP (addr1, 0));
9200 /* The offset must be constant! */
9201 if (GET_CODE (XEXP (addr1, 1)) != CONST_INT)
9203 offset1 = INTVAL (XEXP (addr1, 1));
9206 else if (GET_CODE (addr1) != REG)
9210 reg1 = REGNO (addr1);
9211 /* This was a simple (mem (reg)) expression. Offset is 0. */
9215 /* Make sure the second address is a (mem (plus (reg) (const_int)))
9216 or if it is (mem (reg)) then make sure that offset1 is -8 and the same
9217 register as addr1. */
9218 if (offset1 == -8 && GET_CODE (addr2) == REG && reg1 == REGNO (addr2))
9220 if (GET_CODE (addr2) != PLUS)
9223 if (GET_CODE (XEXP (addr2, 0)) != REG
9224 || GET_CODE (XEXP (addr2, 1)) != CONST_INT)
9227 if (reg1 != REGNO (XEXP (addr2, 0)))
9230 /* The offset for the second addr must be 8 more than the first addr. */
9231 if (INTVAL (XEXP (addr2, 1)) != offset1 + 8)
9234 /* All the tests passed. addr1 and addr2 are valid for lfq or stfq
9239 /* Return the register class of a scratch register needed to copy IN into
9240 or out of a register in CLASS in MODE. If it can be done directly,
9241 NO_REGS is returned. */
9244 secondary_reload_class (enum reg_class class,
9245 enum machine_mode mode ATTRIBUTE_UNUSED,
9250 if (TARGET_ELF || (DEFAULT_ABI == ABI_DARWIN
9252 && MACHOPIC_INDIRECT
9256 /* We cannot copy a symbolic operand directly into anything
9257 other than BASE_REGS for TARGET_ELF. So indicate that a
9258 register from BASE_REGS is needed as an intermediate
9261 On Darwin, pic addresses require a load from memory, which
9262 needs a base register. */
9263 if (class != BASE_REGS
9264 && (GET_CODE (in) == SYMBOL_REF
9265 || GET_CODE (in) == HIGH
9266 || GET_CODE (in) == LABEL_REF
9267 || GET_CODE (in) == CONST))
9271 if (GET_CODE (in) == REG)
9274 if (regno >= FIRST_PSEUDO_REGISTER)
9276 regno = true_regnum (in);
9277 if (regno >= FIRST_PSEUDO_REGISTER)
9281 else if (GET_CODE (in) == SUBREG)
9283 regno = true_regnum (in);
9284 if (regno >= FIRST_PSEUDO_REGISTER)
9290 /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
9292 if (class == GENERAL_REGS || class == BASE_REGS
9293 || (regno >= 0 && INT_REGNO_P (regno)))
9296 /* Constants, memory, and FP registers can go into FP registers. */
9297 if ((regno == -1 || FP_REGNO_P (regno))
9298 && (class == FLOAT_REGS || class == NON_SPECIAL_REGS))
9301 /* Memory, and AltiVec registers can go into AltiVec registers. */
9302 if ((regno == -1 || ALTIVEC_REGNO_P (regno))
9303 && class == ALTIVEC_REGS)
9306 /* We can copy among the CR registers. */
9307 if ((class == CR_REGS || class == CR0_REGS)
9308 && regno >= 0 && CR_REGNO_P (regno))
9311 /* Otherwise, we need GENERAL_REGS. */
9312 return GENERAL_REGS;
9315 /* Given a comparison operation, return the bit number in CCR to test. We
9316 know this is a valid comparison.
9318 SCC_P is 1 if this is for an scc. That means that %D will have been
9319 used instead of %C, so the bits will be in different places.
9321 Return -1 if OP isn't a valid comparison for some reason. */
9324 ccr_bit (rtx op, int scc_p)
9326 enum rtx_code code = GET_CODE (op);
9327 enum machine_mode cc_mode;
9332 if (!COMPARISON_P (op))
9337 gcc_assert (GET_CODE (reg) == REG && CR_REGNO_P (REGNO (reg)));
9339 cc_mode = GET_MODE (reg);
9340 cc_regnum = REGNO (reg);
9341 base_bit = 4 * (cc_regnum - CR0_REGNO);
9343 validate_condition_mode (code, cc_mode);
9345 /* When generating a sCOND operation, only positive conditions are
9348 || code == EQ || code == GT || code == LT || code == UNORDERED
9349 || code == GTU || code == LTU);
9354 return scc_p ? base_bit + 3 : base_bit + 2;
9356 return base_bit + 2;
9357 case GT: case GTU: case UNLE:
9358 return base_bit + 1;
9359 case LT: case LTU: case UNGE:
9361 case ORDERED: case UNORDERED:
9362 return base_bit + 3;
9365 /* If scc, we will have done a cror to put the bit in the
9366 unordered position. So test that bit. For integer, this is ! LT
9367 unless this is an scc insn. */
9368 return scc_p ? base_bit + 3 : base_bit;
9371 return scc_p ? base_bit + 3 : base_bit + 1;
9378 /* Return the GOT register. */
9381 rs6000_got_register (rtx value ATTRIBUTE_UNUSED)
9383 /* The second flow pass currently (June 1999) can't update
9384 regs_ever_live without disturbing other parts of the compiler, so
9385 update it here to make the prolog/epilogue code happy. */
9386 if (no_new_pseudos && ! regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM])
9387 regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
9389 current_function_uses_pic_offset_table = 1;
9391 return pic_offset_table_rtx;
9394 /* Function to init struct machine_function.
9395 This will be called, via a pointer variable,
9396 from push_function_context. */
9398 static struct machine_function *
9399 rs6000_init_machine_status (void)
9401 return ggc_alloc_cleared (sizeof (machine_function));
9404 /* These macros test for integers and extract the low-order bits. */
9406 ((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE) \
9407 && GET_MODE (X) == VOIDmode)
9409 #define INT_LOWPART(X) \
9410 (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
9416 unsigned long val = INT_LOWPART (op);
9418 /* If the high bit is zero, the value is the first 1 bit we find
9420 if ((val & 0x80000000) == 0)
9422 gcc_assert (val & 0xffffffff);
9425 while (((val <<= 1) & 0x80000000) == 0)
9430 /* If the high bit is set and the low bit is not, or the mask is all
9431 1's, the value is zero. */
9432 if ((val & 1) == 0 || (val & 0xffffffff) == 0xffffffff)
9435 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
9438 while (((val >>= 1) & 1) != 0)
9448 unsigned long val = INT_LOWPART (op);
9450 /* If the low bit is zero, the value is the first 1 bit we find from
9454 gcc_assert (val & 0xffffffff);
9457 while (((val >>= 1) & 1) == 0)
9463 /* If the low bit is set and the high bit is not, or the mask is all
9464 1's, the value is 31. */
9465 if ((val & 0x80000000) == 0 || (val & 0xffffffff) == 0xffffffff)
9468 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
9471 while (((val <<= 1) & 0x80000000) != 0)
9477 /* Locate some local-dynamic symbol still in use by this function
9478 so that we can print its name in some tls_ld pattern. */
9481 rs6000_get_some_local_dynamic_name (void)
9485 if (cfun->machine->some_ld_name)
9486 return cfun->machine->some_ld_name;
9488 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
9490 && for_each_rtx (&PATTERN (insn),
9491 rs6000_get_some_local_dynamic_name_1, 0))
9492 return cfun->machine->some_ld_name;
9497 /* Helper function for rs6000_get_some_local_dynamic_name. */
9500 rs6000_get_some_local_dynamic_name_1 (rtx *px, void *data ATTRIBUTE_UNUSED)
9504 if (GET_CODE (x) == SYMBOL_REF)
9506 const char *str = XSTR (x, 0);
9507 if (SYMBOL_REF_TLS_MODEL (x) == TLS_MODEL_LOCAL_DYNAMIC)
9509 cfun->machine->some_ld_name = str;
9517 /* Write out a function code label. */
9520 rs6000_output_function_entry (FILE *file, const char *fname)
9522 if (fname[0] != '.')
9524 switch (DEFAULT_ABI)
9533 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "L.");
9542 RS6000_OUTPUT_BASENAME (file, fname);
9544 assemble_name (file, fname);
9547 /* Print an operand. Recognize special options, documented below. */
9550 #define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
9551 #define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
9553 #define SMALL_DATA_RELOC "sda21"
9554 #define SMALL_DATA_REG 0
9558 print_operand (FILE *file, rtx x, int code)
9562 unsigned HOST_WIDE_INT uval;
9567 /* Write out an instruction after the call which may be replaced
9568 with glue code by the loader. This depends on the AIX version. */
9569 asm_fprintf (file, RS6000_CALL_GLUE);
9572 /* %a is output_address. */
9575 /* If X is a constant integer whose low-order 5 bits are zero,
9576 write 'l'. Otherwise, write 'r'. This is a kludge to fix a bug
9577 in the AIX assembler where "sri" with a zero shift count
9578 writes a trash instruction. */
9579 if (GET_CODE (x) == CONST_INT && (INTVAL (x) & 31) == 0)
9586 /* If constant, low-order 16 bits of constant, unsigned.
9587 Otherwise, write normally. */
9589 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 0xffff);
9591 print_operand (file, x, 0);
9595 /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
9596 for 64-bit mask direction. */
9597 putc (((INT_LOWPART (x) & 1) == 0 ? 'r' : 'l'), file);
9600 /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
9604 /* X is a CR register. Print the number of the GT bit of the CR. */
9605 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
9606 output_operand_lossage ("invalid %%E value");
9608 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 1);
9612 /* Like 'J' but get to the EQ bit. */
9613 gcc_assert (GET_CODE (x) == REG);
9615 /* Bit 1 is EQ bit. */
9616 i = 4 * (REGNO (x) - CR0_REGNO) + 2;
9618 fprintf (file, "%d", i);
9622 /* X is a CR register. Print the number of the EQ bit of the CR */
9623 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
9624 output_operand_lossage ("invalid %%E value");
9626 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 2);
9630 /* X is a CR register. Print the shift count needed to move it
9631 to the high-order four bits. */
9632 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
9633 output_operand_lossage ("invalid %%f value");
9635 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO));
9639 /* Similar, but print the count for the rotate in the opposite
9641 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
9642 output_operand_lossage ("invalid %%F value");
9644 fprintf (file, "%d", 32 - 4 * (REGNO (x) - CR0_REGNO));
9648 /* X is a constant integer. If it is negative, print "m",
9649 otherwise print "z". This is to make an aze or ame insn. */
9650 if (GET_CODE (x) != CONST_INT)
9651 output_operand_lossage ("invalid %%G value");
9652 else if (INTVAL (x) >= 0)
9659 /* If constant, output low-order five bits. Otherwise, write
9662 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 31);
9664 print_operand (file, x, 0);
9668 /* If constant, output low-order six bits. Otherwise, write
9671 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 63);
9673 print_operand (file, x, 0);
9677 /* Print `i' if this is a constant, else nothing. */
9683 /* Write the bit number in CCR for jump. */
9686 output_operand_lossage ("invalid %%j code");
9688 fprintf (file, "%d", i);
9692 /* Similar, but add one for shift count in rlinm for scc and pass
9693 scc flag to `ccr_bit'. */
9696 output_operand_lossage ("invalid %%J code");
9698 /* If we want bit 31, write a shift count of zero, not 32. */
9699 fprintf (file, "%d", i == 31 ? 0 : i + 1);
9703 /* X must be a constant. Write the 1's complement of the
9706 output_operand_lossage ("invalid %%k value");
9708 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~ INT_LOWPART (x));
9712 /* X must be a symbolic constant on ELF. Write an
9713 expression suitable for an 'addi' that adds in the low 16
9715 if (GET_CODE (x) != CONST)
9717 print_operand_address (file, x);
9722 if (GET_CODE (XEXP (x, 0)) != PLUS
9723 || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
9724 && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
9725 || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
9726 output_operand_lossage ("invalid %%K value");
9727 print_operand_address (file, XEXP (XEXP (x, 0), 0));
9729 /* For GNU as, there must be a non-alphanumeric character
9730 between 'l' and the number. The '-' is added by
9731 print_operand() already. */
9732 if (INTVAL (XEXP (XEXP (x, 0), 1)) >= 0)
9734 print_operand (file, XEXP (XEXP (x, 0), 1), 0);
9738 /* %l is output_asm_label. */
9741 /* Write second word of DImode or DFmode reference. Works on register
9742 or non-indexed memory only. */
9743 if (GET_CODE (x) == REG)
9744 fputs (reg_names[REGNO (x) + 1], file);
9745 else if (GET_CODE (x) == MEM)
9747 /* Handle possible auto-increment. Since it is pre-increment and
9748 we have already done it, we can just use an offset of word. */
9749 if (GET_CODE (XEXP (x, 0)) == PRE_INC
9750 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
9751 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
9754 output_address (XEXP (adjust_address_nv (x, SImode,
9758 if (small_data_operand (x, GET_MODE (x)))
9759 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
9760 reg_names[SMALL_DATA_REG]);
9765 /* MB value for a mask operand. */
9766 if (! mask_operand (x, SImode))
9767 output_operand_lossage ("invalid %%m value");
9769 fprintf (file, "%d", extract_MB (x));
9773 /* ME value for a mask operand. */
9774 if (! mask_operand (x, SImode))
9775 output_operand_lossage ("invalid %%M value");
9777 fprintf (file, "%d", extract_ME (x));
9780 /* %n outputs the negative of its operand. */
9783 /* Write the number of elements in the vector times 4. */
9784 if (GET_CODE (x) != PARALLEL)
9785 output_operand_lossage ("invalid %%N value");
9787 fprintf (file, "%d", XVECLEN (x, 0) * 4);
9791 /* Similar, but subtract 1 first. */
9792 if (GET_CODE (x) != PARALLEL)
9793 output_operand_lossage ("invalid %%O value");
9795 fprintf (file, "%d", (XVECLEN (x, 0) - 1) * 4);
9799 /* X is a CONST_INT that is a power of two. Output the logarithm. */
9801 || INT_LOWPART (x) < 0
9802 || (i = exact_log2 (INT_LOWPART (x))) < 0)
9803 output_operand_lossage ("invalid %%p value");
9805 fprintf (file, "%d", i);
9809 /* The operand must be an indirect memory reference. The result
9810 is the register name. */
9811 if (GET_CODE (x) != MEM || GET_CODE (XEXP (x, 0)) != REG
9812 || REGNO (XEXP (x, 0)) >= 32)
9813 output_operand_lossage ("invalid %%P value");
9815 fputs (reg_names[REGNO (XEXP (x, 0))], file);
9819 /* This outputs the logical code corresponding to a boolean
9820 expression. The expression may have one or both operands
9821 negated (if one, only the first one). For condition register
9822 logical operations, it will also treat the negated
9823 CR codes as NOTs, but not handle NOTs of them. */
9825 const char *const *t = 0;
9827 enum rtx_code code = GET_CODE (x);
9828 static const char * const tbl[3][3] = {
9829 { "and", "andc", "nor" },
9830 { "or", "orc", "nand" },
9831 { "xor", "eqv", "xor" } };
9835 else if (code == IOR)
9837 else if (code == XOR)
9840 output_operand_lossage ("invalid %%q value");
9842 if (GET_CODE (XEXP (x, 0)) != NOT)
9846 if (GET_CODE (XEXP (x, 1)) == NOT)
9864 /* X is a CR register. Print the mask for `mtcrf'. */
9865 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
9866 output_operand_lossage ("invalid %%R value");
9868 fprintf (file, "%d", 128 >> (REGNO (x) - CR0_REGNO));
9872 /* Low 5 bits of 32 - value */
9874 output_operand_lossage ("invalid %%s value");
9876 fprintf (file, HOST_WIDE_INT_PRINT_DEC, (32 - INT_LOWPART (x)) & 31);
9880 /* PowerPC64 mask position. All 0's is excluded.
9881 CONST_INT 32-bit mask is considered sign-extended so any
9882 transition must occur within the CONST_INT, not on the boundary. */
9883 if (! mask_operand (x, DImode))
9884 output_operand_lossage ("invalid %%S value");
9886 uval = INT_LOWPART (x);
9888 if (uval & 1) /* Clear Left */
9890 #if HOST_BITS_PER_WIDE_INT > 64
9891 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
9895 else /* Clear Right */
9898 #if HOST_BITS_PER_WIDE_INT > 64
9899 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
9905 gcc_assert (i >= 0);
9906 fprintf (file, "%d", i);
9910 /* Like 'J' but get to the OVERFLOW/UNORDERED bit. */
9911 gcc_assert (GET_CODE (x) == REG && GET_MODE (x) == CCmode);
9913 /* Bit 3 is OV bit. */
9914 i = 4 * (REGNO (x) - CR0_REGNO) + 3;
9916 /* If we want bit 31, write a shift count of zero, not 32. */
9917 fprintf (file, "%d", i == 31 ? 0 : i + 1);
9921 /* Print the symbolic name of a branch target register. */
9922 if (GET_CODE (x) != REG || (REGNO (x) != LINK_REGISTER_REGNUM
9923 && REGNO (x) != COUNT_REGISTER_REGNUM))
9924 output_operand_lossage ("invalid %%T value");
9925 else if (REGNO (x) == LINK_REGISTER_REGNUM)
9926 fputs (TARGET_NEW_MNEMONICS ? "lr" : "r", file);
9928 fputs ("ctr", file);
9932 /* High-order 16 bits of constant for use in unsigned operand. */
9934 output_operand_lossage ("invalid %%u value");
9936 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
9937 (INT_LOWPART (x) >> 16) & 0xffff);
9941 /* High-order 16 bits of constant for use in signed operand. */
9943 output_operand_lossage ("invalid %%v value");
9945 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
9946 (INT_LOWPART (x) >> 16) & 0xffff);
9950 /* Print `u' if this has an auto-increment or auto-decrement. */
9951 if (GET_CODE (x) == MEM
9952 && (GET_CODE (XEXP (x, 0)) == PRE_INC
9953 || GET_CODE (XEXP (x, 0)) == PRE_DEC))
9958 /* Print the trap code for this operand. */
9959 switch (GET_CODE (x))
9962 fputs ("eq", file); /* 4 */
9965 fputs ("ne", file); /* 24 */
9968 fputs ("lt", file); /* 16 */
9971 fputs ("le", file); /* 20 */
9974 fputs ("gt", file); /* 8 */
9977 fputs ("ge", file); /* 12 */
9980 fputs ("llt", file); /* 2 */
9983 fputs ("lle", file); /* 6 */
9986 fputs ("lgt", file); /* 1 */
9989 fputs ("lge", file); /* 5 */
9997 /* If constant, low-order 16 bits of constant, signed. Otherwise, write
10000 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
10001 ((INT_LOWPART (x) & 0xffff) ^ 0x8000) - 0x8000);
10003 print_operand (file, x, 0);
10007 /* MB value for a PowerPC64 rldic operand. */
10008 val = (GET_CODE (x) == CONST_INT
10009 ? INTVAL (x) : CONST_DOUBLE_HIGH (x));
10014 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
10015 if ((val <<= 1) < 0)
10018 #if HOST_BITS_PER_WIDE_INT == 32
10019 if (GET_CODE (x) == CONST_INT && i >= 0)
10020 i += 32; /* zero-extend high-part was all 0's */
10021 else if (GET_CODE (x) == CONST_DOUBLE && i == 32)
10023 val = CONST_DOUBLE_LOW (x);
10029 for ( ; i < 64; i++)
10030 if ((val <<= 1) < 0)
10035 fprintf (file, "%d", i + 1);
10039 if (GET_CODE (x) == MEM
10040 && legitimate_indexed_address_p (XEXP (x, 0), 0))
10045 /* Like 'L', for third word of TImode */
10046 if (GET_CODE (x) == REG)
10047 fputs (reg_names[REGNO (x) + 2], file);
10048 else if (GET_CODE (x) == MEM)
10050 if (GET_CODE (XEXP (x, 0)) == PRE_INC
10051 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
10052 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
10054 output_address (XEXP (adjust_address_nv (x, SImode, 8), 0));
10055 if (small_data_operand (x, GET_MODE (x)))
10056 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
10057 reg_names[SMALL_DATA_REG]);
10062 /* X is a SYMBOL_REF. Write out the name preceded by a
10063 period and without any trailing data in brackets. Used for function
10064 names. If we are configured for System V (or the embedded ABI) on
10065 the PowerPC, do not emit the period, since those systems do not use
10066 TOCs and the like. */
10067 gcc_assert (GET_CODE (x) == SYMBOL_REF);
10069 /* Mark the decl as referenced so that cgraph will output the
10071 if (SYMBOL_REF_DECL (x))
10072 mark_decl_referenced (SYMBOL_REF_DECL (x));
10074 /* For macho, check to see if we need a stub. */
10077 const char *name = XSTR (x, 0);
10079 if (MACHOPIC_INDIRECT
10080 && machopic_classify_symbol (x) == MACHOPIC_UNDEFINED_FUNCTION)
10081 name = machopic_indirection_name (x, /*stub_p=*/true);
10083 assemble_name (file, name);
10085 else if (!DOT_SYMBOLS)
10086 assemble_name (file, XSTR (x, 0));
10088 rs6000_output_function_entry (file, XSTR (x, 0));
10092 /* Like 'L', for last word of TImode. */
10093 if (GET_CODE (x) == REG)
10094 fputs (reg_names[REGNO (x) + 3], file);
10095 else if (GET_CODE (x) == MEM)
10097 if (GET_CODE (XEXP (x, 0)) == PRE_INC
10098 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
10099 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
10101 output_address (XEXP (adjust_address_nv (x, SImode, 12), 0));
10102 if (small_data_operand (x, GET_MODE (x)))
10103 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
10104 reg_names[SMALL_DATA_REG]);
10108 /* Print AltiVec or SPE memory operand. */
10113 gcc_assert (GET_CODE (x) == MEM);
10119 /* Handle [reg]. */
10120 if (GET_CODE (tmp) == REG)
10122 fprintf (file, "0(%s)", reg_names[REGNO (tmp)]);
10125 /* Handle [reg+UIMM]. */
10126 else if (GET_CODE (tmp) == PLUS &&
10127 GET_CODE (XEXP (tmp, 1)) == CONST_INT)
10131 gcc_assert (GET_CODE (XEXP (tmp, 0)) == REG);
10133 x = INTVAL (XEXP (tmp, 1));
10134 fprintf (file, "%d(%s)", x, reg_names[REGNO (XEXP (tmp, 0))]);
10138 /* Fall through. Must be [reg+reg]. */
10141 && GET_CODE (tmp) == AND
10142 && GET_CODE (XEXP (tmp, 1)) == CONST_INT
10143 && INTVAL (XEXP (tmp, 1)) == -16)
10144 tmp = XEXP (tmp, 0);
10145 if (GET_CODE (tmp) == REG)
10146 fprintf (file, "0,%s", reg_names[REGNO (tmp)]);
10149 gcc_assert (GET_CODE (tmp) == PLUS
10150 && GET_CODE (XEXP (tmp, 1)) == REG);
10152 if (REGNO (XEXP (tmp, 0)) == 0)
10153 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 1)) ],
10154 reg_names[ REGNO (XEXP (tmp, 0)) ]);
10156 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 0)) ],
10157 reg_names[ REGNO (XEXP (tmp, 1)) ]);
10163 if (GET_CODE (x) == REG)
10164 fprintf (file, "%s", reg_names[REGNO (x)]);
10165 else if (GET_CODE (x) == MEM)
10167 /* We need to handle PRE_INC and PRE_DEC here, since we need to
10168 know the width from the mode. */
10169 if (GET_CODE (XEXP (x, 0)) == PRE_INC)
10170 fprintf (file, "%d(%s)", GET_MODE_SIZE (GET_MODE (x)),
10171 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
10172 else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
10173 fprintf (file, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x)),
10174 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
10176 output_address (XEXP (x, 0));
10179 output_addr_const (file, x);
10183 assemble_name (file, rs6000_get_some_local_dynamic_name ());
10187 output_operand_lossage ("invalid %%xn code");
10191 /* Print the address of an operand. */
10194 print_operand_address (FILE *file, rtx x)
10196 if (GET_CODE (x) == REG)
10197 fprintf (file, "0(%s)", reg_names[ REGNO (x) ]);
10198 else if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST
10199 || GET_CODE (x) == LABEL_REF)
10201 output_addr_const (file, x);
10202 if (small_data_operand (x, GET_MODE (x)))
10203 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
10204 reg_names[SMALL_DATA_REG]);
10206 gcc_assert (!TARGET_TOC);
10208 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG)
10210 if (REGNO (XEXP (x, 0)) == 0)
10211 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 1)) ],
10212 reg_names[ REGNO (XEXP (x, 0)) ]);
10214 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 0)) ],
10215 reg_names[ REGNO (XEXP (x, 1)) ]);
10217 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
10218 fprintf (file, HOST_WIDE_INT_PRINT_DEC "(%s)",
10219 INTVAL (XEXP (x, 1)), reg_names[ REGNO (XEXP (x, 0)) ]);
10221 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
10222 && CONSTANT_P (XEXP (x, 1)))
10224 output_addr_const (file, XEXP (x, 1));
10225 fprintf (file, "@l(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
10229 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
10230 && CONSTANT_P (XEXP (x, 1)))
10232 fprintf (file, "lo16(");
10233 output_addr_const (file, XEXP (x, 1));
10234 fprintf (file, ")(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
10237 else if (legitimate_constant_pool_address_p (x))
10239 if (TARGET_AIX && (!TARGET_ELF || !TARGET_MINIMAL_TOC))
10241 rtx contains_minus = XEXP (x, 1);
10245 /* Find the (minus (sym) (toc)) buried in X, and temporarily
10246 turn it into (sym) for output_addr_const. */
10247 while (GET_CODE (XEXP (contains_minus, 0)) != MINUS)
10248 contains_minus = XEXP (contains_minus, 0);
10250 minus = XEXP (contains_minus, 0);
10251 symref = XEXP (minus, 0);
10252 XEXP (contains_minus, 0) = symref;
10257 name = XSTR (symref, 0);
10258 newname = alloca (strlen (name) + sizeof ("@toc"));
10259 strcpy (newname, name);
10260 strcat (newname, "@toc");
10261 XSTR (symref, 0) = newname;
10263 output_addr_const (file, XEXP (x, 1));
10265 XSTR (symref, 0) = name;
10266 XEXP (contains_minus, 0) = minus;
10269 output_addr_const (file, XEXP (x, 1));
10271 fprintf (file, "(%s)", reg_names[REGNO (XEXP (x, 0))]);
10274 gcc_unreachable ();
10277 /* Target hook for assembling integer objects. The PowerPC version has
10278 to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
10279 is defined. It also needs to handle DI-mode objects on 64-bit
10283 rs6000_assemble_integer (rtx x, unsigned int size, int aligned_p)
10285 #ifdef RELOCATABLE_NEEDS_FIXUP
10286 /* Special handling for SI values. */
10287 if (RELOCATABLE_NEEDS_FIXUP && size == 4 && aligned_p)
10289 extern int in_toc_section (void);
10290 static int recurse = 0;
10292 /* For -mrelocatable, we mark all addresses that need to be fixed up
10293 in the .fixup section. */
10294 if (TARGET_RELOCATABLE
10295 && !in_toc_section ()
10296 && !in_text_section ()
10297 && !in_unlikely_text_section ()
10299 && GET_CODE (x) != CONST_INT
10300 && GET_CODE (x) != CONST_DOUBLE
10306 ASM_GENERATE_INTERNAL_LABEL (buf, "LCP", fixuplabelno);
10308 ASM_OUTPUT_LABEL (asm_out_file, buf);
10309 fprintf (asm_out_file, "\t.long\t(");
10310 output_addr_const (asm_out_file, x);
10311 fprintf (asm_out_file, ")@fixup\n");
10312 fprintf (asm_out_file, "\t.section\t\".fixup\",\"aw\"\n");
10313 ASM_OUTPUT_ALIGN (asm_out_file, 2);
10314 fprintf (asm_out_file, "\t.long\t");
10315 assemble_name (asm_out_file, buf);
10316 fprintf (asm_out_file, "\n\t.previous\n");
10320 /* Remove initial .'s to turn a -mcall-aixdesc function
10321 address into the address of the descriptor, not the function
10323 else if (GET_CODE (x) == SYMBOL_REF
10324 && XSTR (x, 0)[0] == '.'
10325 && DEFAULT_ABI == ABI_AIX)
10327 const char *name = XSTR (x, 0);
10328 while (*name == '.')
10331 fprintf (asm_out_file, "\t.long\t%s\n", name);
10335 #endif /* RELOCATABLE_NEEDS_FIXUP */
10336 return default_assemble_integer (x, size, aligned_p);
10339 #ifdef HAVE_GAS_HIDDEN
10340 /* Emit an assembler directive to set symbol visibility for DECL to
10341 VISIBILITY_TYPE. */
10344 rs6000_assemble_visibility (tree decl, int vis)
10346 /* Functions need to have their entry point symbol visibility set as
10347 well as their descriptor symbol visibility. */
10348 if (DEFAULT_ABI == ABI_AIX
10350 && TREE_CODE (decl) == FUNCTION_DECL)
10352 static const char * const visibility_types[] = {
10353 NULL, "internal", "hidden", "protected"
10356 const char *name, *type;
10358 name = ((* targetm.strip_name_encoding)
10359 (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl))));
10360 type = visibility_types[vis];
10362 fprintf (asm_out_file, "\t.%s\t%s\n", type, name);
10363 fprintf (asm_out_file, "\t.%s\t.%s\n", type, name);
10366 default_assemble_visibility (decl, vis);
10371 rs6000_reverse_condition (enum machine_mode mode, enum rtx_code code)
10373 /* Reversal of FP compares takes care -- an ordered compare
10374 becomes an unordered compare and vice versa. */
10375 if (mode == CCFPmode
10376 && (!flag_finite_math_only
10377 || code == UNLT || code == UNLE || code == UNGT || code == UNGE
10378 || code == UNEQ || code == LTGT))
10379 return reverse_condition_maybe_unordered (code);
10381 return reverse_condition (code);
10384 /* Generate a compare for CODE. Return a brand-new rtx that
10385 represents the result of the compare. */
10388 rs6000_generate_compare (enum rtx_code code)
10390 enum machine_mode comp_mode;
10391 rtx compare_result;
10393 if (rs6000_compare_fp_p)
10394 comp_mode = CCFPmode;
10395 else if (code == GTU || code == LTU
10396 || code == GEU || code == LEU)
10397 comp_mode = CCUNSmode;
10398 else if ((code == EQ || code == NE)
10399 && GET_CODE (rs6000_compare_op0) == SUBREG
10400 && GET_CODE (rs6000_compare_op1) == SUBREG
10401 && SUBREG_PROMOTED_UNSIGNED_P (rs6000_compare_op0)
10402 && SUBREG_PROMOTED_UNSIGNED_P (rs6000_compare_op1))
10403 /* These are unsigned values, perhaps there will be a later
10404 ordering compare that can be shared with this one.
10405 Unfortunately we cannot detect the signedness of the operands
10406 for non-subregs. */
10407 comp_mode = CCUNSmode;
10409 comp_mode = CCmode;
10411 /* First, the compare. */
10412 compare_result = gen_reg_rtx (comp_mode);
10414 /* SPE FP compare instructions on the GPRs. Yuck! */
10415 if ((TARGET_E500 && !TARGET_FPRS && TARGET_HARD_FLOAT)
10416 && rs6000_compare_fp_p)
10418 rtx cmp, or_result, compare_result2;
10419 enum machine_mode op_mode = GET_MODE (rs6000_compare_op0);
10421 if (op_mode == VOIDmode)
10422 op_mode = GET_MODE (rs6000_compare_op1);
10424 /* Note: The E500 comparison instructions set the GT bit (x +
10425 1), on success. This explains the mess. */
10429 case EQ: case UNEQ: case NE: case LTGT:
10433 cmp = flag_unsafe_math_optimizations
10434 ? gen_tstsfeq_gpr (compare_result, rs6000_compare_op0,
10435 rs6000_compare_op1)
10436 : gen_cmpsfeq_gpr (compare_result, rs6000_compare_op0,
10437 rs6000_compare_op1);
10441 cmp = flag_unsafe_math_optimizations
10442 ? gen_tstdfeq_gpr (compare_result, rs6000_compare_op0,
10443 rs6000_compare_op1)
10444 : gen_cmpdfeq_gpr (compare_result, rs6000_compare_op0,
10445 rs6000_compare_op1);
10449 gcc_unreachable ();
10453 case GT: case GTU: case UNGT: case UNGE: case GE: case GEU:
10457 cmp = flag_unsafe_math_optimizations
10458 ? gen_tstsfgt_gpr (compare_result, rs6000_compare_op0,
10459 rs6000_compare_op1)
10460 : gen_cmpsfgt_gpr (compare_result, rs6000_compare_op0,
10461 rs6000_compare_op1);
10465 cmp = flag_unsafe_math_optimizations
10466 ? gen_tstdfgt_gpr (compare_result, rs6000_compare_op0,
10467 rs6000_compare_op1)
10468 : gen_cmpdfgt_gpr (compare_result, rs6000_compare_op0,
10469 rs6000_compare_op1);
10473 gcc_unreachable ();
10477 case LT: case LTU: case UNLT: case UNLE: case LE: case LEU:
10481 cmp = flag_unsafe_math_optimizations
10482 ? gen_tstsflt_gpr (compare_result, rs6000_compare_op0,
10483 rs6000_compare_op1)
10484 : gen_cmpsflt_gpr (compare_result, rs6000_compare_op0,
10485 rs6000_compare_op1);
10489 cmp = flag_unsafe_math_optimizations
10490 ? gen_tstdflt_gpr (compare_result, rs6000_compare_op0,
10491 rs6000_compare_op1)
10492 : gen_cmpdflt_gpr (compare_result, rs6000_compare_op0,
10493 rs6000_compare_op1);
10497 gcc_unreachable ();
10501 gcc_unreachable ();
10504 /* Synthesize LE and GE from LT/GT || EQ. */
10505 if (code == LE || code == GE || code == LEU || code == GEU)
10511 case LE: code = LT; break;
10512 case GE: code = GT; break;
10513 case LEU: code = LT; break;
10514 case GEU: code = GT; break;
10515 default: gcc_unreachable ();
10518 compare_result2 = gen_reg_rtx (CCFPmode);
10524 cmp = flag_unsafe_math_optimizations
10525 ? gen_tstsfeq_gpr (compare_result2, rs6000_compare_op0,
10526 rs6000_compare_op1)
10527 : gen_cmpsfeq_gpr (compare_result2, rs6000_compare_op0,
10528 rs6000_compare_op1);
10532 cmp = flag_unsafe_math_optimizations
10533 ? gen_tstdfeq_gpr (compare_result2, rs6000_compare_op0,
10534 rs6000_compare_op1)
10535 : gen_cmpdfeq_gpr (compare_result2, rs6000_compare_op0,
10536 rs6000_compare_op1);
10540 gcc_unreachable ();
10544 /* OR them together. */
10545 or_result = gen_reg_rtx (CCFPmode);
10546 cmp = gen_e500_cr_ior_compare (or_result, compare_result,
10548 compare_result = or_result;
10553 if (code == NE || code == LTGT)
10563 /* Generate XLC-compatible TFmode compare as PARALLEL with extra
10564 CLOBBERs to match cmptf_internal2 pattern. */
10565 if (comp_mode == CCFPmode && TARGET_XL_COMPAT
10566 && GET_MODE (rs6000_compare_op0) == TFmode
10567 && (DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_DARWIN)
10568 && TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_LONG_DOUBLE_128)
10569 emit_insn (gen_rtx_PARALLEL (VOIDmode,
10571 gen_rtx_SET (VOIDmode,
10573 gen_rtx_COMPARE (comp_mode,
10574 rs6000_compare_op0,
10575 rs6000_compare_op1)),
10576 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
10577 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
10578 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
10579 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
10580 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
10581 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
10582 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
10583 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)))));
10585 emit_insn (gen_rtx_SET (VOIDmode, compare_result,
10586 gen_rtx_COMPARE (comp_mode,
10587 rs6000_compare_op0,
10588 rs6000_compare_op1)));
10591 /* Some kinds of FP comparisons need an OR operation;
10592 under flag_finite_math_only we don't bother. */
10593 if (rs6000_compare_fp_p
10594 && !flag_finite_math_only
10595 && !(TARGET_HARD_FLOAT && TARGET_E500 && !TARGET_FPRS)
10596 && (code == LE || code == GE
10597 || code == UNEQ || code == LTGT
10598 || code == UNGT || code == UNLT))
10600 enum rtx_code or1, or2;
10601 rtx or1_rtx, or2_rtx, compare2_rtx;
10602 rtx or_result = gen_reg_rtx (CCEQmode);
10606 case LE: or1 = LT; or2 = EQ; break;
10607 case GE: or1 = GT; or2 = EQ; break;
10608 case UNEQ: or1 = UNORDERED; or2 = EQ; break;
10609 case LTGT: or1 = LT; or2 = GT; break;
10610 case UNGT: or1 = UNORDERED; or2 = GT; break;
10611 case UNLT: or1 = UNORDERED; or2 = LT; break;
10612 default: gcc_unreachable ();
10614 validate_condition_mode (or1, comp_mode);
10615 validate_condition_mode (or2, comp_mode);
10616 or1_rtx = gen_rtx_fmt_ee (or1, SImode, compare_result, const0_rtx);
10617 or2_rtx = gen_rtx_fmt_ee (or2, SImode, compare_result, const0_rtx);
10618 compare2_rtx = gen_rtx_COMPARE (CCEQmode,
10619 gen_rtx_IOR (SImode, or1_rtx, or2_rtx),
10621 emit_insn (gen_rtx_SET (VOIDmode, or_result, compare2_rtx));
10623 compare_result = or_result;
10627 validate_condition_mode (code, GET_MODE (compare_result));
10629 return gen_rtx_fmt_ee (code, VOIDmode, compare_result, const0_rtx);
10633 /* Emit the RTL for an sCOND pattern. */
10636 rs6000_emit_sCOND (enum rtx_code code, rtx result)
10639 enum machine_mode op_mode;
10640 enum rtx_code cond_code;
10642 condition_rtx = rs6000_generate_compare (code);
10643 cond_code = GET_CODE (condition_rtx);
10645 if (TARGET_E500 && rs6000_compare_fp_p
10646 && !TARGET_FPRS && TARGET_HARD_FLOAT)
10650 PUT_MODE (condition_rtx, SImode);
10651 t = XEXP (condition_rtx, 0);
10653 gcc_assert (cond_code == NE || cond_code == EQ);
10655 if (cond_code == NE)
10656 emit_insn (gen_e500_flip_gt_bit (t, t));
10658 emit_insn (gen_move_from_CR_gt_bit (result, t));
10662 if (cond_code == NE
10663 || cond_code == GE || cond_code == LE
10664 || cond_code == GEU || cond_code == LEU
10665 || cond_code == ORDERED || cond_code == UNGE || cond_code == UNLE)
10667 rtx not_result = gen_reg_rtx (CCEQmode);
10668 rtx not_op, rev_cond_rtx;
10669 enum machine_mode cc_mode;
10671 cc_mode = GET_MODE (XEXP (condition_rtx, 0));
10673 rev_cond_rtx = gen_rtx_fmt_ee (rs6000_reverse_condition (cc_mode, cond_code),
10674 SImode, XEXP (condition_rtx, 0), const0_rtx);
10675 not_op = gen_rtx_COMPARE (CCEQmode, rev_cond_rtx, const0_rtx);
10676 emit_insn (gen_rtx_SET (VOIDmode, not_result, not_op));
10677 condition_rtx = gen_rtx_EQ (VOIDmode, not_result, const0_rtx);
10680 op_mode = GET_MODE (rs6000_compare_op0);
10681 if (op_mode == VOIDmode)
10682 op_mode = GET_MODE (rs6000_compare_op1);
10684 if (TARGET_POWERPC64 && (op_mode == DImode || rs6000_compare_fp_p))
10686 PUT_MODE (condition_rtx, DImode);
10687 convert_move (result, condition_rtx, 0);
10691 PUT_MODE (condition_rtx, SImode);
10692 emit_insn (gen_rtx_SET (VOIDmode, result, condition_rtx));
10696 /* Emit a branch of kind CODE to location LOC. */
10699 rs6000_emit_cbranch (enum rtx_code code, rtx loc)
10701 rtx condition_rtx, loc_ref;
10703 condition_rtx = rs6000_generate_compare (code);
10704 loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
10705 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
10706 gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
10707 loc_ref, pc_rtx)));
10710 /* Return the string to output a conditional branch to LABEL, which is
10711 the operand number of the label, or -1 if the branch is really a
10712 conditional return.
10714 OP is the conditional expression. XEXP (OP, 0) is assumed to be a
10715 condition code register and its mode specifies what kind of
10716 comparison we made.
10718 REVERSED is nonzero if we should reverse the sense of the comparison.
10720 INSN is the insn. */
10723 output_cbranch (rtx op, const char *label, int reversed, rtx insn)
10725 static char string[64];
10726 enum rtx_code code = GET_CODE (op);
10727 rtx cc_reg = XEXP (op, 0);
10728 enum machine_mode mode = GET_MODE (cc_reg);
10729 int cc_regno = REGNO (cc_reg) - CR0_REGNO;
10730 int need_longbranch = label != NULL && get_attr_length (insn) == 8;
10731 int really_reversed = reversed ^ need_longbranch;
10737 validate_condition_mode (code, mode);
10739 /* Work out which way this really branches. We could use
10740 reverse_condition_maybe_unordered here always but this
10741 makes the resulting assembler clearer. */
10742 if (really_reversed)
10744 /* Reversal of FP compares takes care -- an ordered compare
10745 becomes an unordered compare and vice versa. */
10746 if (mode == CCFPmode)
10747 code = reverse_condition_maybe_unordered (code);
10749 code = reverse_condition (code);
10752 if ((TARGET_E500 && !TARGET_FPRS && TARGET_HARD_FLOAT) && mode == CCFPmode)
10754 /* The efscmp/tst* instructions twiddle bit 2, which maps nicely
10759 /* Opposite of GT. */
10768 gcc_unreachable ();
10774 /* Not all of these are actually distinct opcodes, but
10775 we distinguish them for clarity of the resulting assembler. */
10776 case NE: case LTGT:
10777 ccode = "ne"; break;
10778 case EQ: case UNEQ:
10779 ccode = "eq"; break;
10781 ccode = "ge"; break;
10782 case GT: case GTU: case UNGT:
10783 ccode = "gt"; break;
10785 ccode = "le"; break;
10786 case LT: case LTU: case UNLT:
10787 ccode = "lt"; break;
10788 case UNORDERED: ccode = "un"; break;
10789 case ORDERED: ccode = "nu"; break;
10790 case UNGE: ccode = "nl"; break;
10791 case UNLE: ccode = "ng"; break;
10793 gcc_unreachable ();
10796 /* Maybe we have a guess as to how likely the branch is.
10797 The old mnemonics don't have a way to specify this information. */
10799 note = find_reg_note (insn, REG_BR_PROB, NULL_RTX);
10800 if (note != NULL_RTX)
10802 /* PROB is the difference from 50%. */
10803 int prob = INTVAL (XEXP (note, 0)) - REG_BR_PROB_BASE / 2;
10805 /* Only hint for highly probable/improbable branches on newer
10806 cpus as static prediction overrides processor dynamic
10807 prediction. For older cpus we may as well always hint, but
10808 assume not taken for branches that are very close to 50% as a
10809 mispredicted taken branch is more expensive than a
10810 mispredicted not-taken branch. */
10811 if (rs6000_always_hint
10812 || abs (prob) > REG_BR_PROB_BASE / 100 * 48)
10814 if (abs (prob) > REG_BR_PROB_BASE / 20
10815 && ((prob > 0) ^ need_longbranch))
10823 s += sprintf (s, "{b%sr|b%slr%s} ", ccode, ccode, pred);
10825 s += sprintf (s, "{b%s|b%s%s} ", ccode, ccode, pred);
10827 /* We need to escape any '%' characters in the reg_names string.
10828 Assume they'd only be the first character.... */
10829 if (reg_names[cc_regno + CR0_REGNO][0] == '%')
10831 s += sprintf (s, "%s", reg_names[cc_regno + CR0_REGNO]);
10835 /* If the branch distance was too far, we may have to use an
10836 unconditional branch to go the distance. */
10837 if (need_longbranch)
10838 s += sprintf (s, ",$+8\n\tb %s", label);
10840 s += sprintf (s, ",%s", label);
10846 /* Return the string to flip the GT bit on a CR. */
10848 output_e500_flip_gt_bit (rtx dst, rtx src)
10850 static char string[64];
10853 gcc_assert (GET_CODE (dst) == REG && CR_REGNO_P (REGNO (dst))
10854 && GET_CODE (src) == REG && CR_REGNO_P (REGNO (src)));
10857 a = 4 * (REGNO (dst) - CR0_REGNO) + 1;
10858 b = 4 * (REGNO (src) - CR0_REGNO) + 1;
10860 sprintf (string, "crnot %d,%d", a, b);
10864 /* Return insn index for the vector compare instruction for given CODE,
10865 and DEST_MODE, OP_MODE. Return INSN_NOT_AVAILABLE if valid insn is
10869 get_vec_cmp_insn (enum rtx_code code,
10870 enum machine_mode dest_mode,
10871 enum machine_mode op_mode)
10873 if (!TARGET_ALTIVEC)
10874 return INSN_NOT_AVAILABLE;
10879 if (dest_mode == V16QImode && op_mode == V16QImode)
10880 return UNSPEC_VCMPEQUB;
10881 if (dest_mode == V8HImode && op_mode == V8HImode)
10882 return UNSPEC_VCMPEQUH;
10883 if (dest_mode == V4SImode && op_mode == V4SImode)
10884 return UNSPEC_VCMPEQUW;
10885 if (dest_mode == V4SImode && op_mode == V4SFmode)
10886 return UNSPEC_VCMPEQFP;
10889 if (dest_mode == V4SImode && op_mode == V4SFmode)
10890 return UNSPEC_VCMPGEFP;
10892 if (dest_mode == V16QImode && op_mode == V16QImode)
10893 return UNSPEC_VCMPGTSB;
10894 if (dest_mode == V8HImode && op_mode == V8HImode)
10895 return UNSPEC_VCMPGTSH;
10896 if (dest_mode == V4SImode && op_mode == V4SImode)
10897 return UNSPEC_VCMPGTSW;
10898 if (dest_mode == V4SImode && op_mode == V4SFmode)
10899 return UNSPEC_VCMPGTFP;
10902 if (dest_mode == V16QImode && op_mode == V16QImode)
10903 return UNSPEC_VCMPGTUB;
10904 if (dest_mode == V8HImode && op_mode == V8HImode)
10905 return UNSPEC_VCMPGTUH;
10906 if (dest_mode == V4SImode && op_mode == V4SImode)
10907 return UNSPEC_VCMPGTUW;
10912 return INSN_NOT_AVAILABLE;
10915 /* Emit vector compare for operands OP0 and OP1 using code RCODE.
10916 DMODE is expected destination mode. This is a recursive function. */
10919 rs6000_emit_vector_compare (enum rtx_code rcode,
10921 enum machine_mode dmode)
10925 enum machine_mode dest_mode;
10926 enum machine_mode op_mode = GET_MODE (op1);
10928 gcc_assert (TARGET_ALTIVEC);
10929 gcc_assert (GET_MODE (op0) == GET_MODE (op1));
10931 /* Floating point vector compare instructions uses destination V4SImode.
10932 Move destination to appropriate mode later. */
10933 if (dmode == V4SFmode)
10934 dest_mode = V4SImode;
10938 mask = gen_reg_rtx (dest_mode);
10939 vec_cmp_insn = get_vec_cmp_insn (rcode, dest_mode, op_mode);
10941 if (vec_cmp_insn == INSN_NOT_AVAILABLE)
10943 bool swap_operands = false;
10944 bool try_again = false;
10949 swap_operands = true;
10954 swap_operands = true;
10958 /* Treat A != B as ~(A==B). */
10960 enum insn_code nor_code;
10961 rtx eq_rtx = rs6000_emit_vector_compare (EQ, op0, op1,
10964 nor_code = one_cmpl_optab->handlers[(int)dest_mode].insn_code;
10965 gcc_assert (nor_code != CODE_FOR_nothing);
10966 emit_insn (GEN_FCN (nor_code) (mask, eq_rtx));
10968 if (dmode != dest_mode)
10970 rtx temp = gen_reg_rtx (dest_mode);
10971 convert_move (temp, mask, 0);
10981 /* Try GT/GTU/LT/LTU OR EQ */
10984 enum insn_code ior_code;
10985 enum rtx_code new_code;
11006 gcc_unreachable ();
11009 c_rtx = rs6000_emit_vector_compare (new_code,
11010 op0, op1, dest_mode);
11011 eq_rtx = rs6000_emit_vector_compare (EQ, op0, op1,
11014 ior_code = ior_optab->handlers[(int)dest_mode].insn_code;
11015 gcc_assert (ior_code != CODE_FOR_nothing);
11016 emit_insn (GEN_FCN (ior_code) (mask, c_rtx, eq_rtx));
11017 if (dmode != dest_mode)
11019 rtx temp = gen_reg_rtx (dest_mode);
11020 convert_move (temp, mask, 0);
11027 gcc_unreachable ();
11032 vec_cmp_insn = get_vec_cmp_insn (rcode, dest_mode, op_mode);
11033 /* You only get two chances. */
11034 gcc_assert (vec_cmp_insn != INSN_NOT_AVAILABLE);
11046 emit_insn (gen_rtx_SET (VOIDmode, mask,
11047 gen_rtx_UNSPEC (dest_mode,
11048 gen_rtvec (2, op0, op1),
11050 if (dmode != dest_mode)
11052 rtx temp = gen_reg_rtx (dest_mode);
11053 convert_move (temp, mask, 0);
11059 /* Return vector select instruction for MODE. Return INSN_NOT_AVAILABLE, if
11060 valid insn doesn exist for given mode. */
11063 get_vsel_insn (enum machine_mode mode)
11068 return UNSPEC_VSEL4SI;
11071 return UNSPEC_VSEL4SF;
11074 return UNSPEC_VSEL8HI;
11077 return UNSPEC_VSEL16QI;
11080 return INSN_NOT_AVAILABLE;
11083 return INSN_NOT_AVAILABLE;
11086 /* Emit vector select insn where DEST is destination using
11087 operands OP1, OP2 and MASK. */
11090 rs6000_emit_vector_select (rtx dest, rtx op1, rtx op2, rtx mask)
11093 enum machine_mode dest_mode = GET_MODE (dest);
11094 int vsel_insn_index = get_vsel_insn (GET_MODE (dest));
11096 temp = gen_reg_rtx (dest_mode);
11098 /* For each vector element, select op1 when mask is 1 otherwise
11100 t = gen_rtx_SET (VOIDmode, temp,
11101 gen_rtx_UNSPEC (dest_mode,
11102 gen_rtvec (3, op2, op1, mask),
11105 emit_move_insn (dest, temp);
11109 /* Emit vector conditional expression.
11110 DEST is destination. OP1 and OP2 are two VEC_COND_EXPR operands.
11111 CC_OP0 and CC_OP1 are the two operands for the relation operation COND. */
11114 rs6000_emit_vector_cond_expr (rtx dest, rtx op1, rtx op2,
11115 rtx cond, rtx cc_op0, rtx cc_op1)
11117 enum machine_mode dest_mode = GET_MODE (dest);
11118 enum rtx_code rcode = GET_CODE (cond);
11121 if (!TARGET_ALTIVEC)
11124 /* Get the vector mask for the given relational operations. */
11125 mask = rs6000_emit_vector_compare (rcode, cc_op0, cc_op1, dest_mode);
11127 rs6000_emit_vector_select (dest, op1, op2, mask);
11132 /* Emit a conditional move: move TRUE_COND to DEST if OP of the
11133 operands of the last comparison is nonzero/true, FALSE_COND if it
11134 is zero/false. Return 0 if the hardware has no such operation. */
11137 rs6000_emit_cmove (rtx dest, rtx op, rtx true_cond, rtx false_cond)
11139 enum rtx_code code = GET_CODE (op);
11140 rtx op0 = rs6000_compare_op0;
11141 rtx op1 = rs6000_compare_op1;
11142 REAL_VALUE_TYPE c1;
11143 enum machine_mode compare_mode = GET_MODE (op0);
11144 enum machine_mode result_mode = GET_MODE (dest);
11146 bool is_against_zero;
11148 /* These modes should always match. */
11149 if (GET_MODE (op1) != compare_mode
11150 /* In the isel case however, we can use a compare immediate, so
11151 op1 may be a small constant. */
11152 && (!TARGET_ISEL || !short_cint_operand (op1, VOIDmode)))
11154 if (GET_MODE (true_cond) != result_mode)
11156 if (GET_MODE (false_cond) != result_mode)
11159 /* First, work out if the hardware can do this at all, or
11160 if it's too slow.... */
11161 if (! rs6000_compare_fp_p)
11164 return rs6000_emit_int_cmove (dest, op, true_cond, false_cond);
11167 else if (TARGET_E500 && TARGET_HARD_FLOAT && !TARGET_FPRS
11168 && GET_MODE_CLASS (compare_mode) == MODE_FLOAT)
11171 is_against_zero = op1 == CONST0_RTX (compare_mode);
11173 /* A floating-point subtract might overflow, underflow, or produce
11174 an inexact result, thus changing the floating-point flags, so it
11175 can't be generated if we care about that. It's safe if one side
11176 of the construct is zero, since then no subtract will be
11178 if (GET_MODE_CLASS (compare_mode) == MODE_FLOAT
11179 && flag_trapping_math && ! is_against_zero)
11182 /* Eliminate half of the comparisons by switching operands, this
11183 makes the remaining code simpler. */
11184 if (code == UNLT || code == UNGT || code == UNORDERED || code == NE
11185 || code == LTGT || code == LT || code == UNLE)
11187 code = reverse_condition_maybe_unordered (code);
11189 true_cond = false_cond;
11193 /* UNEQ and LTGT take four instructions for a comparison with zero,
11194 it'll probably be faster to use a branch here too. */
11195 if (code == UNEQ && HONOR_NANS (compare_mode))
11198 if (GET_CODE (op1) == CONST_DOUBLE)
11199 REAL_VALUE_FROM_CONST_DOUBLE (c1, op1);
11201 /* We're going to try to implement comparisons by performing
11202 a subtract, then comparing against zero. Unfortunately,
11203 Inf - Inf is NaN which is not zero, and so if we don't
11204 know that the operand is finite and the comparison
11205 would treat EQ different to UNORDERED, we can't do it. */
11206 if (HONOR_INFINITIES (compare_mode)
11207 && code != GT && code != UNGE
11208 && (GET_CODE (op1) != CONST_DOUBLE || real_isinf (&c1))
11209 /* Constructs of the form (a OP b ? a : b) are safe. */
11210 && ((! rtx_equal_p (op0, false_cond) && ! rtx_equal_p (op1, false_cond))
11211 || (! rtx_equal_p (op0, true_cond)
11212 && ! rtx_equal_p (op1, true_cond))))
11215 /* At this point we know we can use fsel. */
11217 /* Reduce the comparison to a comparison against zero. */
11218 if (! is_against_zero)
11220 temp = gen_reg_rtx (compare_mode);
11221 emit_insn (gen_rtx_SET (VOIDmode, temp,
11222 gen_rtx_MINUS (compare_mode, op0, op1)));
11224 op1 = CONST0_RTX (compare_mode);
11227 /* If we don't care about NaNs we can reduce some of the comparisons
11228 down to faster ones. */
11229 if (! HONOR_NANS (compare_mode))
11235 true_cond = false_cond;
11248 /* Now, reduce everything down to a GE. */
11255 temp = gen_reg_rtx (compare_mode);
11256 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
11261 temp = gen_reg_rtx (compare_mode);
11262 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_ABS (compare_mode, op0)));
11267 temp = gen_reg_rtx (compare_mode);
11268 emit_insn (gen_rtx_SET (VOIDmode, temp,
11269 gen_rtx_NEG (compare_mode,
11270 gen_rtx_ABS (compare_mode, op0))));
11275 /* a UNGE 0 <-> (a GE 0 || -a UNLT 0) */
11276 temp = gen_reg_rtx (result_mode);
11277 emit_insn (gen_rtx_SET (VOIDmode, temp,
11278 gen_rtx_IF_THEN_ELSE (result_mode,
11279 gen_rtx_GE (VOIDmode,
11281 true_cond, false_cond)));
11282 false_cond = true_cond;
11285 temp = gen_reg_rtx (compare_mode);
11286 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
11291 /* a GT 0 <-> (a GE 0 && -a UNLT 0) */
11292 temp = gen_reg_rtx (result_mode);
11293 emit_insn (gen_rtx_SET (VOIDmode, temp,
11294 gen_rtx_IF_THEN_ELSE (result_mode,
11295 gen_rtx_GE (VOIDmode,
11297 true_cond, false_cond)));
11298 true_cond = false_cond;
11301 temp = gen_reg_rtx (compare_mode);
11302 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
11307 gcc_unreachable ();
11310 emit_insn (gen_rtx_SET (VOIDmode, dest,
11311 gen_rtx_IF_THEN_ELSE (result_mode,
11312 gen_rtx_GE (VOIDmode,
11314 true_cond, false_cond)));
11318 /* Same as above, but for ints (isel). */
11321 rs6000_emit_int_cmove (rtx dest, rtx op, rtx true_cond, rtx false_cond)
11323 rtx condition_rtx, cr;
11325 /* All isel implementations thus far are 32-bits. */
11326 if (GET_MODE (rs6000_compare_op0) != SImode)
11329 /* We still have to do the compare, because isel doesn't do a
11330 compare, it just looks at the CRx bits set by a previous compare
11332 condition_rtx = rs6000_generate_compare (GET_CODE (op));
11333 cr = XEXP (condition_rtx, 0);
11335 if (GET_MODE (cr) == CCmode)
11336 emit_insn (gen_isel_signed (dest, condition_rtx,
11337 true_cond, false_cond, cr));
11339 emit_insn (gen_isel_unsigned (dest, condition_rtx,
11340 true_cond, false_cond, cr));
11346 output_isel (rtx *operands)
11348 enum rtx_code code;
11350 code = GET_CODE (operands[1]);
11351 if (code == GE || code == GEU || code == LE || code == LEU || code == NE)
11353 PUT_CODE (operands[1], reverse_condition (code));
11354 return "isel %0,%3,%2,%j1";
11357 return "isel %0,%2,%3,%j1";
11361 rs6000_emit_minmax (rtx dest, enum rtx_code code, rtx op0, rtx op1)
11363 enum machine_mode mode = GET_MODE (op0);
11367 if (code == SMAX || code == SMIN)
11372 if (code == SMAX || code == UMAX)
11373 target = emit_conditional_move (dest, c, op0, op1, mode,
11374 op0, op1, mode, 0);
11376 target = emit_conditional_move (dest, c, op0, op1, mode,
11377 op1, op0, mode, 0);
11378 gcc_assert (target);
11379 if (target != dest)
11380 emit_move_insn (dest, target);
11383 /* Emit instructions to perform a load-reserved/store-conditional operation.
11384 The operation performed is an atomic
11385 (set M (CODE:MODE M OP))
11386 If not NULL, BEFORE is atomically set to M before the operation, and
11387 AFTER is set to M after the operation (that is, (CODE:MODE M OP)).
11388 If SYNC_P then a memory barrier is emitted before the operation.
11389 Either OP or M may be wrapped in a NOT operation. */
11392 rs6000_emit_sync (enum rtx_code code, enum machine_mode mode,
11393 rtx m, rtx op, rtx before_param, rtx after_param,
11396 enum machine_mode used_mode;
11397 rtx the_op, set_before, set_after, set_atomic, cc_scratch, before, after;
11400 HOST_WIDE_INT imask = GET_MODE_MASK (mode);
11401 rtx shift = NULL_RTX;
11404 emit_insn (gen_memory_barrier ());
11406 if (GET_CODE (m) == NOT)
11407 used_m = XEXP (m, 0);
11411 /* If this is smaller than SImode, we'll have to use SImode with
11413 if (mode == QImode || mode == HImode)
11417 if (MEM_ALIGN (used_m) >= 32)
11420 if (BYTES_BIG_ENDIAN)
11421 ishift = GET_MODE_BITSIZE (SImode) - GET_MODE_BITSIZE (mode);
11423 shift = GEN_INT (ishift);
11427 rtx addrSI, aligned_addr;
11428 int shift_mask = mode == QImode ? 0x18 : 0x10;
11430 addrSI = force_reg (SImode, gen_lowpart_common (SImode,
11431 XEXP (used_m, 0)));
11432 shift = gen_reg_rtx (SImode);
11434 emit_insn (gen_rlwinm (shift, addrSI, GEN_INT (3),
11435 GEN_INT (shift_mask)));
11436 emit_insn (gen_xorsi3 (shift, shift, GEN_INT (shift_mask)));
11438 aligned_addr = expand_binop (Pmode, and_optab,
11440 GEN_INT (-4), NULL_RTX,
11441 1, OPTAB_LIB_WIDEN);
11442 used_m = change_address (used_m, SImode, aligned_addr);
11443 set_mem_align (used_m, 32);
11444 /* It's safe to keep the old alias set of USED_M, because
11445 the operation is atomic and only affects the original
11447 if (GET_CODE (m) == NOT)
11448 m = gen_rtx_NOT (SImode, used_m);
11453 if (GET_CODE (op) == NOT)
11455 oldop = lowpart_subreg (SImode, XEXP (op, 0), mode);
11456 oldop = gen_rtx_NOT (SImode, oldop);
11459 oldop = lowpart_subreg (SImode, op, mode);
11464 newop = expand_binop (SImode, and_optab,
11465 oldop, GEN_INT (imask), NULL_RTX,
11466 1, OPTAB_LIB_WIDEN);
11467 emit_insn (gen_ashlsi3 (newop, newop, shift));
11471 newop = expand_binop (SImode, ior_optab,
11472 oldop, GEN_INT (~imask), NULL_RTX,
11473 1, OPTAB_LIB_WIDEN);
11474 emit_insn (gen_rotlsi3 (newop, newop, shift));
11481 newop = expand_binop (SImode, and_optab,
11482 oldop, GEN_INT (imask), NULL_RTX,
11483 1, OPTAB_LIB_WIDEN);
11484 emit_insn (gen_ashlsi3 (newop, newop, shift));
11486 mask = gen_reg_rtx (SImode);
11487 emit_move_insn (mask, GEN_INT (imask));
11488 emit_insn (gen_ashlsi3 (mask, mask, shift));
11490 newop = gen_rtx_AND (SImode, gen_rtx_PLUS (SImode, m, newop),
11492 newop = gen_rtx_IOR (SImode, newop,
11493 gen_rtx_AND (SImode,
11494 gen_rtx_NOT (SImode, mask),
11500 gcc_unreachable ();
11503 if (GET_CODE (m) == NOT)
11507 mask = gen_reg_rtx (SImode);
11508 emit_move_insn (mask, GEN_INT (imask));
11509 emit_insn (gen_ashlsi3 (mask, mask, shift));
11511 xorm = gen_rtx_XOR (SImode, used_m, mask);
11512 /* Depending on the value of 'op', the XOR or the operation might
11513 be able to be simplified away. */
11514 newop = simplify_gen_binary (code, SImode, xorm, newop);
11517 used_mode = SImode;
11518 before = gen_reg_rtx (used_mode);
11519 after = gen_reg_rtx (used_mode);
11524 before = before_param;
11525 after = after_param;
11527 if (before == NULL_RTX)
11528 before = gen_reg_rtx (used_mode);
11529 if (after == NULL_RTX)
11530 after = gen_reg_rtx (used_mode);
11533 if ((code == PLUS || GET_CODE (m) == NOT) && used_mode != mode)
11534 the_op = op; /* Computed above. */
11535 else if (GET_CODE (op) == NOT && GET_CODE (m) != NOT)
11536 the_op = gen_rtx_fmt_ee (code, used_mode, op, m);
11538 the_op = gen_rtx_fmt_ee (code, used_mode, m, op);
11540 set_after = gen_rtx_SET (VOIDmode, after, the_op);
11541 set_before = gen_rtx_SET (VOIDmode, before, used_m);
11542 set_atomic = gen_rtx_SET (VOIDmode, used_m,
11543 gen_rtx_UNSPEC (used_mode, gen_rtvec (1, the_op),
11545 cc_scratch = gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (CCmode));
11547 if (code == PLUS && used_mode != mode)
11548 vec = gen_rtvec (5, set_after, set_before, set_atomic, cc_scratch,
11549 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (SImode)));
11551 vec = gen_rtvec (4, set_after, set_before, set_atomic, cc_scratch);
11552 emit_insn (gen_rtx_PARALLEL (VOIDmode, vec));
11554 /* Shift and mask the return values properly. */
11555 if (used_mode != mode && before_param)
11557 emit_insn (gen_lshrsi3 (before, before, shift));
11558 convert_move (before_param, before, 1);
11561 if (used_mode != mode && after_param)
11563 emit_insn (gen_lshrsi3 (after, after, shift));
11564 convert_move (after_param, after, 1);
11567 /* The previous sequence will end with a branch that's dependent on
11568 the conditional store, so placing an isync will ensure that no
11569 other instructions (especially, no load or store instructions)
11570 can start before the atomic operation completes. */
11572 emit_insn (gen_isync ());
11575 /* Emit instructions to move SRC to DST. Called by splitters for
11576 multi-register moves. It will emit at most one instruction for
11577 each register that is accessed; that is, it won't emit li/lis pairs
11578 (or equivalent for 64-bit code). One of SRC or DST must be a hard
11582 rs6000_split_multireg_move (rtx dst, rtx src)
11584 /* The register number of the first register being moved. */
11586 /* The mode that is to be moved. */
11587 enum machine_mode mode;
11588 /* The mode that the move is being done in, and its size. */
11589 enum machine_mode reg_mode;
11591 /* The number of registers that will be moved. */
11594 reg = REG_P (dst) ? REGNO (dst) : REGNO (src);
11595 mode = GET_MODE (dst);
11596 nregs = hard_regno_nregs[reg][mode];
11597 if (FP_REGNO_P (reg))
11599 else if (ALTIVEC_REGNO_P (reg))
11600 reg_mode = V16QImode;
11602 reg_mode = word_mode;
11603 reg_mode_size = GET_MODE_SIZE (reg_mode);
11605 gcc_assert (reg_mode_size * nregs == GET_MODE_SIZE (mode));
11607 if (REG_P (src) && REG_P (dst) && (REGNO (src) < REGNO (dst)))
11609 /* Move register range backwards, if we might have destructive
11612 for (i = nregs - 1; i >= 0; i--)
11613 emit_insn (gen_rtx_SET (VOIDmode,
11614 simplify_gen_subreg (reg_mode, dst, mode,
11615 i * reg_mode_size),
11616 simplify_gen_subreg (reg_mode, src, mode,
11617 i * reg_mode_size)));
11623 bool used_update = false;
11625 if (MEM_P (src) && INT_REGNO_P (reg))
11629 if (GET_CODE (XEXP (src, 0)) == PRE_INC
11630 || GET_CODE (XEXP (src, 0)) == PRE_DEC)
11633 breg = XEXP (XEXP (src, 0), 0);
11634 delta_rtx = (GET_CODE (XEXP (src, 0)) == PRE_INC
11635 ? GEN_INT (GET_MODE_SIZE (GET_MODE (src)))
11636 : GEN_INT (-GET_MODE_SIZE (GET_MODE (src))));
11637 emit_insn (TARGET_32BIT
11638 ? gen_addsi3 (breg, breg, delta_rtx)
11639 : gen_adddi3 (breg, breg, delta_rtx));
11640 src = gen_rtx_MEM (mode, breg);
11642 else if (! offsettable_memref_p (src))
11644 rtx newsrc, basereg;
11645 basereg = gen_rtx_REG (Pmode, reg);
11646 emit_insn (gen_rtx_SET (VOIDmode, basereg, XEXP (src, 0)));
11647 newsrc = gen_rtx_MEM (GET_MODE (src), basereg);
11648 MEM_COPY_ATTRIBUTES (newsrc, src);
11652 breg = XEXP (src, 0);
11653 if (GET_CODE (breg) == PLUS || GET_CODE (breg) == LO_SUM)
11654 breg = XEXP (breg, 0);
11656 /* If the base register we are using to address memory is
11657 also a destination reg, then change that register last. */
11659 && REGNO (breg) >= REGNO (dst)
11660 && REGNO (breg) < REGNO (dst) + nregs)
11661 j = REGNO (breg) - REGNO (dst);
11664 if (GET_CODE (dst) == MEM && INT_REGNO_P (reg))
11668 if (GET_CODE (XEXP (dst, 0)) == PRE_INC
11669 || GET_CODE (XEXP (dst, 0)) == PRE_DEC)
11672 breg = XEXP (XEXP (dst, 0), 0);
11673 delta_rtx = (GET_CODE (XEXP (dst, 0)) == PRE_INC
11674 ? GEN_INT (GET_MODE_SIZE (GET_MODE (dst)))
11675 : GEN_INT (-GET_MODE_SIZE (GET_MODE (dst))));
11677 /* We have to update the breg before doing the store.
11678 Use store with update, if available. */
11682 rtx nsrc = simplify_gen_subreg (reg_mode, src, mode, 0);
11683 emit_insn (TARGET_32BIT
11684 ? (TARGET_POWERPC64
11685 ? gen_movdi_si_update (breg, breg, delta_rtx, nsrc)
11686 : gen_movsi_update (breg, breg, delta_rtx, nsrc))
11687 : gen_movdi_di_update (breg, breg, delta_rtx, nsrc));
11688 used_update = true;
11691 emit_insn (TARGET_32BIT
11692 ? gen_addsi3 (breg, breg, delta_rtx)
11693 : gen_adddi3 (breg, breg, delta_rtx));
11694 dst = gen_rtx_MEM (mode, breg);
11697 gcc_assert (offsettable_memref_p (dst));
11700 for (i = 0; i < nregs; i++)
11702 /* Calculate index to next subword. */
11707 /* If compiler already emitted move of first word by
11708 store with update, no need to do anything. */
11709 if (j == 0 && used_update)
11712 emit_insn (gen_rtx_SET (VOIDmode,
11713 simplify_gen_subreg (reg_mode, dst, mode,
11714 j * reg_mode_size),
11715 simplify_gen_subreg (reg_mode, src, mode,
11716 j * reg_mode_size)));
11722 /* This page contains routines that are used to determine what the
11723 function prologue and epilogue code will do and write them out. */
11725 /* Return the first fixed-point register that is required to be
11726 saved. 32 if none. */
11729 first_reg_to_save (void)
11733 /* Find lowest numbered live register. */
11734 for (first_reg = 13; first_reg <= 31; first_reg++)
11735 if (regs_ever_live[first_reg]
11736 && (! call_used_regs[first_reg]
11737 || (first_reg == RS6000_PIC_OFFSET_TABLE_REGNUM
11738 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
11739 || (DEFAULT_ABI == ABI_DARWIN && flag_pic)
11740 || (TARGET_TOC && TARGET_MINIMAL_TOC)))))
11745 && current_function_uses_pic_offset_table
11746 && first_reg > RS6000_PIC_OFFSET_TABLE_REGNUM)
11747 return RS6000_PIC_OFFSET_TABLE_REGNUM;
11753 /* Similar, for FP regs. */
11756 first_fp_reg_to_save (void)
11760 /* Find lowest numbered live register. */
11761 for (first_reg = 14 + 32; first_reg <= 63; first_reg++)
11762 if (regs_ever_live[first_reg])
11768 /* Similar, for AltiVec regs. */
11771 first_altivec_reg_to_save (void)
11775 /* Stack frame remains as is unless we are in AltiVec ABI. */
11776 if (! TARGET_ALTIVEC_ABI)
11777 return LAST_ALTIVEC_REGNO + 1;
11779 /* Find lowest numbered live register. */
11780 for (i = FIRST_ALTIVEC_REGNO + 20; i <= LAST_ALTIVEC_REGNO; ++i)
11781 if (regs_ever_live[i])
11787 /* Return a 32-bit mask of the AltiVec registers we need to set in
11788 VRSAVE. Bit n of the return value is 1 if Vn is live. The MSB in
11789 the 32-bit word is 0. */
11791 static unsigned int
11792 compute_vrsave_mask (void)
11794 unsigned int i, mask = 0;
11796 /* First, find out if we use _any_ altivec registers. */
11797 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
11798 if (regs_ever_live[i])
11799 mask |= ALTIVEC_REG_BIT (i);
11804 /* Next, remove the argument registers from the set. These must
11805 be in the VRSAVE mask set by the caller, so we don't need to add
11806 them in again. More importantly, the mask we compute here is
11807 used to generate CLOBBERs in the set_vrsave insn, and we do not
11808 wish the argument registers to die. */
11809 for (i = cfun->args_info.vregno - 1; i >= ALTIVEC_ARG_MIN_REG; --i)
11810 mask &= ~ALTIVEC_REG_BIT (i);
11812 /* Similarly, remove the return value from the set. */
11815 diddle_return_value (is_altivec_return_reg, &yes);
11817 mask &= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN);
11823 /* For a very restricted set of circumstances, we can cut down the
11824 size of prologues/epilogues by calling our own save/restore-the-world
11828 compute_save_world_info (rs6000_stack_t *info_ptr)
11830 info_ptr->world_save_p = 1;
11831 info_ptr->world_save_p
11832 = (WORLD_SAVE_P (info_ptr)
11833 && DEFAULT_ABI == ABI_DARWIN
11834 && ! (current_function_calls_setjmp && flag_exceptions)
11835 && info_ptr->first_fp_reg_save == FIRST_SAVED_FP_REGNO
11836 && info_ptr->first_gp_reg_save == FIRST_SAVED_GP_REGNO
11837 && info_ptr->first_altivec_reg_save == FIRST_SAVED_ALTIVEC_REGNO
11838 && info_ptr->cr_save_p);
11840 /* This will not work in conjunction with sibcalls. Make sure there
11841 are none. (This check is expensive, but seldom executed.) */
11842 if (WORLD_SAVE_P (info_ptr))
11845 for ( insn = get_last_insn_anywhere (); insn; insn = PREV_INSN (insn))
11846 if ( GET_CODE (insn) == CALL_INSN
11847 && SIBLING_CALL_P (insn))
11849 info_ptr->world_save_p = 0;
11854 if (WORLD_SAVE_P (info_ptr))
11856 /* Even if we're not touching VRsave, make sure there's room on the
11857 stack for it, if it looks like we're calling SAVE_WORLD, which
11858 will attempt to save it. */
11859 info_ptr->vrsave_size = 4;
11861 /* "Save" the VRsave register too if we're saving the world. */
11862 if (info_ptr->vrsave_mask == 0)
11863 info_ptr->vrsave_mask = compute_vrsave_mask ();
11865 /* Because the Darwin register save/restore routines only handle
11866 F14 .. F31 and V20 .. V31 as per the ABI, perform a consistency
11868 gcc_assert (info_ptr->first_fp_reg_save >= FIRST_SAVED_FP_REGNO
11869 && (info_ptr->first_altivec_reg_save
11870 >= FIRST_SAVED_ALTIVEC_REGNO));
11877 is_altivec_return_reg (rtx reg, void *xyes)
11879 bool *yes = (bool *) xyes;
11880 if (REGNO (reg) == ALTIVEC_ARG_RETURN)
11885 /* Calculate the stack information for the current function. This is
11886 complicated by having two separate calling sequences, the AIX calling
11887 sequence and the V.4 calling sequence.
11889 AIX (and Darwin/Mac OS X) stack frames look like:
11891 SP----> +---------------------------------------+
11892 | back chain to caller | 0 0
11893 +---------------------------------------+
11894 | saved CR | 4 8 (8-11)
11895 +---------------------------------------+
11897 +---------------------------------------+
11898 | reserved for compilers | 12 24
11899 +---------------------------------------+
11900 | reserved for binders | 16 32
11901 +---------------------------------------+
11902 | saved TOC pointer | 20 40
11903 +---------------------------------------+
11904 | Parameter save area (P) | 24 48
11905 +---------------------------------------+
11906 | Alloca space (A) | 24+P etc.
11907 +---------------------------------------+
11908 | Local variable space (L) | 24+P+A
11909 +---------------------------------------+
11910 | Float/int conversion temporary (X) | 24+P+A+L
11911 +---------------------------------------+
11912 | Save area for AltiVec registers (W) | 24+P+A+L+X
11913 +---------------------------------------+
11914 | AltiVec alignment padding (Y) | 24+P+A+L+X+W
11915 +---------------------------------------+
11916 | Save area for VRSAVE register (Z) | 24+P+A+L+X+W+Y
11917 +---------------------------------------+
11918 | Save area for GP registers (G) | 24+P+A+X+L+X+W+Y+Z
11919 +---------------------------------------+
11920 | Save area for FP registers (F) | 24+P+A+X+L+X+W+Y+Z+G
11921 +---------------------------------------+
11922 old SP->| back chain to caller's caller |
11923 +---------------------------------------+
11925 The required alignment for AIX configurations is two words (i.e., 8
11929 V.4 stack frames look like:
11931 SP----> +---------------------------------------+
11932 | back chain to caller | 0
11933 +---------------------------------------+
11934 | caller's saved LR | 4
11935 +---------------------------------------+
11936 | Parameter save area (P) | 8
11937 +---------------------------------------+
11938 | Alloca space (A) | 8+P
11939 +---------------------------------------+
11940 | Varargs save area (V) | 8+P+A
11941 +---------------------------------------+
11942 | Local variable space (L) | 8+P+A+V
11943 +---------------------------------------+
11944 | Float/int conversion temporary (X) | 8+P+A+V+L
11945 +---------------------------------------+
11946 | Save area for AltiVec registers (W) | 8+P+A+V+L+X
11947 +---------------------------------------+
11948 | AltiVec alignment padding (Y) | 8+P+A+V+L+X+W
11949 +---------------------------------------+
11950 | Save area for VRSAVE register (Z) | 8+P+A+V+L+X+W+Y
11951 +---------------------------------------+
11952 | SPE: area for 64-bit GP registers |
11953 +---------------------------------------+
11954 | SPE alignment padding |
11955 +---------------------------------------+
11956 | saved CR (C) | 8+P+A+V+L+X+W+Y+Z
11957 +---------------------------------------+
11958 | Save area for GP registers (G) | 8+P+A+V+L+X+W+Y+Z+C
11959 +---------------------------------------+
11960 | Save area for FP registers (F) | 8+P+A+V+L+X+W+Y+Z+C+G
11961 +---------------------------------------+
11962 old SP->| back chain to caller's caller |
11963 +---------------------------------------+
11965 The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
11966 given. (But note below and in sysv4.h that we require only 8 and
11967 may round up the size of our stack frame anyways. The historical
11968 reason is early versions of powerpc-linux which didn't properly
11969 align the stack at program startup. A happy side-effect is that
11970 -mno-eabi libraries can be used with -meabi programs.)
11972 The EABI configuration defaults to the V.4 layout. However,
11973 the stack alignment requirements may differ. If -mno-eabi is not
11974 given, the required stack alignment is 8 bytes; if -mno-eabi is
11975 given, the required alignment is 16 bytes. (But see V.4 comment
11978 #ifndef ABI_STACK_BOUNDARY
11979 #define ABI_STACK_BOUNDARY STACK_BOUNDARY
11982 static rs6000_stack_t *
11983 rs6000_stack_info (void)
11985 static rs6000_stack_t info, zero_info;
11986 rs6000_stack_t *info_ptr = &info;
11987 int reg_size = TARGET_32BIT ? 4 : 8;
11990 HOST_WIDE_INT non_fixed_size;
11992 /* Zero all fields portably. */
11997 /* Cache value so we don't rescan instruction chain over and over. */
11998 if (cfun->machine->insn_chain_scanned_p == 0)
11999 cfun->machine->insn_chain_scanned_p
12000 = spe_func_has_64bit_regs_p () + 1;
12001 info_ptr->spe_64bit_regs_used = cfun->machine->insn_chain_scanned_p - 1;
12004 /* Select which calling sequence. */
12005 info_ptr->abi = DEFAULT_ABI;
12007 /* Calculate which registers need to be saved & save area size. */
12008 info_ptr->first_gp_reg_save = first_reg_to_save ();
12009 /* Assume that we will have to save RS6000_PIC_OFFSET_TABLE_REGNUM,
12010 even if it currently looks like we won't. */
12011 if (((TARGET_TOC && TARGET_MINIMAL_TOC)
12012 || (flag_pic == 1 && DEFAULT_ABI == ABI_V4)
12013 || (flag_pic && DEFAULT_ABI == ABI_DARWIN))
12014 && info_ptr->first_gp_reg_save > RS6000_PIC_OFFSET_TABLE_REGNUM)
12015 info_ptr->gp_size = reg_size * (32 - RS6000_PIC_OFFSET_TABLE_REGNUM);
12017 info_ptr->gp_size = reg_size * (32 - info_ptr->first_gp_reg_save);
12019 /* For the SPE, we have an additional upper 32-bits on each GPR.
12020 Ideally we should save the entire 64-bits only when the upper
12021 half is used in SIMD instructions. Since we only record
12022 registers live (not the size they are used in), this proves
12023 difficult because we'd have to traverse the instruction chain at
12024 the right time, taking reload into account. This is a real pain,
12025 so we opt to save the GPRs in 64-bits always if but one register
12026 gets used in 64-bits. Otherwise, all the registers in the frame
12027 get saved in 32-bits.
12029 So... since when we save all GPRs (except the SP) in 64-bits, the
12030 traditional GP save area will be empty. */
12031 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
12032 info_ptr->gp_size = 0;
12034 info_ptr->first_fp_reg_save = first_fp_reg_to_save ();
12035 info_ptr->fp_size = 8 * (64 - info_ptr->first_fp_reg_save);
12037 info_ptr->first_altivec_reg_save = first_altivec_reg_to_save ();
12038 info_ptr->altivec_size = 16 * (LAST_ALTIVEC_REGNO + 1
12039 - info_ptr->first_altivec_reg_save);
12041 /* Does this function call anything? */
12042 info_ptr->calls_p = (! current_function_is_leaf
12043 || cfun->machine->ra_needs_full_frame);
12045 /* Determine if we need to save the link register. */
12046 if (rs6000_ra_ever_killed ()
12047 || (DEFAULT_ABI == ABI_AIX
12048 && current_function_profile
12049 && !TARGET_PROFILE_KERNEL)
12050 #ifdef TARGET_RELOCATABLE
12051 || (TARGET_RELOCATABLE && (get_pool_size () != 0))
12053 || (info_ptr->first_fp_reg_save != 64
12054 && !FP_SAVE_INLINE (info_ptr->first_fp_reg_save))
12055 || info_ptr->first_altivec_reg_save <= LAST_ALTIVEC_REGNO
12056 || (DEFAULT_ABI == ABI_V4 && current_function_calls_alloca)
12057 || info_ptr->calls_p)
12059 info_ptr->lr_save_p = 1;
12060 regs_ever_live[LINK_REGISTER_REGNUM] = 1;
12063 /* Determine if we need to save the condition code registers. */
12064 if (regs_ever_live[CR2_REGNO]
12065 || regs_ever_live[CR3_REGNO]
12066 || regs_ever_live[CR4_REGNO])
12068 info_ptr->cr_save_p = 1;
12069 if (DEFAULT_ABI == ABI_V4)
12070 info_ptr->cr_size = reg_size;
12073 /* If the current function calls __builtin_eh_return, then we need
12074 to allocate stack space for registers that will hold data for
12075 the exception handler. */
12076 if (current_function_calls_eh_return)
12079 for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM; ++i)
12082 /* SPE saves EH registers in 64-bits. */
12083 ehrd_size = i * (TARGET_SPE_ABI
12084 && info_ptr->spe_64bit_regs_used != 0
12085 ? UNITS_PER_SPE_WORD : UNITS_PER_WORD);
12090 /* Determine various sizes. */
12091 info_ptr->reg_size = reg_size;
12092 info_ptr->fixed_size = RS6000_SAVE_AREA;
12093 info_ptr->varargs_size = RS6000_VARARGS_AREA;
12094 info_ptr->vars_size = RS6000_ALIGN (get_frame_size (), 8);
12095 info_ptr->parm_size = RS6000_ALIGN (current_function_outgoing_args_size,
12096 TARGET_ALTIVEC ? 16 : 8);
12098 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
12099 info_ptr->spe_gp_size = 8 * (32 - info_ptr->first_gp_reg_save);
12101 info_ptr->spe_gp_size = 0;
12103 if (TARGET_ALTIVEC_ABI)
12104 info_ptr->vrsave_mask = compute_vrsave_mask ();
12106 info_ptr->vrsave_mask = 0;
12108 if (TARGET_ALTIVEC_VRSAVE && info_ptr->vrsave_mask)
12109 info_ptr->vrsave_size = 4;
12111 info_ptr->vrsave_size = 0;
12113 compute_save_world_info (info_ptr);
12115 /* Calculate the offsets. */
12116 switch (DEFAULT_ABI)
12120 gcc_unreachable ();
12124 info_ptr->fp_save_offset = - info_ptr->fp_size;
12125 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
12127 if (TARGET_ALTIVEC_ABI)
12129 info_ptr->vrsave_save_offset
12130 = info_ptr->gp_save_offset - info_ptr->vrsave_size;
12132 /* Align stack so vector save area is on a quadword boundary. */
12133 if (info_ptr->altivec_size != 0)
12134 info_ptr->altivec_padding_size
12135 = 16 - (-info_ptr->vrsave_save_offset % 16);
12137 info_ptr->altivec_padding_size = 0;
12139 info_ptr->altivec_save_offset
12140 = info_ptr->vrsave_save_offset
12141 - info_ptr->altivec_padding_size
12142 - info_ptr->altivec_size;
12144 /* Adjust for AltiVec case. */
12145 info_ptr->ehrd_offset = info_ptr->altivec_save_offset - ehrd_size;
12148 info_ptr->ehrd_offset = info_ptr->gp_save_offset - ehrd_size;
12149 info_ptr->cr_save_offset = reg_size; /* first word when 64-bit. */
12150 info_ptr->lr_save_offset = 2*reg_size;
12154 info_ptr->fp_save_offset = - info_ptr->fp_size;
12155 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
12156 info_ptr->cr_save_offset = info_ptr->gp_save_offset - info_ptr->cr_size;
12158 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
12160 /* Align stack so SPE GPR save area is aligned on a
12161 double-word boundary. */
12162 if (info_ptr->spe_gp_size != 0)
12163 info_ptr->spe_padding_size
12164 = 8 - (-info_ptr->cr_save_offset % 8);
12166 info_ptr->spe_padding_size = 0;
12168 info_ptr->spe_gp_save_offset
12169 = info_ptr->cr_save_offset
12170 - info_ptr->spe_padding_size
12171 - info_ptr->spe_gp_size;
12173 /* Adjust for SPE case. */
12174 info_ptr->toc_save_offset
12175 = info_ptr->spe_gp_save_offset - info_ptr->toc_size;
12177 else if (TARGET_ALTIVEC_ABI)
12179 info_ptr->vrsave_save_offset
12180 = info_ptr->cr_save_offset - info_ptr->vrsave_size;
12182 /* Align stack so vector save area is on a quadword boundary. */
12183 if (info_ptr->altivec_size != 0)
12184 info_ptr->altivec_padding_size
12185 = 16 - (-info_ptr->vrsave_save_offset % 16);
12187 info_ptr->altivec_padding_size = 0;
12189 info_ptr->altivec_save_offset
12190 = info_ptr->vrsave_save_offset
12191 - info_ptr->altivec_padding_size
12192 - info_ptr->altivec_size;
12194 /* Adjust for AltiVec case. */
12195 info_ptr->toc_save_offset
12196 = info_ptr->altivec_save_offset - info_ptr->toc_size;
12199 info_ptr->toc_save_offset = info_ptr->cr_save_offset - info_ptr->toc_size;
12200 info_ptr->ehrd_offset = info_ptr->toc_save_offset - ehrd_size;
12201 info_ptr->lr_save_offset = reg_size;
12205 save_align = (TARGET_ALTIVEC_ABI || DEFAULT_ABI == ABI_DARWIN) ? 16 : 8;
12206 info_ptr->save_size = RS6000_ALIGN (info_ptr->fp_size
12207 + info_ptr->gp_size
12208 + info_ptr->altivec_size
12209 + info_ptr->altivec_padding_size
12210 + info_ptr->spe_gp_size
12211 + info_ptr->spe_padding_size
12213 + info_ptr->cr_size
12214 + info_ptr->lr_size
12215 + info_ptr->vrsave_size
12216 + info_ptr->toc_size,
12219 non_fixed_size = (info_ptr->vars_size
12220 + info_ptr->parm_size
12221 + info_ptr->save_size
12222 + info_ptr->varargs_size);
12224 info_ptr->total_size = RS6000_ALIGN (non_fixed_size + info_ptr->fixed_size,
12225 ABI_STACK_BOUNDARY / BITS_PER_UNIT);
12227 /* Determine if we need to allocate any stack frame:
12229 For AIX we need to push the stack if a frame pointer is needed
12230 (because the stack might be dynamically adjusted), if we are
12231 debugging, if we make calls, or if the sum of fp_save, gp_save,
12232 and local variables are more than the space needed to save all
12233 non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
12234 + 18*8 = 288 (GPR13 reserved).
12236 For V.4 we don't have the stack cushion that AIX uses, but assume
12237 that the debugger can handle stackless frames. */
12239 if (info_ptr->calls_p)
12240 info_ptr->push_p = 1;
12242 else if (DEFAULT_ABI == ABI_V4)
12243 info_ptr->push_p = non_fixed_size != 0;
12245 else if (frame_pointer_needed)
12246 info_ptr->push_p = 1;
12248 else if (TARGET_XCOFF && write_symbols != NO_DEBUG)
12249 info_ptr->push_p = 1;
12252 info_ptr->push_p = non_fixed_size > (TARGET_32BIT ? 220 : 288);
12254 /* Zero offsets if we're not saving those registers. */
12255 if (info_ptr->fp_size == 0)
12256 info_ptr->fp_save_offset = 0;
12258 if (info_ptr->gp_size == 0)
12259 info_ptr->gp_save_offset = 0;
12261 if (! TARGET_ALTIVEC_ABI || info_ptr->altivec_size == 0)
12262 info_ptr->altivec_save_offset = 0;
12264 if (! TARGET_ALTIVEC_ABI || info_ptr->vrsave_mask == 0)
12265 info_ptr->vrsave_save_offset = 0;
12267 if (! TARGET_SPE_ABI
12268 || info_ptr->spe_64bit_regs_used == 0
12269 || info_ptr->spe_gp_size == 0)
12270 info_ptr->spe_gp_save_offset = 0;
12272 if (! info_ptr->lr_save_p)
12273 info_ptr->lr_save_offset = 0;
12275 if (! info_ptr->cr_save_p)
12276 info_ptr->cr_save_offset = 0;
12278 if (! info_ptr->toc_save_p)
12279 info_ptr->toc_save_offset = 0;
12284 /* Return true if the current function uses any GPRs in 64-bit SIMD
12288 spe_func_has_64bit_regs_p (void)
12292 /* Functions that save and restore all the call-saved registers will
12293 need to save/restore the registers in 64-bits. */
12294 if (current_function_calls_eh_return
12295 || current_function_calls_setjmp
12296 || current_function_has_nonlocal_goto)
12299 insns = get_insns ();
12301 for (insn = NEXT_INSN (insns); insn != NULL_RTX; insn = NEXT_INSN (insn))
12307 /* FIXME: This should be implemented with attributes...
12309 (set_attr "spe64" "true")....then,
12310 if (get_spe64(insn)) return true;
12312 It's the only reliable way to do the stuff below. */
12314 i = PATTERN (insn);
12315 if (GET_CODE (i) == SET)
12317 enum machine_mode mode = GET_MODE (SET_SRC (i));
12319 if (SPE_VECTOR_MODE (mode))
12321 if (TARGET_E500_DOUBLE && mode == DFmode)
12331 debug_stack_info (rs6000_stack_t *info)
12333 const char *abi_string;
12336 info = rs6000_stack_info ();
12338 fprintf (stderr, "\nStack information for function %s:\n",
12339 ((current_function_decl && DECL_NAME (current_function_decl))
12340 ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
12345 default: abi_string = "Unknown"; break;
12346 case ABI_NONE: abi_string = "NONE"; break;
12347 case ABI_AIX: abi_string = "AIX"; break;
12348 case ABI_DARWIN: abi_string = "Darwin"; break;
12349 case ABI_V4: abi_string = "V.4"; break;
12352 fprintf (stderr, "\tABI = %5s\n", abi_string);
12354 if (TARGET_ALTIVEC_ABI)
12355 fprintf (stderr, "\tALTIVEC ABI extensions enabled.\n");
12357 if (TARGET_SPE_ABI)
12358 fprintf (stderr, "\tSPE ABI extensions enabled.\n");
12360 if (info->first_gp_reg_save != 32)
12361 fprintf (stderr, "\tfirst_gp_reg_save = %5d\n", info->first_gp_reg_save);
12363 if (info->first_fp_reg_save != 64)
12364 fprintf (stderr, "\tfirst_fp_reg_save = %5d\n", info->first_fp_reg_save);
12366 if (info->first_altivec_reg_save <= LAST_ALTIVEC_REGNO)
12367 fprintf (stderr, "\tfirst_altivec_reg_save = %5d\n",
12368 info->first_altivec_reg_save);
12370 if (info->lr_save_p)
12371 fprintf (stderr, "\tlr_save_p = %5d\n", info->lr_save_p);
12373 if (info->cr_save_p)
12374 fprintf (stderr, "\tcr_save_p = %5d\n", info->cr_save_p);
12376 if (info->toc_save_p)
12377 fprintf (stderr, "\ttoc_save_p = %5d\n", info->toc_save_p);
12379 if (info->vrsave_mask)
12380 fprintf (stderr, "\tvrsave_mask = 0x%x\n", info->vrsave_mask);
12383 fprintf (stderr, "\tpush_p = %5d\n", info->push_p);
12386 fprintf (stderr, "\tcalls_p = %5d\n", info->calls_p);
12388 if (info->gp_save_offset)
12389 fprintf (stderr, "\tgp_save_offset = %5d\n", info->gp_save_offset);
12391 if (info->fp_save_offset)
12392 fprintf (stderr, "\tfp_save_offset = %5d\n", info->fp_save_offset);
12394 if (info->altivec_save_offset)
12395 fprintf (stderr, "\taltivec_save_offset = %5d\n",
12396 info->altivec_save_offset);
12398 if (info->spe_gp_save_offset)
12399 fprintf (stderr, "\tspe_gp_save_offset = %5d\n",
12400 info->spe_gp_save_offset);
12402 if (info->vrsave_save_offset)
12403 fprintf (stderr, "\tvrsave_save_offset = %5d\n",
12404 info->vrsave_save_offset);
12406 if (info->lr_save_offset)
12407 fprintf (stderr, "\tlr_save_offset = %5d\n", info->lr_save_offset);
12409 if (info->cr_save_offset)
12410 fprintf (stderr, "\tcr_save_offset = %5d\n", info->cr_save_offset);
12412 if (info->toc_save_offset)
12413 fprintf (stderr, "\ttoc_save_offset = %5d\n", info->toc_save_offset);
12415 if (info->varargs_save_offset)
12416 fprintf (stderr, "\tvarargs_save_offset = %5d\n", info->varargs_save_offset);
12418 if (info->total_size)
12419 fprintf (stderr, "\ttotal_size = "HOST_WIDE_INT_PRINT_DEC"\n",
12422 if (info->varargs_size)
12423 fprintf (stderr, "\tvarargs_size = %5d\n", info->varargs_size);
12425 if (info->vars_size)
12426 fprintf (stderr, "\tvars_size = "HOST_WIDE_INT_PRINT_DEC"\n",
12429 if (info->parm_size)
12430 fprintf (stderr, "\tparm_size = %5d\n", info->parm_size);
12432 if (info->fixed_size)
12433 fprintf (stderr, "\tfixed_size = %5d\n", info->fixed_size);
12436 fprintf (stderr, "\tgp_size = %5d\n", info->gp_size);
12438 if (info->spe_gp_size)
12439 fprintf (stderr, "\tspe_gp_size = %5d\n", info->spe_gp_size);
12442 fprintf (stderr, "\tfp_size = %5d\n", info->fp_size);
12444 if (info->altivec_size)
12445 fprintf (stderr, "\taltivec_size = %5d\n", info->altivec_size);
12447 if (info->vrsave_size)
12448 fprintf (stderr, "\tvrsave_size = %5d\n", info->vrsave_size);
12450 if (info->altivec_padding_size)
12451 fprintf (stderr, "\taltivec_padding_size= %5d\n",
12452 info->altivec_padding_size);
12454 if (info->spe_padding_size)
12455 fprintf (stderr, "\tspe_padding_size = %5d\n",
12456 info->spe_padding_size);
12459 fprintf (stderr, "\tlr_size = %5d\n", info->lr_size);
12462 fprintf (stderr, "\tcr_size = %5d\n", info->cr_size);
12464 if (info->toc_size)
12465 fprintf (stderr, "\ttoc_size = %5d\n", info->toc_size);
12467 if (info->save_size)
12468 fprintf (stderr, "\tsave_size = %5d\n", info->save_size);
12470 if (info->reg_size != 4)
12471 fprintf (stderr, "\treg_size = %5d\n", info->reg_size);
12473 fprintf (stderr, "\n");
12477 rs6000_return_addr (int count, rtx frame)
12479 /* Currently we don't optimize very well between prolog and body
12480 code and for PIC code the code can be actually quite bad, so
12481 don't try to be too clever here. */
12482 if (count != 0 || (DEFAULT_ABI != ABI_AIX && flag_pic))
12484 cfun->machine->ra_needs_full_frame = 1;
12491 plus_constant (copy_to_reg
12492 (gen_rtx_MEM (Pmode,
12493 memory_address (Pmode, frame))),
12494 RETURN_ADDRESS_OFFSET)));
12497 cfun->machine->ra_need_lr = 1;
12498 return get_hard_reg_initial_val (Pmode, LINK_REGISTER_REGNUM);
12501 /* Say whether a function is a candidate for sibcall handling or not.
12502 We do not allow indirect calls to be optimized into sibling calls.
12503 Also, we can't do it if there are any vector parameters; there's
12504 nowhere to put the VRsave code so it works; note that functions with
12505 vector parameters are required to have a prototype, so the argument
12506 type info must be available here. (The tail recursion case can work
12507 with vector parameters, but there's no way to distinguish here.) */
12509 rs6000_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
12514 if (TARGET_ALTIVEC_VRSAVE)
12516 for (type = TYPE_ARG_TYPES (TREE_TYPE (decl));
12517 type; type = TREE_CHAIN (type))
12519 if (TREE_CODE (TREE_VALUE (type)) == VECTOR_TYPE)
12523 if (DEFAULT_ABI == ABI_DARWIN
12524 || (*targetm.binds_local_p) (decl))
12526 tree attr_list = TYPE_ATTRIBUTES (TREE_TYPE (decl));
12528 if (!lookup_attribute ("longcall", attr_list)
12529 || lookup_attribute ("shortcall", attr_list))
12536 /* NULL if INSN insn is valid within a low-overhead loop.
12537 Otherwise return why doloop cannot be applied.
12538 PowerPC uses the COUNT register for branch on table instructions. */
12540 static const char *
12541 rs6000_invalid_within_doloop (rtx insn)
12544 return "Function call in the loop.";
12547 && (GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
12548 || GET_CODE (PATTERN (insn)) == ADDR_VEC))
12549 return "Computed branch in the loop.";
12555 rs6000_ra_ever_killed (void)
12561 if (current_function_is_thunk)
12564 /* regs_ever_live has LR marked as used if any sibcalls are present,
12565 but this should not force saving and restoring in the
12566 pro/epilogue. Likewise, reg_set_between_p thinks a sibcall
12567 clobbers LR, so that is inappropriate. */
12569 /* Also, the prologue can generate a store into LR that
12570 doesn't really count, like this:
12573 bcl to set PIC register
12577 When we're called from the epilogue, we need to avoid counting
12578 this as a store. */
12580 push_topmost_sequence ();
12581 top = get_insns ();
12582 pop_topmost_sequence ();
12583 reg = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
12585 for (insn = NEXT_INSN (top); insn != NULL_RTX; insn = NEXT_INSN (insn))
12589 if (FIND_REG_INC_NOTE (insn, reg))
12591 else if (GET_CODE (insn) == CALL_INSN
12592 && !SIBLING_CALL_P (insn))
12594 else if (set_of (reg, insn) != NULL_RTX
12595 && !prologue_epilogue_contains (insn))
12602 /* Add a REG_MAYBE_DEAD note to the insn. */
12604 rs6000_maybe_dead (rtx insn)
12606 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD,
12611 /* Emit instructions needed to load the TOC register.
12612 This is only needed when TARGET_TOC, TARGET_MINIMAL_TOC, and there is
12613 a constant pool; or for SVR4 -fpic. */
12616 rs6000_emit_load_toc_table (int fromprolog)
12619 dest = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
12621 if (TARGET_ELF && TARGET_SECURE_PLT && DEFAULT_ABI != ABI_AIX && flag_pic)
12624 rtx lab, tmp1, tmp2, got, tempLR;
12626 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
12627 lab = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
12629 got = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
12631 got = rs6000_got_sym ();
12632 tmp1 = tmp2 = dest;
12635 tmp1 = gen_reg_rtx (Pmode);
12636 tmp2 = gen_reg_rtx (Pmode);
12638 tempLR = (fromprolog
12639 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
12640 : gen_reg_rtx (Pmode));
12641 insn = emit_insn (gen_load_toc_v4_PIC_1 (tempLR, lab));
12643 rs6000_maybe_dead (insn);
12644 insn = emit_move_insn (tmp1, tempLR);
12646 rs6000_maybe_dead (insn);
12647 insn = emit_insn (gen_load_toc_v4_PIC_3b (tmp2, tmp1, got, lab));
12649 rs6000_maybe_dead (insn);
12650 insn = emit_insn (gen_load_toc_v4_PIC_3c (dest, tmp2, got, lab));
12652 rs6000_maybe_dead (insn);
12654 else if (TARGET_ELF && DEFAULT_ABI == ABI_V4 && flag_pic == 1)
12656 rtx tempLR = (fromprolog
12657 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
12658 : gen_reg_rtx (Pmode));
12660 insn = emit_insn (gen_load_toc_v4_pic_si (tempLR));
12662 rs6000_maybe_dead (insn);
12663 insn = emit_move_insn (dest, tempLR);
12665 rs6000_maybe_dead (insn);
12667 else if (TARGET_ELF && DEFAULT_ABI != ABI_AIX && flag_pic == 2)
12670 rtx tempLR = (fromprolog
12671 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
12672 : gen_reg_rtx (Pmode));
12673 rtx temp0 = (fromprolog
12674 ? gen_rtx_REG (Pmode, 0)
12675 : gen_reg_rtx (Pmode));
12681 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
12682 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
12684 ASM_GENERATE_INTERNAL_LABEL (buf, "LCL", rs6000_pic_labelno);
12685 symL = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
12687 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1 (tempLR,
12689 rs6000_maybe_dead (emit_move_insn (dest, tempLR));
12690 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_2 (temp0, dest,
12698 tocsym = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
12699 emit_insn (gen_load_toc_v4_PIC_1b (tempLR, tocsym));
12700 emit_move_insn (dest, tempLR);
12701 emit_move_insn (temp0, gen_rtx_MEM (Pmode, dest));
12703 insn = emit_insn (gen_addsi3 (dest, temp0, dest));
12705 rs6000_maybe_dead (insn);
12707 else if (TARGET_ELF && !TARGET_AIX && flag_pic == 0 && TARGET_MINIMAL_TOC)
12709 /* This is for AIX code running in non-PIC ELF32. */
12712 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
12713 realsym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
12715 insn = emit_insn (gen_elf_high (dest, realsym));
12717 rs6000_maybe_dead (insn);
12718 insn = emit_insn (gen_elf_low (dest, dest, realsym));
12720 rs6000_maybe_dead (insn);
12724 gcc_assert (DEFAULT_ABI == ABI_AIX);
12727 insn = emit_insn (gen_load_toc_aix_si (dest));
12729 insn = emit_insn (gen_load_toc_aix_di (dest));
12731 rs6000_maybe_dead (insn);
12735 /* Emit instructions to restore the link register after determining where
12736 its value has been stored. */
12739 rs6000_emit_eh_reg_restore (rtx source, rtx scratch)
12741 rs6000_stack_t *info = rs6000_stack_info ();
12744 operands[0] = source;
12745 operands[1] = scratch;
12747 if (info->lr_save_p)
12749 rtx frame_rtx = stack_pointer_rtx;
12750 HOST_WIDE_INT sp_offset = 0;
12753 if (frame_pointer_needed
12754 || current_function_calls_alloca
12755 || info->total_size > 32767)
12757 emit_move_insn (operands[1], gen_rtx_MEM (Pmode, frame_rtx));
12758 frame_rtx = operands[1];
12760 else if (info->push_p)
12761 sp_offset = info->total_size;
12763 tmp = plus_constant (frame_rtx, info->lr_save_offset + sp_offset);
12764 tmp = gen_rtx_MEM (Pmode, tmp);
12765 emit_move_insn (tmp, operands[0]);
12768 emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM), operands[0]);
12771 static GTY(()) int set = -1;
12774 get_TOC_alias_set (void)
12777 set = new_alias_set ();
12781 /* This returns nonzero if the current function uses the TOC. This is
12782 determined by the presence of (use (unspec ... UNSPEC_TOC)), which
12783 is generated by the ABI_V4 load_toc_* patterns. */
12790 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
12793 rtx pat = PATTERN (insn);
12796 if (GET_CODE (pat) == PARALLEL)
12797 for (i = 0; i < XVECLEN (pat, 0); i++)
12799 rtx sub = XVECEXP (pat, 0, i);
12800 if (GET_CODE (sub) == USE)
12802 sub = XEXP (sub, 0);
12803 if (GET_CODE (sub) == UNSPEC
12804 && XINT (sub, 1) == UNSPEC_TOC)
12814 create_TOC_reference (rtx symbol)
12816 return gen_rtx_PLUS (Pmode,
12817 gen_rtx_REG (Pmode, TOC_REGISTER),
12818 gen_rtx_CONST (Pmode,
12819 gen_rtx_MINUS (Pmode, symbol,
12820 gen_rtx_SYMBOL_REF (Pmode, toc_label_name))));
12823 /* If _Unwind_* has been called from within the same module,
12824 toc register is not guaranteed to be saved to 40(1) on function
12825 entry. Save it there in that case. */
12828 rs6000_aix_emit_builtin_unwind_init (void)
12831 rtx stack_top = gen_reg_rtx (Pmode);
12832 rtx opcode_addr = gen_reg_rtx (Pmode);
12833 rtx opcode = gen_reg_rtx (SImode);
12834 rtx tocompare = gen_reg_rtx (SImode);
12835 rtx no_toc_save_needed = gen_label_rtx ();
12837 mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
12838 emit_move_insn (stack_top, mem);
12840 mem = gen_rtx_MEM (Pmode,
12841 gen_rtx_PLUS (Pmode, stack_top,
12842 GEN_INT (2 * GET_MODE_SIZE (Pmode))));
12843 emit_move_insn (opcode_addr, mem);
12844 emit_move_insn (opcode, gen_rtx_MEM (SImode, opcode_addr));
12845 emit_move_insn (tocompare, gen_int_mode (TARGET_32BIT ? 0x80410014
12846 : 0xE8410028, SImode));
12848 do_compare_rtx_and_jump (opcode, tocompare, EQ, 1,
12849 SImode, NULL_RTX, NULL_RTX,
12850 no_toc_save_needed);
12852 mem = gen_rtx_MEM (Pmode,
12853 gen_rtx_PLUS (Pmode, stack_top,
12854 GEN_INT (5 * GET_MODE_SIZE (Pmode))));
12855 emit_move_insn (mem, gen_rtx_REG (Pmode, 2));
12856 emit_label (no_toc_save_needed);
12859 /* This ties together stack memory (MEM with an alias set of
12860 rs6000_sr_alias_set) and the change to the stack pointer. */
12863 rs6000_emit_stack_tie (void)
12865 rtx mem = gen_rtx_MEM (BLKmode, gen_rtx_REG (Pmode, STACK_POINTER_REGNUM));
12867 set_mem_alias_set (mem, rs6000_sr_alias_set);
12868 emit_insn (gen_stack_tie (mem));
12871 /* Emit the correct code for allocating stack space, as insns.
12872 If COPY_R12, make sure a copy of the old frame is left in r12.
12873 The generated code may use hard register 0 as a temporary. */
12876 rs6000_emit_allocate_stack (HOST_WIDE_INT size, int copy_r12)
12879 rtx stack_reg = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
12880 rtx tmp_reg = gen_rtx_REG (Pmode, 0);
12881 rtx todec = gen_int_mode (-size, Pmode);
12883 if (INTVAL (todec) != -size)
12885 warning (0, "stack frame too large");
12886 emit_insn (gen_trap ());
12890 if (current_function_limit_stack)
12892 if (REG_P (stack_limit_rtx)
12893 && REGNO (stack_limit_rtx) > 1
12894 && REGNO (stack_limit_rtx) <= 31)
12896 emit_insn (TARGET_32BIT
12897 ? gen_addsi3 (tmp_reg,
12900 : gen_adddi3 (tmp_reg,
12904 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
12907 else if (GET_CODE (stack_limit_rtx) == SYMBOL_REF
12909 && DEFAULT_ABI == ABI_V4)
12911 rtx toload = gen_rtx_CONST (VOIDmode,
12912 gen_rtx_PLUS (Pmode,
12916 emit_insn (gen_elf_high (tmp_reg, toload));
12917 emit_insn (gen_elf_low (tmp_reg, tmp_reg, toload));
12918 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
12922 warning (0, "stack limit expression is not supported");
12925 if (copy_r12 || ! TARGET_UPDATE)
12926 emit_move_insn (gen_rtx_REG (Pmode, 12), stack_reg);
12932 /* Need a note here so that try_split doesn't get confused. */
12933 if (get_last_insn () == NULL_RTX)
12934 emit_note (NOTE_INSN_DELETED);
12935 insn = emit_move_insn (tmp_reg, todec);
12936 try_split (PATTERN (insn), insn, 0);
12940 insn = emit_insn (TARGET_32BIT
12941 ? gen_movsi_update (stack_reg, stack_reg,
12943 : gen_movdi_di_update (stack_reg, stack_reg,
12944 todec, stack_reg));
12948 insn = emit_insn (TARGET_32BIT
12949 ? gen_addsi3 (stack_reg, stack_reg, todec)
12950 : gen_adddi3 (stack_reg, stack_reg, todec));
12951 emit_move_insn (gen_rtx_MEM (Pmode, stack_reg),
12952 gen_rtx_REG (Pmode, 12));
12955 RTX_FRAME_RELATED_P (insn) = 1;
12957 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
12958 gen_rtx_SET (VOIDmode, stack_reg,
12959 gen_rtx_PLUS (Pmode, stack_reg,
12964 /* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
12965 with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
12966 is not NULL. It would be nice if dwarf2out_frame_debug_expr could
12967 deduce these equivalences by itself so it wasn't necessary to hold
12968 its hand so much. */
12971 rs6000_frame_related (rtx insn, rtx reg, HOST_WIDE_INT val,
12972 rtx reg2, rtx rreg)
12976 /* copy_rtx will not make unique copies of registers, so we need to
12977 ensure we don't have unwanted sharing here. */
12979 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
12982 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
12984 real = copy_rtx (PATTERN (insn));
12986 if (reg2 != NULL_RTX)
12987 real = replace_rtx (real, reg2, rreg);
12989 real = replace_rtx (real, reg,
12990 gen_rtx_PLUS (Pmode, gen_rtx_REG (Pmode,
12991 STACK_POINTER_REGNUM),
12994 /* We expect that 'real' is either a SET or a PARALLEL containing
12995 SETs (and possibly other stuff). In a PARALLEL, all the SETs
12996 are important so they all have to be marked RTX_FRAME_RELATED_P. */
12998 if (GET_CODE (real) == SET)
13002 temp = simplify_rtx (SET_SRC (set));
13004 SET_SRC (set) = temp;
13005 temp = simplify_rtx (SET_DEST (set));
13007 SET_DEST (set) = temp;
13008 if (GET_CODE (SET_DEST (set)) == MEM)
13010 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
13012 XEXP (SET_DEST (set), 0) = temp;
13019 gcc_assert (GET_CODE (real) == PARALLEL);
13020 for (i = 0; i < XVECLEN (real, 0); i++)
13021 if (GET_CODE (XVECEXP (real, 0, i)) == SET)
13023 rtx set = XVECEXP (real, 0, i);
13025 temp = simplify_rtx (SET_SRC (set));
13027 SET_SRC (set) = temp;
13028 temp = simplify_rtx (SET_DEST (set));
13030 SET_DEST (set) = temp;
13031 if (GET_CODE (SET_DEST (set)) == MEM)
13033 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
13035 XEXP (SET_DEST (set), 0) = temp;
13037 RTX_FRAME_RELATED_P (set) = 1;
13042 real = spe_synthesize_frame_save (real);
13044 RTX_FRAME_RELATED_P (insn) = 1;
13045 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
13050 /* Given an SPE frame note, return a PARALLEL of SETs with the
13051 original note, plus a synthetic register save. */
13054 spe_synthesize_frame_save (rtx real)
13056 rtx synth, offset, reg, real2;
13058 if (GET_CODE (real) != SET
13059 || GET_MODE (SET_SRC (real)) != V2SImode)
13062 /* For the SPE, registers saved in 64-bits, get a PARALLEL for their
13063 frame related note. The parallel contains a set of the register
13064 being saved, and another set to a synthetic register (n+1200).
13065 This is so we can differentiate between 64-bit and 32-bit saves.
13066 Words cannot describe this nastiness. */
13068 gcc_assert (GET_CODE (SET_DEST (real)) == MEM
13069 && GET_CODE (XEXP (SET_DEST (real), 0)) == PLUS
13070 && GET_CODE (SET_SRC (real)) == REG);
13073 (set (mem (plus (reg x) (const y)))
13076 (set (mem (plus (reg x) (const y+4)))
13080 real2 = copy_rtx (real);
13081 PUT_MODE (SET_DEST (real2), SImode);
13082 reg = SET_SRC (real2);
13083 real2 = replace_rtx (real2, reg, gen_rtx_REG (SImode, REGNO (reg)));
13084 synth = copy_rtx (real2);
13086 if (BYTES_BIG_ENDIAN)
13088 offset = XEXP (XEXP (SET_DEST (real2), 0), 1);
13089 real2 = replace_rtx (real2, offset, GEN_INT (INTVAL (offset) + 4));
13092 reg = SET_SRC (synth);
13094 synth = replace_rtx (synth, reg,
13095 gen_rtx_REG (SImode, REGNO (reg) + 1200));
13097 offset = XEXP (XEXP (SET_DEST (synth), 0), 1);
13098 synth = replace_rtx (synth, offset,
13099 GEN_INT (INTVAL (offset)
13100 + (BYTES_BIG_ENDIAN ? 0 : 4)));
13102 RTX_FRAME_RELATED_P (synth) = 1;
13103 RTX_FRAME_RELATED_P (real2) = 1;
13104 if (BYTES_BIG_ENDIAN)
13105 real = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, synth, real2));
13107 real = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, real2, synth));
13112 /* Returns an insn that has a vrsave set operation with the
13113 appropriate CLOBBERs. */
13116 generate_set_vrsave (rtx reg, rs6000_stack_t *info, int epiloguep)
13119 rtx insn, clobs[TOTAL_ALTIVEC_REGS + 1];
13120 rtx vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
13123 = gen_rtx_SET (VOIDmode,
13125 gen_rtx_UNSPEC_VOLATILE (SImode,
13126 gen_rtvec (2, reg, vrsave),
13131 /* We need to clobber the registers in the mask so the scheduler
13132 does not move sets to VRSAVE before sets of AltiVec registers.
13134 However, if the function receives nonlocal gotos, reload will set
13135 all call saved registers live. We will end up with:
13137 (set (reg 999) (mem))
13138 (parallel [ (set (reg vrsave) (unspec blah))
13139 (clobber (reg 999))])
13141 The clobber will cause the store into reg 999 to be dead, and
13142 flow will attempt to delete an epilogue insn. In this case, we
13143 need an unspec use/set of the register. */
13145 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
13146 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
13148 if (!epiloguep || call_used_regs [i])
13149 clobs[nclobs++] = gen_rtx_CLOBBER (VOIDmode,
13150 gen_rtx_REG (V4SImode, i));
13153 rtx reg = gen_rtx_REG (V4SImode, i);
13156 = gen_rtx_SET (VOIDmode,
13158 gen_rtx_UNSPEC (V4SImode,
13159 gen_rtvec (1, reg), 27));
13163 insn = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nclobs));
13165 for (i = 0; i < nclobs; ++i)
13166 XVECEXP (insn, 0, i) = clobs[i];
13171 /* Save a register into the frame, and emit RTX_FRAME_RELATED_P notes.
13172 Save REGNO into [FRAME_REG + OFFSET] in mode MODE. */
13175 emit_frame_save (rtx frame_reg, rtx frame_ptr, enum machine_mode mode,
13176 unsigned int regno, int offset, HOST_WIDE_INT total_size)
13178 rtx reg, offset_rtx, insn, mem, addr, int_rtx;
13179 rtx replacea, replaceb;
13181 int_rtx = GEN_INT (offset);
13183 /* Some cases that need register indexed addressing. */
13184 if ((TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
13185 || (TARGET_E500_DOUBLE && mode == DFmode)
13187 && SPE_VECTOR_MODE (mode)
13188 && !SPE_CONST_OFFSET_OK (offset)))
13190 /* Whomever calls us must make sure r11 is available in the
13191 flow path of instructions in the prologue. */
13192 offset_rtx = gen_rtx_REG (Pmode, 11);
13193 emit_move_insn (offset_rtx, int_rtx);
13195 replacea = offset_rtx;
13196 replaceb = int_rtx;
13200 offset_rtx = int_rtx;
13201 replacea = NULL_RTX;
13202 replaceb = NULL_RTX;
13205 reg = gen_rtx_REG (mode, regno);
13206 addr = gen_rtx_PLUS (Pmode, frame_reg, offset_rtx);
13207 mem = gen_rtx_MEM (mode, addr);
13208 set_mem_alias_set (mem, rs6000_sr_alias_set);
13210 insn = emit_move_insn (mem, reg);
13212 rs6000_frame_related (insn, frame_ptr, total_size, replacea, replaceb);
13215 /* Emit an offset memory reference suitable for a frame store, while
13216 converting to a valid addressing mode. */
13219 gen_frame_mem_offset (enum machine_mode mode, rtx reg, int offset)
13221 rtx int_rtx, offset_rtx;
13223 int_rtx = GEN_INT (offset);
13225 if ((TARGET_SPE_ABI && SPE_VECTOR_MODE (mode))
13226 || (TARGET_E500_DOUBLE && mode == DFmode))
13228 offset_rtx = gen_rtx_REG (Pmode, FIXED_SCRATCH);
13229 emit_move_insn (offset_rtx, int_rtx);
13232 offset_rtx = int_rtx;
13234 return gen_rtx_MEM (mode, gen_rtx_PLUS (Pmode, reg, offset_rtx));
13237 /* Look for user-defined global regs. We should not save and restore these,
13238 and cannot use stmw/lmw if there are any in its range. */
13241 no_global_regs_above (int first_greg)
13244 for (i = 0; i < 32 - first_greg; i++)
13245 if (global_regs[first_greg + i])
13250 #ifndef TARGET_FIX_AND_CONTINUE
13251 #define TARGET_FIX_AND_CONTINUE 0
13254 /* Emit function prologue as insns. */
13257 rs6000_emit_prologue (void)
13259 rs6000_stack_t *info = rs6000_stack_info ();
13260 enum machine_mode reg_mode = Pmode;
13261 int reg_size = TARGET_32BIT ? 4 : 8;
13262 rtx sp_reg_rtx = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
13263 rtx frame_ptr_rtx = gen_rtx_REG (Pmode, 12);
13264 rtx frame_reg_rtx = sp_reg_rtx;
13265 rtx cr_save_rtx = NULL_RTX;
13267 int saving_FPRs_inline;
13268 int using_store_multiple;
13269 HOST_WIDE_INT sp_offset = 0;
13271 if (TARGET_FIX_AND_CONTINUE)
13273 /* gdb on darwin arranges to forward a function from the old
13274 address by modifying the first 5 instructions of the function
13275 to branch to the overriding function. This is necessary to
13276 permit function pointers that point to the old function to
13277 actually forward to the new function. */
13278 emit_insn (gen_nop ());
13279 emit_insn (gen_nop ());
13280 emit_insn (gen_nop ());
13281 emit_insn (gen_nop ());
13282 emit_insn (gen_nop ());
13285 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
13287 reg_mode = V2SImode;
13291 using_store_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
13292 && (!TARGET_SPE_ABI
13293 || info->spe_64bit_regs_used == 0)
13294 && info->first_gp_reg_save < 31
13295 && no_global_regs_above (info->first_gp_reg_save));
13296 saving_FPRs_inline = (info->first_fp_reg_save == 64
13297 || FP_SAVE_INLINE (info->first_fp_reg_save)
13298 || current_function_calls_eh_return
13299 || cfun->machine->ra_need_lr);
13301 /* For V.4, update stack before we do any saving and set back pointer. */
13303 && (DEFAULT_ABI == ABI_V4
13304 || current_function_calls_eh_return))
13306 if (info->total_size < 32767)
13307 sp_offset = info->total_size;
13309 frame_reg_rtx = frame_ptr_rtx;
13310 rs6000_emit_allocate_stack (info->total_size,
13311 (frame_reg_rtx != sp_reg_rtx
13312 && (info->cr_save_p
13314 || info->first_fp_reg_save < 64
13315 || info->first_gp_reg_save < 32
13317 if (frame_reg_rtx != sp_reg_rtx)
13318 rs6000_emit_stack_tie ();
13321 /* Handle world saves specially here. */
13322 if (WORLD_SAVE_P (info))
13328 /* save_world expects lr in r0. */
13329 if (info->lr_save_p)
13331 insn = emit_move_insn (gen_rtx_REG (Pmode, 0),
13332 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
13333 RTX_FRAME_RELATED_P (insn) = 1;
13336 /* The SAVE_WORLD and RESTORE_WORLD routines make a number of
13337 assumptions about the offsets of various bits of the stack
13339 gcc_assert (info->gp_save_offset == -220
13340 && info->fp_save_offset == -144
13341 && info->lr_save_offset == 8
13342 && info->cr_save_offset == 4
13345 && (!current_function_calls_eh_return
13346 || info->ehrd_offset == -432)
13347 && info->vrsave_save_offset == -224
13348 && info->altivec_save_offset == (-224 -16 -192));
13350 treg = gen_rtx_REG (SImode, 11);
13351 emit_move_insn (treg, GEN_INT (-info->total_size));
13353 /* SAVE_WORLD takes the caller's LR in R0 and the frame size
13354 in R11. It also clobbers R12, so beware! */
13356 /* Preserve CR2 for save_world prologues */
13358 sz += 32 - info->first_gp_reg_save;
13359 sz += 64 - info->first_fp_reg_save;
13360 sz += LAST_ALTIVEC_REGNO - info->first_altivec_reg_save + 1;
13361 p = rtvec_alloc (sz);
13363 RTVEC_ELT (p, j++) = gen_rtx_CLOBBER (VOIDmode,
13364 gen_rtx_REG (Pmode,
13365 LINK_REGISTER_REGNUM));
13366 RTVEC_ELT (p, j++) = gen_rtx_USE (VOIDmode,
13367 gen_rtx_SYMBOL_REF (Pmode,
13369 /* We do floats first so that the instruction pattern matches
13371 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
13373 rtx reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
13374 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
13375 GEN_INT (info->fp_save_offset
13376 + sp_offset + 8 * i));
13377 rtx mem = gen_rtx_MEM (DFmode, addr);
13378 set_mem_alias_set (mem, rs6000_sr_alias_set);
13380 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
13382 for (i = 0; info->first_altivec_reg_save + i <= LAST_ALTIVEC_REGNO; i++)
13384 rtx reg = gen_rtx_REG (V4SImode, info->first_altivec_reg_save + i);
13385 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
13386 GEN_INT (info->altivec_save_offset
13387 + sp_offset + 16 * i));
13388 rtx mem = gen_rtx_MEM (V4SImode, addr);
13389 set_mem_alias_set (mem, rs6000_sr_alias_set);
13391 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
13393 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
13395 rtx reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
13396 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
13397 GEN_INT (info->gp_save_offset
13398 + sp_offset + reg_size * i));
13399 rtx mem = gen_rtx_MEM (reg_mode, addr);
13400 set_mem_alias_set (mem, rs6000_sr_alias_set);
13402 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
13406 /* CR register traditionally saved as CR2. */
13407 rtx reg = gen_rtx_REG (reg_mode, CR2_REGNO);
13408 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
13409 GEN_INT (info->cr_save_offset
13411 rtx mem = gen_rtx_MEM (reg_mode, addr);
13412 set_mem_alias_set (mem, rs6000_sr_alias_set);
13414 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
13416 /* Prevent any attempt to delete the setting of r0 and treg! */
13417 RTVEC_ELT (p, j++) = gen_rtx_USE (VOIDmode, gen_rtx_REG (Pmode, 0));
13418 RTVEC_ELT (p, j++) = gen_rtx_USE (VOIDmode, treg);
13419 RTVEC_ELT (p, j++) = gen_rtx_CLOBBER (VOIDmode, sp_reg_rtx);
13421 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
13422 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
13423 NULL_RTX, NULL_RTX);
13425 if (current_function_calls_eh_return)
13430 unsigned int regno = EH_RETURN_DATA_REGNO (i);
13431 if (regno == INVALID_REGNUM)
13433 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, reg_mode, regno,
13434 info->ehrd_offset + sp_offset
13435 + reg_size * (int) i,
13441 /* Save AltiVec registers if needed. */
13442 if (!WORLD_SAVE_P (info) && TARGET_ALTIVEC_ABI && info->altivec_size != 0)
13446 /* There should be a non inline version of this, for when we
13447 are saving lots of vector registers. */
13448 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
13449 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
13451 rtx areg, savereg, mem;
13454 offset = info->altivec_save_offset + sp_offset
13455 + 16 * (i - info->first_altivec_reg_save);
13457 savereg = gen_rtx_REG (V4SImode, i);
13459 areg = gen_rtx_REG (Pmode, 0);
13460 emit_move_insn (areg, GEN_INT (offset));
13462 /* AltiVec addressing mode is [reg+reg]. */
13463 mem = gen_rtx_MEM (V4SImode,
13464 gen_rtx_PLUS (Pmode, frame_reg_rtx, areg));
13466 set_mem_alias_set (mem, rs6000_sr_alias_set);
13468 insn = emit_move_insn (mem, savereg);
13470 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
13471 areg, GEN_INT (offset));
13475 /* VRSAVE is a bit vector representing which AltiVec registers
13476 are used. The OS uses this to determine which vector
13477 registers to save on a context switch. We need to save
13478 VRSAVE on the stack frame, add whatever AltiVec registers we
13479 used in this function, and do the corresponding magic in the
13482 if (TARGET_ALTIVEC && TARGET_ALTIVEC_VRSAVE
13483 && info->vrsave_mask != 0)
13485 rtx reg, mem, vrsave;
13488 /* Get VRSAVE onto a GPR. Note that ABI_V4 might be using r12
13489 as frame_reg_rtx and r11 as the static chain pointer for
13490 nested functions. */
13491 reg = gen_rtx_REG (SImode, 0);
13492 vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
13494 emit_insn (gen_get_vrsave_internal (reg));
13496 emit_insn (gen_rtx_SET (VOIDmode, reg, vrsave));
13498 if (!WORLD_SAVE_P (info))
13501 offset = info->vrsave_save_offset + sp_offset;
13503 = gen_rtx_MEM (SImode,
13504 gen_rtx_PLUS (Pmode, frame_reg_rtx, GEN_INT (offset)));
13505 set_mem_alias_set (mem, rs6000_sr_alias_set);
13506 insn = emit_move_insn (mem, reg);
13509 /* Include the registers in the mask. */
13510 emit_insn (gen_iorsi3 (reg, reg, GEN_INT ((int) info->vrsave_mask)));
13512 insn = emit_insn (generate_set_vrsave (reg, info, 0));
13515 /* If we use the link register, get it into r0. */
13516 if (!WORLD_SAVE_P (info) && info->lr_save_p)
13518 insn = emit_move_insn (gen_rtx_REG (Pmode, 0),
13519 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
13520 RTX_FRAME_RELATED_P (insn) = 1;
13523 /* If we need to save CR, put it into r12. */
13524 if (!WORLD_SAVE_P (info) && info->cr_save_p && frame_reg_rtx != frame_ptr_rtx)
13528 cr_save_rtx = gen_rtx_REG (SImode, 12);
13529 insn = emit_insn (gen_movesi_from_cr (cr_save_rtx));
13530 RTX_FRAME_RELATED_P (insn) = 1;
13531 /* Now, there's no way that dwarf2out_frame_debug_expr is going
13532 to understand '(unspec:SI [(reg:CC 68) ...] UNSPEC_MOVESI_FROM_CR)'.
13533 But that's OK. All we have to do is specify that _one_ condition
13534 code register is saved in this stack slot. The thrower's epilogue
13535 will then restore all the call-saved registers.
13536 We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux. */
13537 set = gen_rtx_SET (VOIDmode, cr_save_rtx,
13538 gen_rtx_REG (SImode, CR2_REGNO));
13539 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
13544 /* Do any required saving of fpr's. If only one or two to save, do
13545 it ourselves. Otherwise, call function. */
13546 if (!WORLD_SAVE_P (info) && saving_FPRs_inline)
13549 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
13550 if ((regs_ever_live[info->first_fp_reg_save+i]
13551 && ! call_used_regs[info->first_fp_reg_save+i]))
13552 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, DFmode,
13553 info->first_fp_reg_save + i,
13554 info->fp_save_offset + sp_offset + 8 * i,
13557 else if (!WORLD_SAVE_P (info) && info->first_fp_reg_save != 64)
13561 const char *alloc_rname;
13563 p = rtvec_alloc (2 + 64 - info->first_fp_reg_save);
13565 RTVEC_ELT (p, 0) = gen_rtx_CLOBBER (VOIDmode,
13566 gen_rtx_REG (Pmode,
13567 LINK_REGISTER_REGNUM));
13568 sprintf (rname, "%s%d%s", SAVE_FP_PREFIX,
13569 info->first_fp_reg_save - 32, SAVE_FP_SUFFIX);
13570 alloc_rname = ggc_strdup (rname);
13571 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
13572 gen_rtx_SYMBOL_REF (Pmode,
13574 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
13576 rtx addr, reg, mem;
13577 reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
13578 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
13579 GEN_INT (info->fp_save_offset
13580 + sp_offset + 8*i));
13581 mem = gen_rtx_MEM (DFmode, addr);
13582 set_mem_alias_set (mem, rs6000_sr_alias_set);
13584 RTVEC_ELT (p, i + 2) = gen_rtx_SET (VOIDmode, mem, reg);
13586 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
13587 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
13588 NULL_RTX, NULL_RTX);
13591 /* Save GPRs. This is done as a PARALLEL if we are using
13592 the store-multiple instructions. */
13593 if (!WORLD_SAVE_P (info) && using_store_multiple)
13597 p = rtvec_alloc (32 - info->first_gp_reg_save);
13598 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
13600 rtx addr, reg, mem;
13601 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
13602 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
13603 GEN_INT (info->gp_save_offset
13606 mem = gen_rtx_MEM (reg_mode, addr);
13607 set_mem_alias_set (mem, rs6000_sr_alias_set);
13609 RTVEC_ELT (p, i) = gen_rtx_SET (VOIDmode, mem, reg);
13611 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
13612 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
13613 NULL_RTX, NULL_RTX);
13615 else if (!WORLD_SAVE_P (info))
13618 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
13619 if ((regs_ever_live[info->first_gp_reg_save + i]
13620 && (!call_used_regs[info->first_gp_reg_save + i]
13621 || (i + info->first_gp_reg_save
13622 == RS6000_PIC_OFFSET_TABLE_REGNUM
13623 && TARGET_TOC && TARGET_MINIMAL_TOC)))
13624 || (i + info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
13625 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
13626 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
13628 rtx addr, reg, mem;
13629 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
13631 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
13633 int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
13636 if (!SPE_CONST_OFFSET_OK (offset))
13638 b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
13639 emit_move_insn (b, GEN_INT (offset));
13642 b = GEN_INT (offset);
13644 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
13645 mem = gen_rtx_MEM (V2SImode, addr);
13646 set_mem_alias_set (mem, rs6000_sr_alias_set);
13647 insn = emit_move_insn (mem, reg);
13649 if (GET_CODE (b) == CONST_INT)
13650 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
13651 NULL_RTX, NULL_RTX);
13653 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
13654 b, GEN_INT (offset));
13658 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
13659 GEN_INT (info->gp_save_offset
13662 mem = gen_rtx_MEM (reg_mode, addr);
13663 set_mem_alias_set (mem, rs6000_sr_alias_set);
13665 insn = emit_move_insn (mem, reg);
13666 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
13667 NULL_RTX, NULL_RTX);
13672 /* ??? There's no need to emit actual instructions here, but it's the
13673 easiest way to get the frame unwind information emitted. */
13674 if (!WORLD_SAVE_P (info) && current_function_calls_eh_return)
13676 unsigned int i, regno;
13678 /* In AIX ABI we need to pretend we save r2 here. */
13681 rtx addr, reg, mem;
13683 reg = gen_rtx_REG (reg_mode, 2);
13684 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
13685 GEN_INT (sp_offset + 5 * reg_size));
13686 mem = gen_rtx_MEM (reg_mode, addr);
13687 set_mem_alias_set (mem, rs6000_sr_alias_set);
13689 insn = emit_move_insn (mem, reg);
13690 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
13691 NULL_RTX, NULL_RTX);
13692 PATTERN (insn) = gen_blockage ();
13697 regno = EH_RETURN_DATA_REGNO (i);
13698 if (regno == INVALID_REGNUM)
13701 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, reg_mode, regno,
13702 info->ehrd_offset + sp_offset
13703 + reg_size * (int) i,
13708 /* Save lr if we used it. */
13709 if (!WORLD_SAVE_P (info) && info->lr_save_p)
13711 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
13712 GEN_INT (info->lr_save_offset + sp_offset));
13713 rtx reg = gen_rtx_REG (Pmode, 0);
13714 rtx mem = gen_rtx_MEM (Pmode, addr);
13715 /* This should not be of rs6000_sr_alias_set, because of
13716 __builtin_return_address. */
13718 insn = emit_move_insn (mem, reg);
13719 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
13720 NULL_RTX, NULL_RTX);
13723 /* Save CR if we use any that must be preserved. */
13724 if (!WORLD_SAVE_P (info) && info->cr_save_p)
13726 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
13727 GEN_INT (info->cr_save_offset + sp_offset));
13728 rtx mem = gen_rtx_MEM (SImode, addr);
13729 /* See the large comment above about why CR2_REGNO is used. */
13730 rtx magic_eh_cr_reg = gen_rtx_REG (SImode, CR2_REGNO);
13732 set_mem_alias_set (mem, rs6000_sr_alias_set);
13734 /* If r12 was used to hold the original sp, copy cr into r0 now
13736 if (REGNO (frame_reg_rtx) == 12)
13740 cr_save_rtx = gen_rtx_REG (SImode, 0);
13741 insn = emit_insn (gen_movesi_from_cr (cr_save_rtx));
13742 RTX_FRAME_RELATED_P (insn) = 1;
13743 set = gen_rtx_SET (VOIDmode, cr_save_rtx, magic_eh_cr_reg);
13744 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
13749 insn = emit_move_insn (mem, cr_save_rtx);
13751 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
13752 NULL_RTX, NULL_RTX);
13755 /* Update stack and set back pointer unless this is V.4,
13756 for which it was done previously. */
13757 if (!WORLD_SAVE_P (info) && info->push_p
13758 && !(DEFAULT_ABI == ABI_V4 || current_function_calls_eh_return))
13759 rs6000_emit_allocate_stack (info->total_size, FALSE);
13761 /* Set frame pointer, if needed. */
13762 if (frame_pointer_needed)
13764 insn = emit_move_insn (gen_rtx_REG (Pmode, FRAME_POINTER_REGNUM),
13766 RTX_FRAME_RELATED_P (insn) = 1;
13769 /* If we are using RS6000_PIC_OFFSET_TABLE_REGNUM, we need to set it up. */
13770 if ((TARGET_TOC && TARGET_MINIMAL_TOC && get_pool_size () != 0)
13771 || (DEFAULT_ABI == ABI_V4
13772 && (flag_pic == 1 || (flag_pic && TARGET_SECURE_PLT))
13773 && regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM]))
13775 /* If emit_load_toc_table will use the link register, we need to save
13776 it. We use R12 for this purpose because emit_load_toc_table
13777 can use register 0. This allows us to use a plain 'blr' to return
13778 from the procedure more often. */
13779 int save_LR_around_toc_setup = (TARGET_ELF
13780 && DEFAULT_ABI != ABI_AIX
13782 && ! info->lr_save_p
13783 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0);
13784 if (save_LR_around_toc_setup)
13786 rtx lr = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
13788 insn = emit_move_insn (frame_ptr_rtx, lr);
13789 rs6000_maybe_dead (insn);
13790 RTX_FRAME_RELATED_P (insn) = 1;
13792 rs6000_emit_load_toc_table (TRUE);
13794 insn = emit_move_insn (lr, frame_ptr_rtx);
13795 rs6000_maybe_dead (insn);
13796 RTX_FRAME_RELATED_P (insn) = 1;
13799 rs6000_emit_load_toc_table (TRUE);
13803 if (DEFAULT_ABI == ABI_DARWIN
13804 && flag_pic && current_function_uses_pic_offset_table)
13806 rtx lr = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
13807 rtx src = machopic_function_base_sym ();
13809 /* Save and restore LR locally around this call (in R0). */
13810 if (!info->lr_save_p)
13811 rs6000_maybe_dead (emit_move_insn (gen_rtx_REG (Pmode, 0), lr));
13813 rs6000_maybe_dead (emit_insn (gen_load_macho_picbase (lr, src)));
13815 insn = emit_move_insn (gen_rtx_REG (Pmode,
13816 RS6000_PIC_OFFSET_TABLE_REGNUM),
13818 rs6000_maybe_dead (insn);
13820 if (!info->lr_save_p)
13821 rs6000_maybe_dead (emit_move_insn (lr, gen_rtx_REG (Pmode, 0)));
13826 /* Write function prologue. */
13829 rs6000_output_function_prologue (FILE *file,
13830 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
13832 rs6000_stack_t *info = rs6000_stack_info ();
13834 if (TARGET_DEBUG_STACK)
13835 debug_stack_info (info);
13837 /* Write .extern for any function we will call to save and restore
13839 if (info->first_fp_reg_save < 64
13840 && !FP_SAVE_INLINE (info->first_fp_reg_save))
13841 fprintf (file, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
13842 SAVE_FP_PREFIX, info->first_fp_reg_save - 32, SAVE_FP_SUFFIX,
13843 RESTORE_FP_PREFIX, info->first_fp_reg_save - 32,
13844 RESTORE_FP_SUFFIX);
13846 /* Write .extern for AIX common mode routines, if needed. */
13847 if (! TARGET_POWER && ! TARGET_POWERPC && ! common_mode_defined)
13849 fputs ("\t.extern __mulh\n", file);
13850 fputs ("\t.extern __mull\n", file);
13851 fputs ("\t.extern __divss\n", file);
13852 fputs ("\t.extern __divus\n", file);
13853 fputs ("\t.extern __quoss\n", file);
13854 fputs ("\t.extern __quous\n", file);
13855 common_mode_defined = 1;
13858 if (! HAVE_prologue)
13862 /* A NOTE_INSN_DELETED is supposed to be at the start and end of
13863 the "toplevel" insn chain. */
13864 emit_note (NOTE_INSN_DELETED);
13865 rs6000_emit_prologue ();
13866 emit_note (NOTE_INSN_DELETED);
13868 /* Expand INSN_ADDRESSES so final() doesn't crash. */
13872 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
13874 INSN_ADDRESSES_NEW (insn, addr);
13879 if (TARGET_DEBUG_STACK)
13880 debug_rtx_list (get_insns (), 100);
13881 final (get_insns (), file, FALSE);
13885 rs6000_pic_labelno++;
13888 /* Emit function epilogue as insns.
13890 At present, dwarf2out_frame_debug_expr doesn't understand
13891 register restores, so we don't bother setting RTX_FRAME_RELATED_P
13892 anywhere in the epilogue. Most of the insns below would in any case
13893 need special notes to explain where r11 is in relation to the stack. */
13896 rs6000_emit_epilogue (int sibcall)
13898 rs6000_stack_t *info;
13899 int restoring_FPRs_inline;
13900 int using_load_multiple;
13901 int using_mfcr_multiple;
13902 int use_backchain_to_restore_sp;
13904 rtx sp_reg_rtx = gen_rtx_REG (Pmode, 1);
13905 rtx frame_reg_rtx = sp_reg_rtx;
13906 enum machine_mode reg_mode = Pmode;
13907 int reg_size = TARGET_32BIT ? 4 : 8;
13910 info = rs6000_stack_info ();
13912 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
13914 reg_mode = V2SImode;
13918 using_load_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
13919 && (!TARGET_SPE_ABI
13920 || info->spe_64bit_regs_used == 0)
13921 && info->first_gp_reg_save < 31
13922 && no_global_regs_above (info->first_gp_reg_save));
13923 restoring_FPRs_inline = (sibcall
13924 || current_function_calls_eh_return
13925 || info->first_fp_reg_save == 64
13926 || FP_SAVE_INLINE (info->first_fp_reg_save));
13927 use_backchain_to_restore_sp = (frame_pointer_needed
13928 || current_function_calls_alloca
13929 || info->total_size > 32767);
13930 using_mfcr_multiple = (rs6000_cpu == PROCESSOR_PPC601
13931 || rs6000_cpu == PROCESSOR_PPC603
13932 || rs6000_cpu == PROCESSOR_PPC750
13935 if (WORLD_SAVE_P (info))
13939 const char *alloc_rname;
13942 /* eh_rest_world_r10 will return to the location saved in the LR
13943 stack slot (which is not likely to be our caller.)
13944 Input: R10 -- stack adjustment. Clobbers R0, R11, R12, R7, R8.
13945 rest_world is similar, except any R10 parameter is ignored.
13946 The exception-handling stuff that was here in 2.95 is no
13947 longer necessary. */
13951 + 32 - info->first_gp_reg_save
13952 + LAST_ALTIVEC_REGNO + 1 - info->first_altivec_reg_save
13953 + 63 + 1 - info->first_fp_reg_save);
13955 strcpy (rname, ((current_function_calls_eh_return) ?
13956 "*eh_rest_world_r10" : "*rest_world"));
13957 alloc_rname = ggc_strdup (rname);
13960 RTVEC_ELT (p, j++) = gen_rtx_RETURN (VOIDmode);
13961 RTVEC_ELT (p, j++) = gen_rtx_USE (VOIDmode,
13962 gen_rtx_REG (Pmode,
13963 LINK_REGISTER_REGNUM));
13965 = gen_rtx_USE (VOIDmode, gen_rtx_SYMBOL_REF (Pmode, alloc_rname));
13966 /* The instruction pattern requires a clobber here;
13967 it is shared with the restVEC helper. */
13969 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 11));
13972 /* CR register traditionally saved as CR2. */
13973 rtx reg = gen_rtx_REG (reg_mode, CR2_REGNO);
13974 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
13975 GEN_INT (info->cr_save_offset));
13976 rtx mem = gen_rtx_MEM (reg_mode, addr);
13977 set_mem_alias_set (mem, rs6000_sr_alias_set);
13979 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
13982 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
13984 rtx reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
13985 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
13986 GEN_INT (info->gp_save_offset
13988 rtx mem = gen_rtx_MEM (reg_mode, addr);
13989 set_mem_alias_set (mem, rs6000_sr_alias_set);
13991 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
13993 for (i = 0; info->first_altivec_reg_save + i <= LAST_ALTIVEC_REGNO; i++)
13995 rtx reg = gen_rtx_REG (V4SImode, info->first_altivec_reg_save + i);
13996 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
13997 GEN_INT (info->altivec_save_offset
13999 rtx mem = gen_rtx_MEM (V4SImode, addr);
14000 set_mem_alias_set (mem, rs6000_sr_alias_set);
14002 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
14004 for (i = 0; info->first_fp_reg_save + i <= 63; i++)
14006 rtx reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
14007 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14008 GEN_INT (info->fp_save_offset
14010 rtx mem = gen_rtx_MEM (DFmode, addr);
14011 set_mem_alias_set (mem, rs6000_sr_alias_set);
14013 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
14016 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 0));
14018 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, 12));
14020 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, 7));
14022 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, 8));
14024 = gen_rtx_USE (VOIDmode, gen_rtx_REG (SImode, 10));
14025 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
14030 /* If we have a frame pointer, a call to alloca, or a large stack
14031 frame, restore the old stack pointer using the backchain. Otherwise,
14032 we know what size to update it with. */
14033 if (use_backchain_to_restore_sp)
14035 /* Under V.4, don't reset the stack pointer until after we're done
14036 loading the saved registers. */
14037 if (DEFAULT_ABI == ABI_V4)
14038 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
14040 emit_move_insn (frame_reg_rtx,
14041 gen_rtx_MEM (Pmode, sp_reg_rtx));
14044 else if (info->push_p)
14046 if (DEFAULT_ABI == ABI_V4
14047 || current_function_calls_eh_return)
14048 sp_offset = info->total_size;
14051 emit_insn (TARGET_32BIT
14052 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
14053 GEN_INT (info->total_size))
14054 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
14055 GEN_INT (info->total_size)));
14059 /* Restore AltiVec registers if needed. */
14060 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
14064 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
14065 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
14067 rtx addr, areg, mem;
14069 areg = gen_rtx_REG (Pmode, 0);
14071 (areg, GEN_INT (info->altivec_save_offset
14073 + 16 * (i - info->first_altivec_reg_save)));
14075 /* AltiVec addressing mode is [reg+reg]. */
14076 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
14077 mem = gen_rtx_MEM (V4SImode, addr);
14078 set_mem_alias_set (mem, rs6000_sr_alias_set);
14080 emit_move_insn (gen_rtx_REG (V4SImode, i), mem);
14084 /* Restore VRSAVE if needed. */
14085 if (TARGET_ALTIVEC && TARGET_ALTIVEC_VRSAVE
14086 && info->vrsave_mask != 0)
14088 rtx addr, mem, reg;
14090 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14091 GEN_INT (info->vrsave_save_offset + sp_offset));
14092 mem = gen_rtx_MEM (SImode, addr);
14093 set_mem_alias_set (mem, rs6000_sr_alias_set);
14094 reg = gen_rtx_REG (SImode, 12);
14095 emit_move_insn (reg, mem);
14097 emit_insn (generate_set_vrsave (reg, info, 1));
14100 /* Get the old lr if we saved it. */
14101 if (info->lr_save_p)
14103 rtx mem = gen_frame_mem_offset (Pmode, frame_reg_rtx,
14104 info->lr_save_offset + sp_offset);
14106 set_mem_alias_set (mem, rs6000_sr_alias_set);
14108 emit_move_insn (gen_rtx_REG (Pmode, 0), mem);
14111 /* Get the old cr if we saved it. */
14112 if (info->cr_save_p)
14114 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14115 GEN_INT (info->cr_save_offset + sp_offset));
14116 rtx mem = gen_rtx_MEM (SImode, addr);
14118 set_mem_alias_set (mem, rs6000_sr_alias_set);
14120 emit_move_insn (gen_rtx_REG (SImode, 12), mem);
14123 /* Set LR here to try to overlap restores below. */
14124 if (info->lr_save_p)
14125 emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
14126 gen_rtx_REG (Pmode, 0));
14128 /* Load exception handler data registers, if needed. */
14129 if (current_function_calls_eh_return)
14131 unsigned int i, regno;
14135 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14136 GEN_INT (sp_offset + 5 * reg_size));
14137 rtx mem = gen_rtx_MEM (reg_mode, addr);
14139 set_mem_alias_set (mem, rs6000_sr_alias_set);
14141 emit_move_insn (gen_rtx_REG (reg_mode, 2), mem);
14148 regno = EH_RETURN_DATA_REGNO (i);
14149 if (regno == INVALID_REGNUM)
14152 mem = gen_frame_mem_offset (reg_mode, frame_reg_rtx,
14153 info->ehrd_offset + sp_offset
14154 + reg_size * (int) i);
14155 set_mem_alias_set (mem, rs6000_sr_alias_set);
14157 emit_move_insn (gen_rtx_REG (reg_mode, regno), mem);
14161 /* Restore GPRs. This is done as a PARALLEL if we are using
14162 the load-multiple instructions. */
14163 if (using_load_multiple)
14166 p = rtvec_alloc (32 - info->first_gp_reg_save);
14167 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
14169 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14170 GEN_INT (info->gp_save_offset
14173 rtx mem = gen_rtx_MEM (reg_mode, addr);
14175 set_mem_alias_set (mem, rs6000_sr_alias_set);
14178 gen_rtx_SET (VOIDmode,
14179 gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
14182 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
14185 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
14186 if ((regs_ever_live[info->first_gp_reg_save + i]
14187 && (!call_used_regs[info->first_gp_reg_save + i]
14188 || (i + info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
14189 && TARGET_TOC && TARGET_MINIMAL_TOC)))
14190 || (i + info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
14191 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
14192 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
14194 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14195 GEN_INT (info->gp_save_offset
14198 rtx mem = gen_rtx_MEM (reg_mode, addr);
14200 /* Restore 64-bit quantities for SPE. */
14201 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
14203 int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
14206 if (!SPE_CONST_OFFSET_OK (offset))
14208 b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
14209 emit_move_insn (b, GEN_INT (offset));
14212 b = GEN_INT (offset);
14214 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
14215 mem = gen_rtx_MEM (V2SImode, addr);
14218 set_mem_alias_set (mem, rs6000_sr_alias_set);
14220 emit_move_insn (gen_rtx_REG (reg_mode,
14221 info->first_gp_reg_save + i), mem);
14224 /* Restore fpr's if we need to do it without calling a function. */
14225 if (restoring_FPRs_inline)
14226 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
14227 if ((regs_ever_live[info->first_fp_reg_save+i]
14228 && ! call_used_regs[info->first_fp_reg_save+i]))
14231 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14232 GEN_INT (info->fp_save_offset
14235 mem = gen_rtx_MEM (DFmode, addr);
14236 set_mem_alias_set (mem, rs6000_sr_alias_set);
14238 emit_move_insn (gen_rtx_REG (DFmode,
14239 info->first_fp_reg_save + i),
14243 /* If we saved cr, restore it here. Just those that were used. */
14244 if (info->cr_save_p)
14246 rtx r12_rtx = gen_rtx_REG (SImode, 12);
14249 if (using_mfcr_multiple)
14251 for (i = 0; i < 8; i++)
14252 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
14254 gcc_assert (count);
14257 if (using_mfcr_multiple && count > 1)
14262 p = rtvec_alloc (count);
14265 for (i = 0; i < 8; i++)
14266 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
14268 rtvec r = rtvec_alloc (2);
14269 RTVEC_ELT (r, 0) = r12_rtx;
14270 RTVEC_ELT (r, 1) = GEN_INT (1 << (7-i));
14271 RTVEC_ELT (p, ndx) =
14272 gen_rtx_SET (VOIDmode, gen_rtx_REG (CCmode, CR0_REGNO+i),
14273 gen_rtx_UNSPEC (CCmode, r, UNSPEC_MOVESI_TO_CR));
14276 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
14277 gcc_assert (ndx == count);
14280 for (i = 0; i < 8; i++)
14281 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
14283 emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode,
14289 /* If this is V.4, unwind the stack pointer after all of the loads
14290 have been done. We need to emit a block here so that sched
14291 doesn't decide to move the sp change before the register restores
14292 (which may not have any obvious dependency on the stack). This
14293 doesn't hurt performance, because there is no scheduling that can
14294 be done after this point. */
14295 if (DEFAULT_ABI == ABI_V4
14296 || current_function_calls_eh_return)
14298 if (frame_reg_rtx != sp_reg_rtx)
14299 rs6000_emit_stack_tie ();
14301 if (use_backchain_to_restore_sp)
14303 emit_move_insn (sp_reg_rtx, frame_reg_rtx);
14305 else if (sp_offset != 0)
14307 emit_insn (TARGET_32BIT
14308 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
14309 GEN_INT (sp_offset))
14310 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
14311 GEN_INT (sp_offset)));
14315 if (current_function_calls_eh_return)
14317 rtx sa = EH_RETURN_STACKADJ_RTX;
14318 emit_insn (TARGET_32BIT
14319 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx, sa)
14320 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx, sa));
14326 if (! restoring_FPRs_inline)
14327 p = rtvec_alloc (3 + 64 - info->first_fp_reg_save);
14329 p = rtvec_alloc (2);
14331 RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
14332 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
14333 gen_rtx_REG (Pmode,
14334 LINK_REGISTER_REGNUM));
14336 /* If we have to restore more than two FP registers, branch to the
14337 restore function. It will return to our caller. */
14338 if (! restoring_FPRs_inline)
14342 const char *alloc_rname;
14344 sprintf (rname, "%s%d%s", RESTORE_FP_PREFIX,
14345 info->first_fp_reg_save - 32, RESTORE_FP_SUFFIX);
14346 alloc_rname = ggc_strdup (rname);
14347 RTVEC_ELT (p, 2) = gen_rtx_USE (VOIDmode,
14348 gen_rtx_SYMBOL_REF (Pmode,
14351 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
14354 addr = gen_rtx_PLUS (Pmode, sp_reg_rtx,
14355 GEN_INT (info->fp_save_offset + 8*i));
14356 mem = gen_rtx_MEM (DFmode, addr);
14357 set_mem_alias_set (mem, rs6000_sr_alias_set);
14359 RTVEC_ELT (p, i+3) =
14360 gen_rtx_SET (VOIDmode,
14361 gen_rtx_REG (DFmode, info->first_fp_reg_save + i),
14366 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
14370 /* Write function epilogue. */
14373 rs6000_output_function_epilogue (FILE *file,
14374 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
14376 rs6000_stack_t *info = rs6000_stack_info ();
14378 if (! HAVE_epilogue)
14380 rtx insn = get_last_insn ();
14381 /* If the last insn was a BARRIER, we don't have to write anything except
14382 the trace table. */
14383 if (GET_CODE (insn) == NOTE)
14384 insn = prev_nonnote_insn (insn);
14385 if (insn == 0 || GET_CODE (insn) != BARRIER)
14387 /* This is slightly ugly, but at least we don't have two
14388 copies of the epilogue-emitting code. */
14391 /* A NOTE_INSN_DELETED is supposed to be at the start
14392 and end of the "toplevel" insn chain. */
14393 emit_note (NOTE_INSN_DELETED);
14394 rs6000_emit_epilogue (FALSE);
14395 emit_note (NOTE_INSN_DELETED);
14397 /* Expand INSN_ADDRESSES so final() doesn't crash. */
14401 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
14403 INSN_ADDRESSES_NEW (insn, addr);
14408 if (TARGET_DEBUG_STACK)
14409 debug_rtx_list (get_insns (), 100);
14410 final (get_insns (), file, FALSE);
14416 macho_branch_islands ();
14417 /* Mach-O doesn't support labels at the end of objects, so if
14418 it looks like we might want one, insert a NOP. */
14420 rtx insn = get_last_insn ();
14423 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_DELETED_LABEL)
14424 insn = PREV_INSN (insn);
14428 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_DELETED_LABEL)))
14429 fputs ("\tnop\n", file);
14433 /* Output a traceback table here. See /usr/include/sys/debug.h for info
14436 We don't output a traceback table if -finhibit-size-directive was
14437 used. The documentation for -finhibit-size-directive reads
14438 ``don't output a @code{.size} assembler directive, or anything
14439 else that would cause trouble if the function is split in the
14440 middle, and the two halves are placed at locations far apart in
14441 memory.'' The traceback table has this property, since it
14442 includes the offset from the start of the function to the
14443 traceback table itself.
14445 System V.4 Powerpc's (and the embedded ABI derived from it) use a
14446 different traceback table. */
14447 if (DEFAULT_ABI == ABI_AIX && ! flag_inhibit_size_directive
14448 && rs6000_traceback != traceback_none)
14450 const char *fname = NULL;
14451 const char *language_string = lang_hooks.name;
14452 int fixed_parms = 0, float_parms = 0, parm_info = 0;
14454 int optional_tbtab;
14456 if (rs6000_traceback == traceback_full)
14457 optional_tbtab = 1;
14458 else if (rs6000_traceback == traceback_part)
14459 optional_tbtab = 0;
14461 optional_tbtab = !optimize_size && !TARGET_ELF;
14463 if (optional_tbtab)
14465 fname = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
14466 while (*fname == '.') /* V.4 encodes . in the name */
14469 /* Need label immediately before tbtab, so we can compute
14470 its offset from the function start. */
14471 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
14472 ASM_OUTPUT_LABEL (file, fname);
14475 /* The .tbtab pseudo-op can only be used for the first eight
14476 expressions, since it can't handle the possibly variable
14477 length fields that follow. However, if you omit the optional
14478 fields, the assembler outputs zeros for all optional fields
14479 anyways, giving each variable length field is minimum length
14480 (as defined in sys/debug.h). Thus we can not use the .tbtab
14481 pseudo-op at all. */
14483 /* An all-zero word flags the start of the tbtab, for debuggers
14484 that have to find it by searching forward from the entry
14485 point or from the current pc. */
14486 fputs ("\t.long 0\n", file);
14488 /* Tbtab format type. Use format type 0. */
14489 fputs ("\t.byte 0,", file);
14491 /* Language type. Unfortunately, there does not seem to be any
14492 official way to discover the language being compiled, so we
14493 use language_string.
14494 C is 0. Fortran is 1. Pascal is 2. Ada is 3. C++ is 9.
14495 Java is 13. Objective-C is 14. */
14496 if (! strcmp (language_string, "GNU C"))
14498 else if (! strcmp (language_string, "GNU F77")
14499 || ! strcmp (language_string, "GNU F95"))
14501 else if (! strcmp (language_string, "GNU Pascal"))
14503 else if (! strcmp (language_string, "GNU Ada"))
14505 else if (! strcmp (language_string, "GNU C++"))
14507 else if (! strcmp (language_string, "GNU Java"))
14509 else if (! strcmp (language_string, "GNU Objective-C"))
14512 gcc_unreachable ();
14513 fprintf (file, "%d,", i);
14515 /* 8 single bit fields: global linkage (not set for C extern linkage,
14516 apparently a PL/I convention?), out-of-line epilogue/prologue, offset
14517 from start of procedure stored in tbtab, internal function, function
14518 has controlled storage, function has no toc, function uses fp,
14519 function logs/aborts fp operations. */
14520 /* Assume that fp operations are used if any fp reg must be saved. */
14521 fprintf (file, "%d,",
14522 (optional_tbtab << 5) | ((info->first_fp_reg_save != 64) << 1));
14524 /* 6 bitfields: function is interrupt handler, name present in
14525 proc table, function calls alloca, on condition directives
14526 (controls stack walks, 3 bits), saves condition reg, saves
14528 /* The `function calls alloca' bit seems to be set whenever reg 31 is
14529 set up as a frame pointer, even when there is no alloca call. */
14530 fprintf (file, "%d,",
14531 ((optional_tbtab << 6)
14532 | ((optional_tbtab & frame_pointer_needed) << 5)
14533 | (info->cr_save_p << 1)
14534 | (info->lr_save_p)));
14536 /* 3 bitfields: saves backchain, fixup code, number of fpr saved
14538 fprintf (file, "%d,",
14539 (info->push_p << 7) | (64 - info->first_fp_reg_save));
14541 /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits). */
14542 fprintf (file, "%d,", (32 - first_reg_to_save ()));
14544 if (optional_tbtab)
14546 /* Compute the parameter info from the function decl argument
14549 int next_parm_info_bit = 31;
14551 for (decl = DECL_ARGUMENTS (current_function_decl);
14552 decl; decl = TREE_CHAIN (decl))
14554 rtx parameter = DECL_INCOMING_RTL (decl);
14555 enum machine_mode mode = GET_MODE (parameter);
14557 if (GET_CODE (parameter) == REG)
14559 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
14577 gcc_unreachable ();
14580 /* If only one bit will fit, don't or in this entry. */
14581 if (next_parm_info_bit > 0)
14582 parm_info |= (bits << (next_parm_info_bit - 1));
14583 next_parm_info_bit -= 2;
14587 fixed_parms += ((GET_MODE_SIZE (mode)
14588 + (UNITS_PER_WORD - 1))
14590 next_parm_info_bit -= 1;
14596 /* Number of fixed point parameters. */
14597 /* This is actually the number of words of fixed point parameters; thus
14598 an 8 byte struct counts as 2; and thus the maximum value is 8. */
14599 fprintf (file, "%d,", fixed_parms);
14601 /* 2 bitfields: number of floating point parameters (7 bits), parameters
14603 /* This is actually the number of fp registers that hold parameters;
14604 and thus the maximum value is 13. */
14605 /* Set parameters on stack bit if parameters are not in their original
14606 registers, regardless of whether they are on the stack? Xlc
14607 seems to set the bit when not optimizing. */
14608 fprintf (file, "%d\n", ((float_parms << 1) | (! optimize)));
14610 if (! optional_tbtab)
14613 /* Optional fields follow. Some are variable length. */
14615 /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
14616 11 double float. */
14617 /* There is an entry for each parameter in a register, in the order that
14618 they occur in the parameter list. Any intervening arguments on the
14619 stack are ignored. If the list overflows a long (max possible length
14620 34 bits) then completely leave off all elements that don't fit. */
14621 /* Only emit this long if there was at least one parameter. */
14622 if (fixed_parms || float_parms)
14623 fprintf (file, "\t.long %d\n", parm_info);
14625 /* Offset from start of code to tb table. */
14626 fputs ("\t.long ", file);
14627 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
14629 RS6000_OUTPUT_BASENAME (file, fname);
14631 assemble_name (file, fname);
14633 rs6000_output_function_entry (file, fname);
14636 /* Interrupt handler mask. */
14637 /* Omit this long, since we never set the interrupt handler bit
14640 /* Number of CTL (controlled storage) anchors. */
14641 /* Omit this long, since the has_ctl bit is never set above. */
14643 /* Displacement into stack of each CTL anchor. */
14644 /* Omit this list of longs, because there are no CTL anchors. */
14646 /* Length of function name. */
14649 fprintf (file, "\t.short %d\n", (int) strlen (fname));
14651 /* Function name. */
14652 assemble_string (fname, strlen (fname));
14654 /* Register for alloca automatic storage; this is always reg 31.
14655 Only emit this if the alloca bit was set above. */
14656 if (frame_pointer_needed)
14657 fputs ("\t.byte 31\n", file);
14659 fputs ("\t.align 2\n", file);
14663 /* A C compound statement that outputs the assembler code for a thunk
14664 function, used to implement C++ virtual function calls with
14665 multiple inheritance. The thunk acts as a wrapper around a virtual
14666 function, adjusting the implicit object parameter before handing
14667 control off to the real function.
14669 First, emit code to add the integer DELTA to the location that
14670 contains the incoming first argument. Assume that this argument
14671 contains a pointer, and is the one used to pass the `this' pointer
14672 in C++. This is the incoming argument *before* the function
14673 prologue, e.g. `%o0' on a sparc. The addition must preserve the
14674 values of all other incoming arguments.
14676 After the addition, emit code to jump to FUNCTION, which is a
14677 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does
14678 not touch the return address. Hence returning from FUNCTION will
14679 return to whoever called the current `thunk'.
14681 The effect must be as if FUNCTION had been called directly with the
14682 adjusted first argument. This macro is responsible for emitting
14683 all of the code for a thunk function; output_function_prologue()
14684 and output_function_epilogue() are not invoked.
14686 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already
14687 been extracted from it.) It might possibly be useful on some
14688 targets, but probably not.
14690 If you do not define this macro, the target-independent code in the
14691 C++ frontend will generate a less efficient heavyweight thunk that
14692 calls FUNCTION instead of jumping to it. The generic approach does
14693 not support varargs. */
14696 rs6000_output_mi_thunk (FILE *file, tree thunk_fndecl ATTRIBUTE_UNUSED,
14697 HOST_WIDE_INT delta, HOST_WIDE_INT vcall_offset,
14700 rtx this, insn, funexp;
14702 reload_completed = 1;
14703 epilogue_completed = 1;
14704 no_new_pseudos = 1;
14705 reset_block_changes ();
14707 /* Mark the end of the (empty) prologue. */
14708 emit_note (NOTE_INSN_PROLOGUE_END);
14710 /* Find the "this" pointer. If the function returns a structure,
14711 the structure return pointer is in r3. */
14712 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
14713 this = gen_rtx_REG (Pmode, 4);
14715 this = gen_rtx_REG (Pmode, 3);
14717 /* Apply the constant offset, if required. */
14720 rtx delta_rtx = GEN_INT (delta);
14721 emit_insn (TARGET_32BIT
14722 ? gen_addsi3 (this, this, delta_rtx)
14723 : gen_adddi3 (this, this, delta_rtx));
14726 /* Apply the offset from the vtable, if required. */
14729 rtx vcall_offset_rtx = GEN_INT (vcall_offset);
14730 rtx tmp = gen_rtx_REG (Pmode, 12);
14732 emit_move_insn (tmp, gen_rtx_MEM (Pmode, this));
14733 if (((unsigned HOST_WIDE_INT) vcall_offset) + 0x8000 >= 0x10000)
14735 emit_insn (TARGET_32BIT
14736 ? gen_addsi3 (tmp, tmp, vcall_offset_rtx)
14737 : gen_adddi3 (tmp, tmp, vcall_offset_rtx));
14738 emit_move_insn (tmp, gen_rtx_MEM (Pmode, tmp));
14742 rtx loc = gen_rtx_PLUS (Pmode, tmp, vcall_offset_rtx);
14744 emit_move_insn (tmp, gen_rtx_MEM (Pmode, loc));
14746 emit_insn (TARGET_32BIT
14747 ? gen_addsi3 (this, this, tmp)
14748 : gen_adddi3 (this, this, tmp));
14751 /* Generate a tail call to the target function. */
14752 if (!TREE_USED (function))
14754 assemble_external (function);
14755 TREE_USED (function) = 1;
14757 funexp = XEXP (DECL_RTL (function), 0);
14758 funexp = gen_rtx_MEM (FUNCTION_MODE, funexp);
14761 if (MACHOPIC_INDIRECT)
14762 funexp = machopic_indirect_call_target (funexp);
14765 /* gen_sibcall expects reload to convert scratch pseudo to LR so we must
14766 generate sibcall RTL explicitly. */
14767 insn = emit_call_insn (
14768 gen_rtx_PARALLEL (VOIDmode,
14770 gen_rtx_CALL (VOIDmode,
14771 funexp, const0_rtx),
14772 gen_rtx_USE (VOIDmode, const0_rtx),
14773 gen_rtx_USE (VOIDmode,
14774 gen_rtx_REG (SImode,
14775 LINK_REGISTER_REGNUM)),
14776 gen_rtx_RETURN (VOIDmode))));
14777 SIBLING_CALL_P (insn) = 1;
14780 /* Run just enough of rest_of_compilation to get the insns emitted.
14781 There's not really enough bulk here to make other passes such as
14782 instruction scheduling worth while. Note that use_thunk calls
14783 assemble_start_function and assemble_end_function. */
14784 insn = get_insns ();
14785 insn_locators_initialize ();
14786 shorten_branches (insn);
14787 final_start_function (insn, file, 1);
14788 final (insn, file, 1);
14789 final_end_function ();
14791 reload_completed = 0;
14792 epilogue_completed = 0;
14793 no_new_pseudos = 0;
14796 /* A quick summary of the various types of 'constant-pool tables'
14799 Target Flags Name One table per
14800 AIX (none) AIX TOC object file
14801 AIX -mfull-toc AIX TOC object file
14802 AIX -mminimal-toc AIX minimal TOC translation unit
14803 SVR4/EABI (none) SVR4 SDATA object file
14804 SVR4/EABI -fpic SVR4 pic object file
14805 SVR4/EABI -fPIC SVR4 PIC translation unit
14806 SVR4/EABI -mrelocatable EABI TOC function
14807 SVR4/EABI -maix AIX TOC object file
14808 SVR4/EABI -maix -mminimal-toc
14809 AIX minimal TOC translation unit
14811 Name Reg. Set by entries contains:
14812 made by addrs? fp? sum?
14814 AIX TOC 2 crt0 as Y option option
14815 AIX minimal TOC 30 prolog gcc Y Y option
14816 SVR4 SDATA 13 crt0 gcc N Y N
14817 SVR4 pic 30 prolog ld Y not yet N
14818 SVR4 PIC 30 prolog gcc Y option option
14819 EABI TOC 30 prolog gcc Y option option
14823 /* Hash functions for the hash table. */
14826 rs6000_hash_constant (rtx k)
14828 enum rtx_code code = GET_CODE (k);
14829 enum machine_mode mode = GET_MODE (k);
14830 unsigned result = (code << 3) ^ mode;
14831 const char *format;
14834 format = GET_RTX_FORMAT (code);
14835 flen = strlen (format);
14841 return result * 1231 + (unsigned) INSN_UID (XEXP (k, 0));
14844 if (mode != VOIDmode)
14845 return real_hash (CONST_DOUBLE_REAL_VALUE (k)) * result;
14857 for (; fidx < flen; fidx++)
14858 switch (format[fidx])
14863 const char *str = XSTR (k, fidx);
14864 len = strlen (str);
14865 result = result * 613 + len;
14866 for (i = 0; i < len; i++)
14867 result = result * 613 + (unsigned) str[i];
14872 result = result * 1231 + rs6000_hash_constant (XEXP (k, fidx));
14876 result = result * 613 + (unsigned) XINT (k, fidx);
14879 if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT))
14880 result = result * 613 + (unsigned) XWINT (k, fidx);
14884 for (i = 0; i < sizeof (HOST_WIDE_INT) / sizeof (unsigned); i++)
14885 result = result * 613 + (unsigned) (XWINT (k, fidx)
14892 gcc_unreachable ();
14899 toc_hash_function (const void *hash_entry)
14901 const struct toc_hash_struct *thc =
14902 (const struct toc_hash_struct *) hash_entry;
14903 return rs6000_hash_constant (thc->key) ^ thc->key_mode;
14906 /* Compare H1 and H2 for equivalence. */
14909 toc_hash_eq (const void *h1, const void *h2)
14911 rtx r1 = ((const struct toc_hash_struct *) h1)->key;
14912 rtx r2 = ((const struct toc_hash_struct *) h2)->key;
14914 if (((const struct toc_hash_struct *) h1)->key_mode
14915 != ((const struct toc_hash_struct *) h2)->key_mode)
14918 return rtx_equal_p (r1, r2);
14921 /* These are the names given by the C++ front-end to vtables, and
14922 vtable-like objects. Ideally, this logic should not be here;
14923 instead, there should be some programmatic way of inquiring as
14924 to whether or not an object is a vtable. */
14926 #define VTABLE_NAME_P(NAME) \
14927 (strncmp ("_vt.", name, strlen ("_vt.")) == 0 \
14928 || strncmp ("_ZTV", name, strlen ("_ZTV")) == 0 \
14929 || strncmp ("_ZTT", name, strlen ("_ZTT")) == 0 \
14930 || strncmp ("_ZTI", name, strlen ("_ZTI")) == 0 \
14931 || strncmp ("_ZTC", name, strlen ("_ZTC")) == 0)
14934 rs6000_output_symbol_ref (FILE *file, rtx x)
14936 /* Currently C++ toc references to vtables can be emitted before it
14937 is decided whether the vtable is public or private. If this is
14938 the case, then the linker will eventually complain that there is
14939 a reference to an unknown section. Thus, for vtables only,
14940 we emit the TOC reference to reference the symbol and not the
14942 const char *name = XSTR (x, 0);
14944 if (VTABLE_NAME_P (name))
14946 RS6000_OUTPUT_BASENAME (file, name);
14949 assemble_name (file, name);
14952 /* Output a TOC entry. We derive the entry name from what is being
14956 output_toc (FILE *file, rtx x, int labelno, enum machine_mode mode)
14959 const char *name = buf;
14960 const char *real_name;
14964 gcc_assert (!TARGET_NO_TOC);
14966 /* When the linker won't eliminate them, don't output duplicate
14967 TOC entries (this happens on AIX if there is any kind of TOC,
14968 and on SVR4 under -fPIC or -mrelocatable). Don't do this for
14970 if (TARGET_TOC && GET_CODE (x) != LABEL_REF)
14972 struct toc_hash_struct *h;
14975 /* Create toc_hash_table. This can't be done at OVERRIDE_OPTIONS
14976 time because GGC is not initialized at that point. */
14977 if (toc_hash_table == NULL)
14978 toc_hash_table = htab_create_ggc (1021, toc_hash_function,
14979 toc_hash_eq, NULL);
14981 h = ggc_alloc (sizeof (*h));
14983 h->key_mode = mode;
14984 h->labelno = labelno;
14986 found = htab_find_slot (toc_hash_table, h, 1);
14987 if (*found == NULL)
14989 else /* This is indeed a duplicate.
14990 Set this label equal to that label. */
14992 fputs ("\t.set ", file);
14993 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
14994 fprintf (file, "%d,", labelno);
14995 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
14996 fprintf (file, "%d\n", ((*(const struct toc_hash_struct **)
15002 /* If we're going to put a double constant in the TOC, make sure it's
15003 aligned properly when strict alignment is on. */
15004 if (GET_CODE (x) == CONST_DOUBLE
15005 && STRICT_ALIGNMENT
15006 && GET_MODE_BITSIZE (mode) >= 64
15007 && ! (TARGET_NO_FP_IN_TOC && ! TARGET_MINIMAL_TOC)) {
15008 ASM_OUTPUT_ALIGN (file, 3);
15011 (*targetm.asm_out.internal_label) (file, "LC", labelno);
15013 /* Handle FP constants specially. Note that if we have a minimal
15014 TOC, things we put here aren't actually in the TOC, so we can allow
15016 if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == TFmode)
15018 REAL_VALUE_TYPE rv;
15021 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
15022 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);
15026 if (TARGET_MINIMAL_TOC)
15027 fputs (DOUBLE_INT_ASM_OP, file);
15029 fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
15030 k[0] & 0xffffffff, k[1] & 0xffffffff,
15031 k[2] & 0xffffffff, k[3] & 0xffffffff);
15032 fprintf (file, "0x%lx%08lx,0x%lx%08lx\n",
15033 k[0] & 0xffffffff, k[1] & 0xffffffff,
15034 k[2] & 0xffffffff, k[3] & 0xffffffff);
15039 if (TARGET_MINIMAL_TOC)
15040 fputs ("\t.long ", file);
15042 fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
15043 k[0] & 0xffffffff, k[1] & 0xffffffff,
15044 k[2] & 0xffffffff, k[3] & 0xffffffff);
15045 fprintf (file, "0x%lx,0x%lx,0x%lx,0x%lx\n",
15046 k[0] & 0xffffffff, k[1] & 0xffffffff,
15047 k[2] & 0xffffffff, k[3] & 0xffffffff);
15051 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
15053 REAL_VALUE_TYPE rv;
15056 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
15057 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
15061 if (TARGET_MINIMAL_TOC)
15062 fputs (DOUBLE_INT_ASM_OP, file);
15064 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
15065 k[0] & 0xffffffff, k[1] & 0xffffffff);
15066 fprintf (file, "0x%lx%08lx\n",
15067 k[0] & 0xffffffff, k[1] & 0xffffffff);
15072 if (TARGET_MINIMAL_TOC)
15073 fputs ("\t.long ", file);
15075 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
15076 k[0] & 0xffffffff, k[1] & 0xffffffff);
15077 fprintf (file, "0x%lx,0x%lx\n",
15078 k[0] & 0xffffffff, k[1] & 0xffffffff);
15082 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
15084 REAL_VALUE_TYPE rv;
15087 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
15088 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
15092 if (TARGET_MINIMAL_TOC)
15093 fputs (DOUBLE_INT_ASM_OP, file);
15095 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
15096 fprintf (file, "0x%lx00000000\n", l & 0xffffffff);
15101 if (TARGET_MINIMAL_TOC)
15102 fputs ("\t.long ", file);
15104 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
15105 fprintf (file, "0x%lx\n", l & 0xffffffff);
15109 else if (GET_MODE (x) == VOIDmode
15110 && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
15112 unsigned HOST_WIDE_INT low;
15113 HOST_WIDE_INT high;
15115 if (GET_CODE (x) == CONST_DOUBLE)
15117 low = CONST_DOUBLE_LOW (x);
15118 high = CONST_DOUBLE_HIGH (x);
15121 #if HOST_BITS_PER_WIDE_INT == 32
15124 high = (low & 0x80000000) ? ~0 : 0;
15128 low = INTVAL (x) & 0xffffffff;
15129 high = (HOST_WIDE_INT) INTVAL (x) >> 32;
15133 /* TOC entries are always Pmode-sized, but since this
15134 is a bigendian machine then if we're putting smaller
15135 integer constants in the TOC we have to pad them.
15136 (This is still a win over putting the constants in
15137 a separate constant pool, because then we'd have
15138 to have both a TOC entry _and_ the actual constant.)
15140 For a 32-bit target, CONST_INT values are loaded and shifted
15141 entirely within `low' and can be stored in one TOC entry. */
15143 /* It would be easy to make this work, but it doesn't now. */
15144 gcc_assert (!TARGET_64BIT || POINTER_SIZE >= GET_MODE_BITSIZE (mode));
15146 if (POINTER_SIZE > GET_MODE_BITSIZE (mode))
15148 #if HOST_BITS_PER_WIDE_INT == 32
15149 lshift_double (low, high, POINTER_SIZE - GET_MODE_BITSIZE (mode),
15150 POINTER_SIZE, &low, &high, 0);
15153 low <<= POINTER_SIZE - GET_MODE_BITSIZE (mode);
15154 high = (HOST_WIDE_INT) low >> 32;
15161 if (TARGET_MINIMAL_TOC)
15162 fputs (DOUBLE_INT_ASM_OP, file);
15164 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
15165 (long) high & 0xffffffff, (long) low & 0xffffffff);
15166 fprintf (file, "0x%lx%08lx\n",
15167 (long) high & 0xffffffff, (long) low & 0xffffffff);
15172 if (POINTER_SIZE < GET_MODE_BITSIZE (mode))
15174 if (TARGET_MINIMAL_TOC)
15175 fputs ("\t.long ", file);
15177 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
15178 (long) high & 0xffffffff, (long) low & 0xffffffff);
15179 fprintf (file, "0x%lx,0x%lx\n",
15180 (long) high & 0xffffffff, (long) low & 0xffffffff);
15184 if (TARGET_MINIMAL_TOC)
15185 fputs ("\t.long ", file);
15187 fprintf (file, "\t.tc IS_%lx[TC],", (long) low & 0xffffffff);
15188 fprintf (file, "0x%lx\n", (long) low & 0xffffffff);
15194 if (GET_CODE (x) == CONST)
15196 gcc_assert (GET_CODE (XEXP (x, 0)) == PLUS);
15198 base = XEXP (XEXP (x, 0), 0);
15199 offset = INTVAL (XEXP (XEXP (x, 0), 1));
15202 switch (GET_CODE (base))
15205 name = XSTR (base, 0);
15209 ASM_GENERATE_INTERNAL_LABEL (buf, "L",
15210 CODE_LABEL_NUMBER (XEXP (base, 0)));
15214 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (base));
15218 gcc_unreachable ();
15221 real_name = (*targetm.strip_name_encoding) (name);
15222 if (TARGET_MINIMAL_TOC)
15223 fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
15226 fprintf (file, "\t.tc %s", real_name);
15229 fprintf (file, ".N%d", - offset);
15231 fprintf (file, ".P%d", offset);
15233 fputs ("[TC],", file);
15236 /* Currently C++ toc references to vtables can be emitted before it
15237 is decided whether the vtable is public or private. If this is
15238 the case, then the linker will eventually complain that there is
15239 a TOC reference to an unknown section. Thus, for vtables only,
15240 we emit the TOC reference to reference the symbol and not the
15242 if (VTABLE_NAME_P (name))
15244 RS6000_OUTPUT_BASENAME (file, name);
15246 fprintf (file, "%d", offset);
15247 else if (offset > 0)
15248 fprintf (file, "+%d", offset);
15251 output_addr_const (file, x);
15255 /* Output an assembler pseudo-op to write an ASCII string of N characters
15256 starting at P to FILE.
15258 On the RS/6000, we have to do this using the .byte operation and
15259 write out special characters outside the quoted string.
15260 Also, the assembler is broken; very long strings are truncated,
15261 so we must artificially break them up early. */
15264 output_ascii (FILE *file, const char *p, int n)
15267 int i, count_string;
15268 const char *for_string = "\t.byte \"";
15269 const char *for_decimal = "\t.byte ";
15270 const char *to_close = NULL;
15273 for (i = 0; i < n; i++)
15276 if (c >= ' ' && c < 0177)
15279 fputs (for_string, file);
15282 /* Write two quotes to get one. */
15290 for_decimal = "\"\n\t.byte ";
15294 if (count_string >= 512)
15296 fputs (to_close, file);
15298 for_string = "\t.byte \"";
15299 for_decimal = "\t.byte ";
15307 fputs (for_decimal, file);
15308 fprintf (file, "%d", c);
15310 for_string = "\n\t.byte \"";
15311 for_decimal = ", ";
15317 /* Now close the string if we have written one. Then end the line. */
15319 fputs (to_close, file);
15322 /* Generate a unique section name for FILENAME for a section type
15323 represented by SECTION_DESC. Output goes into BUF.
15325 SECTION_DESC can be any string, as long as it is different for each
15326 possible section type.
15328 We name the section in the same manner as xlc. The name begins with an
15329 underscore followed by the filename (after stripping any leading directory
15330 names) with the last period replaced by the string SECTION_DESC. If
15331 FILENAME does not contain a period, SECTION_DESC is appended to the end of
15335 rs6000_gen_section_name (char **buf, const char *filename,
15336 const char *section_desc)
15338 const char *q, *after_last_slash, *last_period = 0;
15342 after_last_slash = filename;
15343 for (q = filename; *q; q++)
15346 after_last_slash = q + 1;
15347 else if (*q == '.')
15351 len = strlen (after_last_slash) + strlen (section_desc) + 2;
15352 *buf = (char *) xmalloc (len);
15357 for (q = after_last_slash; *q; q++)
15359 if (q == last_period)
15361 strcpy (p, section_desc);
15362 p += strlen (section_desc);
15366 else if (ISALNUM (*q))
15370 if (last_period == 0)
15371 strcpy (p, section_desc);
15376 /* Emit profile function. */
15379 output_profile_hook (int labelno ATTRIBUTE_UNUSED)
15381 /* Non-standard profiling for kernels, which just saves LR then calls
15382 _mcount without worrying about arg saves. The idea is to change
15383 the function prologue as little as possible as it isn't easy to
15384 account for arg save/restore code added just for _mcount. */
15385 if (TARGET_PROFILE_KERNEL)
15388 if (DEFAULT_ABI == ABI_AIX)
15390 #ifndef NO_PROFILE_COUNTERS
15391 # define NO_PROFILE_COUNTERS 0
15393 if (NO_PROFILE_COUNTERS)
15394 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 0);
15398 const char *label_name;
15401 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
15402 label_name = (*targetm.strip_name_encoding) (ggc_strdup (buf));
15403 fun = gen_rtx_SYMBOL_REF (Pmode, label_name);
15405 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 1,
15409 else if (DEFAULT_ABI == ABI_DARWIN)
15411 const char *mcount_name = RS6000_MCOUNT;
15412 int caller_addr_regno = LINK_REGISTER_REGNUM;
15414 /* Be conservative and always set this, at least for now. */
15415 current_function_uses_pic_offset_table = 1;
15418 /* For PIC code, set up a stub and collect the caller's address
15419 from r0, which is where the prologue puts it. */
15420 if (MACHOPIC_INDIRECT
15421 && current_function_uses_pic_offset_table)
15422 caller_addr_regno = 0;
15424 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, mcount_name),
15426 gen_rtx_REG (Pmode, caller_addr_regno), Pmode);
15430 /* Write function profiler code. */
15433 output_function_profiler (FILE *file, int labelno)
15437 switch (DEFAULT_ABI)
15440 gcc_unreachable ();
15445 warning (0, "no profiling of 64-bit code for this ABI");
15448 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
15449 fprintf (file, "\tmflr %s\n", reg_names[0]);
15450 if (NO_PROFILE_COUNTERS)
15452 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
15453 reg_names[0], reg_names[1]);
15455 else if (TARGET_SECURE_PLT && flag_pic)
15457 asm_fprintf (file, "\tbcl 20,31,1f\n1:\n\t{st|stw} %s,4(%s)\n",
15458 reg_names[0], reg_names[1]);
15459 asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
15460 asm_fprintf (file, "\t{cau|addis} %s,%s,",
15461 reg_names[12], reg_names[12]);
15462 assemble_name (file, buf);
15463 asm_fprintf (file, "-1b@ha\n\t{cal|la} %s,", reg_names[0]);
15464 assemble_name (file, buf);
15465 asm_fprintf (file, "-1b@l(%s)\n", reg_names[12]);
15467 else if (flag_pic == 1)
15469 fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file);
15470 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
15471 reg_names[0], reg_names[1]);
15472 asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
15473 asm_fprintf (file, "\t{l|lwz} %s,", reg_names[0]);
15474 assemble_name (file, buf);
15475 asm_fprintf (file, "@got(%s)\n", reg_names[12]);
15477 else if (flag_pic > 1)
15479 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
15480 reg_names[0], reg_names[1]);
15481 /* Now, we need to get the address of the label. */
15482 fputs ("\tbcl 20,31,1f\n\t.long ", file);
15483 assemble_name (file, buf);
15484 fputs ("-.\n1:", file);
15485 asm_fprintf (file, "\tmflr %s\n", reg_names[11]);
15486 asm_fprintf (file, "\t{l|lwz} %s,0(%s)\n",
15487 reg_names[0], reg_names[11]);
15488 asm_fprintf (file, "\t{cax|add} %s,%s,%s\n",
15489 reg_names[0], reg_names[0], reg_names[11]);
15493 asm_fprintf (file, "\t{liu|lis} %s,", reg_names[12]);
15494 assemble_name (file, buf);
15495 fputs ("@ha\n", file);
15496 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
15497 reg_names[0], reg_names[1]);
15498 asm_fprintf (file, "\t{cal|la} %s,", reg_names[0]);
15499 assemble_name (file, buf);
15500 asm_fprintf (file, "@l(%s)\n", reg_names[12]);
15503 /* ABI_V4 saves the static chain reg with ASM_OUTPUT_REG_PUSH. */
15504 fprintf (file, "\tbl %s%s\n",
15505 RS6000_MCOUNT, flag_pic ? "@plt" : "");
15510 if (!TARGET_PROFILE_KERNEL)
15512 /* Don't do anything, done in output_profile_hook (). */
15516 gcc_assert (!TARGET_32BIT);
15518 asm_fprintf (file, "\tmflr %s\n", reg_names[0]);
15519 asm_fprintf (file, "\tstd %s,16(%s)\n", reg_names[0], reg_names[1]);
15521 if (cfun->static_chain_decl != NULL)
15523 asm_fprintf (file, "\tstd %s,24(%s)\n",
15524 reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
15525 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
15526 asm_fprintf (file, "\tld %s,24(%s)\n",
15527 reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
15530 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
15537 /* Power4 load update and store update instructions are cracked into a
15538 load or store and an integer insn which are executed in the same cycle.
15539 Branches have their own dispatch slot which does not count against the
15540 GCC issue rate, but it changes the program flow so there are no other
15541 instructions to issue in this cycle. */
15544 rs6000_variable_issue (FILE *stream ATTRIBUTE_UNUSED,
15545 int verbose ATTRIBUTE_UNUSED,
15546 rtx insn, int more)
15548 if (GET_CODE (PATTERN (insn)) == USE
15549 || GET_CODE (PATTERN (insn)) == CLOBBER)
15552 if (rs6000_sched_groups)
15554 if (is_microcoded_insn (insn))
15556 else if (is_cracked_insn (insn))
15557 return more > 2 ? more - 2 : 0;
15563 /* Adjust the cost of a scheduling dependency. Return the new cost of
15564 a dependency LINK or INSN on DEP_INSN. COST is the current cost. */
15567 rs6000_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost)
15569 if (! recog_memoized (insn))
15572 if (REG_NOTE_KIND (link) != 0)
15575 if (REG_NOTE_KIND (link) == 0)
15577 /* Data dependency; DEP_INSN writes a register that INSN reads
15578 some cycles later. */
15580 /* Separate a load from a narrower, dependent store. */
15581 if (rs6000_sched_groups
15582 && GET_CODE (PATTERN (insn)) == SET
15583 && GET_CODE (PATTERN (dep_insn)) == SET
15584 && GET_CODE (XEXP (PATTERN (insn), 1)) == MEM
15585 && GET_CODE (XEXP (PATTERN (dep_insn), 0)) == MEM
15586 && (GET_MODE_SIZE (GET_MODE (XEXP (PATTERN (insn), 1)))
15587 > GET_MODE_SIZE (GET_MODE (XEXP (PATTERN (dep_insn), 0)))))
15590 switch (get_attr_type (insn))
15593 /* Tell the first scheduling pass about the latency between
15594 a mtctr and bctr (and mtlr and br/blr). The first
15595 scheduling pass will not know about this latency since
15596 the mtctr instruction, which has the latency associated
15597 to it, will be generated by reload. */
15598 return TARGET_POWER ? 5 : 4;
15600 /* Leave some extra cycles between a compare and its
15601 dependent branch, to inhibit expensive mispredicts. */
15602 if ((rs6000_cpu_attr == CPU_PPC603
15603 || rs6000_cpu_attr == CPU_PPC604
15604 || rs6000_cpu_attr == CPU_PPC604E
15605 || rs6000_cpu_attr == CPU_PPC620
15606 || rs6000_cpu_attr == CPU_PPC630
15607 || rs6000_cpu_attr == CPU_PPC750
15608 || rs6000_cpu_attr == CPU_PPC7400
15609 || rs6000_cpu_attr == CPU_PPC7450
15610 || rs6000_cpu_attr == CPU_POWER4
15611 || rs6000_cpu_attr == CPU_POWER5)
15612 && recog_memoized (dep_insn)
15613 && (INSN_CODE (dep_insn) >= 0)
15614 && (get_attr_type (dep_insn) == TYPE_CMP
15615 || get_attr_type (dep_insn) == TYPE_COMPARE
15616 || get_attr_type (dep_insn) == TYPE_DELAYED_COMPARE
15617 || get_attr_type (dep_insn) == TYPE_IMUL_COMPARE
15618 || get_attr_type (dep_insn) == TYPE_LMUL_COMPARE
15619 || get_attr_type (dep_insn) == TYPE_FPCOMPARE
15620 || get_attr_type (dep_insn) == TYPE_CR_LOGICAL
15621 || get_attr_type (dep_insn) == TYPE_DELAYED_CR))
15626 /* Fall out to return default cost. */
15632 /* The function returns a true if INSN is microcoded.
15633 Return false otherwise. */
15636 is_microcoded_insn (rtx insn)
15638 if (!insn || !INSN_P (insn)
15639 || GET_CODE (PATTERN (insn)) == USE
15640 || GET_CODE (PATTERN (insn)) == CLOBBER)
15643 if (rs6000_sched_groups)
15645 enum attr_type type = get_attr_type (insn);
15646 if (type == TYPE_LOAD_EXT_U
15647 || type == TYPE_LOAD_EXT_UX
15648 || type == TYPE_LOAD_UX
15649 || type == TYPE_STORE_UX
15650 || type == TYPE_MFCR)
15657 /* The function returns a nonzero value if INSN can be scheduled only
15658 as the first insn in a dispatch group ("dispatch-slot restricted").
15659 In this case, the returned value indicates how many dispatch slots
15660 the insn occupies (at the beginning of the group).
15661 Return 0 otherwise. */
15664 is_dispatch_slot_restricted (rtx insn)
15666 enum attr_type type;
15668 if (!rs6000_sched_groups)
15672 || insn == NULL_RTX
15673 || GET_CODE (insn) == NOTE
15674 || GET_CODE (PATTERN (insn)) == USE
15675 || GET_CODE (PATTERN (insn)) == CLOBBER)
15678 type = get_attr_type (insn);
15685 case TYPE_DELAYED_CR:
15686 case TYPE_CR_LOGICAL:
15694 if (rs6000_cpu == PROCESSOR_POWER5
15695 && is_cracked_insn (insn))
15701 /* The function returns true if INSN is cracked into 2 instructions
15702 by the processor (and therefore occupies 2 issue slots). */
15705 is_cracked_insn (rtx insn)
15707 if (!insn || !INSN_P (insn)
15708 || GET_CODE (PATTERN (insn)) == USE
15709 || GET_CODE (PATTERN (insn)) == CLOBBER)
15712 if (rs6000_sched_groups)
15714 enum attr_type type = get_attr_type (insn);
15715 if (type == TYPE_LOAD_U || type == TYPE_STORE_U
15716 || type == TYPE_FPLOAD_U || type == TYPE_FPSTORE_U
15717 || type == TYPE_FPLOAD_UX || type == TYPE_FPSTORE_UX
15718 || type == TYPE_LOAD_EXT || type == TYPE_DELAYED_CR
15719 || type == TYPE_COMPARE || type == TYPE_DELAYED_COMPARE
15720 || type == TYPE_IMUL_COMPARE || type == TYPE_LMUL_COMPARE
15721 || type == TYPE_IDIV || type == TYPE_LDIV
15722 || type == TYPE_INSERT_WORD)
15729 /* The function returns true if INSN can be issued only from
15730 the branch slot. */
15733 is_branch_slot_insn (rtx insn)
15735 if (!insn || !INSN_P (insn)
15736 || GET_CODE (PATTERN (insn)) == USE
15737 || GET_CODE (PATTERN (insn)) == CLOBBER)
15740 if (rs6000_sched_groups)
15742 enum attr_type type = get_attr_type (insn);
15743 if (type == TYPE_BRANCH || type == TYPE_JMPREG)
15751 /* A C statement (sans semicolon) to update the integer scheduling
15752 priority INSN_PRIORITY (INSN). Increase the priority to execute the
15753 INSN earlier, reduce the priority to execute INSN later. Do not
15754 define this macro if you do not need to adjust the scheduling
15755 priorities of insns. */
15758 rs6000_adjust_priority (rtx insn ATTRIBUTE_UNUSED, int priority)
15760 /* On machines (like the 750) which have asymmetric integer units,
15761 where one integer unit can do multiply and divides and the other
15762 can't, reduce the priority of multiply/divide so it is scheduled
15763 before other integer operations. */
15766 if (! INSN_P (insn))
15769 if (GET_CODE (PATTERN (insn)) == USE)
15772 switch (rs6000_cpu_attr) {
15774 switch (get_attr_type (insn))
15781 fprintf (stderr, "priority was %#x (%d) before adjustment\n",
15782 priority, priority);
15783 if (priority >= 0 && priority < 0x01000000)
15790 if (is_dispatch_slot_restricted (insn)
15791 && reload_completed
15792 && current_sched_info->sched_max_insns_priority
15793 && rs6000_sched_restricted_insns_priority)
15796 /* Prioritize insns that can be dispatched only in the first
15798 if (rs6000_sched_restricted_insns_priority == 1)
15799 /* Attach highest priority to insn. This means that in
15800 haifa-sched.c:ready_sort(), dispatch-slot restriction considerations
15801 precede 'priority' (critical path) considerations. */
15802 return current_sched_info->sched_max_insns_priority;
15803 else if (rs6000_sched_restricted_insns_priority == 2)
15804 /* Increase priority of insn by a minimal amount. This means that in
15805 haifa-sched.c:ready_sort(), only 'priority' (critical path)
15806 considerations precede dispatch-slot restriction considerations. */
15807 return (priority + 1);
15813 /* Return how many instructions the machine can issue per cycle. */
15816 rs6000_issue_rate (void)
15818 /* Use issue rate of 1 for first scheduling pass to decrease degradation. */
15819 if (!reload_completed)
15822 switch (rs6000_cpu_attr) {
15823 case CPU_RIOS1: /* ? */
15825 case CPU_PPC601: /* ? */
15848 /* Return how many instructions to look ahead for better insn
15852 rs6000_use_sched_lookahead (void)
15854 if (rs6000_cpu_attr == CPU_PPC8540)
15859 /* Determine is PAT refers to memory. */
15862 is_mem_ref (rtx pat)
15868 if (GET_CODE (pat) == MEM)
15871 /* Recursively process the pattern. */
15872 fmt = GET_RTX_FORMAT (GET_CODE (pat));
15874 for (i = GET_RTX_LENGTH (GET_CODE (pat)) - 1; i >= 0 && !ret; i--)
15877 ret |= is_mem_ref (XEXP (pat, i));
15878 else if (fmt[i] == 'E')
15879 for (j = XVECLEN (pat, i) - 1; j >= 0; j--)
15880 ret |= is_mem_ref (XVECEXP (pat, i, j));
15886 /* Determine if PAT is a PATTERN of a load insn. */
15889 is_load_insn1 (rtx pat)
15891 if (!pat || pat == NULL_RTX)
15894 if (GET_CODE (pat) == SET)
15895 return is_mem_ref (SET_SRC (pat));
15897 if (GET_CODE (pat) == PARALLEL)
15901 for (i = 0; i < XVECLEN (pat, 0); i++)
15902 if (is_load_insn1 (XVECEXP (pat, 0, i)))
15909 /* Determine if INSN loads from memory. */
15912 is_load_insn (rtx insn)
15914 if (!insn || !INSN_P (insn))
15917 if (GET_CODE (insn) == CALL_INSN)
15920 return is_load_insn1 (PATTERN (insn));
15923 /* Determine if PAT is a PATTERN of a store insn. */
15926 is_store_insn1 (rtx pat)
15928 if (!pat || pat == NULL_RTX)
15931 if (GET_CODE (pat) == SET)
15932 return is_mem_ref (SET_DEST (pat));
15934 if (GET_CODE (pat) == PARALLEL)
15938 for (i = 0; i < XVECLEN (pat, 0); i++)
15939 if (is_store_insn1 (XVECEXP (pat, 0, i)))
15946 /* Determine if INSN stores to memory. */
15949 is_store_insn (rtx insn)
15951 if (!insn || !INSN_P (insn))
15954 return is_store_insn1 (PATTERN (insn));
15957 /* Returns whether the dependence between INSN and NEXT is considered
15958 costly by the given target. */
15961 rs6000_is_costly_dependence (rtx insn, rtx next, rtx link, int cost,
15964 /* If the flag is not enabled - no dependence is considered costly;
15965 allow all dependent insns in the same group.
15966 This is the most aggressive option. */
15967 if (rs6000_sched_costly_dep == no_dep_costly)
15970 /* If the flag is set to 1 - a dependence is always considered costly;
15971 do not allow dependent instructions in the same group.
15972 This is the most conservative option. */
15973 if (rs6000_sched_costly_dep == all_deps_costly)
15976 if (rs6000_sched_costly_dep == store_to_load_dep_costly
15977 && is_load_insn (next)
15978 && is_store_insn (insn))
15979 /* Prevent load after store in the same group. */
15982 if (rs6000_sched_costly_dep == true_store_to_load_dep_costly
15983 && is_load_insn (next)
15984 && is_store_insn (insn)
15985 && (!link || (int) REG_NOTE_KIND (link) == 0))
15986 /* Prevent load after store in the same group if it is a true
15990 /* The flag is set to X; dependences with latency >= X are considered costly,
15991 and will not be scheduled in the same group. */
15992 if (rs6000_sched_costly_dep <= max_dep_latency
15993 && ((cost - distance) >= (int)rs6000_sched_costly_dep))
15999 /* Return the next insn after INSN that is found before TAIL is reached,
16000 skipping any "non-active" insns - insns that will not actually occupy
16001 an issue slot. Return NULL_RTX if such an insn is not found. */
16004 get_next_active_insn (rtx insn, rtx tail)
16008 if (!insn || insn == tail)
16011 next_insn = NEXT_INSN (insn);
16014 && next_insn != tail
16015 && (GET_CODE (next_insn) == NOTE
16016 || GET_CODE (PATTERN (next_insn)) == USE
16017 || GET_CODE (PATTERN (next_insn)) == CLOBBER))
16019 next_insn = NEXT_INSN (next_insn);
16022 if (!next_insn || next_insn == tail)
16028 /* Return whether the presence of INSN causes a dispatch group termination
16029 of group WHICH_GROUP.
16031 If WHICH_GROUP == current_group, this function will return true if INSN
16032 causes the termination of the current group (i.e, the dispatch group to
16033 which INSN belongs). This means that INSN will be the last insn in the
16034 group it belongs to.
16036 If WHICH_GROUP == previous_group, this function will return true if INSN
16037 causes the termination of the previous group (i.e, the dispatch group that
16038 precedes the group to which INSN belongs). This means that INSN will be
16039 the first insn in the group it belongs to). */
16042 insn_terminates_group_p (rtx insn, enum group_termination which_group)
16044 enum attr_type type;
16049 type = get_attr_type (insn);
16051 if (is_microcoded_insn (insn))
16054 if (which_group == current_group)
16056 if (is_branch_slot_insn (insn))
16060 else if (which_group == previous_group)
16062 if (is_dispatch_slot_restricted (insn))
16070 /* Return true if it is recommended to keep NEXT_INSN "far" (in a separate
16071 dispatch group) from the insns in GROUP_INSNS. Return false otherwise. */
16074 is_costly_group (rtx *group_insns, rtx next_insn)
16079 int issue_rate = rs6000_issue_rate ();
16081 for (i = 0; i < issue_rate; i++)
16083 rtx insn = group_insns[i];
16086 for (link = INSN_DEPEND (insn); link != 0; link = XEXP (link, 1))
16088 rtx next = XEXP (link, 0);
16089 if (next == next_insn)
16091 cost = insn_cost (insn, link, next_insn);
16092 if (rs6000_is_costly_dependence (insn, next_insn, link, cost, 0))
16101 /* Utility of the function redefine_groups.
16102 Check if it is too costly to schedule NEXT_INSN together with GROUP_INSNS
16103 in the same dispatch group. If so, insert nops before NEXT_INSN, in order
16104 to keep it "far" (in a separate group) from GROUP_INSNS, following
16105 one of the following schemes, depending on the value of the flag
16106 -minsert_sched_nops = X:
16107 (1) X == sched_finish_regroup_exact: insert exactly as many nops as needed
16108 in order to force NEXT_INSN into a separate group.
16109 (2) X < sched_finish_regroup_exact: insert exactly X nops.
16110 GROUP_END, CAN_ISSUE_MORE and GROUP_COUNT record the state after nop
16111 insertion (has a group just ended, how many vacant issue slots remain in the
16112 last group, and how many dispatch groups were encountered so far). */
16115 force_new_group (int sched_verbose, FILE *dump, rtx *group_insns,
16116 rtx next_insn, bool *group_end, int can_issue_more,
16121 int issue_rate = rs6000_issue_rate ();
16122 bool end = *group_end;
16125 if (next_insn == NULL_RTX)
16126 return can_issue_more;
16128 if (rs6000_sched_insert_nops > sched_finish_regroup_exact)
16129 return can_issue_more;
16131 force = is_costly_group (group_insns, next_insn);
16133 return can_issue_more;
16135 if (sched_verbose > 6)
16136 fprintf (dump,"force: group count = %d, can_issue_more = %d\n",
16137 *group_count ,can_issue_more);
16139 if (rs6000_sched_insert_nops == sched_finish_regroup_exact)
16142 can_issue_more = 0;
16144 /* Since only a branch can be issued in the last issue_slot, it is
16145 sufficient to insert 'can_issue_more - 1' nops if next_insn is not
16146 a branch. If next_insn is a branch, we insert 'can_issue_more' nops;
16147 in this case the last nop will start a new group and the branch
16148 will be forced to the new group. */
16149 if (can_issue_more && !is_branch_slot_insn (next_insn))
16152 while (can_issue_more > 0)
16155 emit_insn_before (nop, next_insn);
16163 if (rs6000_sched_insert_nops < sched_finish_regroup_exact)
16165 int n_nops = rs6000_sched_insert_nops;
16167 /* Nops can't be issued from the branch slot, so the effective
16168 issue_rate for nops is 'issue_rate - 1'. */
16169 if (can_issue_more == 0)
16170 can_issue_more = issue_rate;
16172 if (can_issue_more == 0)
16174 can_issue_more = issue_rate - 1;
16177 for (i = 0; i < issue_rate; i++)
16179 group_insns[i] = 0;
16186 emit_insn_before (nop, next_insn);
16187 if (can_issue_more == issue_rate - 1) /* new group begins */
16190 if (can_issue_more == 0)
16192 can_issue_more = issue_rate - 1;
16195 for (i = 0; i < issue_rate; i++)
16197 group_insns[i] = 0;
16203 /* Scale back relative to 'issue_rate' (instead of 'issue_rate - 1'). */
16206 /* Is next_insn going to start a new group? */
16209 || (can_issue_more == 1 && !is_branch_slot_insn (next_insn))
16210 || (can_issue_more <= 2 && is_cracked_insn (next_insn))
16211 || (can_issue_more < issue_rate &&
16212 insn_terminates_group_p (next_insn, previous_group)));
16213 if (*group_end && end)
16216 if (sched_verbose > 6)
16217 fprintf (dump, "done force: group count = %d, can_issue_more = %d\n",
16218 *group_count, can_issue_more);
16219 return can_issue_more;
16222 return can_issue_more;
16225 /* This function tries to synch the dispatch groups that the compiler "sees"
16226 with the dispatch groups that the processor dispatcher is expected to
16227 form in practice. It tries to achieve this synchronization by forcing the
16228 estimated processor grouping on the compiler (as opposed to the function
16229 'pad_goups' which tries to force the scheduler's grouping on the processor).
16231 The function scans the insn sequence between PREV_HEAD_INSN and TAIL and
16232 examines the (estimated) dispatch groups that will be formed by the processor
16233 dispatcher. It marks these group boundaries to reflect the estimated
16234 processor grouping, overriding the grouping that the scheduler had marked.
16235 Depending on the value of the flag '-minsert-sched-nops' this function can
16236 force certain insns into separate groups or force a certain distance between
16237 them by inserting nops, for example, if there exists a "costly dependence"
16240 The function estimates the group boundaries that the processor will form as
16241 follows: It keeps track of how many vacant issue slots are available after
16242 each insn. A subsequent insn will start a new group if one of the following
16244 - no more vacant issue slots remain in the current dispatch group.
16245 - only the last issue slot, which is the branch slot, is vacant, but the next
16246 insn is not a branch.
16247 - only the last 2 or less issue slots, including the branch slot, are vacant,
16248 which means that a cracked insn (which occupies two issue slots) can't be
16249 issued in this group.
16250 - less than 'issue_rate' slots are vacant, and the next insn always needs to
16251 start a new group. */
16254 redefine_groups (FILE *dump, int sched_verbose, rtx prev_head_insn, rtx tail)
16256 rtx insn, next_insn;
16258 int can_issue_more;
16261 int group_count = 0;
16265 issue_rate = rs6000_issue_rate ();
16266 group_insns = alloca (issue_rate * sizeof (rtx));
16267 for (i = 0; i < issue_rate; i++)
16269 group_insns[i] = 0;
16271 can_issue_more = issue_rate;
16273 insn = get_next_active_insn (prev_head_insn, tail);
16276 while (insn != NULL_RTX)
16278 slot = (issue_rate - can_issue_more);
16279 group_insns[slot] = insn;
16281 rs6000_variable_issue (dump, sched_verbose, insn, can_issue_more);
16282 if (insn_terminates_group_p (insn, current_group))
16283 can_issue_more = 0;
16285 next_insn = get_next_active_insn (insn, tail);
16286 if (next_insn == NULL_RTX)
16287 return group_count + 1;
16289 /* Is next_insn going to start a new group? */
16291 = (can_issue_more == 0
16292 || (can_issue_more == 1 && !is_branch_slot_insn (next_insn))
16293 || (can_issue_more <= 2 && is_cracked_insn (next_insn))
16294 || (can_issue_more < issue_rate &&
16295 insn_terminates_group_p (next_insn, previous_group)));
16297 can_issue_more = force_new_group (sched_verbose, dump, group_insns,
16298 next_insn, &group_end, can_issue_more,
16304 can_issue_more = 0;
16305 for (i = 0; i < issue_rate; i++)
16307 group_insns[i] = 0;
16311 if (GET_MODE (next_insn) == TImode && can_issue_more)
16312 PUT_MODE (next_insn, VOIDmode);
16313 else if (!can_issue_more && GET_MODE (next_insn) != TImode)
16314 PUT_MODE (next_insn, TImode);
16317 if (can_issue_more == 0)
16318 can_issue_more = issue_rate;
16321 return group_count;
16324 /* Scan the insn sequence between PREV_HEAD_INSN and TAIL and examine the
16325 dispatch group boundaries that the scheduler had marked. Pad with nops
16326 any dispatch groups which have vacant issue slots, in order to force the
16327 scheduler's grouping on the processor dispatcher. The function
16328 returns the number of dispatch groups found. */
16331 pad_groups (FILE *dump, int sched_verbose, rtx prev_head_insn, rtx tail)
16333 rtx insn, next_insn;
16336 int can_issue_more;
16338 int group_count = 0;
16340 /* Initialize issue_rate. */
16341 issue_rate = rs6000_issue_rate ();
16342 can_issue_more = issue_rate;
16344 insn = get_next_active_insn (prev_head_insn, tail);
16345 next_insn = get_next_active_insn (insn, tail);
16347 while (insn != NULL_RTX)
16350 rs6000_variable_issue (dump, sched_verbose, insn, can_issue_more);
16352 group_end = (next_insn == NULL_RTX || GET_MODE (next_insn) == TImode);
16354 if (next_insn == NULL_RTX)
16359 /* If the scheduler had marked group termination at this location
16360 (between insn and next_indn), and neither insn nor next_insn will
16361 force group termination, pad the group with nops to force group
16364 && (rs6000_sched_insert_nops == sched_finish_pad_groups)
16365 && !insn_terminates_group_p (insn, current_group)
16366 && !insn_terminates_group_p (next_insn, previous_group))
16368 if (!is_branch_slot_insn (next_insn))
16371 while (can_issue_more)
16374 emit_insn_before (nop, next_insn);
16379 can_issue_more = issue_rate;
16384 next_insn = get_next_active_insn (insn, tail);
16387 return group_count;
16390 /* The following function is called at the end of scheduling BB.
16391 After reload, it inserts nops at insn group bundling. */
16394 rs6000_sched_finish (FILE *dump, int sched_verbose)
16399 fprintf (dump, "=== Finishing schedule.\n");
16401 if (reload_completed && rs6000_sched_groups)
16403 if (rs6000_sched_insert_nops == sched_finish_none)
16406 if (rs6000_sched_insert_nops == sched_finish_pad_groups)
16407 n_groups = pad_groups (dump, sched_verbose,
16408 current_sched_info->prev_head,
16409 current_sched_info->next_tail);
16411 n_groups = redefine_groups (dump, sched_verbose,
16412 current_sched_info->prev_head,
16413 current_sched_info->next_tail);
16415 if (sched_verbose >= 6)
16417 fprintf (dump, "ngroups = %d\n", n_groups);
16418 print_rtl (dump, current_sched_info->prev_head);
16419 fprintf (dump, "Done finish_sched\n");
16424 /* Length in units of the trampoline for entering a nested function. */
16427 rs6000_trampoline_size (void)
16431 switch (DEFAULT_ABI)
16434 gcc_unreachable ();
16437 ret = (TARGET_32BIT) ? 12 : 24;
16442 ret = (TARGET_32BIT) ? 40 : 48;
16449 /* Emit RTL insns to initialize the variable parts of a trampoline.
16450 FNADDR is an RTX for the address of the function's pure code.
16451 CXT is an RTX for the static chain value for the function. */
16454 rs6000_initialize_trampoline (rtx addr, rtx fnaddr, rtx cxt)
16456 enum machine_mode pmode = Pmode;
16457 int regsize = (TARGET_32BIT) ? 4 : 8;
16458 rtx ctx_reg = force_reg (pmode, cxt);
16460 switch (DEFAULT_ABI)
16463 gcc_unreachable ();
16465 /* Macros to shorten the code expansions below. */
16466 #define MEM_DEREF(addr) gen_rtx_MEM (pmode, memory_address (pmode, addr))
16467 #define MEM_PLUS(addr,offset) \
16468 gen_rtx_MEM (pmode, memory_address (pmode, plus_constant (addr, offset)))
16470 /* Under AIX, just build the 3 word function descriptor */
16473 rtx fn_reg = gen_reg_rtx (pmode);
16474 rtx toc_reg = gen_reg_rtx (pmode);
16475 emit_move_insn (fn_reg, MEM_DEREF (fnaddr));
16476 emit_move_insn (toc_reg, MEM_PLUS (fnaddr, regsize));
16477 emit_move_insn (MEM_DEREF (addr), fn_reg);
16478 emit_move_insn (MEM_PLUS (addr, regsize), toc_reg);
16479 emit_move_insn (MEM_PLUS (addr, 2*regsize), ctx_reg);
16483 /* Under V.4/eabi/darwin, __trampoline_setup does the real work. */
16486 emit_library_call (gen_rtx_SYMBOL_REF (SImode, "__trampoline_setup"),
16487 FALSE, VOIDmode, 4,
16489 GEN_INT (rs6000_trampoline_size ()), SImode,
16499 /* Table of valid machine attributes. */
16501 const struct attribute_spec rs6000_attribute_table[] =
16503 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
16504 { "altivec", 1, 1, false, true, false, rs6000_handle_altivec_attribute },
16505 { "longcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
16506 { "shortcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
16507 #ifdef SUBTARGET_ATTRIBUTE_TABLE
16508 SUBTARGET_ATTRIBUTE_TABLE,
16510 { NULL, 0, 0, false, false, false, NULL }
16513 /* Handle the "altivec" attribute. The attribute may have
16514 arguments as follows:
16516 __attribute__((altivec(vector__)))
16517 __attribute__((altivec(pixel__))) (always followed by 'unsigned short')
16518 __attribute__((altivec(bool__))) (always followed by 'unsigned')
16520 and may appear more than once (e.g., 'vector bool char') in a
16521 given declaration. */
16524 rs6000_handle_altivec_attribute (tree *node,
16525 tree name ATTRIBUTE_UNUSED,
16527 int flags ATTRIBUTE_UNUSED,
16528 bool *no_add_attrs)
16530 tree type = *node, result = NULL_TREE;
16531 enum machine_mode mode;
16534 = ((args && TREE_CODE (args) == TREE_LIST && TREE_VALUE (args)
16535 && TREE_CODE (TREE_VALUE (args)) == IDENTIFIER_NODE)
16536 ? *IDENTIFIER_POINTER (TREE_VALUE (args))
16539 while (POINTER_TYPE_P (type)
16540 || TREE_CODE (type) == FUNCTION_TYPE
16541 || TREE_CODE (type) == METHOD_TYPE
16542 || TREE_CODE (type) == ARRAY_TYPE)
16543 type = TREE_TYPE (type);
16545 mode = TYPE_MODE (type);
16547 /* Check for invalid AltiVec type qualifiers. */
16548 if (type == long_unsigned_type_node || type == long_integer_type_node)
16551 error ("use of %<long%> in AltiVec types is invalid for 64-bit code");
16552 else if (rs6000_warn_altivec_long)
16553 warning (0, "use of %<long%> in AltiVec types is deprecated; use %<int%>");
16555 else if (type == long_long_unsigned_type_node
16556 || type == long_long_integer_type_node)
16557 error ("use of %<long long%> in AltiVec types is invalid");
16558 else if (type == double_type_node)
16559 error ("use of %<double%> in AltiVec types is invalid");
16560 else if (type == long_double_type_node)
16561 error ("use of %<long double%> in AltiVec types is invalid");
16562 else if (type == boolean_type_node)
16563 error ("use of boolean types in AltiVec types is invalid");
16564 else if (TREE_CODE (type) == COMPLEX_TYPE)
16565 error ("use of %<complex%> in AltiVec types is invalid");
16567 switch (altivec_type)
16570 unsigned_p = TYPE_UNSIGNED (type);
16574 result = (unsigned_p ? unsigned_V4SI_type_node : V4SI_type_node);
16577 result = (unsigned_p ? unsigned_V8HI_type_node : V8HI_type_node);
16580 result = (unsigned_p ? unsigned_V16QI_type_node : V16QI_type_node);
16582 case SFmode: result = V4SF_type_node; break;
16583 /* If the user says 'vector int bool', we may be handed the 'bool'
16584 attribute _before_ the 'vector' attribute, and so select the
16585 proper type in the 'b' case below. */
16586 case V4SImode: case V8HImode: case V16QImode: case V4SFmode:
16594 case SImode: case V4SImode: result = bool_V4SI_type_node; break;
16595 case HImode: case V8HImode: result = bool_V8HI_type_node; break;
16596 case QImode: case V16QImode: result = bool_V16QI_type_node;
16603 case V8HImode: result = pixel_V8HI_type_node;
16609 if (result && result != type && TYPE_READONLY (type))
16610 result = build_qualified_type (result, TYPE_QUAL_CONST);
16612 *no_add_attrs = true; /* No need to hang on to the attribute. */
16615 *node = reconstruct_complex_type (*node, result);
16620 /* AltiVec defines four built-in scalar types that serve as vector
16621 elements; we must teach the compiler how to mangle them. */
16623 static const char *
16624 rs6000_mangle_fundamental_type (tree type)
16626 if (type == bool_char_type_node) return "U6__boolc";
16627 if (type == bool_short_type_node) return "U6__bools";
16628 if (type == pixel_type_node) return "u7__pixel";
16629 if (type == bool_int_type_node) return "U6__booli";
16631 /* For all other types, use normal C++ mangling. */
16635 /* Handle a "longcall" or "shortcall" attribute; arguments as in
16636 struct attribute_spec.handler. */
16639 rs6000_handle_longcall_attribute (tree *node, tree name,
16640 tree args ATTRIBUTE_UNUSED,
16641 int flags ATTRIBUTE_UNUSED,
16642 bool *no_add_attrs)
16644 if (TREE_CODE (*node) != FUNCTION_TYPE
16645 && TREE_CODE (*node) != FIELD_DECL
16646 && TREE_CODE (*node) != TYPE_DECL)
16648 warning (OPT_Wattributes, "%qs attribute only applies to functions",
16649 IDENTIFIER_POINTER (name));
16650 *no_add_attrs = true;
16656 /* Set longcall attributes on all functions declared when
16657 rs6000_default_long_calls is true. */
16659 rs6000_set_default_type_attributes (tree type)
16661 if (rs6000_default_long_calls
16662 && (TREE_CODE (type) == FUNCTION_TYPE
16663 || TREE_CODE (type) == METHOD_TYPE))
16664 TYPE_ATTRIBUTES (type) = tree_cons (get_identifier ("longcall"),
16666 TYPE_ATTRIBUTES (type));
16669 /* Return a reference suitable for calling a function with the
16670 longcall attribute. */
16673 rs6000_longcall_ref (rtx call_ref)
16675 const char *call_name;
16678 if (GET_CODE (call_ref) != SYMBOL_REF)
16681 /* System V adds '.' to the internal name, so skip them. */
16682 call_name = XSTR (call_ref, 0);
16683 if (*call_name == '.')
16685 while (*call_name == '.')
16688 node = get_identifier (call_name);
16689 call_ref = gen_rtx_SYMBOL_REF (VOIDmode, IDENTIFIER_POINTER (node));
16692 return force_reg (Pmode, call_ref);
16695 #ifdef USING_ELFOS_H
16697 /* A C statement or statements to switch to the appropriate section
16698 for output of RTX in mode MODE. You can assume that RTX is some
16699 kind of constant in RTL. The argument MODE is redundant except in
16700 the case of a `const_int' rtx. Select the section by calling
16701 `text_section' or one of the alternatives for other sections.
16703 Do not define this macro if you put all constants in the read-only
16707 rs6000_elf_select_rtx_section (enum machine_mode mode, rtx x,
16708 unsigned HOST_WIDE_INT align)
16710 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
16713 default_elf_select_rtx_section (mode, x, align);
16716 /* A C statement or statements to switch to the appropriate
16717 section for output of DECL. DECL is either a `VAR_DECL' node
16718 or a constant of some sort. RELOC indicates whether forming
16719 the initial value of DECL requires link-time relocations. */
16722 rs6000_elf_select_section (tree decl, int reloc,
16723 unsigned HOST_WIDE_INT align)
16725 /* Pretend that we're always building for a shared library when
16726 ABI_AIX, because otherwise we end up with dynamic relocations
16727 in read-only sections. This happens for function pointers,
16728 references to vtables in typeinfo, and probably other cases. */
16729 default_elf_select_section_1 (decl, reloc, align,
16730 flag_pic || DEFAULT_ABI == ABI_AIX);
16733 /* A C statement to build up a unique section name, expressed as a
16734 STRING_CST node, and assign it to DECL_SECTION_NAME (decl).
16735 RELOC indicates whether the initial value of EXP requires
16736 link-time relocations. If you do not define this macro, GCC will use
16737 the symbol name prefixed by `.' as the section name. Note - this
16738 macro can now be called for uninitialized data items as well as
16739 initialized data and functions. */
16742 rs6000_elf_unique_section (tree decl, int reloc)
16744 /* As above, pretend that we're always building for a shared library
16745 when ABI_AIX, to avoid dynamic relocations in read-only sections. */
16746 default_unique_section_1 (decl, reloc,
16747 flag_pic || DEFAULT_ABI == ABI_AIX);
16750 /* For a SYMBOL_REF, set generic flags and then perform some
16751 target-specific processing.
16753 When the AIX ABI is requested on a non-AIX system, replace the
16754 function name with the real name (with a leading .) rather than the
16755 function descriptor name. This saves a lot of overriding code to
16756 read the prefixes. */
16759 rs6000_elf_encode_section_info (tree decl, rtx rtl, int first)
16761 default_encode_section_info (decl, rtl, first);
16764 && TREE_CODE (decl) == FUNCTION_DECL
16766 && DEFAULT_ABI == ABI_AIX)
16768 rtx sym_ref = XEXP (rtl, 0);
16769 size_t len = strlen (XSTR (sym_ref, 0));
16770 char *str = alloca (len + 2);
16772 memcpy (str + 1, XSTR (sym_ref, 0), len + 1);
16773 XSTR (sym_ref, 0) = ggc_alloc_string (str, len + 1);
16778 rs6000_elf_in_small_data_p (tree decl)
16780 if (rs6000_sdata == SDATA_NONE)
16783 /* We want to merge strings, so we never consider them small data. */
16784 if (TREE_CODE (decl) == STRING_CST)
16787 /* Functions are never in the small data area. */
16788 if (TREE_CODE (decl) == FUNCTION_DECL)
16791 if (TREE_CODE (decl) == VAR_DECL && DECL_SECTION_NAME (decl))
16793 const char *section = TREE_STRING_POINTER (DECL_SECTION_NAME (decl));
16794 if (strcmp (section, ".sdata") == 0
16795 || strcmp (section, ".sdata2") == 0
16796 || strcmp (section, ".sbss") == 0
16797 || strcmp (section, ".sbss2") == 0
16798 || strcmp (section, ".PPC.EMB.sdata0") == 0
16799 || strcmp (section, ".PPC.EMB.sbss0") == 0)
16804 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
16807 && (unsigned HOST_WIDE_INT) size <= g_switch_value
16808 /* If it's not public, and we're not going to reference it there,
16809 there's no need to put it in the small data section. */
16810 && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)))
16817 #endif /* USING_ELFOS_H */
16820 /* Return a REG that occurs in ADDR with coefficient 1.
16821 ADDR can be effectively incremented by incrementing REG.
16823 r0 is special and we must not select it as an address
16824 register by this routine since our caller will try to
16825 increment the returned register via an "la" instruction. */
16828 find_addr_reg (rtx addr)
16830 while (GET_CODE (addr) == PLUS)
16832 if (GET_CODE (XEXP (addr, 0)) == REG
16833 && REGNO (XEXP (addr, 0)) != 0)
16834 addr = XEXP (addr, 0);
16835 else if (GET_CODE (XEXP (addr, 1)) == REG
16836 && REGNO (XEXP (addr, 1)) != 0)
16837 addr = XEXP (addr, 1);
16838 else if (CONSTANT_P (XEXP (addr, 0)))
16839 addr = XEXP (addr, 1);
16840 else if (CONSTANT_P (XEXP (addr, 1)))
16841 addr = XEXP (addr, 0);
16843 gcc_unreachable ();
16845 gcc_assert (GET_CODE (addr) == REG && REGNO (addr) != 0);
16850 rs6000_fatal_bad_address (rtx op)
16852 fatal_insn ("bad address", op);
16857 static tree branch_island_list = 0;
16859 /* Remember to generate a branch island for far calls to the given
16863 add_compiler_branch_island (tree label_name, tree function_name,
16866 tree branch_island = build_tree_list (function_name, label_name);
16867 TREE_TYPE (branch_island) = build_int_cst (NULL_TREE, line_number);
16868 TREE_CHAIN (branch_island) = branch_island_list;
16869 branch_island_list = branch_island;
16872 #define BRANCH_ISLAND_LABEL_NAME(BRANCH_ISLAND) TREE_VALUE (BRANCH_ISLAND)
16873 #define BRANCH_ISLAND_FUNCTION_NAME(BRANCH_ISLAND) TREE_PURPOSE (BRANCH_ISLAND)
16874 #define BRANCH_ISLAND_LINE_NUMBER(BRANCH_ISLAND) \
16875 TREE_INT_CST_LOW (TREE_TYPE (BRANCH_ISLAND))
16877 /* Generate far-jump branch islands for everything on the
16878 branch_island_list. Invoked immediately after the last instruction
16879 of the epilogue has been emitted; the branch-islands must be
16880 appended to, and contiguous with, the function body. Mach-O stubs
16881 are generated in machopic_output_stub(). */
16884 macho_branch_islands (void)
16887 tree branch_island;
16889 for (branch_island = branch_island_list;
16891 branch_island = TREE_CHAIN (branch_island))
16893 const char *label =
16894 IDENTIFIER_POINTER (BRANCH_ISLAND_LABEL_NAME (branch_island));
16896 IDENTIFIER_POINTER (BRANCH_ISLAND_FUNCTION_NAME (branch_island));
16897 char name_buf[512];
16898 /* Cheap copy of the details from the Darwin ASM_OUTPUT_LABELREF(). */
16899 if (name[0] == '*' || name[0] == '&')
16900 strcpy (name_buf, name+1);
16904 strcpy (name_buf+1, name);
16906 strcpy (tmp_buf, "\n");
16907 strcat (tmp_buf, label);
16908 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
16909 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
16910 dbxout_stabd (N_SLINE, BRANCH_ISLAND_LINE_NUMBER (branch_island));
16911 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
16914 strcat (tmp_buf, ":\n\tmflr r0\n\tbcl 20,31,");
16915 strcat (tmp_buf, label);
16916 strcat (tmp_buf, "_pic\n");
16917 strcat (tmp_buf, label);
16918 strcat (tmp_buf, "_pic:\n\tmflr r11\n");
16920 strcat (tmp_buf, "\taddis r11,r11,ha16(");
16921 strcat (tmp_buf, name_buf);
16922 strcat (tmp_buf, " - ");
16923 strcat (tmp_buf, label);
16924 strcat (tmp_buf, "_pic)\n");
16926 strcat (tmp_buf, "\tmtlr r0\n");
16928 strcat (tmp_buf, "\taddi r12,r11,lo16(");
16929 strcat (tmp_buf, name_buf);
16930 strcat (tmp_buf, " - ");
16931 strcat (tmp_buf, label);
16932 strcat (tmp_buf, "_pic)\n");
16934 strcat (tmp_buf, "\tmtctr r12\n\tbctr\n");
16938 strcat (tmp_buf, ":\nlis r12,hi16(");
16939 strcat (tmp_buf, name_buf);
16940 strcat (tmp_buf, ")\n\tori r12,r12,lo16(");
16941 strcat (tmp_buf, name_buf);
16942 strcat (tmp_buf, ")\n\tmtctr r12\n\tbctr");
16944 output_asm_insn (tmp_buf, 0);
16945 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
16946 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
16947 dbxout_stabd (N_SLINE, BRANCH_ISLAND_LINE_NUMBER (branch_island));
16948 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
16951 branch_island_list = 0;
16954 /* NO_PREVIOUS_DEF checks in the link list whether the function name is
16955 already there or not. */
16958 no_previous_def (tree function_name)
16960 tree branch_island;
16961 for (branch_island = branch_island_list;
16963 branch_island = TREE_CHAIN (branch_island))
16964 if (function_name == BRANCH_ISLAND_FUNCTION_NAME (branch_island))
16969 /* GET_PREV_LABEL gets the label name from the previous definition of
16973 get_prev_label (tree function_name)
16975 tree branch_island;
16976 for (branch_island = branch_island_list;
16978 branch_island = TREE_CHAIN (branch_island))
16979 if (function_name == BRANCH_ISLAND_FUNCTION_NAME (branch_island))
16980 return BRANCH_ISLAND_LABEL_NAME (branch_island);
16984 /* INSN is either a function call or a millicode call. It may have an
16985 unconditional jump in its delay slot.
16987 CALL_DEST is the routine we are calling. */
16990 output_call (rtx insn, rtx *operands, int dest_operand_number,
16991 int cookie_operand_number)
16993 static char buf[256];
16994 if (GET_CODE (operands[dest_operand_number]) == SYMBOL_REF
16995 && (INTVAL (operands[cookie_operand_number]) & CALL_LONG))
16998 tree funname = get_identifier (XSTR (operands[dest_operand_number], 0));
17000 if (no_previous_def (funname))
17002 int line_number = 0;
17003 rtx label_rtx = gen_label_rtx ();
17004 char *label_buf, temp_buf[256];
17005 ASM_GENERATE_INTERNAL_LABEL (temp_buf, "L",
17006 CODE_LABEL_NUMBER (label_rtx));
17007 label_buf = temp_buf[0] == '*' ? temp_buf + 1 : temp_buf;
17008 labelname = get_identifier (label_buf);
17009 for (; insn && GET_CODE (insn) != NOTE; insn = PREV_INSN (insn));
17011 line_number = NOTE_LINE_NUMBER (insn);
17012 add_compiler_branch_island (labelname, funname, line_number);
17015 labelname = get_prev_label (funname);
17017 /* "jbsr foo, L42" is Mach-O for "Link as 'bl foo' if a 'bl'
17018 instruction will reach 'foo', otherwise link as 'bl L42'".
17019 "L42" should be a 'branch island', that will do a far jump to
17020 'foo'. Branch islands are generated in
17021 macho_branch_islands(). */
17022 sprintf (buf, "jbsr %%z%d,%.246s",
17023 dest_operand_number, IDENTIFIER_POINTER (labelname));
17026 sprintf (buf, "bl %%z%d", dest_operand_number);
17030 /* Generate PIC and indirect symbol stubs. */
17033 machopic_output_stub (FILE *file, const char *symb, const char *stub)
17035 unsigned int length;
17036 char *symbol_name, *lazy_ptr_name;
17037 char *local_label_0;
17038 static int label = 0;
17040 /* Lose our funky encoding stuff so it doesn't contaminate the stub. */
17041 symb = (*targetm.strip_name_encoding) (symb);
17044 length = strlen (symb);
17045 symbol_name = alloca (length + 32);
17046 GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name, symb, length);
17048 lazy_ptr_name = alloca (length + 32);
17049 GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name, symb, length);
17052 machopic_picsymbol_stub1_section ();
17054 machopic_symbol_stub1_section ();
17058 fprintf (file, "\t.align 5\n");
17060 fprintf (file, "%s:\n", stub);
17061 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
17064 local_label_0 = alloca (sizeof ("\"L00000000000$spb\""));
17065 sprintf (local_label_0, "\"L%011d$spb\"", label);
17067 fprintf (file, "\tmflr r0\n");
17068 fprintf (file, "\tbcl 20,31,%s\n", local_label_0);
17069 fprintf (file, "%s:\n\tmflr r11\n", local_label_0);
17070 fprintf (file, "\taddis r11,r11,ha16(%s-%s)\n",
17071 lazy_ptr_name, local_label_0);
17072 fprintf (file, "\tmtlr r0\n");
17073 fprintf (file, "\t%s r12,lo16(%s-%s)(r11)\n",
17074 (TARGET_64BIT ? "ldu" : "lwzu"),
17075 lazy_ptr_name, local_label_0);
17076 fprintf (file, "\tmtctr r12\n");
17077 fprintf (file, "\tbctr\n");
17081 fprintf (file, "\t.align 4\n");
17083 fprintf (file, "%s:\n", stub);
17084 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
17086 fprintf (file, "\tlis r11,ha16(%s)\n", lazy_ptr_name);
17087 fprintf (file, "\t%s r12,lo16(%s)(r11)\n",
17088 (TARGET_64BIT ? "ldu" : "lwzu"),
17090 fprintf (file, "\tmtctr r12\n");
17091 fprintf (file, "\tbctr\n");
17094 machopic_lazy_symbol_ptr_section ();
17095 fprintf (file, "%s:\n", lazy_ptr_name);
17096 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
17097 fprintf (file, "%sdyld_stub_binding_helper\n",
17098 (TARGET_64BIT ? DOUBLE_INT_ASM_OP : "\t.long\t"));
17101 /* Legitimize PIC addresses. If the address is already
17102 position-independent, we return ORIG. Newly generated
17103 position-independent addresses go into a reg. This is REG if non
17104 zero, otherwise we allocate register(s) as necessary. */
17106 #define SMALL_INT(X) ((unsigned) (INTVAL (X) + 0x8000) < 0x10000)
17109 rs6000_machopic_legitimize_pic_address (rtx orig, enum machine_mode mode,
17114 if (reg == NULL && ! reload_in_progress && ! reload_completed)
17115 reg = gen_reg_rtx (Pmode);
17117 if (GET_CODE (orig) == CONST)
17121 if (GET_CODE (XEXP (orig, 0)) == PLUS
17122 && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
17125 gcc_assert (GET_CODE (XEXP (orig, 0)) == PLUS);
17127 /* Use a different reg for the intermediate value, as
17128 it will be marked UNCHANGING. */
17129 reg_temp = no_new_pseudos ? reg : gen_reg_rtx (Pmode);
17130 base = rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 0),
17133 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 1),
17136 if (GET_CODE (offset) == CONST_INT)
17138 if (SMALL_INT (offset))
17139 return plus_constant (base, INTVAL (offset));
17140 else if (! reload_in_progress && ! reload_completed)
17141 offset = force_reg (Pmode, offset);
17144 rtx mem = force_const_mem (Pmode, orig);
17145 return machopic_legitimize_pic_address (mem, Pmode, reg);
17148 return gen_rtx_PLUS (Pmode, base, offset);
17151 /* Fall back on generic machopic code. */
17152 return machopic_legitimize_pic_address (orig, mode, reg);
17155 /* This is just a placeholder to make linking work without having to
17156 add this to the generic Darwin EXTRA_SECTIONS. If -mcall-aix is
17157 ever needed for Darwin (not too likely!) this would have to get a
17158 real definition. */
17165 /* Output a .machine directive for the Darwin assembler, and call
17166 the generic start_file routine. */
17169 rs6000_darwin_file_start (void)
17171 static const struct
17177 { "ppc64", "ppc64", MASK_64BIT },
17178 { "970", "ppc970", MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64 },
17179 { "power4", "ppc970", 0 },
17180 { "G5", "ppc970", 0 },
17181 { "7450", "ppc7450", 0 },
17182 { "7400", "ppc7400", MASK_ALTIVEC },
17183 { "G4", "ppc7400", 0 },
17184 { "750", "ppc750", 0 },
17185 { "740", "ppc750", 0 },
17186 { "G3", "ppc750", 0 },
17187 { "604e", "ppc604e", 0 },
17188 { "604", "ppc604", 0 },
17189 { "603e", "ppc603", 0 },
17190 { "603", "ppc603", 0 },
17191 { "601", "ppc601", 0 },
17192 { NULL, "ppc", 0 } };
17193 const char *cpu_id = "";
17196 rs6000_file_start ();
17198 /* Determine the argument to -mcpu=. Default to G3 if not specified. */
17199 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
17200 if (rs6000_select[i].set_arch_p && rs6000_select[i].string
17201 && rs6000_select[i].string[0] != '\0')
17202 cpu_id = rs6000_select[i].string;
17204 /* Look through the mapping array. Pick the first name that either
17205 matches the argument, has a bit set in IF_SET that is also set
17206 in the target flags, or has a NULL name. */
17209 while (mapping[i].arg != NULL
17210 && strcmp (mapping[i].arg, cpu_id) != 0
17211 && (mapping[i].if_set & target_flags) == 0)
17214 fprintf (asm_out_file, "\t.machine %s\n", mapping[i].name);
17217 #endif /* TARGET_MACHO */
17220 static unsigned int
17221 rs6000_elf_section_type_flags (tree decl, const char *name, int reloc)
17223 return default_section_type_flags_1 (decl, name, reloc,
17224 flag_pic || DEFAULT_ABI == ABI_AIX);
17227 /* Record an element in the table of global constructors. SYMBOL is
17228 a SYMBOL_REF of the function to be called; PRIORITY is a number
17229 between 0 and MAX_INIT_PRIORITY.
17231 This differs from default_named_section_asm_out_constructor in
17232 that we have special handling for -mrelocatable. */
17235 rs6000_elf_asm_out_constructor (rtx symbol, int priority)
17237 const char *section = ".ctors";
17240 if (priority != DEFAULT_INIT_PRIORITY)
17242 sprintf (buf, ".ctors.%.5u",
17243 /* Invert the numbering so the linker puts us in the proper
17244 order; constructors are run from right to left, and the
17245 linker sorts in increasing order. */
17246 MAX_INIT_PRIORITY - priority);
17250 named_section_flags (section, SECTION_WRITE);
17251 assemble_align (POINTER_SIZE);
17253 if (TARGET_RELOCATABLE)
17255 fputs ("\t.long (", asm_out_file);
17256 output_addr_const (asm_out_file, symbol);
17257 fputs (")@fixup\n", asm_out_file);
17260 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
17264 rs6000_elf_asm_out_destructor (rtx symbol, int priority)
17266 const char *section = ".dtors";
17269 if (priority != DEFAULT_INIT_PRIORITY)
17271 sprintf (buf, ".dtors.%.5u",
17272 /* Invert the numbering so the linker puts us in the proper
17273 order; constructors are run from right to left, and the
17274 linker sorts in increasing order. */
17275 MAX_INIT_PRIORITY - priority);
17279 named_section_flags (section, SECTION_WRITE);
17280 assemble_align (POINTER_SIZE);
17282 if (TARGET_RELOCATABLE)
17284 fputs ("\t.long (", asm_out_file);
17285 output_addr_const (asm_out_file, symbol);
17286 fputs (")@fixup\n", asm_out_file);
17289 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
17293 rs6000_elf_declare_function_name (FILE *file, const char *name, tree decl)
17297 fputs ("\t.section\t\".opd\",\"aw\"\n\t.align 3\n", file);
17298 ASM_OUTPUT_LABEL (file, name);
17299 fputs (DOUBLE_INT_ASM_OP, file);
17300 rs6000_output_function_entry (file, name);
17301 fputs (",.TOC.@tocbase,0\n\t.previous\n", file);
17304 fputs ("\t.size\t", file);
17305 assemble_name (file, name);
17306 fputs (",24\n\t.type\t.", file);
17307 assemble_name (file, name);
17308 fputs (",@function\n", file);
17309 if (TREE_PUBLIC (decl) && ! DECL_WEAK (decl))
17311 fputs ("\t.globl\t.", file);
17312 assemble_name (file, name);
17317 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
17318 ASM_DECLARE_RESULT (file, DECL_RESULT (decl));
17319 rs6000_output_function_entry (file, name);
17320 fputs (":\n", file);
17324 if (TARGET_RELOCATABLE
17325 && !TARGET_SECURE_PLT
17326 && (get_pool_size () != 0 || current_function_profile)
17331 (*targetm.asm_out.internal_label) (file, "LCL", rs6000_pic_labelno);
17333 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
17334 fprintf (file, "\t.long ");
17335 assemble_name (file, buf);
17337 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
17338 assemble_name (file, buf);
17342 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
17343 ASM_DECLARE_RESULT (file, DECL_RESULT (decl));
17345 if (DEFAULT_ABI == ABI_AIX)
17347 const char *desc_name, *orig_name;
17349 orig_name = (*targetm.strip_name_encoding) (name);
17350 desc_name = orig_name;
17351 while (*desc_name == '.')
17354 if (TREE_PUBLIC (decl))
17355 fprintf (file, "\t.globl %s\n", desc_name);
17357 fprintf (file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
17358 fprintf (file, "%s:\n", desc_name);
17359 fprintf (file, "\t.long %s\n", orig_name);
17360 fputs ("\t.long _GLOBAL_OFFSET_TABLE_\n", file);
17361 if (DEFAULT_ABI == ABI_AIX)
17362 fputs ("\t.long 0\n", file);
17363 fprintf (file, "\t.previous\n");
17365 ASM_OUTPUT_LABEL (file, name);
17369 rs6000_elf_end_indicate_exec_stack (void)
17372 file_end_indicate_exec_stack ();
17378 rs6000_xcoff_asm_globalize_label (FILE *stream, const char *name)
17380 fputs (GLOBAL_ASM_OP, stream);
17381 RS6000_OUTPUT_BASENAME (stream, name);
17382 putc ('\n', stream);
17386 rs6000_xcoff_asm_named_section (const char *name, unsigned int flags,
17387 tree decl ATTRIBUTE_UNUSED)
17390 static const char * const suffix[3] = { "PR", "RO", "RW" };
17392 if (flags & SECTION_CODE)
17394 else if (flags & SECTION_WRITE)
17399 fprintf (asm_out_file, "\t.csect %s%s[%s],%u\n",
17400 (flags & SECTION_CODE) ? "." : "",
17401 name, suffix[smclass], flags & SECTION_ENTSIZE);
17405 rs6000_xcoff_select_section (tree decl, int reloc,
17406 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
17408 if (decl_readonly_section_1 (decl, reloc, 1))
17410 if (TREE_PUBLIC (decl))
17411 read_only_data_section ();
17413 read_only_private_data_section ();
17417 if (TREE_PUBLIC (decl))
17420 private_data_section ();
17425 rs6000_xcoff_unique_section (tree decl, int reloc ATTRIBUTE_UNUSED)
17429 /* Use select_section for private and uninitialized data. */
17430 if (!TREE_PUBLIC (decl)
17431 || DECL_COMMON (decl)
17432 || DECL_INITIAL (decl) == NULL_TREE
17433 || DECL_INITIAL (decl) == error_mark_node
17434 || (flag_zero_initialized_in_bss
17435 && initializer_zerop (DECL_INITIAL (decl))))
17438 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
17439 name = (*targetm.strip_name_encoding) (name);
17440 DECL_SECTION_NAME (decl) = build_string (strlen (name), name);
17443 /* Select section for constant in constant pool.
17445 On RS/6000, all constants are in the private read-only data area.
17446 However, if this is being placed in the TOC it must be output as a
17450 rs6000_xcoff_select_rtx_section (enum machine_mode mode, rtx x,
17451 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
17453 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
17456 read_only_private_data_section ();
17459 /* Remove any trailing [DS] or the like from the symbol name. */
17461 static const char *
17462 rs6000_xcoff_strip_name_encoding (const char *name)
17467 len = strlen (name);
17468 if (name[len - 1] == ']')
17469 return ggc_alloc_string (name, len - 4);
17474 /* Section attributes. AIX is always PIC. */
17476 static unsigned int
17477 rs6000_xcoff_section_type_flags (tree decl, const char *name, int reloc)
17479 unsigned int align;
17480 unsigned int flags = default_section_type_flags_1 (decl, name, reloc, 1);
17482 /* Align to at least UNIT size. */
17483 if (flags & SECTION_CODE)
17484 align = MIN_UNITS_PER_WORD;
17486 /* Increase alignment of large objects if not already stricter. */
17487 align = MAX ((DECL_ALIGN (decl) / BITS_PER_UNIT),
17488 int_size_in_bytes (TREE_TYPE (decl)) > MIN_UNITS_PER_WORD
17489 ? UNITS_PER_FP_WORD : MIN_UNITS_PER_WORD);
17491 return flags | (exact_log2 (align) & SECTION_ENTSIZE);
17494 /* Output at beginning of assembler file.
17496 Initialize the section names for the RS/6000 at this point.
17498 Specify filename, including full path, to assembler.
17500 We want to go into the TOC section so at least one .toc will be emitted.
17501 Also, in order to output proper .bs/.es pairs, we need at least one static
17502 [RW] section emitted.
17504 Finally, declare mcount when profiling to make the assembler happy. */
17507 rs6000_xcoff_file_start (void)
17509 rs6000_gen_section_name (&xcoff_bss_section_name,
17510 main_input_filename, ".bss_");
17511 rs6000_gen_section_name (&xcoff_private_data_section_name,
17512 main_input_filename, ".rw_");
17513 rs6000_gen_section_name (&xcoff_read_only_section_name,
17514 main_input_filename, ".ro_");
17516 fputs ("\t.file\t", asm_out_file);
17517 output_quoted_string (asm_out_file, main_input_filename);
17518 fputc ('\n', asm_out_file);
17519 if (write_symbols != NO_DEBUG)
17520 private_data_section ();
17523 fprintf (asm_out_file, "\t.extern %s\n", RS6000_MCOUNT);
17524 rs6000_file_start ();
17527 /* Output at end of assembler file.
17528 On the RS/6000, referencing data should automatically pull in text. */
17531 rs6000_xcoff_file_end (void)
17534 fputs ("_section_.text:\n", asm_out_file);
17536 fputs (TARGET_32BIT
17537 ? "\t.long _section_.text\n" : "\t.llong _section_.text\n",
17540 #endif /* TARGET_XCOFF */
17542 /* Compute a (partial) cost for rtx X. Return true if the complete
17543 cost has been computed, and false if subexpressions should be
17544 scanned. In either case, *TOTAL contains the cost result. */
17547 rs6000_rtx_costs (rtx x, int code, int outer_code, int *total)
17549 enum machine_mode mode = GET_MODE (x);
17553 /* On the RS/6000, if it is valid in the insn, it is free. */
17555 if (((outer_code == SET
17556 || outer_code == PLUS
17557 || outer_code == MINUS)
17558 && (CONST_OK_FOR_LETTER_P (INTVAL (x), 'I')
17559 || CONST_OK_FOR_LETTER_P (INTVAL (x), 'L')))
17560 || (outer_code == AND
17561 && (CONST_OK_FOR_LETTER_P (INTVAL (x), 'K')
17562 || (CONST_OK_FOR_LETTER_P (INTVAL (x),
17563 mode == SImode ? 'L' : 'J'))
17564 || mask_operand (x, VOIDmode)))
17565 || ((outer_code == IOR || outer_code == XOR)
17566 && (CONST_OK_FOR_LETTER_P (INTVAL (x), 'K')
17567 || (CONST_OK_FOR_LETTER_P (INTVAL (x),
17568 mode == SImode ? 'L' : 'J'))))
17569 || outer_code == ASHIFT
17570 || outer_code == ASHIFTRT
17571 || outer_code == LSHIFTRT
17572 || outer_code == ROTATE
17573 || outer_code == ROTATERT
17574 || outer_code == ZERO_EXTRACT
17575 || (outer_code == MULT
17576 && CONST_OK_FOR_LETTER_P (INTVAL (x), 'I'))
17577 || ((outer_code == DIV || outer_code == UDIV
17578 || outer_code == MOD || outer_code == UMOD)
17579 && exact_log2 (INTVAL (x)) >= 0)
17580 || (outer_code == COMPARE
17581 && (CONST_OK_FOR_LETTER_P (INTVAL (x), 'I')
17582 || CONST_OK_FOR_LETTER_P (INTVAL (x), 'K')))
17583 || (outer_code == EQ
17584 && (CONST_OK_FOR_LETTER_P (INTVAL (x), 'I')
17585 || CONST_OK_FOR_LETTER_P (INTVAL (x), 'K')
17586 || (CONST_OK_FOR_LETTER_P (INTVAL (x),
17587 mode == SImode ? 'L' : 'J'))))
17588 || (outer_code == GTU
17589 && CONST_OK_FOR_LETTER_P (INTVAL (x), 'I'))
17590 || (outer_code == LTU
17591 && CONST_OK_FOR_LETTER_P (INTVAL (x), 'P')))
17596 else if ((outer_code == PLUS
17597 && reg_or_add_cint_operand (x, VOIDmode))
17598 || (outer_code == MINUS
17599 && reg_or_sub_cint_operand (x, VOIDmode))
17600 || ((outer_code == SET
17601 || outer_code == IOR
17602 || outer_code == XOR)
17604 & ~ (unsigned HOST_WIDE_INT) 0xffffffff) == 0))
17606 *total = COSTS_N_INSNS (1);
17613 && ((outer_code == AND
17614 && (CONST_OK_FOR_LETTER_P (INTVAL (x), 'K')
17615 || CONST_OK_FOR_LETTER_P (INTVAL (x), 'L')
17616 || mask_operand (x, DImode)))
17617 || ((outer_code == IOR || outer_code == XOR)
17618 && CONST_DOUBLE_HIGH (x) == 0
17619 && (CONST_DOUBLE_LOW (x)
17620 & ~ (unsigned HOST_WIDE_INT) 0xffff) == 0)))
17625 else if (mode == DImode
17626 && (outer_code == SET
17627 || outer_code == IOR
17628 || outer_code == XOR)
17629 && CONST_DOUBLE_HIGH (x) == 0)
17631 *total = COSTS_N_INSNS (1);
17640 /* When optimizing for size, MEM should be slightly more expensive
17641 than generating address, e.g., (plus (reg) (const)).
17642 L1 cache latency is about two instructions. */
17643 *total = optimize_size ? COSTS_N_INSNS (1) + 1 : COSTS_N_INSNS (2);
17651 if (mode == DFmode)
17653 if (GET_CODE (XEXP (x, 0)) == MULT)
17655 /* FNMA accounted in outer NEG. */
17656 if (outer_code == NEG)
17657 *total = rs6000_cost->dmul - rs6000_cost->fp;
17659 *total = rs6000_cost->dmul;
17662 *total = rs6000_cost->fp;
17664 else if (mode == SFmode)
17666 /* FNMA accounted in outer NEG. */
17667 if (outer_code == NEG && GET_CODE (XEXP (x, 0)) == MULT)
17670 *total = rs6000_cost->fp;
17672 else if (GET_CODE (XEXP (x, 0)) == MULT)
17674 /* The rs6000 doesn't have shift-and-add instructions. */
17675 rs6000_rtx_costs (XEXP (x, 0), MULT, PLUS, total);
17676 *total += COSTS_N_INSNS (1);
17679 *total = COSTS_N_INSNS (1);
17683 if (mode == DFmode)
17685 if (GET_CODE (XEXP (x, 0)) == MULT)
17687 /* FNMA accounted in outer NEG. */
17688 if (outer_code == NEG)
17691 *total = rs6000_cost->dmul;
17694 *total = rs6000_cost->fp;
17696 else if (mode == SFmode)
17698 /* FNMA accounted in outer NEG. */
17699 if (outer_code == NEG && GET_CODE (XEXP (x, 0)) == MULT)
17702 *total = rs6000_cost->fp;
17704 else if (GET_CODE (XEXP (x, 0)) == MULT)
17706 /* The rs6000 doesn't have shift-and-sub instructions. */
17707 rs6000_rtx_costs (XEXP (x, 0), MULT, MINUS, total);
17708 *total += COSTS_N_INSNS (1);
17711 *total = COSTS_N_INSNS (1);
17715 if (GET_CODE (XEXP (x, 1)) == CONST_INT
17716 && CONST_OK_FOR_LETTER_P (INTVAL (XEXP (x, 1)), 'I'))
17718 if (INTVAL (XEXP (x, 1)) >= -256
17719 && INTVAL (XEXP (x, 1)) <= 255)
17720 *total = rs6000_cost->mulsi_const9;
17722 *total = rs6000_cost->mulsi_const;
17724 /* FMA accounted in outer PLUS/MINUS. */
17725 else if ((mode == DFmode || mode == SFmode)
17726 && (outer_code == PLUS || outer_code == MINUS))
17728 else if (mode == DFmode)
17729 *total = rs6000_cost->dmul;
17730 else if (mode == SFmode)
17731 *total = rs6000_cost->fp;
17732 else if (mode == DImode)
17733 *total = rs6000_cost->muldi;
17735 *total = rs6000_cost->mulsi;
17740 if (FLOAT_MODE_P (mode))
17742 *total = mode == DFmode ? rs6000_cost->ddiv
17743 : rs6000_cost->sdiv;
17750 if (GET_CODE (XEXP (x, 1)) == CONST_INT
17751 && exact_log2 (INTVAL (XEXP (x, 1))) >= 0)
17753 if (code == DIV || code == MOD)
17755 *total = COSTS_N_INSNS (2);
17758 *total = COSTS_N_INSNS (1);
17762 if (GET_MODE (XEXP (x, 1)) == DImode)
17763 *total = rs6000_cost->divdi;
17765 *total = rs6000_cost->divsi;
17767 /* Add in shift and subtract for MOD. */
17768 if (code == MOD || code == UMOD)
17769 *total += COSTS_N_INSNS (2);
17773 *total = COSTS_N_INSNS (4);
17777 if (outer_code == AND || outer_code == IOR || outer_code == XOR)
17788 *total = COSTS_N_INSNS (1);
17796 /* Handle mul_highpart. */
17797 if (outer_code == TRUNCATE
17798 && GET_CODE (XEXP (x, 0)) == MULT)
17800 if (mode == DImode)
17801 *total = rs6000_cost->muldi;
17803 *total = rs6000_cost->mulsi;
17806 else if (outer_code == AND)
17809 *total = COSTS_N_INSNS (1);
17814 if (GET_CODE (XEXP (x, 0)) == MEM)
17817 *total = COSTS_N_INSNS (1);
17823 if (!FLOAT_MODE_P (mode))
17825 *total = COSTS_N_INSNS (1);
17831 case UNSIGNED_FLOAT:
17835 case FLOAT_TRUNCATE:
17836 *total = rs6000_cost->fp;
17840 switch (XINT (x, 1))
17843 *total = rs6000_cost->fp;
17855 *total = COSTS_N_INSNS (1);
17858 else if (FLOAT_MODE_P (mode)
17859 && TARGET_PPC_GFXOPT && TARGET_HARD_FLOAT && TARGET_FPRS)
17861 *total = rs6000_cost->fp;
17869 /* Carry bit requires mode == Pmode.
17870 NEG or PLUS already counted so only add one. */
17872 && (outer_code == NEG || outer_code == PLUS))
17874 *total = COSTS_N_INSNS (1);
17877 if (outer_code == SET)
17879 if (XEXP (x, 1) == const0_rtx)
17881 *total = COSTS_N_INSNS (2);
17884 else if (mode == Pmode)
17886 *total = COSTS_N_INSNS (3);
17895 if (outer_code == SET && (XEXP (x, 1) == const0_rtx))
17897 *total = COSTS_N_INSNS (2);
17901 if (outer_code == COMPARE)
17915 /* A C expression returning the cost of moving data from a register of class
17916 CLASS1 to one of CLASS2. */
17919 rs6000_register_move_cost (enum machine_mode mode,
17920 enum reg_class from, enum reg_class to)
17922 /* Moves from/to GENERAL_REGS. */
17923 if (reg_classes_intersect_p (to, GENERAL_REGS)
17924 || reg_classes_intersect_p (from, GENERAL_REGS))
17926 if (! reg_classes_intersect_p (to, GENERAL_REGS))
17929 if (from == FLOAT_REGS || from == ALTIVEC_REGS)
17930 return (rs6000_memory_move_cost (mode, from, 0)
17931 + rs6000_memory_move_cost (mode, GENERAL_REGS, 0));
17933 /* It's more expensive to move CR_REGS than CR0_REGS because of the
17935 else if (from == CR_REGS)
17939 /* A move will cost one instruction per GPR moved. */
17940 return 2 * hard_regno_nregs[0][mode];
17943 /* Moving between two similar registers is just one instruction. */
17944 else if (reg_classes_intersect_p (to, from))
17945 return mode == TFmode ? 4 : 2;
17947 /* Everything else has to go through GENERAL_REGS. */
17949 return (rs6000_register_move_cost (mode, GENERAL_REGS, to)
17950 + rs6000_register_move_cost (mode, from, GENERAL_REGS));
17953 /* A C expressions returning the cost of moving data of MODE from a register to
17957 rs6000_memory_move_cost (enum machine_mode mode, enum reg_class class,
17958 int in ATTRIBUTE_UNUSED)
17960 if (reg_classes_intersect_p (class, GENERAL_REGS))
17961 return 4 * hard_regno_nregs[0][mode];
17962 else if (reg_classes_intersect_p (class, FLOAT_REGS))
17963 return 4 * hard_regno_nregs[32][mode];
17964 else if (reg_classes_intersect_p (class, ALTIVEC_REGS))
17965 return 4 * hard_regno_nregs[FIRST_ALTIVEC_REGNO][mode];
17967 return 4 + rs6000_register_move_cost (mode, class, GENERAL_REGS);
17970 /* Newton-Raphson approximation of single-precision floating point divide n/d.
17971 Assumes no trapping math and finite arguments. */
17974 rs6000_emit_swdivsf (rtx res, rtx n, rtx d)
17976 rtx x0, e0, e1, y1, u0, v0, one;
17978 x0 = gen_reg_rtx (SFmode);
17979 e0 = gen_reg_rtx (SFmode);
17980 e1 = gen_reg_rtx (SFmode);
17981 y1 = gen_reg_rtx (SFmode);
17982 u0 = gen_reg_rtx (SFmode);
17983 v0 = gen_reg_rtx (SFmode);
17984 one = force_reg (SFmode, CONST_DOUBLE_FROM_REAL_VALUE (dconst1, SFmode));
17986 /* x0 = 1./d estimate */
17987 emit_insn (gen_rtx_SET (VOIDmode, x0,
17988 gen_rtx_UNSPEC (SFmode, gen_rtvec (1, d),
17990 /* e0 = 1. - d * x0 */
17991 emit_insn (gen_rtx_SET (VOIDmode, e0,
17992 gen_rtx_MINUS (SFmode, one,
17993 gen_rtx_MULT (SFmode, d, x0))));
17994 /* e1 = e0 + e0 * e0 */
17995 emit_insn (gen_rtx_SET (VOIDmode, e1,
17996 gen_rtx_PLUS (SFmode,
17997 gen_rtx_MULT (SFmode, e0, e0), e0)));
17998 /* y1 = x0 + e1 * x0 */
17999 emit_insn (gen_rtx_SET (VOIDmode, y1,
18000 gen_rtx_PLUS (SFmode,
18001 gen_rtx_MULT (SFmode, e1, x0), x0)));
18003 emit_insn (gen_rtx_SET (VOIDmode, u0,
18004 gen_rtx_MULT (SFmode, n, y1)));
18005 /* v0 = n - d * u0 */
18006 emit_insn (gen_rtx_SET (VOIDmode, v0,
18007 gen_rtx_MINUS (SFmode, n,
18008 gen_rtx_MULT (SFmode, d, u0))));
18009 /* res = u0 + v0 * y1 */
18010 emit_insn (gen_rtx_SET (VOIDmode, res,
18011 gen_rtx_PLUS (SFmode,
18012 gen_rtx_MULT (SFmode, v0, y1), u0)));
18015 /* Newton-Raphson approximation of double-precision floating point divide n/d.
18016 Assumes no trapping math and finite arguments. */
18019 rs6000_emit_swdivdf (rtx res, rtx n, rtx d)
18021 rtx x0, e0, e1, e2, y1, y2, y3, u0, v0, one;
18023 x0 = gen_reg_rtx (DFmode);
18024 e0 = gen_reg_rtx (DFmode);
18025 e1 = gen_reg_rtx (DFmode);
18026 e2 = gen_reg_rtx (DFmode);
18027 y1 = gen_reg_rtx (DFmode);
18028 y2 = gen_reg_rtx (DFmode);
18029 y3 = gen_reg_rtx (DFmode);
18030 u0 = gen_reg_rtx (DFmode);
18031 v0 = gen_reg_rtx (DFmode);
18032 one = force_reg (DFmode, CONST_DOUBLE_FROM_REAL_VALUE (dconst1, DFmode));
18034 /* x0 = 1./d estimate */
18035 emit_insn (gen_rtx_SET (VOIDmode, x0,
18036 gen_rtx_UNSPEC (DFmode, gen_rtvec (1, d),
18038 /* e0 = 1. - d * x0 */
18039 emit_insn (gen_rtx_SET (VOIDmode, e0,
18040 gen_rtx_MINUS (DFmode, one,
18041 gen_rtx_MULT (SFmode, d, x0))));
18042 /* y1 = x0 + e0 * x0 */
18043 emit_insn (gen_rtx_SET (VOIDmode, y1,
18044 gen_rtx_PLUS (DFmode,
18045 gen_rtx_MULT (DFmode, e0, x0), x0)));
18047 emit_insn (gen_rtx_SET (VOIDmode, e1,
18048 gen_rtx_MULT (DFmode, e0, e0)));
18049 /* y2 = y1 + e1 * y1 */
18050 emit_insn (gen_rtx_SET (VOIDmode, y2,
18051 gen_rtx_PLUS (DFmode,
18052 gen_rtx_MULT (DFmode, e1, y1), y1)));
18054 emit_insn (gen_rtx_SET (VOIDmode, e2,
18055 gen_rtx_MULT (DFmode, e1, e1)));
18056 /* y3 = y2 + e2 * y2 */
18057 emit_insn (gen_rtx_SET (VOIDmode, y3,
18058 gen_rtx_PLUS (DFmode,
18059 gen_rtx_MULT (DFmode, e2, y2), y2)));
18061 emit_insn (gen_rtx_SET (VOIDmode, u0,
18062 gen_rtx_MULT (DFmode, n, y3)));
18063 /* v0 = n - d * u0 */
18064 emit_insn (gen_rtx_SET (VOIDmode, v0,
18065 gen_rtx_MINUS (DFmode, n,
18066 gen_rtx_MULT (DFmode, d, u0))));
18067 /* res = u0 + v0 * y3 */
18068 emit_insn (gen_rtx_SET (VOIDmode, res,
18069 gen_rtx_PLUS (DFmode,
18070 gen_rtx_MULT (DFmode, v0, y3), u0)));
18073 /* Return an RTX representing where to find the function value of a
18074 function returning MODE. */
18076 rs6000_complex_function_value (enum machine_mode mode)
18078 unsigned int regno;
18080 enum machine_mode inner = GET_MODE_INNER (mode);
18081 unsigned int inner_bytes = GET_MODE_SIZE (inner);
18083 if (FLOAT_MODE_P (mode) && TARGET_HARD_FLOAT && TARGET_FPRS)
18084 regno = FP_ARG_RETURN;
18087 regno = GP_ARG_RETURN;
18089 /* 32-bit is OK since it'll go in r3/r4. */
18090 if (TARGET_32BIT && inner_bytes >= 4)
18091 return gen_rtx_REG (mode, regno);
18094 if (inner_bytes >= 8)
18095 return gen_rtx_REG (mode, regno);
18097 r1 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno),
18099 r2 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno + 1),
18100 GEN_INT (inner_bytes));
18101 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
18104 /* Define how to find the value returned by a function.
18105 VALTYPE is the data type of the value (as a tree).
18106 If the precise function being called is known, FUNC is its FUNCTION_DECL;
18107 otherwise, FUNC is 0.
18109 On the SPE, both FPs and vectors are returned in r3.
18111 On RS/6000 an integer value is in r3 and a floating-point value is in
18112 fp1, unless -msoft-float. */
18115 rs6000_function_value (tree valtype, tree func ATTRIBUTE_UNUSED)
18117 enum machine_mode mode;
18118 unsigned int regno;
18120 /* Special handling for structs in darwin64. */
18121 if (rs6000_darwin64_abi
18122 && TYPE_MODE (valtype) == BLKmode
18123 && TREE_CODE (valtype) == RECORD_TYPE
18124 && int_size_in_bytes (valtype) > 0)
18126 CUMULATIVE_ARGS valcum;
18130 valcum.fregno = FP_ARG_MIN_REG;
18131 valcum.vregno = ALTIVEC_ARG_MIN_REG;
18132 /* Do a trial code generation as if this were going to be passed as
18133 an argument; if any part goes in memory, we return NULL. */
18134 valret = rs6000_darwin64_record_arg (&valcum, valtype, 1, true);
18137 /* Otherwise fall through to standard ABI rules. */
18140 if (TARGET_32BIT && TARGET_POWERPC64 && TYPE_MODE (valtype) == DImode)
18142 /* Long long return value need be split in -mpowerpc64, 32bit ABI. */
18143 return gen_rtx_PARALLEL (DImode,
18145 gen_rtx_EXPR_LIST (VOIDmode,
18146 gen_rtx_REG (SImode, GP_ARG_RETURN),
18148 gen_rtx_EXPR_LIST (VOIDmode,
18149 gen_rtx_REG (SImode,
18150 GP_ARG_RETURN + 1),
18154 if ((INTEGRAL_TYPE_P (valtype)
18155 && TYPE_PRECISION (valtype) < BITS_PER_WORD)
18156 || POINTER_TYPE_P (valtype))
18157 mode = TARGET_32BIT ? SImode : DImode;
18159 mode = TYPE_MODE (valtype);
18161 if (SCALAR_FLOAT_TYPE_P (valtype) && TARGET_HARD_FLOAT && TARGET_FPRS)
18162 regno = FP_ARG_RETURN;
18163 else if (TREE_CODE (valtype) == COMPLEX_TYPE
18164 && targetm.calls.split_complex_arg)
18165 return rs6000_complex_function_value (mode);
18166 else if (TREE_CODE (valtype) == VECTOR_TYPE
18167 && TARGET_ALTIVEC && TARGET_ALTIVEC_ABI
18168 && ALTIVEC_VECTOR_MODE (mode))
18169 regno = ALTIVEC_ARG_RETURN;
18170 else if (TARGET_E500_DOUBLE && TARGET_HARD_FLOAT
18171 && (mode == DFmode || mode == DCmode))
18172 return spe_build_register_parallel (mode, GP_ARG_RETURN);
18174 regno = GP_ARG_RETURN;
18176 return gen_rtx_REG (mode, regno);
18179 /* Define how to find the value returned by a library function
18180 assuming the value has mode MODE. */
18182 rs6000_libcall_value (enum machine_mode mode)
18184 unsigned int regno;
18186 if (TARGET_32BIT && TARGET_POWERPC64 && mode == DImode)
18188 /* Long long return value need be split in -mpowerpc64, 32bit ABI. */
18189 return gen_rtx_PARALLEL (DImode,
18191 gen_rtx_EXPR_LIST (VOIDmode,
18192 gen_rtx_REG (SImode, GP_ARG_RETURN),
18194 gen_rtx_EXPR_LIST (VOIDmode,
18195 gen_rtx_REG (SImode,
18196 GP_ARG_RETURN + 1),
18200 if (GET_MODE_CLASS (mode) == MODE_FLOAT
18201 && TARGET_HARD_FLOAT && TARGET_FPRS)
18202 regno = FP_ARG_RETURN;
18203 else if (ALTIVEC_VECTOR_MODE (mode)
18204 && TARGET_ALTIVEC && TARGET_ALTIVEC_ABI)
18205 regno = ALTIVEC_ARG_RETURN;
18206 else if (COMPLEX_MODE_P (mode) && targetm.calls.split_complex_arg)
18207 return rs6000_complex_function_value (mode);
18208 else if (TARGET_E500_DOUBLE && TARGET_HARD_FLOAT
18209 && (mode == DFmode || mode == DCmode))
18210 return spe_build_register_parallel (mode, GP_ARG_RETURN);
18212 regno = GP_ARG_RETURN;
18214 return gen_rtx_REG (mode, regno);
18217 /* Define the offset between two registers, FROM to be eliminated and its
18218 replacement TO, at the start of a routine. */
18220 rs6000_initial_elimination_offset (int from, int to)
18222 rs6000_stack_t *info = rs6000_stack_info ();
18223 HOST_WIDE_INT offset;
18225 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
18226 offset = info->push_p ? 0 : -info->total_size;
18227 else if (from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
18228 offset = info->total_size;
18229 else if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
18230 offset = info->push_p ? info->total_size : 0;
18231 else if (from == RS6000_PIC_OFFSET_TABLE_REGNUM)
18234 gcc_unreachable ();
18239 /* Return true if TYPE is a SPE or AltiVec opaque type. */
18242 rs6000_is_opaque_type (tree type)
18244 return (type == opaque_V2SI_type_node
18245 || type == opaque_V2SF_type_node
18246 || type == opaque_p_V2SI_type_node
18247 || type == opaque_V4SI_type_node);
18251 rs6000_dwarf_register_span (rtx reg)
18256 && (SPE_VECTOR_MODE (GET_MODE (reg))
18257 || (TARGET_E500_DOUBLE && GET_MODE (reg) == DFmode)))
18262 regno = REGNO (reg);
18264 /* The duality of the SPE register size wreaks all kinds of havoc.
18265 This is a way of distinguishing r0 in 32-bits from r0 in
18268 gen_rtx_PARALLEL (VOIDmode,
18271 gen_rtx_REG (SImode, regno + 1200),
18272 gen_rtx_REG (SImode, regno))
18274 gen_rtx_REG (SImode, regno),
18275 gen_rtx_REG (SImode, regno + 1200)));
18278 /* Map internal gcc register numbers to DWARF2 register numbers. */
18281 rs6000_dbx_register_number (unsigned int regno)
18283 if (regno <= 63 || write_symbols != DWARF2_DEBUG)
18285 if (regno == MQ_REGNO)
18287 if (regno == LINK_REGISTER_REGNUM)
18289 if (regno == COUNT_REGISTER_REGNUM)
18291 if (CR_REGNO_P (regno))
18292 return regno - CR0_REGNO + 86;
18293 if (regno == XER_REGNO)
18295 if (ALTIVEC_REGNO_P (regno))
18296 return regno - FIRST_ALTIVEC_REGNO + 1124;
18297 if (regno == VRSAVE_REGNO)
18299 if (regno == VSCR_REGNO)
18301 if (regno == SPE_ACC_REGNO)
18303 if (regno == SPEFSCR_REGNO)
18305 /* SPE high reg number. We get these values of regno from
18306 rs6000_dwarf_register_span. */
18307 gcc_assert (regno >= 1200 && regno < 1232);
18311 /* target hook eh_return_filter_mode */
18312 static enum machine_mode
18313 rs6000_eh_return_filter_mode (void)
18315 return TARGET_32BIT ? SImode : word_mode;
18318 /* Target hook for vector_mode_supported_p. */
18320 rs6000_vector_mode_supported_p (enum machine_mode mode)
18323 if (TARGET_SPE && SPE_VECTOR_MODE (mode))
18326 else if (TARGET_ALTIVEC && ALTIVEC_VECTOR_MODE (mode))
18333 /* Target hook for invalid_arg_for_unprototyped_fn. */
18334 static const char *
18335 invalid_arg_for_unprototyped_fn (tree typelist, tree funcdecl, tree val)
18337 return (!rs6000_darwin64_abi
18339 && TREE_CODE (TREE_TYPE (val)) == VECTOR_TYPE
18340 && (funcdecl == NULL_TREE
18341 || (TREE_CODE (funcdecl) == FUNCTION_DECL
18342 && DECL_BUILT_IN_CLASS (funcdecl) != BUILT_IN_MD)))
18343 ? N_("AltiVec argument passed to unprototyped function")
18347 #include "gt-rs6000.h"