1 /* Subroutines used for code generation on IBM RS/6000.
2 Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
4 Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it
9 under the terms of the GNU General Public License as published
10 by the Free Software Foundation; either version 2, or (at your
11 option) any later version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT
14 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
15 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
16 License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the
20 Free Software Foundation, 51 Franklin Street, Fifth Floor, Boston,
21 MA 02110-1301, USA. */
25 #include "coretypes.h"
29 #include "hard-reg-set.h"
31 #include "insn-config.h"
32 #include "conditions.h"
33 #include "insn-attr.h"
43 #include "basic-block.h"
44 #include "integrate.h"
50 #include "target-def.h"
51 #include "langhooks.h"
53 #include "cfglayout.h"
54 #include "sched-int.h"
55 #include "tree-gimple.h"
58 #include "xcoffout.h" /* get declarations of xcoff_*_section_name */
61 #include "gstab.h" /* for N_SLINE */
64 #ifndef TARGET_NO_PROTOTYPE
65 #define TARGET_NO_PROTOTYPE 0
68 #define min(A,B) ((A) < (B) ? (A) : (B))
69 #define max(A,B) ((A) > (B) ? (A) : (B))
71 /* Structure used to define the rs6000 stack */
72 typedef struct rs6000_stack {
73 int first_gp_reg_save; /* first callee saved GP register used */
74 int first_fp_reg_save; /* first callee saved FP register used */
75 int first_altivec_reg_save; /* first callee saved AltiVec register used */
76 int lr_save_p; /* true if the link reg needs to be saved */
77 int cr_save_p; /* true if the CR reg needs to be saved */
78 unsigned int vrsave_mask; /* mask of vec registers to save */
79 int toc_save_p; /* true if the TOC needs to be saved */
80 int push_p; /* true if we need to allocate stack space */
81 int calls_p; /* true if the function makes any calls */
82 int world_save_p; /* true if we're saving *everything*:
83 r13-r31, cr, f14-f31, vrsave, v20-v31 */
84 enum rs6000_abi abi; /* which ABI to use */
85 int gp_save_offset; /* offset to save GP regs from initial SP */
86 int fp_save_offset; /* offset to save FP regs from initial SP */
87 int altivec_save_offset; /* offset to save AltiVec regs from initial SP */
88 int lr_save_offset; /* offset to save LR from initial SP */
89 int cr_save_offset; /* offset to save CR from initial SP */
90 int vrsave_save_offset; /* offset to save VRSAVE from initial SP */
91 int spe_gp_save_offset; /* offset to save spe 64-bit gprs */
92 int toc_save_offset; /* offset to save the TOC pointer */
93 int varargs_save_offset; /* offset to save the varargs registers */
94 int ehrd_offset; /* offset to EH return data */
95 int reg_size; /* register size (4 or 8) */
96 HOST_WIDE_INT vars_size; /* variable save area size */
97 int parm_size; /* outgoing parameter size */
98 int save_size; /* save area size */
99 int fixed_size; /* fixed size of stack frame */
100 int gp_size; /* size of saved GP registers */
101 int fp_size; /* size of saved FP registers */
102 int altivec_size; /* size of saved AltiVec registers */
103 int cr_size; /* size to hold CR if not in save_size */
104 int lr_size; /* size to hold LR if not in save_size */
105 int vrsave_size; /* size to hold VRSAVE if not in save_size */
106 int altivec_padding_size; /* size of altivec alignment padding if
108 int spe_gp_size; /* size of 64-bit GPR save size for SPE */
109 int spe_padding_size;
110 int toc_size; /* size to hold TOC if not in save_size */
111 HOST_WIDE_INT total_size; /* total bytes allocated for stack */
112 int spe_64bit_regs_used;
115 /* A C structure for machine-specific, per-function data.
116 This is added to the cfun structure. */
117 typedef struct machine_function GTY(())
119 /* Flags if __builtin_return_address (n) with n >= 1 was used. */
120 int ra_needs_full_frame;
121 /* Some local-dynamic symbol. */
122 const char *some_ld_name;
123 /* Whether the instruction chain has been scanned already. */
124 int insn_chain_scanned_p;
125 /* Flags if __builtin_return_address (0) was used. */
127 /* Offset from virtual_stack_vars_rtx to the start of the ABI_V4
128 varargs save area. */
129 HOST_WIDE_INT varargs_save_offset;
132 /* Target cpu type */
134 enum processor_type rs6000_cpu;
135 struct rs6000_cpu_select rs6000_select[3] =
137 /* switch name, tune arch */
138 { (const char *)0, "--with-cpu=", 1, 1 },
139 { (const char *)0, "-mcpu=", 1, 1 },
140 { (const char *)0, "-mtune=", 1, 0 },
143 /* Always emit branch hint bits. */
144 static GTY(()) bool rs6000_always_hint;
146 /* Schedule instructions for group formation. */
147 static GTY(()) bool rs6000_sched_groups;
149 /* Support for -msched-costly-dep option. */
150 const char *rs6000_sched_costly_dep_str;
151 enum rs6000_dependence_cost rs6000_sched_costly_dep;
153 /* Support for -minsert-sched-nops option. */
154 const char *rs6000_sched_insert_nops_str;
155 enum rs6000_nop_insertion rs6000_sched_insert_nops;
157 /* Support targetm.vectorize.builtin_mask_for_load. */
158 static GTY(()) tree altivec_builtin_mask_for_load;
160 /* Size of long double */
161 int rs6000_long_double_type_size;
163 /* Whether -mabi=altivec has appeared */
164 int rs6000_altivec_abi;
166 /* Nonzero if we want SPE ABI extensions. */
169 /* Nonzero if floating point operations are done in the GPRs. */
170 int rs6000_float_gprs = 0;
172 /* Nonzero if we want Darwin's struct-by-value-in-regs ABI. */
173 int rs6000_darwin64_abi;
175 /* Set to nonzero once AIX common-mode calls have been defined. */
176 static GTY(()) int common_mode_defined;
178 /* Save information from a "cmpxx" operation until the branch or scc is
180 rtx rs6000_compare_op0, rs6000_compare_op1;
181 int rs6000_compare_fp_p;
183 /* Label number of label created for -mrelocatable, to call to so we can
184 get the address of the GOT section */
185 int rs6000_pic_labelno;
188 /* Which abi to adhere to */
189 const char *rs6000_abi_name;
191 /* Semantics of the small data area */
192 enum rs6000_sdata_type rs6000_sdata = SDATA_DATA;
194 /* Which small data model to use */
195 const char *rs6000_sdata_name = (char *)0;
197 /* Counter for labels which are to be placed in .fixup. */
198 int fixuplabelno = 0;
201 /* Bit size of immediate TLS offsets and string from which it is decoded. */
202 int rs6000_tls_size = 32;
203 const char *rs6000_tls_size_string;
205 /* ABI enumeration available for subtarget to use. */
206 enum rs6000_abi rs6000_current_abi;
208 /* Whether to use variant of AIX ABI for PowerPC64 Linux. */
212 const char *rs6000_debug_name;
213 int rs6000_debug_stack; /* debug stack applications */
214 int rs6000_debug_arg; /* debug argument handling */
216 /* Value is TRUE if register/mode pair is acceptable. */
217 bool rs6000_hard_regno_mode_ok_p[NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
219 /* Built in types. */
221 tree rs6000_builtin_types[RS6000_BTI_MAX];
222 tree rs6000_builtin_decls[RS6000_BUILTIN_COUNT];
224 const char *rs6000_traceback_name;
226 traceback_default = 0,
232 /* Flag to say the TOC is initialized */
234 char toc_label_name[10];
236 /* Alias set for saves and restores from the rs6000 stack. */
237 static GTY(()) int rs6000_sr_alias_set;
239 /* Control alignment for fields within structures. */
240 /* String from -malign-XXXXX. */
241 int rs6000_alignment_flags;
243 /* True for any options that were explicitly set. */
245 bool aix_struct_ret; /* True if -maix-struct-ret was used. */
246 bool alignment; /* True if -malign- was used. */
247 bool abi; /* True if -mabi= was used. */
248 bool spe; /* True if -mspe= was used. */
249 bool float_gprs; /* True if -mfloat-gprs= was used. */
250 bool isel; /* True if -misel was used. */
251 bool long_double; /* True if -mlong-double- was used. */
252 } rs6000_explicit_options;
254 struct builtin_description
256 /* mask is not const because we're going to alter it below. This
257 nonsense will go away when we rewrite the -march infrastructure
258 to give us more target flag bits. */
260 const enum insn_code icode;
261 const char *const name;
262 const enum rs6000_builtins code;
265 /* Target cpu costs. */
267 struct processor_costs {
268 const int mulsi; /* cost of SImode multiplication. */
269 const int mulsi_const; /* cost of SImode multiplication by constant. */
270 const int mulsi_const9; /* cost of SImode mult by short constant. */
271 const int muldi; /* cost of DImode multiplication. */
272 const int divsi; /* cost of SImode division. */
273 const int divdi; /* cost of DImode division. */
274 const int fp; /* cost of simple SFmode and DFmode insns. */
275 const int dmul; /* cost of DFmode multiplication (and fmadd). */
276 const int sdiv; /* cost of SFmode division (fdivs). */
277 const int ddiv; /* cost of DFmode division (fdiv). */
280 const struct processor_costs *rs6000_cost;
282 /* Processor costs (relative to an add) */
284 /* Instruction size costs on 32bit processors. */
286 struct processor_costs size32_cost = {
287 COSTS_N_INSNS (1), /* mulsi */
288 COSTS_N_INSNS (1), /* mulsi_const */
289 COSTS_N_INSNS (1), /* mulsi_const9 */
290 COSTS_N_INSNS (1), /* muldi */
291 COSTS_N_INSNS (1), /* divsi */
292 COSTS_N_INSNS (1), /* divdi */
293 COSTS_N_INSNS (1), /* fp */
294 COSTS_N_INSNS (1), /* dmul */
295 COSTS_N_INSNS (1), /* sdiv */
296 COSTS_N_INSNS (1), /* ddiv */
299 /* Instruction size costs on 64bit processors. */
301 struct processor_costs size64_cost = {
302 COSTS_N_INSNS (1), /* mulsi */
303 COSTS_N_INSNS (1), /* mulsi_const */
304 COSTS_N_INSNS (1), /* mulsi_const9 */
305 COSTS_N_INSNS (1), /* muldi */
306 COSTS_N_INSNS (1), /* divsi */
307 COSTS_N_INSNS (1), /* divdi */
308 COSTS_N_INSNS (1), /* fp */
309 COSTS_N_INSNS (1), /* dmul */
310 COSTS_N_INSNS (1), /* sdiv */
311 COSTS_N_INSNS (1), /* ddiv */
314 /* Instruction costs on RIOS1 processors. */
316 struct processor_costs rios1_cost = {
317 COSTS_N_INSNS (5), /* mulsi */
318 COSTS_N_INSNS (4), /* mulsi_const */
319 COSTS_N_INSNS (3), /* mulsi_const9 */
320 COSTS_N_INSNS (5), /* muldi */
321 COSTS_N_INSNS (19), /* divsi */
322 COSTS_N_INSNS (19), /* divdi */
323 COSTS_N_INSNS (2), /* fp */
324 COSTS_N_INSNS (2), /* dmul */
325 COSTS_N_INSNS (19), /* sdiv */
326 COSTS_N_INSNS (19), /* ddiv */
329 /* Instruction costs on RIOS2 processors. */
331 struct processor_costs rios2_cost = {
332 COSTS_N_INSNS (2), /* mulsi */
333 COSTS_N_INSNS (2), /* mulsi_const */
334 COSTS_N_INSNS (2), /* mulsi_const9 */
335 COSTS_N_INSNS (2), /* muldi */
336 COSTS_N_INSNS (13), /* divsi */
337 COSTS_N_INSNS (13), /* divdi */
338 COSTS_N_INSNS (2), /* fp */
339 COSTS_N_INSNS (2), /* dmul */
340 COSTS_N_INSNS (17), /* sdiv */
341 COSTS_N_INSNS (17), /* ddiv */
344 /* Instruction costs on RS64A processors. */
346 struct processor_costs rs64a_cost = {
347 COSTS_N_INSNS (20), /* mulsi */
348 COSTS_N_INSNS (12), /* mulsi_const */
349 COSTS_N_INSNS (8), /* mulsi_const9 */
350 COSTS_N_INSNS (34), /* muldi */
351 COSTS_N_INSNS (65), /* divsi */
352 COSTS_N_INSNS (67), /* divdi */
353 COSTS_N_INSNS (4), /* fp */
354 COSTS_N_INSNS (4), /* dmul */
355 COSTS_N_INSNS (31), /* sdiv */
356 COSTS_N_INSNS (31), /* ddiv */
359 /* Instruction costs on MPCCORE processors. */
361 struct processor_costs mpccore_cost = {
362 COSTS_N_INSNS (2), /* mulsi */
363 COSTS_N_INSNS (2), /* mulsi_const */
364 COSTS_N_INSNS (2), /* mulsi_const9 */
365 COSTS_N_INSNS (2), /* muldi */
366 COSTS_N_INSNS (6), /* divsi */
367 COSTS_N_INSNS (6), /* divdi */
368 COSTS_N_INSNS (4), /* fp */
369 COSTS_N_INSNS (5), /* dmul */
370 COSTS_N_INSNS (10), /* sdiv */
371 COSTS_N_INSNS (17), /* ddiv */
374 /* Instruction costs on PPC403 processors. */
376 struct processor_costs ppc403_cost = {
377 COSTS_N_INSNS (4), /* mulsi */
378 COSTS_N_INSNS (4), /* mulsi_const */
379 COSTS_N_INSNS (4), /* mulsi_const9 */
380 COSTS_N_INSNS (4), /* muldi */
381 COSTS_N_INSNS (33), /* divsi */
382 COSTS_N_INSNS (33), /* divdi */
383 COSTS_N_INSNS (11), /* fp */
384 COSTS_N_INSNS (11), /* dmul */
385 COSTS_N_INSNS (11), /* sdiv */
386 COSTS_N_INSNS (11), /* ddiv */
389 /* Instruction costs on PPC405 processors. */
391 struct processor_costs ppc405_cost = {
392 COSTS_N_INSNS (5), /* mulsi */
393 COSTS_N_INSNS (4), /* mulsi_const */
394 COSTS_N_INSNS (3), /* mulsi_const9 */
395 COSTS_N_INSNS (5), /* muldi */
396 COSTS_N_INSNS (35), /* divsi */
397 COSTS_N_INSNS (35), /* divdi */
398 COSTS_N_INSNS (11), /* fp */
399 COSTS_N_INSNS (11), /* dmul */
400 COSTS_N_INSNS (11), /* sdiv */
401 COSTS_N_INSNS (11), /* ddiv */
404 /* Instruction costs on PPC440 processors. */
406 struct processor_costs ppc440_cost = {
407 COSTS_N_INSNS (3), /* mulsi */
408 COSTS_N_INSNS (2), /* mulsi_const */
409 COSTS_N_INSNS (2), /* mulsi_const9 */
410 COSTS_N_INSNS (3), /* muldi */
411 COSTS_N_INSNS (34), /* divsi */
412 COSTS_N_INSNS (34), /* divdi */
413 COSTS_N_INSNS (5), /* fp */
414 COSTS_N_INSNS (5), /* dmul */
415 COSTS_N_INSNS (19), /* sdiv */
416 COSTS_N_INSNS (33), /* ddiv */
419 /* Instruction costs on PPC601 processors. */
421 struct processor_costs ppc601_cost = {
422 COSTS_N_INSNS (5), /* mulsi */
423 COSTS_N_INSNS (5), /* mulsi_const */
424 COSTS_N_INSNS (5), /* mulsi_const9 */
425 COSTS_N_INSNS (5), /* muldi */
426 COSTS_N_INSNS (36), /* divsi */
427 COSTS_N_INSNS (36), /* divdi */
428 COSTS_N_INSNS (4), /* fp */
429 COSTS_N_INSNS (5), /* dmul */
430 COSTS_N_INSNS (17), /* sdiv */
431 COSTS_N_INSNS (31), /* ddiv */
434 /* Instruction costs on PPC603 processors. */
436 struct processor_costs ppc603_cost = {
437 COSTS_N_INSNS (5), /* mulsi */
438 COSTS_N_INSNS (3), /* mulsi_const */
439 COSTS_N_INSNS (2), /* mulsi_const9 */
440 COSTS_N_INSNS (5), /* muldi */
441 COSTS_N_INSNS (37), /* divsi */
442 COSTS_N_INSNS (37), /* divdi */
443 COSTS_N_INSNS (3), /* fp */
444 COSTS_N_INSNS (4), /* dmul */
445 COSTS_N_INSNS (18), /* sdiv */
446 COSTS_N_INSNS (33), /* ddiv */
449 /* Instruction costs on PPC604 processors. */
451 struct processor_costs ppc604_cost = {
452 COSTS_N_INSNS (4), /* mulsi */
453 COSTS_N_INSNS (4), /* mulsi_const */
454 COSTS_N_INSNS (4), /* mulsi_const9 */
455 COSTS_N_INSNS (4), /* muldi */
456 COSTS_N_INSNS (20), /* divsi */
457 COSTS_N_INSNS (20), /* divdi */
458 COSTS_N_INSNS (3), /* fp */
459 COSTS_N_INSNS (3), /* dmul */
460 COSTS_N_INSNS (18), /* sdiv */
461 COSTS_N_INSNS (32), /* ddiv */
464 /* Instruction costs on PPC604e processors. */
466 struct processor_costs ppc604e_cost = {
467 COSTS_N_INSNS (2), /* mulsi */
468 COSTS_N_INSNS (2), /* mulsi_const */
469 COSTS_N_INSNS (2), /* mulsi_const9 */
470 COSTS_N_INSNS (2), /* muldi */
471 COSTS_N_INSNS (20), /* divsi */
472 COSTS_N_INSNS (20), /* divdi */
473 COSTS_N_INSNS (3), /* fp */
474 COSTS_N_INSNS (3), /* dmul */
475 COSTS_N_INSNS (18), /* sdiv */
476 COSTS_N_INSNS (32), /* ddiv */
479 /* Instruction costs on PPC620 processors. */
481 struct processor_costs ppc620_cost = {
482 COSTS_N_INSNS (5), /* mulsi */
483 COSTS_N_INSNS (4), /* mulsi_const */
484 COSTS_N_INSNS (3), /* mulsi_const9 */
485 COSTS_N_INSNS (7), /* muldi */
486 COSTS_N_INSNS (21), /* divsi */
487 COSTS_N_INSNS (37), /* divdi */
488 COSTS_N_INSNS (3), /* fp */
489 COSTS_N_INSNS (3), /* dmul */
490 COSTS_N_INSNS (18), /* sdiv */
491 COSTS_N_INSNS (32), /* ddiv */
494 /* Instruction costs on PPC630 processors. */
496 struct processor_costs ppc630_cost = {
497 COSTS_N_INSNS (5), /* mulsi */
498 COSTS_N_INSNS (4), /* mulsi_const */
499 COSTS_N_INSNS (3), /* mulsi_const9 */
500 COSTS_N_INSNS (7), /* muldi */
501 COSTS_N_INSNS (21), /* divsi */
502 COSTS_N_INSNS (37), /* divdi */
503 COSTS_N_INSNS (3), /* fp */
504 COSTS_N_INSNS (3), /* dmul */
505 COSTS_N_INSNS (17), /* sdiv */
506 COSTS_N_INSNS (21), /* ddiv */
509 /* Instruction costs on PPC750 and PPC7400 processors. */
511 struct processor_costs ppc750_cost = {
512 COSTS_N_INSNS (5), /* mulsi */
513 COSTS_N_INSNS (3), /* mulsi_const */
514 COSTS_N_INSNS (2), /* mulsi_const9 */
515 COSTS_N_INSNS (5), /* muldi */
516 COSTS_N_INSNS (17), /* divsi */
517 COSTS_N_INSNS (17), /* divdi */
518 COSTS_N_INSNS (3), /* fp */
519 COSTS_N_INSNS (3), /* dmul */
520 COSTS_N_INSNS (17), /* sdiv */
521 COSTS_N_INSNS (31), /* ddiv */
524 /* Instruction costs on PPC7450 processors. */
526 struct processor_costs ppc7450_cost = {
527 COSTS_N_INSNS (4), /* mulsi */
528 COSTS_N_INSNS (3), /* mulsi_const */
529 COSTS_N_INSNS (3), /* mulsi_const9 */
530 COSTS_N_INSNS (4), /* muldi */
531 COSTS_N_INSNS (23), /* divsi */
532 COSTS_N_INSNS (23), /* divdi */
533 COSTS_N_INSNS (5), /* fp */
534 COSTS_N_INSNS (5), /* dmul */
535 COSTS_N_INSNS (21), /* sdiv */
536 COSTS_N_INSNS (35), /* ddiv */
539 /* Instruction costs on PPC8540 processors. */
541 struct processor_costs ppc8540_cost = {
542 COSTS_N_INSNS (4), /* mulsi */
543 COSTS_N_INSNS (4), /* mulsi_const */
544 COSTS_N_INSNS (4), /* mulsi_const9 */
545 COSTS_N_INSNS (4), /* muldi */
546 COSTS_N_INSNS (19), /* divsi */
547 COSTS_N_INSNS (19), /* divdi */
548 COSTS_N_INSNS (4), /* fp */
549 COSTS_N_INSNS (4), /* dmul */
550 COSTS_N_INSNS (29), /* sdiv */
551 COSTS_N_INSNS (29), /* ddiv */
554 /* Instruction costs on POWER4 and POWER5 processors. */
556 struct processor_costs power4_cost = {
557 COSTS_N_INSNS (3), /* mulsi */
558 COSTS_N_INSNS (2), /* mulsi_const */
559 COSTS_N_INSNS (2), /* mulsi_const9 */
560 COSTS_N_INSNS (4), /* muldi */
561 COSTS_N_INSNS (18), /* divsi */
562 COSTS_N_INSNS (34), /* divdi */
563 COSTS_N_INSNS (3), /* fp */
564 COSTS_N_INSNS (3), /* dmul */
565 COSTS_N_INSNS (17), /* sdiv */
566 COSTS_N_INSNS (17), /* ddiv */
570 static bool rs6000_function_ok_for_sibcall (tree, tree);
571 static const char *rs6000_invalid_within_doloop (rtx);
572 static rtx rs6000_generate_compare (enum rtx_code);
573 static void rs6000_maybe_dead (rtx);
574 static void rs6000_emit_stack_tie (void);
575 static void rs6000_frame_related (rtx, rtx, HOST_WIDE_INT, rtx, rtx);
576 static rtx spe_synthesize_frame_save (rtx);
577 static bool spe_func_has_64bit_regs_p (void);
578 static void emit_frame_save (rtx, rtx, enum machine_mode, unsigned int,
580 static rtx gen_frame_mem_offset (enum machine_mode, rtx, int);
581 static void rs6000_emit_allocate_stack (HOST_WIDE_INT, int);
582 static unsigned rs6000_hash_constant (rtx);
583 static unsigned toc_hash_function (const void *);
584 static int toc_hash_eq (const void *, const void *);
585 static int constant_pool_expr_1 (rtx, int *, int *);
586 static bool constant_pool_expr_p (rtx);
587 static bool legitimate_small_data_p (enum machine_mode, rtx);
588 static bool legitimate_indexed_address_p (rtx, int);
589 static bool legitimate_lo_sum_address_p (enum machine_mode, rtx, int);
590 static struct machine_function * rs6000_init_machine_status (void);
591 static bool rs6000_assemble_integer (rtx, unsigned int, int);
592 static bool no_global_regs_above (int);
593 #ifdef HAVE_GAS_HIDDEN
594 static void rs6000_assemble_visibility (tree, int);
596 static int rs6000_ra_ever_killed (void);
597 static tree rs6000_handle_longcall_attribute (tree *, tree, tree, int, bool *);
598 static tree rs6000_handle_altivec_attribute (tree *, tree, tree, int, bool *);
599 static void rs6000_eliminate_indexed_memrefs (rtx operands[2]);
600 static const char *rs6000_mangle_fundamental_type (tree);
601 extern const struct attribute_spec rs6000_attribute_table[];
602 static void rs6000_set_default_type_attributes (tree);
603 static void rs6000_output_function_prologue (FILE *, HOST_WIDE_INT);
604 static void rs6000_output_function_epilogue (FILE *, HOST_WIDE_INT);
605 static void rs6000_output_mi_thunk (FILE *, tree, HOST_WIDE_INT, HOST_WIDE_INT,
607 static rtx rs6000_emit_set_long_const (rtx, HOST_WIDE_INT, HOST_WIDE_INT);
608 static bool rs6000_return_in_memory (tree, tree);
609 static void rs6000_file_start (void);
611 static unsigned int rs6000_elf_section_type_flags (tree, const char *, int);
612 static void rs6000_elf_asm_out_constructor (rtx, int);
613 static void rs6000_elf_asm_out_destructor (rtx, int);
614 static void rs6000_elf_end_indicate_exec_stack (void) ATTRIBUTE_UNUSED;
615 static void rs6000_elf_select_section (tree, int, unsigned HOST_WIDE_INT);
616 static void rs6000_elf_unique_section (tree, int);
617 static void rs6000_elf_select_rtx_section (enum machine_mode, rtx,
618 unsigned HOST_WIDE_INT);
619 static void rs6000_elf_encode_section_info (tree, rtx, int)
621 static bool rs6000_elf_in_small_data_p (tree);
624 static void rs6000_xcoff_asm_globalize_label (FILE *, const char *);
625 static void rs6000_xcoff_asm_named_section (const char *, unsigned int, tree);
626 static void rs6000_xcoff_select_section (tree, int, unsigned HOST_WIDE_INT);
627 static void rs6000_xcoff_unique_section (tree, int);
628 static void rs6000_xcoff_select_rtx_section (enum machine_mode, rtx,
629 unsigned HOST_WIDE_INT);
630 static const char * rs6000_xcoff_strip_name_encoding (const char *);
631 static unsigned int rs6000_xcoff_section_type_flags (tree, const char *, int);
632 static void rs6000_xcoff_file_start (void);
633 static void rs6000_xcoff_file_end (void);
635 static int rs6000_variable_issue (FILE *, int, rtx, int);
636 static bool rs6000_rtx_costs (rtx, int, int, int *);
637 static int rs6000_adjust_cost (rtx, rtx, rtx, int);
638 static bool is_microcoded_insn (rtx);
639 static int is_dispatch_slot_restricted (rtx);
640 static bool is_cracked_insn (rtx);
641 static bool is_branch_slot_insn (rtx);
642 static int rs6000_adjust_priority (rtx, int);
643 static int rs6000_issue_rate (void);
644 static bool rs6000_is_costly_dependence (rtx, rtx, rtx, int, int);
645 static rtx get_next_active_insn (rtx, rtx);
646 static bool insn_terminates_group_p (rtx , enum group_termination);
647 static bool is_costly_group (rtx *, rtx);
648 static int force_new_group (int, FILE *, rtx *, rtx, bool *, int, int *);
649 static int redefine_groups (FILE *, int, rtx, rtx);
650 static int pad_groups (FILE *, int, rtx, rtx);
651 static void rs6000_sched_finish (FILE *, int);
652 static int rs6000_use_sched_lookahead (void);
653 static tree rs6000_builtin_mask_for_load (void);
655 static void def_builtin (int, const char *, tree, int);
656 static void rs6000_init_builtins (void);
657 static rtx rs6000_expand_unop_builtin (enum insn_code, tree, rtx);
658 static rtx rs6000_expand_binop_builtin (enum insn_code, tree, rtx);
659 static rtx rs6000_expand_ternop_builtin (enum insn_code, tree, rtx);
660 static rtx rs6000_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
661 static void altivec_init_builtins (void);
662 static void rs6000_common_init_builtins (void);
663 static void rs6000_init_libfuncs (void);
665 static void enable_mask_for_builtins (struct builtin_description *, int,
666 enum rs6000_builtins,
667 enum rs6000_builtins);
668 static tree build_opaque_vector_type (tree, int);
669 static void spe_init_builtins (void);
670 static rtx spe_expand_builtin (tree, rtx, bool *);
671 static rtx spe_expand_stv_builtin (enum insn_code, tree);
672 static rtx spe_expand_predicate_builtin (enum insn_code, tree, rtx);
673 static rtx spe_expand_evsel_builtin (enum insn_code, tree, rtx);
674 static int rs6000_emit_int_cmove (rtx, rtx, rtx, rtx);
675 static rs6000_stack_t *rs6000_stack_info (void);
676 static void debug_stack_info (rs6000_stack_t *);
678 static rtx altivec_expand_builtin (tree, rtx, bool *);
679 static rtx altivec_expand_ld_builtin (tree, rtx, bool *);
680 static rtx altivec_expand_st_builtin (tree, rtx, bool *);
681 static rtx altivec_expand_dst_builtin (tree, rtx, bool *);
682 static rtx altivec_expand_abs_builtin (enum insn_code, tree, rtx);
683 static rtx altivec_expand_predicate_builtin (enum insn_code,
684 const char *, tree, rtx);
685 static rtx altivec_expand_lv_builtin (enum insn_code, tree, rtx);
686 static rtx altivec_expand_stv_builtin (enum insn_code, tree);
687 static rtx altivec_expand_vec_init_builtin (tree, tree, rtx);
688 static rtx altivec_expand_vec_set_builtin (tree);
689 static rtx altivec_expand_vec_ext_builtin (tree, rtx);
690 static int get_element_number (tree, tree);
691 static bool rs6000_handle_option (size_t, const char *, int);
692 static void rs6000_parse_tls_size_option (void);
693 static void rs6000_parse_yes_no_option (const char *, const char *, int *);
694 static int first_altivec_reg_to_save (void);
695 static unsigned int compute_vrsave_mask (void);
696 static void compute_save_world_info (rs6000_stack_t *info_ptr);
697 static void is_altivec_return_reg (rtx, void *);
698 static rtx generate_set_vrsave (rtx, rs6000_stack_t *, int);
699 int easy_vector_constant (rtx, enum machine_mode);
700 static bool rs6000_is_opaque_type (tree);
701 static rtx rs6000_dwarf_register_span (rtx);
702 static rtx rs6000_legitimize_tls_address (rtx, enum tls_model);
703 static void rs6000_output_dwarf_dtprel (FILE *, int, rtx) ATTRIBUTE_UNUSED;
704 static rtx rs6000_tls_get_addr (void);
705 static rtx rs6000_got_sym (void);
706 static int rs6000_tls_symbol_ref_1 (rtx *, void *);
707 static const char *rs6000_get_some_local_dynamic_name (void);
708 static int rs6000_get_some_local_dynamic_name_1 (rtx *, void *);
709 static rtx rs6000_complex_function_value (enum machine_mode);
710 static rtx rs6000_spe_function_arg (CUMULATIVE_ARGS *,
711 enum machine_mode, tree);
712 static void rs6000_darwin64_record_arg_advance_flush (CUMULATIVE_ARGS *,
714 static void rs6000_darwin64_record_arg_advance_recurse (CUMULATIVE_ARGS *,
715 tree, HOST_WIDE_INT);
716 static void rs6000_darwin64_record_arg_flush (CUMULATIVE_ARGS *,
719 static void rs6000_darwin64_record_arg_recurse (CUMULATIVE_ARGS *,
722 static rtx rs6000_darwin64_record_arg (CUMULATIVE_ARGS *, tree, int, bool);
723 static rtx rs6000_mixed_function_arg (enum machine_mode, tree, int);
724 static void rs6000_move_block_from_reg (int regno, rtx x, int nregs);
725 static void setup_incoming_varargs (CUMULATIVE_ARGS *,
726 enum machine_mode, tree,
728 static bool rs6000_pass_by_reference (CUMULATIVE_ARGS *, enum machine_mode,
730 static int rs6000_arg_partial_bytes (CUMULATIVE_ARGS *, enum machine_mode,
732 static const char *invalid_arg_for_unprototyped_fn (tree, tree, tree);
734 static void macho_branch_islands (void);
735 static void add_compiler_branch_island (tree, tree, int);
736 static int no_previous_def (tree function_name);
737 static tree get_prev_label (tree function_name);
738 static void rs6000_darwin_file_start (void);
741 static tree rs6000_build_builtin_va_list (void);
742 static tree rs6000_gimplify_va_arg (tree, tree, tree *, tree *);
743 static bool rs6000_must_pass_in_stack (enum machine_mode, tree);
744 static bool rs6000_vector_mode_supported_p (enum machine_mode);
745 static int get_vec_cmp_insn (enum rtx_code, enum machine_mode,
747 static rtx rs6000_emit_vector_compare (enum rtx_code, rtx, rtx,
749 static int get_vsel_insn (enum machine_mode);
750 static void rs6000_emit_vector_select (rtx, rtx, rtx, rtx);
751 static tree rs6000_stack_protect_fail (void);
753 const int INSN_NOT_AVAILABLE = -1;
754 static enum machine_mode rs6000_eh_return_filter_mode (void);
756 /* Hash table stuff for keeping track of TOC entries. */
758 struct toc_hash_struct GTY(())
760 /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
761 ASM_OUTPUT_SPECIAL_POOL_ENTRY_P. */
763 enum machine_mode key_mode;
767 static GTY ((param_is (struct toc_hash_struct))) htab_t toc_hash_table;
769 /* Default register names. */
770 char rs6000_reg_names[][8] =
772 "0", "1", "2", "3", "4", "5", "6", "7",
773 "8", "9", "10", "11", "12", "13", "14", "15",
774 "16", "17", "18", "19", "20", "21", "22", "23",
775 "24", "25", "26", "27", "28", "29", "30", "31",
776 "0", "1", "2", "3", "4", "5", "6", "7",
777 "8", "9", "10", "11", "12", "13", "14", "15",
778 "16", "17", "18", "19", "20", "21", "22", "23",
779 "24", "25", "26", "27", "28", "29", "30", "31",
780 "mq", "lr", "ctr","ap",
781 "0", "1", "2", "3", "4", "5", "6", "7",
783 /* AltiVec registers. */
784 "0", "1", "2", "3", "4", "5", "6", "7",
785 "8", "9", "10", "11", "12", "13", "14", "15",
786 "16", "17", "18", "19", "20", "21", "22", "23",
787 "24", "25", "26", "27", "28", "29", "30", "31",
790 "spe_acc", "spefscr",
791 /* Soft frame pointer. */
795 #ifdef TARGET_REGNAMES
796 static const char alt_reg_names[][8] =
798 "%r0", "%r1", "%r2", "%r3", "%r4", "%r5", "%r6", "%r7",
799 "%r8", "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
800 "%r16", "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
801 "%r24", "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
802 "%f0", "%f1", "%f2", "%f3", "%f4", "%f5", "%f6", "%f7",
803 "%f8", "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
804 "%f16", "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
805 "%f24", "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
806 "mq", "lr", "ctr", "ap",
807 "%cr0", "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
809 /* AltiVec registers. */
810 "%v0", "%v1", "%v2", "%v3", "%v4", "%v5", "%v6", "%v7",
811 "%v8", "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
812 "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
813 "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
816 "spe_acc", "spefscr",
817 /* Soft frame pointer. */
822 #ifndef MASK_STRICT_ALIGN
823 #define MASK_STRICT_ALIGN 0
825 #ifndef TARGET_PROFILE_KERNEL
826 #define TARGET_PROFILE_KERNEL 0
829 /* The VRSAVE bitmask puts bit %v0 as the most significant bit. */
830 #define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))
832 /* Initialize the GCC target structure. */
833 #undef TARGET_ATTRIBUTE_TABLE
834 #define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
835 #undef TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
836 #define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES rs6000_set_default_type_attributes
838 #undef TARGET_ASM_ALIGNED_DI_OP
839 #define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP
841 /* Default unaligned ops are only provided for ELF. Find the ops needed
842 for non-ELF systems. */
843 #ifndef OBJECT_FORMAT_ELF
845 /* For XCOFF. rs6000_assemble_integer will handle unaligned DIs on
847 #undef TARGET_ASM_UNALIGNED_HI_OP
848 #define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
849 #undef TARGET_ASM_UNALIGNED_SI_OP
850 #define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
851 #undef TARGET_ASM_UNALIGNED_DI_OP
852 #define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
855 #undef TARGET_ASM_UNALIGNED_HI_OP
856 #define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
857 #undef TARGET_ASM_UNALIGNED_SI_OP
858 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
859 #undef TARGET_ASM_UNALIGNED_DI_OP
860 #define TARGET_ASM_UNALIGNED_DI_OP "\t.quad\t"
861 #undef TARGET_ASM_ALIGNED_DI_OP
862 #define TARGET_ASM_ALIGNED_DI_OP "\t.quad\t"
866 /* This hook deals with fixups for relocatable code and DI-mode objects
868 #undef TARGET_ASM_INTEGER
869 #define TARGET_ASM_INTEGER rs6000_assemble_integer
871 #ifdef HAVE_GAS_HIDDEN
872 #undef TARGET_ASM_ASSEMBLE_VISIBILITY
873 #define TARGET_ASM_ASSEMBLE_VISIBILITY rs6000_assemble_visibility
876 #undef TARGET_HAVE_TLS
877 #define TARGET_HAVE_TLS HAVE_AS_TLS
879 #undef TARGET_CANNOT_FORCE_CONST_MEM
880 #define TARGET_CANNOT_FORCE_CONST_MEM rs6000_tls_referenced_p
882 #undef TARGET_ASM_FUNCTION_PROLOGUE
883 #define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
884 #undef TARGET_ASM_FUNCTION_EPILOGUE
885 #define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue
887 #undef TARGET_SCHED_VARIABLE_ISSUE
888 #define TARGET_SCHED_VARIABLE_ISSUE rs6000_variable_issue
890 #undef TARGET_SCHED_ISSUE_RATE
891 #define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
892 #undef TARGET_SCHED_ADJUST_COST
893 #define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
894 #undef TARGET_SCHED_ADJUST_PRIORITY
895 #define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority
896 #undef TARGET_SCHED_IS_COSTLY_DEPENDENCE
897 #define TARGET_SCHED_IS_COSTLY_DEPENDENCE rs6000_is_costly_dependence
898 #undef TARGET_SCHED_FINISH
899 #define TARGET_SCHED_FINISH rs6000_sched_finish
901 #undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
902 #define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD rs6000_use_sched_lookahead
904 #undef TARGET_VECTORIZE_BUILTIN_MASK_FOR_LOAD
905 #define TARGET_VECTORIZE_BUILTIN_MASK_FOR_LOAD rs6000_builtin_mask_for_load
907 #undef TARGET_INIT_BUILTINS
908 #define TARGET_INIT_BUILTINS rs6000_init_builtins
910 #undef TARGET_EXPAND_BUILTIN
911 #define TARGET_EXPAND_BUILTIN rs6000_expand_builtin
913 #undef TARGET_MANGLE_FUNDAMENTAL_TYPE
914 #define TARGET_MANGLE_FUNDAMENTAL_TYPE rs6000_mangle_fundamental_type
916 #undef TARGET_INIT_LIBFUNCS
917 #define TARGET_INIT_LIBFUNCS rs6000_init_libfuncs
920 #undef TARGET_BINDS_LOCAL_P
921 #define TARGET_BINDS_LOCAL_P darwin_binds_local_p
924 #undef TARGET_ASM_OUTPUT_MI_THUNK
925 #define TARGET_ASM_OUTPUT_MI_THUNK rs6000_output_mi_thunk
927 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
928 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_tree_hwi_hwi_tree_true
930 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
931 #define TARGET_FUNCTION_OK_FOR_SIBCALL rs6000_function_ok_for_sibcall
933 #undef TARGET_INVALID_WITHIN_DOLOOP
934 #define TARGET_INVALID_WITHIN_DOLOOP rs6000_invalid_within_doloop
936 #undef TARGET_RTX_COSTS
937 #define TARGET_RTX_COSTS rs6000_rtx_costs
938 #undef TARGET_ADDRESS_COST
939 #define TARGET_ADDRESS_COST hook_int_rtx_0
941 #undef TARGET_VECTOR_OPAQUE_P
942 #define TARGET_VECTOR_OPAQUE_P rs6000_is_opaque_type
944 #undef TARGET_DWARF_REGISTER_SPAN
945 #define TARGET_DWARF_REGISTER_SPAN rs6000_dwarf_register_span
947 /* On rs6000, function arguments are promoted, as are function return
949 #undef TARGET_PROMOTE_FUNCTION_ARGS
950 #define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_tree_true
951 #undef TARGET_PROMOTE_FUNCTION_RETURN
952 #define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_tree_true
954 #undef TARGET_RETURN_IN_MEMORY
955 #define TARGET_RETURN_IN_MEMORY rs6000_return_in_memory
957 #undef TARGET_SETUP_INCOMING_VARARGS
958 #define TARGET_SETUP_INCOMING_VARARGS setup_incoming_varargs
960 /* Always strict argument naming on rs6000. */
961 #undef TARGET_STRICT_ARGUMENT_NAMING
962 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
963 #undef TARGET_PRETEND_OUTGOING_VARARGS_NAMED
964 #define TARGET_PRETEND_OUTGOING_VARARGS_NAMED hook_bool_CUMULATIVE_ARGS_true
965 #undef TARGET_SPLIT_COMPLEX_ARG
966 #define TARGET_SPLIT_COMPLEX_ARG hook_bool_tree_true
967 #undef TARGET_MUST_PASS_IN_STACK
968 #define TARGET_MUST_PASS_IN_STACK rs6000_must_pass_in_stack
969 #undef TARGET_PASS_BY_REFERENCE
970 #define TARGET_PASS_BY_REFERENCE rs6000_pass_by_reference
971 #undef TARGET_ARG_PARTIAL_BYTES
972 #define TARGET_ARG_PARTIAL_BYTES rs6000_arg_partial_bytes
974 #undef TARGET_BUILD_BUILTIN_VA_LIST
975 #define TARGET_BUILD_BUILTIN_VA_LIST rs6000_build_builtin_va_list
977 #undef TARGET_GIMPLIFY_VA_ARG_EXPR
978 #define TARGET_GIMPLIFY_VA_ARG_EXPR rs6000_gimplify_va_arg
980 #undef TARGET_EH_RETURN_FILTER_MODE
981 #define TARGET_EH_RETURN_FILTER_MODE rs6000_eh_return_filter_mode
983 #undef TARGET_VECTOR_MODE_SUPPORTED_P
984 #define TARGET_VECTOR_MODE_SUPPORTED_P rs6000_vector_mode_supported_p
986 #undef TARGET_INVALID_ARG_FOR_UNPROTOTYPED_FN
987 #define TARGET_INVALID_ARG_FOR_UNPROTOTYPED_FN invalid_arg_for_unprototyped_fn
989 #undef TARGET_HANDLE_OPTION
990 #define TARGET_HANDLE_OPTION rs6000_handle_option
992 #undef TARGET_DEFAULT_TARGET_FLAGS
993 #define TARGET_DEFAULT_TARGET_FLAGS \
994 (TARGET_DEFAULT | MASK_SCHED_PROLOG)
996 #undef TARGET_STACK_PROTECT_FAIL
997 #define TARGET_STACK_PROTECT_FAIL rs6000_stack_protect_fail
999 /* MPC604EUM 3.5.2 Weak Consistency between Multiple Processors
1000 The PowerPC architecture requires only weak consistency among
1001 processors--that is, memory accesses between processors need not be
1002 sequentially consistent and memory accesses among processors can occur
1003 in any order. The ability to order memory accesses weakly provides
1004 opportunities for more efficient use of the system bus. Unless a
1005 dependency exists, the 604e allows read operations to precede store
1007 #undef TARGET_RELAXED_ORDERING
1008 #define TARGET_RELAXED_ORDERING true
1011 #undef TARGET_ASM_OUTPUT_DWARF_DTPREL
1012 #define TARGET_ASM_OUTPUT_DWARF_DTPREL rs6000_output_dwarf_dtprel
1015 struct gcc_target targetm = TARGET_INITIALIZER;
1018 /* Value is 1 if hard register REGNO can hold a value of machine-mode
1021 rs6000_hard_regno_mode_ok (int regno, enum machine_mode mode)
1023 /* The GPRs can hold any mode, but values bigger than one register
1024 cannot go past R31. */
1025 if (INT_REGNO_P (regno))
1026 return INT_REGNO_P (regno + HARD_REGNO_NREGS (regno, mode) - 1);
1028 /* The float registers can only hold floating modes and DImode. */
1029 if (FP_REGNO_P (regno))
1031 (GET_MODE_CLASS (mode) == MODE_FLOAT
1032 && FP_REGNO_P (regno + HARD_REGNO_NREGS (regno, mode) - 1))
1033 || (GET_MODE_CLASS (mode) == MODE_INT
1034 && GET_MODE_SIZE (mode) == UNITS_PER_FP_WORD);
1036 /* The CR register can only hold CC modes. */
1037 if (CR_REGNO_P (regno))
1038 return GET_MODE_CLASS (mode) == MODE_CC;
1040 if (XER_REGNO_P (regno))
1041 return mode == PSImode;
1043 /* AltiVec only in AldyVec registers. */
1044 if (ALTIVEC_REGNO_P (regno))
1045 return ALTIVEC_VECTOR_MODE (mode);
1047 /* ...but GPRs can hold SIMD data on the SPE in one register. */
1048 if (SPE_SIMD_REGNO_P (regno) && TARGET_SPE && SPE_VECTOR_MODE (mode))
1051 /* We cannot put TImode anywhere except general register and it must be
1052 able to fit within the register set. */
1054 return GET_MODE_SIZE (mode) <= UNITS_PER_WORD;
1057 /* Initialize rs6000_hard_regno_mode_ok_p table. */
1059 rs6000_init_hard_regno_mode_ok (void)
1063 for (r = 0; r < FIRST_PSEUDO_REGISTER; ++r)
1064 for (m = 0; m < NUM_MACHINE_MODES; ++m)
1065 if (rs6000_hard_regno_mode_ok (r, m))
1066 rs6000_hard_regno_mode_ok_p[m][r] = true;
1069 /* If not otherwise specified by a target, make 'long double' equivalent to
1072 #ifndef RS6000_DEFAULT_LONG_DOUBLE_SIZE
1073 #define RS6000_DEFAULT_LONG_DOUBLE_SIZE 64
1076 /* Override command line options. Mostly we process the processor
1077 type and sometimes adjust other TARGET_ options. */
1080 rs6000_override_options (const char *default_cpu)
1083 struct rs6000_cpu_select *ptr;
1086 /* Simplifications for entries below. */
1089 POWERPC_BASE_MASK = MASK_POWERPC | MASK_NEW_MNEMONICS,
1090 POWERPC_7400_MASK = POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_ALTIVEC
1093 /* This table occasionally claims that a processor does not support
1094 a particular feature even though it does, but the feature is slower
1095 than the alternative. Thus, it shouldn't be relied on as a
1096 complete description of the processor's support.
1098 Please keep this list in order, and don't forget to update the
1099 documentation in invoke.texi when adding a new processor or
1103 const char *const name; /* Canonical processor name. */
1104 const enum processor_type processor; /* Processor type enum value. */
1105 const int target_enable; /* Target flags to enable. */
1106 } const processor_target_table[]
1107 = {{"401", PROCESSOR_PPC403, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1108 {"403", PROCESSOR_PPC403,
1109 POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_STRICT_ALIGN},
1110 {"405", PROCESSOR_PPC405, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1111 {"405fp", PROCESSOR_PPC405, POWERPC_BASE_MASK},
1112 {"440", PROCESSOR_PPC440, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1113 {"440fp", PROCESSOR_PPC440, POWERPC_BASE_MASK},
1114 {"505", PROCESSOR_MPCCORE, POWERPC_BASE_MASK},
1115 {"601", PROCESSOR_PPC601,
1116 MASK_POWER | POWERPC_BASE_MASK | MASK_MULTIPLE | MASK_STRING},
1117 {"602", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1118 {"603", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1119 {"603e", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1120 {"604", PROCESSOR_PPC604, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1121 {"604e", PROCESSOR_PPC604e, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1122 {"620", PROCESSOR_PPC620,
1123 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
1124 {"630", PROCESSOR_PPC630,
1125 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
1126 {"740", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1127 {"7400", PROCESSOR_PPC7400, POWERPC_7400_MASK},
1128 {"7450", PROCESSOR_PPC7450, POWERPC_7400_MASK},
1129 {"750", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1130 {"801", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1131 {"821", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1132 {"823", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1133 {"8540", PROCESSOR_PPC8540, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1134 /* 8548 has a dummy entry for now. */
1135 {"8548", PROCESSOR_PPC8540, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1136 {"860", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1137 {"970", PROCESSOR_POWER4,
1138 POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
1139 {"common", PROCESSOR_COMMON, MASK_NEW_MNEMONICS},
1140 {"ec603e", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1141 {"G3", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1142 {"G4", PROCESSOR_PPC7450, POWERPC_7400_MASK},
1143 {"G5", PROCESSOR_POWER4,
1144 POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
1145 {"power", PROCESSOR_POWER, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1146 {"power2", PROCESSOR_POWER,
1147 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING},
1148 {"power3", PROCESSOR_PPC630,
1149 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
1150 {"power4", PROCESSOR_POWER4,
1151 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_MFCRF | MASK_POWERPC64},
1152 {"power5", PROCESSOR_POWER5,
1153 POWERPC_BASE_MASK | MASK_POWERPC64 | MASK_PPC_GFXOPT
1154 | MASK_MFCRF | MASK_POPCNTB},
1155 {"powerpc", PROCESSOR_POWERPC, POWERPC_BASE_MASK},
1156 {"powerpc64", PROCESSOR_POWERPC64,
1157 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
1158 {"rios", PROCESSOR_RIOS1, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1159 {"rios1", PROCESSOR_RIOS1, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1160 {"rios2", PROCESSOR_RIOS2,
1161 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING},
1162 {"rsc", PROCESSOR_PPC601, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1163 {"rsc1", PROCESSOR_PPC601, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1164 {"rs64", PROCESSOR_RS64A,
1165 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64}
1168 const size_t ptt_size = ARRAY_SIZE (processor_target_table);
1170 /* Some OSs don't support saving the high part of 64-bit registers on
1171 context switch. Other OSs don't support saving Altivec registers.
1172 On those OSs, we don't touch the MASK_POWERPC64 or MASK_ALTIVEC
1173 settings; if the user wants either, the user must explicitly specify
1174 them and we won't interfere with the user's specification. */
1177 POWER_MASKS = MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING,
1178 POWERPC_MASKS = (POWERPC_BASE_MASK | MASK_PPC_GPOPT
1179 | MASK_PPC_GFXOPT | MASK_POWERPC64 | MASK_ALTIVEC
1183 rs6000_init_hard_regno_mode_ok ();
1185 set_masks = POWER_MASKS | POWERPC_MASKS | MASK_SOFT_FLOAT;
1186 #ifdef OS_MISSING_POWERPC64
1187 if (OS_MISSING_POWERPC64)
1188 set_masks &= ~MASK_POWERPC64;
1190 #ifdef OS_MISSING_ALTIVEC
1191 if (OS_MISSING_ALTIVEC)
1192 set_masks &= ~MASK_ALTIVEC;
1195 /* Don't override by the processor default if given explicitly. */
1196 set_masks &= ~target_flags_explicit;
1198 /* Identify the processor type. */
1199 rs6000_select[0].string = default_cpu;
1200 rs6000_cpu = TARGET_POWERPC64 ? PROCESSOR_DEFAULT64 : PROCESSOR_DEFAULT;
1202 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
1204 ptr = &rs6000_select[i];
1205 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
1207 for (j = 0; j < ptt_size; j++)
1208 if (! strcmp (ptr->string, processor_target_table[j].name))
1210 if (ptr->set_tune_p)
1211 rs6000_cpu = processor_target_table[j].processor;
1213 if (ptr->set_arch_p)
1215 target_flags &= ~set_masks;
1216 target_flags |= (processor_target_table[j].target_enable
1223 error ("bad value (%s) for %s switch", ptr->string, ptr->name);
1230 /* If we are optimizing big endian systems for space, use the load/store
1231 multiple and string instructions. */
1232 if (BYTES_BIG_ENDIAN && optimize_size)
1233 target_flags |= ~target_flags_explicit & (MASK_MULTIPLE | MASK_STRING);
1235 /* Don't allow -mmultiple or -mstring on little endian systems
1236 unless the cpu is a 750, because the hardware doesn't support the
1237 instructions used in little endian mode, and causes an alignment
1238 trap. The 750 does not cause an alignment trap (except when the
1239 target is unaligned). */
1241 if (!BYTES_BIG_ENDIAN && rs6000_cpu != PROCESSOR_PPC750)
1243 if (TARGET_MULTIPLE)
1245 target_flags &= ~MASK_MULTIPLE;
1246 if ((target_flags_explicit & MASK_MULTIPLE) != 0)
1247 warning (0, "-mmultiple is not supported on little endian systems");
1252 target_flags &= ~MASK_STRING;
1253 if ((target_flags_explicit & MASK_STRING) != 0)
1254 warning (0, "-mstring is not supported on little endian systems");
1258 /* Set debug flags */
1259 if (rs6000_debug_name)
1261 if (! strcmp (rs6000_debug_name, "all"))
1262 rs6000_debug_stack = rs6000_debug_arg = 1;
1263 else if (! strcmp (rs6000_debug_name, "stack"))
1264 rs6000_debug_stack = 1;
1265 else if (! strcmp (rs6000_debug_name, "arg"))
1266 rs6000_debug_arg = 1;
1268 error ("unknown -mdebug-%s switch", rs6000_debug_name);
1271 if (rs6000_traceback_name)
1273 if (! strncmp (rs6000_traceback_name, "full", 4))
1274 rs6000_traceback = traceback_full;
1275 else if (! strncmp (rs6000_traceback_name, "part", 4))
1276 rs6000_traceback = traceback_part;
1277 else if (! strncmp (rs6000_traceback_name, "no", 2))
1278 rs6000_traceback = traceback_none;
1280 error ("unknown -mtraceback arg %qs; expecting %<full%>, %<partial%> or %<none%>",
1281 rs6000_traceback_name);
1284 if (!rs6000_explicit_options.long_double)
1285 rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
1287 /* Set Altivec ABI as default for powerpc64 linux. */
1288 if (TARGET_ELF && TARGET_64BIT)
1290 rs6000_altivec_abi = 1;
1291 TARGET_ALTIVEC_VRSAVE = 1;
1294 /* Set the Darwin64 ABI as default for 64-bit Darwin. */
1295 if (DEFAULT_ABI == ABI_DARWIN && TARGET_64BIT)
1297 rs6000_darwin64_abi = 1;
1299 darwin_one_byte_bool = 1;
1301 /* Default to natural alignment, for better performance. */
1302 rs6000_alignment_flags = MASK_ALIGN_NATURAL;
1305 /* Handle -mtls-size option. */
1306 rs6000_parse_tls_size_option ();
1308 #ifdef SUBTARGET_OVERRIDE_OPTIONS
1309 SUBTARGET_OVERRIDE_OPTIONS;
1311 #ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
1312 SUBSUBTARGET_OVERRIDE_OPTIONS;
1314 #ifdef SUB3TARGET_OVERRIDE_OPTIONS
1315 SUB3TARGET_OVERRIDE_OPTIONS;
1321 error ("AltiVec and E500 instructions cannot coexist");
1323 /* The e500 does not have string instructions, and we set
1324 MASK_STRING above when optimizing for size. */
1325 if ((target_flags & MASK_STRING) != 0)
1326 target_flags = target_flags & ~MASK_STRING;
1328 else if (rs6000_select[1].string != NULL)
1330 /* For the powerpc-eabispe configuration, we set all these by
1331 default, so let's unset them if we manually set another
1332 CPU that is not the E500. */
1333 if (!rs6000_explicit_options.abi)
1335 if (!rs6000_explicit_options.spe)
1337 if (!rs6000_explicit_options.float_gprs)
1338 rs6000_float_gprs = 0;
1339 if (!rs6000_explicit_options.isel)
1341 if (!rs6000_explicit_options.long_double)
1342 rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
1345 rs6000_always_hint = (rs6000_cpu != PROCESSOR_POWER4
1346 && rs6000_cpu != PROCESSOR_POWER5);
1347 rs6000_sched_groups = (rs6000_cpu == PROCESSOR_POWER4
1348 || rs6000_cpu == PROCESSOR_POWER5);
1350 rs6000_sched_restricted_insns_priority
1351 = (rs6000_sched_groups ? 1 : 0);
1353 /* Handle -msched-costly-dep option. */
1354 rs6000_sched_costly_dep
1355 = (rs6000_sched_groups ? store_to_load_dep_costly : no_dep_costly);
1357 if (rs6000_sched_costly_dep_str)
1359 if (! strcmp (rs6000_sched_costly_dep_str, "no"))
1360 rs6000_sched_costly_dep = no_dep_costly;
1361 else if (! strcmp (rs6000_sched_costly_dep_str, "all"))
1362 rs6000_sched_costly_dep = all_deps_costly;
1363 else if (! strcmp (rs6000_sched_costly_dep_str, "true_store_to_load"))
1364 rs6000_sched_costly_dep = true_store_to_load_dep_costly;
1365 else if (! strcmp (rs6000_sched_costly_dep_str, "store_to_load"))
1366 rs6000_sched_costly_dep = store_to_load_dep_costly;
1368 rs6000_sched_costly_dep = atoi (rs6000_sched_costly_dep_str);
1371 /* Handle -minsert-sched-nops option. */
1372 rs6000_sched_insert_nops
1373 = (rs6000_sched_groups ? sched_finish_regroup_exact : sched_finish_none);
1375 if (rs6000_sched_insert_nops_str)
1377 if (! strcmp (rs6000_sched_insert_nops_str, "no"))
1378 rs6000_sched_insert_nops = sched_finish_none;
1379 else if (! strcmp (rs6000_sched_insert_nops_str, "pad"))
1380 rs6000_sched_insert_nops = sched_finish_pad_groups;
1381 else if (! strcmp (rs6000_sched_insert_nops_str, "regroup_exact"))
1382 rs6000_sched_insert_nops = sched_finish_regroup_exact;
1384 rs6000_sched_insert_nops = atoi (rs6000_sched_insert_nops_str);
1387 #ifdef TARGET_REGNAMES
1388 /* If the user desires alternate register names, copy in the
1389 alternate names now. */
1390 if (TARGET_REGNAMES)
1391 memcpy (rs6000_reg_names, alt_reg_names, sizeof (rs6000_reg_names));
1394 /* Set aix_struct_return last, after the ABI is determined.
1395 If -maix-struct-return or -msvr4-struct-return was explicitly
1396 used, don't override with the ABI default. */
1397 if (!rs6000_explicit_options.aix_struct_ret)
1398 aix_struct_return = (DEFAULT_ABI != ABI_V4 || DRAFT_V4_STRUCT_RET);
1400 if (TARGET_LONG_DOUBLE_128
1401 && (DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_DARWIN))
1402 REAL_MODE_FORMAT (TFmode) = &ibm_extended_format;
1404 /* Allocate an alias set for register saves & restores from stack. */
1405 rs6000_sr_alias_set = new_alias_set ();
1408 ASM_GENERATE_INTERNAL_LABEL (toc_label_name, "LCTOC", 1);
1410 /* We can only guarantee the availability of DI pseudo-ops when
1411 assembling for 64-bit targets. */
1414 targetm.asm_out.aligned_op.di = NULL;
1415 targetm.asm_out.unaligned_op.di = NULL;
1418 /* Set branch target alignment, if not optimizing for size. */
1421 if (rs6000_sched_groups)
1423 if (align_functions <= 0)
1424 align_functions = 16;
1425 if (align_jumps <= 0)
1427 if (align_loops <= 0)
1430 if (align_jumps_max_skip <= 0)
1431 align_jumps_max_skip = 15;
1432 if (align_loops_max_skip <= 0)
1433 align_loops_max_skip = 15;
1436 /* Arrange to save and restore machine status around nested functions. */
1437 init_machine_status = rs6000_init_machine_status;
1439 /* We should always be splitting complex arguments, but we can't break
1440 Linux and Darwin ABIs at the moment. For now, only AIX is fixed. */
1441 if (DEFAULT_ABI != ABI_AIX)
1442 targetm.calls.split_complex_arg = NULL;
1444 /* Initialize rs6000_cost with the appropriate target costs. */
1446 rs6000_cost = TARGET_POWERPC64 ? &size64_cost : &size32_cost;
1450 case PROCESSOR_RIOS1:
1451 rs6000_cost = &rios1_cost;
1454 case PROCESSOR_RIOS2:
1455 rs6000_cost = &rios2_cost;
1458 case PROCESSOR_RS64A:
1459 rs6000_cost = &rs64a_cost;
1462 case PROCESSOR_MPCCORE:
1463 rs6000_cost = &mpccore_cost;
1466 case PROCESSOR_PPC403:
1467 rs6000_cost = &ppc403_cost;
1470 case PROCESSOR_PPC405:
1471 rs6000_cost = &ppc405_cost;
1474 case PROCESSOR_PPC440:
1475 rs6000_cost = &ppc440_cost;
1478 case PROCESSOR_PPC601:
1479 rs6000_cost = &ppc601_cost;
1482 case PROCESSOR_PPC603:
1483 rs6000_cost = &ppc603_cost;
1486 case PROCESSOR_PPC604:
1487 rs6000_cost = &ppc604_cost;
1490 case PROCESSOR_PPC604e:
1491 rs6000_cost = &ppc604e_cost;
1494 case PROCESSOR_PPC620:
1495 rs6000_cost = &ppc620_cost;
1498 case PROCESSOR_PPC630:
1499 rs6000_cost = &ppc630_cost;
1502 case PROCESSOR_PPC750:
1503 case PROCESSOR_PPC7400:
1504 rs6000_cost = &ppc750_cost;
1507 case PROCESSOR_PPC7450:
1508 rs6000_cost = &ppc7450_cost;
1511 case PROCESSOR_PPC8540:
1512 rs6000_cost = &ppc8540_cost;
1515 case PROCESSOR_POWER4:
1516 case PROCESSOR_POWER5:
1517 rs6000_cost = &power4_cost;
1525 /* Implement targetm.vectorize.builtin_mask_for_load. */
1527 rs6000_builtin_mask_for_load (void)
1530 return altivec_builtin_mask_for_load;
1535 /* Handle generic options of the form -mfoo=yes/no.
1536 NAME is the option name.
1537 VALUE is the option value.
1538 FLAG is the pointer to the flag where to store a 1 or 0, depending on
1539 whether the option value is 'yes' or 'no' respectively. */
1541 rs6000_parse_yes_no_option (const char *name, const char *value, int *flag)
1545 else if (!strcmp (value, "yes"))
1547 else if (!strcmp (value, "no"))
1550 error ("unknown -m%s= option specified: '%s'", name, value);
1553 /* Validate and record the size specified with the -mtls-size option. */
1556 rs6000_parse_tls_size_option (void)
1558 if (rs6000_tls_size_string == 0)
1560 else if (strcmp (rs6000_tls_size_string, "16") == 0)
1561 rs6000_tls_size = 16;
1562 else if (strcmp (rs6000_tls_size_string, "32") == 0)
1563 rs6000_tls_size = 32;
1564 else if (strcmp (rs6000_tls_size_string, "64") == 0)
1565 rs6000_tls_size = 64;
1567 error ("bad value %qs for -mtls-size switch", rs6000_tls_size_string);
1571 optimization_options (int level ATTRIBUTE_UNUSED, int size ATTRIBUTE_UNUSED)
1573 if (DEFAULT_ABI == ABI_DARWIN)
1574 /* The Darwin libraries never set errno, so we might as well
1575 avoid calling them when that's the only reason we would. */
1576 flag_errno_math = 0;
1579 /* Implement TARGET_HANDLE_OPTION. */
1582 rs6000_handle_option (size_t code, const char *arg, int value)
1587 target_flags &= ~(MASK_POWER | MASK_POWER2
1588 | MASK_MULTIPLE | MASK_STRING);
1589 target_flags_explicit |= (MASK_POWER | MASK_POWER2
1590 | MASK_MULTIPLE | MASK_STRING);
1592 case OPT_mno_powerpc:
1593 target_flags &= ~(MASK_POWERPC | MASK_PPC_GPOPT
1594 | MASK_PPC_GFXOPT | MASK_POWERPC64);
1595 target_flags_explicit |= (MASK_POWERPC | MASK_PPC_GPOPT
1596 | MASK_PPC_GFXOPT | MASK_POWERPC64);
1599 target_flags &= ~(MASK_MINIMAL_TOC | MASK_NO_FP_IN_TOC
1600 | MASK_NO_SUM_IN_TOC);
1601 target_flags_explicit |= (MASK_MINIMAL_TOC | MASK_NO_FP_IN_TOC
1602 | MASK_NO_SUM_IN_TOC);
1603 #ifdef TARGET_USES_SYSV4_OPT
1604 /* Note, V.4 no longer uses a normal TOC, so make -mfull-toc, be
1605 just the same as -mminimal-toc. */
1606 target_flags |= MASK_MINIMAL_TOC;
1607 target_flags_explicit |= MASK_MINIMAL_TOC;
1611 #ifdef TARGET_USES_SYSV4_OPT
1613 /* Make -mtoc behave like -mminimal-toc. */
1614 target_flags |= MASK_MINIMAL_TOC;
1615 target_flags_explicit |= MASK_MINIMAL_TOC;
1619 #ifdef TARGET_USES_AIX64_OPT
1624 target_flags |= MASK_POWERPC64 | MASK_POWERPC | MASK_PPC_GFXOPT;
1625 target_flags_explicit |= MASK_POWERPC64 | MASK_POWERPC
1629 #ifdef TARGET_USES_AIX64_OPT
1634 target_flags &= ~MASK_POWERPC64;
1635 target_flags_explicit |= MASK_POWERPC64;
1638 case OPT_minsert_sched_nops_:
1639 rs6000_sched_insert_nops_str = arg;
1642 case OPT_mminimal_toc:
1645 target_flags &= ~(MASK_NO_FP_IN_TOC | MASK_NO_SUM_IN_TOC);
1646 target_flags_explicit |= (MASK_NO_FP_IN_TOC | MASK_NO_SUM_IN_TOC);
1653 target_flags |= (MASK_MULTIPLE | MASK_STRING);
1654 target_flags_explicit |= (MASK_MULTIPLE | MASK_STRING);
1661 target_flags |= (MASK_POWER | MASK_MULTIPLE | MASK_STRING);
1662 target_flags_explicit |= (MASK_POWER | MASK_MULTIPLE | MASK_STRING);
1666 case OPT_mpowerpc_gpopt:
1667 case OPT_mpowerpc_gfxopt:
1670 target_flags |= MASK_POWERPC;
1671 target_flags_explicit |= MASK_POWERPC;
1675 case OPT_maix_struct_return:
1676 case OPT_msvr4_struct_return:
1677 rs6000_explicit_options.aix_struct_ret = true;
1681 rs6000_parse_yes_no_option ("vrsave", arg, &(TARGET_ALTIVEC_VRSAVE));
1685 rs6000_explicit_options.isel = true;
1686 rs6000_parse_yes_no_option ("isel", arg, &(rs6000_isel));
1690 rs6000_explicit_options.spe = true;
1691 rs6000_parse_yes_no_option ("spe", arg, &(rs6000_spe));
1692 /* No SPE means 64-bit long doubles, even if an E500. */
1694 rs6000_long_double_type_size = 64;
1698 rs6000_debug_name = arg;
1701 #ifdef TARGET_USES_SYSV4_OPT
1703 rs6000_abi_name = arg;
1707 rs6000_sdata_name = arg;
1710 case OPT_mtls_size_:
1711 rs6000_tls_size_string = arg;
1714 case OPT_mrelocatable:
1717 target_flags |= MASK_MINIMAL_TOC | MASK_NO_FP_IN_TOC;
1718 target_flags_explicit |= MASK_MINIMAL_TOC | MASK_NO_FP_IN_TOC;
1722 case OPT_mrelocatable_lib:
1725 target_flags |= MASK_RELOCATABLE | MASK_MINIMAL_TOC
1726 | MASK_NO_FP_IN_TOC;
1727 target_flags_explicit |= MASK_RELOCATABLE | MASK_MINIMAL_TOC
1728 | MASK_NO_FP_IN_TOC;
1732 target_flags &= ~MASK_RELOCATABLE;
1733 target_flags_explicit |= MASK_RELOCATABLE;
1739 rs6000_explicit_options.abi = true;
1740 if (!strcmp (arg, "altivec"))
1742 rs6000_altivec_abi = 1;
1745 else if (! strcmp (arg, "no-altivec"))
1746 rs6000_altivec_abi = 0;
1747 else if (! strcmp (arg, "spe"))
1750 rs6000_altivec_abi = 0;
1751 if (!TARGET_SPE_ABI)
1752 error ("not configured for ABI: '%s'", arg);
1754 else if (! strcmp (arg, "no-spe"))
1757 /* These are here for testing during development only, do not
1758 document in the manual please. */
1759 else if (! strcmp (arg, "d64"))
1761 rs6000_darwin64_abi = 1;
1762 warning (0, "Using darwin64 ABI");
1764 else if (! strcmp (arg, "d32"))
1766 rs6000_darwin64_abi = 0;
1767 warning (0, "Using old darwin ABI");
1772 error ("unknown ABI specified: '%s'", arg);
1778 rs6000_select[1].string = arg;
1782 rs6000_select[2].string = arg;
1785 case OPT_mtraceback_:
1786 rs6000_traceback_name = arg;
1789 case OPT_mfloat_gprs_:
1790 rs6000_explicit_options.float_gprs = true;
1791 if (! strcmp (arg, "yes") || ! strcmp (arg, "single"))
1792 rs6000_float_gprs = 1;
1793 else if (! strcmp (arg, "double"))
1794 rs6000_float_gprs = 2;
1795 else if (! strcmp (arg, "no"))
1796 rs6000_float_gprs = 0;
1799 error ("invalid option for -mfloat-gprs: '%s'", arg);
1804 case OPT_mlong_double_:
1805 rs6000_explicit_options.long_double = true;
1806 rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
1807 if (value != 64 && value != 128)
1809 error ("Unknown switch -mlong-double-%s", arg);
1810 rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
1814 rs6000_long_double_type_size = value;
1817 case OPT_msched_costly_dep_:
1818 rs6000_sched_costly_dep_str = arg;
1822 rs6000_explicit_options.alignment = true;
1823 if (! strcmp (arg, "power"))
1825 /* On 64-bit Darwin, power alignment is ABI-incompatible with
1826 some C library functions, so warn about it. The flag may be
1827 useful for performance studies from time to time though, so
1828 don't disable it entirely. */
1829 if (DEFAULT_ABI == ABI_DARWIN && TARGET_64BIT)
1830 warning (0, "-malign-power is not supported for 64-bit Darwin;"
1831 " it is incompatible with the installed C and C++ libraries");
1832 rs6000_alignment_flags = MASK_ALIGN_POWER;
1834 else if (! strcmp (arg, "natural"))
1835 rs6000_alignment_flags = MASK_ALIGN_NATURAL;
1838 error ("unknown -malign-XXXXX option specified: '%s'", arg);
1846 /* Do anything needed at the start of the asm file. */
1849 rs6000_file_start (void)
1853 const char *start = buffer;
1854 struct rs6000_cpu_select *ptr;
1855 const char *default_cpu = TARGET_CPU_DEFAULT;
1856 FILE *file = asm_out_file;
1858 default_file_start ();
1860 #ifdef TARGET_BI_ARCH
1861 if ((TARGET_DEFAULT ^ target_flags) & MASK_64BIT)
1865 if (flag_verbose_asm)
1867 sprintf (buffer, "\n%s rs6000/powerpc options:", ASM_COMMENT_START);
1868 rs6000_select[0].string = default_cpu;
1870 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
1872 ptr = &rs6000_select[i];
1873 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
1875 fprintf (file, "%s %s%s", start, ptr->name, ptr->string);
1880 if (PPC405_ERRATUM77)
1882 fprintf (file, "%s PPC405CR_ERRATUM77", start);
1886 #ifdef USING_ELFOS_H
1887 switch (rs6000_sdata)
1889 case SDATA_NONE: fprintf (file, "%s -msdata=none", start); start = ""; break;
1890 case SDATA_DATA: fprintf (file, "%s -msdata=data", start); start = ""; break;
1891 case SDATA_SYSV: fprintf (file, "%s -msdata=sysv", start); start = ""; break;
1892 case SDATA_EABI: fprintf (file, "%s -msdata=eabi", start); start = ""; break;
1895 if (rs6000_sdata && g_switch_value)
1897 fprintf (file, "%s -G " HOST_WIDE_INT_PRINT_UNSIGNED, start,
1907 if (DEFAULT_ABI == ABI_AIX || (TARGET_ELF && flag_pic == 2))
1915 /* Return nonzero if this function is known to have a null epilogue. */
1918 direct_return (void)
1920 if (reload_completed)
1922 rs6000_stack_t *info = rs6000_stack_info ();
1924 if (info->first_gp_reg_save == 32
1925 && info->first_fp_reg_save == 64
1926 && info->first_altivec_reg_save == LAST_ALTIVEC_REGNO + 1
1927 && ! info->lr_save_p
1928 && ! info->cr_save_p
1929 && info->vrsave_mask == 0
1937 /* Return the number of instructions it takes to form a constant in an
1938 integer register. */
1941 num_insns_constant_wide (HOST_WIDE_INT value)
1943 /* signed constant loadable with {cal|addi} */
1944 if (CONST_OK_FOR_LETTER_P (value, 'I'))
1947 /* constant loadable with {cau|addis} */
1948 else if (CONST_OK_FOR_LETTER_P (value, 'L'))
1951 #if HOST_BITS_PER_WIDE_INT == 64
1952 else if (TARGET_POWERPC64)
1954 HOST_WIDE_INT low = ((value & 0xffffffff) ^ 0x80000000) - 0x80000000;
1955 HOST_WIDE_INT high = value >> 31;
1957 if (high == 0 || high == -1)
1963 return num_insns_constant_wide (high) + 1;
1965 return (num_insns_constant_wide (high)
1966 + num_insns_constant_wide (low) + 1);
1975 num_insns_constant (rtx op, enum machine_mode mode)
1977 HOST_WIDE_INT low, high;
1979 switch (GET_CODE (op))
1982 #if HOST_BITS_PER_WIDE_INT == 64
1983 if ((INTVAL (op) >> 31) != 0 && (INTVAL (op) >> 31) != -1
1984 && mask_operand (op, mode))
1988 return num_insns_constant_wide (INTVAL (op));
1996 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1997 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
1998 return num_insns_constant_wide ((HOST_WIDE_INT) l);
2001 if (mode == VOIDmode || mode == DImode)
2003 high = CONST_DOUBLE_HIGH (op);
2004 low = CONST_DOUBLE_LOW (op);
2011 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
2012 REAL_VALUE_TO_TARGET_DOUBLE (rv, l);
2013 high = l[WORDS_BIG_ENDIAN == 0];
2014 low = l[WORDS_BIG_ENDIAN != 0];
2018 return (num_insns_constant_wide (low)
2019 + num_insns_constant_wide (high));
2022 if ((high == 0 && low >= 0)
2023 || (high == -1 && low < 0))
2024 return num_insns_constant_wide (low);
2026 else if (mask_operand (op, mode))
2030 return num_insns_constant_wide (high) + 1;
2033 return (num_insns_constant_wide (high)
2034 + num_insns_constant_wide (low) + 1);
2042 /* Returns the constant for the splat instruction, if exists. */
2045 easy_vector_splat_const (int cst, enum machine_mode mode)
2050 if (EASY_VECTOR_15 (cst)
2051 || EASY_VECTOR_15_ADD_SELF (cst))
2053 if ((cst & 0xffff) != ((cst >> 16) & 0xffff))
2059 if (EASY_VECTOR_15 (cst)
2060 || EASY_VECTOR_15_ADD_SELF (cst))
2062 if ((cst & 0xff) != ((cst >> 8) & 0xff))
2068 if (EASY_VECTOR_15 (cst)
2069 || EASY_VECTOR_15_ADD_SELF (cst))
2077 /* Return nonzero if all elements of a vector have the same value. */
2080 easy_vector_same (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2084 units = CONST_VECTOR_NUNITS (op);
2086 cst = INTVAL (CONST_VECTOR_ELT (op, 0));
2087 for (i = 1; i < units; ++i)
2088 if (INTVAL (CONST_VECTOR_ELT (op, i)) != cst)
2090 if (i == units && easy_vector_splat_const (cst, mode))
2095 /* Generate easy_vector_constant out of a easy_vector_constant_add_self. */
2098 gen_easy_vector_constant_add_self (rtx op)
2102 units = GET_MODE_NUNITS (GET_MODE (op));
2103 v = rtvec_alloc (units);
2105 for (i = 0; i < units; i++)
2107 GEN_INT (INTVAL (CONST_VECTOR_ELT (op, i)) >> 1);
2108 return gen_rtx_raw_CONST_VECTOR (GET_MODE (op), v);
2112 output_vec_const_move (rtx *operands)
2115 enum machine_mode mode;
2121 cst = INTVAL (CONST_VECTOR_ELT (vec, 0));
2122 cst2 = INTVAL (CONST_VECTOR_ELT (vec, 1));
2123 mode = GET_MODE (dest);
2127 if (zero_constant (vec, mode))
2128 return "vxor %0,%0,%0";
2130 gcc_assert (easy_vector_constant (vec, mode));
2132 operands[1] = GEN_INT (cst);
2136 if (EASY_VECTOR_15 (cst))
2138 operands[1] = GEN_INT (cst);
2139 return "vspltisw %0,%1";
2141 else if (EASY_VECTOR_15_ADD_SELF (cst))
2147 if (EASY_VECTOR_15 (cst))
2149 operands[1] = GEN_INT (cst);
2150 return "vspltish %0,%1";
2152 else if (EASY_VECTOR_15_ADD_SELF (cst))
2158 if (EASY_VECTOR_15 (cst))
2160 operands[1] = GEN_INT (cst);
2161 return "vspltisb %0,%1";
2163 else if (EASY_VECTOR_15_ADD_SELF (cst))
2171 gcc_assert (TARGET_SPE);
2173 /* Vector constant 0 is handled as a splitter of V2SI, and in the
2174 pattern of V1DI, V4HI, and V2SF.
2176 FIXME: We should probably return # and add post reload
2177 splitters for these, but this way is so easy ;-). */
2178 operands[1] = GEN_INT (cst);
2179 operands[2] = GEN_INT (cst2);
2181 return "li %0,%1\n\tevmergelo %0,%0,%0";
2183 return "li %0,%1\n\tevmergelo %0,%0,%0\n\tli %0,%2";
2186 /* Initialize vector TARGET to VALS. */
2189 rs6000_expand_vector_init (rtx target, rtx vals)
2191 enum machine_mode mode = GET_MODE (target);
2192 enum machine_mode inner_mode = GET_MODE_INNER (mode);
2193 int n_elts = GET_MODE_NUNITS (mode);
2194 int n_var = 0, one_var = -1;
2195 bool all_same = true, all_const_zero = true;
2199 for (i = 0; i < n_elts; ++i)
2201 x = XVECEXP (vals, 0, i);
2202 if (!CONSTANT_P (x))
2203 ++n_var, one_var = i;
2204 else if (x != CONST0_RTX (inner_mode))
2205 all_const_zero = false;
2207 if (i > 0 && !rtx_equal_p (x, XVECEXP (vals, 0, 0)))
2213 if (mode != V4SFmode && all_const_zero)
2215 /* Zero register. */
2216 emit_insn (gen_rtx_SET (VOIDmode, target,
2217 gen_rtx_XOR (mode, target, target)));
2220 else if (mode != V4SFmode && easy_vector_same (vals, mode))
2222 /* Splat immediate. */
2223 x = gen_rtx_VEC_DUPLICATE (mode, CONST_VECTOR_ELT (vals, 0));
2224 emit_insn (gen_rtx_SET (VOIDmode, target, x));
2228 ; /* Splat vector element. */
2231 /* Load from constant pool. */
2232 emit_move_insn (target, gen_rtx_CONST_VECTOR (mode, XVEC (vals, 0)));
2237 /* Store value to stack temp. Load vector element. Splat. */
2240 mem = assign_stack_temp (mode, GET_MODE_SIZE (inner_mode), 0);
2241 emit_move_insn (adjust_address_nv (mem, inner_mode, 0),
2242 XVECEXP (vals, 0, 0));
2243 x = gen_rtx_UNSPEC (VOIDmode,
2244 gen_rtvec (1, const0_rtx), UNSPEC_LVE);
2245 emit_insn (gen_rtx_PARALLEL (VOIDmode,
2247 gen_rtx_SET (VOIDmode,
2250 x = gen_rtx_VEC_SELECT (inner_mode, target,
2251 gen_rtx_PARALLEL (VOIDmode,
2252 gen_rtvec (1, const0_rtx)));
2253 emit_insn (gen_rtx_SET (VOIDmode, target,
2254 gen_rtx_VEC_DUPLICATE (mode, x)));
2258 /* One field is non-constant. Load constant then overwrite
2262 rtx copy = copy_rtx (vals);
2264 /* Load constant part of vector, substititute neighboring value for
2266 XVECEXP (copy, 0, one_var) = XVECEXP (vals, 0, (one_var + 1) % n_elts);
2267 rs6000_expand_vector_init (target, copy);
2269 /* Insert variable. */
2270 rs6000_expand_vector_set (target, XVECEXP (vals, 0, one_var), one_var);
2274 /* Construct the vector in memory one field at a time
2275 and load the whole vector. */
2276 mem = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
2277 for (i = 0; i < n_elts; i++)
2278 emit_move_insn (adjust_address_nv (mem, inner_mode,
2279 i * GET_MODE_SIZE (inner_mode)),
2280 XVECEXP (vals, 0, i));
2281 emit_move_insn (target, mem);
2284 /* Set field ELT of TARGET to VAL. */
2287 rs6000_expand_vector_set (rtx target, rtx val, int elt)
2289 enum machine_mode mode = GET_MODE (target);
2290 enum machine_mode inner_mode = GET_MODE_INNER (mode);
2291 rtx reg = gen_reg_rtx (mode);
2293 int width = GET_MODE_SIZE (inner_mode);
2296 /* Load single variable value. */
2297 mem = assign_stack_temp (mode, GET_MODE_SIZE (inner_mode), 0);
2298 emit_move_insn (adjust_address_nv (mem, inner_mode, 0), val);
2299 x = gen_rtx_UNSPEC (VOIDmode,
2300 gen_rtvec (1, const0_rtx), UNSPEC_LVE);
2301 emit_insn (gen_rtx_PARALLEL (VOIDmode,
2303 gen_rtx_SET (VOIDmode,
2307 /* Linear sequence. */
2308 mask = gen_rtx_PARALLEL (V16QImode, rtvec_alloc (16));
2309 for (i = 0; i < 16; ++i)
2310 XVECEXP (mask, 0, i) = GEN_INT (i);
2312 /* Set permute mask to insert element into target. */
2313 for (i = 0; i < width; ++i)
2314 XVECEXP (mask, 0, elt*width + i)
2315 = GEN_INT (i + 0x10);
2316 x = gen_rtx_CONST_VECTOR (V16QImode, XVEC (mask, 0));
2317 x = gen_rtx_UNSPEC (mode,
2318 gen_rtvec (3, target, reg,
2319 force_reg (V16QImode, x)),
2321 emit_insn (gen_rtx_SET (VOIDmode, target, x));
2324 /* Extract field ELT from VEC into TARGET. */
2327 rs6000_expand_vector_extract (rtx target, rtx vec, int elt)
2329 enum machine_mode mode = GET_MODE (vec);
2330 enum machine_mode inner_mode = GET_MODE_INNER (mode);
2333 /* Allocate mode-sized buffer. */
2334 mem = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
2336 /* Add offset to field within buffer matching vector element. */
2337 mem = adjust_address_nv (mem, mode, elt * GET_MODE_SIZE (inner_mode));
2339 /* Store single field into mode-sized buffer. */
2340 x = gen_rtx_UNSPEC (VOIDmode,
2341 gen_rtvec (1, const0_rtx), UNSPEC_STVE);
2342 emit_insn (gen_rtx_PARALLEL (VOIDmode,
2344 gen_rtx_SET (VOIDmode,
2347 emit_move_insn (target, adjust_address_nv (mem, inner_mode, 0));
2351 mask64_1or2_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED,
2354 if (GET_CODE (op) == CONST_INT)
2356 HOST_WIDE_INT c, lsb;
2361 /* Disallow all zeros. */
2365 /* We can use a single rlwinm insn if no upper bits of C are set
2366 AND there are zero, one or two transitions in the _whole_ of
2368 one_ok = !(c & ~(HOST_WIDE_INT)0xffffffff);
2370 /* We don't change the number of transitions by inverting,
2371 so make sure we start with the LS bit zero. */
2375 /* Find the first transition. */
2378 /* Invert to look for a second transition. */
2381 /* Erase first transition. */
2384 /* Find the second transition. */
2387 /* Invert to look for a third transition. */
2390 /* Erase second transition. */
2393 if (one_ok && !(allow_one || c))
2396 /* Find the third transition (if any). */
2399 /* Match if all the bits above are 1's (or c is zero). */
2405 /* Generates shifts and masks for a pair of rldicl or rldicr insns to
2406 implement ANDing by the mask IN. */
2408 build_mask64_2_operands (rtx in, rtx *out)
2410 #if HOST_BITS_PER_WIDE_INT >= 64
2411 unsigned HOST_WIDE_INT c, lsb, m1, m2;
2414 gcc_assert (GET_CODE (in) == CONST_INT);
2419 /* Assume c initially something like 0x00fff000000fffff. The idea
2420 is to rotate the word so that the middle ^^^^^^ group of zeros
2421 is at the MS end and can be cleared with an rldicl mask. We then
2422 rotate back and clear off the MS ^^ group of zeros with a
2424 c = ~c; /* c == 0xff000ffffff00000 */
2425 lsb = c & -c; /* lsb == 0x0000000000100000 */
2426 m1 = -lsb; /* m1 == 0xfffffffffff00000 */
2427 c = ~c; /* c == 0x00fff000000fffff */
2428 c &= -lsb; /* c == 0x00fff00000000000 */
2429 lsb = c & -c; /* lsb == 0x0000100000000000 */
2430 c = ~c; /* c == 0xff000fffffffffff */
2431 c &= -lsb; /* c == 0xff00000000000000 */
2433 while ((lsb >>= 1) != 0)
2434 shift++; /* shift == 44 on exit from loop */
2435 m1 <<= 64 - shift; /* m1 == 0xffffff0000000000 */
2436 m1 = ~m1; /* m1 == 0x000000ffffffffff */
2437 m2 = ~c; /* m2 == 0x00ffffffffffffff */
2441 /* Assume c initially something like 0xff000f0000000000. The idea
2442 is to rotate the word so that the ^^^ middle group of zeros
2443 is at the LS end and can be cleared with an rldicr mask. We then
2444 rotate back and clear off the LS group of ^^^^^^^^^^ zeros with
2446 lsb = c & -c; /* lsb == 0x0000010000000000 */
2447 m2 = -lsb; /* m2 == 0xffffff0000000000 */
2448 c = ~c; /* c == 0x00fff0ffffffffff */
2449 c &= -lsb; /* c == 0x00fff00000000000 */
2450 lsb = c & -c; /* lsb == 0x0000100000000000 */
2451 c = ~c; /* c == 0xff000fffffffffff */
2452 c &= -lsb; /* c == 0xff00000000000000 */
2454 while ((lsb >>= 1) != 0)
2455 shift++; /* shift == 44 on exit from loop */
2456 m1 = ~c; /* m1 == 0x00ffffffffffffff */
2457 m1 >>= shift; /* m1 == 0x0000000000000fff */
2458 m1 = ~m1; /* m1 == 0xfffffffffffff000 */
2461 /* Note that when we only have two 0->1 and 1->0 transitions, one of the
2462 masks will be all 1's. We are guaranteed more than one transition. */
2463 out[0] = GEN_INT (64 - shift);
2464 out[1] = GEN_INT (m1);
2465 out[2] = GEN_INT (shift);
2466 out[3] = GEN_INT (m2);
2474 /* Return TRUE if OP is an invalid SUBREG operation on the e500. */
2477 invalid_e500_subreg (rtx op, enum machine_mode mode)
2479 /* Reject (subreg:SI (reg:DF)). */
2480 if (GET_CODE (op) == SUBREG
2482 && REG_P (SUBREG_REG (op))
2483 && GET_MODE (SUBREG_REG (op)) == DFmode)
2486 /* Reject (subreg:DF (reg:DI)). */
2487 if (GET_CODE (op) == SUBREG
2489 && REG_P (SUBREG_REG (op))
2490 && GET_MODE (SUBREG_REG (op)) == DImode)
2496 /* Darwin, AIX increases natural record alignment to doubleword if the first
2497 field is an FP double while the FP fields remain word aligned. */
2500 rs6000_special_round_type_align (tree type, int computed, int specified)
2502 tree field = TYPE_FIELDS (type);
2504 /* Skip all non field decls */
2505 while (field != NULL && TREE_CODE (field) != FIELD_DECL)
2506 field = TREE_CHAIN (field);
2508 if (field == NULL || field == type || DECL_MODE (field) != DFmode)
2509 return MAX (computed, specified);
2511 return MAX (MAX (computed, specified), 64);
2514 /* Return 1 for an operand in small memory on V.4/eabi. */
2517 small_data_operand (rtx op ATTRIBUTE_UNUSED,
2518 enum machine_mode mode ATTRIBUTE_UNUSED)
2523 if (rs6000_sdata == SDATA_NONE || rs6000_sdata == SDATA_DATA)
2526 if (DEFAULT_ABI != ABI_V4)
2529 if (GET_CODE (op) == SYMBOL_REF)
2532 else if (GET_CODE (op) != CONST
2533 || GET_CODE (XEXP (op, 0)) != PLUS
2534 || GET_CODE (XEXP (XEXP (op, 0), 0)) != SYMBOL_REF
2535 || GET_CODE (XEXP (XEXP (op, 0), 1)) != CONST_INT)
2540 rtx sum = XEXP (op, 0);
2541 HOST_WIDE_INT summand;
2543 /* We have to be careful here, because it is the referenced address
2544 that must be 32k from _SDA_BASE_, not just the symbol. */
2545 summand = INTVAL (XEXP (sum, 1));
2546 if (summand < 0 || (unsigned HOST_WIDE_INT) summand > g_switch_value)
2549 sym_ref = XEXP (sum, 0);
2552 return SYMBOL_REF_SMALL_P (sym_ref);
2558 /* Return true if either operand is a general purpose register. */
2561 gpr_or_gpr_p (rtx op0, rtx op1)
2563 return ((REG_P (op0) && INT_REGNO_P (REGNO (op0)))
2564 || (REG_P (op1) && INT_REGNO_P (REGNO (op1))));
2568 /* Subroutines of rs6000_legitimize_address and rs6000_legitimate_address. */
2571 constant_pool_expr_1 (rtx op, int *have_sym, int *have_toc)
2573 switch (GET_CODE (op))
2576 if (RS6000_SYMBOL_REF_TLS_P (op))
2578 else if (CONSTANT_POOL_ADDRESS_P (op))
2580 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (op), Pmode))
2588 else if (! strcmp (XSTR (op, 0), toc_label_name))
2597 return (constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc)
2598 && constant_pool_expr_1 (XEXP (op, 1), have_sym, have_toc));
2600 return constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc);
2609 constant_pool_expr_p (rtx op)
2613 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_sym;
2617 toc_relative_expr_p (rtx op)
2621 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_toc;
2625 legitimate_constant_pool_address_p (rtx x)
2628 && GET_CODE (x) == PLUS
2629 && GET_CODE (XEXP (x, 0)) == REG
2630 && (TARGET_MINIMAL_TOC || REGNO (XEXP (x, 0)) == TOC_REGISTER)
2631 && constant_pool_expr_p (XEXP (x, 1)));
2635 legitimate_small_data_p (enum machine_mode mode, rtx x)
2637 return (DEFAULT_ABI == ABI_V4
2638 && !flag_pic && !TARGET_TOC
2639 && (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST)
2640 && small_data_operand (x, mode));
2643 /* SPE offset addressing is limited to 5-bits worth of double words. */
2644 #define SPE_CONST_OFFSET_OK(x) (((x) & ~0xf8) == 0)
2647 rs6000_legitimate_offset_address_p (enum machine_mode mode, rtx x, int strict)
2649 unsigned HOST_WIDE_INT offset, extra;
2651 if (GET_CODE (x) != PLUS)
2653 if (GET_CODE (XEXP (x, 0)) != REG)
2655 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
2657 if (legitimate_constant_pool_address_p (x))
2659 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
2662 offset = INTVAL (XEXP (x, 1));
2670 /* AltiVec vector modes. Only reg+reg addressing is valid and
2671 constant offset zero should not occur due to canonicalization.
2672 Allow any offset when not strict before reload. */
2679 /* SPE vector modes. */
2680 return SPE_CONST_OFFSET_OK (offset);
2683 if (TARGET_E500_DOUBLE)
2684 return SPE_CONST_OFFSET_OK (offset);
2687 /* On e500v2, we may have:
2689 (subreg:DF (mem:DI (plus (reg) (const_int))) 0).
2691 Which gets addressed with evldd instructions. */
2692 if (TARGET_E500_DOUBLE)
2693 return SPE_CONST_OFFSET_OK (offset);
2695 if (mode == DFmode || !TARGET_POWERPC64)
2697 else if (offset & 3)
2703 if (mode == TFmode || !TARGET_POWERPC64)
2705 else if (offset & 3)
2716 return (offset < 0x10000) && (offset + extra < 0x10000);
2720 legitimate_indexed_address_p (rtx x, int strict)
2724 if (GET_CODE (x) != PLUS)
2730 if (!REG_P (op0) || !REG_P (op1))
2733 return ((INT_REG_OK_FOR_BASE_P (op0, strict)
2734 && INT_REG_OK_FOR_INDEX_P (op1, strict))
2735 || (INT_REG_OK_FOR_BASE_P (op1, strict)
2736 && INT_REG_OK_FOR_INDEX_P (op0, strict)));
2740 legitimate_indirect_address_p (rtx x, int strict)
2742 return GET_CODE (x) == REG && INT_REG_OK_FOR_BASE_P (x, strict);
2746 macho_lo_sum_memory_operand (rtx x, enum machine_mode mode)
2748 if (!TARGET_MACHO || !flag_pic
2749 || mode != SImode || GET_CODE (x) != MEM)
2753 if (GET_CODE (x) != LO_SUM)
2755 if (GET_CODE (XEXP (x, 0)) != REG)
2757 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), 0))
2761 return CONSTANT_P (x);
2765 legitimate_lo_sum_address_p (enum machine_mode mode, rtx x, int strict)
2767 if (GET_CODE (x) != LO_SUM)
2769 if (GET_CODE (XEXP (x, 0)) != REG)
2771 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
2773 /* Restrict addressing for DI because of our SUBREG hackery. */
2774 if (TARGET_E500_DOUBLE && (mode == DFmode || mode == DImode))
2778 if (TARGET_ELF || TARGET_MACHO)
2780 if (DEFAULT_ABI != ABI_AIX && DEFAULT_ABI != ABI_DARWIN && flag_pic)
2784 if (GET_MODE_NUNITS (mode) != 1)
2786 if (GET_MODE_BITSIZE (mode) > 64
2787 || (GET_MODE_BITSIZE (mode) > 32 && !TARGET_POWERPC64
2788 && !(TARGET_HARD_FLOAT && TARGET_FPRS && mode == DFmode)))
2791 return CONSTANT_P (x);
2798 /* Try machine-dependent ways of modifying an illegitimate address
2799 to be legitimate. If we find one, return the new, valid address.
2800 This is used from only one place: `memory_address' in explow.c.
2802 OLDX is the address as it was before break_out_memory_refs was
2803 called. In some cases it is useful to look at this to decide what
2806 MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.
2808 It is always safe for this function to do nothing. It exists to
2809 recognize opportunities to optimize the output.
2811 On RS/6000, first check for the sum of a register with a constant
2812 integer that is out of range. If so, generate code to add the
2813 constant with the low-order 16 bits masked to the register and force
2814 this result into another register (this can be done with `cau').
2815 Then generate an address of REG+(CONST&0xffff), allowing for the
2816 possibility of bit 16 being a one.
2818 Then check for the sum of a register and something not constant, try to
2819 load the other things into a register and return the sum. */
2822 rs6000_legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
2823 enum machine_mode mode)
2825 if (GET_CODE (x) == SYMBOL_REF)
2827 enum tls_model model = SYMBOL_REF_TLS_MODEL (x);
2829 return rs6000_legitimize_tls_address (x, model);
2832 if (GET_CODE (x) == PLUS
2833 && GET_CODE (XEXP (x, 0)) == REG
2834 && GET_CODE (XEXP (x, 1)) == CONST_INT
2835 && (unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1)) + 0x8000) >= 0x10000)
2837 HOST_WIDE_INT high_int, low_int;
2839 low_int = ((INTVAL (XEXP (x, 1)) & 0xffff) ^ 0x8000) - 0x8000;
2840 high_int = INTVAL (XEXP (x, 1)) - low_int;
2841 sum = force_operand (gen_rtx_PLUS (Pmode, XEXP (x, 0),
2842 GEN_INT (high_int)), 0);
2843 return gen_rtx_PLUS (Pmode, sum, GEN_INT (low_int));
2845 else if (GET_CODE (x) == PLUS
2846 && GET_CODE (XEXP (x, 0)) == REG
2847 && GET_CODE (XEXP (x, 1)) != CONST_INT
2848 && GET_MODE_NUNITS (mode) == 1
2849 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
2851 || (((mode != DImode && mode != DFmode) || TARGET_E500_DOUBLE)
2853 && (TARGET_POWERPC64 || mode != DImode)
2856 return gen_rtx_PLUS (Pmode, XEXP (x, 0),
2857 force_reg (Pmode, force_operand (XEXP (x, 1), 0)));
2859 else if (ALTIVEC_VECTOR_MODE (mode))
2863 /* Make sure both operands are registers. */
2864 if (GET_CODE (x) == PLUS)
2865 return gen_rtx_PLUS (Pmode, force_reg (Pmode, XEXP (x, 0)),
2866 force_reg (Pmode, XEXP (x, 1)));
2868 reg = force_reg (Pmode, x);
2871 else if (SPE_VECTOR_MODE (mode)
2872 || (TARGET_E500_DOUBLE && (mode == DFmode
2873 || mode == DImode)))
2877 /* We accept [reg + reg] and [reg + OFFSET]. */
2879 if (GET_CODE (x) == PLUS)
2881 rtx op1 = XEXP (x, 0);
2882 rtx op2 = XEXP (x, 1);
2884 op1 = force_reg (Pmode, op1);
2886 if (GET_CODE (op2) != REG
2887 && (GET_CODE (op2) != CONST_INT
2888 || !SPE_CONST_OFFSET_OK (INTVAL (op2))))
2889 op2 = force_reg (Pmode, op2);
2891 return gen_rtx_PLUS (Pmode, op1, op2);
2894 return force_reg (Pmode, x);
2900 && GET_CODE (x) != CONST_INT
2901 && GET_CODE (x) != CONST_DOUBLE
2903 && GET_MODE_NUNITS (mode) == 1
2904 && (GET_MODE_BITSIZE (mode) <= 32
2905 || ((TARGET_HARD_FLOAT && TARGET_FPRS) && mode == DFmode)))
2907 rtx reg = gen_reg_rtx (Pmode);
2908 emit_insn (gen_elf_high (reg, x));
2909 return gen_rtx_LO_SUM (Pmode, reg, x);
2911 else if (TARGET_MACHO && TARGET_32BIT && TARGET_NO_TOC
2914 && ! MACHO_DYNAMIC_NO_PIC_P
2916 && GET_CODE (x) != CONST_INT
2917 && GET_CODE (x) != CONST_DOUBLE
2919 && ((TARGET_HARD_FLOAT && TARGET_FPRS) || mode != DFmode)
2923 rtx reg = gen_reg_rtx (Pmode);
2924 emit_insn (gen_macho_high (reg, x));
2925 return gen_rtx_LO_SUM (Pmode, reg, x);
2928 && constant_pool_expr_p (x)
2929 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), Pmode))
2931 return create_TOC_reference (x);
2937 /* This is called from dwarf2out.c via TARGET_ASM_OUTPUT_DWARF_DTPREL.
2938 We need to emit DTP-relative relocations. */
2941 rs6000_output_dwarf_dtprel (FILE *file, int size, rtx x)
2946 fputs ("\t.long\t", file);
2949 fputs (DOUBLE_INT_ASM_OP, file);
2954 output_addr_const (file, x);
2955 fputs ("@dtprel+0x8000", file);
2958 /* Construct the SYMBOL_REF for the tls_get_addr function. */
2960 static GTY(()) rtx rs6000_tls_symbol;
2962 rs6000_tls_get_addr (void)
2964 if (!rs6000_tls_symbol)
2965 rs6000_tls_symbol = init_one_libfunc ("__tls_get_addr");
2967 return rs6000_tls_symbol;
2970 /* Construct the SYMBOL_REF for TLS GOT references. */
2972 static GTY(()) rtx rs6000_got_symbol;
2974 rs6000_got_sym (void)
2976 if (!rs6000_got_symbol)
2978 rs6000_got_symbol = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
2979 SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_LOCAL;
2980 SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_EXTERNAL;
2983 return rs6000_got_symbol;
2986 /* ADDR contains a thread-local SYMBOL_REF. Generate code to compute
2987 this (thread-local) address. */
2990 rs6000_legitimize_tls_address (rtx addr, enum tls_model model)
2994 dest = gen_reg_rtx (Pmode);
2995 if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 16)
3001 tlsreg = gen_rtx_REG (Pmode, 13);
3002 insn = gen_tls_tprel_64 (dest, tlsreg, addr);
3006 tlsreg = gen_rtx_REG (Pmode, 2);
3007 insn = gen_tls_tprel_32 (dest, tlsreg, addr);
3011 else if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 32)
3015 tmp = gen_reg_rtx (Pmode);
3018 tlsreg = gen_rtx_REG (Pmode, 13);
3019 insn = gen_tls_tprel_ha_64 (tmp, tlsreg, addr);
3023 tlsreg = gen_rtx_REG (Pmode, 2);
3024 insn = gen_tls_tprel_ha_32 (tmp, tlsreg, addr);
3028 insn = gen_tls_tprel_lo_64 (dest, tmp, addr);
3030 insn = gen_tls_tprel_lo_32 (dest, tmp, addr);
3035 rtx r3, got, tga, tmp1, tmp2, eqv;
3038 got = gen_rtx_REG (Pmode, TOC_REGISTER);
3042 got = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
3045 rtx gsym = rs6000_got_sym ();
3046 got = gen_reg_rtx (Pmode);
3048 rs6000_emit_move (got, gsym, Pmode);
3051 rtx tempLR, tmp3, mem;
3054 tempLR = gen_reg_rtx (Pmode);
3055 tmp1 = gen_reg_rtx (Pmode);
3056 tmp2 = gen_reg_rtx (Pmode);
3057 tmp3 = gen_reg_rtx (Pmode);
3058 mem = gen_const_mem (Pmode, tmp1);
3060 first = emit_insn (gen_load_toc_v4_PIC_1b (tempLR, gsym));
3061 emit_move_insn (tmp1, tempLR);
3062 emit_move_insn (tmp2, mem);
3063 emit_insn (gen_addsi3 (tmp3, tmp1, tmp2));
3064 last = emit_move_insn (got, tmp3);
3065 REG_NOTES (last) = gen_rtx_EXPR_LIST (REG_EQUAL, gsym,
3067 REG_NOTES (first) = gen_rtx_INSN_LIST (REG_LIBCALL, last,
3069 REG_NOTES (last) = gen_rtx_INSN_LIST (REG_RETVAL, first,
3075 if (model == TLS_MODEL_GLOBAL_DYNAMIC)
3077 r3 = gen_rtx_REG (Pmode, 3);
3079 insn = gen_tls_gd_64 (r3, got, addr);
3081 insn = gen_tls_gd_32 (r3, got, addr);
3084 tga = gen_rtx_MEM (Pmode, rs6000_tls_get_addr ());
3085 insn = gen_call_value (r3, tga, const0_rtx, const0_rtx);
3086 insn = emit_call_insn (insn);
3087 CONST_OR_PURE_CALL_P (insn) = 1;
3088 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
3089 insn = get_insns ();
3091 emit_libcall_block (insn, dest, r3, addr);
3093 else if (model == TLS_MODEL_LOCAL_DYNAMIC)
3095 r3 = gen_rtx_REG (Pmode, 3);
3097 insn = gen_tls_ld_64 (r3, got);
3099 insn = gen_tls_ld_32 (r3, got);
3102 tga = gen_rtx_MEM (Pmode, rs6000_tls_get_addr ());
3103 insn = gen_call_value (r3, tga, const0_rtx, const0_rtx);
3104 insn = emit_call_insn (insn);
3105 CONST_OR_PURE_CALL_P (insn) = 1;
3106 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
3107 insn = get_insns ();
3109 tmp1 = gen_reg_rtx (Pmode);
3110 eqv = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx),
3112 emit_libcall_block (insn, tmp1, r3, eqv);
3113 if (rs6000_tls_size == 16)
3116 insn = gen_tls_dtprel_64 (dest, tmp1, addr);
3118 insn = gen_tls_dtprel_32 (dest, tmp1, addr);
3120 else if (rs6000_tls_size == 32)
3122 tmp2 = gen_reg_rtx (Pmode);
3124 insn = gen_tls_dtprel_ha_64 (tmp2, tmp1, addr);
3126 insn = gen_tls_dtprel_ha_32 (tmp2, tmp1, addr);
3129 insn = gen_tls_dtprel_lo_64 (dest, tmp2, addr);
3131 insn = gen_tls_dtprel_lo_32 (dest, tmp2, addr);
3135 tmp2 = gen_reg_rtx (Pmode);
3137 insn = gen_tls_got_dtprel_64 (tmp2, got, addr);
3139 insn = gen_tls_got_dtprel_32 (tmp2, got, addr);
3141 insn = gen_rtx_SET (Pmode, dest,
3142 gen_rtx_PLUS (Pmode, tmp2, tmp1));
3148 /* IE, or 64 bit offset LE. */
3149 tmp2 = gen_reg_rtx (Pmode);
3151 insn = gen_tls_got_tprel_64 (tmp2, got, addr);
3153 insn = gen_tls_got_tprel_32 (tmp2, got, addr);
3156 insn = gen_tls_tls_64 (dest, tmp2, addr);
3158 insn = gen_tls_tls_32 (dest, tmp2, addr);
3166 /* Return 1 if X contains a thread-local symbol. */
3169 rs6000_tls_referenced_p (rtx x)
3171 if (! TARGET_HAVE_TLS)
3174 return for_each_rtx (&x, &rs6000_tls_symbol_ref_1, 0);
3177 /* Return 1 if *X is a thread-local symbol. This is the same as
3178 rs6000_tls_symbol_ref except for the type of the unused argument. */
3181 rs6000_tls_symbol_ref_1 (rtx *x, void *data ATTRIBUTE_UNUSED)
3183 return RS6000_SYMBOL_REF_TLS_P (*x);
3186 /* The convention appears to be to define this wherever it is used.
3187 With legitimize_reload_address now defined here, REG_MODE_OK_FOR_BASE_P
3188 is now used here. */
3189 #ifndef REG_MODE_OK_FOR_BASE_P
3190 #define REG_MODE_OK_FOR_BASE_P(REGNO, MODE) REG_OK_FOR_BASE_P (REGNO)
3193 /* Our implementation of LEGITIMIZE_RELOAD_ADDRESS. Returns a value to
3194 replace the input X, or the original X if no replacement is called for.
3195 The output parameter *WIN is 1 if the calling macro should goto WIN,
3198 For RS/6000, we wish to handle large displacements off a base
3199 register by splitting the addend across an addiu/addis and the mem insn.
3200 This cuts number of extra insns needed from 3 to 1.
3202 On Darwin, we use this to generate code for floating point constants.
3203 A movsf_low is generated so we wind up with 2 instructions rather than 3.
3204 The Darwin code is inside #if TARGET_MACHO because only then is
3205 machopic_function_base_name() defined. */
3207 rs6000_legitimize_reload_address (rtx x, enum machine_mode mode,
3208 int opnum, int type,
3209 int ind_levels ATTRIBUTE_UNUSED, int *win)
3211 /* We must recognize output that we have already generated ourselves. */
3212 if (GET_CODE (x) == PLUS
3213 && GET_CODE (XEXP (x, 0)) == PLUS
3214 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
3215 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3216 && GET_CODE (XEXP (x, 1)) == CONST_INT)
3218 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
3219 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
3220 opnum, (enum reload_type)type);
3226 if (DEFAULT_ABI == ABI_DARWIN && flag_pic
3227 && GET_CODE (x) == LO_SUM
3228 && GET_CODE (XEXP (x, 0)) == PLUS
3229 && XEXP (XEXP (x, 0), 0) == pic_offset_table_rtx
3230 && GET_CODE (XEXP (XEXP (x, 0), 1)) == HIGH
3231 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 1), 0)) == CONST
3232 && XEXP (XEXP (XEXP (x, 0), 1), 0) == XEXP (x, 1)
3233 && GET_CODE (XEXP (XEXP (x, 1), 0)) == MINUS
3234 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == SYMBOL_REF
3235 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == SYMBOL_REF)
3237 /* Result of previous invocation of this function on Darwin
3238 floating point constant. */
3239 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
3240 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
3241 opnum, (enum reload_type)type);
3247 /* Force ld/std non-word aligned offset into base register by wrapping
3249 if (GET_CODE (x) == PLUS
3250 && GET_CODE (XEXP (x, 0)) == REG
3251 && REGNO (XEXP (x, 0)) < 32
3252 && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
3253 && GET_CODE (XEXP (x, 1)) == CONST_INT
3254 && (INTVAL (XEXP (x, 1)) & 3) != 0
3255 && !ALTIVEC_VECTOR_MODE (mode)
3256 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD
3257 && TARGET_POWERPC64)
3259 x = gen_rtx_PLUS (GET_MODE (x), x, GEN_INT (0));
3260 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
3261 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
3262 opnum, (enum reload_type) type);
3267 if (GET_CODE (x) == PLUS
3268 && GET_CODE (XEXP (x, 0)) == REG
3269 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
3270 && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
3271 && GET_CODE (XEXP (x, 1)) == CONST_INT
3272 && !SPE_VECTOR_MODE (mode)
3273 && !(TARGET_E500_DOUBLE && (mode == DFmode
3275 && !ALTIVEC_VECTOR_MODE (mode))
3277 HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
3278 HOST_WIDE_INT low = ((val & 0xffff) ^ 0x8000) - 0x8000;
3280 = (((val - low) & 0xffffffff) ^ 0x80000000) - 0x80000000;
3282 /* Check for 32-bit overflow. */
3283 if (high + low != val)
3289 /* Reload the high part into a base reg; leave the low part
3290 in the mem directly. */
3292 x = gen_rtx_PLUS (GET_MODE (x),
3293 gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0),
3297 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
3298 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
3299 opnum, (enum reload_type)type);
3305 if (GET_CODE (x) == SYMBOL_REF
3306 && DEFAULT_ABI == ABI_DARWIN
3307 && !ALTIVEC_VECTOR_MODE (mode)
3308 && (flag_pic || MACHO_DYNAMIC_NO_PIC_P)
3309 /* Don't do this for TFmode, since the result isn't offsettable.
3310 The same goes for DImode without 64-bit gprs. */
3312 && (mode != DImode || TARGET_POWERPC64))
3316 rtx offset = gen_rtx_CONST (Pmode,
3317 gen_rtx_MINUS (Pmode, x,
3318 machopic_function_base_sym ()));
3319 x = gen_rtx_LO_SUM (GET_MODE (x),
3320 gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
3321 gen_rtx_HIGH (Pmode, offset)), offset);
3324 x = gen_rtx_LO_SUM (GET_MODE (x),
3325 gen_rtx_HIGH (Pmode, x), x);
3327 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
3328 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
3329 opnum, (enum reload_type)type);
3336 && constant_pool_expr_p (x)
3337 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), mode))
3339 (x) = create_TOC_reference (x);
3347 /* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
3348 that is a valid memory address for an instruction.
3349 The MODE argument is the machine mode for the MEM expression
3350 that wants to use this address.
3352 On the RS/6000, there are four valid address: a SYMBOL_REF that
3353 refers to a constant pool entry of an address (or the sum of it
3354 plus a constant), a short (16-bit signed) constant plus a register,
3355 the sum of two registers, or a register indirect, possibly with an
3356 auto-increment. For DFmode and DImode with a constant plus register,
3357 we must ensure that both words are addressable or PowerPC64 with offset
3360 For modes spanning multiple registers (DFmode in 32-bit GPRs,
3361 32-bit DImode, TImode, TFmode), indexed addressing cannot be used because
3362 adjacent memory cells are accessed by adding word-sized offsets
3363 during assembly output. */
3365 rs6000_legitimate_address (enum machine_mode mode, rtx x, int reg_ok_strict)
3367 /* If this is an unaligned stvx/ldvx type address, discard the outer AND. */
3369 && ALTIVEC_VECTOR_MODE (mode)
3370 && GET_CODE (x) == AND
3371 && GET_CODE (XEXP (x, 1)) == CONST_INT
3372 && INTVAL (XEXP (x, 1)) == -16)
3375 if (RS6000_SYMBOL_REF_TLS_P (x))
3377 if (legitimate_indirect_address_p (x, reg_ok_strict))
3379 if ((GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
3380 && !ALTIVEC_VECTOR_MODE (mode)
3381 && !SPE_VECTOR_MODE (mode)
3382 /* Restrict addressing for DI because of our SUBREG hackery. */
3383 && !(TARGET_E500_DOUBLE && (mode == DFmode || mode == DImode))
3385 && legitimate_indirect_address_p (XEXP (x, 0), reg_ok_strict))
3387 if (legitimate_small_data_p (mode, x))
3389 if (legitimate_constant_pool_address_p (x))
3391 /* If not REG_OK_STRICT (before reload) let pass any stack offset. */
3393 && GET_CODE (x) == PLUS
3394 && GET_CODE (XEXP (x, 0)) == REG
3395 && (XEXP (x, 0) == virtual_stack_vars_rtx
3396 || XEXP (x, 0) == arg_pointer_rtx)
3397 && GET_CODE (XEXP (x, 1)) == CONST_INT)
3399 if (rs6000_legitimate_offset_address_p (mode, x, reg_ok_strict))
3403 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
3405 || ((mode != DFmode || TARGET_E500_DOUBLE) && mode != TFmode))
3406 && (TARGET_POWERPC64 || mode != DImode)
3407 && legitimate_indexed_address_p (x, reg_ok_strict))
3409 if (legitimate_lo_sum_address_p (mode, x, reg_ok_strict))
3414 /* Go to LABEL if ADDR (a legitimate address expression)
3415 has an effect that depends on the machine mode it is used for.
3417 On the RS/6000 this is true of all integral offsets (since AltiVec
3418 modes don't allow them) or is a pre-increment or decrement.
3420 ??? Except that due to conceptual problems in offsettable_address_p
3421 we can't really report the problems of integral offsets. So leave
3422 this assuming that the adjustable offset must be valid for the
3423 sub-words of a TFmode operand, which is what we had before. */
3426 rs6000_mode_dependent_address (rtx addr)
3428 switch (GET_CODE (addr))
3431 if (GET_CODE (XEXP (addr, 1)) == CONST_INT)
3433 unsigned HOST_WIDE_INT val = INTVAL (XEXP (addr, 1));
3434 return val + 12 + 0x8000 >= 0x10000;
3443 return TARGET_UPDATE;
3452 /* Return number of consecutive hard regs needed starting at reg REGNO
3453 to hold something of mode MODE.
3454 This is ordinarily the length in words of a value of mode MODE
3455 but can be less for certain modes in special long registers.
3457 For the SPE, GPRs are 64 bits but only 32 bits are visible in
3458 scalar instructions. The upper 32 bits are only available to the
3461 POWER and PowerPC GPRs hold 32 bits worth;
3462 PowerPC64 GPRs and FPRs point register holds 64 bits worth. */
3465 rs6000_hard_regno_nregs (int regno, enum machine_mode mode)
3467 if (FP_REGNO_P (regno))
3468 return (GET_MODE_SIZE (mode) + UNITS_PER_FP_WORD - 1) / UNITS_PER_FP_WORD;
3470 if (TARGET_E500_DOUBLE && mode == DFmode)
3473 if (SPE_SIMD_REGNO_P (regno) && TARGET_SPE && SPE_VECTOR_MODE (mode))
3474 return (GET_MODE_SIZE (mode) + UNITS_PER_SPE_WORD - 1) / UNITS_PER_SPE_WORD;
3476 if (ALTIVEC_REGNO_P (regno))
3478 (GET_MODE_SIZE (mode) + UNITS_PER_ALTIVEC_WORD - 1) / UNITS_PER_ALTIVEC_WORD;
3480 return (GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
3483 /* Change register usage conditional on target flags. */
3485 rs6000_conditional_register_usage (void)
3489 /* Set MQ register fixed (already call_used) if not POWER
3490 architecture (RIOS1, RIOS2, RSC, and PPC601) so that it will not
3495 /* 64-bit AIX and Linux reserve GPR13 for thread-private data. */
3497 fixed_regs[13] = call_used_regs[13]
3498 = call_really_used_regs[13] = 1;
3500 /* Conditionally disable FPRs. */
3501 if (TARGET_SOFT_FLOAT || !TARGET_FPRS)
3502 for (i = 32; i < 64; i++)
3503 fixed_regs[i] = call_used_regs[i]
3504 = call_really_used_regs[i] = 1;
3506 /* The TOC register is not killed across calls in a way that is
3507 visible to the compiler. */
3508 if (DEFAULT_ABI == ABI_AIX)
3509 call_really_used_regs[2] = 0;
3511 if (DEFAULT_ABI == ABI_V4
3512 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
3514 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
3516 if (DEFAULT_ABI == ABI_V4
3517 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
3519 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
3520 = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
3521 = call_really_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
3523 if (DEFAULT_ABI == ABI_DARWIN
3524 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
3525 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
3526 = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
3527 = call_really_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
3529 if (TARGET_TOC && TARGET_MINIMAL_TOC)
3530 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
3531 = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
3534 global_regs[VSCR_REGNO] = 1;
3538 global_regs[SPEFSCR_REGNO] = 1;
3539 fixed_regs[FIXED_SCRATCH]
3540 = call_used_regs[FIXED_SCRATCH]
3541 = call_really_used_regs[FIXED_SCRATCH] = 1;
3544 if (! TARGET_ALTIVEC)
3546 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
3547 fixed_regs[i] = call_used_regs[i] = call_really_used_regs[i] = 1;
3548 call_really_used_regs[VRSAVE_REGNO] = 1;
3551 if (TARGET_ALTIVEC_ABI)
3552 for (i = FIRST_ALTIVEC_REGNO; i < FIRST_ALTIVEC_REGNO + 20; ++i)
3553 call_used_regs[i] = call_really_used_regs[i] = 1;
3556 /* Try to output insns to set TARGET equal to the constant C if it can
3557 be done in less than N insns. Do all computations in MODE.
3558 Returns the place where the output has been placed if it can be
3559 done and the insns have been emitted. If it would take more than N
3560 insns, zero is returned and no insns and emitted. */
3563 rs6000_emit_set_const (rtx dest, enum machine_mode mode,
3564 rtx source, int n ATTRIBUTE_UNUSED)
3566 rtx result, insn, set;
3567 HOST_WIDE_INT c0, c1;
3574 dest = gen_reg_rtx (mode);
3575 emit_insn (gen_rtx_SET (VOIDmode, dest, source));
3579 result = no_new_pseudos ? dest : gen_reg_rtx (SImode);
3581 emit_insn (gen_rtx_SET (VOIDmode, result,
3582 GEN_INT (INTVAL (source)
3583 & (~ (HOST_WIDE_INT) 0xffff))));
3584 emit_insn (gen_rtx_SET (VOIDmode, dest,
3585 gen_rtx_IOR (SImode, result,
3586 GEN_INT (INTVAL (source) & 0xffff))));
3591 switch (GET_CODE (source))
3594 c0 = INTVAL (source);
3599 #if HOST_BITS_PER_WIDE_INT >= 64
3600 c0 = CONST_DOUBLE_LOW (source);
3603 c0 = CONST_DOUBLE_LOW (source);
3604 c1 = CONST_DOUBLE_HIGH (source);
3612 result = rs6000_emit_set_long_const (dest, c0, c1);
3619 insn = get_last_insn ();
3620 set = single_set (insn);
3621 if (! CONSTANT_P (SET_SRC (set)))
3622 set_unique_reg_note (insn, REG_EQUAL, source);
3627 /* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
3628 fall back to a straight forward decomposition. We do this to avoid
3629 exponential run times encountered when looking for longer sequences
3630 with rs6000_emit_set_const. */
3632 rs6000_emit_set_long_const (rtx dest, HOST_WIDE_INT c1, HOST_WIDE_INT c2)
3634 if (!TARGET_POWERPC64)
3636 rtx operand1, operand2;
3638 operand1 = operand_subword_force (dest, WORDS_BIG_ENDIAN == 0,
3640 operand2 = operand_subword_force (dest, WORDS_BIG_ENDIAN != 0,
3642 emit_move_insn (operand1, GEN_INT (c1));
3643 emit_move_insn (operand2, GEN_INT (c2));
3647 HOST_WIDE_INT ud1, ud2, ud3, ud4;
3650 ud2 = (c1 & 0xffff0000) >> 16;
3651 #if HOST_BITS_PER_WIDE_INT >= 64
3655 ud4 = (c2 & 0xffff0000) >> 16;
3657 if ((ud4 == 0xffff && ud3 == 0xffff && ud2 == 0xffff && (ud1 & 0x8000))
3658 || (ud4 == 0 && ud3 == 0 && ud2 == 0 && ! (ud1 & 0x8000)))
3661 emit_move_insn (dest, GEN_INT (((ud1 ^ 0x8000) - 0x8000)));
3663 emit_move_insn (dest, GEN_INT (ud1));
3666 else if ((ud4 == 0xffff && ud3 == 0xffff && (ud2 & 0x8000))
3667 || (ud4 == 0 && ud3 == 0 && ! (ud2 & 0x8000)))
3670 emit_move_insn (dest, GEN_INT (((ud2 << 16) ^ 0x80000000)
3673 emit_move_insn (dest, GEN_INT (ud2 << 16));
3675 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
3677 else if ((ud4 == 0xffff && (ud3 & 0x8000))
3678 || (ud4 == 0 && ! (ud3 & 0x8000)))
3681 emit_move_insn (dest, GEN_INT (((ud3 << 16) ^ 0x80000000)
3684 emit_move_insn (dest, GEN_INT (ud3 << 16));
3687 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud2)));
3688 emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (16)));
3690 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
3695 emit_move_insn (dest, GEN_INT (((ud4 << 16) ^ 0x80000000)
3698 emit_move_insn (dest, GEN_INT (ud4 << 16));
3701 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud3)));
3703 emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (32)));
3705 emit_move_insn (dest, gen_rtx_IOR (DImode, dest,
3706 GEN_INT (ud2 << 16)));
3708 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
3714 /* Helper for the following. Get rid of [r+r] memory refs
3715 in cases where it won't work (TImode, TFmode). */
3718 rs6000_eliminate_indexed_memrefs (rtx operands[2])
3720 if (GET_CODE (operands[0]) == MEM
3721 && GET_CODE (XEXP (operands[0], 0)) != REG
3722 && ! legitimate_constant_pool_address_p (XEXP (operands[0], 0))
3723 && ! reload_in_progress)
3725 = replace_equiv_address (operands[0],
3726 copy_addr_to_reg (XEXP (operands[0], 0)));
3728 if (GET_CODE (operands[1]) == MEM
3729 && GET_CODE (XEXP (operands[1], 0)) != REG
3730 && ! legitimate_constant_pool_address_p (XEXP (operands[1], 0))
3731 && ! reload_in_progress)
3733 = replace_equiv_address (operands[1],
3734 copy_addr_to_reg (XEXP (operands[1], 0)));
3737 /* Emit a move from SOURCE to DEST in mode MODE. */
3739 rs6000_emit_move (rtx dest, rtx source, enum machine_mode mode)
3743 operands[1] = source;
3745 /* Sanity checks. Check that we get CONST_DOUBLE only when we should. */
3746 if (GET_CODE (operands[1]) == CONST_DOUBLE
3747 && ! FLOAT_MODE_P (mode)
3748 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3750 /* FIXME. This should never happen. */
3751 /* Since it seems that it does, do the safe thing and convert
3753 operands[1] = gen_int_mode (CONST_DOUBLE_LOW (operands[1]), mode);
3755 gcc_assert (GET_CODE (operands[1]) != CONST_DOUBLE
3756 || FLOAT_MODE_P (mode)
3757 || ((CONST_DOUBLE_HIGH (operands[1]) != 0
3758 || CONST_DOUBLE_LOW (operands[1]) < 0)
3759 && (CONST_DOUBLE_HIGH (operands[1]) != -1
3760 || CONST_DOUBLE_LOW (operands[1]) >= 0)));
3762 /* Check if GCC is setting up a block move that will end up using FP
3763 registers as temporaries. We must make sure this is acceptable. */
3764 if (GET_CODE (operands[0]) == MEM
3765 && GET_CODE (operands[1]) == MEM
3767 && (SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[0]))
3768 || SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[1])))
3769 && ! (SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[0]) > 32
3770 ? 32 : MEM_ALIGN (operands[0])))
3771 || SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[1]) > 32
3773 : MEM_ALIGN (operands[1]))))
3774 && ! MEM_VOLATILE_P (operands [0])
3775 && ! MEM_VOLATILE_P (operands [1]))
3777 emit_move_insn (adjust_address (operands[0], SImode, 0),
3778 adjust_address (operands[1], SImode, 0));
3779 emit_move_insn (adjust_address (operands[0], SImode, 4),
3780 adjust_address (operands[1], SImode, 4));
3784 if (!no_new_pseudos && GET_CODE (operands[0]) == MEM
3785 && !gpc_reg_operand (operands[1], mode))
3786 operands[1] = force_reg (mode, operands[1]);
3788 if (mode == SFmode && ! TARGET_POWERPC
3789 && TARGET_HARD_FLOAT && TARGET_FPRS
3790 && GET_CODE (operands[0]) == MEM)
3794 if (reload_in_progress || reload_completed)
3795 regnum = true_regnum (operands[1]);
3796 else if (GET_CODE (operands[1]) == REG)
3797 regnum = REGNO (operands[1]);
3801 /* If operands[1] is a register, on POWER it may have
3802 double-precision data in it, so truncate it to single
3804 if (FP_REGNO_P (regnum) || regnum >= FIRST_PSEUDO_REGISTER)
3807 newreg = (no_new_pseudos ? operands[1] : gen_reg_rtx (mode));
3808 emit_insn (gen_aux_truncdfsf2 (newreg, operands[1]));
3809 operands[1] = newreg;
3813 /* Recognize the case where operand[1] is a reference to thread-local
3814 data and load its address to a register. */
3815 if (rs6000_tls_referenced_p (operands[1]))
3817 enum tls_model model;
3818 rtx tmp = operands[1];
3821 if (GET_CODE (tmp) == CONST && GET_CODE (XEXP (tmp, 0)) == PLUS)
3823 addend = XEXP (XEXP (tmp, 0), 1);
3824 tmp = XEXP (XEXP (tmp, 0), 0);
3827 gcc_assert (GET_CODE (tmp) == SYMBOL_REF);
3828 model = SYMBOL_REF_TLS_MODEL (tmp);
3829 gcc_assert (model != 0);
3831 tmp = rs6000_legitimize_tls_address (tmp, model);
3834 tmp = gen_rtx_PLUS (mode, tmp, addend);
3835 tmp = force_operand (tmp, operands[0]);
3840 /* Handle the case where reload calls us with an invalid address. */
3841 if (reload_in_progress && mode == Pmode
3842 && (! general_operand (operands[1], mode)
3843 || ! nonimmediate_operand (operands[0], mode)))
3846 /* 128-bit constant floating-point values on Darwin should really be
3847 loaded as two parts. */
3848 if ((DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_DARWIN)
3849 && TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_LONG_DOUBLE_128
3850 && mode == TFmode && GET_CODE (operands[1]) == CONST_DOUBLE)
3852 /* DImode is used, not DFmode, because simplify_gen_subreg doesn't
3853 know how to get a DFmode SUBREG of a TFmode. */
3854 rs6000_emit_move (simplify_gen_subreg (DImode, operands[0], mode, 0),
3855 simplify_gen_subreg (DImode, operands[1], mode, 0),
3857 rs6000_emit_move (simplify_gen_subreg (DImode, operands[0], mode,
3858 GET_MODE_SIZE (DImode)),
3859 simplify_gen_subreg (DImode, operands[1], mode,
3860 GET_MODE_SIZE (DImode)),
3865 /* FIXME: In the long term, this switch statement should go away
3866 and be replaced by a sequence of tests based on things like
3872 if (CONSTANT_P (operands[1])
3873 && GET_CODE (operands[1]) != CONST_INT)
3874 operands[1] = force_const_mem (mode, operands[1]);
3878 rs6000_eliminate_indexed_memrefs (operands);
3883 if (CONSTANT_P (operands[1])
3884 && ! easy_fp_constant (operands[1], mode))
3885 operands[1] = force_const_mem (mode, operands[1]);
3896 if (CONSTANT_P (operands[1])
3897 && !easy_vector_constant (operands[1], mode))
3898 operands[1] = force_const_mem (mode, operands[1]);
3903 /* Use default pattern for address of ELF small data */
3906 && DEFAULT_ABI == ABI_V4
3907 && (GET_CODE (operands[1]) == SYMBOL_REF
3908 || GET_CODE (operands[1]) == CONST)
3909 && small_data_operand (operands[1], mode))
3911 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
3915 if (DEFAULT_ABI == ABI_V4
3916 && mode == Pmode && mode == SImode
3917 && flag_pic == 1 && got_operand (operands[1], mode))
3919 emit_insn (gen_movsi_got (operands[0], operands[1]));
3923 if ((TARGET_ELF || DEFAULT_ABI == ABI_DARWIN)
3927 && CONSTANT_P (operands[1])
3928 && GET_CODE (operands[1]) != HIGH
3929 && GET_CODE (operands[1]) != CONST_INT)
3931 rtx target = (no_new_pseudos ? operands[0] : gen_reg_rtx (mode));
3933 /* If this is a function address on -mcall-aixdesc,
3934 convert it to the address of the descriptor. */
3935 if (DEFAULT_ABI == ABI_AIX
3936 && GET_CODE (operands[1]) == SYMBOL_REF
3937 && XSTR (operands[1], 0)[0] == '.')
3939 const char *name = XSTR (operands[1], 0);
3941 while (*name == '.')
3943 new_ref = gen_rtx_SYMBOL_REF (Pmode, name);
3944 CONSTANT_POOL_ADDRESS_P (new_ref)
3945 = CONSTANT_POOL_ADDRESS_P (operands[1]);
3946 SYMBOL_REF_FLAGS (new_ref) = SYMBOL_REF_FLAGS (operands[1]);
3947 SYMBOL_REF_USED (new_ref) = SYMBOL_REF_USED (operands[1]);
3948 SYMBOL_REF_DECL (new_ref) = SYMBOL_REF_DECL (operands[1]);
3949 operands[1] = new_ref;
3952 if (DEFAULT_ABI == ABI_DARWIN)
3955 if (MACHO_DYNAMIC_NO_PIC_P)
3957 /* Take care of any required data indirection. */
3958 operands[1] = rs6000_machopic_legitimize_pic_address (
3959 operands[1], mode, operands[0]);
3960 if (operands[0] != operands[1])
3961 emit_insn (gen_rtx_SET (VOIDmode,
3962 operands[0], operands[1]));
3966 emit_insn (gen_macho_high (target, operands[1]));
3967 emit_insn (gen_macho_low (operands[0], target, operands[1]));
3971 emit_insn (gen_elf_high (target, operands[1]));
3972 emit_insn (gen_elf_low (operands[0], target, operands[1]));
3976 /* If this is a SYMBOL_REF that refers to a constant pool entry,
3977 and we have put it in the TOC, we just need to make a TOC-relative
3980 && GET_CODE (operands[1]) == SYMBOL_REF
3981 && constant_pool_expr_p (operands[1])
3982 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands[1]),
3983 get_pool_mode (operands[1])))
3985 operands[1] = create_TOC_reference (operands[1]);
3987 else if (mode == Pmode
3988 && CONSTANT_P (operands[1])
3989 && ((GET_CODE (operands[1]) != CONST_INT
3990 && ! easy_fp_constant (operands[1], mode))
3991 || (GET_CODE (operands[1]) == CONST_INT
3992 && num_insns_constant (operands[1], mode) > 2)
3993 || (GET_CODE (operands[0]) == REG
3994 && FP_REGNO_P (REGNO (operands[0]))))
3995 && GET_CODE (operands[1]) != HIGH
3996 && ! legitimate_constant_pool_address_p (operands[1])
3997 && ! toc_relative_expr_p (operands[1]))
3999 /* Emit a USE operation so that the constant isn't deleted if
4000 expensive optimizations are turned on because nobody
4001 references it. This should only be done for operands that
4002 contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
4003 This should not be done for operands that contain LABEL_REFs.
4004 For now, we just handle the obvious case. */
4005 if (GET_CODE (operands[1]) != LABEL_REF)
4006 emit_insn (gen_rtx_USE (VOIDmode, operands[1]));
4009 /* Darwin uses a special PIC legitimizer. */
4010 if (DEFAULT_ABI == ABI_DARWIN && MACHOPIC_INDIRECT)
4013 rs6000_machopic_legitimize_pic_address (operands[1], mode,
4015 if (operands[0] != operands[1])
4016 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
4021 /* If we are to limit the number of things we put in the TOC and
4022 this is a symbol plus a constant we can add in one insn,
4023 just put the symbol in the TOC and add the constant. Don't do
4024 this if reload is in progress. */
4025 if (GET_CODE (operands[1]) == CONST
4026 && TARGET_NO_SUM_IN_TOC && ! reload_in_progress
4027 && GET_CODE (XEXP (operands[1], 0)) == PLUS
4028 && add_operand (XEXP (XEXP (operands[1], 0), 1), mode)
4029 && (GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == LABEL_REF
4030 || GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF)
4031 && ! side_effects_p (operands[0]))
4034 force_const_mem (mode, XEXP (XEXP (operands[1], 0), 0));
4035 rtx other = XEXP (XEXP (operands[1], 0), 1);
4037 sym = force_reg (mode, sym);
4039 emit_insn (gen_addsi3 (operands[0], sym, other));
4041 emit_insn (gen_adddi3 (operands[0], sym, other));
4045 operands[1] = force_const_mem (mode, operands[1]);
4048 && constant_pool_expr_p (XEXP (operands[1], 0))
4049 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
4050 get_pool_constant (XEXP (operands[1], 0)),
4051 get_pool_mode (XEXP (operands[1], 0))))
4054 = gen_const_mem (mode,
4055 create_TOC_reference (XEXP (operands[1], 0)));
4056 set_mem_alias_set (operands[1], get_TOC_alias_set ());
4062 rs6000_eliminate_indexed_memrefs (operands);
4066 emit_insn (gen_rtx_PARALLEL (VOIDmode,
4068 gen_rtx_SET (VOIDmode,
4069 operands[0], operands[1]),
4070 gen_rtx_CLOBBER (VOIDmode,
4071 gen_rtx_SCRATCH (SImode)))));
4080 /* Above, we may have called force_const_mem which may have returned
4081 an invalid address. If we can, fix this up; otherwise, reload will
4082 have to deal with it. */
4083 if (GET_CODE (operands[1]) == MEM && ! reload_in_progress)
4084 operands[1] = validize_mem (operands[1]);
4087 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
4090 /* Nonzero if we can use a floating-point register to pass this arg. */
4091 #define USE_FP_FOR_ARG_P(CUM,MODE,TYPE) \
4092 (GET_MODE_CLASS (MODE) == MODE_FLOAT \
4093 && (CUM)->fregno <= FP_ARG_MAX_REG \
4094 && TARGET_HARD_FLOAT && TARGET_FPRS)
4096 /* Nonzero if we can use an AltiVec register to pass this arg. */
4097 #define USE_ALTIVEC_FOR_ARG_P(CUM,MODE,TYPE,NAMED) \
4098 (ALTIVEC_VECTOR_MODE (MODE) \
4099 && (CUM)->vregno <= ALTIVEC_ARG_MAX_REG \
4100 && TARGET_ALTIVEC_ABI \
4103 /* Return a nonzero value to say to return the function value in
4104 memory, just as large structures are always returned. TYPE will be
4105 the data type of the value, and FNTYPE will be the type of the
4106 function doing the returning, or @code{NULL} for libcalls.
4108 The AIX ABI for the RS/6000 specifies that all structures are
4109 returned in memory. The Darwin ABI does the same. The SVR4 ABI
4110 specifies that structures <= 8 bytes are returned in r3/r4, but a
4111 draft put them in memory, and GCC used to implement the draft
4112 instead of the final standard. Therefore, aix_struct_return
4113 controls this instead of DEFAULT_ABI; V.4 targets needing backward
4114 compatibility can change DRAFT_V4_STRUCT_RET to override the
4115 default, and -m switches get the final word. See
4116 rs6000_override_options for more details.
4118 The PPC32 SVR4 ABI uses IEEE double extended for long double, if 128-bit
4119 long double support is enabled. These values are returned in memory.
4121 int_size_in_bytes returns -1 for variable size objects, which go in
4122 memory always. The cast to unsigned makes -1 > 8. */
4125 rs6000_return_in_memory (tree type, tree fntype ATTRIBUTE_UNUSED)
4127 /* In the darwin64 abi, try to use registers for larger structs
4129 if (rs6000_darwin64_abi
4130 && TREE_CODE (type) == RECORD_TYPE
4131 && int_size_in_bytes (type) > 0)
4133 CUMULATIVE_ARGS valcum;
4137 valcum.fregno = FP_ARG_MIN_REG;
4138 valcum.vregno = ALTIVEC_ARG_MIN_REG;
4139 /* Do a trial code generation as if this were going to be passed
4140 as an argument; if any part goes in memory, we return NULL. */
4141 valret = rs6000_darwin64_record_arg (&valcum, type, 1, true);
4144 /* Otherwise fall through to more conventional ABI rules. */
4147 if (AGGREGATE_TYPE_P (type)
4148 && (aix_struct_return
4149 || (unsigned HOST_WIDE_INT) int_size_in_bytes (type) > 8))
4152 /* Allow -maltivec -mabi=no-altivec without warning. Altivec vector
4153 modes only exist for GCC vector types if -maltivec. */
4154 if (TARGET_32BIT && !TARGET_ALTIVEC_ABI
4155 && ALTIVEC_VECTOR_MODE (TYPE_MODE (type)))
4158 /* Return synthetic vectors in memory. */
4159 if (TREE_CODE (type) == VECTOR_TYPE
4160 && int_size_in_bytes (type) > (TARGET_ALTIVEC_ABI ? 16 : 8))
4162 static bool warned_for_return_big_vectors = false;
4163 if (!warned_for_return_big_vectors)
4165 warning (0, "GCC vector returned by reference: "
4166 "non-standard ABI extension with no compatibility guarantee");
4167 warned_for_return_big_vectors = true;
4172 if (DEFAULT_ABI == ABI_V4 && TYPE_MODE (type) == TFmode)
4178 /* Initialize a variable CUM of type CUMULATIVE_ARGS
4179 for a call to a function whose data type is FNTYPE.
4180 For a library call, FNTYPE is 0.
4182 For incoming args we set the number of arguments in the prototype large
4183 so we never return a PARALLEL. */
4186 init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype,
4187 rtx libname ATTRIBUTE_UNUSED, int incoming,
4188 int libcall, int n_named_args)
4190 static CUMULATIVE_ARGS zero_cumulative;
4192 *cum = zero_cumulative;
4194 cum->fregno = FP_ARG_MIN_REG;
4195 cum->vregno = ALTIVEC_ARG_MIN_REG;
4196 cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
4197 cum->call_cookie = ((DEFAULT_ABI == ABI_V4 && libcall)
4198 ? CALL_LIBCALL : CALL_NORMAL);
4199 cum->sysv_gregno = GP_ARG_MIN_REG;
4200 cum->stdarg = fntype
4201 && (TYPE_ARG_TYPES (fntype) != 0
4202 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
4203 != void_type_node));
4205 cum->nargs_prototype = 0;
4206 if (incoming || cum->prototype)
4207 cum->nargs_prototype = n_named_args;
4209 /* Check for a longcall attribute. */
4210 if ((!fntype && rs6000_default_long_calls)
4212 && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype))
4213 && !lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype))))
4214 cum->call_cookie |= CALL_LONG;
4216 if (TARGET_DEBUG_ARG)
4218 fprintf (stderr, "\ninit_cumulative_args:");
4221 tree ret_type = TREE_TYPE (fntype);
4222 fprintf (stderr, " ret code = %s,",
4223 tree_code_name[ (int)TREE_CODE (ret_type) ]);
4226 if (cum->call_cookie & CALL_LONG)
4227 fprintf (stderr, " longcall,");
4229 fprintf (stderr, " proto = %d, nargs = %d\n",
4230 cum->prototype, cum->nargs_prototype);
4235 && TARGET_ALTIVEC_ABI
4236 && ALTIVEC_VECTOR_MODE (TYPE_MODE (TREE_TYPE (fntype))))
4238 error ("cannot return value in vector register because"
4239 " altivec instructions are disabled, use -maltivec"
4244 /* Return true if TYPE must be passed on the stack and not in registers. */
4247 rs6000_must_pass_in_stack (enum machine_mode mode, tree type)
4249 if (DEFAULT_ABI == ABI_AIX || TARGET_64BIT)
4250 return must_pass_in_stack_var_size (mode, type);
4252 return must_pass_in_stack_var_size_or_pad (mode, type);
4255 /* If defined, a C expression which determines whether, and in which
4256 direction, to pad out an argument with extra space. The value
4257 should be of type `enum direction': either `upward' to pad above
4258 the argument, `downward' to pad below, or `none' to inhibit
4261 For the AIX ABI structs are always stored left shifted in their
4265 function_arg_padding (enum machine_mode mode, tree type)
4267 #ifndef AGGREGATE_PADDING_FIXED
4268 #define AGGREGATE_PADDING_FIXED 0
4270 #ifndef AGGREGATES_PAD_UPWARD_ALWAYS
4271 #define AGGREGATES_PAD_UPWARD_ALWAYS 0
4274 if (!AGGREGATE_PADDING_FIXED)
4276 /* GCC used to pass structures of the same size as integer types as
4277 if they were in fact integers, ignoring FUNCTION_ARG_PADDING.
4278 i.e. Structures of size 1 or 2 (or 4 when TARGET_64BIT) were
4279 passed padded downward, except that -mstrict-align further
4280 muddied the water in that multi-component structures of 2 and 4
4281 bytes in size were passed padded upward.
4283 The following arranges for best compatibility with previous
4284 versions of gcc, but removes the -mstrict-align dependency. */
4285 if (BYTES_BIG_ENDIAN)
4287 HOST_WIDE_INT size = 0;
4289 if (mode == BLKmode)
4291 if (type && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
4292 size = int_size_in_bytes (type);
4295 size = GET_MODE_SIZE (mode);
4297 if (size == 1 || size == 2 || size == 4)
4303 if (AGGREGATES_PAD_UPWARD_ALWAYS)
4305 if (type != 0 && AGGREGATE_TYPE_P (type))
4309 /* Fall back to the default. */
4310 return DEFAULT_FUNCTION_ARG_PADDING (mode, type);
4313 /* If defined, a C expression that gives the alignment boundary, in bits,
4314 of an argument with the specified mode and type. If it is not defined,
4315 PARM_BOUNDARY is used for all arguments.
4317 V.4 wants long longs to be double word aligned.
4318 Doubleword align SPE vectors.
4319 Quadword align Altivec vectors.
4320 Quadword align large synthetic vector types. */
4323 function_arg_boundary (enum machine_mode mode, tree type)
4325 if (DEFAULT_ABI == ABI_V4 && GET_MODE_SIZE (mode) == 8)
4327 else if (SPE_VECTOR_MODE (mode)
4328 || (type && TREE_CODE (type) == VECTOR_TYPE
4329 && int_size_in_bytes (type) >= 8
4330 && int_size_in_bytes (type) < 16))
4332 else if (ALTIVEC_VECTOR_MODE (mode)
4333 || (type && TREE_CODE (type) == VECTOR_TYPE
4334 && int_size_in_bytes (type) >= 16))
4336 else if (rs6000_darwin64_abi && mode == BLKmode
4337 && type && TYPE_ALIGN (type) > 64)
4340 return PARM_BOUNDARY;
4343 /* For a function parm of MODE and TYPE, return the starting word in
4344 the parameter area. NWORDS of the parameter area are already used. */
4347 rs6000_parm_start (enum machine_mode mode, tree type, unsigned int nwords)
4350 unsigned int parm_offset;
4352 align = function_arg_boundary (mode, type) / PARM_BOUNDARY - 1;
4353 parm_offset = DEFAULT_ABI == ABI_V4 ? 2 : 6;
4354 return nwords + (-(parm_offset + nwords) & align);
4357 /* Compute the size (in words) of a function argument. */
4359 static unsigned long
4360 rs6000_arg_size (enum machine_mode mode, tree type)
4364 if (mode != BLKmode)
4365 size = GET_MODE_SIZE (mode);
4367 size = int_size_in_bytes (type);
4370 return (size + 3) >> 2;
4372 return (size + 7) >> 3;
4375 /* Use this to flush pending int fields. */
4378 rs6000_darwin64_record_arg_advance_flush (CUMULATIVE_ARGS *cum,
4379 HOST_WIDE_INT bitpos)
4381 unsigned int startbit, endbit;
4382 int intregs, intoffset;
4383 enum machine_mode mode;
4385 if (cum->intoffset == -1)
4388 intoffset = cum->intoffset;
4389 cum->intoffset = -1;
4391 if (intoffset % BITS_PER_WORD != 0)
4393 mode = mode_for_size (BITS_PER_WORD - intoffset % BITS_PER_WORD,
4395 if (mode == BLKmode)
4397 /* We couldn't find an appropriate mode, which happens,
4398 e.g., in packed structs when there are 3 bytes to load.
4399 Back intoffset back to the beginning of the word in this
4401 intoffset = intoffset & -BITS_PER_WORD;
4405 startbit = intoffset & -BITS_PER_WORD;
4406 endbit = (bitpos + BITS_PER_WORD - 1) & -BITS_PER_WORD;
4407 intregs = (endbit - startbit) / BITS_PER_WORD;
4408 cum->words += intregs;
4411 /* The darwin64 ABI calls for us to recurse down through structs,
4412 looking for elements passed in registers. Unfortunately, we have
4413 to track int register count here also because of misalignments
4414 in powerpc alignment mode. */
4417 rs6000_darwin64_record_arg_advance_recurse (CUMULATIVE_ARGS *cum,
4419 HOST_WIDE_INT startbitpos)
4423 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
4424 if (TREE_CODE (f) == FIELD_DECL)
4426 HOST_WIDE_INT bitpos = startbitpos;
4427 tree ftype = TREE_TYPE (f);
4428 enum machine_mode mode = TYPE_MODE (ftype);
4430 if (DECL_SIZE (f) != 0
4431 && host_integerp (bit_position (f), 1))
4432 bitpos += int_bit_position (f);
4434 /* ??? FIXME: else assume zero offset. */
4436 if (TREE_CODE (ftype) == RECORD_TYPE)
4437 rs6000_darwin64_record_arg_advance_recurse (cum, ftype, bitpos);
4438 else if (USE_FP_FOR_ARG_P (cum, mode, ftype))
4440 rs6000_darwin64_record_arg_advance_flush (cum, bitpos);
4441 cum->fregno += (GET_MODE_SIZE (mode) + 7) >> 3;
4442 cum->words += (GET_MODE_SIZE (mode) + 7) >> 3;
4444 else if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, 1))
4446 rs6000_darwin64_record_arg_advance_flush (cum, bitpos);
4450 else if (cum->intoffset == -1)
4451 cum->intoffset = bitpos;
4455 /* Update the data in CUM to advance over an argument
4456 of mode MODE and data type TYPE.
4457 (TYPE is null for libcalls where that information may not be available.)
4459 Note that for args passed by reference, function_arg will be called
4460 with MODE and TYPE set to that of the pointer to the arg, not the arg
4464 function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
4465 tree type, int named, int depth)
4469 /* Only tick off an argument if we're not recursing. */
4471 cum->nargs_prototype--;
4473 if (TARGET_ALTIVEC_ABI
4474 && (ALTIVEC_VECTOR_MODE (mode)
4475 || (type && TREE_CODE (type) == VECTOR_TYPE
4476 && int_size_in_bytes (type) == 16)))
4480 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named))
4483 if (!TARGET_ALTIVEC)
4484 error ("cannot pass argument in vector register because"
4485 " altivec instructions are disabled, use -maltivec"
4488 /* PowerPC64 Linux and AIX allocate GPRs for a vector argument
4489 even if it is going to be passed in a vector register.
4490 Darwin does the same for variable-argument functions. */
4491 if ((DEFAULT_ABI == ABI_AIX && TARGET_64BIT)
4492 || (cum->stdarg && DEFAULT_ABI != ABI_V4))
4502 /* Vector parameters must be 16-byte aligned. This places
4503 them at 2 mod 4 in terms of words in 32-bit mode, since
4504 the parameter save area starts at offset 24 from the
4505 stack. In 64-bit mode, they just have to start on an
4506 even word, since the parameter save area is 16-byte
4507 aligned. Space for GPRs is reserved even if the argument
4508 will be passed in memory. */
4510 align = (2 - cum->words) & 3;
4512 align = cum->words & 1;
4513 cum->words += align + rs6000_arg_size (mode, type);
4515 if (TARGET_DEBUG_ARG)
4517 fprintf (stderr, "function_adv: words = %2d, align=%d, ",
4519 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s\n",
4520 cum->nargs_prototype, cum->prototype,
4521 GET_MODE_NAME (mode));
4525 else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode)
4527 && cum->sysv_gregno <= GP_ARG_MAX_REG)
4530 else if (rs6000_darwin64_abi
4532 && TREE_CODE (type) == RECORD_TYPE
4533 && (size = int_size_in_bytes (type)) > 0)
4535 /* Variable sized types have size == -1 and are
4536 treated as if consisting entirely of ints.
4537 Pad to 16 byte boundary if needed. */
4538 if (TYPE_ALIGN (type) >= 2 * BITS_PER_WORD
4539 && (cum->words % 2) != 0)
4541 /* For varargs, we can just go up by the size of the struct. */
4543 cum->words += (size + 7) / 8;
4546 /* It is tempting to say int register count just goes up by
4547 sizeof(type)/8, but this is wrong in a case such as
4548 { int; double; int; } [powerpc alignment]. We have to
4549 grovel through the fields for these too. */
4551 rs6000_darwin64_record_arg_advance_recurse (cum, type, 0);
4552 rs6000_darwin64_record_arg_advance_flush (cum,
4553 size * BITS_PER_UNIT);
4556 else if (DEFAULT_ABI == ABI_V4)
4558 if (TARGET_HARD_FLOAT && TARGET_FPRS
4559 && (mode == SFmode || mode == DFmode))
4561 if (cum->fregno <= FP_ARG_V4_MAX_REG)
4566 cum->words += cum->words & 1;
4567 cum->words += rs6000_arg_size (mode, type);
4572 int n_words = rs6000_arg_size (mode, type);
4573 int gregno = cum->sysv_gregno;
4575 /* Long long and SPE vectors are put in (r3,r4), (r5,r6),
4576 (r7,r8) or (r9,r10). As does any other 2 word item such
4577 as complex int due to a historical mistake. */
4579 gregno += (1 - gregno) & 1;
4581 /* Multi-reg args are not split between registers and stack. */
4582 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
4584 /* Long long and SPE vectors are aligned on the stack.
4585 So are other 2 word items such as complex int due to
4586 a historical mistake. */
4588 cum->words += cum->words & 1;
4589 cum->words += n_words;
4592 /* Note: continuing to accumulate gregno past when we've started
4593 spilling to the stack indicates the fact that we've started
4594 spilling to the stack to expand_builtin_saveregs. */
4595 cum->sysv_gregno = gregno + n_words;
4598 if (TARGET_DEBUG_ARG)
4600 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
4601 cum->words, cum->fregno);
4602 fprintf (stderr, "gregno = %2d, nargs = %4d, proto = %d, ",
4603 cum->sysv_gregno, cum->nargs_prototype, cum->prototype);
4604 fprintf (stderr, "mode = %4s, named = %d\n",
4605 GET_MODE_NAME (mode), named);
4610 int n_words = rs6000_arg_size (mode, type);
4611 int start_words = cum->words;
4612 int align_words = rs6000_parm_start (mode, type, start_words);
4614 cum->words = align_words + n_words;
4616 if (GET_MODE_CLASS (mode) == MODE_FLOAT
4617 && TARGET_HARD_FLOAT && TARGET_FPRS)
4618 cum->fregno += (GET_MODE_SIZE (mode) + 7) >> 3;
4620 if (TARGET_DEBUG_ARG)
4622 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
4623 cum->words, cum->fregno);
4624 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s, ",
4625 cum->nargs_prototype, cum->prototype, GET_MODE_NAME (mode));
4626 fprintf (stderr, "named = %d, align = %d, depth = %d\n",
4627 named, align_words - start_words, depth);
4633 spe_build_register_parallel (enum machine_mode mode, int gregno)
4640 r1 = gen_rtx_REG (DImode, gregno);
4641 r1 = gen_rtx_EXPR_LIST (VOIDmode, r1, const0_rtx);
4642 return gen_rtx_PARALLEL (mode, gen_rtvec (1, r1));
4645 r1 = gen_rtx_REG (DImode, gregno);
4646 r1 = gen_rtx_EXPR_LIST (VOIDmode, r1, const0_rtx);
4647 r3 = gen_rtx_REG (DImode, gregno + 2);
4648 r3 = gen_rtx_EXPR_LIST (VOIDmode, r3, GEN_INT (8));
4649 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r3));
4656 /* Determine where to put a SIMD argument on the SPE. */
4658 rs6000_spe_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
4661 int gregno = cum->sysv_gregno;
4663 /* On E500 v2, double arithmetic is done on the full 64-bit GPR, but
4664 are passed and returned in a pair of GPRs for ABI compatibility. */
4665 if (TARGET_E500_DOUBLE && (mode == DFmode || mode == DCmode))
4667 int n_words = rs6000_arg_size (mode, type);
4669 /* Doubles go in an odd/even register pair (r5/r6, etc). */
4671 gregno += (1 - gregno) & 1;
4673 /* Multi-reg args are not split between registers and stack. */
4674 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
4677 return spe_build_register_parallel (mode, gregno);
4681 int n_words = rs6000_arg_size (mode, type);
4683 /* SPE vectors are put in odd registers. */
4684 if (n_words == 2 && (gregno & 1) == 0)
4687 if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
4690 enum machine_mode m = SImode;
4692 r1 = gen_rtx_REG (m, gregno);
4693 r1 = gen_rtx_EXPR_LIST (m, r1, const0_rtx);
4694 r2 = gen_rtx_REG (m, gregno + 1);
4695 r2 = gen_rtx_EXPR_LIST (m, r2, GEN_INT (4));
4696 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
4703 if (gregno <= GP_ARG_MAX_REG)
4704 return gen_rtx_REG (mode, gregno);
4710 /* A subroutine of rs6000_darwin64_record_arg. Assign the bits of the
4711 structure between cum->intoffset and bitpos to integer registers. */
4714 rs6000_darwin64_record_arg_flush (CUMULATIVE_ARGS *cum,
4715 HOST_WIDE_INT bitpos, rtx rvec[], int *k)
4717 enum machine_mode mode;
4719 unsigned int startbit, endbit;
4720 int this_regno, intregs, intoffset;
4723 if (cum->intoffset == -1)
4726 intoffset = cum->intoffset;
4727 cum->intoffset = -1;
4729 /* If this is the trailing part of a word, try to only load that
4730 much into the register. Otherwise load the whole register. Note
4731 that in the latter case we may pick up unwanted bits. It's not a
4732 problem at the moment but may wish to revisit. */
4734 if (intoffset % BITS_PER_WORD != 0)
4736 mode = mode_for_size (BITS_PER_WORD - intoffset % BITS_PER_WORD,
4738 if (mode == BLKmode)
4740 /* We couldn't find an appropriate mode, which happens,
4741 e.g., in packed structs when there are 3 bytes to load.
4742 Back intoffset back to the beginning of the word in this
4744 intoffset = intoffset & -BITS_PER_WORD;
4751 startbit = intoffset & -BITS_PER_WORD;
4752 endbit = (bitpos + BITS_PER_WORD - 1) & -BITS_PER_WORD;
4753 intregs = (endbit - startbit) / BITS_PER_WORD;
4754 this_regno = cum->words + intoffset / BITS_PER_WORD;
4756 if (intregs > 0 && intregs > GP_ARG_NUM_REG - this_regno)
4759 intregs = MIN (intregs, GP_ARG_NUM_REG - this_regno);
4763 intoffset /= BITS_PER_UNIT;
4766 regno = GP_ARG_MIN_REG + this_regno;
4767 reg = gen_rtx_REG (mode, regno);
4769 gen_rtx_EXPR_LIST (VOIDmode, reg, GEN_INT (intoffset));
4772 intoffset = (intoffset | (UNITS_PER_WORD-1)) + 1;
4776 while (intregs > 0);
4779 /* Recursive workhorse for the following. */
4782 rs6000_darwin64_record_arg_recurse (CUMULATIVE_ARGS *cum, tree type,
4783 HOST_WIDE_INT startbitpos, rtx rvec[],
4788 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
4789 if (TREE_CODE (f) == FIELD_DECL)
4791 HOST_WIDE_INT bitpos = startbitpos;
4792 tree ftype = TREE_TYPE (f);
4793 enum machine_mode mode = TYPE_MODE (ftype);
4795 if (DECL_SIZE (f) != 0
4796 && host_integerp (bit_position (f), 1))
4797 bitpos += int_bit_position (f);
4799 /* ??? FIXME: else assume zero offset. */
4801 if (TREE_CODE (ftype) == RECORD_TYPE)
4802 rs6000_darwin64_record_arg_recurse (cum, ftype, bitpos, rvec, k);
4803 else if (cum->named && USE_FP_FOR_ARG_P (cum, mode, ftype))
4808 case SCmode: mode = SFmode; break;
4809 case DCmode: mode = DFmode; break;
4810 case TCmode: mode = TFmode; break;
4814 rs6000_darwin64_record_arg_flush (cum, bitpos, rvec, k);
4816 = gen_rtx_EXPR_LIST (VOIDmode,
4817 gen_rtx_REG (mode, cum->fregno++),
4818 GEN_INT (bitpos / BITS_PER_UNIT));
4822 else if (cum->named && USE_ALTIVEC_FOR_ARG_P (cum, mode, ftype, 1))
4824 rs6000_darwin64_record_arg_flush (cum, bitpos, rvec, k);
4826 = gen_rtx_EXPR_LIST (VOIDmode,
4827 gen_rtx_REG (mode, cum->vregno++),
4828 GEN_INT (bitpos / BITS_PER_UNIT));
4830 else if (cum->intoffset == -1)
4831 cum->intoffset = bitpos;
4835 /* For the darwin64 ABI, we want to construct a PARALLEL consisting of
4836 the register(s) to be used for each field and subfield of a struct
4837 being passed by value, along with the offset of where the
4838 register's value may be found in the block. FP fields go in FP
4839 register, vector fields go in vector registers, and everything
4840 else goes in int registers, packed as in memory.
4842 This code is also used for function return values. RETVAL indicates
4843 whether this is the case.
4845 Much of this is taken from the SPARC V9 port, which has a similar
4846 calling convention. */
4849 rs6000_darwin64_record_arg (CUMULATIVE_ARGS *orig_cum, tree type,
4850 int named, bool retval)
4852 rtx rvec[FIRST_PSEUDO_REGISTER];
4853 int k = 1, kbase = 1;
4854 HOST_WIDE_INT typesize = int_size_in_bytes (type);
4855 /* This is a copy; modifications are not visible to our caller. */
4856 CUMULATIVE_ARGS copy_cum = *orig_cum;
4857 CUMULATIVE_ARGS *cum = ©_cum;
4859 /* Pad to 16 byte boundary if needed. */
4860 if (!retval && TYPE_ALIGN (type) >= 2 * BITS_PER_WORD
4861 && (cum->words % 2) != 0)
4868 /* Put entries into rvec[] for individual FP and vector fields, and
4869 for the chunks of memory that go in int regs. Note we start at
4870 element 1; 0 is reserved for an indication of using memory, and
4871 may or may not be filled in below. */
4872 rs6000_darwin64_record_arg_recurse (cum, type, 0, rvec, &k);
4873 rs6000_darwin64_record_arg_flush (cum, typesize * BITS_PER_UNIT, rvec, &k);
4875 /* If any part of the struct went on the stack put all of it there.
4876 This hack is because the generic code for
4877 FUNCTION_ARG_PARTIAL_NREGS cannot handle cases where the register
4878 parts of the struct are not at the beginning. */
4882 return NULL_RTX; /* doesn't go in registers at all */
4884 rvec[0] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
4886 if (k > 1 || cum->use_stack)
4887 return gen_rtx_PARALLEL (BLKmode, gen_rtvec_v (k - kbase, &rvec[kbase]));
4892 /* Determine where to place an argument in 64-bit mode with 32-bit ABI. */
4895 rs6000_mixed_function_arg (enum machine_mode mode, tree type, int align_words)
4899 rtx rvec[GP_ARG_NUM_REG + 1];
4901 if (align_words >= GP_ARG_NUM_REG)
4904 n_units = rs6000_arg_size (mode, type);
4906 /* Optimize the simple case where the arg fits in one gpr, except in
4907 the case of BLKmode due to assign_parms assuming that registers are
4908 BITS_PER_WORD wide. */
4910 || (n_units == 1 && mode != BLKmode))
4911 return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
4914 if (align_words + n_units > GP_ARG_NUM_REG)
4915 /* Not all of the arg fits in gprs. Say that it goes in memory too,
4916 using a magic NULL_RTX component.
4917 FIXME: This is not strictly correct. Only some of the arg
4918 belongs in memory, not all of it. However, there isn't any way
4919 to do this currently, apart from building rtx descriptions for
4920 the pieces of memory we want stored. Due to bugs in the generic
4921 code we can't use the normal function_arg_partial_nregs scheme
4922 with the PARALLEL arg description we emit here.
4923 In any case, the code to store the whole arg to memory is often
4924 more efficient than code to store pieces, and we know that space
4925 is available in the right place for the whole arg. */
4926 /* FIXME: This should be fixed since the conversion to
4927 TARGET_ARG_PARTIAL_BYTES. */
4928 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
4933 rtx r = gen_rtx_REG (SImode, GP_ARG_MIN_REG + align_words);
4934 rtx off = GEN_INT (i++ * 4);
4935 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, off);
4937 while (++align_words < GP_ARG_NUM_REG && --n_units != 0);
4939 return gen_rtx_PARALLEL (mode, gen_rtvec_v (k, rvec));
4942 /* Determine where to put an argument to a function.
4943 Value is zero to push the argument on the stack,
4944 or a hard register in which to store the argument.
4946 MODE is the argument's machine mode.
4947 TYPE is the data type of the argument (as a tree).
4948 This is null for libcalls where that information may
4950 CUM is a variable of type CUMULATIVE_ARGS which gives info about
4951 the preceding args and about the function being called. It is
4952 not modified in this routine.
4953 NAMED is nonzero if this argument is a named parameter
4954 (otherwise it is an extra parameter matching an ellipsis).
4956 On RS/6000 the first eight words of non-FP are normally in registers
4957 and the rest are pushed. Under AIX, the first 13 FP args are in registers.
4958 Under V.4, the first 8 FP args are in registers.
4960 If this is floating-point and no prototype is specified, we use
4961 both an FP and integer register (or possibly FP reg and stack). Library
4962 functions (when CALL_LIBCALL is set) always have the proper types for args,
4963 so we can pass the FP value just in one register. emit_library_function
4964 doesn't support PARALLEL anyway.
4966 Note that for args passed by reference, function_arg will be called
4967 with MODE and TYPE set to that of the pointer to the arg, not the arg
4971 function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
4972 tree type, int named)
4974 enum rs6000_abi abi = DEFAULT_ABI;
4976 /* Return a marker to indicate whether CR1 needs to set or clear the
4977 bit that V.4 uses to say fp args were passed in registers.
4978 Assume that we don't need the marker for software floating point,
4979 or compiler generated library calls. */
4980 if (mode == VOIDmode)
4983 && cum->nargs_prototype < 0
4984 && (cum->call_cookie & CALL_LIBCALL) == 0
4985 && (cum->prototype || TARGET_NO_PROTOTYPE))
4987 /* For the SPE, we need to crxor CR6 always. */
4989 return GEN_INT (cum->call_cookie | CALL_V4_SET_FP_ARGS);
4990 else if (TARGET_HARD_FLOAT && TARGET_FPRS)
4991 return GEN_INT (cum->call_cookie
4992 | ((cum->fregno == FP_ARG_MIN_REG)
4993 ? CALL_V4_SET_FP_ARGS
4994 : CALL_V4_CLEAR_FP_ARGS));
4997 return GEN_INT (cum->call_cookie);
5000 if (rs6000_darwin64_abi && mode == BLKmode
5001 && TREE_CODE (type) == RECORD_TYPE)
5003 rtx rslt = rs6000_darwin64_record_arg (cum, type, named, false);
5004 if (rslt != NULL_RTX)
5006 /* Else fall through to usual handling. */
5009 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named))
5010 if (TARGET_64BIT && ! cum->prototype)
5012 /* Vector parameters get passed in vector register
5013 and also in GPRs or memory, in absence of prototype. */
5016 align_words = (cum->words + 1) & ~1;
5018 if (align_words >= GP_ARG_NUM_REG)
5024 slot = gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
5026 return gen_rtx_PARALLEL (mode,
5028 gen_rtx_EXPR_LIST (VOIDmode,
5030 gen_rtx_EXPR_LIST (VOIDmode,
5031 gen_rtx_REG (mode, cum->vregno),
5035 return gen_rtx_REG (mode, cum->vregno);
5036 else if (TARGET_ALTIVEC_ABI
5037 && (ALTIVEC_VECTOR_MODE (mode)
5038 || (type && TREE_CODE (type) == VECTOR_TYPE
5039 && int_size_in_bytes (type) == 16)))
5041 if (named || abi == ABI_V4)
5045 /* Vector parameters to varargs functions under AIX or Darwin
5046 get passed in memory and possibly also in GPRs. */
5047 int align, align_words, n_words;
5048 enum machine_mode part_mode;
5050 /* Vector parameters must be 16-byte aligned. This places them at
5051 2 mod 4 in terms of words in 32-bit mode, since the parameter
5052 save area starts at offset 24 from the stack. In 64-bit mode,
5053 they just have to start on an even word, since the parameter
5054 save area is 16-byte aligned. */
5056 align = (2 - cum->words) & 3;
5058 align = cum->words & 1;
5059 align_words = cum->words + align;
5061 /* Out of registers? Memory, then. */
5062 if (align_words >= GP_ARG_NUM_REG)
5065 if (TARGET_32BIT && TARGET_POWERPC64)
5066 return rs6000_mixed_function_arg (mode, type, align_words);
5068 /* The vector value goes in GPRs. Only the part of the
5069 value in GPRs is reported here. */
5071 n_words = rs6000_arg_size (mode, type);
5072 if (align_words + n_words > GP_ARG_NUM_REG)
5073 /* Fortunately, there are only two possibilities, the value
5074 is either wholly in GPRs or half in GPRs and half not. */
5077 return gen_rtx_REG (part_mode, GP_ARG_MIN_REG + align_words);
5080 else if (TARGET_SPE_ABI && TARGET_SPE
5081 && (SPE_VECTOR_MODE (mode)
5082 || (TARGET_E500_DOUBLE && (mode == DFmode
5083 || mode == DCmode))))
5084 return rs6000_spe_function_arg (cum, mode, type);
5086 else if (abi == ABI_V4)
5088 if (TARGET_HARD_FLOAT && TARGET_FPRS
5089 && (mode == SFmode || mode == DFmode))
5091 if (cum->fregno <= FP_ARG_V4_MAX_REG)
5092 return gen_rtx_REG (mode, cum->fregno);
5098 int n_words = rs6000_arg_size (mode, type);
5099 int gregno = cum->sysv_gregno;
5101 /* Long long and SPE vectors are put in (r3,r4), (r5,r6),
5102 (r7,r8) or (r9,r10). As does any other 2 word item such
5103 as complex int due to a historical mistake. */
5105 gregno += (1 - gregno) & 1;
5107 /* Multi-reg args are not split between registers and stack. */
5108 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
5111 if (TARGET_32BIT && TARGET_POWERPC64)
5112 return rs6000_mixed_function_arg (mode, type,
5113 gregno - GP_ARG_MIN_REG);
5114 return gen_rtx_REG (mode, gregno);
5119 int align_words = rs6000_parm_start (mode, type, cum->words);
5121 if (USE_FP_FOR_ARG_P (cum, mode, type))
5123 rtx rvec[GP_ARG_NUM_REG + 1];
5127 enum machine_mode fmode = mode;
5128 unsigned long n_fpreg = (GET_MODE_SIZE (mode) + 7) >> 3;
5130 if (cum->fregno + n_fpreg > FP_ARG_MAX_REG + 1)
5132 /* Currently, we only ever need one reg here because complex
5133 doubles are split. */
5134 gcc_assert (cum->fregno == FP_ARG_MAX_REG && fmode == TFmode);
5136 /* Long double split over regs and memory. */
5140 /* Do we also need to pass this arg in the parameter save
5143 && (cum->nargs_prototype <= 0
5144 || (DEFAULT_ABI == ABI_AIX
5146 && align_words >= GP_ARG_NUM_REG)));
5148 if (!needs_psave && mode == fmode)
5149 return gen_rtx_REG (fmode, cum->fregno);
5154 /* Describe the part that goes in gprs or the stack.
5155 This piece must come first, before the fprs. */
5156 if (align_words < GP_ARG_NUM_REG)
5158 unsigned long n_words = rs6000_arg_size (mode, type);
5160 if (align_words + n_words > GP_ARG_NUM_REG
5161 || (TARGET_32BIT && TARGET_POWERPC64))
5163 /* If this is partially on the stack, then we only
5164 include the portion actually in registers here. */
5165 enum machine_mode rmode = TARGET_32BIT ? SImode : DImode;
5168 if (align_words + n_words > GP_ARG_NUM_REG
5169 && (TARGET_32BIT && TARGET_POWERPC64))
5170 /* Not all of the arg fits in gprs. Say that it
5171 goes in memory too, using a magic NULL_RTX
5172 component. Also see comment in
5173 rs6000_mixed_function_arg for why the normal
5174 function_arg_partial_nregs scheme doesn't work
5176 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX,
5180 r = gen_rtx_REG (rmode,
5181 GP_ARG_MIN_REG + align_words);
5182 off = GEN_INT (i++ * GET_MODE_SIZE (rmode));
5183 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, off);
5185 while (++align_words < GP_ARG_NUM_REG && --n_words != 0);
5189 /* The whole arg fits in gprs. */
5190 r = gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
5191 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, const0_rtx);
5195 /* It's entirely in memory. */
5196 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
5199 /* Describe where this piece goes in the fprs. */
5200 r = gen_rtx_REG (fmode, cum->fregno);
5201 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, const0_rtx);
5203 return gen_rtx_PARALLEL (mode, gen_rtvec_v (k, rvec));
5205 else if (align_words < GP_ARG_NUM_REG)
5207 if (TARGET_32BIT && TARGET_POWERPC64)
5208 return rs6000_mixed_function_arg (mode, type, align_words);
5210 if (mode == BLKmode)
5213 return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
5220 /* For an arg passed partly in registers and partly in memory, this is
5221 the number of bytes passed in registers. For args passed entirely in
5222 registers or entirely in memory, zero. When an arg is described by a
5223 PARALLEL, perhaps using more than one register type, this function
5224 returns the number of bytes used by the first element of the PARALLEL. */
5227 rs6000_arg_partial_bytes (CUMULATIVE_ARGS *cum, enum machine_mode mode,
5228 tree type, bool named)
5233 if (DEFAULT_ABI == ABI_V4)
5236 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named)
5237 && cum->nargs_prototype >= 0)
5240 /* In this complicated case we just disable the partial_nregs code. */
5241 if (rs6000_darwin64_abi && mode == BLKmode
5242 && TREE_CODE (type) == RECORD_TYPE
5243 && int_size_in_bytes (type) > 0)
5246 align_words = rs6000_parm_start (mode, type, cum->words);
5248 if (USE_FP_FOR_ARG_P (cum, mode, type)
5249 /* If we are passing this arg in the fixed parameter save area
5250 (gprs or memory) as well as fprs, then this function should
5251 return the number of bytes passed in the parameter save area
5252 rather than bytes passed in fprs. */
5254 && (cum->nargs_prototype <= 0
5255 || (DEFAULT_ABI == ABI_AIX
5257 && align_words >= GP_ARG_NUM_REG))))
5259 if (cum->fregno + ((GET_MODE_SIZE (mode) + 7) >> 3) > FP_ARG_MAX_REG + 1)
5260 ret = (FP_ARG_MAX_REG + 1 - cum->fregno) * 8;
5261 else if (cum->nargs_prototype >= 0)
5265 if (align_words < GP_ARG_NUM_REG
5266 && GP_ARG_NUM_REG < align_words + rs6000_arg_size (mode, type))
5267 ret = (GP_ARG_NUM_REG - align_words) * (TARGET_32BIT ? 4 : 8);
5269 if (ret != 0 && TARGET_DEBUG_ARG)
5270 fprintf (stderr, "rs6000_arg_partial_bytes: %d\n", ret);
5275 /* A C expression that indicates when an argument must be passed by
5276 reference. If nonzero for an argument, a copy of that argument is
5277 made in memory and a pointer to the argument is passed instead of
5278 the argument itself. The pointer is passed in whatever way is
5279 appropriate for passing a pointer to that type.
5281 Under V.4, aggregates and long double are passed by reference.
5283 As an extension to all 32-bit ABIs, AltiVec vectors are passed by
5284 reference unless the AltiVec vector extension ABI is in force.
5286 As an extension to all ABIs, variable sized types are passed by
5290 rs6000_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
5291 enum machine_mode mode, tree type,
5292 bool named ATTRIBUTE_UNUSED)
5294 if (DEFAULT_ABI == ABI_V4 && mode == TFmode)
5296 if (TARGET_DEBUG_ARG)
5297 fprintf (stderr, "function_arg_pass_by_reference: V4 long double\n");
5304 if (DEFAULT_ABI == ABI_V4 && AGGREGATE_TYPE_P (type))
5306 if (TARGET_DEBUG_ARG)
5307 fprintf (stderr, "function_arg_pass_by_reference: V4 aggregate\n");
5311 if (int_size_in_bytes (type) < 0)
5313 if (TARGET_DEBUG_ARG)
5314 fprintf (stderr, "function_arg_pass_by_reference: variable size\n");
5318 /* Allow -maltivec -mabi=no-altivec without warning. Altivec vector
5319 modes only exist for GCC vector types if -maltivec. */
5320 if (TARGET_32BIT && !TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
5322 if (TARGET_DEBUG_ARG)
5323 fprintf (stderr, "function_arg_pass_by_reference: AltiVec\n");
5327 /* Pass synthetic vectors in memory. */
5328 if (TREE_CODE (type) == VECTOR_TYPE
5329 && int_size_in_bytes (type) > (TARGET_ALTIVEC_ABI ? 16 : 8))
5331 static bool warned_for_pass_big_vectors = false;
5332 if (TARGET_DEBUG_ARG)
5333 fprintf (stderr, "function_arg_pass_by_reference: synthetic vector\n");
5334 if (!warned_for_pass_big_vectors)
5336 warning (0, "GCC vector passed by reference: "
5337 "non-standard ABI extension with no compatibility guarantee");
5338 warned_for_pass_big_vectors = true;
5347 rs6000_move_block_from_reg (int regno, rtx x, int nregs)
5350 enum machine_mode reg_mode = TARGET_32BIT ? SImode : DImode;
5355 for (i = 0; i < nregs; i++)
5357 rtx tem = adjust_address_nv (x, reg_mode, i * GET_MODE_SIZE (reg_mode));
5358 if (reload_completed)
5360 if (! strict_memory_address_p (reg_mode, XEXP (tem, 0)))
5363 tem = simplify_gen_subreg (reg_mode, x, BLKmode,
5364 i * GET_MODE_SIZE (reg_mode));
5367 tem = replace_equiv_address (tem, XEXP (tem, 0));
5371 emit_move_insn (tem, gen_rtx_REG (reg_mode, regno + i));
5375 /* Perform any needed actions needed for a function that is receiving a
5376 variable number of arguments.
5380 MODE and TYPE are the mode and type of the current parameter.
5382 PRETEND_SIZE is a variable that should be set to the amount of stack
5383 that must be pushed by the prolog to pretend that our caller pushed
5386 Normally, this macro will push all remaining incoming registers on the
5387 stack and set PRETEND_SIZE to the length of the registers pushed. */
5390 setup_incoming_varargs (CUMULATIVE_ARGS *cum, enum machine_mode mode,
5391 tree type, int *pretend_size ATTRIBUTE_UNUSED,
5394 CUMULATIVE_ARGS next_cum;
5395 int reg_size = TARGET_32BIT ? 4 : 8;
5396 rtx save_area = NULL_RTX, mem;
5397 int first_reg_offset, set;
5399 /* Skip the last named argument. */
5401 function_arg_advance (&next_cum, mode, type, 1, 0);
5403 if (DEFAULT_ABI == ABI_V4)
5405 first_reg_offset = next_cum.sysv_gregno - GP_ARG_MIN_REG;
5409 int gpr_reg_num = 0, gpr_size = 0, fpr_size = 0;
5410 HOST_WIDE_INT offset = 0;
5412 /* Try to optimize the size of the varargs save area.
5413 The ABI requires that ap.reg_save_area is doubleword
5414 aligned, but we don't need to allocate space for all
5415 the bytes, only those to which we actually will save
5417 if (cfun->va_list_gpr_size && first_reg_offset < GP_ARG_NUM_REG)
5418 gpr_reg_num = GP_ARG_NUM_REG - first_reg_offset;
5419 if (TARGET_HARD_FLOAT && TARGET_FPRS
5420 && next_cum.fregno <= FP_ARG_V4_MAX_REG
5421 && cfun->va_list_fpr_size)
5424 fpr_size = (next_cum.fregno - FP_ARG_MIN_REG)
5425 * UNITS_PER_FP_WORD;
5426 if (cfun->va_list_fpr_size
5427 < FP_ARG_V4_MAX_REG + 1 - next_cum.fregno)
5428 fpr_size += cfun->va_list_fpr_size * UNITS_PER_FP_WORD;
5430 fpr_size += (FP_ARG_V4_MAX_REG + 1 - next_cum.fregno)
5431 * UNITS_PER_FP_WORD;
5435 offset = -((first_reg_offset * reg_size) & ~7);
5436 if (!fpr_size && gpr_reg_num > cfun->va_list_gpr_size)
5438 gpr_reg_num = cfun->va_list_gpr_size;
5439 if (reg_size == 4 && (first_reg_offset & 1))
5442 gpr_size = (gpr_reg_num * reg_size + 7) & ~7;
5445 offset = - (int) (next_cum.fregno - FP_ARG_MIN_REG)
5447 - (int) (GP_ARG_NUM_REG * reg_size);
5449 if (gpr_size + fpr_size)
5452 = assign_stack_local (BLKmode, gpr_size + fpr_size, 64);
5453 gcc_assert (GET_CODE (reg_save_area) == MEM);
5454 reg_save_area = XEXP (reg_save_area, 0);
5455 if (GET_CODE (reg_save_area) == PLUS)
5457 gcc_assert (XEXP (reg_save_area, 0)
5458 == virtual_stack_vars_rtx);
5459 gcc_assert (GET_CODE (XEXP (reg_save_area, 1)) == CONST_INT);
5460 offset += INTVAL (XEXP (reg_save_area, 1));
5463 gcc_assert (reg_save_area == virtual_stack_vars_rtx);
5466 cfun->machine->varargs_save_offset = offset;
5467 save_area = plus_constant (virtual_stack_vars_rtx, offset);
5472 first_reg_offset = next_cum.words;
5473 save_area = virtual_incoming_args_rtx;
5475 if (targetm.calls.must_pass_in_stack (mode, type))
5476 first_reg_offset += rs6000_arg_size (TYPE_MODE (type), type);
5479 set = get_varargs_alias_set ();
5480 if (! no_rtl && first_reg_offset < GP_ARG_NUM_REG
5481 && cfun->va_list_gpr_size)
5483 int nregs = GP_ARG_NUM_REG - first_reg_offset;
5485 if (va_list_gpr_counter_field)
5487 /* V4 va_list_gpr_size counts number of registers needed. */
5488 if (nregs > cfun->va_list_gpr_size)
5489 nregs = cfun->va_list_gpr_size;
5493 /* char * va_list instead counts number of bytes needed. */
5494 if (nregs > cfun->va_list_gpr_size / reg_size)
5495 nregs = cfun->va_list_gpr_size / reg_size;
5498 mem = gen_rtx_MEM (BLKmode,
5499 plus_constant (save_area,
5500 first_reg_offset * reg_size)),
5501 set_mem_alias_set (mem, set);
5502 set_mem_align (mem, BITS_PER_WORD);
5504 rs6000_move_block_from_reg (GP_ARG_MIN_REG + first_reg_offset, mem,
5508 /* Save FP registers if needed. */
5509 if (DEFAULT_ABI == ABI_V4
5510 && TARGET_HARD_FLOAT && TARGET_FPRS
5512 && next_cum.fregno <= FP_ARG_V4_MAX_REG
5513 && cfun->va_list_fpr_size)
5515 int fregno = next_cum.fregno, nregs;
5516 rtx cr1 = gen_rtx_REG (CCmode, CR1_REGNO);
5517 rtx lab = gen_label_rtx ();
5518 int off = (GP_ARG_NUM_REG * reg_size) + ((fregno - FP_ARG_MIN_REG)
5519 * UNITS_PER_FP_WORD);
5522 (gen_rtx_SET (VOIDmode,
5524 gen_rtx_IF_THEN_ELSE (VOIDmode,
5525 gen_rtx_NE (VOIDmode, cr1,
5527 gen_rtx_LABEL_REF (VOIDmode, lab),
5531 fregno <= FP_ARG_V4_MAX_REG && nregs < cfun->va_list_fpr_size;
5532 fregno++, off += UNITS_PER_FP_WORD, nregs++)
5534 mem = gen_rtx_MEM (DFmode, plus_constant (save_area, off));
5535 set_mem_alias_set (mem, set);
5536 set_mem_align (mem, GET_MODE_ALIGNMENT (DFmode));
5537 emit_move_insn (mem, gen_rtx_REG (DFmode, fregno));
5544 /* Create the va_list data type. */
5547 rs6000_build_builtin_va_list (void)
5549 tree f_gpr, f_fpr, f_res, f_ovf, f_sav, record, type_decl;
5551 /* For AIX, prefer 'char *' because that's what the system
5552 header files like. */
5553 if (DEFAULT_ABI != ABI_V4)
5554 return build_pointer_type (char_type_node);
5556 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
5557 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
5559 f_gpr = build_decl (FIELD_DECL, get_identifier ("gpr"),
5560 unsigned_char_type_node);
5561 f_fpr = build_decl (FIELD_DECL, get_identifier ("fpr"),
5562 unsigned_char_type_node);
5563 /* Give the two bytes of padding a name, so that -Wpadded won't warn on
5565 f_res = build_decl (FIELD_DECL, get_identifier ("reserved"),
5566 short_unsigned_type_node);
5567 f_ovf = build_decl (FIELD_DECL, get_identifier ("overflow_arg_area"),
5569 f_sav = build_decl (FIELD_DECL, get_identifier ("reg_save_area"),
5572 va_list_gpr_counter_field = f_gpr;
5573 va_list_fpr_counter_field = f_fpr;
5575 DECL_FIELD_CONTEXT (f_gpr) = record;
5576 DECL_FIELD_CONTEXT (f_fpr) = record;
5577 DECL_FIELD_CONTEXT (f_res) = record;
5578 DECL_FIELD_CONTEXT (f_ovf) = record;
5579 DECL_FIELD_CONTEXT (f_sav) = record;
5581 TREE_CHAIN (record) = type_decl;
5582 TYPE_NAME (record) = type_decl;
5583 TYPE_FIELDS (record) = f_gpr;
5584 TREE_CHAIN (f_gpr) = f_fpr;
5585 TREE_CHAIN (f_fpr) = f_res;
5586 TREE_CHAIN (f_res) = f_ovf;
5587 TREE_CHAIN (f_ovf) = f_sav;
5589 layout_type (record);
5591 /* The correct type is an array type of one element. */
5592 return build_array_type (record, build_index_type (size_zero_node));
5595 /* Implement va_start. */
5598 rs6000_va_start (tree valist, rtx nextarg)
5600 HOST_WIDE_INT words, n_gpr, n_fpr;
5601 tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
5602 tree gpr, fpr, ovf, sav, t;
5604 /* Only SVR4 needs something special. */
5605 if (DEFAULT_ABI != ABI_V4)
5607 std_expand_builtin_va_start (valist, nextarg);
5611 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
5612 f_fpr = TREE_CHAIN (f_gpr);
5613 f_res = TREE_CHAIN (f_fpr);
5614 f_ovf = TREE_CHAIN (f_res);
5615 f_sav = TREE_CHAIN (f_ovf);
5617 valist = build_va_arg_indirect_ref (valist);
5618 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
5619 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr, NULL_TREE);
5620 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf, NULL_TREE);
5621 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav, NULL_TREE);
5623 /* Count number of gp and fp argument registers used. */
5624 words = current_function_args_info.words;
5625 n_gpr = MIN (current_function_args_info.sysv_gregno - GP_ARG_MIN_REG,
5627 n_fpr = MIN (current_function_args_info.fregno - FP_ARG_MIN_REG,
5630 if (TARGET_DEBUG_ARG)
5631 fprintf (stderr, "va_start: words = "HOST_WIDE_INT_PRINT_DEC", n_gpr = "
5632 HOST_WIDE_INT_PRINT_DEC", n_fpr = "HOST_WIDE_INT_PRINT_DEC"\n",
5633 words, n_gpr, n_fpr);
5635 if (cfun->va_list_gpr_size)
5637 t = build (MODIFY_EXPR, TREE_TYPE (gpr), gpr,
5638 build_int_cst (NULL_TREE, n_gpr));
5639 TREE_SIDE_EFFECTS (t) = 1;
5640 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5643 if (cfun->va_list_fpr_size)
5645 t = build (MODIFY_EXPR, TREE_TYPE (fpr), fpr,
5646 build_int_cst (NULL_TREE, n_fpr));
5647 TREE_SIDE_EFFECTS (t) = 1;
5648 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5651 /* Find the overflow area. */
5652 t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
5654 t = build (PLUS_EXPR, TREE_TYPE (ovf), t,
5655 build_int_cst (NULL_TREE, words * UNITS_PER_WORD));
5656 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
5657 TREE_SIDE_EFFECTS (t) = 1;
5658 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5660 /* If there were no va_arg invocations, don't set up the register
5662 if (!cfun->va_list_gpr_size
5663 && !cfun->va_list_fpr_size
5664 && n_gpr < GP_ARG_NUM_REG
5665 && n_fpr < FP_ARG_V4_MAX_REG)
5668 /* Find the register save area. */
5669 t = make_tree (TREE_TYPE (sav), virtual_stack_vars_rtx);
5670 if (cfun->machine->varargs_save_offset)
5671 t = build (PLUS_EXPR, TREE_TYPE (sav), t,
5672 build_int_cst (NULL_TREE, cfun->machine->varargs_save_offset));
5673 t = build (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
5674 TREE_SIDE_EFFECTS (t) = 1;
5675 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5678 /* Implement va_arg. */
5681 rs6000_gimplify_va_arg (tree valist, tree type, tree *pre_p, tree *post_p)
5683 tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
5684 tree gpr, fpr, ovf, sav, reg, t, u;
5685 int size, rsize, n_reg, sav_ofs, sav_scale;
5686 tree lab_false, lab_over, addr;
5688 tree ptrtype = build_pointer_type (type);
5690 if (pass_by_reference (NULL, TYPE_MODE (type), type, false))
5692 t = rs6000_gimplify_va_arg (valist, ptrtype, pre_p, post_p);
5693 return build_va_arg_indirect_ref (t);
5696 if (DEFAULT_ABI != ABI_V4)
5698 if (targetm.calls.split_complex_arg && TREE_CODE (type) == COMPLEX_TYPE)
5700 tree elem_type = TREE_TYPE (type);
5701 enum machine_mode elem_mode = TYPE_MODE (elem_type);
5702 int elem_size = GET_MODE_SIZE (elem_mode);
5704 if (elem_size < UNITS_PER_WORD)
5706 tree real_part, imag_part;
5707 tree post = NULL_TREE;
5709 real_part = rs6000_gimplify_va_arg (valist, elem_type, pre_p,
5711 /* Copy the value into a temporary, lest the formal temporary
5712 be reused out from under us. */
5713 real_part = get_initialized_tmp_var (real_part, pre_p, &post);
5714 append_to_statement_list (post, pre_p);
5716 imag_part = rs6000_gimplify_va_arg (valist, elem_type, pre_p,
5719 return build (COMPLEX_EXPR, type, real_part, imag_part);
5723 return std_gimplify_va_arg_expr (valist, type, pre_p, post_p);
5726 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
5727 f_fpr = TREE_CHAIN (f_gpr);
5728 f_res = TREE_CHAIN (f_fpr);
5729 f_ovf = TREE_CHAIN (f_res);
5730 f_sav = TREE_CHAIN (f_ovf);
5732 valist = build_va_arg_indirect_ref (valist);
5733 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
5734 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr, NULL_TREE);
5735 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf, NULL_TREE);
5736 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav, NULL_TREE);
5738 size = int_size_in_bytes (type);
5739 rsize = (size + 3) / 4;
5742 if (TARGET_HARD_FLOAT && TARGET_FPRS
5743 && (TYPE_MODE (type) == SFmode || TYPE_MODE (type) == DFmode))
5745 /* FP args go in FP registers, if present. */
5750 if (TYPE_MODE (type) == DFmode)
5755 /* Otherwise into GP registers. */
5764 /* Pull the value out of the saved registers.... */
5767 addr = create_tmp_var (ptr_type_node, "addr");
5768 DECL_POINTER_ALIAS_SET (addr) = get_varargs_alias_set ();
5770 /* AltiVec vectors never go in registers when -mabi=altivec. */
5771 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (TYPE_MODE (type)))
5775 lab_false = create_artificial_label ();
5776 lab_over = create_artificial_label ();
5778 /* Long long and SPE vectors are aligned in the registers.
5779 As are any other 2 gpr item such as complex int due to a
5780 historical mistake. */
5784 u = build2 (BIT_AND_EXPR, TREE_TYPE (reg), reg,
5785 size_int (n_reg - 1));
5786 u = build2 (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg, u);
5789 t = fold_convert (TREE_TYPE (reg), size_int (8 - n_reg + 1));
5790 t = build2 (GE_EXPR, boolean_type_node, u, t);
5791 u = build1 (GOTO_EXPR, void_type_node, lab_false);
5792 t = build3 (COND_EXPR, void_type_node, t, u, NULL_TREE);
5793 gimplify_and_add (t, pre_p);
5797 t = build2 (PLUS_EXPR, ptr_type_node, sav, size_int (sav_ofs));
5799 u = build2 (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg, size_int (n_reg));
5800 u = build1 (CONVERT_EXPR, integer_type_node, u);
5801 u = build2 (MULT_EXPR, integer_type_node, u, size_int (sav_scale));
5802 t = build2 (PLUS_EXPR, ptr_type_node, t, u);
5804 t = build2 (MODIFY_EXPR, void_type_node, addr, t);
5805 gimplify_and_add (t, pre_p);
5807 t = build1 (GOTO_EXPR, void_type_node, lab_over);
5808 gimplify_and_add (t, pre_p);
5810 t = build1 (LABEL_EXPR, void_type_node, lab_false);
5811 append_to_statement_list (t, pre_p);
5815 /* Ensure that we don't find any more args in regs.
5816 Alignment has taken care of the n_reg == 2 case. */
5817 t = build (MODIFY_EXPR, TREE_TYPE (reg), reg, size_int (8));
5818 gimplify_and_add (t, pre_p);
5822 /* ... otherwise out of the overflow area. */
5824 /* Care for on-stack alignment if needed. */
5828 t = build2 (PLUS_EXPR, TREE_TYPE (t), t, size_int (align - 1));
5829 t = build2 (BIT_AND_EXPR, TREE_TYPE (t), t,
5830 build_int_cst (NULL_TREE, -align));
5832 gimplify_expr (&t, pre_p, NULL, is_gimple_val, fb_rvalue);
5834 u = build2 (MODIFY_EXPR, void_type_node, addr, t);
5835 gimplify_and_add (u, pre_p);
5837 t = build2 (PLUS_EXPR, TREE_TYPE (t), t, size_int (size));
5838 t = build2 (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
5839 gimplify_and_add (t, pre_p);
5843 t = build1 (LABEL_EXPR, void_type_node, lab_over);
5844 append_to_statement_list (t, pre_p);
5847 addr = fold_convert (ptrtype, addr);
5848 return build_va_arg_indirect_ref (addr);
5854 def_builtin (int mask, const char *name, tree type, int code)
5856 if (mask & target_flags)
5858 if (rs6000_builtin_decls[code])
5861 rs6000_builtin_decls[code] =
5862 lang_hooks.builtin_function (name, type, code, BUILT_IN_MD,
5867 /* Simple ternary operations: VECd = foo (VECa, VECb, VECc). */
5869 static const struct builtin_description bdesc_3arg[] =
5871 { MASK_ALTIVEC, CODE_FOR_altivec_vmaddfp, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP },
5872 { MASK_ALTIVEC, CODE_FOR_altivec_vmhaddshs, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS },
5873 { MASK_ALTIVEC, CODE_FOR_altivec_vmhraddshs, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS },
5874 { MASK_ALTIVEC, CODE_FOR_altivec_vmladduhm, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM},
5875 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumubm, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM },
5876 { MASK_ALTIVEC, CODE_FOR_altivec_vmsummbm, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM },
5877 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhm, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM },
5878 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshm, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM },
5879 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhs, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS },
5880 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshs, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS },
5881 { MASK_ALTIVEC, CODE_FOR_altivec_vnmsubfp, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP },
5882 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v4sf, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF },
5883 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v4si, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI },
5884 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v8hi, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI },
5885 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v16qi, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI },
5886 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v4sf, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF },
5887 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v4si, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI },
5888 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v8hi, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI },
5889 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v16qi, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI },
5890 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v16qi, "__builtin_altivec_vsldoi_16qi", ALTIVEC_BUILTIN_VSLDOI_16QI },
5891 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v8hi, "__builtin_altivec_vsldoi_8hi", ALTIVEC_BUILTIN_VSLDOI_8HI },
5892 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v4si, "__builtin_altivec_vsldoi_4si", ALTIVEC_BUILTIN_VSLDOI_4SI },
5893 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v4sf, "__builtin_altivec_vsldoi_4sf", ALTIVEC_BUILTIN_VSLDOI_4SF },
5895 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_madd", ALTIVEC_BUILTIN_VEC_MADD },
5896 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_madds", ALTIVEC_BUILTIN_VEC_MADDS },
5897 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mladd", ALTIVEC_BUILTIN_VEC_MLADD },
5898 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mradds", ALTIVEC_BUILTIN_VEC_MRADDS },
5899 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_msum", ALTIVEC_BUILTIN_VEC_MSUM },
5900 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumshm", ALTIVEC_BUILTIN_VEC_VMSUMSHM },
5901 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumuhm", ALTIVEC_BUILTIN_VEC_VMSUMUHM },
5902 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsummbm", ALTIVEC_BUILTIN_VEC_VMSUMMBM },
5903 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumubm", ALTIVEC_BUILTIN_VEC_VMSUMUBM },
5904 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_msums", ALTIVEC_BUILTIN_VEC_MSUMS },
5905 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumshs", ALTIVEC_BUILTIN_VEC_VMSUMSHS },
5906 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumuhs", ALTIVEC_BUILTIN_VEC_VMSUMUHS },
5907 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_nmsub", ALTIVEC_BUILTIN_VEC_NMSUB },
5908 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_perm", ALTIVEC_BUILTIN_VEC_PERM },
5909 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sel", ALTIVEC_BUILTIN_VEC_SEL },
5912 /* DST operations: void foo (void *, const int, const char). */
5914 static const struct builtin_description bdesc_dst[] =
5916 { MASK_ALTIVEC, CODE_FOR_altivec_dst, "__builtin_altivec_dst", ALTIVEC_BUILTIN_DST },
5917 { MASK_ALTIVEC, CODE_FOR_altivec_dstt, "__builtin_altivec_dstt", ALTIVEC_BUILTIN_DSTT },
5918 { MASK_ALTIVEC, CODE_FOR_altivec_dstst, "__builtin_altivec_dstst", ALTIVEC_BUILTIN_DSTST },
5919 { MASK_ALTIVEC, CODE_FOR_altivec_dststt, "__builtin_altivec_dststt", ALTIVEC_BUILTIN_DSTSTT },
5921 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dst", ALTIVEC_BUILTIN_VEC_DST },
5922 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dstt", ALTIVEC_BUILTIN_VEC_DSTT },
5923 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dstst", ALTIVEC_BUILTIN_VEC_DSTST },
5924 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dststt", ALTIVEC_BUILTIN_VEC_DSTSTT }
5927 /* Simple binary operations: VECc = foo (VECa, VECb). */
5929 static struct builtin_description bdesc_2arg[] =
5931 { MASK_ALTIVEC, CODE_FOR_addv16qi3, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM },
5932 { MASK_ALTIVEC, CODE_FOR_addv8hi3, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM },
5933 { MASK_ALTIVEC, CODE_FOR_addv4si3, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM },
5934 { MASK_ALTIVEC, CODE_FOR_addv4sf3, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP },
5935 { MASK_ALTIVEC, CODE_FOR_altivec_vaddcuw, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW },
5936 { MASK_ALTIVEC, CODE_FOR_altivec_vaddubs, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS },
5937 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsbs, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS },
5938 { MASK_ALTIVEC, CODE_FOR_altivec_vadduhs, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS },
5939 { MASK_ALTIVEC, CODE_FOR_altivec_vaddshs, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS },
5940 { MASK_ALTIVEC, CODE_FOR_altivec_vadduws, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS },
5941 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsws, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS },
5942 { MASK_ALTIVEC, CODE_FOR_andv4si3, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND },
5943 { MASK_ALTIVEC, CODE_FOR_andcv4si3, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC },
5944 { MASK_ALTIVEC, CODE_FOR_altivec_vavgub, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB },
5945 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsb, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB },
5946 { MASK_ALTIVEC, CODE_FOR_altivec_vavguh, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH },
5947 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsh, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH },
5948 { MASK_ALTIVEC, CODE_FOR_altivec_vavguw, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW },
5949 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsw, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW },
5950 { MASK_ALTIVEC, CODE_FOR_altivec_vcfux, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX },
5951 { MASK_ALTIVEC, CODE_FOR_altivec_vcfsx, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX },
5952 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpbfp, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP },
5953 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequb, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB },
5954 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequh, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH },
5955 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequw, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW },
5956 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpeqfp, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP },
5957 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgefp, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP },
5958 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtub, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB },
5959 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsb, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB },
5960 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuh, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH },
5961 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsh, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH },
5962 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuw, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW },
5963 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsw, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW },
5964 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtfp, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP },
5965 { MASK_ALTIVEC, CODE_FOR_altivec_vctsxs, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS },
5966 { MASK_ALTIVEC, CODE_FOR_altivec_vctuxs, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS },
5967 { MASK_ALTIVEC, CODE_FOR_umaxv16qi3, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB },
5968 { MASK_ALTIVEC, CODE_FOR_smaxv16qi3, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB },
5969 { MASK_ALTIVEC, CODE_FOR_umaxv8hi3, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH },
5970 { MASK_ALTIVEC, CODE_FOR_smaxv8hi3, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH },
5971 { MASK_ALTIVEC, CODE_FOR_umaxv4si3, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW },
5972 { MASK_ALTIVEC, CODE_FOR_smaxv4si3, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW },
5973 { MASK_ALTIVEC, CODE_FOR_smaxv4sf3, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP },
5974 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghb, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB },
5975 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghh, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH },
5976 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghw, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW },
5977 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglb, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB },
5978 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglh, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH },
5979 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglw, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW },
5980 { MASK_ALTIVEC, CODE_FOR_uminv16qi3, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB },
5981 { MASK_ALTIVEC, CODE_FOR_sminv16qi3, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB },
5982 { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH },
5983 { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH },
5984 { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW },
5985 { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW },
5986 { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP },
5987 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleub, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB },
5988 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesb, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB },
5989 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleuh, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH },
5990 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesh, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH },
5991 { MASK_ALTIVEC, CODE_FOR_altivec_vmuloub, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB },
5992 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosb, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB },
5993 { MASK_ALTIVEC, CODE_FOR_altivec_vmulouh, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH },
5994 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosh, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH },
5995 { MASK_ALTIVEC, CODE_FOR_altivec_norv4si3, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR },
5996 { MASK_ALTIVEC, CODE_FOR_iorv4si3, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR },
5997 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhum, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM },
5998 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwum, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM },
5999 { MASK_ALTIVEC, CODE_FOR_altivec_vpkpx, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX },
6000 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhss, "__builtin_altivec_vpkuhss", ALTIVEC_BUILTIN_VPKUHSS },
6001 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshss, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS },
6002 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwss, "__builtin_altivec_vpkuwss", ALTIVEC_BUILTIN_VPKUWSS },
6003 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswss, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS },
6004 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhus, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS },
6005 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshus, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS },
6006 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwus, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS },
6007 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswus, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS },
6008 { MASK_ALTIVEC, CODE_FOR_altivec_vrlb, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB },
6009 { MASK_ALTIVEC, CODE_FOR_altivec_vrlh, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH },
6010 { MASK_ALTIVEC, CODE_FOR_altivec_vrlw, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW },
6011 { MASK_ALTIVEC, CODE_FOR_altivec_vslb, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB },
6012 { MASK_ALTIVEC, CODE_FOR_altivec_vslh, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH },
6013 { MASK_ALTIVEC, CODE_FOR_altivec_vslw, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW },
6014 { MASK_ALTIVEC, CODE_FOR_altivec_vsl, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL },
6015 { MASK_ALTIVEC, CODE_FOR_altivec_vslo, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO },
6016 { MASK_ALTIVEC, CODE_FOR_altivec_vspltb, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB },
6017 { MASK_ALTIVEC, CODE_FOR_altivec_vsplth, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH },
6018 { MASK_ALTIVEC, CODE_FOR_altivec_vspltw, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW },
6019 { MASK_ALTIVEC, CODE_FOR_lshrv16qi3, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB },
6020 { MASK_ALTIVEC, CODE_FOR_lshrv8hi3, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH },
6021 { MASK_ALTIVEC, CODE_FOR_lshrv4si3, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW },
6022 { MASK_ALTIVEC, CODE_FOR_ashrv16qi3, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB },
6023 { MASK_ALTIVEC, CODE_FOR_ashrv8hi3, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH },
6024 { MASK_ALTIVEC, CODE_FOR_ashrv4si3, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW },
6025 { MASK_ALTIVEC, CODE_FOR_altivec_vsr, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR },
6026 { MASK_ALTIVEC, CODE_FOR_altivec_vsro, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO },
6027 { MASK_ALTIVEC, CODE_FOR_subv16qi3, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM },
6028 { MASK_ALTIVEC, CODE_FOR_subv8hi3, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM },
6029 { MASK_ALTIVEC, CODE_FOR_subv4si3, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM },
6030 { MASK_ALTIVEC, CODE_FOR_subv4sf3, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP },
6031 { MASK_ALTIVEC, CODE_FOR_altivec_vsubcuw, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW },
6032 { MASK_ALTIVEC, CODE_FOR_altivec_vsububs, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS },
6033 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsbs, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS },
6034 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuhs, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS },
6035 { MASK_ALTIVEC, CODE_FOR_altivec_vsubshs, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS },
6036 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuws, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS },
6037 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsws, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS },
6038 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4ubs, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS },
6039 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4sbs, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS },
6040 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4shs, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS },
6041 { MASK_ALTIVEC, CODE_FOR_altivec_vsum2sws, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS },
6042 { MASK_ALTIVEC, CODE_FOR_altivec_vsumsws, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS },
6043 { MASK_ALTIVEC, CODE_FOR_xorv4si3, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR },
6045 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_add", ALTIVEC_BUILTIN_VEC_ADD },
6046 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddfp", ALTIVEC_BUILTIN_VEC_VADDFP },
6047 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduwm", ALTIVEC_BUILTIN_VEC_VADDUWM },
6048 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduhm", ALTIVEC_BUILTIN_VEC_VADDUHM },
6049 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddubm", ALTIVEC_BUILTIN_VEC_VADDUBM },
6050 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_addc", ALTIVEC_BUILTIN_VEC_ADDC },
6051 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_adds", ALTIVEC_BUILTIN_VEC_ADDS },
6052 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddsws", ALTIVEC_BUILTIN_VEC_VADDSWS },
6053 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduws", ALTIVEC_BUILTIN_VEC_VADDUWS },
6054 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddshs", ALTIVEC_BUILTIN_VEC_VADDSHS },
6055 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduhs", ALTIVEC_BUILTIN_VEC_VADDUHS },
6056 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddsbs", ALTIVEC_BUILTIN_VEC_VADDSBS },
6057 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddubs", ALTIVEC_BUILTIN_VEC_VADDUBS },
6058 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_and", ALTIVEC_BUILTIN_VEC_AND },
6059 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_andc", ALTIVEC_BUILTIN_VEC_ANDC },
6060 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_avg", ALTIVEC_BUILTIN_VEC_AVG },
6061 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgsw", ALTIVEC_BUILTIN_VEC_VAVGSW },
6062 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavguw", ALTIVEC_BUILTIN_VEC_VAVGUW },
6063 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgsh", ALTIVEC_BUILTIN_VEC_VAVGSH },
6064 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavguh", ALTIVEC_BUILTIN_VEC_VAVGUH },
6065 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgsb", ALTIVEC_BUILTIN_VEC_VAVGSB },
6066 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgub", ALTIVEC_BUILTIN_VEC_VAVGUB },
6067 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpb", ALTIVEC_BUILTIN_VEC_CMPB },
6068 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpeq", ALTIVEC_BUILTIN_VEC_CMPEQ },
6069 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpeqfp", ALTIVEC_BUILTIN_VEC_VCMPEQFP },
6070 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpequw", ALTIVEC_BUILTIN_VEC_VCMPEQUW },
6071 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpequh", ALTIVEC_BUILTIN_VEC_VCMPEQUH },
6072 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpequb", ALTIVEC_BUILTIN_VEC_VCMPEQUB },
6073 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpge", ALTIVEC_BUILTIN_VEC_CMPGE },
6074 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpgt", ALTIVEC_BUILTIN_VEC_CMPGT },
6075 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtfp", ALTIVEC_BUILTIN_VEC_VCMPGTFP },
6076 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtsw", ALTIVEC_BUILTIN_VEC_VCMPGTSW },
6077 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtuw", ALTIVEC_BUILTIN_VEC_VCMPGTUW },
6078 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtsh", ALTIVEC_BUILTIN_VEC_VCMPGTSH },
6079 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtuh", ALTIVEC_BUILTIN_VEC_VCMPGTUH },
6080 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtsb", ALTIVEC_BUILTIN_VEC_VCMPGTSB },
6081 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtub", ALTIVEC_BUILTIN_VEC_VCMPGTUB },
6082 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmple", ALTIVEC_BUILTIN_VEC_CMPLE },
6083 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmplt", ALTIVEC_BUILTIN_VEC_CMPLT },
6084 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_max", ALTIVEC_BUILTIN_VEC_MAX },
6085 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxfp", ALTIVEC_BUILTIN_VEC_VMAXFP },
6086 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxsw", ALTIVEC_BUILTIN_VEC_VMAXSW },
6087 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxuw", ALTIVEC_BUILTIN_VEC_VMAXUW },
6088 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxsh", ALTIVEC_BUILTIN_VEC_VMAXSH },
6089 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxuh", ALTIVEC_BUILTIN_VEC_VMAXUH },
6090 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxsb", ALTIVEC_BUILTIN_VEC_VMAXSB },
6091 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxub", ALTIVEC_BUILTIN_VEC_VMAXUB },
6092 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mergeh", ALTIVEC_BUILTIN_VEC_MERGEH },
6093 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrghw", ALTIVEC_BUILTIN_VEC_VMRGHW },
6094 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrghh", ALTIVEC_BUILTIN_VEC_VMRGHH },
6095 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrghb", ALTIVEC_BUILTIN_VEC_VMRGHB },
6096 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mergel", ALTIVEC_BUILTIN_VEC_MERGEL },
6097 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrglw", ALTIVEC_BUILTIN_VEC_VMRGLW },
6098 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrglh", ALTIVEC_BUILTIN_VEC_VMRGLH },
6099 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrglb", ALTIVEC_BUILTIN_VEC_VMRGLB },
6100 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_min", ALTIVEC_BUILTIN_VEC_MIN },
6101 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminfp", ALTIVEC_BUILTIN_VEC_VMINFP },
6102 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminsw", ALTIVEC_BUILTIN_VEC_VMINSW },
6103 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminuw", ALTIVEC_BUILTIN_VEC_VMINUW },
6104 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminsh", ALTIVEC_BUILTIN_VEC_VMINSH },
6105 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminuh", ALTIVEC_BUILTIN_VEC_VMINUH },
6106 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminsb", ALTIVEC_BUILTIN_VEC_VMINSB },
6107 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminub", ALTIVEC_BUILTIN_VEC_VMINUB },
6108 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mule", ALTIVEC_BUILTIN_VEC_MULE },
6109 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmuleub", ALTIVEC_BUILTIN_VEC_VMULEUB },
6110 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulesb", ALTIVEC_BUILTIN_VEC_VMULESB },
6111 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmuleuh", ALTIVEC_BUILTIN_VEC_VMULEUH },
6112 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulesh", ALTIVEC_BUILTIN_VEC_VMULESH },
6113 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mulo", ALTIVEC_BUILTIN_VEC_MULO },
6114 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulosh", ALTIVEC_BUILTIN_VEC_VMULOSH },
6115 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulouh", ALTIVEC_BUILTIN_VEC_VMULOUH },
6116 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulosb", ALTIVEC_BUILTIN_VEC_VMULOSB },
6117 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmuloub", ALTIVEC_BUILTIN_VEC_VMULOUB },
6118 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_nor", ALTIVEC_BUILTIN_VEC_NOR },
6119 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_or", ALTIVEC_BUILTIN_VEC_OR },
6120 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_pack", ALTIVEC_BUILTIN_VEC_PACK },
6121 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuwum", ALTIVEC_BUILTIN_VEC_VPKUWUM },
6122 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuhum", ALTIVEC_BUILTIN_VEC_VPKUHUM },
6123 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_packpx", ALTIVEC_BUILTIN_VEC_PACKPX },
6124 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_packs", ALTIVEC_BUILTIN_VEC_PACKS },
6125 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkswss", ALTIVEC_BUILTIN_VEC_VPKSWSS },
6126 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuwus", ALTIVEC_BUILTIN_VEC_VPKUWUS },
6127 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkshss", ALTIVEC_BUILTIN_VEC_VPKSHSS },
6128 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuhus", ALTIVEC_BUILTIN_VEC_VPKUHUS },
6129 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_packsu", ALTIVEC_BUILTIN_VEC_PACKSU },
6130 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkswus", ALTIVEC_BUILTIN_VEC_VPKSWUS },
6131 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkshus", ALTIVEC_BUILTIN_VEC_VPKSHUS },
6132 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_rl", ALTIVEC_BUILTIN_VEC_RL },
6133 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vrlw", ALTIVEC_BUILTIN_VEC_VRLW },
6134 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vrlh", ALTIVEC_BUILTIN_VEC_VRLH },
6135 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vrlb", ALTIVEC_BUILTIN_VEC_VRLB },
6136 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sl", ALTIVEC_BUILTIN_VEC_SL },
6137 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vslw", ALTIVEC_BUILTIN_VEC_VSLW },
6138 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vslh", ALTIVEC_BUILTIN_VEC_VSLH },
6139 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vslb", ALTIVEC_BUILTIN_VEC_VSLB },
6140 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sll", ALTIVEC_BUILTIN_VEC_SLL },
6141 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_slo", ALTIVEC_BUILTIN_VEC_SLO },
6142 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sr", ALTIVEC_BUILTIN_VEC_SR },
6143 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrw", ALTIVEC_BUILTIN_VEC_VSRW },
6144 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrh", ALTIVEC_BUILTIN_VEC_VSRH },
6145 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrb", ALTIVEC_BUILTIN_VEC_VSRB },
6146 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sra", ALTIVEC_BUILTIN_VEC_SRA },
6147 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsraw", ALTIVEC_BUILTIN_VEC_VSRAW },
6148 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrah", ALTIVEC_BUILTIN_VEC_VSRAH },
6149 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrab", ALTIVEC_BUILTIN_VEC_VSRAB },
6150 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_srl", ALTIVEC_BUILTIN_VEC_SRL },
6151 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sro", ALTIVEC_BUILTIN_VEC_SRO },
6152 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sub", ALTIVEC_BUILTIN_VEC_SUB },
6153 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubfp", ALTIVEC_BUILTIN_VEC_VSUBFP },
6154 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuwm", ALTIVEC_BUILTIN_VEC_VSUBUWM },
6155 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuhm", ALTIVEC_BUILTIN_VEC_VSUBUHM },
6156 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsububm", ALTIVEC_BUILTIN_VEC_VSUBUBM },
6157 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_subc", ALTIVEC_BUILTIN_VEC_SUBC },
6158 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_subs", ALTIVEC_BUILTIN_VEC_SUBS },
6159 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubsws", ALTIVEC_BUILTIN_VEC_VSUBSWS },
6160 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuws", ALTIVEC_BUILTIN_VEC_VSUBUWS },
6161 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubshs", ALTIVEC_BUILTIN_VEC_VSUBSHS },
6162 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuhs", ALTIVEC_BUILTIN_VEC_VSUBUHS },
6163 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubsbs", ALTIVEC_BUILTIN_VEC_VSUBSBS },
6164 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsububs", ALTIVEC_BUILTIN_VEC_VSUBUBS },
6165 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sum4s", ALTIVEC_BUILTIN_VEC_SUM4S },
6166 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsum4shs", ALTIVEC_BUILTIN_VEC_VSUM4SHS },
6167 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsum4sbs", ALTIVEC_BUILTIN_VEC_VSUM4SBS },
6168 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsum4ubs", ALTIVEC_BUILTIN_VEC_VSUM4UBS },
6169 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sum2s", ALTIVEC_BUILTIN_VEC_SUM2S },
6170 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sums", ALTIVEC_BUILTIN_VEC_SUMS },
6171 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_xor", ALTIVEC_BUILTIN_VEC_XOR },
6173 /* Place holder, leave as first spe builtin. */
6174 { 0, CODE_FOR_spe_evaddw, "__builtin_spe_evaddw", SPE_BUILTIN_EVADDW },
6175 { 0, CODE_FOR_spe_evand, "__builtin_spe_evand", SPE_BUILTIN_EVAND },
6176 { 0, CODE_FOR_spe_evandc, "__builtin_spe_evandc", SPE_BUILTIN_EVANDC },
6177 { 0, CODE_FOR_spe_evdivws, "__builtin_spe_evdivws", SPE_BUILTIN_EVDIVWS },
6178 { 0, CODE_FOR_spe_evdivwu, "__builtin_spe_evdivwu", SPE_BUILTIN_EVDIVWU },
6179 { 0, CODE_FOR_spe_eveqv, "__builtin_spe_eveqv", SPE_BUILTIN_EVEQV },
6180 { 0, CODE_FOR_spe_evfsadd, "__builtin_spe_evfsadd", SPE_BUILTIN_EVFSADD },
6181 { 0, CODE_FOR_spe_evfsdiv, "__builtin_spe_evfsdiv", SPE_BUILTIN_EVFSDIV },
6182 { 0, CODE_FOR_spe_evfsmul, "__builtin_spe_evfsmul", SPE_BUILTIN_EVFSMUL },
6183 { 0, CODE_FOR_spe_evfssub, "__builtin_spe_evfssub", SPE_BUILTIN_EVFSSUB },
6184 { 0, CODE_FOR_spe_evmergehi, "__builtin_spe_evmergehi", SPE_BUILTIN_EVMERGEHI },
6185 { 0, CODE_FOR_spe_evmergehilo, "__builtin_spe_evmergehilo", SPE_BUILTIN_EVMERGEHILO },
6186 { 0, CODE_FOR_spe_evmergelo, "__builtin_spe_evmergelo", SPE_BUILTIN_EVMERGELO },
6187 { 0, CODE_FOR_spe_evmergelohi, "__builtin_spe_evmergelohi", SPE_BUILTIN_EVMERGELOHI },
6188 { 0, CODE_FOR_spe_evmhegsmfaa, "__builtin_spe_evmhegsmfaa", SPE_BUILTIN_EVMHEGSMFAA },
6189 { 0, CODE_FOR_spe_evmhegsmfan, "__builtin_spe_evmhegsmfan", SPE_BUILTIN_EVMHEGSMFAN },
6190 { 0, CODE_FOR_spe_evmhegsmiaa, "__builtin_spe_evmhegsmiaa", SPE_BUILTIN_EVMHEGSMIAA },
6191 { 0, CODE_FOR_spe_evmhegsmian, "__builtin_spe_evmhegsmian", SPE_BUILTIN_EVMHEGSMIAN },
6192 { 0, CODE_FOR_spe_evmhegumiaa, "__builtin_spe_evmhegumiaa", SPE_BUILTIN_EVMHEGUMIAA },
6193 { 0, CODE_FOR_spe_evmhegumian, "__builtin_spe_evmhegumian", SPE_BUILTIN_EVMHEGUMIAN },
6194 { 0, CODE_FOR_spe_evmhesmf, "__builtin_spe_evmhesmf", SPE_BUILTIN_EVMHESMF },
6195 { 0, CODE_FOR_spe_evmhesmfa, "__builtin_spe_evmhesmfa", SPE_BUILTIN_EVMHESMFA },
6196 { 0, CODE_FOR_spe_evmhesmfaaw, "__builtin_spe_evmhesmfaaw", SPE_BUILTIN_EVMHESMFAAW },
6197 { 0, CODE_FOR_spe_evmhesmfanw, "__builtin_spe_evmhesmfanw", SPE_BUILTIN_EVMHESMFANW },
6198 { 0, CODE_FOR_spe_evmhesmi, "__builtin_spe_evmhesmi", SPE_BUILTIN_EVMHESMI },
6199 { 0, CODE_FOR_spe_evmhesmia, "__builtin_spe_evmhesmia", SPE_BUILTIN_EVMHESMIA },
6200 { 0, CODE_FOR_spe_evmhesmiaaw, "__builtin_spe_evmhesmiaaw", SPE_BUILTIN_EVMHESMIAAW },
6201 { 0, CODE_FOR_spe_evmhesmianw, "__builtin_spe_evmhesmianw", SPE_BUILTIN_EVMHESMIANW },
6202 { 0, CODE_FOR_spe_evmhessf, "__builtin_spe_evmhessf", SPE_BUILTIN_EVMHESSF },
6203 { 0, CODE_FOR_spe_evmhessfa, "__builtin_spe_evmhessfa", SPE_BUILTIN_EVMHESSFA },
6204 { 0, CODE_FOR_spe_evmhessfaaw, "__builtin_spe_evmhessfaaw", SPE_BUILTIN_EVMHESSFAAW },
6205 { 0, CODE_FOR_spe_evmhessfanw, "__builtin_spe_evmhessfanw", SPE_BUILTIN_EVMHESSFANW },
6206 { 0, CODE_FOR_spe_evmhessiaaw, "__builtin_spe_evmhessiaaw", SPE_BUILTIN_EVMHESSIAAW },
6207 { 0, CODE_FOR_spe_evmhessianw, "__builtin_spe_evmhessianw", SPE_BUILTIN_EVMHESSIANW },
6208 { 0, CODE_FOR_spe_evmheumi, "__builtin_spe_evmheumi", SPE_BUILTIN_EVMHEUMI },
6209 { 0, CODE_FOR_spe_evmheumia, "__builtin_spe_evmheumia", SPE_BUILTIN_EVMHEUMIA },
6210 { 0, CODE_FOR_spe_evmheumiaaw, "__builtin_spe_evmheumiaaw", SPE_BUILTIN_EVMHEUMIAAW },
6211 { 0, CODE_FOR_spe_evmheumianw, "__builtin_spe_evmheumianw", SPE_BUILTIN_EVMHEUMIANW },
6212 { 0, CODE_FOR_spe_evmheusiaaw, "__builtin_spe_evmheusiaaw", SPE_BUILTIN_EVMHEUSIAAW },
6213 { 0, CODE_FOR_spe_evmheusianw, "__builtin_spe_evmheusianw", SPE_BUILTIN_EVMHEUSIANW },
6214 { 0, CODE_FOR_spe_evmhogsmfaa, "__builtin_spe_evmhogsmfaa", SPE_BUILTIN_EVMHOGSMFAA },
6215 { 0, CODE_FOR_spe_evmhogsmfan, "__builtin_spe_evmhogsmfan", SPE_BUILTIN_EVMHOGSMFAN },
6216 { 0, CODE_FOR_spe_evmhogsmiaa, "__builtin_spe_evmhogsmiaa", SPE_BUILTIN_EVMHOGSMIAA },
6217 { 0, CODE_FOR_spe_evmhogsmian, "__builtin_spe_evmhogsmian", SPE_BUILTIN_EVMHOGSMIAN },
6218 { 0, CODE_FOR_spe_evmhogumiaa, "__builtin_spe_evmhogumiaa", SPE_BUILTIN_EVMHOGUMIAA },
6219 { 0, CODE_FOR_spe_evmhogumian, "__builtin_spe_evmhogumian", SPE_BUILTIN_EVMHOGUMIAN },
6220 { 0, CODE_FOR_spe_evmhosmf, "__builtin_spe_evmhosmf", SPE_BUILTIN_EVMHOSMF },
6221 { 0, CODE_FOR_spe_evmhosmfa, "__builtin_spe_evmhosmfa", SPE_BUILTIN_EVMHOSMFA },
6222 { 0, CODE_FOR_spe_evmhosmfaaw, "__builtin_spe_evmhosmfaaw", SPE_BUILTIN_EVMHOSMFAAW },
6223 { 0, CODE_FOR_spe_evmhosmfanw, "__builtin_spe_evmhosmfanw", SPE_BUILTIN_EVMHOSMFANW },
6224 { 0, CODE_FOR_spe_evmhosmi, "__builtin_spe_evmhosmi", SPE_BUILTIN_EVMHOSMI },
6225 { 0, CODE_FOR_spe_evmhosmia, "__builtin_spe_evmhosmia", SPE_BUILTIN_EVMHOSMIA },
6226 { 0, CODE_FOR_spe_evmhosmiaaw, "__builtin_spe_evmhosmiaaw", SPE_BUILTIN_EVMHOSMIAAW },
6227 { 0, CODE_FOR_spe_evmhosmianw, "__builtin_spe_evmhosmianw", SPE_BUILTIN_EVMHOSMIANW },
6228 { 0, CODE_FOR_spe_evmhossf, "__builtin_spe_evmhossf", SPE_BUILTIN_EVMHOSSF },
6229 { 0, CODE_FOR_spe_evmhossfa, "__builtin_spe_evmhossfa", SPE_BUILTIN_EVMHOSSFA },
6230 { 0, CODE_FOR_spe_evmhossfaaw, "__builtin_spe_evmhossfaaw", SPE_BUILTIN_EVMHOSSFAAW },
6231 { 0, CODE_FOR_spe_evmhossfanw, "__builtin_spe_evmhossfanw", SPE_BUILTIN_EVMHOSSFANW },
6232 { 0, CODE_FOR_spe_evmhossiaaw, "__builtin_spe_evmhossiaaw", SPE_BUILTIN_EVMHOSSIAAW },
6233 { 0, CODE_FOR_spe_evmhossianw, "__builtin_spe_evmhossianw", SPE_BUILTIN_EVMHOSSIANW },
6234 { 0, CODE_FOR_spe_evmhoumi, "__builtin_spe_evmhoumi", SPE_BUILTIN_EVMHOUMI },
6235 { 0, CODE_FOR_spe_evmhoumia, "__builtin_spe_evmhoumia", SPE_BUILTIN_EVMHOUMIA },
6236 { 0, CODE_FOR_spe_evmhoumiaaw, "__builtin_spe_evmhoumiaaw", SPE_BUILTIN_EVMHOUMIAAW },
6237 { 0, CODE_FOR_spe_evmhoumianw, "__builtin_spe_evmhoumianw", SPE_BUILTIN_EVMHOUMIANW },
6238 { 0, CODE_FOR_spe_evmhousiaaw, "__builtin_spe_evmhousiaaw", SPE_BUILTIN_EVMHOUSIAAW },
6239 { 0, CODE_FOR_spe_evmhousianw, "__builtin_spe_evmhousianw", SPE_BUILTIN_EVMHOUSIANW },
6240 { 0, CODE_FOR_spe_evmwhsmf, "__builtin_spe_evmwhsmf", SPE_BUILTIN_EVMWHSMF },
6241 { 0, CODE_FOR_spe_evmwhsmfa, "__builtin_spe_evmwhsmfa", SPE_BUILTIN_EVMWHSMFA },
6242 { 0, CODE_FOR_spe_evmwhsmi, "__builtin_spe_evmwhsmi", SPE_BUILTIN_EVMWHSMI },
6243 { 0, CODE_FOR_spe_evmwhsmia, "__builtin_spe_evmwhsmia", SPE_BUILTIN_EVMWHSMIA },
6244 { 0, CODE_FOR_spe_evmwhssf, "__builtin_spe_evmwhssf", SPE_BUILTIN_EVMWHSSF },
6245 { 0, CODE_FOR_spe_evmwhssfa, "__builtin_spe_evmwhssfa", SPE_BUILTIN_EVMWHSSFA },
6246 { 0, CODE_FOR_spe_evmwhumi, "__builtin_spe_evmwhumi", SPE_BUILTIN_EVMWHUMI },
6247 { 0, CODE_FOR_spe_evmwhumia, "__builtin_spe_evmwhumia", SPE_BUILTIN_EVMWHUMIA },
6248 { 0, CODE_FOR_spe_evmwlsmiaaw, "__builtin_spe_evmwlsmiaaw", SPE_BUILTIN_EVMWLSMIAAW },
6249 { 0, CODE_FOR_spe_evmwlsmianw, "__builtin_spe_evmwlsmianw", SPE_BUILTIN_EVMWLSMIANW },
6250 { 0, CODE_FOR_spe_evmwlssiaaw, "__builtin_spe_evmwlssiaaw", SPE_BUILTIN_EVMWLSSIAAW },
6251 { 0, CODE_FOR_spe_evmwlssianw, "__builtin_spe_evmwlssianw", SPE_BUILTIN_EVMWLSSIANW },
6252 { 0, CODE_FOR_spe_evmwlumi, "__builtin_spe_evmwlumi", SPE_BUILTIN_EVMWLUMI },
6253 { 0, CODE_FOR_spe_evmwlumia, "__builtin_spe_evmwlumia", SPE_BUILTIN_EVMWLUMIA },
6254 { 0, CODE_FOR_spe_evmwlumiaaw, "__builtin_spe_evmwlumiaaw", SPE_BUILTIN_EVMWLUMIAAW },
6255 { 0, CODE_FOR_spe_evmwlumianw, "__builtin_spe_evmwlumianw", SPE_BUILTIN_EVMWLUMIANW },
6256 { 0, CODE_FOR_spe_evmwlusiaaw, "__builtin_spe_evmwlusiaaw", SPE_BUILTIN_EVMWLUSIAAW },
6257 { 0, CODE_FOR_spe_evmwlusianw, "__builtin_spe_evmwlusianw", SPE_BUILTIN_EVMWLUSIANW },
6258 { 0, CODE_FOR_spe_evmwsmf, "__builtin_spe_evmwsmf", SPE_BUILTIN_EVMWSMF },
6259 { 0, CODE_FOR_spe_evmwsmfa, "__builtin_spe_evmwsmfa", SPE_BUILTIN_EVMWSMFA },
6260 { 0, CODE_FOR_spe_evmwsmfaa, "__builtin_spe_evmwsmfaa", SPE_BUILTIN_EVMWSMFAA },
6261 { 0, CODE_FOR_spe_evmwsmfan, "__builtin_spe_evmwsmfan", SPE_BUILTIN_EVMWSMFAN },
6262 { 0, CODE_FOR_spe_evmwsmi, "__builtin_spe_evmwsmi", SPE_BUILTIN_EVMWSMI },
6263 { 0, CODE_FOR_spe_evmwsmia, "__builtin_spe_evmwsmia", SPE_BUILTIN_EVMWSMIA },
6264 { 0, CODE_FOR_spe_evmwsmiaa, "__builtin_spe_evmwsmiaa", SPE_BUILTIN_EVMWSMIAA },
6265 { 0, CODE_FOR_spe_evmwsmian, "__builtin_spe_evmwsmian", SPE_BUILTIN_EVMWSMIAN },
6266 { 0, CODE_FOR_spe_evmwssf, "__builtin_spe_evmwssf", SPE_BUILTIN_EVMWSSF },
6267 { 0, CODE_FOR_spe_evmwssfa, "__builtin_spe_evmwssfa", SPE_BUILTIN_EVMWSSFA },
6268 { 0, CODE_FOR_spe_evmwssfaa, "__builtin_spe_evmwssfaa", SPE_BUILTIN_EVMWSSFAA },
6269 { 0, CODE_FOR_spe_evmwssfan, "__builtin_spe_evmwssfan", SPE_BUILTIN_EVMWSSFAN },
6270 { 0, CODE_FOR_spe_evmwumi, "__builtin_spe_evmwumi", SPE_BUILTIN_EVMWUMI },
6271 { 0, CODE_FOR_spe_evmwumia, "__builtin_spe_evmwumia", SPE_BUILTIN_EVMWUMIA },
6272 { 0, CODE_FOR_spe_evmwumiaa, "__builtin_spe_evmwumiaa", SPE_BUILTIN_EVMWUMIAA },
6273 { 0, CODE_FOR_spe_evmwumian, "__builtin_spe_evmwumian", SPE_BUILTIN_EVMWUMIAN },
6274 { 0, CODE_FOR_spe_evnand, "__builtin_spe_evnand", SPE_BUILTIN_EVNAND },
6275 { 0, CODE_FOR_spe_evnor, "__builtin_spe_evnor", SPE_BUILTIN_EVNOR },
6276 { 0, CODE_FOR_spe_evor, "__builtin_spe_evor", SPE_BUILTIN_EVOR },
6277 { 0, CODE_FOR_spe_evorc, "__builtin_spe_evorc", SPE_BUILTIN_EVORC },
6278 { 0, CODE_FOR_spe_evrlw, "__builtin_spe_evrlw", SPE_BUILTIN_EVRLW },
6279 { 0, CODE_FOR_spe_evslw, "__builtin_spe_evslw", SPE_BUILTIN_EVSLW },
6280 { 0, CODE_FOR_spe_evsrws, "__builtin_spe_evsrws", SPE_BUILTIN_EVSRWS },
6281 { 0, CODE_FOR_spe_evsrwu, "__builtin_spe_evsrwu", SPE_BUILTIN_EVSRWU },
6282 { 0, CODE_FOR_spe_evsubfw, "__builtin_spe_evsubfw", SPE_BUILTIN_EVSUBFW },
6284 /* SPE binary operations expecting a 5-bit unsigned literal. */
6285 { 0, CODE_FOR_spe_evaddiw, "__builtin_spe_evaddiw", SPE_BUILTIN_EVADDIW },
6287 { 0, CODE_FOR_spe_evrlwi, "__builtin_spe_evrlwi", SPE_BUILTIN_EVRLWI },
6288 { 0, CODE_FOR_spe_evslwi, "__builtin_spe_evslwi", SPE_BUILTIN_EVSLWI },
6289 { 0, CODE_FOR_spe_evsrwis, "__builtin_spe_evsrwis", SPE_BUILTIN_EVSRWIS },
6290 { 0, CODE_FOR_spe_evsrwiu, "__builtin_spe_evsrwiu", SPE_BUILTIN_EVSRWIU },
6291 { 0, CODE_FOR_spe_evsubifw, "__builtin_spe_evsubifw", SPE_BUILTIN_EVSUBIFW },
6292 { 0, CODE_FOR_spe_evmwhssfaa, "__builtin_spe_evmwhssfaa", SPE_BUILTIN_EVMWHSSFAA },
6293 { 0, CODE_FOR_spe_evmwhssmaa, "__builtin_spe_evmwhssmaa", SPE_BUILTIN_EVMWHSSMAA },
6294 { 0, CODE_FOR_spe_evmwhsmfaa, "__builtin_spe_evmwhsmfaa", SPE_BUILTIN_EVMWHSMFAA },
6295 { 0, CODE_FOR_spe_evmwhsmiaa, "__builtin_spe_evmwhsmiaa", SPE_BUILTIN_EVMWHSMIAA },
6296 { 0, CODE_FOR_spe_evmwhusiaa, "__builtin_spe_evmwhusiaa", SPE_BUILTIN_EVMWHUSIAA },
6297 { 0, CODE_FOR_spe_evmwhumiaa, "__builtin_spe_evmwhumiaa", SPE_BUILTIN_EVMWHUMIAA },
6298 { 0, CODE_FOR_spe_evmwhssfan, "__builtin_spe_evmwhssfan", SPE_BUILTIN_EVMWHSSFAN },
6299 { 0, CODE_FOR_spe_evmwhssian, "__builtin_spe_evmwhssian", SPE_BUILTIN_EVMWHSSIAN },
6300 { 0, CODE_FOR_spe_evmwhsmfan, "__builtin_spe_evmwhsmfan", SPE_BUILTIN_EVMWHSMFAN },
6301 { 0, CODE_FOR_spe_evmwhsmian, "__builtin_spe_evmwhsmian", SPE_BUILTIN_EVMWHSMIAN },
6302 { 0, CODE_FOR_spe_evmwhusian, "__builtin_spe_evmwhusian", SPE_BUILTIN_EVMWHUSIAN },
6303 { 0, CODE_FOR_spe_evmwhumian, "__builtin_spe_evmwhumian", SPE_BUILTIN_EVMWHUMIAN },
6304 { 0, CODE_FOR_spe_evmwhgssfaa, "__builtin_spe_evmwhgssfaa", SPE_BUILTIN_EVMWHGSSFAA },
6305 { 0, CODE_FOR_spe_evmwhgsmfaa, "__builtin_spe_evmwhgsmfaa", SPE_BUILTIN_EVMWHGSMFAA },
6306 { 0, CODE_FOR_spe_evmwhgsmiaa, "__builtin_spe_evmwhgsmiaa", SPE_BUILTIN_EVMWHGSMIAA },
6307 { 0, CODE_FOR_spe_evmwhgumiaa, "__builtin_spe_evmwhgumiaa", SPE_BUILTIN_EVMWHGUMIAA },
6308 { 0, CODE_FOR_spe_evmwhgssfan, "__builtin_spe_evmwhgssfan", SPE_BUILTIN_EVMWHGSSFAN },
6309 { 0, CODE_FOR_spe_evmwhgsmfan, "__builtin_spe_evmwhgsmfan", SPE_BUILTIN_EVMWHGSMFAN },
6310 { 0, CODE_FOR_spe_evmwhgsmian, "__builtin_spe_evmwhgsmian", SPE_BUILTIN_EVMWHGSMIAN },
6311 { 0, CODE_FOR_spe_evmwhgumian, "__builtin_spe_evmwhgumian", SPE_BUILTIN_EVMWHGUMIAN },
6312 { 0, CODE_FOR_spe_brinc, "__builtin_spe_brinc", SPE_BUILTIN_BRINC },
6314 /* Place-holder. Leave as last binary SPE builtin. */
6315 { 0, CODE_FOR_xorv2si3, "__builtin_spe_evxor", SPE_BUILTIN_EVXOR }
6318 /* AltiVec predicates. */
6320 struct builtin_description_predicates
6322 const unsigned int mask;
6323 const enum insn_code icode;
6325 const char *const name;
6326 const enum rs6000_builtins code;
6329 static const struct builtin_description_predicates bdesc_altivec_preds[] =
6331 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpbfp.", "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P },
6332 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpeqfp.", "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P },
6333 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgefp.", "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P },
6334 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgtfp.", "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P },
6335 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpequw.", "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P },
6336 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtsw.", "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P },
6337 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtuw.", "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P },
6338 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtuh.", "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P },
6339 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtsh.", "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P },
6340 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpequh.", "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P },
6341 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpequb.", "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P },
6342 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtsb.", "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P },
6343 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P },
6345 { MASK_ALTIVEC, 0, NULL, "__builtin_vec_vcmpeq_p", ALTIVEC_BUILTIN_VCMPEQ_P },
6346 { MASK_ALTIVEC, 0, NULL, "__builtin_vec_vcmpgt_p", ALTIVEC_BUILTIN_VCMPGT_P },
6347 { MASK_ALTIVEC, 0, NULL, "__builtin_vec_vcmpge_p", ALTIVEC_BUILTIN_VCMPGE_P }
6350 /* SPE predicates. */
6351 static struct builtin_description bdesc_spe_predicates[] =
6353 /* Place-holder. Leave as first. */
6354 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evcmpeq", SPE_BUILTIN_EVCMPEQ },
6355 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evcmpgts", SPE_BUILTIN_EVCMPGTS },
6356 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evcmpgtu", SPE_BUILTIN_EVCMPGTU },
6357 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evcmplts", SPE_BUILTIN_EVCMPLTS },
6358 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evcmpltu", SPE_BUILTIN_EVCMPLTU },
6359 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evfscmpeq", SPE_BUILTIN_EVFSCMPEQ },
6360 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evfscmpgt", SPE_BUILTIN_EVFSCMPGT },
6361 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evfscmplt", SPE_BUILTIN_EVFSCMPLT },
6362 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evfststeq", SPE_BUILTIN_EVFSTSTEQ },
6363 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evfststgt", SPE_BUILTIN_EVFSTSTGT },
6364 /* Place-holder. Leave as last. */
6365 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evfststlt", SPE_BUILTIN_EVFSTSTLT },
6368 /* SPE evsel predicates. */
6369 static struct builtin_description bdesc_spe_evsel[] =
6371 /* Place-holder. Leave as first. */
6372 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evsel_gts", SPE_BUILTIN_EVSEL_CMPGTS },
6373 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evsel_gtu", SPE_BUILTIN_EVSEL_CMPGTU },
6374 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evsel_lts", SPE_BUILTIN_EVSEL_CMPLTS },
6375 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evsel_ltu", SPE_BUILTIN_EVSEL_CMPLTU },
6376 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evsel_eq", SPE_BUILTIN_EVSEL_CMPEQ },
6377 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evsel_fsgt", SPE_BUILTIN_EVSEL_FSCMPGT },
6378 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evsel_fslt", SPE_BUILTIN_EVSEL_FSCMPLT },
6379 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evsel_fseq", SPE_BUILTIN_EVSEL_FSCMPEQ },
6380 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evsel_fststgt", SPE_BUILTIN_EVSEL_FSTSTGT },
6381 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evsel_fststlt", SPE_BUILTIN_EVSEL_FSTSTLT },
6382 /* Place-holder. Leave as last. */
6383 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evsel_fststeq", SPE_BUILTIN_EVSEL_FSTSTEQ },
6386 /* ABS* operations. */
6388 static const struct builtin_description bdesc_abs[] =
6390 { MASK_ALTIVEC, CODE_FOR_absv4si2, "__builtin_altivec_abs_v4si", ALTIVEC_BUILTIN_ABS_V4SI },
6391 { MASK_ALTIVEC, CODE_FOR_absv8hi2, "__builtin_altivec_abs_v8hi", ALTIVEC_BUILTIN_ABS_V8HI },
6392 { MASK_ALTIVEC, CODE_FOR_absv4sf2, "__builtin_altivec_abs_v4sf", ALTIVEC_BUILTIN_ABS_V4SF },
6393 { MASK_ALTIVEC, CODE_FOR_absv16qi2, "__builtin_altivec_abs_v16qi", ALTIVEC_BUILTIN_ABS_V16QI },
6394 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v4si, "__builtin_altivec_abss_v4si", ALTIVEC_BUILTIN_ABSS_V4SI },
6395 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v8hi, "__builtin_altivec_abss_v8hi", ALTIVEC_BUILTIN_ABSS_V8HI },
6396 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v16qi, "__builtin_altivec_abss_v16qi", ALTIVEC_BUILTIN_ABSS_V16QI }
6399 /* Simple unary operations: VECb = foo (unsigned literal) or VECb =
6402 static struct builtin_description bdesc_1arg[] =
6404 { MASK_ALTIVEC, CODE_FOR_altivec_vexptefp, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP },
6405 { MASK_ALTIVEC, CODE_FOR_altivec_vlogefp, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP },
6406 { MASK_ALTIVEC, CODE_FOR_altivec_vrefp, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP },
6407 { MASK_ALTIVEC, CODE_FOR_altivec_vrfim, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM },
6408 { MASK_ALTIVEC, CODE_FOR_altivec_vrfin, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN },
6409 { MASK_ALTIVEC, CODE_FOR_altivec_vrfip, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP },
6410 { MASK_ALTIVEC, CODE_FOR_ftruncv4sf2, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ },
6411 { MASK_ALTIVEC, CODE_FOR_altivec_vrsqrtefp, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP },
6412 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisb, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB },
6413 { MASK_ALTIVEC, CODE_FOR_altivec_vspltish, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH },
6414 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisw, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW },
6415 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsb, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB },
6416 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhpx, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX },
6417 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsh, "__builtin_altivec_vupkhsh", ALTIVEC_BUILTIN_VUPKHSH },
6418 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsb, "__builtin_altivec_vupklsb", ALTIVEC_BUILTIN_VUPKLSB },
6419 { MASK_ALTIVEC, CODE_FOR_altivec_vupklpx, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX },
6420 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsh, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH },
6422 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_abs", ALTIVEC_BUILTIN_VEC_ABS },
6423 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_abss", ALTIVEC_BUILTIN_VEC_ABSS },
6424 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_ceil", ALTIVEC_BUILTIN_VEC_CEIL },
6425 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_expte", ALTIVEC_BUILTIN_VEC_EXPTE },
6426 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_floor", ALTIVEC_BUILTIN_VEC_FLOOR },
6427 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_loge", ALTIVEC_BUILTIN_VEC_LOGE },
6428 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mtvscr", ALTIVEC_BUILTIN_VEC_MTVSCR },
6429 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_re", ALTIVEC_BUILTIN_VEC_RE },
6430 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_round", ALTIVEC_BUILTIN_VEC_ROUND },
6431 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_rsqrte", ALTIVEC_BUILTIN_VEC_RSQRTE },
6432 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_trunc", ALTIVEC_BUILTIN_VEC_TRUNC },
6433 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_unpackh", ALTIVEC_BUILTIN_VEC_UNPACKH },
6434 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupkhsh", ALTIVEC_BUILTIN_VEC_VUPKHSH },
6435 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupkhpx", ALTIVEC_BUILTIN_VEC_VUPKHPX },
6436 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupkhsb", ALTIVEC_BUILTIN_VEC_VUPKHSB },
6437 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_unpackl", ALTIVEC_BUILTIN_VEC_UNPACKL },
6438 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupklpx", ALTIVEC_BUILTIN_VEC_VUPKLPX },
6439 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupklsh", ALTIVEC_BUILTIN_VEC_VUPKLSH },
6440 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupklsb", ALTIVEC_BUILTIN_VEC_VUPKLSB },
6442 /* The SPE unary builtins must start with SPE_BUILTIN_EVABS and
6443 end with SPE_BUILTIN_EVSUBFUSIAAW. */
6444 { 0, CODE_FOR_spe_evabs, "__builtin_spe_evabs", SPE_BUILTIN_EVABS },
6445 { 0, CODE_FOR_spe_evaddsmiaaw, "__builtin_spe_evaddsmiaaw", SPE_BUILTIN_EVADDSMIAAW },
6446 { 0, CODE_FOR_spe_evaddssiaaw, "__builtin_spe_evaddssiaaw", SPE_BUILTIN_EVADDSSIAAW },
6447 { 0, CODE_FOR_spe_evaddumiaaw, "__builtin_spe_evaddumiaaw", SPE_BUILTIN_EVADDUMIAAW },
6448 { 0, CODE_FOR_spe_evaddusiaaw, "__builtin_spe_evaddusiaaw", SPE_BUILTIN_EVADDUSIAAW },
6449 { 0, CODE_FOR_spe_evcntlsw, "__builtin_spe_evcntlsw", SPE_BUILTIN_EVCNTLSW },
6450 { 0, CODE_FOR_spe_evcntlzw, "__builtin_spe_evcntlzw", SPE_BUILTIN_EVCNTLZW },
6451 { 0, CODE_FOR_spe_evextsb, "__builtin_spe_evextsb", SPE_BUILTIN_EVEXTSB },
6452 { 0, CODE_FOR_spe_evextsh, "__builtin_spe_evextsh", SPE_BUILTIN_EVEXTSH },
6453 { 0, CODE_FOR_spe_evfsabs, "__builtin_spe_evfsabs", SPE_BUILTIN_EVFSABS },
6454 { 0, CODE_FOR_spe_evfscfsf, "__builtin_spe_evfscfsf", SPE_BUILTIN_EVFSCFSF },
6455 { 0, CODE_FOR_spe_evfscfsi, "__builtin_spe_evfscfsi", SPE_BUILTIN_EVFSCFSI },
6456 { 0, CODE_FOR_spe_evfscfuf, "__builtin_spe_evfscfuf", SPE_BUILTIN_EVFSCFUF },
6457 { 0, CODE_FOR_spe_evfscfui, "__builtin_spe_evfscfui", SPE_BUILTIN_EVFSCFUI },
6458 { 0, CODE_FOR_spe_evfsctsf, "__builtin_spe_evfsctsf", SPE_BUILTIN_EVFSCTSF },
6459 { 0, CODE_FOR_spe_evfsctsi, "__builtin_spe_evfsctsi", SPE_BUILTIN_EVFSCTSI },
6460 { 0, CODE_FOR_spe_evfsctsiz, "__builtin_spe_evfsctsiz", SPE_BUILTIN_EVFSCTSIZ },
6461 { 0, CODE_FOR_spe_evfsctuf, "__builtin_spe_evfsctuf", SPE_BUILTIN_EVFSCTUF },
6462 { 0, CODE_FOR_spe_evfsctui, "__builtin_spe_evfsctui", SPE_BUILTIN_EVFSCTUI },
6463 { 0, CODE_FOR_spe_evfsctuiz, "__builtin_spe_evfsctuiz", SPE_BUILTIN_EVFSCTUIZ },
6464 { 0, CODE_FOR_spe_evfsnabs, "__builtin_spe_evfsnabs", SPE_BUILTIN_EVFSNABS },
6465 { 0, CODE_FOR_spe_evfsneg, "__builtin_spe_evfsneg", SPE_BUILTIN_EVFSNEG },
6466 { 0, CODE_FOR_spe_evmra, "__builtin_spe_evmra", SPE_BUILTIN_EVMRA },
6467 { 0, CODE_FOR_negv2si2, "__builtin_spe_evneg", SPE_BUILTIN_EVNEG },
6468 { 0, CODE_FOR_spe_evrndw, "__builtin_spe_evrndw", SPE_BUILTIN_EVRNDW },
6469 { 0, CODE_FOR_spe_evsubfsmiaaw, "__builtin_spe_evsubfsmiaaw", SPE_BUILTIN_EVSUBFSMIAAW },
6470 { 0, CODE_FOR_spe_evsubfssiaaw, "__builtin_spe_evsubfssiaaw", SPE_BUILTIN_EVSUBFSSIAAW },
6471 { 0, CODE_FOR_spe_evsubfumiaaw, "__builtin_spe_evsubfumiaaw", SPE_BUILTIN_EVSUBFUMIAAW },
6473 /* Place-holder. Leave as last unary SPE builtin. */
6474 { 0, CODE_FOR_spe_evsubfusiaaw, "__builtin_spe_evsubfusiaaw", SPE_BUILTIN_EVSUBFUSIAAW }
6478 rs6000_expand_unop_builtin (enum insn_code icode, tree arglist, rtx target)
6481 tree arg0 = TREE_VALUE (arglist);
6482 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6483 enum machine_mode tmode = insn_data[icode].operand[0].mode;
6484 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
6486 if (icode == CODE_FOR_nothing)
6487 /* Builtin not supported on this processor. */
6490 /* If we got invalid arguments bail out before generating bad rtl. */
6491 if (arg0 == error_mark_node)
6494 if (icode == CODE_FOR_altivec_vspltisb
6495 || icode == CODE_FOR_altivec_vspltish
6496 || icode == CODE_FOR_altivec_vspltisw
6497 || icode == CODE_FOR_spe_evsplatfi
6498 || icode == CODE_FOR_spe_evsplati)
6500 /* Only allow 5-bit *signed* literals. */
6501 if (GET_CODE (op0) != CONST_INT
6502 || INTVAL (op0) > 15
6503 || INTVAL (op0) < -16)
6505 error ("argument 1 must be a 5-bit signed literal");
6511 || GET_MODE (target) != tmode
6512 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6513 target = gen_reg_rtx (tmode);
6515 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6516 op0 = copy_to_mode_reg (mode0, op0);
6518 pat = GEN_FCN (icode) (target, op0);
6527 altivec_expand_abs_builtin (enum insn_code icode, tree arglist, rtx target)
6529 rtx pat, scratch1, scratch2;
6530 tree arg0 = TREE_VALUE (arglist);
6531 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6532 enum machine_mode tmode = insn_data[icode].operand[0].mode;
6533 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
6535 /* If we have invalid arguments, bail out before generating bad rtl. */
6536 if (arg0 == error_mark_node)
6540 || GET_MODE (target) != tmode
6541 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6542 target = gen_reg_rtx (tmode);
6544 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6545 op0 = copy_to_mode_reg (mode0, op0);
6547 scratch1 = gen_reg_rtx (mode0);
6548 scratch2 = gen_reg_rtx (mode0);
6550 pat = GEN_FCN (icode) (target, op0, scratch1, scratch2);
6559 rs6000_expand_binop_builtin (enum insn_code icode, tree arglist, rtx target)
6562 tree arg0 = TREE_VALUE (arglist);
6563 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
6564 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6565 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6566 enum machine_mode tmode = insn_data[icode].operand[0].mode;
6567 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
6568 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
6570 if (icode == CODE_FOR_nothing)
6571 /* Builtin not supported on this processor. */
6574 /* If we got invalid arguments bail out before generating bad rtl. */
6575 if (arg0 == error_mark_node || arg1 == error_mark_node)
6578 if (icode == CODE_FOR_altivec_vcfux
6579 || icode == CODE_FOR_altivec_vcfsx
6580 || icode == CODE_FOR_altivec_vctsxs
6581 || icode == CODE_FOR_altivec_vctuxs
6582 || icode == CODE_FOR_altivec_vspltb
6583 || icode == CODE_FOR_altivec_vsplth
6584 || icode == CODE_FOR_altivec_vspltw
6585 || icode == CODE_FOR_spe_evaddiw
6586 || icode == CODE_FOR_spe_evldd
6587 || icode == CODE_FOR_spe_evldh
6588 || icode == CODE_FOR_spe_evldw
6589 || icode == CODE_FOR_spe_evlhhesplat
6590 || icode == CODE_FOR_spe_evlhhossplat
6591 || icode == CODE_FOR_spe_evlhhousplat
6592 || icode == CODE_FOR_spe_evlwhe
6593 || icode == CODE_FOR_spe_evlwhos
6594 || icode == CODE_FOR_spe_evlwhou
6595 || icode == CODE_FOR_spe_evlwhsplat
6596 || icode == CODE_FOR_spe_evlwwsplat
6597 || icode == CODE_FOR_spe_evrlwi
6598 || icode == CODE_FOR_spe_evslwi
6599 || icode == CODE_FOR_spe_evsrwis
6600 || icode == CODE_FOR_spe_evsubifw
6601 || icode == CODE_FOR_spe_evsrwiu)
6603 /* Only allow 5-bit unsigned literals. */
6605 if (TREE_CODE (arg1) != INTEGER_CST
6606 || TREE_INT_CST_LOW (arg1) & ~0x1f)
6608 error ("argument 2 must be a 5-bit unsigned literal");
6614 || GET_MODE (target) != tmode
6615 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6616 target = gen_reg_rtx (tmode);
6618 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6619 op0 = copy_to_mode_reg (mode0, op0);
6620 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
6621 op1 = copy_to_mode_reg (mode1, op1);
6623 pat = GEN_FCN (icode) (target, op0, op1);
6632 altivec_expand_predicate_builtin (enum insn_code icode, const char *opcode,
6633 tree arglist, rtx target)
6636 tree cr6_form = TREE_VALUE (arglist);
6637 tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
6638 tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6639 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6640 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6641 enum machine_mode tmode = SImode;
6642 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
6643 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
6646 if (TREE_CODE (cr6_form) != INTEGER_CST)
6648 error ("argument 1 of __builtin_altivec_predicate must be a constant");
6652 cr6_form_int = TREE_INT_CST_LOW (cr6_form);
6654 gcc_assert (mode0 == mode1);
6656 /* If we have invalid arguments, bail out before generating bad rtl. */
6657 if (arg0 == error_mark_node || arg1 == error_mark_node)
6661 || GET_MODE (target) != tmode
6662 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6663 target = gen_reg_rtx (tmode);
6665 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6666 op0 = copy_to_mode_reg (mode0, op0);
6667 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
6668 op1 = copy_to_mode_reg (mode1, op1);
6670 scratch = gen_reg_rtx (mode0);
6672 pat = GEN_FCN (icode) (scratch, op0, op1,
6673 gen_rtx_SYMBOL_REF (Pmode, opcode));
6678 /* The vec_any* and vec_all* predicates use the same opcodes for two
6679 different operations, but the bits in CR6 will be different
6680 depending on what information we want. So we have to play tricks
6681 with CR6 to get the right bits out.
6683 If you think this is disgusting, look at the specs for the
6684 AltiVec predicates. */
6686 switch (cr6_form_int)
6689 emit_insn (gen_cr6_test_for_zero (target));
6692 emit_insn (gen_cr6_test_for_zero_reverse (target));
6695 emit_insn (gen_cr6_test_for_lt (target));
6698 emit_insn (gen_cr6_test_for_lt_reverse (target));
6701 error ("argument 1 of __builtin_altivec_predicate is out of range");
6709 altivec_expand_lv_builtin (enum insn_code icode, tree arglist, rtx target)
6712 tree arg0 = TREE_VALUE (arglist);
6713 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
6714 enum machine_mode tmode = insn_data[icode].operand[0].mode;
6715 enum machine_mode mode0 = Pmode;
6716 enum machine_mode mode1 = Pmode;
6717 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6718 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6720 if (icode == CODE_FOR_nothing)
6721 /* Builtin not supported on this processor. */
6724 /* If we got invalid arguments bail out before generating bad rtl. */
6725 if (arg0 == error_mark_node || arg1 == error_mark_node)
6729 || GET_MODE (target) != tmode
6730 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6731 target = gen_reg_rtx (tmode);
6733 op1 = copy_to_mode_reg (mode1, op1);
6735 if (op0 == const0_rtx)
6737 addr = gen_rtx_MEM (tmode, op1);
6741 op0 = copy_to_mode_reg (mode0, op0);
6742 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op0, op1));
6745 pat = GEN_FCN (icode) (target, addr);
6755 spe_expand_stv_builtin (enum insn_code icode, tree arglist)
6757 tree arg0 = TREE_VALUE (arglist);
6758 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
6759 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6760 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6761 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6762 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
6764 enum machine_mode mode0 = insn_data[icode].operand[0].mode;
6765 enum machine_mode mode1 = insn_data[icode].operand[1].mode;
6766 enum machine_mode mode2 = insn_data[icode].operand[2].mode;
6768 /* Invalid arguments. Bail before doing anything stoopid! */
6769 if (arg0 == error_mark_node
6770 || arg1 == error_mark_node
6771 || arg2 == error_mark_node)
6774 if (! (*insn_data[icode].operand[2].predicate) (op0, mode2))
6775 op0 = copy_to_mode_reg (mode2, op0);
6776 if (! (*insn_data[icode].operand[0].predicate) (op1, mode0))
6777 op1 = copy_to_mode_reg (mode0, op1);
6778 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
6779 op2 = copy_to_mode_reg (mode1, op2);
6781 pat = GEN_FCN (icode) (op1, op2, op0);
6788 altivec_expand_stv_builtin (enum insn_code icode, tree arglist)
6790 tree arg0 = TREE_VALUE (arglist);
6791 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
6792 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6793 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6794 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6795 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
6797 enum machine_mode tmode = insn_data[icode].operand[0].mode;
6798 enum machine_mode mode1 = Pmode;
6799 enum machine_mode mode2 = Pmode;
6801 /* Invalid arguments. Bail before doing anything stoopid! */
6802 if (arg0 == error_mark_node
6803 || arg1 == error_mark_node
6804 || arg2 == error_mark_node)
6807 if (! (*insn_data[icode].operand[1].predicate) (op0, tmode))
6808 op0 = copy_to_mode_reg (tmode, op0);
6810 op2 = copy_to_mode_reg (mode2, op2);
6812 if (op1 == const0_rtx)
6814 addr = gen_rtx_MEM (tmode, op2);
6818 op1 = copy_to_mode_reg (mode1, op1);
6819 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op1, op2));
6822 pat = GEN_FCN (icode) (addr, op0);
6829 rs6000_expand_ternop_builtin (enum insn_code icode, tree arglist, rtx target)
6832 tree arg0 = TREE_VALUE (arglist);
6833 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
6834 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6835 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6836 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6837 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
6838 enum machine_mode tmode = insn_data[icode].operand[0].mode;
6839 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
6840 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
6841 enum machine_mode mode2 = insn_data[icode].operand[3].mode;
6843 if (icode == CODE_FOR_nothing)
6844 /* Builtin not supported on this processor. */
6847 /* If we got invalid arguments bail out before generating bad rtl. */
6848 if (arg0 == error_mark_node
6849 || arg1 == error_mark_node
6850 || arg2 == error_mark_node)
6853 if (icode == CODE_FOR_altivec_vsldoi_v4sf
6854 || icode == CODE_FOR_altivec_vsldoi_v4si
6855 || icode == CODE_FOR_altivec_vsldoi_v8hi
6856 || icode == CODE_FOR_altivec_vsldoi_v16qi)
6858 /* Only allow 4-bit unsigned literals. */
6860 if (TREE_CODE (arg2) != INTEGER_CST
6861 || TREE_INT_CST_LOW (arg2) & ~0xf)
6863 error ("argument 3 must be a 4-bit unsigned literal");
6869 || GET_MODE (target) != tmode
6870 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6871 target = gen_reg_rtx (tmode);
6873 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6874 op0 = copy_to_mode_reg (mode0, op0);
6875 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
6876 op1 = copy_to_mode_reg (mode1, op1);
6877 if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
6878 op2 = copy_to_mode_reg (mode2, op2);
6880 pat = GEN_FCN (icode) (target, op0, op1, op2);
6888 /* Expand the lvx builtins. */
6890 altivec_expand_ld_builtin (tree exp, rtx target, bool *expandedp)
6892 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6893 tree arglist = TREE_OPERAND (exp, 1);
6894 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
6896 enum machine_mode tmode, mode0;
6898 enum insn_code icode;
6902 case ALTIVEC_BUILTIN_LD_INTERNAL_16qi:
6903 icode = CODE_FOR_altivec_lvx_v16qi;
6905 case ALTIVEC_BUILTIN_LD_INTERNAL_8hi:
6906 icode = CODE_FOR_altivec_lvx_v8hi;
6908 case ALTIVEC_BUILTIN_LD_INTERNAL_4si:
6909 icode = CODE_FOR_altivec_lvx_v4si;
6911 case ALTIVEC_BUILTIN_LD_INTERNAL_4sf:
6912 icode = CODE_FOR_altivec_lvx_v4sf;
6921 arg0 = TREE_VALUE (arglist);
6922 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6923 tmode = insn_data[icode].operand[0].mode;
6924 mode0 = insn_data[icode].operand[1].mode;
6927 || GET_MODE (target) != tmode
6928 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6929 target = gen_reg_rtx (tmode);
6931 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6932 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
6934 pat = GEN_FCN (icode) (target, op0);
6941 /* Expand the stvx builtins. */
6943 altivec_expand_st_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
6946 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6947 tree arglist = TREE_OPERAND (exp, 1);
6948 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
6950 enum machine_mode mode0, mode1;
6952 enum insn_code icode;
6956 case ALTIVEC_BUILTIN_ST_INTERNAL_16qi:
6957 icode = CODE_FOR_altivec_stvx_v16qi;
6959 case ALTIVEC_BUILTIN_ST_INTERNAL_8hi:
6960 icode = CODE_FOR_altivec_stvx_v8hi;
6962 case ALTIVEC_BUILTIN_ST_INTERNAL_4si:
6963 icode = CODE_FOR_altivec_stvx_v4si;
6965 case ALTIVEC_BUILTIN_ST_INTERNAL_4sf:
6966 icode = CODE_FOR_altivec_stvx_v4sf;
6973 arg0 = TREE_VALUE (arglist);
6974 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
6975 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6976 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6977 mode0 = insn_data[icode].operand[0].mode;
6978 mode1 = insn_data[icode].operand[1].mode;
6980 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
6981 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
6982 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
6983 op1 = copy_to_mode_reg (mode1, op1);
6985 pat = GEN_FCN (icode) (op0, op1);
6993 /* Expand the dst builtins. */
6995 altivec_expand_dst_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
6998 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6999 tree arglist = TREE_OPERAND (exp, 1);
7000 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
7001 tree arg0, arg1, arg2;
7002 enum machine_mode mode0, mode1, mode2;
7003 rtx pat, op0, op1, op2;
7004 struct builtin_description *d;
7009 /* Handle DST variants. */
7010 d = (struct builtin_description *) bdesc_dst;
7011 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
7012 if (d->code == fcode)
7014 arg0 = TREE_VALUE (arglist);
7015 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
7016 arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
7017 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
7018 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
7019 op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
7020 mode0 = insn_data[d->icode].operand[0].mode;
7021 mode1 = insn_data[d->icode].operand[1].mode;
7022 mode2 = insn_data[d->icode].operand[2].mode;
7024 /* Invalid arguments, bail out before generating bad rtl. */
7025 if (arg0 == error_mark_node
7026 || arg1 == error_mark_node
7027 || arg2 == error_mark_node)
7032 if (TREE_CODE (arg2) != INTEGER_CST
7033 || TREE_INT_CST_LOW (arg2) & ~0x3)
7035 error ("argument to %qs must be a 2-bit unsigned literal", d->name);
7039 if (! (*insn_data[d->icode].operand[0].predicate) (op0, mode0))
7040 op0 = copy_to_mode_reg (Pmode, op0);
7041 if (! (*insn_data[d->icode].operand[1].predicate) (op1, mode1))
7042 op1 = copy_to_mode_reg (mode1, op1);
7044 pat = GEN_FCN (d->icode) (op0, op1, op2);
7054 /* Expand vec_init builtin. */
7056 altivec_expand_vec_init_builtin (tree type, tree arglist, rtx target)
7058 enum machine_mode tmode = TYPE_MODE (type);
7059 enum machine_mode inner_mode = GET_MODE_INNER (tmode);
7060 int i, n_elt = GET_MODE_NUNITS (tmode);
7061 rtvec v = rtvec_alloc (n_elt);
7063 gcc_assert (VECTOR_MODE_P (tmode));
7065 for (i = 0; i < n_elt; ++i, arglist = TREE_CHAIN (arglist))
7067 rtx x = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
7068 RTVEC_ELT (v, i) = gen_lowpart (inner_mode, x);
7071 gcc_assert (arglist == NULL);
7073 if (!target || !register_operand (target, tmode))
7074 target = gen_reg_rtx (tmode);
7076 rs6000_expand_vector_init (target, gen_rtx_PARALLEL (tmode, v));
7080 /* Return the integer constant in ARG. Constrain it to be in the range
7081 of the subparts of VEC_TYPE; issue an error if not. */
7084 get_element_number (tree vec_type, tree arg)
7086 unsigned HOST_WIDE_INT elt, max = TYPE_VECTOR_SUBPARTS (vec_type) - 1;
7088 if (!host_integerp (arg, 1)
7089 || (elt = tree_low_cst (arg, 1), elt > max))
7091 error ("selector must be an integer constant in the range 0..%wi", max);
7098 /* Expand vec_set builtin. */
7100 altivec_expand_vec_set_builtin (tree arglist)
7102 enum machine_mode tmode, mode1;
7103 tree arg0, arg1, arg2;
7107 arg0 = TREE_VALUE (arglist);
7108 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
7109 arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
7111 tmode = TYPE_MODE (TREE_TYPE (arg0));
7112 mode1 = TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0)));
7113 gcc_assert (VECTOR_MODE_P (tmode));
7115 op0 = expand_expr (arg0, NULL_RTX, tmode, 0);
7116 op1 = expand_expr (arg1, NULL_RTX, mode1, 0);
7117 elt = get_element_number (TREE_TYPE (arg0), arg2);
7119 if (GET_MODE (op1) != mode1 && GET_MODE (op1) != VOIDmode)
7120 op1 = convert_modes (mode1, GET_MODE (op1), op1, true);
7122 op0 = force_reg (tmode, op0);
7123 op1 = force_reg (mode1, op1);
7125 rs6000_expand_vector_set (op0, op1, elt);
7130 /* Expand vec_ext builtin. */
7132 altivec_expand_vec_ext_builtin (tree arglist, rtx target)
7134 enum machine_mode tmode, mode0;
7139 arg0 = TREE_VALUE (arglist);
7140 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
7142 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
7143 elt = get_element_number (TREE_TYPE (arg0), arg1);
7145 tmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0)));
7146 mode0 = TYPE_MODE (TREE_TYPE (arg0));
7147 gcc_assert (VECTOR_MODE_P (mode0));
7149 op0 = force_reg (mode0, op0);
7151 if (optimize || !target || !register_operand (target, tmode))
7152 target = gen_reg_rtx (tmode);
7154 rs6000_expand_vector_extract (target, op0, elt);
7159 /* Expand the builtin in EXP and store the result in TARGET. Store
7160 true in *EXPANDEDP if we found a builtin to expand. */
7162 altivec_expand_builtin (tree exp, rtx target, bool *expandedp)
7164 struct builtin_description *d;
7165 struct builtin_description_predicates *dp;
7167 enum insn_code icode;
7168 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7169 tree arglist = TREE_OPERAND (exp, 1);
7172 enum machine_mode tmode, mode0;
7173 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
7175 if (fcode >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
7176 && fcode <= ALTIVEC_BUILTIN_OVERLOADED_LAST)
7179 error ("unresolved overload for Altivec builtin %qF", fndecl);
7183 target = altivec_expand_ld_builtin (exp, target, expandedp);
7187 target = altivec_expand_st_builtin (exp, target, expandedp);
7191 target = altivec_expand_dst_builtin (exp, target, expandedp);
7199 case ALTIVEC_BUILTIN_STVX:
7200 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx, arglist);
7201 case ALTIVEC_BUILTIN_STVEBX:
7202 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx, arglist);
7203 case ALTIVEC_BUILTIN_STVEHX:
7204 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx, arglist);
7205 case ALTIVEC_BUILTIN_STVEWX:
7206 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvewx, arglist);
7207 case ALTIVEC_BUILTIN_STVXL:
7208 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvxl, arglist);
7210 case ALTIVEC_BUILTIN_MFVSCR:
7211 icode = CODE_FOR_altivec_mfvscr;
7212 tmode = insn_data[icode].operand[0].mode;
7215 || GET_MODE (target) != tmode
7216 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7217 target = gen_reg_rtx (tmode);
7219 pat = GEN_FCN (icode) (target);
7225 case ALTIVEC_BUILTIN_MTVSCR:
7226 icode = CODE_FOR_altivec_mtvscr;
7227 arg0 = TREE_VALUE (arglist);
7228 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
7229 mode0 = insn_data[icode].operand[0].mode;
7231 /* If we got invalid arguments bail out before generating bad rtl. */
7232 if (arg0 == error_mark_node)
7235 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
7236 op0 = copy_to_mode_reg (mode0, op0);
7238 pat = GEN_FCN (icode) (op0);
7243 case ALTIVEC_BUILTIN_DSSALL:
7244 emit_insn (gen_altivec_dssall ());
7247 case ALTIVEC_BUILTIN_DSS:
7248 icode = CODE_FOR_altivec_dss;
7249 arg0 = TREE_VALUE (arglist);
7251 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
7252 mode0 = insn_data[icode].operand[0].mode;
7254 /* If we got invalid arguments bail out before generating bad rtl. */
7255 if (arg0 == error_mark_node)
7258 if (TREE_CODE (arg0) != INTEGER_CST
7259 || TREE_INT_CST_LOW (arg0) & ~0x3)
7261 error ("argument to dss must be a 2-bit unsigned literal");
7265 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
7266 op0 = copy_to_mode_reg (mode0, op0);
7268 emit_insn (gen_altivec_dss (op0));
7271 case ALTIVEC_BUILTIN_VEC_INIT_V4SI:
7272 case ALTIVEC_BUILTIN_VEC_INIT_V8HI:
7273 case ALTIVEC_BUILTIN_VEC_INIT_V16QI:
7274 case ALTIVEC_BUILTIN_VEC_INIT_V4SF:
7275 return altivec_expand_vec_init_builtin (TREE_TYPE (exp), arglist, target);
7277 case ALTIVEC_BUILTIN_VEC_SET_V4SI:
7278 case ALTIVEC_BUILTIN_VEC_SET_V8HI:
7279 case ALTIVEC_BUILTIN_VEC_SET_V16QI:
7280 case ALTIVEC_BUILTIN_VEC_SET_V4SF:
7281 return altivec_expand_vec_set_builtin (arglist);
7283 case ALTIVEC_BUILTIN_VEC_EXT_V4SI:
7284 case ALTIVEC_BUILTIN_VEC_EXT_V8HI:
7285 case ALTIVEC_BUILTIN_VEC_EXT_V16QI:
7286 case ALTIVEC_BUILTIN_VEC_EXT_V4SF:
7287 return altivec_expand_vec_ext_builtin (arglist, target);
7294 /* Expand abs* operations. */
7295 d = (struct builtin_description *) bdesc_abs;
7296 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
7297 if (d->code == fcode)
7298 return altivec_expand_abs_builtin (d->icode, arglist, target);
7300 /* Expand the AltiVec predicates. */
7301 dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
7302 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
7303 if (dp->code == fcode)
7304 return altivec_expand_predicate_builtin (dp->icode, dp->opcode,
7307 /* LV* are funky. We initialized them differently. */
7310 case ALTIVEC_BUILTIN_LVSL:
7311 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvsl,
7313 case ALTIVEC_BUILTIN_LVSR:
7314 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvsr,
7316 case ALTIVEC_BUILTIN_LVEBX:
7317 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvebx,
7319 case ALTIVEC_BUILTIN_LVEHX:
7320 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvehx,
7322 case ALTIVEC_BUILTIN_LVEWX:
7323 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvewx,
7325 case ALTIVEC_BUILTIN_LVXL:
7326 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvxl,
7328 case ALTIVEC_BUILTIN_LVX:
7329 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvx,
7340 /* Binops that need to be initialized manually, but can be expanded
7341 automagically by rs6000_expand_binop_builtin. */
7342 static struct builtin_description bdesc_2arg_spe[] =
7344 { 0, CODE_FOR_spe_evlddx, "__builtin_spe_evlddx", SPE_BUILTIN_EVLDDX },
7345 { 0, CODE_FOR_spe_evldwx, "__builtin_spe_evldwx", SPE_BUILTIN_EVLDWX },
7346 { 0, CODE_FOR_spe_evldhx, "__builtin_spe_evldhx", SPE_BUILTIN_EVLDHX },
7347 { 0, CODE_FOR_spe_evlwhex, "__builtin_spe_evlwhex", SPE_BUILTIN_EVLWHEX },
7348 { 0, CODE_FOR_spe_evlwhoux, "__builtin_spe_evlwhoux", SPE_BUILTIN_EVLWHOUX },
7349 { 0, CODE_FOR_spe_evlwhosx, "__builtin_spe_evlwhosx", SPE_BUILTIN_EVLWHOSX },
7350 { 0, CODE_FOR_spe_evlwwsplatx, "__builtin_spe_evlwwsplatx", SPE_BUILTIN_EVLWWSPLATX },
7351 { 0, CODE_FOR_spe_evlwhsplatx, "__builtin_spe_evlwhsplatx", SPE_BUILTIN_EVLWHSPLATX },
7352 { 0, CODE_FOR_spe_evlhhesplatx, "__builtin_spe_evlhhesplatx", SPE_BUILTIN_EVLHHESPLATX },
7353 { 0, CODE_FOR_spe_evlhhousplatx, "__builtin_spe_evlhhousplatx", SPE_BUILTIN_EVLHHOUSPLATX },
7354 { 0, CODE_FOR_spe_evlhhossplatx, "__builtin_spe_evlhhossplatx", SPE_BUILTIN_EVLHHOSSPLATX },
7355 { 0, CODE_FOR_spe_evldd, "__builtin_spe_evldd", SPE_BUILTIN_EVLDD },
7356 { 0, CODE_FOR_spe_evldw, "__builtin_spe_evldw", SPE_BUILTIN_EVLDW },
7357 { 0, CODE_FOR_spe_evldh, "__builtin_spe_evldh", SPE_BUILTIN_EVLDH },
7358 { 0, CODE_FOR_spe_evlwhe, "__builtin_spe_evlwhe", SPE_BUILTIN_EVLWHE },
7359 { 0, CODE_FOR_spe_evlwhou, "__builtin_spe_evlwhou", SPE_BUILTIN_EVLWHOU },
7360 { 0, CODE_FOR_spe_evlwhos, "__builtin_spe_evlwhos", SPE_BUILTIN_EVLWHOS },
7361 { 0, CODE_FOR_spe_evlwwsplat, "__builtin_spe_evlwwsplat", SPE_BUILTIN_EVLWWSPLAT },
7362 { 0, CODE_FOR_spe_evlwhsplat, "__builtin_spe_evlwhsplat", SPE_BUILTIN_EVLWHSPLAT },
7363 { 0, CODE_FOR_spe_evlhhesplat, "__builtin_spe_evlhhesplat", SPE_BUILTIN_EVLHHESPLAT },
7364 { 0, CODE_FOR_spe_evlhhousplat, "__builtin_spe_evlhhousplat", SPE_BUILTIN_EVLHHOUSPLAT },
7365 { 0, CODE_FOR_spe_evlhhossplat, "__builtin_spe_evlhhossplat", SPE_BUILTIN_EVLHHOSSPLAT }
7368 /* Expand the builtin in EXP and store the result in TARGET. Store
7369 true in *EXPANDEDP if we found a builtin to expand.
7371 This expands the SPE builtins that are not simple unary and binary
7374 spe_expand_builtin (tree exp, rtx target, bool *expandedp)
7376 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7377 tree arglist = TREE_OPERAND (exp, 1);
7379 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
7380 enum insn_code icode;
7381 enum machine_mode tmode, mode0;
7383 struct builtin_description *d;
7388 /* Syntax check for a 5-bit unsigned immediate. */
7391 case SPE_BUILTIN_EVSTDD:
7392 case SPE_BUILTIN_EVSTDH:
7393 case SPE_BUILTIN_EVSTDW:
7394 case SPE_BUILTIN_EVSTWHE:
7395 case SPE_BUILTIN_EVSTWHO:
7396 case SPE_BUILTIN_EVSTWWE:
7397 case SPE_BUILTIN_EVSTWWO:
7398 arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
7399 if (TREE_CODE (arg1) != INTEGER_CST
7400 || TREE_INT_CST_LOW (arg1) & ~0x1f)
7402 error ("argument 2 must be a 5-bit unsigned literal");
7410 /* The evsplat*i instructions are not quite generic. */
7413 case SPE_BUILTIN_EVSPLATFI:
7414 return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplatfi,
7416 case SPE_BUILTIN_EVSPLATI:
7417 return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplati,
7423 d = (struct builtin_description *) bdesc_2arg_spe;
7424 for (i = 0; i < ARRAY_SIZE (bdesc_2arg_spe); ++i, ++d)
7425 if (d->code == fcode)
7426 return rs6000_expand_binop_builtin (d->icode, arglist, target);
7428 d = (struct builtin_description *) bdesc_spe_predicates;
7429 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, ++d)
7430 if (d->code == fcode)
7431 return spe_expand_predicate_builtin (d->icode, arglist, target);
7433 d = (struct builtin_description *) bdesc_spe_evsel;
7434 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, ++d)
7435 if (d->code == fcode)
7436 return spe_expand_evsel_builtin (d->icode, arglist, target);
7440 case SPE_BUILTIN_EVSTDDX:
7441 return spe_expand_stv_builtin (CODE_FOR_spe_evstddx, arglist);
7442 case SPE_BUILTIN_EVSTDHX:
7443 return spe_expand_stv_builtin (CODE_FOR_spe_evstdhx, arglist);
7444 case SPE_BUILTIN_EVSTDWX:
7445 return spe_expand_stv_builtin (CODE_FOR_spe_evstdwx, arglist);
7446 case SPE_BUILTIN_EVSTWHEX:
7447 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhex, arglist);
7448 case SPE_BUILTIN_EVSTWHOX:
7449 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhox, arglist);
7450 case SPE_BUILTIN_EVSTWWEX:
7451 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwex, arglist);
7452 case SPE_BUILTIN_EVSTWWOX:
7453 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwox, arglist);
7454 case SPE_BUILTIN_EVSTDD:
7455 return spe_expand_stv_builtin (CODE_FOR_spe_evstdd, arglist);
7456 case SPE_BUILTIN_EVSTDH:
7457 return spe_expand_stv_builtin (CODE_FOR_spe_evstdh, arglist);
7458 case SPE_BUILTIN_EVSTDW:
7459 return spe_expand_stv_builtin (CODE_FOR_spe_evstdw, arglist);
7460 case SPE_BUILTIN_EVSTWHE:
7461 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhe, arglist);
7462 case SPE_BUILTIN_EVSTWHO:
7463 return spe_expand_stv_builtin (CODE_FOR_spe_evstwho, arglist);
7464 case SPE_BUILTIN_EVSTWWE:
7465 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwe, arglist);
7466 case SPE_BUILTIN_EVSTWWO:
7467 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwo, arglist);
7468 case SPE_BUILTIN_MFSPEFSCR:
7469 icode = CODE_FOR_spe_mfspefscr;
7470 tmode = insn_data[icode].operand[0].mode;
7473 || GET_MODE (target) != tmode
7474 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7475 target = gen_reg_rtx (tmode);
7477 pat = GEN_FCN (icode) (target);
7482 case SPE_BUILTIN_MTSPEFSCR:
7483 icode = CODE_FOR_spe_mtspefscr;
7484 arg0 = TREE_VALUE (arglist);
7485 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
7486 mode0 = insn_data[icode].operand[0].mode;
7488 if (arg0 == error_mark_node)
7491 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
7492 op0 = copy_to_mode_reg (mode0, op0);
7494 pat = GEN_FCN (icode) (op0);
7507 spe_expand_predicate_builtin (enum insn_code icode, tree arglist, rtx target)
7509 rtx pat, scratch, tmp;
7510 tree form = TREE_VALUE (arglist);
7511 tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
7512 tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
7513 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
7514 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
7515 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7516 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
7520 if (TREE_CODE (form) != INTEGER_CST)
7522 error ("argument 1 of __builtin_spe_predicate must be a constant");
7526 form_int = TREE_INT_CST_LOW (form);
7528 gcc_assert (mode0 == mode1);
7530 if (arg0 == error_mark_node || arg1 == error_mark_node)
7534 || GET_MODE (target) != SImode
7535 || ! (*insn_data[icode].operand[0].predicate) (target, SImode))
7536 target = gen_reg_rtx (SImode);
7538 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7539 op0 = copy_to_mode_reg (mode0, op0);
7540 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
7541 op1 = copy_to_mode_reg (mode1, op1);
7543 scratch = gen_reg_rtx (CCmode);
7545 pat = GEN_FCN (icode) (scratch, op0, op1);
7550 /* There are 4 variants for each predicate: _any_, _all_, _upper_,
7551 _lower_. We use one compare, but look in different bits of the
7552 CR for each variant.
7554 There are 2 elements in each SPE simd type (upper/lower). The CR
7555 bits are set as follows:
7557 BIT0 | BIT 1 | BIT 2 | BIT 3
7558 U | L | (U | L) | (U & L)
7560 So, for an "all" relationship, BIT 3 would be set.
7561 For an "any" relationship, BIT 2 would be set. Etc.
7563 Following traditional nomenclature, these bits map to:
7565 BIT0 | BIT 1 | BIT 2 | BIT 3
7568 Later, we will generate rtl to look in the LT/EQ/EQ/OV bits.
7573 /* All variant. OV bit. */
7575 /* We need to get to the OV bit, which is the ORDERED bit. We
7576 could generate (ordered:SI (reg:CC xx) (const_int 0)), but
7577 that's ugly and will make validate_condition_mode die.
7578 So let's just use another pattern. */
7579 emit_insn (gen_move_from_CR_ov_bit (target, scratch));
7581 /* Any variant. EQ bit. */
7585 /* Upper variant. LT bit. */
7589 /* Lower variant. GT bit. */
7594 error ("argument 1 of __builtin_spe_predicate is out of range");
7598 tmp = gen_rtx_fmt_ee (code, SImode, scratch, const0_rtx);
7599 emit_move_insn (target, tmp);
7604 /* The evsel builtins look like this:
7606 e = __builtin_spe_evsel_OP (a, b, c, d);
7610 e[upper] = a[upper] *OP* b[upper] ? c[upper] : d[upper];
7611 e[lower] = a[lower] *OP* b[lower] ? c[lower] : d[lower];
7615 spe_expand_evsel_builtin (enum insn_code icode, tree arglist, rtx target)
7618 tree arg0 = TREE_VALUE (arglist);
7619 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
7620 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
7621 tree arg3 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arglist))));
7622 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
7623 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
7624 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
7625 rtx op3 = expand_expr (arg3, NULL_RTX, VOIDmode, 0);
7626 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7627 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
7629 gcc_assert (mode0 == mode1);
7631 if (arg0 == error_mark_node || arg1 == error_mark_node
7632 || arg2 == error_mark_node || arg3 == error_mark_node)
7636 || GET_MODE (target) != mode0
7637 || ! (*insn_data[icode].operand[0].predicate) (target, mode0))
7638 target = gen_reg_rtx (mode0);
7640 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7641 op0 = copy_to_mode_reg (mode0, op0);
7642 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
7643 op1 = copy_to_mode_reg (mode0, op1);
7644 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
7645 op2 = copy_to_mode_reg (mode0, op2);
7646 if (! (*insn_data[icode].operand[1].predicate) (op3, mode1))
7647 op3 = copy_to_mode_reg (mode0, op3);
7649 /* Generate the compare. */
7650 scratch = gen_reg_rtx (CCmode);
7651 pat = GEN_FCN (icode) (scratch, op0, op1);
7656 if (mode0 == V2SImode)
7657 emit_insn (gen_spe_evsel (target, op2, op3, scratch));
7659 emit_insn (gen_spe_evsel_fs (target, op2, op3, scratch));
7664 /* Expand an expression EXP that calls a built-in function,
7665 with result going to TARGET if that's convenient
7666 (and in mode MODE if that's convenient).
7667 SUBTARGET may be used as the target for computing one of EXP's operands.
7668 IGNORE is nonzero if the value is to be ignored. */
7671 rs6000_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
7672 enum machine_mode mode ATTRIBUTE_UNUSED,
7673 int ignore ATTRIBUTE_UNUSED)
7675 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7676 tree arglist = TREE_OPERAND (exp, 1);
7677 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
7678 struct builtin_description *d;
7683 if (fcode == ALTIVEC_BUILTIN_MASK_FOR_LOAD
7684 || fcode == ALTIVEC_BUILTIN_MASK_FOR_STORE)
7686 int icode = (int) CODE_FOR_altivec_lvsr;
7687 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7688 enum machine_mode mode = insn_data[icode].operand[1].mode;
7692 gcc_assert (TARGET_ALTIVEC);
7694 arg = TREE_VALUE (arglist);
7695 gcc_assert (TREE_CODE (TREE_TYPE (arg)) == POINTER_TYPE);
7696 op = expand_expr (arg, NULL_RTX, Pmode, EXPAND_NORMAL);
7697 addr = memory_address (mode, op);
7698 if (fcode == ALTIVEC_BUILTIN_MASK_FOR_STORE)
7702 /* For the load case need to negate the address. */
7703 op = gen_reg_rtx (GET_MODE (addr));
7704 emit_insn (gen_rtx_SET (VOIDmode, op,
7705 gen_rtx_NEG (GET_MODE (addr), addr)));
7707 op = gen_rtx_MEM (mode, op);
7710 || GET_MODE (target) != tmode
7711 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7712 target = gen_reg_rtx (tmode);
7714 /*pat = gen_altivec_lvsr (target, op);*/
7715 pat = GEN_FCN (icode) (target, op);
7725 ret = altivec_expand_builtin (exp, target, &success);
7732 ret = spe_expand_builtin (exp, target, &success);
7738 gcc_assert (TARGET_ALTIVEC || TARGET_SPE);
7740 /* Handle simple unary operations. */
7741 d = (struct builtin_description *) bdesc_1arg;
7742 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
7743 if (d->code == fcode)
7744 return rs6000_expand_unop_builtin (d->icode, arglist, target);
7746 /* Handle simple binary operations. */
7747 d = (struct builtin_description *) bdesc_2arg;
7748 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
7749 if (d->code == fcode)
7750 return rs6000_expand_binop_builtin (d->icode, arglist, target);
7752 /* Handle simple ternary operations. */
7753 d = (struct builtin_description *) bdesc_3arg;
7754 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
7755 if (d->code == fcode)
7756 return rs6000_expand_ternop_builtin (d->icode, arglist, target);
7762 build_opaque_vector_type (tree node, int nunits)
7764 node = copy_node (node);
7765 TYPE_MAIN_VARIANT (node) = node;
7766 return build_vector_type (node, nunits);
7770 rs6000_init_builtins (void)
7772 V2SI_type_node = build_vector_type (intSI_type_node, 2);
7773 V2SF_type_node = build_vector_type (float_type_node, 2);
7774 V4HI_type_node = build_vector_type (intHI_type_node, 4);
7775 V4SI_type_node = build_vector_type (intSI_type_node, 4);
7776 V4SF_type_node = build_vector_type (float_type_node, 4);
7777 V8HI_type_node = build_vector_type (intHI_type_node, 8);
7778 V16QI_type_node = build_vector_type (intQI_type_node, 16);
7780 unsigned_V16QI_type_node = build_vector_type (unsigned_intQI_type_node, 16);
7781 unsigned_V8HI_type_node = build_vector_type (unsigned_intHI_type_node, 8);
7782 unsigned_V4SI_type_node = build_vector_type (unsigned_intSI_type_node, 4);
7784 opaque_V2SF_type_node = build_opaque_vector_type (float_type_node, 2);
7785 opaque_V2SI_type_node = build_opaque_vector_type (intSI_type_node, 2);
7786 opaque_p_V2SI_type_node = build_pointer_type (opaque_V2SI_type_node);
7787 opaque_V4SI_type_node = copy_node (V4SI_type_node);
7789 /* The 'vector bool ...' types must be kept distinct from 'vector unsigned ...'
7790 types, especially in C++ land. Similarly, 'vector pixel' is distinct from
7791 'vector unsigned short'. */
7793 bool_char_type_node = build_distinct_type_copy (unsigned_intQI_type_node);
7794 bool_short_type_node = build_distinct_type_copy (unsigned_intHI_type_node);
7795 bool_int_type_node = build_distinct_type_copy (unsigned_intSI_type_node);
7796 pixel_type_node = build_distinct_type_copy (unsigned_intHI_type_node);
7798 long_integer_type_internal_node = long_integer_type_node;
7799 long_unsigned_type_internal_node = long_unsigned_type_node;
7800 intQI_type_internal_node = intQI_type_node;
7801 uintQI_type_internal_node = unsigned_intQI_type_node;
7802 intHI_type_internal_node = intHI_type_node;
7803 uintHI_type_internal_node = unsigned_intHI_type_node;
7804 intSI_type_internal_node = intSI_type_node;
7805 uintSI_type_internal_node = unsigned_intSI_type_node;
7806 float_type_internal_node = float_type_node;
7807 void_type_internal_node = void_type_node;
7809 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7810 get_identifier ("__bool char"),
7811 bool_char_type_node));
7812 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7813 get_identifier ("__bool short"),
7814 bool_short_type_node));
7815 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7816 get_identifier ("__bool int"),
7817 bool_int_type_node));
7818 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7819 get_identifier ("__pixel"),
7822 bool_V16QI_type_node = build_vector_type (bool_char_type_node, 16);
7823 bool_V8HI_type_node = build_vector_type (bool_short_type_node, 8);
7824 bool_V4SI_type_node = build_vector_type (bool_int_type_node, 4);
7825 pixel_V8HI_type_node = build_vector_type (pixel_type_node, 8);
7827 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7828 get_identifier ("__vector unsigned char"),
7829 unsigned_V16QI_type_node));
7830 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7831 get_identifier ("__vector signed char"),
7833 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7834 get_identifier ("__vector __bool char"),
7835 bool_V16QI_type_node));
7837 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7838 get_identifier ("__vector unsigned short"),
7839 unsigned_V8HI_type_node));
7840 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7841 get_identifier ("__vector signed short"),
7843 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7844 get_identifier ("__vector __bool short"),
7845 bool_V8HI_type_node));
7847 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7848 get_identifier ("__vector unsigned int"),
7849 unsigned_V4SI_type_node));
7850 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7851 get_identifier ("__vector signed int"),
7853 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7854 get_identifier ("__vector __bool int"),
7855 bool_V4SI_type_node));
7857 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7858 get_identifier ("__vector float"),
7860 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7861 get_identifier ("__vector __pixel"),
7862 pixel_V8HI_type_node));
7865 spe_init_builtins ();
7867 altivec_init_builtins ();
7868 if (TARGET_ALTIVEC || TARGET_SPE)
7869 rs6000_common_init_builtins ();
7872 /* Search through a set of builtins and enable the mask bits.
7873 DESC is an array of builtins.
7874 SIZE is the total number of builtins.
7875 START is the builtin enum at which to start.
7876 END is the builtin enum at which to end. */
7878 enable_mask_for_builtins (struct builtin_description *desc, int size,
7879 enum rs6000_builtins start,
7880 enum rs6000_builtins end)
7884 for (i = 0; i < size; ++i)
7885 if (desc[i].code == start)
7891 for (; i < size; ++i)
7893 /* Flip all the bits on. */
7894 desc[i].mask = target_flags;
7895 if (desc[i].code == end)
7901 spe_init_builtins (void)
7903 tree endlink = void_list_node;
7904 tree puint_type_node = build_pointer_type (unsigned_type_node);
7905 tree pushort_type_node = build_pointer_type (short_unsigned_type_node);
7906 struct builtin_description *d;
7909 tree v2si_ftype_4_v2si
7910 = build_function_type
7911 (opaque_V2SI_type_node,
7912 tree_cons (NULL_TREE, opaque_V2SI_type_node,
7913 tree_cons (NULL_TREE, opaque_V2SI_type_node,
7914 tree_cons (NULL_TREE, opaque_V2SI_type_node,
7915 tree_cons (NULL_TREE, opaque_V2SI_type_node,
7918 tree v2sf_ftype_4_v2sf
7919 = build_function_type
7920 (opaque_V2SF_type_node,
7921 tree_cons (NULL_TREE, opaque_V2SF_type_node,
7922 tree_cons (NULL_TREE, opaque_V2SF_type_node,
7923 tree_cons (NULL_TREE, opaque_V2SF_type_node,
7924 tree_cons (NULL_TREE, opaque_V2SF_type_node,
7927 tree int_ftype_int_v2si_v2si
7928 = build_function_type
7930 tree_cons (NULL_TREE, integer_type_node,
7931 tree_cons (NULL_TREE, opaque_V2SI_type_node,
7932 tree_cons (NULL_TREE, opaque_V2SI_type_node,
7935 tree int_ftype_int_v2sf_v2sf
7936 = build_function_type
7938 tree_cons (NULL_TREE, integer_type_node,
7939 tree_cons (NULL_TREE, opaque_V2SF_type_node,
7940 tree_cons (NULL_TREE, opaque_V2SF_type_node,
7943 tree void_ftype_v2si_puint_int
7944 = build_function_type (void_type_node,
7945 tree_cons (NULL_TREE, opaque_V2SI_type_node,
7946 tree_cons (NULL_TREE, puint_type_node,
7947 tree_cons (NULL_TREE,
7951 tree void_ftype_v2si_puint_char
7952 = build_function_type (void_type_node,
7953 tree_cons (NULL_TREE, opaque_V2SI_type_node,
7954 tree_cons (NULL_TREE, puint_type_node,
7955 tree_cons (NULL_TREE,
7959 tree void_ftype_v2si_pv2si_int
7960 = build_function_type (void_type_node,
7961 tree_cons (NULL_TREE, opaque_V2SI_type_node,
7962 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
7963 tree_cons (NULL_TREE,
7967 tree void_ftype_v2si_pv2si_char
7968 = build_function_type (void_type_node,
7969 tree_cons (NULL_TREE, opaque_V2SI_type_node,
7970 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
7971 tree_cons (NULL_TREE,
7976 = build_function_type (void_type_node,
7977 tree_cons (NULL_TREE, integer_type_node, endlink));
7980 = build_function_type (integer_type_node, endlink);
7982 tree v2si_ftype_pv2si_int
7983 = build_function_type (opaque_V2SI_type_node,
7984 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
7985 tree_cons (NULL_TREE, integer_type_node,
7988 tree v2si_ftype_puint_int
7989 = build_function_type (opaque_V2SI_type_node,
7990 tree_cons (NULL_TREE, puint_type_node,
7991 tree_cons (NULL_TREE, integer_type_node,
7994 tree v2si_ftype_pushort_int
7995 = build_function_type (opaque_V2SI_type_node,
7996 tree_cons (NULL_TREE, pushort_type_node,
7997 tree_cons (NULL_TREE, integer_type_node,
8000 tree v2si_ftype_signed_char
8001 = build_function_type (opaque_V2SI_type_node,
8002 tree_cons (NULL_TREE, signed_char_type_node,
8005 /* The initialization of the simple binary and unary builtins is
8006 done in rs6000_common_init_builtins, but we have to enable the
8007 mask bits here manually because we have run out of `target_flags'
8008 bits. We really need to redesign this mask business. */
8010 enable_mask_for_builtins ((struct builtin_description *) bdesc_2arg,
8011 ARRAY_SIZE (bdesc_2arg),
8014 enable_mask_for_builtins ((struct builtin_description *) bdesc_1arg,
8015 ARRAY_SIZE (bdesc_1arg),
8017 SPE_BUILTIN_EVSUBFUSIAAW);
8018 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_predicates,
8019 ARRAY_SIZE (bdesc_spe_predicates),
8020 SPE_BUILTIN_EVCMPEQ,
8021 SPE_BUILTIN_EVFSTSTLT);
8022 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_evsel,
8023 ARRAY_SIZE (bdesc_spe_evsel),
8024 SPE_BUILTIN_EVSEL_CMPGTS,
8025 SPE_BUILTIN_EVSEL_FSTSTEQ);
8027 (*lang_hooks.decls.pushdecl)
8028 (build_decl (TYPE_DECL, get_identifier ("__ev64_opaque__"),
8029 opaque_V2SI_type_node));
8031 /* Initialize irregular SPE builtins. */
8033 def_builtin (target_flags, "__builtin_spe_mtspefscr", void_ftype_int, SPE_BUILTIN_MTSPEFSCR);
8034 def_builtin (target_flags, "__builtin_spe_mfspefscr", int_ftype_void, SPE_BUILTIN_MFSPEFSCR);
8035 def_builtin (target_flags, "__builtin_spe_evstddx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDDX);
8036 def_builtin (target_flags, "__builtin_spe_evstdhx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDHX);
8037 def_builtin (target_flags, "__builtin_spe_evstdwx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDWX);
8038 def_builtin (target_flags, "__builtin_spe_evstwhex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHEX);
8039 def_builtin (target_flags, "__builtin_spe_evstwhox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHOX);
8040 def_builtin (target_flags, "__builtin_spe_evstwwex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWEX);
8041 def_builtin (target_flags, "__builtin_spe_evstwwox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWOX);
8042 def_builtin (target_flags, "__builtin_spe_evstdd", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDD);
8043 def_builtin (target_flags, "__builtin_spe_evstdh", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDH);
8044 def_builtin (target_flags, "__builtin_spe_evstdw", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDW);
8045 def_builtin (target_flags, "__builtin_spe_evstwhe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHE);
8046 def_builtin (target_flags, "__builtin_spe_evstwho", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHO);
8047 def_builtin (target_flags, "__builtin_spe_evstwwe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWE);
8048 def_builtin (target_flags, "__builtin_spe_evstwwo", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWO);
8049 def_builtin (target_flags, "__builtin_spe_evsplatfi", v2si_ftype_signed_char, SPE_BUILTIN_EVSPLATFI);
8050 def_builtin (target_flags, "__builtin_spe_evsplati", v2si_ftype_signed_char, SPE_BUILTIN_EVSPLATI);
8053 def_builtin (target_flags, "__builtin_spe_evlddx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDDX);
8054 def_builtin (target_flags, "__builtin_spe_evldwx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDWX);
8055 def_builtin (target_flags, "__builtin_spe_evldhx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDHX);
8056 def_builtin (target_flags, "__builtin_spe_evlwhex", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHEX);
8057 def_builtin (target_flags, "__builtin_spe_evlwhoux", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOUX);
8058 def_builtin (target_flags, "__builtin_spe_evlwhosx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOSX);
8059 def_builtin (target_flags, "__builtin_spe_evlwwsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLATX);
8060 def_builtin (target_flags, "__builtin_spe_evlwhsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLATX);
8061 def_builtin (target_flags, "__builtin_spe_evlhhesplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLATX);
8062 def_builtin (target_flags, "__builtin_spe_evlhhousplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLATX);
8063 def_builtin (target_flags, "__builtin_spe_evlhhossplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLATX);
8064 def_builtin (target_flags, "__builtin_spe_evldd", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDD);
8065 def_builtin (target_flags, "__builtin_spe_evldw", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDW);
8066 def_builtin (target_flags, "__builtin_spe_evldh", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDH);
8067 def_builtin (target_flags, "__builtin_spe_evlhhesplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLAT);
8068 def_builtin (target_flags, "__builtin_spe_evlhhossplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLAT);
8069 def_builtin (target_flags, "__builtin_spe_evlhhousplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLAT);
8070 def_builtin (target_flags, "__builtin_spe_evlwhe", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHE);
8071 def_builtin (target_flags, "__builtin_spe_evlwhos", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOS);
8072 def_builtin (target_flags, "__builtin_spe_evlwhou", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOU);
8073 def_builtin (target_flags, "__builtin_spe_evlwhsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLAT);
8074 def_builtin (target_flags, "__builtin_spe_evlwwsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLAT);
8077 d = (struct builtin_description *) bdesc_spe_predicates;
8078 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, d++)
8082 switch (insn_data[d->icode].operand[1].mode)
8085 type = int_ftype_int_v2si_v2si;
8088 type = int_ftype_int_v2sf_v2sf;
8094 def_builtin (d->mask, d->name, type, d->code);
8097 /* Evsel predicates. */
8098 d = (struct builtin_description *) bdesc_spe_evsel;
8099 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, d++)
8103 switch (insn_data[d->icode].operand[1].mode)
8106 type = v2si_ftype_4_v2si;
8109 type = v2sf_ftype_4_v2sf;
8115 def_builtin (d->mask, d->name, type, d->code);
8120 altivec_init_builtins (void)
8122 struct builtin_description *d;
8123 struct builtin_description_predicates *dp;
8127 tree pfloat_type_node = build_pointer_type (float_type_node);
8128 tree pint_type_node = build_pointer_type (integer_type_node);
8129 tree pshort_type_node = build_pointer_type (short_integer_type_node);
8130 tree pchar_type_node = build_pointer_type (char_type_node);
8132 tree pvoid_type_node = build_pointer_type (void_type_node);
8134 tree pcfloat_type_node = build_pointer_type (build_qualified_type (float_type_node, TYPE_QUAL_CONST));
8135 tree pcint_type_node = build_pointer_type (build_qualified_type (integer_type_node, TYPE_QUAL_CONST));
8136 tree pcshort_type_node = build_pointer_type (build_qualified_type (short_integer_type_node, TYPE_QUAL_CONST));
8137 tree pcchar_type_node = build_pointer_type (build_qualified_type (char_type_node, TYPE_QUAL_CONST));
8139 tree pcvoid_type_node = build_pointer_type (build_qualified_type (void_type_node, TYPE_QUAL_CONST));
8141 tree int_ftype_opaque
8142 = build_function_type_list (integer_type_node,
8143 opaque_V4SI_type_node, NULL_TREE);
8145 tree opaque_ftype_opaque_int
8146 = build_function_type_list (opaque_V4SI_type_node,
8147 opaque_V4SI_type_node, integer_type_node, NULL_TREE);
8148 tree opaque_ftype_opaque_opaque_int
8149 = build_function_type_list (opaque_V4SI_type_node,
8150 opaque_V4SI_type_node, opaque_V4SI_type_node,
8151 integer_type_node, NULL_TREE);
8152 tree int_ftype_int_opaque_opaque
8153 = build_function_type_list (integer_type_node,
8154 integer_type_node, opaque_V4SI_type_node,
8155 opaque_V4SI_type_node, NULL_TREE);
8156 tree int_ftype_int_v4si_v4si
8157 = build_function_type_list (integer_type_node,
8158 integer_type_node, V4SI_type_node,
8159 V4SI_type_node, NULL_TREE);
8160 tree v4sf_ftype_pcfloat
8161 = build_function_type_list (V4SF_type_node, pcfloat_type_node, NULL_TREE);
8162 tree void_ftype_pfloat_v4sf
8163 = build_function_type_list (void_type_node,
8164 pfloat_type_node, V4SF_type_node, NULL_TREE);
8165 tree v4si_ftype_pcint
8166 = build_function_type_list (V4SI_type_node, pcint_type_node, NULL_TREE);
8167 tree void_ftype_pint_v4si
8168 = build_function_type_list (void_type_node,
8169 pint_type_node, V4SI_type_node, NULL_TREE);
8170 tree v8hi_ftype_pcshort
8171 = build_function_type_list (V8HI_type_node, pcshort_type_node, NULL_TREE);
8172 tree void_ftype_pshort_v8hi
8173 = build_function_type_list (void_type_node,
8174 pshort_type_node, V8HI_type_node, NULL_TREE);
8175 tree v16qi_ftype_pcchar
8176 = build_function_type_list (V16QI_type_node, pcchar_type_node, NULL_TREE);
8177 tree void_ftype_pchar_v16qi
8178 = build_function_type_list (void_type_node,
8179 pchar_type_node, V16QI_type_node, NULL_TREE);
8180 tree void_ftype_v4si
8181 = build_function_type_list (void_type_node, V4SI_type_node, NULL_TREE);
8182 tree v8hi_ftype_void
8183 = build_function_type (V8HI_type_node, void_list_node);
8184 tree void_ftype_void
8185 = build_function_type (void_type_node, void_list_node);
8187 = build_function_type_list (void_type_node, integer_type_node, NULL_TREE);
8189 tree opaque_ftype_long_pcvoid
8190 = build_function_type_list (opaque_V4SI_type_node,
8191 long_integer_type_node, pcvoid_type_node, NULL_TREE);
8192 tree v16qi_ftype_long_pcvoid
8193 = build_function_type_list (V16QI_type_node,
8194 long_integer_type_node, pcvoid_type_node, NULL_TREE);
8195 tree v8hi_ftype_long_pcvoid
8196 = build_function_type_list (V8HI_type_node,
8197 long_integer_type_node, pcvoid_type_node, NULL_TREE);
8198 tree v4si_ftype_long_pcvoid
8199 = build_function_type_list (V4SI_type_node,
8200 long_integer_type_node, pcvoid_type_node, NULL_TREE);
8202 tree void_ftype_opaque_long_pvoid
8203 = build_function_type_list (void_type_node,
8204 opaque_V4SI_type_node, long_integer_type_node,
8205 pvoid_type_node, NULL_TREE);
8206 tree void_ftype_v4si_long_pvoid
8207 = build_function_type_list (void_type_node,
8208 V4SI_type_node, long_integer_type_node,
8209 pvoid_type_node, NULL_TREE);
8210 tree void_ftype_v16qi_long_pvoid
8211 = build_function_type_list (void_type_node,
8212 V16QI_type_node, long_integer_type_node,
8213 pvoid_type_node, NULL_TREE);
8214 tree void_ftype_v8hi_long_pvoid
8215 = build_function_type_list (void_type_node,
8216 V8HI_type_node, long_integer_type_node,
8217 pvoid_type_node, NULL_TREE);
8218 tree int_ftype_int_v8hi_v8hi
8219 = build_function_type_list (integer_type_node,
8220 integer_type_node, V8HI_type_node,
8221 V8HI_type_node, NULL_TREE);
8222 tree int_ftype_int_v16qi_v16qi
8223 = build_function_type_list (integer_type_node,
8224 integer_type_node, V16QI_type_node,
8225 V16QI_type_node, NULL_TREE);
8226 tree int_ftype_int_v4sf_v4sf
8227 = build_function_type_list (integer_type_node,
8228 integer_type_node, V4SF_type_node,
8229 V4SF_type_node, NULL_TREE);
8230 tree v4si_ftype_v4si
8231 = build_function_type_list (V4SI_type_node, V4SI_type_node, NULL_TREE);
8232 tree v8hi_ftype_v8hi
8233 = build_function_type_list (V8HI_type_node, V8HI_type_node, NULL_TREE);
8234 tree v16qi_ftype_v16qi
8235 = build_function_type_list (V16QI_type_node, V16QI_type_node, NULL_TREE);
8236 tree v4sf_ftype_v4sf
8237 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
8238 tree void_ftype_pcvoid_int_int
8239 = build_function_type_list (void_type_node,
8240 pcvoid_type_node, integer_type_node,
8241 integer_type_node, NULL_TREE);
8243 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pcfloat,
8244 ALTIVEC_BUILTIN_LD_INTERNAL_4sf);
8245 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf,
8246 ALTIVEC_BUILTIN_ST_INTERNAL_4sf);
8247 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4si", v4si_ftype_pcint,
8248 ALTIVEC_BUILTIN_LD_INTERNAL_4si);
8249 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si,
8250 ALTIVEC_BUILTIN_ST_INTERNAL_4si);
8251 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pcshort,
8252 ALTIVEC_BUILTIN_LD_INTERNAL_8hi);
8253 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi,
8254 ALTIVEC_BUILTIN_ST_INTERNAL_8hi);
8255 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pcchar,
8256 ALTIVEC_BUILTIN_LD_INTERNAL_16qi);
8257 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi,
8258 ALTIVEC_BUILTIN_ST_INTERNAL_16qi);
8259 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mtvscr", void_ftype_v4si, ALTIVEC_BUILTIN_MTVSCR);
8260 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mfvscr", v8hi_ftype_void, ALTIVEC_BUILTIN_MFVSCR);
8261 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dssall", void_ftype_void, ALTIVEC_BUILTIN_DSSALL);
8262 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dss", void_ftype_int, ALTIVEC_BUILTIN_DSS);
8263 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVSL);
8264 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsr", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVSR);
8265 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvebx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEBX);
8266 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvehx", v8hi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEHX);
8267 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvewx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEWX);
8268 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvxl", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVXL);
8269 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVX);
8270 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvx", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVX);
8271 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvewx", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVEWX);
8272 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvxl", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVXL);
8273 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvebx", void_ftype_v16qi_long_pvoid, ALTIVEC_BUILTIN_STVEBX);
8274 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvehx", void_ftype_v8hi_long_pvoid, ALTIVEC_BUILTIN_STVEHX);
8275 def_builtin (MASK_ALTIVEC, "__builtin_vec_ld", opaque_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LD);
8276 def_builtin (MASK_ALTIVEC, "__builtin_vec_lde", opaque_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LDE);
8277 def_builtin (MASK_ALTIVEC, "__builtin_vec_ldl", opaque_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LDL);
8278 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvsl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVSL);
8279 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvsr", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVSR);
8280 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvebx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVEBX);
8281 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvehx", v8hi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVEHX);
8282 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvewx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVEWX);
8283 def_builtin (MASK_ALTIVEC, "__builtin_vec_st", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_ST);
8284 def_builtin (MASK_ALTIVEC, "__builtin_vec_ste", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STE);
8285 def_builtin (MASK_ALTIVEC, "__builtin_vec_stl", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STL);
8286 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvewx", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STVEWX);
8287 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvebx", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STVEBX);
8288 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvehx", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STVEHX);
8290 def_builtin (MASK_ALTIVEC, "__builtin_vec_step", int_ftype_opaque, ALTIVEC_BUILTIN_VEC_STEP);
8292 def_builtin (MASK_ALTIVEC, "__builtin_vec_sld", opaque_ftype_opaque_opaque_int, ALTIVEC_BUILTIN_VEC_SLD);
8293 def_builtin (MASK_ALTIVEC, "__builtin_vec_splat", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_SPLAT);
8294 def_builtin (MASK_ALTIVEC, "__builtin_vec_vspltw", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VSPLTW);
8295 def_builtin (MASK_ALTIVEC, "__builtin_vec_vsplth", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VSPLTH);
8296 def_builtin (MASK_ALTIVEC, "__builtin_vec_vspltb", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VSPLTB);
8297 def_builtin (MASK_ALTIVEC, "__builtin_vec_ctf", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_CTF);
8298 def_builtin (MASK_ALTIVEC, "__builtin_vec_vcfsx", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VCFSX);
8299 def_builtin (MASK_ALTIVEC, "__builtin_vec_vcfux", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VCFUX);
8300 def_builtin (MASK_ALTIVEC, "__builtin_vec_cts", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_CTS);
8301 def_builtin (MASK_ALTIVEC, "__builtin_vec_ctu", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_CTU);
8303 /* Add the DST variants. */
8304 d = (struct builtin_description *) bdesc_dst;
8305 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
8306 def_builtin (d->mask, d->name, void_ftype_pcvoid_int_int, d->code);
8308 /* Initialize the predicates. */
8309 dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
8310 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
8312 enum machine_mode mode1;
8314 bool is_overloaded = dp->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
8315 && dp->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
8320 mode1 = insn_data[dp->icode].operand[1].mode;
8325 type = int_ftype_int_opaque_opaque;
8328 type = int_ftype_int_v4si_v4si;
8331 type = int_ftype_int_v8hi_v8hi;
8334 type = int_ftype_int_v16qi_v16qi;
8337 type = int_ftype_int_v4sf_v4sf;
8343 def_builtin (dp->mask, dp->name, type, dp->code);
8346 /* Initialize the abs* operators. */
8347 d = (struct builtin_description *) bdesc_abs;
8348 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
8350 enum machine_mode mode0;
8353 mode0 = insn_data[d->icode].operand[0].mode;
8358 type = v4si_ftype_v4si;
8361 type = v8hi_ftype_v8hi;
8364 type = v16qi_ftype_v16qi;
8367 type = v4sf_ftype_v4sf;
8373 def_builtin (d->mask, d->name, type, d->code);
8380 /* Initialize target builtin that implements
8381 targetm.vectorize.builtin_mask_for_load. */
8383 decl = lang_hooks.builtin_function ("__builtin_altivec_mask_for_load",
8384 v16qi_ftype_long_pcvoid,
8385 ALTIVEC_BUILTIN_MASK_FOR_LOAD,
8387 tree_cons (get_identifier ("const"),
8388 NULL_TREE, NULL_TREE));
8389 /* Record the decl. Will be used by rs6000_builtin_mask_for_load. */
8390 altivec_builtin_mask_for_load = decl;
8393 /* Access to the vec_init patterns. */
8394 ftype = build_function_type_list (V4SI_type_node, integer_type_node,
8395 integer_type_node, integer_type_node,
8396 integer_type_node, NULL_TREE);
8397 def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v4si", ftype,
8398 ALTIVEC_BUILTIN_VEC_INIT_V4SI);
8400 ftype = build_function_type_list (V8HI_type_node, short_integer_type_node,
8401 short_integer_type_node,
8402 short_integer_type_node,
8403 short_integer_type_node,
8404 short_integer_type_node,
8405 short_integer_type_node,
8406 short_integer_type_node,
8407 short_integer_type_node, NULL_TREE);
8408 def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v8hi", ftype,
8409 ALTIVEC_BUILTIN_VEC_INIT_V8HI);
8411 ftype = build_function_type_list (V16QI_type_node, char_type_node,
8412 char_type_node, char_type_node,
8413 char_type_node, char_type_node,
8414 char_type_node, char_type_node,
8415 char_type_node, char_type_node,
8416 char_type_node, char_type_node,
8417 char_type_node, char_type_node,
8418 char_type_node, char_type_node,
8419 char_type_node, NULL_TREE);
8420 def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v16qi", ftype,
8421 ALTIVEC_BUILTIN_VEC_INIT_V16QI);
8423 ftype = build_function_type_list (V4SF_type_node, float_type_node,
8424 float_type_node, float_type_node,
8425 float_type_node, NULL_TREE);
8426 def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v4sf", ftype,
8427 ALTIVEC_BUILTIN_VEC_INIT_V4SF);
8429 /* Access to the vec_set patterns. */
8430 ftype = build_function_type_list (V4SI_type_node, V4SI_type_node,
8432 integer_type_node, NULL_TREE);
8433 def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v4si", ftype,
8434 ALTIVEC_BUILTIN_VEC_SET_V4SI);
8436 ftype = build_function_type_list (V8HI_type_node, V8HI_type_node,
8438 integer_type_node, NULL_TREE);
8439 def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v8hi", ftype,
8440 ALTIVEC_BUILTIN_VEC_SET_V8HI);
8442 ftype = build_function_type_list (V8HI_type_node, V16QI_type_node,
8444 integer_type_node, NULL_TREE);
8445 def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v16qi", ftype,
8446 ALTIVEC_BUILTIN_VEC_SET_V16QI);
8448 ftype = build_function_type_list (V4SF_type_node, V4SF_type_node,
8450 integer_type_node, NULL_TREE);
8451 def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v4sf", ftype,
8452 ALTIVEC_BUILTIN_VEC_SET_V4SF);
8454 /* Access to the vec_extract patterns. */
8455 ftype = build_function_type_list (intSI_type_node, V4SI_type_node,
8456 integer_type_node, NULL_TREE);
8457 def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v4si", ftype,
8458 ALTIVEC_BUILTIN_VEC_EXT_V4SI);
8460 ftype = build_function_type_list (intHI_type_node, V8HI_type_node,
8461 integer_type_node, NULL_TREE);
8462 def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v8hi", ftype,
8463 ALTIVEC_BUILTIN_VEC_EXT_V8HI);
8465 ftype = build_function_type_list (intQI_type_node, V16QI_type_node,
8466 integer_type_node, NULL_TREE);
8467 def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v16qi", ftype,
8468 ALTIVEC_BUILTIN_VEC_EXT_V16QI);
8470 ftype = build_function_type_list (float_type_node, V4SF_type_node,
8471 integer_type_node, NULL_TREE);
8472 def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v4sf", ftype,
8473 ALTIVEC_BUILTIN_VEC_EXT_V4SF);
8477 rs6000_common_init_builtins (void)
8479 struct builtin_description *d;
8482 tree v4sf_ftype_v4sf_v4sf_v16qi
8483 = build_function_type_list (V4SF_type_node,
8484 V4SF_type_node, V4SF_type_node,
8485 V16QI_type_node, NULL_TREE);
8486 tree v4si_ftype_v4si_v4si_v16qi
8487 = build_function_type_list (V4SI_type_node,
8488 V4SI_type_node, V4SI_type_node,
8489 V16QI_type_node, NULL_TREE);
8490 tree v8hi_ftype_v8hi_v8hi_v16qi
8491 = build_function_type_list (V8HI_type_node,
8492 V8HI_type_node, V8HI_type_node,
8493 V16QI_type_node, NULL_TREE);
8494 tree v16qi_ftype_v16qi_v16qi_v16qi
8495 = build_function_type_list (V16QI_type_node,
8496 V16QI_type_node, V16QI_type_node,
8497 V16QI_type_node, NULL_TREE);
8499 = build_function_type_list (V4SI_type_node, integer_type_node, NULL_TREE);
8501 = build_function_type_list (V8HI_type_node, integer_type_node, NULL_TREE);
8502 tree v16qi_ftype_int
8503 = build_function_type_list (V16QI_type_node, integer_type_node, NULL_TREE);
8504 tree v8hi_ftype_v16qi
8505 = build_function_type_list (V8HI_type_node, V16QI_type_node, NULL_TREE);
8506 tree v4sf_ftype_v4sf
8507 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
8509 tree v2si_ftype_v2si_v2si
8510 = build_function_type_list (opaque_V2SI_type_node,
8511 opaque_V2SI_type_node,
8512 opaque_V2SI_type_node, NULL_TREE);
8514 tree v2sf_ftype_v2sf_v2sf
8515 = build_function_type_list (opaque_V2SF_type_node,
8516 opaque_V2SF_type_node,
8517 opaque_V2SF_type_node, NULL_TREE);
8519 tree v2si_ftype_int_int
8520 = build_function_type_list (opaque_V2SI_type_node,
8521 integer_type_node, integer_type_node,
8524 tree opaque_ftype_opaque
8525 = build_function_type_list (opaque_V4SI_type_node,
8526 opaque_V4SI_type_node, NULL_TREE);
8528 tree v2si_ftype_v2si
8529 = build_function_type_list (opaque_V2SI_type_node,
8530 opaque_V2SI_type_node, NULL_TREE);
8532 tree v2sf_ftype_v2sf
8533 = build_function_type_list (opaque_V2SF_type_node,
8534 opaque_V2SF_type_node, NULL_TREE);
8536 tree v2sf_ftype_v2si
8537 = build_function_type_list (opaque_V2SF_type_node,
8538 opaque_V2SI_type_node, NULL_TREE);
8540 tree v2si_ftype_v2sf
8541 = build_function_type_list (opaque_V2SI_type_node,
8542 opaque_V2SF_type_node, NULL_TREE);
8544 tree v2si_ftype_v2si_char
8545 = build_function_type_list (opaque_V2SI_type_node,
8546 opaque_V2SI_type_node,
8547 char_type_node, NULL_TREE);
8549 tree v2si_ftype_int_char
8550 = build_function_type_list (opaque_V2SI_type_node,
8551 integer_type_node, char_type_node, NULL_TREE);
8553 tree v2si_ftype_char
8554 = build_function_type_list (opaque_V2SI_type_node,
8555 char_type_node, NULL_TREE);
8557 tree int_ftype_int_int
8558 = build_function_type_list (integer_type_node,
8559 integer_type_node, integer_type_node,
8562 tree opaque_ftype_opaque_opaque
8563 = build_function_type_list (opaque_V4SI_type_node,
8564 opaque_V4SI_type_node, opaque_V4SI_type_node, NULL_TREE);
8565 tree v4si_ftype_v4si_v4si
8566 = build_function_type_list (V4SI_type_node,
8567 V4SI_type_node, V4SI_type_node, NULL_TREE);
8568 tree v4sf_ftype_v4si_int
8569 = build_function_type_list (V4SF_type_node,
8570 V4SI_type_node, integer_type_node, NULL_TREE);
8571 tree v4si_ftype_v4sf_int
8572 = build_function_type_list (V4SI_type_node,
8573 V4SF_type_node, integer_type_node, NULL_TREE);
8574 tree v4si_ftype_v4si_int
8575 = build_function_type_list (V4SI_type_node,
8576 V4SI_type_node, integer_type_node, NULL_TREE);
8577 tree v8hi_ftype_v8hi_int
8578 = build_function_type_list (V8HI_type_node,
8579 V8HI_type_node, integer_type_node, NULL_TREE);
8580 tree v16qi_ftype_v16qi_int
8581 = build_function_type_list (V16QI_type_node,
8582 V16QI_type_node, integer_type_node, NULL_TREE);
8583 tree v16qi_ftype_v16qi_v16qi_int
8584 = build_function_type_list (V16QI_type_node,
8585 V16QI_type_node, V16QI_type_node,
8586 integer_type_node, NULL_TREE);
8587 tree v8hi_ftype_v8hi_v8hi_int
8588 = build_function_type_list (V8HI_type_node,
8589 V8HI_type_node, V8HI_type_node,
8590 integer_type_node, NULL_TREE);
8591 tree v4si_ftype_v4si_v4si_int
8592 = build_function_type_list (V4SI_type_node,
8593 V4SI_type_node, V4SI_type_node,
8594 integer_type_node, NULL_TREE);
8595 tree v4sf_ftype_v4sf_v4sf_int
8596 = build_function_type_list (V4SF_type_node,
8597 V4SF_type_node, V4SF_type_node,
8598 integer_type_node, NULL_TREE);
8599 tree v4sf_ftype_v4sf_v4sf
8600 = build_function_type_list (V4SF_type_node,
8601 V4SF_type_node, V4SF_type_node, NULL_TREE);
8602 tree opaque_ftype_opaque_opaque_opaque
8603 = build_function_type_list (opaque_V4SI_type_node,
8604 opaque_V4SI_type_node, opaque_V4SI_type_node,
8605 opaque_V4SI_type_node, NULL_TREE);
8606 tree v4sf_ftype_v4sf_v4sf_v4si
8607 = build_function_type_list (V4SF_type_node,
8608 V4SF_type_node, V4SF_type_node,
8609 V4SI_type_node, NULL_TREE);
8610 tree v4sf_ftype_v4sf_v4sf_v4sf
8611 = build_function_type_list (V4SF_type_node,
8612 V4SF_type_node, V4SF_type_node,
8613 V4SF_type_node, NULL_TREE);
8614 tree v4si_ftype_v4si_v4si_v4si
8615 = build_function_type_list (V4SI_type_node,
8616 V4SI_type_node, V4SI_type_node,
8617 V4SI_type_node, NULL_TREE);
8618 tree v8hi_ftype_v8hi_v8hi
8619 = build_function_type_list (V8HI_type_node,
8620 V8HI_type_node, V8HI_type_node, NULL_TREE);
8621 tree v8hi_ftype_v8hi_v8hi_v8hi
8622 = build_function_type_list (V8HI_type_node,
8623 V8HI_type_node, V8HI_type_node,
8624 V8HI_type_node, NULL_TREE);
8625 tree v4si_ftype_v8hi_v8hi_v4si
8626 = build_function_type_list (V4SI_type_node,
8627 V8HI_type_node, V8HI_type_node,
8628 V4SI_type_node, NULL_TREE);
8629 tree v4si_ftype_v16qi_v16qi_v4si
8630 = build_function_type_list (V4SI_type_node,
8631 V16QI_type_node, V16QI_type_node,
8632 V4SI_type_node, NULL_TREE);
8633 tree v16qi_ftype_v16qi_v16qi
8634 = build_function_type_list (V16QI_type_node,
8635 V16QI_type_node, V16QI_type_node, NULL_TREE);
8636 tree v4si_ftype_v4sf_v4sf
8637 = build_function_type_list (V4SI_type_node,
8638 V4SF_type_node, V4SF_type_node, NULL_TREE);
8639 tree v8hi_ftype_v16qi_v16qi
8640 = build_function_type_list (V8HI_type_node,
8641 V16QI_type_node, V16QI_type_node, NULL_TREE);
8642 tree v4si_ftype_v8hi_v8hi
8643 = build_function_type_list (V4SI_type_node,
8644 V8HI_type_node, V8HI_type_node, NULL_TREE);
8645 tree v8hi_ftype_v4si_v4si
8646 = build_function_type_list (V8HI_type_node,
8647 V4SI_type_node, V4SI_type_node, NULL_TREE);
8648 tree v16qi_ftype_v8hi_v8hi
8649 = build_function_type_list (V16QI_type_node,
8650 V8HI_type_node, V8HI_type_node, NULL_TREE);
8651 tree v4si_ftype_v16qi_v4si
8652 = build_function_type_list (V4SI_type_node,
8653 V16QI_type_node, V4SI_type_node, NULL_TREE);
8654 tree v4si_ftype_v16qi_v16qi
8655 = build_function_type_list (V4SI_type_node,
8656 V16QI_type_node, V16QI_type_node, NULL_TREE);
8657 tree v4si_ftype_v8hi_v4si
8658 = build_function_type_list (V4SI_type_node,
8659 V8HI_type_node, V4SI_type_node, NULL_TREE);
8660 tree v4si_ftype_v8hi
8661 = build_function_type_list (V4SI_type_node, V8HI_type_node, NULL_TREE);
8662 tree int_ftype_v4si_v4si
8663 = build_function_type_list (integer_type_node,
8664 V4SI_type_node, V4SI_type_node, NULL_TREE);
8665 tree int_ftype_v4sf_v4sf
8666 = build_function_type_list (integer_type_node,
8667 V4SF_type_node, V4SF_type_node, NULL_TREE);
8668 tree int_ftype_v16qi_v16qi
8669 = build_function_type_list (integer_type_node,
8670 V16QI_type_node, V16QI_type_node, NULL_TREE);
8671 tree int_ftype_v8hi_v8hi
8672 = build_function_type_list (integer_type_node,
8673 V8HI_type_node, V8HI_type_node, NULL_TREE);
8675 /* Add the simple ternary operators. */
8676 d = (struct builtin_description *) bdesc_3arg;
8677 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
8679 enum machine_mode mode0, mode1, mode2, mode3;
8681 bool is_overloaded = d->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
8682 && d->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
8693 if (d->name == 0 || d->icode == CODE_FOR_nothing)
8696 mode0 = insn_data[d->icode].operand[0].mode;
8697 mode1 = insn_data[d->icode].operand[1].mode;
8698 mode2 = insn_data[d->icode].operand[2].mode;
8699 mode3 = insn_data[d->icode].operand[3].mode;
8702 /* When all four are of the same mode. */
8703 if (mode0 == mode1 && mode1 == mode2 && mode2 == mode3)
8708 type = opaque_ftype_opaque_opaque_opaque;
8711 type = v4si_ftype_v4si_v4si_v4si;
8714 type = v4sf_ftype_v4sf_v4sf_v4sf;
8717 type = v8hi_ftype_v8hi_v8hi_v8hi;
8720 type = v16qi_ftype_v16qi_v16qi_v16qi;
8726 else if (mode0 == mode1 && mode1 == mode2 && mode3 == V16QImode)
8731 type = v4si_ftype_v4si_v4si_v16qi;
8734 type = v4sf_ftype_v4sf_v4sf_v16qi;
8737 type = v8hi_ftype_v8hi_v8hi_v16qi;
8740 type = v16qi_ftype_v16qi_v16qi_v16qi;
8746 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode
8747 && mode3 == V4SImode)
8748 type = v4si_ftype_v16qi_v16qi_v4si;
8749 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode
8750 && mode3 == V4SImode)
8751 type = v4si_ftype_v8hi_v8hi_v4si;
8752 else if (mode0 == V4SFmode && mode1 == V4SFmode && mode2 == V4SFmode
8753 && mode3 == V4SImode)
8754 type = v4sf_ftype_v4sf_v4sf_v4si;
8756 /* vchar, vchar, vchar, 4 bit literal. */
8757 else if (mode0 == V16QImode && mode1 == mode0 && mode2 == mode0
8759 type = v16qi_ftype_v16qi_v16qi_int;
8761 /* vshort, vshort, vshort, 4 bit literal. */
8762 else if (mode0 == V8HImode && mode1 == mode0 && mode2 == mode0
8764 type = v8hi_ftype_v8hi_v8hi_int;
8766 /* vint, vint, vint, 4 bit literal. */
8767 else if (mode0 == V4SImode && mode1 == mode0 && mode2 == mode0
8769 type = v4si_ftype_v4si_v4si_int;
8771 /* vfloat, vfloat, vfloat, 4 bit literal. */
8772 else if (mode0 == V4SFmode && mode1 == mode0 && mode2 == mode0
8774 type = v4sf_ftype_v4sf_v4sf_int;
8779 def_builtin (d->mask, d->name, type, d->code);
8782 /* Add the simple binary operators. */
8783 d = (struct builtin_description *) bdesc_2arg;
8784 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
8786 enum machine_mode mode0, mode1, mode2;
8788 bool is_overloaded = d->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
8789 && d->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
8799 if (d->name == 0 || d->icode == CODE_FOR_nothing)
8802 mode0 = insn_data[d->icode].operand[0].mode;
8803 mode1 = insn_data[d->icode].operand[1].mode;
8804 mode2 = insn_data[d->icode].operand[2].mode;
8807 /* When all three operands are of the same mode. */
8808 if (mode0 == mode1 && mode1 == mode2)
8813 type = opaque_ftype_opaque_opaque;
8816 type = v4sf_ftype_v4sf_v4sf;
8819 type = v4si_ftype_v4si_v4si;
8822 type = v16qi_ftype_v16qi_v16qi;
8825 type = v8hi_ftype_v8hi_v8hi;
8828 type = v2si_ftype_v2si_v2si;
8831 type = v2sf_ftype_v2sf_v2sf;
8834 type = int_ftype_int_int;
8841 /* A few other combos we really don't want to do manually. */
8843 /* vint, vfloat, vfloat. */
8844 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == V4SFmode)
8845 type = v4si_ftype_v4sf_v4sf;
8847 /* vshort, vchar, vchar. */
8848 else if (mode0 == V8HImode && mode1 == V16QImode && mode2 == V16QImode)
8849 type = v8hi_ftype_v16qi_v16qi;
8851 /* vint, vshort, vshort. */
8852 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode)
8853 type = v4si_ftype_v8hi_v8hi;
8855 /* vshort, vint, vint. */
8856 else if (mode0 == V8HImode && mode1 == V4SImode && mode2 == V4SImode)
8857 type = v8hi_ftype_v4si_v4si;
8859 /* vchar, vshort, vshort. */
8860 else if (mode0 == V16QImode && mode1 == V8HImode && mode2 == V8HImode)
8861 type = v16qi_ftype_v8hi_v8hi;
8863 /* vint, vchar, vint. */
8864 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V4SImode)
8865 type = v4si_ftype_v16qi_v4si;
8867 /* vint, vchar, vchar. */
8868 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode)
8869 type = v4si_ftype_v16qi_v16qi;
8871 /* vint, vshort, vint. */
8872 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V4SImode)
8873 type = v4si_ftype_v8hi_v4si;
8875 /* vint, vint, 5 bit literal. */
8876 else if (mode0 == V4SImode && mode1 == V4SImode && mode2 == QImode)
8877 type = v4si_ftype_v4si_int;
8879 /* vshort, vshort, 5 bit literal. */
8880 else if (mode0 == V8HImode && mode1 == V8HImode && mode2 == QImode)
8881 type = v8hi_ftype_v8hi_int;
8883 /* vchar, vchar, 5 bit literal. */
8884 else if (mode0 == V16QImode && mode1 == V16QImode && mode2 == QImode)
8885 type = v16qi_ftype_v16qi_int;
8887 /* vfloat, vint, 5 bit literal. */
8888 else if (mode0 == V4SFmode && mode1 == V4SImode && mode2 == QImode)
8889 type = v4sf_ftype_v4si_int;
8891 /* vint, vfloat, 5 bit literal. */
8892 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == QImode)
8893 type = v4si_ftype_v4sf_int;
8895 else if (mode0 == V2SImode && mode1 == SImode && mode2 == SImode)
8896 type = v2si_ftype_int_int;
8898 else if (mode0 == V2SImode && mode1 == V2SImode && mode2 == QImode)
8899 type = v2si_ftype_v2si_char;
8901 else if (mode0 == V2SImode && mode1 == SImode && mode2 == QImode)
8902 type = v2si_ftype_int_char;
8907 gcc_assert (mode0 == SImode);
8911 type = int_ftype_v4si_v4si;
8914 type = int_ftype_v4sf_v4sf;
8917 type = int_ftype_v16qi_v16qi;
8920 type = int_ftype_v8hi_v8hi;
8927 def_builtin (d->mask, d->name, type, d->code);
8930 /* Add the simple unary operators. */
8931 d = (struct builtin_description *) bdesc_1arg;
8932 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
8934 enum machine_mode mode0, mode1;
8936 bool is_overloaded = d->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
8937 && d->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
8946 if (d->name == 0 || d->icode == CODE_FOR_nothing)
8949 mode0 = insn_data[d->icode].operand[0].mode;
8950 mode1 = insn_data[d->icode].operand[1].mode;
8953 if (mode0 == V4SImode && mode1 == QImode)
8954 type = v4si_ftype_int;
8955 else if (mode0 == V8HImode && mode1 == QImode)
8956 type = v8hi_ftype_int;
8957 else if (mode0 == V16QImode && mode1 == QImode)
8958 type = v16qi_ftype_int;
8959 else if (mode0 == VOIDmode && mode1 == VOIDmode)
8960 type = opaque_ftype_opaque;
8961 else if (mode0 == V4SFmode && mode1 == V4SFmode)
8962 type = v4sf_ftype_v4sf;
8963 else if (mode0 == V8HImode && mode1 == V16QImode)
8964 type = v8hi_ftype_v16qi;
8965 else if (mode0 == V4SImode && mode1 == V8HImode)
8966 type = v4si_ftype_v8hi;
8967 else if (mode0 == V2SImode && mode1 == V2SImode)
8968 type = v2si_ftype_v2si;
8969 else if (mode0 == V2SFmode && mode1 == V2SFmode)
8970 type = v2sf_ftype_v2sf;
8971 else if (mode0 == V2SFmode && mode1 == V2SImode)
8972 type = v2sf_ftype_v2si;
8973 else if (mode0 == V2SImode && mode1 == V2SFmode)
8974 type = v2si_ftype_v2sf;
8975 else if (mode0 == V2SImode && mode1 == QImode)
8976 type = v2si_ftype_char;
8980 def_builtin (d->mask, d->name, type, d->code);
8985 rs6000_init_libfuncs (void)
8987 if (!TARGET_HARD_FLOAT)
8990 if (DEFAULT_ABI != ABI_V4)
8992 if (TARGET_XCOFF && ! TARGET_POWER2 && ! TARGET_POWERPC)
8994 /* AIX library routines for float->int conversion. */
8995 set_conv_libfunc (sfix_optab, SImode, DFmode, "__itrunc");
8996 set_conv_libfunc (ufix_optab, SImode, DFmode, "__uitrunc");
8997 set_conv_libfunc (sfix_optab, SImode, TFmode, "_qitrunc");
8998 set_conv_libfunc (ufix_optab, SImode, TFmode, "_quitrunc");
9001 /* AIX/Darwin/64-bit Linux quad floating point routines. */
9002 if (!TARGET_XL_COMPAT)
9004 set_optab_libfunc (add_optab, TFmode, "__gcc_qadd");
9005 set_optab_libfunc (sub_optab, TFmode, "__gcc_qsub");
9006 set_optab_libfunc (smul_optab, TFmode, "__gcc_qmul");
9007 set_optab_libfunc (sdiv_optab, TFmode, "__gcc_qdiv");
9011 set_optab_libfunc (add_optab, TFmode, "_xlqadd");
9012 set_optab_libfunc (sub_optab, TFmode, "_xlqsub");
9013 set_optab_libfunc (smul_optab, TFmode, "_xlqmul");
9014 set_optab_libfunc (sdiv_optab, TFmode, "_xlqdiv");
9019 /* 32-bit SVR4 quad floating point routines. */
9021 set_optab_libfunc (add_optab, TFmode, "_q_add");
9022 set_optab_libfunc (sub_optab, TFmode, "_q_sub");
9023 set_optab_libfunc (neg_optab, TFmode, "_q_neg");
9024 set_optab_libfunc (smul_optab, TFmode, "_q_mul");
9025 set_optab_libfunc (sdiv_optab, TFmode, "_q_div");
9026 if (TARGET_PPC_GPOPT || TARGET_POWER2)
9027 set_optab_libfunc (sqrt_optab, TFmode, "_q_sqrt");
9029 set_optab_libfunc (eq_optab, TFmode, "_q_feq");
9030 set_optab_libfunc (ne_optab, TFmode, "_q_fne");
9031 set_optab_libfunc (gt_optab, TFmode, "_q_fgt");
9032 set_optab_libfunc (ge_optab, TFmode, "_q_fge");
9033 set_optab_libfunc (lt_optab, TFmode, "_q_flt");
9034 set_optab_libfunc (le_optab, TFmode, "_q_fle");
9036 set_conv_libfunc (sext_optab, TFmode, SFmode, "_q_stoq");
9037 set_conv_libfunc (sext_optab, TFmode, DFmode, "_q_dtoq");
9038 set_conv_libfunc (trunc_optab, SFmode, TFmode, "_q_qtos");
9039 set_conv_libfunc (trunc_optab, DFmode, TFmode, "_q_qtod");
9040 set_conv_libfunc (sfix_optab, SImode, TFmode, "_q_qtoi");
9041 set_conv_libfunc (ufix_optab, SImode, TFmode, "_q_qtou");
9042 set_conv_libfunc (sfloat_optab, TFmode, SImode, "_q_itoq");
9047 /* Expand a block clear operation, and return 1 if successful. Return 0
9048 if we should let the compiler generate normal code.
9050 operands[0] is the destination
9051 operands[1] is the length
9052 operands[3] is the alignment */
9055 expand_block_clear (rtx operands[])
9057 rtx orig_dest = operands[0];
9058 rtx bytes_rtx = operands[1];
9059 rtx align_rtx = operands[3];
9060 bool constp = (GET_CODE (bytes_rtx) == CONST_INT);
9061 HOST_WIDE_INT align;
9062 HOST_WIDE_INT bytes;
9067 /* If this is not a fixed size move, just call memcpy */
9071 /* This must be a fixed size alignment */
9072 gcc_assert (GET_CODE (align_rtx) == CONST_INT);
9073 align = INTVAL (align_rtx) * BITS_PER_UNIT;
9075 /* Anything to clear? */
9076 bytes = INTVAL (bytes_rtx);
9080 /* Use the builtin memset after a point, to avoid huge code bloat.
9081 When optimize_size, avoid any significant code bloat; calling
9082 memset is about 4 instructions, so allow for one instruction to
9083 load zero and three to do clearing. */
9084 if (TARGET_ALTIVEC && align >= 128)
9086 else if (TARGET_POWERPC64 && align >= 32)
9091 if (optimize_size && bytes > 3 * clear_step)
9093 if (! optimize_size && bytes > 8 * clear_step)
9096 for (offset = 0; bytes > 0; offset += clear_bytes, bytes -= clear_bytes)
9098 enum machine_mode mode = BLKmode;
9101 if (bytes >= 16 && TARGET_ALTIVEC && align >= 128)
9106 else if (bytes >= 8 && TARGET_POWERPC64
9107 /* 64-bit loads and stores require word-aligned
9109 && (align >= 64 || (!STRICT_ALIGNMENT && align >= 32)))
9114 else if (bytes >= 4 && (align >= 32 || !STRICT_ALIGNMENT))
9115 { /* move 4 bytes */
9119 else if (bytes == 2 && (align >= 16 || !STRICT_ALIGNMENT))
9120 { /* move 2 bytes */
9124 else /* move 1 byte at a time */
9130 dest = adjust_address (orig_dest, mode, offset);
9132 emit_move_insn (dest, CONST0_RTX (mode));
9139 /* Expand a block move operation, and return 1 if successful. Return 0
9140 if we should let the compiler generate normal code.
9142 operands[0] is the destination
9143 operands[1] is the source
9144 operands[2] is the length
9145 operands[3] is the alignment */
9147 #define MAX_MOVE_REG 4
9150 expand_block_move (rtx operands[])
9152 rtx orig_dest = operands[0];
9153 rtx orig_src = operands[1];
9154 rtx bytes_rtx = operands[2];
9155 rtx align_rtx = operands[3];
9156 int constp = (GET_CODE (bytes_rtx) == CONST_INT);
9161 rtx stores[MAX_MOVE_REG];
9164 /* If this is not a fixed size move, just call memcpy */
9168 /* This must be a fixed size alignment */
9169 gcc_assert (GET_CODE (align_rtx) == CONST_INT);
9170 align = INTVAL (align_rtx) * BITS_PER_UNIT;
9172 /* Anything to move? */
9173 bytes = INTVAL (bytes_rtx);
9177 /* store_one_arg depends on expand_block_move to handle at least the size of
9178 reg_parm_stack_space. */
9179 if (bytes > (TARGET_POWERPC64 ? 64 : 32))
9182 for (offset = 0; bytes > 0; offset += move_bytes, bytes -= move_bytes)
9185 rtx (*movmemsi) (rtx, rtx, rtx, rtx);
9186 rtx (*mov) (rtx, rtx);
9188 enum machine_mode mode = BLKmode;
9191 /* Altivec first, since it will be faster than a string move
9192 when it applies, and usually not significantly larger. */
9193 if (TARGET_ALTIVEC && bytes >= 16 && align >= 128)
9197 gen_func.mov = gen_movv4si;
9199 else if (TARGET_STRING
9200 && bytes > 24 /* move up to 32 bytes at a time */
9208 && ! fixed_regs[12])
9210 move_bytes = (bytes > 32) ? 32 : bytes;
9211 gen_func.movmemsi = gen_movmemsi_8reg;
9213 else if (TARGET_STRING
9214 && bytes > 16 /* move up to 24 bytes at a time */
9220 && ! fixed_regs[10])
9222 move_bytes = (bytes > 24) ? 24 : bytes;
9223 gen_func.movmemsi = gen_movmemsi_6reg;
9225 else if (TARGET_STRING
9226 && bytes > 8 /* move up to 16 bytes at a time */
9232 move_bytes = (bytes > 16) ? 16 : bytes;
9233 gen_func.movmemsi = gen_movmemsi_4reg;
9235 else if (bytes >= 8 && TARGET_POWERPC64
9236 /* 64-bit loads and stores require word-aligned
9238 && (align >= 64 || (!STRICT_ALIGNMENT && align >= 32)))
9242 gen_func.mov = gen_movdi;
9244 else if (TARGET_STRING && bytes > 4 && !TARGET_POWERPC64)
9245 { /* move up to 8 bytes at a time */
9246 move_bytes = (bytes > 8) ? 8 : bytes;
9247 gen_func.movmemsi = gen_movmemsi_2reg;
9249 else if (bytes >= 4 && (align >= 32 || !STRICT_ALIGNMENT))
9250 { /* move 4 bytes */
9253 gen_func.mov = gen_movsi;
9255 else if (bytes == 2 && (align >= 16 || !STRICT_ALIGNMENT))
9256 { /* move 2 bytes */
9259 gen_func.mov = gen_movhi;
9261 else if (TARGET_STRING && bytes > 1)
9262 { /* move up to 4 bytes at a time */
9263 move_bytes = (bytes > 4) ? 4 : bytes;
9264 gen_func.movmemsi = gen_movmemsi_1reg;
9266 else /* move 1 byte at a time */
9270 gen_func.mov = gen_movqi;
9273 src = adjust_address (orig_src, mode, offset);
9274 dest = adjust_address (orig_dest, mode, offset);
9276 if (mode != BLKmode)
9278 rtx tmp_reg = gen_reg_rtx (mode);
9280 emit_insn ((*gen_func.mov) (tmp_reg, src));
9281 stores[num_reg++] = (*gen_func.mov) (dest, tmp_reg);
9284 if (mode == BLKmode || num_reg >= MAX_MOVE_REG || bytes == move_bytes)
9287 for (i = 0; i < num_reg; i++)
9288 emit_insn (stores[i]);
9292 if (mode == BLKmode)
9294 /* Move the address into scratch registers. The movmemsi
9295 patterns require zero offset. */
9296 if (!REG_P (XEXP (src, 0)))
9298 rtx src_reg = copy_addr_to_reg (XEXP (src, 0));
9299 src = replace_equiv_address (src, src_reg);
9301 set_mem_size (src, GEN_INT (move_bytes));
9303 if (!REG_P (XEXP (dest, 0)))
9305 rtx dest_reg = copy_addr_to_reg (XEXP (dest, 0));
9306 dest = replace_equiv_address (dest, dest_reg);
9308 set_mem_size (dest, GEN_INT (move_bytes));
9310 emit_insn ((*gen_func.movmemsi) (dest, src,
9311 GEN_INT (move_bytes & 31),
9320 /* Return a string to perform a load_multiple operation.
9321 operands[0] is the vector.
9322 operands[1] is the source address.
9323 operands[2] is the first destination register. */
9326 rs6000_output_load_multiple (rtx operands[3])
9328 /* We have to handle the case where the pseudo used to contain the address
9329 is assigned to one of the output registers. */
9331 int words = XVECLEN (operands[0], 0);
9334 if (XVECLEN (operands[0], 0) == 1)
9335 return "{l|lwz} %2,0(%1)";
9337 for (i = 0; i < words; i++)
9338 if (refers_to_regno_p (REGNO (operands[2]) + i,
9339 REGNO (operands[2]) + i + 1, operands[1], 0))
9343 xop[0] = GEN_INT (4 * (words-1));
9344 xop[1] = operands[1];
9345 xop[2] = operands[2];
9346 output_asm_insn ("{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,%0(%1)", xop);
9351 xop[0] = GEN_INT (4 * (words-1));
9352 xop[1] = operands[1];
9353 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
9354 output_asm_insn ("{cal %1,4(%1)|addi %1,%1,4}\n\t{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,-4(%1)", xop);
9359 for (j = 0; j < words; j++)
9362 xop[0] = GEN_INT (j * 4);
9363 xop[1] = operands[1];
9364 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + j);
9365 output_asm_insn ("{l|lwz} %2,%0(%1)", xop);
9367 xop[0] = GEN_INT (i * 4);
9368 xop[1] = operands[1];
9369 output_asm_insn ("{l|lwz} %1,%0(%1)", xop);
9374 return "{lsi|lswi} %2,%1,%N0";
9378 /* A validation routine: say whether CODE, a condition code, and MODE
9379 match. The other alternatives either don't make sense or should
9380 never be generated. */
9383 validate_condition_mode (enum rtx_code code, enum machine_mode mode)
9385 gcc_assert ((GET_RTX_CLASS (code) == RTX_COMPARE
9386 || GET_RTX_CLASS (code) == RTX_COMM_COMPARE)
9387 && GET_MODE_CLASS (mode) == MODE_CC);
9389 /* These don't make sense. */
9390 gcc_assert ((code != GT && code != LT && code != GE && code != LE)
9391 || mode != CCUNSmode);
9393 gcc_assert ((code != GTU && code != LTU && code != GEU && code != LEU)
9394 || mode == CCUNSmode);
9396 gcc_assert (mode == CCFPmode
9397 || (code != ORDERED && code != UNORDERED
9398 && code != UNEQ && code != LTGT
9399 && code != UNGT && code != UNLT
9400 && code != UNGE && code != UNLE));
9402 /* These should never be generated except for
9403 flag_finite_math_only. */
9404 gcc_assert (mode != CCFPmode
9405 || flag_finite_math_only
9406 || (code != LE && code != GE
9407 && code != UNEQ && code != LTGT
9408 && code != UNGT && code != UNLT));
9410 /* These are invalid; the information is not there. */
9411 gcc_assert (mode != CCEQmode || code == EQ || code == NE);
9415 /* Return 1 if ANDOP is a mask that has no bits on that are not in the
9416 mask required to convert the result of a rotate insn into a shift
9417 left insn of SHIFTOP bits. Both are known to be SImode CONST_INT. */
9420 includes_lshift_p (rtx shiftop, rtx andop)
9422 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
9424 shift_mask <<= INTVAL (shiftop);
9426 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
9429 /* Similar, but for right shift. */
9432 includes_rshift_p (rtx shiftop, rtx andop)
9434 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
9436 shift_mask >>= INTVAL (shiftop);
9438 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
9441 /* Return 1 if ANDOP is a mask suitable for use with an rldic insn
9442 to perform a left shift. It must have exactly SHIFTOP least
9443 significant 0's, then one or more 1's, then zero or more 0's. */
9446 includes_rldic_lshift_p (rtx shiftop, rtx andop)
9448 if (GET_CODE (andop) == CONST_INT)
9450 HOST_WIDE_INT c, lsb, shift_mask;
9453 if (c == 0 || c == ~0)
9457 shift_mask <<= INTVAL (shiftop);
9459 /* Find the least significant one bit. */
9462 /* It must coincide with the LSB of the shift mask. */
9463 if (-lsb != shift_mask)
9466 /* Invert to look for the next transition (if any). */
9469 /* Remove the low group of ones (originally low group of zeros). */
9472 /* Again find the lsb, and check we have all 1's above. */
9476 else if (GET_CODE (andop) == CONST_DOUBLE
9477 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
9479 HOST_WIDE_INT low, high, lsb;
9480 HOST_WIDE_INT shift_mask_low, shift_mask_high;
9482 low = CONST_DOUBLE_LOW (andop);
9483 if (HOST_BITS_PER_WIDE_INT < 64)
9484 high = CONST_DOUBLE_HIGH (andop);
9486 if ((low == 0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == 0))
9487 || (low == ~0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0)))
9490 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
9492 shift_mask_high = ~0;
9493 if (INTVAL (shiftop) > 32)
9494 shift_mask_high <<= INTVAL (shiftop) - 32;
9498 if (-lsb != shift_mask_high || INTVAL (shiftop) < 32)
9505 return high == -lsb;
9508 shift_mask_low = ~0;
9509 shift_mask_low <<= INTVAL (shiftop);
9513 if (-lsb != shift_mask_low)
9516 if (HOST_BITS_PER_WIDE_INT < 64)
9521 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
9524 return high == -lsb;
9528 return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
9534 /* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
9535 to perform a left shift. It must have SHIFTOP or more least
9536 significant 0's, with the remainder of the word 1's. */
9539 includes_rldicr_lshift_p (rtx shiftop, rtx andop)
9541 if (GET_CODE (andop) == CONST_INT)
9543 HOST_WIDE_INT c, lsb, shift_mask;
9546 shift_mask <<= INTVAL (shiftop);
9549 /* Find the least significant one bit. */
9552 /* It must be covered by the shift mask.
9553 This test also rejects c == 0. */
9554 if ((lsb & shift_mask) == 0)
9557 /* Check we have all 1's above the transition, and reject all 1's. */
9558 return c == -lsb && lsb != 1;
9560 else if (GET_CODE (andop) == CONST_DOUBLE
9561 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
9563 HOST_WIDE_INT low, lsb, shift_mask_low;
9565 low = CONST_DOUBLE_LOW (andop);
9567 if (HOST_BITS_PER_WIDE_INT < 64)
9569 HOST_WIDE_INT high, shift_mask_high;
9571 high = CONST_DOUBLE_HIGH (andop);
9575 shift_mask_high = ~0;
9576 if (INTVAL (shiftop) > 32)
9577 shift_mask_high <<= INTVAL (shiftop) - 32;
9581 if ((lsb & shift_mask_high) == 0)
9584 return high == -lsb;
9590 shift_mask_low = ~0;
9591 shift_mask_low <<= INTVAL (shiftop);
9595 if ((lsb & shift_mask_low) == 0)
9598 return low == -lsb && lsb != 1;
9604 /* Return 1 if operands will generate a valid arguments to rlwimi
9605 instruction for insert with right shift in 64-bit mode. The mask may
9606 not start on the first bit or stop on the last bit because wrap-around
9607 effects of instruction do not correspond to semantics of RTL insn. */
9610 insvdi_rshift_rlwimi_p (rtx sizeop, rtx startop, rtx shiftop)
9612 if (INTVAL (startop) < 64
9613 && INTVAL (startop) > 32
9614 && (INTVAL (sizeop) + INTVAL (startop) < 64)
9615 && (INTVAL (sizeop) + INTVAL (startop) > 33)
9616 && (INTVAL (sizeop) + INTVAL (startop) + INTVAL (shiftop) < 96)
9617 && (INTVAL (sizeop) + INTVAL (startop) + INTVAL (shiftop) >= 64)
9618 && (64 - (INTVAL (shiftop) & 63)) >= INTVAL (sizeop))
9624 /* Return 1 if REGNO (reg1) == REGNO (reg2) - 1 making them candidates
9625 for lfq and stfq insns iff the registers are hard registers. */
9628 registers_ok_for_quad_peep (rtx reg1, rtx reg2)
9630 /* We might have been passed a SUBREG. */
9631 if (GET_CODE (reg1) != REG || GET_CODE (reg2) != REG)
9634 /* We might have been passed non floating point registers. */
9635 if (!FP_REGNO_P (REGNO (reg1))
9636 || !FP_REGNO_P (REGNO (reg2)))
9639 return (REGNO (reg1) == REGNO (reg2) - 1);
9642 /* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
9643 addr1 and addr2 must be in consecutive memory locations
9644 (addr2 == addr1 + 8). */
9647 mems_ok_for_quad_peep (rtx mem1, rtx mem2)
9653 /* The mems cannot be volatile. */
9654 if (MEM_VOLATILE_P (mem1) || MEM_VOLATILE_P (mem2))
9657 addr1 = XEXP (mem1, 0);
9658 addr2 = XEXP (mem2, 0);
9660 /* Extract an offset (if used) from the first addr. */
9661 if (GET_CODE (addr1) == PLUS)
9663 /* If not a REG, return zero. */
9664 if (GET_CODE (XEXP (addr1, 0)) != REG)
9668 reg1 = REGNO (XEXP (addr1, 0));
9669 /* The offset must be constant! */
9670 if (GET_CODE (XEXP (addr1, 1)) != CONST_INT)
9672 offset1 = INTVAL (XEXP (addr1, 1));
9675 else if (GET_CODE (addr1) != REG)
9679 reg1 = REGNO (addr1);
9680 /* This was a simple (mem (reg)) expression. Offset is 0. */
9684 /* Make sure the second address is a (mem (plus (reg) (const_int)))
9685 or if it is (mem (reg)) then make sure that offset1 is -8 and the same
9686 register as addr1. */
9687 if (offset1 == -8 && GET_CODE (addr2) == REG && reg1 == REGNO (addr2))
9689 if (GET_CODE (addr2) != PLUS)
9692 if (GET_CODE (XEXP (addr2, 0)) != REG
9693 || GET_CODE (XEXP (addr2, 1)) != CONST_INT)
9696 if (reg1 != REGNO (XEXP (addr2, 0)))
9699 /* The offset for the second addr must be 8 more than the first addr. */
9700 if (INTVAL (XEXP (addr2, 1)) != offset1 + 8)
9703 /* All the tests passed. addr1 and addr2 are valid for lfq or stfq
9708 /* Return the register class of a scratch register needed to copy IN into
9709 or out of a register in CLASS in MODE. If it can be done directly,
9710 NO_REGS is returned. */
9713 secondary_reload_class (enum reg_class class,
9714 enum machine_mode mode ATTRIBUTE_UNUSED,
9719 if (TARGET_ELF || (DEFAULT_ABI == ABI_DARWIN
9721 && MACHOPIC_INDIRECT
9725 /* We cannot copy a symbolic operand directly into anything
9726 other than BASE_REGS for TARGET_ELF. So indicate that a
9727 register from BASE_REGS is needed as an intermediate
9730 On Darwin, pic addresses require a load from memory, which
9731 needs a base register. */
9732 if (class != BASE_REGS
9733 && (GET_CODE (in) == SYMBOL_REF
9734 || GET_CODE (in) == HIGH
9735 || GET_CODE (in) == LABEL_REF
9736 || GET_CODE (in) == CONST))
9740 if (GET_CODE (in) == REG)
9743 if (regno >= FIRST_PSEUDO_REGISTER)
9745 regno = true_regnum (in);
9746 if (regno >= FIRST_PSEUDO_REGISTER)
9750 else if (GET_CODE (in) == SUBREG)
9752 regno = true_regnum (in);
9753 if (regno >= FIRST_PSEUDO_REGISTER)
9759 /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
9761 if (class == GENERAL_REGS || class == BASE_REGS
9762 || (regno >= 0 && INT_REGNO_P (regno)))
9765 /* Constants, memory, and FP registers can go into FP registers. */
9766 if ((regno == -1 || FP_REGNO_P (regno))
9767 && (class == FLOAT_REGS || class == NON_SPECIAL_REGS))
9770 /* Memory, and AltiVec registers can go into AltiVec registers. */
9771 if ((regno == -1 || ALTIVEC_REGNO_P (regno))
9772 && class == ALTIVEC_REGS)
9775 /* We can copy among the CR registers. */
9776 if ((class == CR_REGS || class == CR0_REGS)
9777 && regno >= 0 && CR_REGNO_P (regno))
9780 /* Otherwise, we need GENERAL_REGS. */
9781 return GENERAL_REGS;
9784 /* Given a comparison operation, return the bit number in CCR to test. We
9785 know this is a valid comparison.
9787 SCC_P is 1 if this is for an scc. That means that %D will have been
9788 used instead of %C, so the bits will be in different places.
9790 Return -1 if OP isn't a valid comparison for some reason. */
9793 ccr_bit (rtx op, int scc_p)
9795 enum rtx_code code = GET_CODE (op);
9796 enum machine_mode cc_mode;
9801 if (!COMPARISON_P (op))
9806 gcc_assert (GET_CODE (reg) == REG && CR_REGNO_P (REGNO (reg)));
9808 cc_mode = GET_MODE (reg);
9809 cc_regnum = REGNO (reg);
9810 base_bit = 4 * (cc_regnum - CR0_REGNO);
9812 validate_condition_mode (code, cc_mode);
9814 /* When generating a sCOND operation, only positive conditions are
9817 || code == EQ || code == GT || code == LT || code == UNORDERED
9818 || code == GTU || code == LTU);
9823 return scc_p ? base_bit + 3 : base_bit + 2;
9825 return base_bit + 2;
9826 case GT: case GTU: case UNLE:
9827 return base_bit + 1;
9828 case LT: case LTU: case UNGE:
9830 case ORDERED: case UNORDERED:
9831 return base_bit + 3;
9834 /* If scc, we will have done a cror to put the bit in the
9835 unordered position. So test that bit. For integer, this is ! LT
9836 unless this is an scc insn. */
9837 return scc_p ? base_bit + 3 : base_bit;
9840 return scc_p ? base_bit + 3 : base_bit + 1;
9847 /* Return the GOT register. */
9850 rs6000_got_register (rtx value ATTRIBUTE_UNUSED)
9852 /* The second flow pass currently (June 1999) can't update
9853 regs_ever_live without disturbing other parts of the compiler, so
9854 update it here to make the prolog/epilogue code happy. */
9855 if (no_new_pseudos && ! regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM])
9856 regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
9858 current_function_uses_pic_offset_table = 1;
9860 return pic_offset_table_rtx;
9863 /* Function to init struct machine_function.
9864 This will be called, via a pointer variable,
9865 from push_function_context. */
9867 static struct machine_function *
9868 rs6000_init_machine_status (void)
9870 return ggc_alloc_cleared (sizeof (machine_function));
9873 /* These macros test for integers and extract the low-order bits. */
9875 ((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE) \
9876 && GET_MODE (X) == VOIDmode)
9878 #define INT_LOWPART(X) \
9879 (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
9885 unsigned long val = INT_LOWPART (op);
9887 /* If the high bit is zero, the value is the first 1 bit we find
9889 if ((val & 0x80000000) == 0)
9891 gcc_assert (val & 0xffffffff);
9894 while (((val <<= 1) & 0x80000000) == 0)
9899 /* If the high bit is set and the low bit is not, or the mask is all
9900 1's, the value is zero. */
9901 if ((val & 1) == 0 || (val & 0xffffffff) == 0xffffffff)
9904 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
9907 while (((val >>= 1) & 1) != 0)
9917 unsigned long val = INT_LOWPART (op);
9919 /* If the low bit is zero, the value is the first 1 bit we find from
9923 gcc_assert (val & 0xffffffff);
9926 while (((val >>= 1) & 1) == 0)
9932 /* If the low bit is set and the high bit is not, or the mask is all
9933 1's, the value is 31. */
9934 if ((val & 0x80000000) == 0 || (val & 0xffffffff) == 0xffffffff)
9937 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
9940 while (((val <<= 1) & 0x80000000) != 0)
9946 /* Locate some local-dynamic symbol still in use by this function
9947 so that we can print its name in some tls_ld pattern. */
9950 rs6000_get_some_local_dynamic_name (void)
9954 if (cfun->machine->some_ld_name)
9955 return cfun->machine->some_ld_name;
9957 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
9959 && for_each_rtx (&PATTERN (insn),
9960 rs6000_get_some_local_dynamic_name_1, 0))
9961 return cfun->machine->some_ld_name;
9966 /* Helper function for rs6000_get_some_local_dynamic_name. */
9969 rs6000_get_some_local_dynamic_name_1 (rtx *px, void *data ATTRIBUTE_UNUSED)
9973 if (GET_CODE (x) == SYMBOL_REF)
9975 const char *str = XSTR (x, 0);
9976 if (SYMBOL_REF_TLS_MODEL (x) == TLS_MODEL_LOCAL_DYNAMIC)
9978 cfun->machine->some_ld_name = str;
9986 /* Write out a function code label. */
9989 rs6000_output_function_entry (FILE *file, const char *fname)
9991 if (fname[0] != '.')
9993 switch (DEFAULT_ABI)
10002 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "L.");
10011 RS6000_OUTPUT_BASENAME (file, fname);
10013 assemble_name (file, fname);
10016 /* Print an operand. Recognize special options, documented below. */
10019 #define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
10020 #define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
10022 #define SMALL_DATA_RELOC "sda21"
10023 #define SMALL_DATA_REG 0
10027 print_operand (FILE *file, rtx x, int code)
10031 unsigned HOST_WIDE_INT uval;
10036 /* Write out an instruction after the call which may be replaced
10037 with glue code by the loader. This depends on the AIX version. */
10038 asm_fprintf (file, RS6000_CALL_GLUE);
10041 /* %a is output_address. */
10044 /* If X is a constant integer whose low-order 5 bits are zero,
10045 write 'l'. Otherwise, write 'r'. This is a kludge to fix a bug
10046 in the AIX assembler where "sri" with a zero shift count
10047 writes a trash instruction. */
10048 if (GET_CODE (x) == CONST_INT && (INTVAL (x) & 31) == 0)
10055 /* If constant, low-order 16 bits of constant, unsigned.
10056 Otherwise, write normally. */
10058 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 0xffff);
10060 print_operand (file, x, 0);
10064 /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
10065 for 64-bit mask direction. */
10066 putc (((INT_LOWPART (x) & 1) == 0 ? 'r' : 'l'), file);
10069 /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
10073 /* X is a CR register. Print the number of the GT bit of the CR. */
10074 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
10075 output_operand_lossage ("invalid %%E value");
10077 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 1);
10081 /* Like 'J' but get to the EQ bit. */
10082 gcc_assert (GET_CODE (x) == REG);
10084 /* Bit 1 is EQ bit. */
10085 i = 4 * (REGNO (x) - CR0_REGNO) + 2;
10087 fprintf (file, "%d", i);
10091 /* X is a CR register. Print the number of the EQ bit of the CR */
10092 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
10093 output_operand_lossage ("invalid %%E value");
10095 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 2);
10099 /* X is a CR register. Print the shift count needed to move it
10100 to the high-order four bits. */
10101 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
10102 output_operand_lossage ("invalid %%f value");
10104 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO));
10108 /* Similar, but print the count for the rotate in the opposite
10110 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
10111 output_operand_lossage ("invalid %%F value");
10113 fprintf (file, "%d", 32 - 4 * (REGNO (x) - CR0_REGNO));
10117 /* X is a constant integer. If it is negative, print "m",
10118 otherwise print "z". This is to make an aze or ame insn. */
10119 if (GET_CODE (x) != CONST_INT)
10120 output_operand_lossage ("invalid %%G value");
10121 else if (INTVAL (x) >= 0)
10128 /* If constant, output low-order five bits. Otherwise, write
10131 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 31);
10133 print_operand (file, x, 0);
10137 /* If constant, output low-order six bits. Otherwise, write
10140 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 63);
10142 print_operand (file, x, 0);
10146 /* Print `i' if this is a constant, else nothing. */
10152 /* Write the bit number in CCR for jump. */
10153 i = ccr_bit (x, 0);
10155 output_operand_lossage ("invalid %%j code");
10157 fprintf (file, "%d", i);
10161 /* Similar, but add one for shift count in rlinm for scc and pass
10162 scc flag to `ccr_bit'. */
10163 i = ccr_bit (x, 1);
10165 output_operand_lossage ("invalid %%J code");
10167 /* If we want bit 31, write a shift count of zero, not 32. */
10168 fprintf (file, "%d", i == 31 ? 0 : i + 1);
10172 /* X must be a constant. Write the 1's complement of the
10175 output_operand_lossage ("invalid %%k value");
10177 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~ INT_LOWPART (x));
10181 /* X must be a symbolic constant on ELF. Write an
10182 expression suitable for an 'addi' that adds in the low 16
10183 bits of the MEM. */
10184 if (GET_CODE (x) != CONST)
10186 print_operand_address (file, x);
10187 fputs ("@l", file);
10191 if (GET_CODE (XEXP (x, 0)) != PLUS
10192 || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
10193 && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
10194 || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
10195 output_operand_lossage ("invalid %%K value");
10196 print_operand_address (file, XEXP (XEXP (x, 0), 0));
10197 fputs ("@l", file);
10198 /* For GNU as, there must be a non-alphanumeric character
10199 between 'l' and the number. The '-' is added by
10200 print_operand() already. */
10201 if (INTVAL (XEXP (XEXP (x, 0), 1)) >= 0)
10203 print_operand (file, XEXP (XEXP (x, 0), 1), 0);
10207 /* %l is output_asm_label. */
10210 /* Write second word of DImode or DFmode reference. Works on register
10211 or non-indexed memory only. */
10212 if (GET_CODE (x) == REG)
10213 fputs (reg_names[REGNO (x) + 1], file);
10214 else if (GET_CODE (x) == MEM)
10216 /* Handle possible auto-increment. Since it is pre-increment and
10217 we have already done it, we can just use an offset of word. */
10218 if (GET_CODE (XEXP (x, 0)) == PRE_INC
10219 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
10220 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
10223 output_address (XEXP (adjust_address_nv (x, SImode,
10227 if (small_data_operand (x, GET_MODE (x)))
10228 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
10229 reg_names[SMALL_DATA_REG]);
10234 /* MB value for a mask operand. */
10235 if (! mask_operand (x, SImode))
10236 output_operand_lossage ("invalid %%m value");
10238 fprintf (file, "%d", extract_MB (x));
10242 /* ME value for a mask operand. */
10243 if (! mask_operand (x, SImode))
10244 output_operand_lossage ("invalid %%M value");
10246 fprintf (file, "%d", extract_ME (x));
10249 /* %n outputs the negative of its operand. */
10252 /* Write the number of elements in the vector times 4. */
10253 if (GET_CODE (x) != PARALLEL)
10254 output_operand_lossage ("invalid %%N value");
10256 fprintf (file, "%d", XVECLEN (x, 0) * 4);
10260 /* Similar, but subtract 1 first. */
10261 if (GET_CODE (x) != PARALLEL)
10262 output_operand_lossage ("invalid %%O value");
10264 fprintf (file, "%d", (XVECLEN (x, 0) - 1) * 4);
10268 /* X is a CONST_INT that is a power of two. Output the logarithm. */
10270 || INT_LOWPART (x) < 0
10271 || (i = exact_log2 (INT_LOWPART (x))) < 0)
10272 output_operand_lossage ("invalid %%p value");
10274 fprintf (file, "%d", i);
10278 /* The operand must be an indirect memory reference. The result
10279 is the register name. */
10280 if (GET_CODE (x) != MEM || GET_CODE (XEXP (x, 0)) != REG
10281 || REGNO (XEXP (x, 0)) >= 32)
10282 output_operand_lossage ("invalid %%P value");
10284 fputs (reg_names[REGNO (XEXP (x, 0))], file);
10288 /* This outputs the logical code corresponding to a boolean
10289 expression. The expression may have one or both operands
10290 negated (if one, only the first one). For condition register
10291 logical operations, it will also treat the negated
10292 CR codes as NOTs, but not handle NOTs of them. */
10294 const char *const *t = 0;
10296 enum rtx_code code = GET_CODE (x);
10297 static const char * const tbl[3][3] = {
10298 { "and", "andc", "nor" },
10299 { "or", "orc", "nand" },
10300 { "xor", "eqv", "xor" } };
10304 else if (code == IOR)
10306 else if (code == XOR)
10309 output_operand_lossage ("invalid %%q value");
10311 if (GET_CODE (XEXP (x, 0)) != NOT)
10315 if (GET_CODE (XEXP (x, 1)) == NOT)
10333 /* X is a CR register. Print the mask for `mtcrf'. */
10334 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
10335 output_operand_lossage ("invalid %%R value");
10337 fprintf (file, "%d", 128 >> (REGNO (x) - CR0_REGNO));
10341 /* Low 5 bits of 32 - value */
10343 output_operand_lossage ("invalid %%s value");
10345 fprintf (file, HOST_WIDE_INT_PRINT_DEC, (32 - INT_LOWPART (x)) & 31);
10349 /* PowerPC64 mask position. All 0's is excluded.
10350 CONST_INT 32-bit mask is considered sign-extended so any
10351 transition must occur within the CONST_INT, not on the boundary. */
10352 if (! mask_operand (x, DImode))
10353 output_operand_lossage ("invalid %%S value");
10355 uval = INT_LOWPART (x);
10357 if (uval & 1) /* Clear Left */
10359 #if HOST_BITS_PER_WIDE_INT > 64
10360 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
10364 else /* Clear Right */
10367 #if HOST_BITS_PER_WIDE_INT > 64
10368 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
10374 gcc_assert (i >= 0);
10375 fprintf (file, "%d", i);
10379 /* Like 'J' but get to the OVERFLOW/UNORDERED bit. */
10380 gcc_assert (GET_CODE (x) == REG && GET_MODE (x) == CCmode);
10382 /* Bit 3 is OV bit. */
10383 i = 4 * (REGNO (x) - CR0_REGNO) + 3;
10385 /* If we want bit 31, write a shift count of zero, not 32. */
10386 fprintf (file, "%d", i == 31 ? 0 : i + 1);
10390 /* Print the symbolic name of a branch target register. */
10391 if (GET_CODE (x) != REG || (REGNO (x) != LINK_REGISTER_REGNUM
10392 && REGNO (x) != COUNT_REGISTER_REGNUM))
10393 output_operand_lossage ("invalid %%T value");
10394 else if (REGNO (x) == LINK_REGISTER_REGNUM)
10395 fputs (TARGET_NEW_MNEMONICS ? "lr" : "r", file);
10397 fputs ("ctr", file);
10401 /* High-order 16 bits of constant for use in unsigned operand. */
10403 output_operand_lossage ("invalid %%u value");
10405 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
10406 (INT_LOWPART (x) >> 16) & 0xffff);
10410 /* High-order 16 bits of constant for use in signed operand. */
10412 output_operand_lossage ("invalid %%v value");
10414 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
10415 (INT_LOWPART (x) >> 16) & 0xffff);
10419 /* Print `u' if this has an auto-increment or auto-decrement. */
10420 if (GET_CODE (x) == MEM
10421 && (GET_CODE (XEXP (x, 0)) == PRE_INC
10422 || GET_CODE (XEXP (x, 0)) == PRE_DEC))
10427 /* Print the trap code for this operand. */
10428 switch (GET_CODE (x))
10431 fputs ("eq", file); /* 4 */
10434 fputs ("ne", file); /* 24 */
10437 fputs ("lt", file); /* 16 */
10440 fputs ("le", file); /* 20 */
10443 fputs ("gt", file); /* 8 */
10446 fputs ("ge", file); /* 12 */
10449 fputs ("llt", file); /* 2 */
10452 fputs ("lle", file); /* 6 */
10455 fputs ("lgt", file); /* 1 */
10458 fputs ("lge", file); /* 5 */
10461 gcc_unreachable ();
10466 /* If constant, low-order 16 bits of constant, signed. Otherwise, write
10469 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
10470 ((INT_LOWPART (x) & 0xffff) ^ 0x8000) - 0x8000);
10472 print_operand (file, x, 0);
10476 /* MB value for a PowerPC64 rldic operand. */
10477 val = (GET_CODE (x) == CONST_INT
10478 ? INTVAL (x) : CONST_DOUBLE_HIGH (x));
10483 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
10484 if ((val <<= 1) < 0)
10487 #if HOST_BITS_PER_WIDE_INT == 32
10488 if (GET_CODE (x) == CONST_INT && i >= 0)
10489 i += 32; /* zero-extend high-part was all 0's */
10490 else if (GET_CODE (x) == CONST_DOUBLE && i == 32)
10492 val = CONST_DOUBLE_LOW (x);
10498 for ( ; i < 64; i++)
10499 if ((val <<= 1) < 0)
10504 fprintf (file, "%d", i + 1);
10508 if (GET_CODE (x) == MEM
10509 && legitimate_indexed_address_p (XEXP (x, 0), 0))
10514 /* Like 'L', for third word of TImode */
10515 if (GET_CODE (x) == REG)
10516 fputs (reg_names[REGNO (x) + 2], file);
10517 else if (GET_CODE (x) == MEM)
10519 if (GET_CODE (XEXP (x, 0)) == PRE_INC
10520 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
10521 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
10523 output_address (XEXP (adjust_address_nv (x, SImode, 8), 0));
10524 if (small_data_operand (x, GET_MODE (x)))
10525 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
10526 reg_names[SMALL_DATA_REG]);
10531 /* X is a SYMBOL_REF. Write out the name preceded by a
10532 period and without any trailing data in brackets. Used for function
10533 names. If we are configured for System V (or the embedded ABI) on
10534 the PowerPC, do not emit the period, since those systems do not use
10535 TOCs and the like. */
10536 gcc_assert (GET_CODE (x) == SYMBOL_REF);
10538 /* Mark the decl as referenced so that cgraph will output the
10540 if (SYMBOL_REF_DECL (x))
10541 mark_decl_referenced (SYMBOL_REF_DECL (x));
10543 /* For macho, check to see if we need a stub. */
10546 const char *name = XSTR (x, 0);
10548 if (MACHOPIC_INDIRECT
10549 && machopic_classify_symbol (x) == MACHOPIC_UNDEFINED_FUNCTION)
10550 name = machopic_indirection_name (x, /*stub_p=*/true);
10552 assemble_name (file, name);
10554 else if (!DOT_SYMBOLS)
10555 assemble_name (file, XSTR (x, 0));
10557 rs6000_output_function_entry (file, XSTR (x, 0));
10561 /* Like 'L', for last word of TImode. */
10562 if (GET_CODE (x) == REG)
10563 fputs (reg_names[REGNO (x) + 3], file);
10564 else if (GET_CODE (x) == MEM)
10566 if (GET_CODE (XEXP (x, 0)) == PRE_INC
10567 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
10568 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
10570 output_address (XEXP (adjust_address_nv (x, SImode, 12), 0));
10571 if (small_data_operand (x, GET_MODE (x)))
10572 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
10573 reg_names[SMALL_DATA_REG]);
10577 /* Print AltiVec or SPE memory operand. */
10582 gcc_assert (GET_CODE (x) == MEM);
10588 /* Handle [reg]. */
10589 if (GET_CODE (tmp) == REG)
10591 fprintf (file, "0(%s)", reg_names[REGNO (tmp)]);
10594 /* Handle [reg+UIMM]. */
10595 else if (GET_CODE (tmp) == PLUS &&
10596 GET_CODE (XEXP (tmp, 1)) == CONST_INT)
10600 gcc_assert (GET_CODE (XEXP (tmp, 0)) == REG);
10602 x = INTVAL (XEXP (tmp, 1));
10603 fprintf (file, "%d(%s)", x, reg_names[REGNO (XEXP (tmp, 0))]);
10607 /* Fall through. Must be [reg+reg]. */
10610 && GET_CODE (tmp) == AND
10611 && GET_CODE (XEXP (tmp, 1)) == CONST_INT
10612 && INTVAL (XEXP (tmp, 1)) == -16)
10613 tmp = XEXP (tmp, 0);
10614 if (GET_CODE (tmp) == REG)
10615 fprintf (file, "0,%s", reg_names[REGNO (tmp)]);
10618 gcc_assert (GET_CODE (tmp) == PLUS
10619 && GET_CODE (XEXP (tmp, 1)) == REG);
10621 if (REGNO (XEXP (tmp, 0)) == 0)
10622 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 1)) ],
10623 reg_names[ REGNO (XEXP (tmp, 0)) ]);
10625 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 0)) ],
10626 reg_names[ REGNO (XEXP (tmp, 1)) ]);
10632 if (GET_CODE (x) == REG)
10633 fprintf (file, "%s", reg_names[REGNO (x)]);
10634 else if (GET_CODE (x) == MEM)
10636 /* We need to handle PRE_INC and PRE_DEC here, since we need to
10637 know the width from the mode. */
10638 if (GET_CODE (XEXP (x, 0)) == PRE_INC)
10639 fprintf (file, "%d(%s)", GET_MODE_SIZE (GET_MODE (x)),
10640 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
10641 else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
10642 fprintf (file, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x)),
10643 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
10645 output_address (XEXP (x, 0));
10648 output_addr_const (file, x);
10652 assemble_name (file, rs6000_get_some_local_dynamic_name ());
10656 output_operand_lossage ("invalid %%xn code");
10660 /* Print the address of an operand. */
10663 print_operand_address (FILE *file, rtx x)
10665 if (GET_CODE (x) == REG)
10666 fprintf (file, "0(%s)", reg_names[ REGNO (x) ]);
10667 else if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST
10668 || GET_CODE (x) == LABEL_REF)
10670 output_addr_const (file, x);
10671 if (small_data_operand (x, GET_MODE (x)))
10672 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
10673 reg_names[SMALL_DATA_REG]);
10675 gcc_assert (!TARGET_TOC);
10677 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG)
10679 if (REGNO (XEXP (x, 0)) == 0)
10680 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 1)) ],
10681 reg_names[ REGNO (XEXP (x, 0)) ]);
10683 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 0)) ],
10684 reg_names[ REGNO (XEXP (x, 1)) ]);
10686 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
10687 fprintf (file, HOST_WIDE_INT_PRINT_DEC "(%s)",
10688 INTVAL (XEXP (x, 1)), reg_names[ REGNO (XEXP (x, 0)) ]);
10690 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
10691 && CONSTANT_P (XEXP (x, 1)))
10693 output_addr_const (file, XEXP (x, 1));
10694 fprintf (file, "@l(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
10698 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
10699 && CONSTANT_P (XEXP (x, 1)))
10701 fprintf (file, "lo16(");
10702 output_addr_const (file, XEXP (x, 1));
10703 fprintf (file, ")(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
10706 else if (legitimate_constant_pool_address_p (x))
10708 if (TARGET_AIX && (!TARGET_ELF || !TARGET_MINIMAL_TOC))
10710 rtx contains_minus = XEXP (x, 1);
10714 /* Find the (minus (sym) (toc)) buried in X, and temporarily
10715 turn it into (sym) for output_addr_const. */
10716 while (GET_CODE (XEXP (contains_minus, 0)) != MINUS)
10717 contains_minus = XEXP (contains_minus, 0);
10719 minus = XEXP (contains_minus, 0);
10720 symref = XEXP (minus, 0);
10721 XEXP (contains_minus, 0) = symref;
10726 name = XSTR (symref, 0);
10727 newname = alloca (strlen (name) + sizeof ("@toc"));
10728 strcpy (newname, name);
10729 strcat (newname, "@toc");
10730 XSTR (symref, 0) = newname;
10732 output_addr_const (file, XEXP (x, 1));
10734 XSTR (symref, 0) = name;
10735 XEXP (contains_minus, 0) = minus;
10738 output_addr_const (file, XEXP (x, 1));
10740 fprintf (file, "(%s)", reg_names[REGNO (XEXP (x, 0))]);
10743 gcc_unreachable ();
10746 /* Target hook for assembling integer objects. The PowerPC version has
10747 to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
10748 is defined. It also needs to handle DI-mode objects on 64-bit
10752 rs6000_assemble_integer (rtx x, unsigned int size, int aligned_p)
10754 #ifdef RELOCATABLE_NEEDS_FIXUP
10755 /* Special handling for SI values. */
10756 if (RELOCATABLE_NEEDS_FIXUP && size == 4 && aligned_p)
10758 extern int in_toc_section (void);
10759 static int recurse = 0;
10761 /* For -mrelocatable, we mark all addresses that need to be fixed up
10762 in the .fixup section. */
10763 if (TARGET_RELOCATABLE
10764 && !in_toc_section ()
10765 && !in_text_section ()
10766 && !in_unlikely_text_section ()
10768 && GET_CODE (x) != CONST_INT
10769 && GET_CODE (x) != CONST_DOUBLE
10775 ASM_GENERATE_INTERNAL_LABEL (buf, "LCP", fixuplabelno);
10777 ASM_OUTPUT_LABEL (asm_out_file, buf);
10778 fprintf (asm_out_file, "\t.long\t(");
10779 output_addr_const (asm_out_file, x);
10780 fprintf (asm_out_file, ")@fixup\n");
10781 fprintf (asm_out_file, "\t.section\t\".fixup\",\"aw\"\n");
10782 ASM_OUTPUT_ALIGN (asm_out_file, 2);
10783 fprintf (asm_out_file, "\t.long\t");
10784 assemble_name (asm_out_file, buf);
10785 fprintf (asm_out_file, "\n\t.previous\n");
10789 /* Remove initial .'s to turn a -mcall-aixdesc function
10790 address into the address of the descriptor, not the function
10792 else if (GET_CODE (x) == SYMBOL_REF
10793 && XSTR (x, 0)[0] == '.'
10794 && DEFAULT_ABI == ABI_AIX)
10796 const char *name = XSTR (x, 0);
10797 while (*name == '.')
10800 fprintf (asm_out_file, "\t.long\t%s\n", name);
10804 #endif /* RELOCATABLE_NEEDS_FIXUP */
10805 return default_assemble_integer (x, size, aligned_p);
10808 #ifdef HAVE_GAS_HIDDEN
10809 /* Emit an assembler directive to set symbol visibility for DECL to
10810 VISIBILITY_TYPE. */
10813 rs6000_assemble_visibility (tree decl, int vis)
10815 /* Functions need to have their entry point symbol visibility set as
10816 well as their descriptor symbol visibility. */
10817 if (DEFAULT_ABI == ABI_AIX
10819 && TREE_CODE (decl) == FUNCTION_DECL)
10821 static const char * const visibility_types[] = {
10822 NULL, "internal", "hidden", "protected"
10825 const char *name, *type;
10827 name = ((* targetm.strip_name_encoding)
10828 (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl))));
10829 type = visibility_types[vis];
10831 fprintf (asm_out_file, "\t.%s\t%s\n", type, name);
10832 fprintf (asm_out_file, "\t.%s\t.%s\n", type, name);
10835 default_assemble_visibility (decl, vis);
10840 rs6000_reverse_condition (enum machine_mode mode, enum rtx_code code)
10842 /* Reversal of FP compares takes care -- an ordered compare
10843 becomes an unordered compare and vice versa. */
10844 if (mode == CCFPmode
10845 && (!flag_finite_math_only
10846 || code == UNLT || code == UNLE || code == UNGT || code == UNGE
10847 || code == UNEQ || code == LTGT))
10848 return reverse_condition_maybe_unordered (code);
10850 return reverse_condition (code);
10853 /* Generate a compare for CODE. Return a brand-new rtx that
10854 represents the result of the compare. */
10857 rs6000_generate_compare (enum rtx_code code)
10859 enum machine_mode comp_mode;
10860 rtx compare_result;
10862 if (rs6000_compare_fp_p)
10863 comp_mode = CCFPmode;
10864 else if (code == GTU || code == LTU
10865 || code == GEU || code == LEU)
10866 comp_mode = CCUNSmode;
10867 else if ((code == EQ || code == NE)
10868 && GET_CODE (rs6000_compare_op0) == SUBREG
10869 && GET_CODE (rs6000_compare_op1) == SUBREG
10870 && SUBREG_PROMOTED_UNSIGNED_P (rs6000_compare_op0)
10871 && SUBREG_PROMOTED_UNSIGNED_P (rs6000_compare_op1))
10872 /* These are unsigned values, perhaps there will be a later
10873 ordering compare that can be shared with this one.
10874 Unfortunately we cannot detect the signedness of the operands
10875 for non-subregs. */
10876 comp_mode = CCUNSmode;
10878 comp_mode = CCmode;
10880 /* First, the compare. */
10881 compare_result = gen_reg_rtx (comp_mode);
10883 /* SPE FP compare instructions on the GPRs. Yuck! */
10884 if ((TARGET_E500 && !TARGET_FPRS && TARGET_HARD_FLOAT)
10885 && rs6000_compare_fp_p)
10887 rtx cmp, or_result, compare_result2;
10888 enum machine_mode op_mode = GET_MODE (rs6000_compare_op0);
10890 if (op_mode == VOIDmode)
10891 op_mode = GET_MODE (rs6000_compare_op1);
10893 /* Note: The E500 comparison instructions set the GT bit (x +
10894 1), on success. This explains the mess. */
10898 case EQ: case UNEQ: case NE: case LTGT:
10902 cmp = flag_unsafe_math_optimizations
10903 ? gen_tstsfeq_gpr (compare_result, rs6000_compare_op0,
10904 rs6000_compare_op1)
10905 : gen_cmpsfeq_gpr (compare_result, rs6000_compare_op0,
10906 rs6000_compare_op1);
10910 cmp = flag_unsafe_math_optimizations
10911 ? gen_tstdfeq_gpr (compare_result, rs6000_compare_op0,
10912 rs6000_compare_op1)
10913 : gen_cmpdfeq_gpr (compare_result, rs6000_compare_op0,
10914 rs6000_compare_op1);
10918 gcc_unreachable ();
10922 case GT: case GTU: case UNGT: case UNGE: case GE: case GEU:
10926 cmp = flag_unsafe_math_optimizations
10927 ? gen_tstsfgt_gpr (compare_result, rs6000_compare_op0,
10928 rs6000_compare_op1)
10929 : gen_cmpsfgt_gpr (compare_result, rs6000_compare_op0,
10930 rs6000_compare_op1);
10934 cmp = flag_unsafe_math_optimizations
10935 ? gen_tstdfgt_gpr (compare_result, rs6000_compare_op0,
10936 rs6000_compare_op1)
10937 : gen_cmpdfgt_gpr (compare_result, rs6000_compare_op0,
10938 rs6000_compare_op1);
10942 gcc_unreachable ();
10946 case LT: case LTU: case UNLT: case UNLE: case LE: case LEU:
10950 cmp = flag_unsafe_math_optimizations
10951 ? gen_tstsflt_gpr (compare_result, rs6000_compare_op0,
10952 rs6000_compare_op1)
10953 : gen_cmpsflt_gpr (compare_result, rs6000_compare_op0,
10954 rs6000_compare_op1);
10958 cmp = flag_unsafe_math_optimizations
10959 ? gen_tstdflt_gpr (compare_result, rs6000_compare_op0,
10960 rs6000_compare_op1)
10961 : gen_cmpdflt_gpr (compare_result, rs6000_compare_op0,
10962 rs6000_compare_op1);
10966 gcc_unreachable ();
10970 gcc_unreachable ();
10973 /* Synthesize LE and GE from LT/GT || EQ. */
10974 if (code == LE || code == GE || code == LEU || code == GEU)
10980 case LE: code = LT; break;
10981 case GE: code = GT; break;
10982 case LEU: code = LT; break;
10983 case GEU: code = GT; break;
10984 default: gcc_unreachable ();
10987 compare_result2 = gen_reg_rtx (CCFPmode);
10993 cmp = flag_unsafe_math_optimizations
10994 ? gen_tstsfeq_gpr (compare_result2, rs6000_compare_op0,
10995 rs6000_compare_op1)
10996 : gen_cmpsfeq_gpr (compare_result2, rs6000_compare_op0,
10997 rs6000_compare_op1);
11001 cmp = flag_unsafe_math_optimizations
11002 ? gen_tstdfeq_gpr (compare_result2, rs6000_compare_op0,
11003 rs6000_compare_op1)
11004 : gen_cmpdfeq_gpr (compare_result2, rs6000_compare_op0,
11005 rs6000_compare_op1);
11009 gcc_unreachable ();
11013 /* OR them together. */
11014 or_result = gen_reg_rtx (CCFPmode);
11015 cmp = gen_e500_cr_ior_compare (or_result, compare_result,
11017 compare_result = or_result;
11022 if (code == NE || code == LTGT)
11032 /* Generate XLC-compatible TFmode compare as PARALLEL with extra
11033 CLOBBERs to match cmptf_internal2 pattern. */
11034 if (comp_mode == CCFPmode && TARGET_XL_COMPAT
11035 && GET_MODE (rs6000_compare_op0) == TFmode
11036 && (DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_DARWIN)
11037 && TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_LONG_DOUBLE_128)
11038 emit_insn (gen_rtx_PARALLEL (VOIDmode,
11040 gen_rtx_SET (VOIDmode,
11042 gen_rtx_COMPARE (comp_mode,
11043 rs6000_compare_op0,
11044 rs6000_compare_op1)),
11045 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
11046 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
11047 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
11048 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
11049 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
11050 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
11051 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
11052 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)))));
11053 else if (GET_CODE (rs6000_compare_op1) == UNSPEC
11054 && XINT (rs6000_compare_op1, 1) == UNSPEC_SP_TEST)
11056 rtx op1 = XVECEXP (rs6000_compare_op1, 0, 0);
11057 comp_mode = CCEQmode;
11058 compare_result = gen_reg_rtx (CCEQmode);
11060 emit_insn (gen_stack_protect_testdi (compare_result,
11061 rs6000_compare_op0, op1));
11063 emit_insn (gen_stack_protect_testsi (compare_result,
11064 rs6000_compare_op0, op1));
11067 emit_insn (gen_rtx_SET (VOIDmode, compare_result,
11068 gen_rtx_COMPARE (comp_mode,
11069 rs6000_compare_op0,
11070 rs6000_compare_op1)));
11073 /* Some kinds of FP comparisons need an OR operation;
11074 under flag_finite_math_only we don't bother. */
11075 if (rs6000_compare_fp_p
11076 && !flag_finite_math_only
11077 && !(TARGET_HARD_FLOAT && TARGET_E500 && !TARGET_FPRS)
11078 && (code == LE || code == GE
11079 || code == UNEQ || code == LTGT
11080 || code == UNGT || code == UNLT))
11082 enum rtx_code or1, or2;
11083 rtx or1_rtx, or2_rtx, compare2_rtx;
11084 rtx or_result = gen_reg_rtx (CCEQmode);
11088 case LE: or1 = LT; or2 = EQ; break;
11089 case GE: or1 = GT; or2 = EQ; break;
11090 case UNEQ: or1 = UNORDERED; or2 = EQ; break;
11091 case LTGT: or1 = LT; or2 = GT; break;
11092 case UNGT: or1 = UNORDERED; or2 = GT; break;
11093 case UNLT: or1 = UNORDERED; or2 = LT; break;
11094 default: gcc_unreachable ();
11096 validate_condition_mode (or1, comp_mode);
11097 validate_condition_mode (or2, comp_mode);
11098 or1_rtx = gen_rtx_fmt_ee (or1, SImode, compare_result, const0_rtx);
11099 or2_rtx = gen_rtx_fmt_ee (or2, SImode, compare_result, const0_rtx);
11100 compare2_rtx = gen_rtx_COMPARE (CCEQmode,
11101 gen_rtx_IOR (SImode, or1_rtx, or2_rtx),
11103 emit_insn (gen_rtx_SET (VOIDmode, or_result, compare2_rtx));
11105 compare_result = or_result;
11109 validate_condition_mode (code, GET_MODE (compare_result));
11111 return gen_rtx_fmt_ee (code, VOIDmode, compare_result, const0_rtx);
11115 /* Emit the RTL for an sCOND pattern. */
11118 rs6000_emit_sCOND (enum rtx_code code, rtx result)
11121 enum machine_mode op_mode;
11122 enum rtx_code cond_code;
11124 condition_rtx = rs6000_generate_compare (code);
11125 cond_code = GET_CODE (condition_rtx);
11127 if (TARGET_E500 && rs6000_compare_fp_p
11128 && !TARGET_FPRS && TARGET_HARD_FLOAT)
11132 PUT_MODE (condition_rtx, SImode);
11133 t = XEXP (condition_rtx, 0);
11135 gcc_assert (cond_code == NE || cond_code == EQ);
11137 if (cond_code == NE)
11138 emit_insn (gen_e500_flip_gt_bit (t, t));
11140 emit_insn (gen_move_from_CR_gt_bit (result, t));
11144 if (cond_code == NE
11145 || cond_code == GE || cond_code == LE
11146 || cond_code == GEU || cond_code == LEU
11147 || cond_code == ORDERED || cond_code == UNGE || cond_code == UNLE)
11149 rtx not_result = gen_reg_rtx (CCEQmode);
11150 rtx not_op, rev_cond_rtx;
11151 enum machine_mode cc_mode;
11153 cc_mode = GET_MODE (XEXP (condition_rtx, 0));
11155 rev_cond_rtx = gen_rtx_fmt_ee (rs6000_reverse_condition (cc_mode, cond_code),
11156 SImode, XEXP (condition_rtx, 0), const0_rtx);
11157 not_op = gen_rtx_COMPARE (CCEQmode, rev_cond_rtx, const0_rtx);
11158 emit_insn (gen_rtx_SET (VOIDmode, not_result, not_op));
11159 condition_rtx = gen_rtx_EQ (VOIDmode, not_result, const0_rtx);
11162 op_mode = GET_MODE (rs6000_compare_op0);
11163 if (op_mode == VOIDmode)
11164 op_mode = GET_MODE (rs6000_compare_op1);
11166 if (TARGET_POWERPC64 && (op_mode == DImode || rs6000_compare_fp_p))
11168 PUT_MODE (condition_rtx, DImode);
11169 convert_move (result, condition_rtx, 0);
11173 PUT_MODE (condition_rtx, SImode);
11174 emit_insn (gen_rtx_SET (VOIDmode, result, condition_rtx));
11178 /* Emit a branch of kind CODE to location LOC. */
11181 rs6000_emit_cbranch (enum rtx_code code, rtx loc)
11183 rtx condition_rtx, loc_ref;
11185 condition_rtx = rs6000_generate_compare (code);
11186 loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
11187 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
11188 gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
11189 loc_ref, pc_rtx)));
11192 /* Return the string to output a conditional branch to LABEL, which is
11193 the operand number of the label, or -1 if the branch is really a
11194 conditional return.
11196 OP is the conditional expression. XEXP (OP, 0) is assumed to be a
11197 condition code register and its mode specifies what kind of
11198 comparison we made.
11200 REVERSED is nonzero if we should reverse the sense of the comparison.
11202 INSN is the insn. */
11205 output_cbranch (rtx op, const char *label, int reversed, rtx insn)
11207 static char string[64];
11208 enum rtx_code code = GET_CODE (op);
11209 rtx cc_reg = XEXP (op, 0);
11210 enum machine_mode mode = GET_MODE (cc_reg);
11211 int cc_regno = REGNO (cc_reg) - CR0_REGNO;
11212 int need_longbranch = label != NULL && get_attr_length (insn) == 8;
11213 int really_reversed = reversed ^ need_longbranch;
11219 validate_condition_mode (code, mode);
11221 /* Work out which way this really branches. We could use
11222 reverse_condition_maybe_unordered here always but this
11223 makes the resulting assembler clearer. */
11224 if (really_reversed)
11226 /* Reversal of FP compares takes care -- an ordered compare
11227 becomes an unordered compare and vice versa. */
11228 if (mode == CCFPmode)
11229 code = reverse_condition_maybe_unordered (code);
11231 code = reverse_condition (code);
11234 if ((TARGET_E500 && !TARGET_FPRS && TARGET_HARD_FLOAT) && mode == CCFPmode)
11236 /* The efscmp/tst* instructions twiddle bit 2, which maps nicely
11241 /* Opposite of GT. */
11250 gcc_unreachable ();
11256 /* Not all of these are actually distinct opcodes, but
11257 we distinguish them for clarity of the resulting assembler. */
11258 case NE: case LTGT:
11259 ccode = "ne"; break;
11260 case EQ: case UNEQ:
11261 ccode = "eq"; break;
11263 ccode = "ge"; break;
11264 case GT: case GTU: case UNGT:
11265 ccode = "gt"; break;
11267 ccode = "le"; break;
11268 case LT: case LTU: case UNLT:
11269 ccode = "lt"; break;
11270 case UNORDERED: ccode = "un"; break;
11271 case ORDERED: ccode = "nu"; break;
11272 case UNGE: ccode = "nl"; break;
11273 case UNLE: ccode = "ng"; break;
11275 gcc_unreachable ();
11278 /* Maybe we have a guess as to how likely the branch is.
11279 The old mnemonics don't have a way to specify this information. */
11281 note = find_reg_note (insn, REG_BR_PROB, NULL_RTX);
11282 if (note != NULL_RTX)
11284 /* PROB is the difference from 50%. */
11285 int prob = INTVAL (XEXP (note, 0)) - REG_BR_PROB_BASE / 2;
11287 /* Only hint for highly probable/improbable branches on newer
11288 cpus as static prediction overrides processor dynamic
11289 prediction. For older cpus we may as well always hint, but
11290 assume not taken for branches that are very close to 50% as a
11291 mispredicted taken branch is more expensive than a
11292 mispredicted not-taken branch. */
11293 if (rs6000_always_hint
11294 || abs (prob) > REG_BR_PROB_BASE / 100 * 48)
11296 if (abs (prob) > REG_BR_PROB_BASE / 20
11297 && ((prob > 0) ^ need_longbranch))
11305 s += sprintf (s, "{b%sr|b%slr%s} ", ccode, ccode, pred);
11307 s += sprintf (s, "{b%s|b%s%s} ", ccode, ccode, pred);
11309 /* We need to escape any '%' characters in the reg_names string.
11310 Assume they'd only be the first character.... */
11311 if (reg_names[cc_regno + CR0_REGNO][0] == '%')
11313 s += sprintf (s, "%s", reg_names[cc_regno + CR0_REGNO]);
11317 /* If the branch distance was too far, we may have to use an
11318 unconditional branch to go the distance. */
11319 if (need_longbranch)
11320 s += sprintf (s, ",$+8\n\tb %s", label);
11322 s += sprintf (s, ",%s", label);
11328 /* Return the string to flip the GT bit on a CR. */
11330 output_e500_flip_gt_bit (rtx dst, rtx src)
11332 static char string[64];
11335 gcc_assert (GET_CODE (dst) == REG && CR_REGNO_P (REGNO (dst))
11336 && GET_CODE (src) == REG && CR_REGNO_P (REGNO (src)));
11339 a = 4 * (REGNO (dst) - CR0_REGNO) + 1;
11340 b = 4 * (REGNO (src) - CR0_REGNO) + 1;
11342 sprintf (string, "crnot %d,%d", a, b);
11346 /* Return insn index for the vector compare instruction for given CODE,
11347 and DEST_MODE, OP_MODE. Return INSN_NOT_AVAILABLE if valid insn is
11351 get_vec_cmp_insn (enum rtx_code code,
11352 enum machine_mode dest_mode,
11353 enum machine_mode op_mode)
11355 if (!TARGET_ALTIVEC)
11356 return INSN_NOT_AVAILABLE;
11361 if (dest_mode == V16QImode && op_mode == V16QImode)
11362 return UNSPEC_VCMPEQUB;
11363 if (dest_mode == V8HImode && op_mode == V8HImode)
11364 return UNSPEC_VCMPEQUH;
11365 if (dest_mode == V4SImode && op_mode == V4SImode)
11366 return UNSPEC_VCMPEQUW;
11367 if (dest_mode == V4SImode && op_mode == V4SFmode)
11368 return UNSPEC_VCMPEQFP;
11371 if (dest_mode == V4SImode && op_mode == V4SFmode)
11372 return UNSPEC_VCMPGEFP;
11374 if (dest_mode == V16QImode && op_mode == V16QImode)
11375 return UNSPEC_VCMPGTSB;
11376 if (dest_mode == V8HImode && op_mode == V8HImode)
11377 return UNSPEC_VCMPGTSH;
11378 if (dest_mode == V4SImode && op_mode == V4SImode)
11379 return UNSPEC_VCMPGTSW;
11380 if (dest_mode == V4SImode && op_mode == V4SFmode)
11381 return UNSPEC_VCMPGTFP;
11384 if (dest_mode == V16QImode && op_mode == V16QImode)
11385 return UNSPEC_VCMPGTUB;
11386 if (dest_mode == V8HImode && op_mode == V8HImode)
11387 return UNSPEC_VCMPGTUH;
11388 if (dest_mode == V4SImode && op_mode == V4SImode)
11389 return UNSPEC_VCMPGTUW;
11394 return INSN_NOT_AVAILABLE;
11397 /* Emit vector compare for operands OP0 and OP1 using code RCODE.
11398 DMODE is expected destination mode. This is a recursive function. */
11401 rs6000_emit_vector_compare (enum rtx_code rcode,
11403 enum machine_mode dmode)
11407 enum machine_mode dest_mode;
11408 enum machine_mode op_mode = GET_MODE (op1);
11410 gcc_assert (TARGET_ALTIVEC);
11411 gcc_assert (GET_MODE (op0) == GET_MODE (op1));
11413 /* Floating point vector compare instructions uses destination V4SImode.
11414 Move destination to appropriate mode later. */
11415 if (dmode == V4SFmode)
11416 dest_mode = V4SImode;
11420 mask = gen_reg_rtx (dest_mode);
11421 vec_cmp_insn = get_vec_cmp_insn (rcode, dest_mode, op_mode);
11423 if (vec_cmp_insn == INSN_NOT_AVAILABLE)
11425 bool swap_operands = false;
11426 bool try_again = false;
11431 swap_operands = true;
11436 swap_operands = true;
11440 /* Treat A != B as ~(A==B). */
11442 enum insn_code nor_code;
11443 rtx eq_rtx = rs6000_emit_vector_compare (EQ, op0, op1,
11446 nor_code = one_cmpl_optab->handlers[(int)dest_mode].insn_code;
11447 gcc_assert (nor_code != CODE_FOR_nothing);
11448 emit_insn (GEN_FCN (nor_code) (mask, eq_rtx));
11450 if (dmode != dest_mode)
11452 rtx temp = gen_reg_rtx (dest_mode);
11453 convert_move (temp, mask, 0);
11463 /* Try GT/GTU/LT/LTU OR EQ */
11466 enum insn_code ior_code;
11467 enum rtx_code new_code;
11488 gcc_unreachable ();
11491 c_rtx = rs6000_emit_vector_compare (new_code,
11492 op0, op1, dest_mode);
11493 eq_rtx = rs6000_emit_vector_compare (EQ, op0, op1,
11496 ior_code = ior_optab->handlers[(int)dest_mode].insn_code;
11497 gcc_assert (ior_code != CODE_FOR_nothing);
11498 emit_insn (GEN_FCN (ior_code) (mask, c_rtx, eq_rtx));
11499 if (dmode != dest_mode)
11501 rtx temp = gen_reg_rtx (dest_mode);
11502 convert_move (temp, mask, 0);
11509 gcc_unreachable ();
11514 vec_cmp_insn = get_vec_cmp_insn (rcode, dest_mode, op_mode);
11515 /* You only get two chances. */
11516 gcc_assert (vec_cmp_insn != INSN_NOT_AVAILABLE);
11528 emit_insn (gen_rtx_SET (VOIDmode, mask,
11529 gen_rtx_UNSPEC (dest_mode,
11530 gen_rtvec (2, op0, op1),
11532 if (dmode != dest_mode)
11534 rtx temp = gen_reg_rtx (dest_mode);
11535 convert_move (temp, mask, 0);
11541 /* Return vector select instruction for MODE. Return INSN_NOT_AVAILABLE, if
11542 valid insn doesn exist for given mode. */
11545 get_vsel_insn (enum machine_mode mode)
11550 return UNSPEC_VSEL4SI;
11553 return UNSPEC_VSEL4SF;
11556 return UNSPEC_VSEL8HI;
11559 return UNSPEC_VSEL16QI;
11562 return INSN_NOT_AVAILABLE;
11565 return INSN_NOT_AVAILABLE;
11568 /* Emit vector select insn where DEST is destination using
11569 operands OP1, OP2 and MASK. */
11572 rs6000_emit_vector_select (rtx dest, rtx op1, rtx op2, rtx mask)
11575 enum machine_mode dest_mode = GET_MODE (dest);
11576 int vsel_insn_index = get_vsel_insn (GET_MODE (dest));
11578 temp = gen_reg_rtx (dest_mode);
11580 /* For each vector element, select op1 when mask is 1 otherwise
11582 t = gen_rtx_SET (VOIDmode, temp,
11583 gen_rtx_UNSPEC (dest_mode,
11584 gen_rtvec (3, op2, op1, mask),
11587 emit_move_insn (dest, temp);
11591 /* Emit vector conditional expression.
11592 DEST is destination. OP1 and OP2 are two VEC_COND_EXPR operands.
11593 CC_OP0 and CC_OP1 are the two operands for the relation operation COND. */
11596 rs6000_emit_vector_cond_expr (rtx dest, rtx op1, rtx op2,
11597 rtx cond, rtx cc_op0, rtx cc_op1)
11599 enum machine_mode dest_mode = GET_MODE (dest);
11600 enum rtx_code rcode = GET_CODE (cond);
11603 if (!TARGET_ALTIVEC)
11606 /* Get the vector mask for the given relational operations. */
11607 mask = rs6000_emit_vector_compare (rcode, cc_op0, cc_op1, dest_mode);
11609 rs6000_emit_vector_select (dest, op1, op2, mask);
11614 /* Emit a conditional move: move TRUE_COND to DEST if OP of the
11615 operands of the last comparison is nonzero/true, FALSE_COND if it
11616 is zero/false. Return 0 if the hardware has no such operation. */
11619 rs6000_emit_cmove (rtx dest, rtx op, rtx true_cond, rtx false_cond)
11621 enum rtx_code code = GET_CODE (op);
11622 rtx op0 = rs6000_compare_op0;
11623 rtx op1 = rs6000_compare_op1;
11624 REAL_VALUE_TYPE c1;
11625 enum machine_mode compare_mode = GET_MODE (op0);
11626 enum machine_mode result_mode = GET_MODE (dest);
11628 bool is_against_zero;
11630 /* These modes should always match. */
11631 if (GET_MODE (op1) != compare_mode
11632 /* In the isel case however, we can use a compare immediate, so
11633 op1 may be a small constant. */
11634 && (!TARGET_ISEL || !short_cint_operand (op1, VOIDmode)))
11636 if (GET_MODE (true_cond) != result_mode)
11638 if (GET_MODE (false_cond) != result_mode)
11641 /* First, work out if the hardware can do this at all, or
11642 if it's too slow.... */
11643 if (! rs6000_compare_fp_p)
11646 return rs6000_emit_int_cmove (dest, op, true_cond, false_cond);
11649 else if (TARGET_E500 && TARGET_HARD_FLOAT && !TARGET_FPRS
11650 && GET_MODE_CLASS (compare_mode) == MODE_FLOAT)
11653 is_against_zero = op1 == CONST0_RTX (compare_mode);
11655 /* A floating-point subtract might overflow, underflow, or produce
11656 an inexact result, thus changing the floating-point flags, so it
11657 can't be generated if we care about that. It's safe if one side
11658 of the construct is zero, since then no subtract will be
11660 if (GET_MODE_CLASS (compare_mode) == MODE_FLOAT
11661 && flag_trapping_math && ! is_against_zero)
11664 /* Eliminate half of the comparisons by switching operands, this
11665 makes the remaining code simpler. */
11666 if (code == UNLT || code == UNGT || code == UNORDERED || code == NE
11667 || code == LTGT || code == LT || code == UNLE)
11669 code = reverse_condition_maybe_unordered (code);
11671 true_cond = false_cond;
11675 /* UNEQ and LTGT take four instructions for a comparison with zero,
11676 it'll probably be faster to use a branch here too. */
11677 if (code == UNEQ && HONOR_NANS (compare_mode))
11680 if (GET_CODE (op1) == CONST_DOUBLE)
11681 REAL_VALUE_FROM_CONST_DOUBLE (c1, op1);
11683 /* We're going to try to implement comparisons by performing
11684 a subtract, then comparing against zero. Unfortunately,
11685 Inf - Inf is NaN which is not zero, and so if we don't
11686 know that the operand is finite and the comparison
11687 would treat EQ different to UNORDERED, we can't do it. */
11688 if (HONOR_INFINITIES (compare_mode)
11689 && code != GT && code != UNGE
11690 && (GET_CODE (op1) != CONST_DOUBLE || real_isinf (&c1))
11691 /* Constructs of the form (a OP b ? a : b) are safe. */
11692 && ((! rtx_equal_p (op0, false_cond) && ! rtx_equal_p (op1, false_cond))
11693 || (! rtx_equal_p (op0, true_cond)
11694 && ! rtx_equal_p (op1, true_cond))))
11697 /* At this point we know we can use fsel. */
11699 /* Reduce the comparison to a comparison against zero. */
11700 if (! is_against_zero)
11702 temp = gen_reg_rtx (compare_mode);
11703 emit_insn (gen_rtx_SET (VOIDmode, temp,
11704 gen_rtx_MINUS (compare_mode, op0, op1)));
11706 op1 = CONST0_RTX (compare_mode);
11709 /* If we don't care about NaNs we can reduce some of the comparisons
11710 down to faster ones. */
11711 if (! HONOR_NANS (compare_mode))
11717 true_cond = false_cond;
11730 /* Now, reduce everything down to a GE. */
11737 temp = gen_reg_rtx (compare_mode);
11738 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
11743 temp = gen_reg_rtx (compare_mode);
11744 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_ABS (compare_mode, op0)));
11749 temp = gen_reg_rtx (compare_mode);
11750 emit_insn (gen_rtx_SET (VOIDmode, temp,
11751 gen_rtx_NEG (compare_mode,
11752 gen_rtx_ABS (compare_mode, op0))));
11757 /* a UNGE 0 <-> (a GE 0 || -a UNLT 0) */
11758 temp = gen_reg_rtx (result_mode);
11759 emit_insn (gen_rtx_SET (VOIDmode, temp,
11760 gen_rtx_IF_THEN_ELSE (result_mode,
11761 gen_rtx_GE (VOIDmode,
11763 true_cond, false_cond)));
11764 false_cond = true_cond;
11767 temp = gen_reg_rtx (compare_mode);
11768 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
11773 /* a GT 0 <-> (a GE 0 && -a UNLT 0) */
11774 temp = gen_reg_rtx (result_mode);
11775 emit_insn (gen_rtx_SET (VOIDmode, temp,
11776 gen_rtx_IF_THEN_ELSE (result_mode,
11777 gen_rtx_GE (VOIDmode,
11779 true_cond, false_cond)));
11780 true_cond = false_cond;
11783 temp = gen_reg_rtx (compare_mode);
11784 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
11789 gcc_unreachable ();
11792 emit_insn (gen_rtx_SET (VOIDmode, dest,
11793 gen_rtx_IF_THEN_ELSE (result_mode,
11794 gen_rtx_GE (VOIDmode,
11796 true_cond, false_cond)));
11800 /* Same as above, but for ints (isel). */
11803 rs6000_emit_int_cmove (rtx dest, rtx op, rtx true_cond, rtx false_cond)
11805 rtx condition_rtx, cr;
11807 /* All isel implementations thus far are 32-bits. */
11808 if (GET_MODE (rs6000_compare_op0) != SImode)
11811 /* We still have to do the compare, because isel doesn't do a
11812 compare, it just looks at the CRx bits set by a previous compare
11814 condition_rtx = rs6000_generate_compare (GET_CODE (op));
11815 cr = XEXP (condition_rtx, 0);
11817 if (GET_MODE (cr) == CCmode)
11818 emit_insn (gen_isel_signed (dest, condition_rtx,
11819 true_cond, false_cond, cr));
11821 emit_insn (gen_isel_unsigned (dest, condition_rtx,
11822 true_cond, false_cond, cr));
11828 output_isel (rtx *operands)
11830 enum rtx_code code;
11832 code = GET_CODE (operands[1]);
11833 if (code == GE || code == GEU || code == LE || code == LEU || code == NE)
11835 PUT_CODE (operands[1], reverse_condition (code));
11836 return "isel %0,%3,%2,%j1";
11839 return "isel %0,%2,%3,%j1";
11843 rs6000_emit_minmax (rtx dest, enum rtx_code code, rtx op0, rtx op1)
11845 enum machine_mode mode = GET_MODE (op0);
11849 if (code == SMAX || code == SMIN)
11854 if (code == SMAX || code == UMAX)
11855 target = emit_conditional_move (dest, c, op0, op1, mode,
11856 op0, op1, mode, 0);
11858 target = emit_conditional_move (dest, c, op0, op1, mode,
11859 op1, op0, mode, 0);
11860 gcc_assert (target);
11861 if (target != dest)
11862 emit_move_insn (dest, target);
11865 /* Emit instructions to perform a load-reserved/store-conditional operation.
11866 The operation performed is an atomic
11867 (set M (CODE:MODE M OP))
11868 If not NULL, BEFORE is atomically set to M before the operation, and
11869 AFTER is set to M after the operation (that is, (CODE:MODE M OP)).
11870 If SYNC_P then a memory barrier is emitted before the operation.
11871 Either OP or M may be wrapped in a NOT operation. */
11874 rs6000_emit_sync (enum rtx_code code, enum machine_mode mode,
11875 rtx m, rtx op, rtx before_param, rtx after_param,
11878 enum machine_mode used_mode;
11879 rtx the_op, set_before, set_after, set_atomic, cc_scratch, before, after;
11882 HOST_WIDE_INT imask = GET_MODE_MASK (mode);
11883 rtx shift = NULL_RTX;
11886 emit_insn (gen_memory_barrier ());
11888 if (GET_CODE (m) == NOT)
11889 used_m = XEXP (m, 0);
11893 /* If this is smaller than SImode, we'll have to use SImode with
11895 if (mode == QImode || mode == HImode)
11899 if (MEM_ALIGN (used_m) >= 32)
11902 if (BYTES_BIG_ENDIAN)
11903 ishift = GET_MODE_BITSIZE (SImode) - GET_MODE_BITSIZE (mode);
11905 shift = GEN_INT (ishift);
11909 rtx addrSI, aligned_addr;
11910 int shift_mask = mode == QImode ? 0x18 : 0x10;
11912 addrSI = force_reg (SImode, gen_lowpart_common (SImode,
11913 XEXP (used_m, 0)));
11914 shift = gen_reg_rtx (SImode);
11916 emit_insn (gen_rlwinm (shift, addrSI, GEN_INT (3),
11917 GEN_INT (shift_mask)));
11918 emit_insn (gen_xorsi3 (shift, shift, GEN_INT (shift_mask)));
11920 aligned_addr = expand_binop (Pmode, and_optab,
11922 GEN_INT (-4), NULL_RTX,
11923 1, OPTAB_LIB_WIDEN);
11924 used_m = change_address (used_m, SImode, aligned_addr);
11925 set_mem_align (used_m, 32);
11926 /* It's safe to keep the old alias set of USED_M, because
11927 the operation is atomic and only affects the original
11929 if (GET_CODE (m) == NOT)
11930 m = gen_rtx_NOT (SImode, used_m);
11935 if (GET_CODE (op) == NOT)
11937 oldop = lowpart_subreg (SImode, XEXP (op, 0), mode);
11938 oldop = gen_rtx_NOT (SImode, oldop);
11941 oldop = lowpart_subreg (SImode, op, mode);
11947 newop = expand_binop (SImode, and_optab,
11948 oldop, GEN_INT (imask), NULL_RTX,
11949 1, OPTAB_LIB_WIDEN);
11950 emit_insn (gen_ashlsi3 (newop, newop, shift));
11954 newop = expand_binop (SImode, ior_optab,
11955 oldop, GEN_INT (~imask), NULL_RTX,
11956 1, OPTAB_LIB_WIDEN);
11957 emit_insn (gen_rotlsi3 (newop, newop, shift));
11965 newop = expand_binop (SImode, and_optab,
11966 oldop, GEN_INT (imask), NULL_RTX,
11967 1, OPTAB_LIB_WIDEN);
11968 emit_insn (gen_ashlsi3 (newop, newop, shift));
11970 mask = gen_reg_rtx (SImode);
11971 emit_move_insn (mask, GEN_INT (imask));
11972 emit_insn (gen_ashlsi3 (mask, mask, shift));
11975 newop = gen_rtx_PLUS (SImode, m, newop);
11977 newop = gen_rtx_MINUS (SImode, m, newop);
11978 newop = gen_rtx_AND (SImode, newop, mask);
11979 newop = gen_rtx_IOR (SImode, newop,
11980 gen_rtx_AND (SImode,
11981 gen_rtx_NOT (SImode, mask),
11987 gcc_unreachable ();
11990 if (GET_CODE (m) == NOT)
11994 mask = gen_reg_rtx (SImode);
11995 emit_move_insn (mask, GEN_INT (imask));
11996 emit_insn (gen_ashlsi3 (mask, mask, shift));
11998 xorm = gen_rtx_XOR (SImode, used_m, mask);
11999 /* Depending on the value of 'op', the XOR or the operation might
12000 be able to be simplified away. */
12001 newop = simplify_gen_binary (code, SImode, xorm, newop);
12004 used_mode = SImode;
12005 before = gen_reg_rtx (used_mode);
12006 after = gen_reg_rtx (used_mode);
12011 before = before_param;
12012 after = after_param;
12014 if (before == NULL_RTX)
12015 before = gen_reg_rtx (used_mode);
12016 if (after == NULL_RTX)
12017 after = gen_reg_rtx (used_mode);
12020 if ((code == PLUS || code == MINUS || GET_CODE (m) == NOT)
12021 && used_mode != mode)
12022 the_op = op; /* Computed above. */
12023 else if (GET_CODE (op) == NOT && GET_CODE (m) != NOT)
12024 the_op = gen_rtx_fmt_ee (code, used_mode, op, m);
12026 the_op = gen_rtx_fmt_ee (code, used_mode, m, op);
12028 set_after = gen_rtx_SET (VOIDmode, after, the_op);
12029 set_before = gen_rtx_SET (VOIDmode, before, used_m);
12030 set_atomic = gen_rtx_SET (VOIDmode, used_m,
12031 gen_rtx_UNSPEC (used_mode,
12032 gen_rtvec (1, the_op),
12034 cc_scratch = gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (CCmode));
12036 if ((code == PLUS || code == MINUS) && used_mode != mode)
12037 vec = gen_rtvec (5, set_after, set_before, set_atomic, cc_scratch,
12038 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (SImode)));
12040 vec = gen_rtvec (4, set_after, set_before, set_atomic, cc_scratch);
12041 emit_insn (gen_rtx_PARALLEL (VOIDmode, vec));
12043 /* Shift and mask the return values properly. */
12044 if (used_mode != mode && before_param)
12046 emit_insn (gen_lshrsi3 (before, before, shift));
12047 convert_move (before_param, before, 1);
12050 if (used_mode != mode && after_param)
12052 emit_insn (gen_lshrsi3 (after, after, shift));
12053 convert_move (after_param, after, 1);
12056 /* The previous sequence will end with a branch that's dependent on
12057 the conditional store, so placing an isync will ensure that no
12058 other instructions (especially, no load or store instructions)
12059 can start before the atomic operation completes. */
12061 emit_insn (gen_isync ());
12064 /* A subroutine of the atomic operation splitters. Jump to LABEL if
12065 COND is true. Mark the jump as unlikely to be taken. */
12068 emit_unlikely_jump (rtx cond, rtx label)
12070 rtx very_unlikely = GEN_INT (REG_BR_PROB_BASE / 100 - 1);
12073 x = gen_rtx_IF_THEN_ELSE (VOIDmode, cond, label, pc_rtx);
12074 x = emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx, x));
12075 REG_NOTES (x) = gen_rtx_EXPR_LIST (REG_BR_PROB, very_unlikely, NULL_RTX);
12078 /* A subroutine of the atomic operation splitters. Emit a load-locked
12079 instruction in MODE. */
12082 emit_load_locked (enum machine_mode mode, rtx reg, rtx mem)
12084 rtx (*fn) (rtx, rtx) = NULL;
12085 if (mode == SImode)
12086 fn = gen_load_locked_si;
12087 else if (mode == DImode)
12088 fn = gen_load_locked_di;
12089 emit_insn (fn (reg, mem));
12092 /* A subroutine of the atomic operation splitters. Emit a store-conditional
12093 instruction in MODE. */
12096 emit_store_conditional (enum machine_mode mode, rtx res, rtx mem, rtx val)
12098 rtx (*fn) (rtx, rtx, rtx) = NULL;
12099 if (mode == SImode)
12100 fn = gen_store_conditional_si;
12101 else if (mode == DImode)
12102 fn = gen_store_conditional_di;
12104 /* Emit sync before stwcx. to address PPC405 Erratum. */
12105 if (PPC405_ERRATUM77)
12106 emit_insn (gen_memory_barrier ());
12108 emit_insn (fn (res, mem, val));
12111 /* Expand an an atomic fetch-and-operate pattern. CODE is the binary operation
12112 to perform. MEM is the memory on which to operate. VAL is the second
12113 operand of the binary operator. BEFORE and AFTER are optional locations to
12114 return the value of MEM either before of after the operation. SCRATCH is
12115 a scratch register. */
12118 rs6000_split_atomic_op (enum rtx_code code, rtx mem, rtx val,
12119 rtx before, rtx after, rtx scratch)
12121 enum machine_mode mode = GET_MODE (mem);
12122 rtx label, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);
12124 emit_insn (gen_memory_barrier ());
12126 label = gen_label_rtx ();
12127 emit_label (label);
12128 label = gen_rtx_LABEL_REF (VOIDmode, label);
12130 if (before == NULL_RTX)
12132 emit_load_locked (mode, before, mem);
12135 x = gen_rtx_AND (mode, gen_rtx_NOT (mode, before), val);
12136 else if (code == AND)
12137 x = gen_rtx_UNSPEC (mode, gen_rtvec (2, before, val), UNSPEC_AND);
12139 x = gen_rtx_fmt_ee (code, mode, before, val);
12141 if (after != NULL_RTX)
12142 emit_insn (gen_rtx_SET (VOIDmode, after, copy_rtx (x)));
12143 emit_insn (gen_rtx_SET (VOIDmode, scratch, x));
12145 emit_store_conditional (mode, cond, mem, scratch);
12147 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
12148 emit_unlikely_jump (x, label);
12150 emit_insn (gen_isync ());
12153 /* Expand an atomic compare and swap operation. MEM is the memory on which
12154 to operate. OLDVAL is the old value to be compared. NEWVAL is the new
12155 value to be stored. SCRATCH is a scratch GPR. */
12158 rs6000_split_compare_and_swap (rtx retval, rtx mem, rtx oldval, rtx newval,
12161 enum machine_mode mode = GET_MODE (mem);
12162 rtx label1, label2, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);
12164 emit_insn (gen_memory_barrier ());
12166 label1 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
12167 label2 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
12168 emit_label (XEXP (label1, 0));
12170 emit_load_locked (mode, retval, mem);
12172 x = gen_rtx_COMPARE (CCmode, retval, oldval);
12173 emit_insn (gen_rtx_SET (VOIDmode, cond, x));
12175 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
12176 emit_unlikely_jump (x, label2);
12178 emit_move_insn (scratch, newval);
12179 emit_store_conditional (mode, cond, mem, scratch);
12181 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
12182 emit_unlikely_jump (x, label1);
12184 emit_insn (gen_isync ());
12185 emit_label (XEXP (label2, 0));
12188 /* Expand an atomic test and set operation. MEM is the memory on which
12189 to operate. VAL is the value set. SCRATCH is a scratch GPR. */
12192 rs6000_split_lock_test_and_set (rtx retval, rtx mem, rtx val, rtx scratch)
12194 enum machine_mode mode = GET_MODE (mem);
12195 rtx label, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);
12197 emit_insn (gen_memory_barrier ());
12199 label = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
12200 emit_label (XEXP (label, 0));
12202 emit_load_locked (mode, retval, mem);
12203 emit_move_insn (scratch, val);
12204 emit_store_conditional (mode, cond, mem, scratch);
12206 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
12207 emit_unlikely_jump (x, label);
12209 emit_insn (gen_isync ());
12212 /* Emit instructions to move SRC to DST. Called by splitters for
12213 multi-register moves. It will emit at most one instruction for
12214 each register that is accessed; that is, it won't emit li/lis pairs
12215 (or equivalent for 64-bit code). One of SRC or DST must be a hard
12219 rs6000_split_multireg_move (rtx dst, rtx src)
12221 /* The register number of the first register being moved. */
12223 /* The mode that is to be moved. */
12224 enum machine_mode mode;
12225 /* The mode that the move is being done in, and its size. */
12226 enum machine_mode reg_mode;
12228 /* The number of registers that will be moved. */
12231 reg = REG_P (dst) ? REGNO (dst) : REGNO (src);
12232 mode = GET_MODE (dst);
12233 nregs = hard_regno_nregs[reg][mode];
12234 if (FP_REGNO_P (reg))
12236 else if (ALTIVEC_REGNO_P (reg))
12237 reg_mode = V16QImode;
12239 reg_mode = word_mode;
12240 reg_mode_size = GET_MODE_SIZE (reg_mode);
12242 gcc_assert (reg_mode_size * nregs == GET_MODE_SIZE (mode));
12244 if (REG_P (src) && REG_P (dst) && (REGNO (src) < REGNO (dst)))
12246 /* Move register range backwards, if we might have destructive
12249 for (i = nregs - 1; i >= 0; i--)
12250 emit_insn (gen_rtx_SET (VOIDmode,
12251 simplify_gen_subreg (reg_mode, dst, mode,
12252 i * reg_mode_size),
12253 simplify_gen_subreg (reg_mode, src, mode,
12254 i * reg_mode_size)));
12260 bool used_update = false;
12262 if (MEM_P (src) && INT_REGNO_P (reg))
12266 if (GET_CODE (XEXP (src, 0)) == PRE_INC
12267 || GET_CODE (XEXP (src, 0)) == PRE_DEC)
12270 breg = XEXP (XEXP (src, 0), 0);
12271 delta_rtx = (GET_CODE (XEXP (src, 0)) == PRE_INC
12272 ? GEN_INT (GET_MODE_SIZE (GET_MODE (src)))
12273 : GEN_INT (-GET_MODE_SIZE (GET_MODE (src))));
12274 emit_insn (TARGET_32BIT
12275 ? gen_addsi3 (breg, breg, delta_rtx)
12276 : gen_adddi3 (breg, breg, delta_rtx));
12277 src = gen_rtx_MEM (mode, breg);
12279 else if (! offsettable_memref_p (src))
12281 rtx newsrc, basereg;
12282 basereg = gen_rtx_REG (Pmode, reg);
12283 emit_insn (gen_rtx_SET (VOIDmode, basereg, XEXP (src, 0)));
12284 newsrc = gen_rtx_MEM (GET_MODE (src), basereg);
12285 MEM_COPY_ATTRIBUTES (newsrc, src);
12289 breg = XEXP (src, 0);
12290 if (GET_CODE (breg) == PLUS || GET_CODE (breg) == LO_SUM)
12291 breg = XEXP (breg, 0);
12293 /* If the base register we are using to address memory is
12294 also a destination reg, then change that register last. */
12296 && REGNO (breg) >= REGNO (dst)
12297 && REGNO (breg) < REGNO (dst) + nregs)
12298 j = REGNO (breg) - REGNO (dst);
12301 if (GET_CODE (dst) == MEM && INT_REGNO_P (reg))
12305 if (GET_CODE (XEXP (dst, 0)) == PRE_INC
12306 || GET_CODE (XEXP (dst, 0)) == PRE_DEC)
12309 breg = XEXP (XEXP (dst, 0), 0);
12310 delta_rtx = (GET_CODE (XEXP (dst, 0)) == PRE_INC
12311 ? GEN_INT (GET_MODE_SIZE (GET_MODE (dst)))
12312 : GEN_INT (-GET_MODE_SIZE (GET_MODE (dst))));
12314 /* We have to update the breg before doing the store.
12315 Use store with update, if available. */
12319 rtx nsrc = simplify_gen_subreg (reg_mode, src, mode, 0);
12320 emit_insn (TARGET_32BIT
12321 ? (TARGET_POWERPC64
12322 ? gen_movdi_si_update (breg, breg, delta_rtx, nsrc)
12323 : gen_movsi_update (breg, breg, delta_rtx, nsrc))
12324 : gen_movdi_di_update (breg, breg, delta_rtx, nsrc));
12325 used_update = true;
12328 emit_insn (TARGET_32BIT
12329 ? gen_addsi3 (breg, breg, delta_rtx)
12330 : gen_adddi3 (breg, breg, delta_rtx));
12331 dst = gen_rtx_MEM (mode, breg);
12334 gcc_assert (offsettable_memref_p (dst));
12337 for (i = 0; i < nregs; i++)
12339 /* Calculate index to next subword. */
12344 /* If compiler already emitted move of first word by
12345 store with update, no need to do anything. */
12346 if (j == 0 && used_update)
12349 emit_insn (gen_rtx_SET (VOIDmode,
12350 simplify_gen_subreg (reg_mode, dst, mode,
12351 j * reg_mode_size),
12352 simplify_gen_subreg (reg_mode, src, mode,
12353 j * reg_mode_size)));
12359 /* This page contains routines that are used to determine what the
12360 function prologue and epilogue code will do and write them out. */
12362 /* Return the first fixed-point register that is required to be
12363 saved. 32 if none. */
12366 first_reg_to_save (void)
12370 /* Find lowest numbered live register. */
12371 for (first_reg = 13; first_reg <= 31; first_reg++)
12372 if (regs_ever_live[first_reg]
12373 && (! call_used_regs[first_reg]
12374 || (first_reg == RS6000_PIC_OFFSET_TABLE_REGNUM
12375 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
12376 || (DEFAULT_ABI == ABI_DARWIN && flag_pic)
12377 || (TARGET_TOC && TARGET_MINIMAL_TOC)))))
12382 && current_function_uses_pic_offset_table
12383 && first_reg > RS6000_PIC_OFFSET_TABLE_REGNUM)
12384 return RS6000_PIC_OFFSET_TABLE_REGNUM;
12390 /* Similar, for FP regs. */
12393 first_fp_reg_to_save (void)
12397 /* Find lowest numbered live register. */
12398 for (first_reg = 14 + 32; first_reg <= 63; first_reg++)
12399 if (regs_ever_live[first_reg])
12405 /* Similar, for AltiVec regs. */
12408 first_altivec_reg_to_save (void)
12412 /* Stack frame remains as is unless we are in AltiVec ABI. */
12413 if (! TARGET_ALTIVEC_ABI)
12414 return LAST_ALTIVEC_REGNO + 1;
12416 /* Find lowest numbered live register. */
12417 for (i = FIRST_ALTIVEC_REGNO + 20; i <= LAST_ALTIVEC_REGNO; ++i)
12418 if (regs_ever_live[i])
12424 /* Return a 32-bit mask of the AltiVec registers we need to set in
12425 VRSAVE. Bit n of the return value is 1 if Vn is live. The MSB in
12426 the 32-bit word is 0. */
12428 static unsigned int
12429 compute_vrsave_mask (void)
12431 unsigned int i, mask = 0;
12433 /* First, find out if we use _any_ altivec registers. */
12434 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
12435 if (regs_ever_live[i])
12436 mask |= ALTIVEC_REG_BIT (i);
12441 /* Next, remove the argument registers from the set. These must
12442 be in the VRSAVE mask set by the caller, so we don't need to add
12443 them in again. More importantly, the mask we compute here is
12444 used to generate CLOBBERs in the set_vrsave insn, and we do not
12445 wish the argument registers to die. */
12446 for (i = cfun->args_info.vregno - 1; i >= ALTIVEC_ARG_MIN_REG; --i)
12447 mask &= ~ALTIVEC_REG_BIT (i);
12449 /* Similarly, remove the return value from the set. */
12452 diddle_return_value (is_altivec_return_reg, &yes);
12454 mask &= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN);
12460 /* For a very restricted set of circumstances, we can cut down the
12461 size of prologues/epilogues by calling our own save/restore-the-world
12465 compute_save_world_info (rs6000_stack_t *info_ptr)
12467 info_ptr->world_save_p = 1;
12468 info_ptr->world_save_p
12469 = (WORLD_SAVE_P (info_ptr)
12470 && DEFAULT_ABI == ABI_DARWIN
12471 && ! (current_function_calls_setjmp && flag_exceptions)
12472 && info_ptr->first_fp_reg_save == FIRST_SAVED_FP_REGNO
12473 && info_ptr->first_gp_reg_save == FIRST_SAVED_GP_REGNO
12474 && info_ptr->first_altivec_reg_save == FIRST_SAVED_ALTIVEC_REGNO
12475 && info_ptr->cr_save_p);
12477 /* This will not work in conjunction with sibcalls. Make sure there
12478 are none. (This check is expensive, but seldom executed.) */
12479 if (WORLD_SAVE_P (info_ptr))
12482 for ( insn = get_last_insn_anywhere (); insn; insn = PREV_INSN (insn))
12483 if ( GET_CODE (insn) == CALL_INSN
12484 && SIBLING_CALL_P (insn))
12486 info_ptr->world_save_p = 0;
12491 if (WORLD_SAVE_P (info_ptr))
12493 /* Even if we're not touching VRsave, make sure there's room on the
12494 stack for it, if it looks like we're calling SAVE_WORLD, which
12495 will attempt to save it. */
12496 info_ptr->vrsave_size = 4;
12498 /* "Save" the VRsave register too if we're saving the world. */
12499 if (info_ptr->vrsave_mask == 0)
12500 info_ptr->vrsave_mask = compute_vrsave_mask ();
12502 /* Because the Darwin register save/restore routines only handle
12503 F14 .. F31 and V20 .. V31 as per the ABI, perform a consistency
12505 gcc_assert (info_ptr->first_fp_reg_save >= FIRST_SAVED_FP_REGNO
12506 && (info_ptr->first_altivec_reg_save
12507 >= FIRST_SAVED_ALTIVEC_REGNO));
12514 is_altivec_return_reg (rtx reg, void *xyes)
12516 bool *yes = (bool *) xyes;
12517 if (REGNO (reg) == ALTIVEC_ARG_RETURN)
12522 /* Calculate the stack information for the current function. This is
12523 complicated by having two separate calling sequences, the AIX calling
12524 sequence and the V.4 calling sequence.
12526 AIX (and Darwin/Mac OS X) stack frames look like:
12528 SP----> +---------------------------------------+
12529 | back chain to caller | 0 0
12530 +---------------------------------------+
12531 | saved CR | 4 8 (8-11)
12532 +---------------------------------------+
12534 +---------------------------------------+
12535 | reserved for compilers | 12 24
12536 +---------------------------------------+
12537 | reserved for binders | 16 32
12538 +---------------------------------------+
12539 | saved TOC pointer | 20 40
12540 +---------------------------------------+
12541 | Parameter save area (P) | 24 48
12542 +---------------------------------------+
12543 | Alloca space (A) | 24+P etc.
12544 +---------------------------------------+
12545 | Local variable space (L) | 24+P+A
12546 +---------------------------------------+
12547 | Float/int conversion temporary (X) | 24+P+A+L
12548 +---------------------------------------+
12549 | Save area for AltiVec registers (W) | 24+P+A+L+X
12550 +---------------------------------------+
12551 | AltiVec alignment padding (Y) | 24+P+A+L+X+W
12552 +---------------------------------------+
12553 | Save area for VRSAVE register (Z) | 24+P+A+L+X+W+Y
12554 +---------------------------------------+
12555 | Save area for GP registers (G) | 24+P+A+X+L+X+W+Y+Z
12556 +---------------------------------------+
12557 | Save area for FP registers (F) | 24+P+A+X+L+X+W+Y+Z+G
12558 +---------------------------------------+
12559 old SP->| back chain to caller's caller |
12560 +---------------------------------------+
12562 The required alignment for AIX configurations is two words (i.e., 8
12566 V.4 stack frames look like:
12568 SP----> +---------------------------------------+
12569 | back chain to caller | 0
12570 +---------------------------------------+
12571 | caller's saved LR | 4
12572 +---------------------------------------+
12573 | Parameter save area (P) | 8
12574 +---------------------------------------+
12575 | Alloca space (A) | 8+P
12576 +---------------------------------------+
12577 | Varargs save area (V) | 8+P+A
12578 +---------------------------------------+
12579 | Local variable space (L) | 8+P+A+V
12580 +---------------------------------------+
12581 | Float/int conversion temporary (X) | 8+P+A+V+L
12582 +---------------------------------------+
12583 | Save area for AltiVec registers (W) | 8+P+A+V+L+X
12584 +---------------------------------------+
12585 | AltiVec alignment padding (Y) | 8+P+A+V+L+X+W
12586 +---------------------------------------+
12587 | Save area for VRSAVE register (Z) | 8+P+A+V+L+X+W+Y
12588 +---------------------------------------+
12589 | SPE: area for 64-bit GP registers |
12590 +---------------------------------------+
12591 | SPE alignment padding |
12592 +---------------------------------------+
12593 | saved CR (C) | 8+P+A+V+L+X+W+Y+Z
12594 +---------------------------------------+
12595 | Save area for GP registers (G) | 8+P+A+V+L+X+W+Y+Z+C
12596 +---------------------------------------+
12597 | Save area for FP registers (F) | 8+P+A+V+L+X+W+Y+Z+C+G
12598 +---------------------------------------+
12599 old SP->| back chain to caller's caller |
12600 +---------------------------------------+
12602 The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
12603 given. (But note below and in sysv4.h that we require only 8 and
12604 may round up the size of our stack frame anyways. The historical
12605 reason is early versions of powerpc-linux which didn't properly
12606 align the stack at program startup. A happy side-effect is that
12607 -mno-eabi libraries can be used with -meabi programs.)
12609 The EABI configuration defaults to the V.4 layout. However,
12610 the stack alignment requirements may differ. If -mno-eabi is not
12611 given, the required stack alignment is 8 bytes; if -mno-eabi is
12612 given, the required alignment is 16 bytes. (But see V.4 comment
12615 #ifndef ABI_STACK_BOUNDARY
12616 #define ABI_STACK_BOUNDARY STACK_BOUNDARY
12619 static rs6000_stack_t *
12620 rs6000_stack_info (void)
12622 static rs6000_stack_t info, zero_info;
12623 rs6000_stack_t *info_ptr = &info;
12624 int reg_size = TARGET_32BIT ? 4 : 8;
12627 HOST_WIDE_INT non_fixed_size;
12629 /* Zero all fields portably. */
12634 /* Cache value so we don't rescan instruction chain over and over. */
12635 if (cfun->machine->insn_chain_scanned_p == 0)
12636 cfun->machine->insn_chain_scanned_p
12637 = spe_func_has_64bit_regs_p () + 1;
12638 info_ptr->spe_64bit_regs_used = cfun->machine->insn_chain_scanned_p - 1;
12641 /* Select which calling sequence. */
12642 info_ptr->abi = DEFAULT_ABI;
12644 /* Calculate which registers need to be saved & save area size. */
12645 info_ptr->first_gp_reg_save = first_reg_to_save ();
12646 /* Assume that we will have to save RS6000_PIC_OFFSET_TABLE_REGNUM,
12647 even if it currently looks like we won't. */
12648 if (((TARGET_TOC && TARGET_MINIMAL_TOC)
12649 || (flag_pic == 1 && DEFAULT_ABI == ABI_V4)
12650 || (flag_pic && DEFAULT_ABI == ABI_DARWIN))
12651 && info_ptr->first_gp_reg_save > RS6000_PIC_OFFSET_TABLE_REGNUM)
12652 info_ptr->gp_size = reg_size * (32 - RS6000_PIC_OFFSET_TABLE_REGNUM);
12654 info_ptr->gp_size = reg_size * (32 - info_ptr->first_gp_reg_save);
12656 /* For the SPE, we have an additional upper 32-bits on each GPR.
12657 Ideally we should save the entire 64-bits only when the upper
12658 half is used in SIMD instructions. Since we only record
12659 registers live (not the size they are used in), this proves
12660 difficult because we'd have to traverse the instruction chain at
12661 the right time, taking reload into account. This is a real pain,
12662 so we opt to save the GPRs in 64-bits always if but one register
12663 gets used in 64-bits. Otherwise, all the registers in the frame
12664 get saved in 32-bits.
12666 So... since when we save all GPRs (except the SP) in 64-bits, the
12667 traditional GP save area will be empty. */
12668 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
12669 info_ptr->gp_size = 0;
12671 info_ptr->first_fp_reg_save = first_fp_reg_to_save ();
12672 info_ptr->fp_size = 8 * (64 - info_ptr->first_fp_reg_save);
12674 info_ptr->first_altivec_reg_save = first_altivec_reg_to_save ();
12675 info_ptr->altivec_size = 16 * (LAST_ALTIVEC_REGNO + 1
12676 - info_ptr->first_altivec_reg_save);
12678 /* Does this function call anything? */
12679 info_ptr->calls_p = (! current_function_is_leaf
12680 || cfun->machine->ra_needs_full_frame);
12682 /* Determine if we need to save the link register. */
12683 if (rs6000_ra_ever_killed ()
12684 || (DEFAULT_ABI == ABI_AIX
12685 && current_function_profile
12686 && !TARGET_PROFILE_KERNEL)
12687 #ifdef TARGET_RELOCATABLE
12688 || (TARGET_RELOCATABLE && (get_pool_size () != 0))
12690 || (info_ptr->first_fp_reg_save != 64
12691 && !FP_SAVE_INLINE (info_ptr->first_fp_reg_save))
12692 || info_ptr->first_altivec_reg_save <= LAST_ALTIVEC_REGNO
12693 || (DEFAULT_ABI == ABI_V4 && current_function_calls_alloca)
12694 || info_ptr->calls_p)
12696 info_ptr->lr_save_p = 1;
12697 regs_ever_live[LINK_REGISTER_REGNUM] = 1;
12700 /* Determine if we need to save the condition code registers. */
12701 if (regs_ever_live[CR2_REGNO]
12702 || regs_ever_live[CR3_REGNO]
12703 || regs_ever_live[CR4_REGNO])
12705 info_ptr->cr_save_p = 1;
12706 if (DEFAULT_ABI == ABI_V4)
12707 info_ptr->cr_size = reg_size;
12710 /* If the current function calls __builtin_eh_return, then we need
12711 to allocate stack space for registers that will hold data for
12712 the exception handler. */
12713 if (current_function_calls_eh_return)
12716 for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM; ++i)
12719 /* SPE saves EH registers in 64-bits. */
12720 ehrd_size = i * (TARGET_SPE_ABI
12721 && info_ptr->spe_64bit_regs_used != 0
12722 ? UNITS_PER_SPE_WORD : UNITS_PER_WORD);
12727 /* Determine various sizes. */
12728 info_ptr->reg_size = reg_size;
12729 info_ptr->fixed_size = RS6000_SAVE_AREA;
12730 info_ptr->vars_size = RS6000_ALIGN (get_frame_size (), 8);
12731 info_ptr->parm_size = RS6000_ALIGN (current_function_outgoing_args_size,
12732 TARGET_ALTIVEC ? 16 : 8);
12733 if (FRAME_GROWS_DOWNWARD)
12734 info_ptr->vars_size
12735 += RS6000_ALIGN (info_ptr->fixed_size + info_ptr->vars_size
12736 + info_ptr->parm_size,
12737 ABI_STACK_BOUNDARY / BITS_PER_UNIT)
12738 - (info_ptr->fixed_size + info_ptr->vars_size
12739 + info_ptr->parm_size);
12741 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
12742 info_ptr->spe_gp_size = 8 * (32 - info_ptr->first_gp_reg_save);
12744 info_ptr->spe_gp_size = 0;
12746 if (TARGET_ALTIVEC_ABI)
12747 info_ptr->vrsave_mask = compute_vrsave_mask ();
12749 info_ptr->vrsave_mask = 0;
12751 if (TARGET_ALTIVEC_VRSAVE && info_ptr->vrsave_mask)
12752 info_ptr->vrsave_size = 4;
12754 info_ptr->vrsave_size = 0;
12756 compute_save_world_info (info_ptr);
12758 /* Calculate the offsets. */
12759 switch (DEFAULT_ABI)
12763 gcc_unreachable ();
12767 info_ptr->fp_save_offset = - info_ptr->fp_size;
12768 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
12770 if (TARGET_ALTIVEC_ABI)
12772 info_ptr->vrsave_save_offset
12773 = info_ptr->gp_save_offset - info_ptr->vrsave_size;
12775 /* Align stack so vector save area is on a quadword boundary. */
12776 if (info_ptr->altivec_size != 0)
12777 info_ptr->altivec_padding_size
12778 = 16 - (-info_ptr->vrsave_save_offset % 16);
12780 info_ptr->altivec_padding_size = 0;
12782 info_ptr->altivec_save_offset
12783 = info_ptr->vrsave_save_offset
12784 - info_ptr->altivec_padding_size
12785 - info_ptr->altivec_size;
12787 /* Adjust for AltiVec case. */
12788 info_ptr->ehrd_offset = info_ptr->altivec_save_offset - ehrd_size;
12791 info_ptr->ehrd_offset = info_ptr->gp_save_offset - ehrd_size;
12792 info_ptr->cr_save_offset = reg_size; /* first word when 64-bit. */
12793 info_ptr->lr_save_offset = 2*reg_size;
12797 info_ptr->fp_save_offset = - info_ptr->fp_size;
12798 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
12799 info_ptr->cr_save_offset = info_ptr->gp_save_offset - info_ptr->cr_size;
12801 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
12803 /* Align stack so SPE GPR save area is aligned on a
12804 double-word boundary. */
12805 if (info_ptr->spe_gp_size != 0)
12806 info_ptr->spe_padding_size
12807 = 8 - (-info_ptr->cr_save_offset % 8);
12809 info_ptr->spe_padding_size = 0;
12811 info_ptr->spe_gp_save_offset
12812 = info_ptr->cr_save_offset
12813 - info_ptr->spe_padding_size
12814 - info_ptr->spe_gp_size;
12816 /* Adjust for SPE case. */
12817 info_ptr->toc_save_offset
12818 = info_ptr->spe_gp_save_offset - info_ptr->toc_size;
12820 else if (TARGET_ALTIVEC_ABI)
12822 info_ptr->vrsave_save_offset
12823 = info_ptr->cr_save_offset - info_ptr->vrsave_size;
12825 /* Align stack so vector save area is on a quadword boundary. */
12826 if (info_ptr->altivec_size != 0)
12827 info_ptr->altivec_padding_size
12828 = 16 - (-info_ptr->vrsave_save_offset % 16);
12830 info_ptr->altivec_padding_size = 0;
12832 info_ptr->altivec_save_offset
12833 = info_ptr->vrsave_save_offset
12834 - info_ptr->altivec_padding_size
12835 - info_ptr->altivec_size;
12837 /* Adjust for AltiVec case. */
12838 info_ptr->toc_save_offset
12839 = info_ptr->altivec_save_offset - info_ptr->toc_size;
12842 info_ptr->toc_save_offset = info_ptr->cr_save_offset - info_ptr->toc_size;
12843 info_ptr->ehrd_offset = info_ptr->toc_save_offset - ehrd_size;
12844 info_ptr->lr_save_offset = reg_size;
12848 save_align = (TARGET_ALTIVEC_ABI || DEFAULT_ABI == ABI_DARWIN) ? 16 : 8;
12849 info_ptr->save_size = RS6000_ALIGN (info_ptr->fp_size
12850 + info_ptr->gp_size
12851 + info_ptr->altivec_size
12852 + info_ptr->altivec_padding_size
12853 + info_ptr->spe_gp_size
12854 + info_ptr->spe_padding_size
12856 + info_ptr->cr_size
12857 + info_ptr->lr_size
12858 + info_ptr->vrsave_size
12859 + info_ptr->toc_size,
12862 non_fixed_size = (info_ptr->vars_size
12863 + info_ptr->parm_size
12864 + info_ptr->save_size);
12866 info_ptr->total_size = RS6000_ALIGN (non_fixed_size + info_ptr->fixed_size,
12867 ABI_STACK_BOUNDARY / BITS_PER_UNIT);
12869 /* Determine if we need to allocate any stack frame:
12871 For AIX we need to push the stack if a frame pointer is needed
12872 (because the stack might be dynamically adjusted), if we are
12873 debugging, if we make calls, or if the sum of fp_save, gp_save,
12874 and local variables are more than the space needed to save all
12875 non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
12876 + 18*8 = 288 (GPR13 reserved).
12878 For V.4 we don't have the stack cushion that AIX uses, but assume
12879 that the debugger can handle stackless frames. */
12881 if (info_ptr->calls_p)
12882 info_ptr->push_p = 1;
12884 else if (DEFAULT_ABI == ABI_V4)
12885 info_ptr->push_p = non_fixed_size != 0;
12887 else if (frame_pointer_needed)
12888 info_ptr->push_p = 1;
12890 else if (TARGET_XCOFF && write_symbols != NO_DEBUG)
12891 info_ptr->push_p = 1;
12894 info_ptr->push_p = non_fixed_size > (TARGET_32BIT ? 220 : 288);
12896 /* Zero offsets if we're not saving those registers. */
12897 if (info_ptr->fp_size == 0)
12898 info_ptr->fp_save_offset = 0;
12900 if (info_ptr->gp_size == 0)
12901 info_ptr->gp_save_offset = 0;
12903 if (! TARGET_ALTIVEC_ABI || info_ptr->altivec_size == 0)
12904 info_ptr->altivec_save_offset = 0;
12906 if (! TARGET_ALTIVEC_ABI || info_ptr->vrsave_mask == 0)
12907 info_ptr->vrsave_save_offset = 0;
12909 if (! TARGET_SPE_ABI
12910 || info_ptr->spe_64bit_regs_used == 0
12911 || info_ptr->spe_gp_size == 0)
12912 info_ptr->spe_gp_save_offset = 0;
12914 if (! info_ptr->lr_save_p)
12915 info_ptr->lr_save_offset = 0;
12917 if (! info_ptr->cr_save_p)
12918 info_ptr->cr_save_offset = 0;
12920 if (! info_ptr->toc_save_p)
12921 info_ptr->toc_save_offset = 0;
12926 /* Return true if the current function uses any GPRs in 64-bit SIMD
12930 spe_func_has_64bit_regs_p (void)
12934 /* Functions that save and restore all the call-saved registers will
12935 need to save/restore the registers in 64-bits. */
12936 if (current_function_calls_eh_return
12937 || current_function_calls_setjmp
12938 || current_function_has_nonlocal_goto)
12941 insns = get_insns ();
12943 for (insn = NEXT_INSN (insns); insn != NULL_RTX; insn = NEXT_INSN (insn))
12949 /* FIXME: This should be implemented with attributes...
12951 (set_attr "spe64" "true")....then,
12952 if (get_spe64(insn)) return true;
12954 It's the only reliable way to do the stuff below. */
12956 i = PATTERN (insn);
12957 if (GET_CODE (i) == SET)
12959 enum machine_mode mode = GET_MODE (SET_SRC (i));
12961 if (SPE_VECTOR_MODE (mode))
12963 if (TARGET_E500_DOUBLE && mode == DFmode)
12973 debug_stack_info (rs6000_stack_t *info)
12975 const char *abi_string;
12978 info = rs6000_stack_info ();
12980 fprintf (stderr, "\nStack information for function %s:\n",
12981 ((current_function_decl && DECL_NAME (current_function_decl))
12982 ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
12987 default: abi_string = "Unknown"; break;
12988 case ABI_NONE: abi_string = "NONE"; break;
12989 case ABI_AIX: abi_string = "AIX"; break;
12990 case ABI_DARWIN: abi_string = "Darwin"; break;
12991 case ABI_V4: abi_string = "V.4"; break;
12994 fprintf (stderr, "\tABI = %5s\n", abi_string);
12996 if (TARGET_ALTIVEC_ABI)
12997 fprintf (stderr, "\tALTIVEC ABI extensions enabled.\n");
12999 if (TARGET_SPE_ABI)
13000 fprintf (stderr, "\tSPE ABI extensions enabled.\n");
13002 if (info->first_gp_reg_save != 32)
13003 fprintf (stderr, "\tfirst_gp_reg_save = %5d\n", info->first_gp_reg_save);
13005 if (info->first_fp_reg_save != 64)
13006 fprintf (stderr, "\tfirst_fp_reg_save = %5d\n", info->first_fp_reg_save);
13008 if (info->first_altivec_reg_save <= LAST_ALTIVEC_REGNO)
13009 fprintf (stderr, "\tfirst_altivec_reg_save = %5d\n",
13010 info->first_altivec_reg_save);
13012 if (info->lr_save_p)
13013 fprintf (stderr, "\tlr_save_p = %5d\n", info->lr_save_p);
13015 if (info->cr_save_p)
13016 fprintf (stderr, "\tcr_save_p = %5d\n", info->cr_save_p);
13018 if (info->toc_save_p)
13019 fprintf (stderr, "\ttoc_save_p = %5d\n", info->toc_save_p);
13021 if (info->vrsave_mask)
13022 fprintf (stderr, "\tvrsave_mask = 0x%x\n", info->vrsave_mask);
13025 fprintf (stderr, "\tpush_p = %5d\n", info->push_p);
13028 fprintf (stderr, "\tcalls_p = %5d\n", info->calls_p);
13030 if (info->gp_save_offset)
13031 fprintf (stderr, "\tgp_save_offset = %5d\n", info->gp_save_offset);
13033 if (info->fp_save_offset)
13034 fprintf (stderr, "\tfp_save_offset = %5d\n", info->fp_save_offset);
13036 if (info->altivec_save_offset)
13037 fprintf (stderr, "\taltivec_save_offset = %5d\n",
13038 info->altivec_save_offset);
13040 if (info->spe_gp_save_offset)
13041 fprintf (stderr, "\tspe_gp_save_offset = %5d\n",
13042 info->spe_gp_save_offset);
13044 if (info->vrsave_save_offset)
13045 fprintf (stderr, "\tvrsave_save_offset = %5d\n",
13046 info->vrsave_save_offset);
13048 if (info->lr_save_offset)
13049 fprintf (stderr, "\tlr_save_offset = %5d\n", info->lr_save_offset);
13051 if (info->cr_save_offset)
13052 fprintf (stderr, "\tcr_save_offset = %5d\n", info->cr_save_offset);
13054 if (info->toc_save_offset)
13055 fprintf (stderr, "\ttoc_save_offset = %5d\n", info->toc_save_offset);
13057 if (info->varargs_save_offset)
13058 fprintf (stderr, "\tvarargs_save_offset = %5d\n", info->varargs_save_offset);
13060 if (info->total_size)
13061 fprintf (stderr, "\ttotal_size = "HOST_WIDE_INT_PRINT_DEC"\n",
13064 if (info->vars_size)
13065 fprintf (stderr, "\tvars_size = "HOST_WIDE_INT_PRINT_DEC"\n",
13068 if (info->parm_size)
13069 fprintf (stderr, "\tparm_size = %5d\n", info->parm_size);
13071 if (info->fixed_size)
13072 fprintf (stderr, "\tfixed_size = %5d\n", info->fixed_size);
13075 fprintf (stderr, "\tgp_size = %5d\n", info->gp_size);
13077 if (info->spe_gp_size)
13078 fprintf (stderr, "\tspe_gp_size = %5d\n", info->spe_gp_size);
13081 fprintf (stderr, "\tfp_size = %5d\n", info->fp_size);
13083 if (info->altivec_size)
13084 fprintf (stderr, "\taltivec_size = %5d\n", info->altivec_size);
13086 if (info->vrsave_size)
13087 fprintf (stderr, "\tvrsave_size = %5d\n", info->vrsave_size);
13089 if (info->altivec_padding_size)
13090 fprintf (stderr, "\taltivec_padding_size= %5d\n",
13091 info->altivec_padding_size);
13093 if (info->spe_padding_size)
13094 fprintf (stderr, "\tspe_padding_size = %5d\n",
13095 info->spe_padding_size);
13098 fprintf (stderr, "\tlr_size = %5d\n", info->lr_size);
13101 fprintf (stderr, "\tcr_size = %5d\n", info->cr_size);
13103 if (info->toc_size)
13104 fprintf (stderr, "\ttoc_size = %5d\n", info->toc_size);
13106 if (info->save_size)
13107 fprintf (stderr, "\tsave_size = %5d\n", info->save_size);
13109 if (info->reg_size != 4)
13110 fprintf (stderr, "\treg_size = %5d\n", info->reg_size);
13112 fprintf (stderr, "\n");
13116 rs6000_return_addr (int count, rtx frame)
13118 /* Currently we don't optimize very well between prolog and body
13119 code and for PIC code the code can be actually quite bad, so
13120 don't try to be too clever here. */
13121 if (count != 0 || (DEFAULT_ABI != ABI_AIX && flag_pic))
13123 cfun->machine->ra_needs_full_frame = 1;
13130 plus_constant (copy_to_reg
13131 (gen_rtx_MEM (Pmode,
13132 memory_address (Pmode, frame))),
13133 RETURN_ADDRESS_OFFSET)));
13136 cfun->machine->ra_need_lr = 1;
13137 return get_hard_reg_initial_val (Pmode, LINK_REGISTER_REGNUM);
13140 /* Say whether a function is a candidate for sibcall handling or not.
13141 We do not allow indirect calls to be optimized into sibling calls.
13142 Also, we can't do it if there are any vector parameters; there's
13143 nowhere to put the VRsave code so it works; note that functions with
13144 vector parameters are required to have a prototype, so the argument
13145 type info must be available here. (The tail recursion case can work
13146 with vector parameters, but there's no way to distinguish here.) */
13148 rs6000_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
13153 if (TARGET_ALTIVEC_VRSAVE)
13155 for (type = TYPE_ARG_TYPES (TREE_TYPE (decl));
13156 type; type = TREE_CHAIN (type))
13158 if (TREE_CODE (TREE_VALUE (type)) == VECTOR_TYPE)
13162 if (DEFAULT_ABI == ABI_DARWIN
13163 || (*targetm.binds_local_p) (decl))
13165 tree attr_list = TYPE_ATTRIBUTES (TREE_TYPE (decl));
13167 if (!lookup_attribute ("longcall", attr_list)
13168 || lookup_attribute ("shortcall", attr_list))
13175 /* NULL if INSN insn is valid within a low-overhead loop.
13176 Otherwise return why doloop cannot be applied.
13177 PowerPC uses the COUNT register for branch on table instructions. */
13179 static const char *
13180 rs6000_invalid_within_doloop (rtx insn)
13183 return "Function call in the loop.";
13186 && (GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
13187 || GET_CODE (PATTERN (insn)) == ADDR_VEC))
13188 return "Computed branch in the loop.";
13194 rs6000_ra_ever_killed (void)
13200 if (current_function_is_thunk)
13203 /* regs_ever_live has LR marked as used if any sibcalls are present,
13204 but this should not force saving and restoring in the
13205 pro/epilogue. Likewise, reg_set_between_p thinks a sibcall
13206 clobbers LR, so that is inappropriate. */
13208 /* Also, the prologue can generate a store into LR that
13209 doesn't really count, like this:
13212 bcl to set PIC register
13216 When we're called from the epilogue, we need to avoid counting
13217 this as a store. */
13219 push_topmost_sequence ();
13220 top = get_insns ();
13221 pop_topmost_sequence ();
13222 reg = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
13224 for (insn = NEXT_INSN (top); insn != NULL_RTX; insn = NEXT_INSN (insn))
13228 if (FIND_REG_INC_NOTE (insn, reg))
13230 else if (GET_CODE (insn) == CALL_INSN
13231 && !SIBLING_CALL_P (insn))
13233 else if (set_of (reg, insn) != NULL_RTX
13234 && !prologue_epilogue_contains (insn))
13241 /* Add a REG_MAYBE_DEAD note to the insn. */
13243 rs6000_maybe_dead (rtx insn)
13245 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD,
13250 /* Emit instructions needed to load the TOC register.
13251 This is only needed when TARGET_TOC, TARGET_MINIMAL_TOC, and there is
13252 a constant pool; or for SVR4 -fpic. */
13255 rs6000_emit_load_toc_table (int fromprolog)
13258 dest = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
13260 if (TARGET_ELF && TARGET_SECURE_PLT && DEFAULT_ABI != ABI_AIX && flag_pic)
13263 rtx lab, tmp1, tmp2, got, tempLR;
13265 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
13266 lab = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
13268 got = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
13270 got = rs6000_got_sym ();
13271 tmp1 = tmp2 = dest;
13274 tmp1 = gen_reg_rtx (Pmode);
13275 tmp2 = gen_reg_rtx (Pmode);
13277 tempLR = (fromprolog
13278 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
13279 : gen_reg_rtx (Pmode));
13280 insn = emit_insn (gen_load_toc_v4_PIC_1 (tempLR, lab));
13282 rs6000_maybe_dead (insn);
13283 insn = emit_move_insn (tmp1, tempLR);
13285 rs6000_maybe_dead (insn);
13286 insn = emit_insn (gen_load_toc_v4_PIC_3b (tmp2, tmp1, got, lab));
13288 rs6000_maybe_dead (insn);
13289 insn = emit_insn (gen_load_toc_v4_PIC_3c (dest, tmp2, got, lab));
13291 rs6000_maybe_dead (insn);
13293 else if (TARGET_ELF && DEFAULT_ABI == ABI_V4 && flag_pic == 1)
13295 rtx tempLR = (fromprolog
13296 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
13297 : gen_reg_rtx (Pmode));
13299 insn = emit_insn (gen_load_toc_v4_pic_si (tempLR));
13301 rs6000_maybe_dead (insn);
13302 insn = emit_move_insn (dest, tempLR);
13304 rs6000_maybe_dead (insn);
13306 else if (TARGET_ELF && DEFAULT_ABI != ABI_AIX && flag_pic == 2)
13309 rtx tempLR = (fromprolog
13310 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
13311 : gen_reg_rtx (Pmode));
13312 rtx temp0 = (fromprolog
13313 ? gen_rtx_REG (Pmode, 0)
13314 : gen_reg_rtx (Pmode));
13320 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
13321 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
13323 ASM_GENERATE_INTERNAL_LABEL (buf, "LCL", rs6000_pic_labelno);
13324 symL = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
13326 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1 (tempLR,
13328 rs6000_maybe_dead (emit_move_insn (dest, tempLR));
13329 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_2 (temp0, dest,
13337 tocsym = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
13338 emit_insn (gen_load_toc_v4_PIC_1b (tempLR, tocsym));
13339 emit_move_insn (dest, tempLR);
13340 emit_move_insn (temp0, gen_rtx_MEM (Pmode, dest));
13342 insn = emit_insn (gen_addsi3 (dest, temp0, dest));
13344 rs6000_maybe_dead (insn);
13346 else if (TARGET_ELF && !TARGET_AIX && flag_pic == 0 && TARGET_MINIMAL_TOC)
13348 /* This is for AIX code running in non-PIC ELF32. */
13351 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
13352 realsym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
13354 insn = emit_insn (gen_elf_high (dest, realsym));
13356 rs6000_maybe_dead (insn);
13357 insn = emit_insn (gen_elf_low (dest, dest, realsym));
13359 rs6000_maybe_dead (insn);
13363 gcc_assert (DEFAULT_ABI == ABI_AIX);
13366 insn = emit_insn (gen_load_toc_aix_si (dest));
13368 insn = emit_insn (gen_load_toc_aix_di (dest));
13370 rs6000_maybe_dead (insn);
13374 /* Emit instructions to restore the link register after determining where
13375 its value has been stored. */
13378 rs6000_emit_eh_reg_restore (rtx source, rtx scratch)
13380 rs6000_stack_t *info = rs6000_stack_info ();
13383 operands[0] = source;
13384 operands[1] = scratch;
13386 if (info->lr_save_p)
13388 rtx frame_rtx = stack_pointer_rtx;
13389 HOST_WIDE_INT sp_offset = 0;
13392 if (frame_pointer_needed
13393 || current_function_calls_alloca
13394 || info->total_size > 32767)
13396 emit_move_insn (operands[1], gen_rtx_MEM (Pmode, frame_rtx));
13397 frame_rtx = operands[1];
13399 else if (info->push_p)
13400 sp_offset = info->total_size;
13402 tmp = plus_constant (frame_rtx, info->lr_save_offset + sp_offset);
13403 tmp = gen_rtx_MEM (Pmode, tmp);
13404 emit_move_insn (tmp, operands[0]);
13407 emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM), operands[0]);
13410 static GTY(()) int set = -1;
13413 get_TOC_alias_set (void)
13416 set = new_alias_set ();
13420 /* This returns nonzero if the current function uses the TOC. This is
13421 determined by the presence of (use (unspec ... UNSPEC_TOC)), which
13422 is generated by the ABI_V4 load_toc_* patterns. */
13429 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
13432 rtx pat = PATTERN (insn);
13435 if (GET_CODE (pat) == PARALLEL)
13436 for (i = 0; i < XVECLEN (pat, 0); i++)
13438 rtx sub = XVECEXP (pat, 0, i);
13439 if (GET_CODE (sub) == USE)
13441 sub = XEXP (sub, 0);
13442 if (GET_CODE (sub) == UNSPEC
13443 && XINT (sub, 1) == UNSPEC_TOC)
13453 create_TOC_reference (rtx symbol)
13455 return gen_rtx_PLUS (Pmode,
13456 gen_rtx_REG (Pmode, TOC_REGISTER),
13457 gen_rtx_CONST (Pmode,
13458 gen_rtx_MINUS (Pmode, symbol,
13459 gen_rtx_SYMBOL_REF (Pmode, toc_label_name))));
13462 /* If _Unwind_* has been called from within the same module,
13463 toc register is not guaranteed to be saved to 40(1) on function
13464 entry. Save it there in that case. */
13467 rs6000_aix_emit_builtin_unwind_init (void)
13470 rtx stack_top = gen_reg_rtx (Pmode);
13471 rtx opcode_addr = gen_reg_rtx (Pmode);
13472 rtx opcode = gen_reg_rtx (SImode);
13473 rtx tocompare = gen_reg_rtx (SImode);
13474 rtx no_toc_save_needed = gen_label_rtx ();
13476 mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
13477 emit_move_insn (stack_top, mem);
13479 mem = gen_rtx_MEM (Pmode,
13480 gen_rtx_PLUS (Pmode, stack_top,
13481 GEN_INT (2 * GET_MODE_SIZE (Pmode))));
13482 emit_move_insn (opcode_addr, mem);
13483 emit_move_insn (opcode, gen_rtx_MEM (SImode, opcode_addr));
13484 emit_move_insn (tocompare, gen_int_mode (TARGET_32BIT ? 0x80410014
13485 : 0xE8410028, SImode));
13487 do_compare_rtx_and_jump (opcode, tocompare, EQ, 1,
13488 SImode, NULL_RTX, NULL_RTX,
13489 no_toc_save_needed);
13491 mem = gen_rtx_MEM (Pmode,
13492 gen_rtx_PLUS (Pmode, stack_top,
13493 GEN_INT (5 * GET_MODE_SIZE (Pmode))));
13494 emit_move_insn (mem, gen_rtx_REG (Pmode, 2));
13495 emit_label (no_toc_save_needed);
13498 /* This ties together stack memory (MEM with an alias set of
13499 rs6000_sr_alias_set) and the change to the stack pointer. */
13502 rs6000_emit_stack_tie (void)
13504 rtx mem = gen_rtx_MEM (BLKmode, gen_rtx_REG (Pmode, STACK_POINTER_REGNUM));
13506 set_mem_alias_set (mem, rs6000_sr_alias_set);
13507 emit_insn (gen_stack_tie (mem));
13510 /* Emit the correct code for allocating stack space, as insns.
13511 If COPY_R12, make sure a copy of the old frame is left in r12.
13512 The generated code may use hard register 0 as a temporary. */
13515 rs6000_emit_allocate_stack (HOST_WIDE_INT size, int copy_r12)
13518 rtx stack_reg = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
13519 rtx tmp_reg = gen_rtx_REG (Pmode, 0);
13520 rtx todec = gen_int_mode (-size, Pmode);
13522 if (INTVAL (todec) != -size)
13524 warning (0, "stack frame too large");
13525 emit_insn (gen_trap ());
13529 if (current_function_limit_stack)
13531 if (REG_P (stack_limit_rtx)
13532 && REGNO (stack_limit_rtx) > 1
13533 && REGNO (stack_limit_rtx) <= 31)
13535 emit_insn (TARGET_32BIT
13536 ? gen_addsi3 (tmp_reg,
13539 : gen_adddi3 (tmp_reg,
13543 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
13546 else if (GET_CODE (stack_limit_rtx) == SYMBOL_REF
13548 && DEFAULT_ABI == ABI_V4)
13550 rtx toload = gen_rtx_CONST (VOIDmode,
13551 gen_rtx_PLUS (Pmode,
13555 emit_insn (gen_elf_high (tmp_reg, toload));
13556 emit_insn (gen_elf_low (tmp_reg, tmp_reg, toload));
13557 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
13561 warning (0, "stack limit expression is not supported");
13564 if (copy_r12 || ! TARGET_UPDATE)
13565 emit_move_insn (gen_rtx_REG (Pmode, 12), stack_reg);
13571 /* Need a note here so that try_split doesn't get confused. */
13572 if (get_last_insn () == NULL_RTX)
13573 emit_note (NOTE_INSN_DELETED);
13574 insn = emit_move_insn (tmp_reg, todec);
13575 try_split (PATTERN (insn), insn, 0);
13579 insn = emit_insn (TARGET_32BIT
13580 ? gen_movsi_update (stack_reg, stack_reg,
13582 : gen_movdi_di_update (stack_reg, stack_reg,
13583 todec, stack_reg));
13587 insn = emit_insn (TARGET_32BIT
13588 ? gen_addsi3 (stack_reg, stack_reg, todec)
13589 : gen_adddi3 (stack_reg, stack_reg, todec));
13590 emit_move_insn (gen_rtx_MEM (Pmode, stack_reg),
13591 gen_rtx_REG (Pmode, 12));
13594 RTX_FRAME_RELATED_P (insn) = 1;
13596 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
13597 gen_rtx_SET (VOIDmode, stack_reg,
13598 gen_rtx_PLUS (Pmode, stack_reg,
13603 /* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
13604 with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
13605 is not NULL. It would be nice if dwarf2out_frame_debug_expr could
13606 deduce these equivalences by itself so it wasn't necessary to hold
13607 its hand so much. */
13610 rs6000_frame_related (rtx insn, rtx reg, HOST_WIDE_INT val,
13611 rtx reg2, rtx rreg)
13615 /* copy_rtx will not make unique copies of registers, so we need to
13616 ensure we don't have unwanted sharing here. */
13618 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
13621 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
13623 real = copy_rtx (PATTERN (insn));
13625 if (reg2 != NULL_RTX)
13626 real = replace_rtx (real, reg2, rreg);
13628 real = replace_rtx (real, reg,
13629 gen_rtx_PLUS (Pmode, gen_rtx_REG (Pmode,
13630 STACK_POINTER_REGNUM),
13633 /* We expect that 'real' is either a SET or a PARALLEL containing
13634 SETs (and possibly other stuff). In a PARALLEL, all the SETs
13635 are important so they all have to be marked RTX_FRAME_RELATED_P. */
13637 if (GET_CODE (real) == SET)
13641 temp = simplify_rtx (SET_SRC (set));
13643 SET_SRC (set) = temp;
13644 temp = simplify_rtx (SET_DEST (set));
13646 SET_DEST (set) = temp;
13647 if (GET_CODE (SET_DEST (set)) == MEM)
13649 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
13651 XEXP (SET_DEST (set), 0) = temp;
13658 gcc_assert (GET_CODE (real) == PARALLEL);
13659 for (i = 0; i < XVECLEN (real, 0); i++)
13660 if (GET_CODE (XVECEXP (real, 0, i)) == SET)
13662 rtx set = XVECEXP (real, 0, i);
13664 temp = simplify_rtx (SET_SRC (set));
13666 SET_SRC (set) = temp;
13667 temp = simplify_rtx (SET_DEST (set));
13669 SET_DEST (set) = temp;
13670 if (GET_CODE (SET_DEST (set)) == MEM)
13672 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
13674 XEXP (SET_DEST (set), 0) = temp;
13676 RTX_FRAME_RELATED_P (set) = 1;
13681 real = spe_synthesize_frame_save (real);
13683 RTX_FRAME_RELATED_P (insn) = 1;
13684 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
13689 /* Given an SPE frame note, return a PARALLEL of SETs with the
13690 original note, plus a synthetic register save. */
13693 spe_synthesize_frame_save (rtx real)
13695 rtx synth, offset, reg, real2;
13697 if (GET_CODE (real) != SET
13698 || GET_MODE (SET_SRC (real)) != V2SImode)
13701 /* For the SPE, registers saved in 64-bits, get a PARALLEL for their
13702 frame related note. The parallel contains a set of the register
13703 being saved, and another set to a synthetic register (n+1200).
13704 This is so we can differentiate between 64-bit and 32-bit saves.
13705 Words cannot describe this nastiness. */
13707 gcc_assert (GET_CODE (SET_DEST (real)) == MEM
13708 && GET_CODE (XEXP (SET_DEST (real), 0)) == PLUS
13709 && GET_CODE (SET_SRC (real)) == REG);
13712 (set (mem (plus (reg x) (const y)))
13715 (set (mem (plus (reg x) (const y+4)))
13719 real2 = copy_rtx (real);
13720 PUT_MODE (SET_DEST (real2), SImode);
13721 reg = SET_SRC (real2);
13722 real2 = replace_rtx (real2, reg, gen_rtx_REG (SImode, REGNO (reg)));
13723 synth = copy_rtx (real2);
13725 if (BYTES_BIG_ENDIAN)
13727 offset = XEXP (XEXP (SET_DEST (real2), 0), 1);
13728 real2 = replace_rtx (real2, offset, GEN_INT (INTVAL (offset) + 4));
13731 reg = SET_SRC (synth);
13733 synth = replace_rtx (synth, reg,
13734 gen_rtx_REG (SImode, REGNO (reg) + 1200));
13736 offset = XEXP (XEXP (SET_DEST (synth), 0), 1);
13737 synth = replace_rtx (synth, offset,
13738 GEN_INT (INTVAL (offset)
13739 + (BYTES_BIG_ENDIAN ? 0 : 4)));
13741 RTX_FRAME_RELATED_P (synth) = 1;
13742 RTX_FRAME_RELATED_P (real2) = 1;
13743 if (BYTES_BIG_ENDIAN)
13744 real = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, synth, real2));
13746 real = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, real2, synth));
13751 /* Returns an insn that has a vrsave set operation with the
13752 appropriate CLOBBERs. */
13755 generate_set_vrsave (rtx reg, rs6000_stack_t *info, int epiloguep)
13758 rtx insn, clobs[TOTAL_ALTIVEC_REGS + 1];
13759 rtx vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
13762 = gen_rtx_SET (VOIDmode,
13764 gen_rtx_UNSPEC_VOLATILE (SImode,
13765 gen_rtvec (2, reg, vrsave),
13766 UNSPECV_SET_VRSAVE));
13770 /* We need to clobber the registers in the mask so the scheduler
13771 does not move sets to VRSAVE before sets of AltiVec registers.
13773 However, if the function receives nonlocal gotos, reload will set
13774 all call saved registers live. We will end up with:
13776 (set (reg 999) (mem))
13777 (parallel [ (set (reg vrsave) (unspec blah))
13778 (clobber (reg 999))])
13780 The clobber will cause the store into reg 999 to be dead, and
13781 flow will attempt to delete an epilogue insn. In this case, we
13782 need an unspec use/set of the register. */
13784 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
13785 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
13787 if (!epiloguep || call_used_regs [i])
13788 clobs[nclobs++] = gen_rtx_CLOBBER (VOIDmode,
13789 gen_rtx_REG (V4SImode, i));
13792 rtx reg = gen_rtx_REG (V4SImode, i);
13795 = gen_rtx_SET (VOIDmode,
13797 gen_rtx_UNSPEC (V4SImode,
13798 gen_rtvec (1, reg), 27));
13802 insn = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nclobs));
13804 for (i = 0; i < nclobs; ++i)
13805 XVECEXP (insn, 0, i) = clobs[i];
13810 /* Save a register into the frame, and emit RTX_FRAME_RELATED_P notes.
13811 Save REGNO into [FRAME_REG + OFFSET] in mode MODE. */
13814 emit_frame_save (rtx frame_reg, rtx frame_ptr, enum machine_mode mode,
13815 unsigned int regno, int offset, HOST_WIDE_INT total_size)
13817 rtx reg, offset_rtx, insn, mem, addr, int_rtx;
13818 rtx replacea, replaceb;
13820 int_rtx = GEN_INT (offset);
13822 /* Some cases that need register indexed addressing. */
13823 if ((TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
13824 || (TARGET_E500_DOUBLE && mode == DFmode)
13826 && SPE_VECTOR_MODE (mode)
13827 && !SPE_CONST_OFFSET_OK (offset)))
13829 /* Whomever calls us must make sure r11 is available in the
13830 flow path of instructions in the prologue. */
13831 offset_rtx = gen_rtx_REG (Pmode, 11);
13832 emit_move_insn (offset_rtx, int_rtx);
13834 replacea = offset_rtx;
13835 replaceb = int_rtx;
13839 offset_rtx = int_rtx;
13840 replacea = NULL_RTX;
13841 replaceb = NULL_RTX;
13844 reg = gen_rtx_REG (mode, regno);
13845 addr = gen_rtx_PLUS (Pmode, frame_reg, offset_rtx);
13846 mem = gen_rtx_MEM (mode, addr);
13847 set_mem_alias_set (mem, rs6000_sr_alias_set);
13849 insn = emit_move_insn (mem, reg);
13851 rs6000_frame_related (insn, frame_ptr, total_size, replacea, replaceb);
13854 /* Emit an offset memory reference suitable for a frame store, while
13855 converting to a valid addressing mode. */
13858 gen_frame_mem_offset (enum machine_mode mode, rtx reg, int offset)
13860 rtx int_rtx, offset_rtx;
13862 int_rtx = GEN_INT (offset);
13864 if ((TARGET_SPE_ABI && SPE_VECTOR_MODE (mode))
13865 || (TARGET_E500_DOUBLE && mode == DFmode))
13867 offset_rtx = gen_rtx_REG (Pmode, FIXED_SCRATCH);
13868 emit_move_insn (offset_rtx, int_rtx);
13871 offset_rtx = int_rtx;
13873 return gen_rtx_MEM (mode, gen_rtx_PLUS (Pmode, reg, offset_rtx));
13876 /* Look for user-defined global regs. We should not save and restore these,
13877 and cannot use stmw/lmw if there are any in its range. */
13880 no_global_regs_above (int first_greg)
13883 for (i = 0; i < 32 - first_greg; i++)
13884 if (global_regs[first_greg + i])
13889 #ifndef TARGET_FIX_AND_CONTINUE
13890 #define TARGET_FIX_AND_CONTINUE 0
13893 /* Emit function prologue as insns. */
13896 rs6000_emit_prologue (void)
13898 rs6000_stack_t *info = rs6000_stack_info ();
13899 enum machine_mode reg_mode = Pmode;
13900 int reg_size = TARGET_32BIT ? 4 : 8;
13901 rtx sp_reg_rtx = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
13902 rtx frame_ptr_rtx = gen_rtx_REG (Pmode, 12);
13903 rtx frame_reg_rtx = sp_reg_rtx;
13904 rtx cr_save_rtx = NULL_RTX;
13906 int saving_FPRs_inline;
13907 int using_store_multiple;
13908 HOST_WIDE_INT sp_offset = 0;
13910 if (TARGET_FIX_AND_CONTINUE)
13912 /* gdb on darwin arranges to forward a function from the old
13913 address by modifying the first 5 instructions of the function
13914 to branch to the overriding function. This is necessary to
13915 permit function pointers that point to the old function to
13916 actually forward to the new function. */
13917 emit_insn (gen_nop ());
13918 emit_insn (gen_nop ());
13919 emit_insn (gen_nop ());
13920 emit_insn (gen_nop ());
13921 emit_insn (gen_nop ());
13924 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
13926 reg_mode = V2SImode;
13930 using_store_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
13931 && (!TARGET_SPE_ABI
13932 || info->spe_64bit_regs_used == 0)
13933 && info->first_gp_reg_save < 31
13934 && no_global_regs_above (info->first_gp_reg_save));
13935 saving_FPRs_inline = (info->first_fp_reg_save == 64
13936 || FP_SAVE_INLINE (info->first_fp_reg_save)
13937 || current_function_calls_eh_return
13938 || cfun->machine->ra_need_lr);
13940 /* For V.4, update stack before we do any saving and set back pointer. */
13942 && (DEFAULT_ABI == ABI_V4
13943 || current_function_calls_eh_return))
13945 if (info->total_size < 32767)
13946 sp_offset = info->total_size;
13948 frame_reg_rtx = frame_ptr_rtx;
13949 rs6000_emit_allocate_stack (info->total_size,
13950 (frame_reg_rtx != sp_reg_rtx
13951 && (info->cr_save_p
13953 || info->first_fp_reg_save < 64
13954 || info->first_gp_reg_save < 32
13956 if (frame_reg_rtx != sp_reg_rtx)
13957 rs6000_emit_stack_tie ();
13960 /* Handle world saves specially here. */
13961 if (WORLD_SAVE_P (info))
13967 /* save_world expects lr in r0. */
13968 if (info->lr_save_p)
13970 insn = emit_move_insn (gen_rtx_REG (Pmode, 0),
13971 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
13972 RTX_FRAME_RELATED_P (insn) = 1;
13975 /* The SAVE_WORLD and RESTORE_WORLD routines make a number of
13976 assumptions about the offsets of various bits of the stack
13978 gcc_assert (info->gp_save_offset == -220
13979 && info->fp_save_offset == -144
13980 && info->lr_save_offset == 8
13981 && info->cr_save_offset == 4
13984 && (!current_function_calls_eh_return
13985 || info->ehrd_offset == -432)
13986 && info->vrsave_save_offset == -224
13987 && info->altivec_save_offset == (-224 -16 -192));
13989 treg = gen_rtx_REG (SImode, 11);
13990 emit_move_insn (treg, GEN_INT (-info->total_size));
13992 /* SAVE_WORLD takes the caller's LR in R0 and the frame size
13993 in R11. It also clobbers R12, so beware! */
13995 /* Preserve CR2 for save_world prologues */
13997 sz += 32 - info->first_gp_reg_save;
13998 sz += 64 - info->first_fp_reg_save;
13999 sz += LAST_ALTIVEC_REGNO - info->first_altivec_reg_save + 1;
14000 p = rtvec_alloc (sz);
14002 RTVEC_ELT (p, j++) = gen_rtx_CLOBBER (VOIDmode,
14003 gen_rtx_REG (Pmode,
14004 LINK_REGISTER_REGNUM));
14005 RTVEC_ELT (p, j++) = gen_rtx_USE (VOIDmode,
14006 gen_rtx_SYMBOL_REF (Pmode,
14008 /* We do floats first so that the instruction pattern matches
14010 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
14012 rtx reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
14013 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14014 GEN_INT (info->fp_save_offset
14015 + sp_offset + 8 * i));
14016 rtx mem = gen_rtx_MEM (DFmode, addr);
14017 set_mem_alias_set (mem, rs6000_sr_alias_set);
14019 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
14021 for (i = 0; info->first_altivec_reg_save + i <= LAST_ALTIVEC_REGNO; i++)
14023 rtx reg = gen_rtx_REG (V4SImode, info->first_altivec_reg_save + i);
14024 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14025 GEN_INT (info->altivec_save_offset
14026 + sp_offset + 16 * i));
14027 rtx mem = gen_rtx_MEM (V4SImode, addr);
14028 set_mem_alias_set (mem, rs6000_sr_alias_set);
14030 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
14032 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
14034 rtx reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
14035 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14036 GEN_INT (info->gp_save_offset
14037 + sp_offset + reg_size * i));
14038 rtx mem = gen_rtx_MEM (reg_mode, addr);
14039 set_mem_alias_set (mem, rs6000_sr_alias_set);
14041 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
14045 /* CR register traditionally saved as CR2. */
14046 rtx reg = gen_rtx_REG (reg_mode, CR2_REGNO);
14047 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14048 GEN_INT (info->cr_save_offset
14050 rtx mem = gen_rtx_MEM (reg_mode, addr);
14051 set_mem_alias_set (mem, rs6000_sr_alias_set);
14053 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
14055 /* Prevent any attempt to delete the setting of r0 and treg! */
14056 RTVEC_ELT (p, j++) = gen_rtx_USE (VOIDmode, gen_rtx_REG (Pmode, 0));
14057 RTVEC_ELT (p, j++) = gen_rtx_USE (VOIDmode, treg);
14058 RTVEC_ELT (p, j++) = gen_rtx_CLOBBER (VOIDmode, sp_reg_rtx);
14060 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
14061 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
14062 NULL_RTX, NULL_RTX);
14064 if (current_function_calls_eh_return)
14069 unsigned int regno = EH_RETURN_DATA_REGNO (i);
14070 if (regno == INVALID_REGNUM)
14072 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, reg_mode, regno,
14073 info->ehrd_offset + sp_offset
14074 + reg_size * (int) i,
14080 /* Save AltiVec registers if needed. */
14081 if (!WORLD_SAVE_P (info) && TARGET_ALTIVEC_ABI && info->altivec_size != 0)
14085 /* There should be a non inline version of this, for when we
14086 are saving lots of vector registers. */
14087 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
14088 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
14090 rtx areg, savereg, mem;
14093 offset = info->altivec_save_offset + sp_offset
14094 + 16 * (i - info->first_altivec_reg_save);
14096 savereg = gen_rtx_REG (V4SImode, i);
14098 areg = gen_rtx_REG (Pmode, 0);
14099 emit_move_insn (areg, GEN_INT (offset));
14101 /* AltiVec addressing mode is [reg+reg]. */
14102 mem = gen_rtx_MEM (V4SImode,
14103 gen_rtx_PLUS (Pmode, frame_reg_rtx, areg));
14105 set_mem_alias_set (mem, rs6000_sr_alias_set);
14107 insn = emit_move_insn (mem, savereg);
14109 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
14110 areg, GEN_INT (offset));
14114 /* VRSAVE is a bit vector representing which AltiVec registers
14115 are used. The OS uses this to determine which vector
14116 registers to save on a context switch. We need to save
14117 VRSAVE on the stack frame, add whatever AltiVec registers we
14118 used in this function, and do the corresponding magic in the
14121 if (TARGET_ALTIVEC && TARGET_ALTIVEC_VRSAVE
14122 && info->vrsave_mask != 0)
14124 rtx reg, mem, vrsave;
14127 /* Get VRSAVE onto a GPR. Note that ABI_V4 might be using r12
14128 as frame_reg_rtx and r11 as the static chain pointer for
14129 nested functions. */
14130 reg = gen_rtx_REG (SImode, 0);
14131 vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
14133 emit_insn (gen_get_vrsave_internal (reg));
14135 emit_insn (gen_rtx_SET (VOIDmode, reg, vrsave));
14137 if (!WORLD_SAVE_P (info))
14140 offset = info->vrsave_save_offset + sp_offset;
14142 = gen_rtx_MEM (SImode,
14143 gen_rtx_PLUS (Pmode, frame_reg_rtx, GEN_INT (offset)));
14144 set_mem_alias_set (mem, rs6000_sr_alias_set);
14145 insn = emit_move_insn (mem, reg);
14148 /* Include the registers in the mask. */
14149 emit_insn (gen_iorsi3 (reg, reg, GEN_INT ((int) info->vrsave_mask)));
14151 insn = emit_insn (generate_set_vrsave (reg, info, 0));
14154 /* If we use the link register, get it into r0. */
14155 if (!WORLD_SAVE_P (info) && info->lr_save_p)
14157 insn = emit_move_insn (gen_rtx_REG (Pmode, 0),
14158 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
14159 RTX_FRAME_RELATED_P (insn) = 1;
14162 /* If we need to save CR, put it into r12. */
14163 if (!WORLD_SAVE_P (info) && info->cr_save_p && frame_reg_rtx != frame_ptr_rtx)
14167 cr_save_rtx = gen_rtx_REG (SImode, 12);
14168 insn = emit_insn (gen_movesi_from_cr (cr_save_rtx));
14169 RTX_FRAME_RELATED_P (insn) = 1;
14170 /* Now, there's no way that dwarf2out_frame_debug_expr is going
14171 to understand '(unspec:SI [(reg:CC 68) ...] UNSPEC_MOVESI_FROM_CR)'.
14172 But that's OK. All we have to do is specify that _one_ condition
14173 code register is saved in this stack slot. The thrower's epilogue
14174 will then restore all the call-saved registers.
14175 We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux. */
14176 set = gen_rtx_SET (VOIDmode, cr_save_rtx,
14177 gen_rtx_REG (SImode, CR2_REGNO));
14178 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
14183 /* Do any required saving of fpr's. If only one or two to save, do
14184 it ourselves. Otherwise, call function. */
14185 if (!WORLD_SAVE_P (info) && saving_FPRs_inline)
14188 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
14189 if ((regs_ever_live[info->first_fp_reg_save+i]
14190 && ! call_used_regs[info->first_fp_reg_save+i]))
14191 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, DFmode,
14192 info->first_fp_reg_save + i,
14193 info->fp_save_offset + sp_offset + 8 * i,
14196 else if (!WORLD_SAVE_P (info) && info->first_fp_reg_save != 64)
14200 const char *alloc_rname;
14202 p = rtvec_alloc (2 + 64 - info->first_fp_reg_save);
14204 RTVEC_ELT (p, 0) = gen_rtx_CLOBBER (VOIDmode,
14205 gen_rtx_REG (Pmode,
14206 LINK_REGISTER_REGNUM));
14207 sprintf (rname, "%s%d%s", SAVE_FP_PREFIX,
14208 info->first_fp_reg_save - 32, SAVE_FP_SUFFIX);
14209 alloc_rname = ggc_strdup (rname);
14210 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
14211 gen_rtx_SYMBOL_REF (Pmode,
14213 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
14215 rtx addr, reg, mem;
14216 reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
14217 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14218 GEN_INT (info->fp_save_offset
14219 + sp_offset + 8*i));
14220 mem = gen_rtx_MEM (DFmode, addr);
14221 set_mem_alias_set (mem, rs6000_sr_alias_set);
14223 RTVEC_ELT (p, i + 2) = gen_rtx_SET (VOIDmode, mem, reg);
14225 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
14226 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
14227 NULL_RTX, NULL_RTX);
14230 /* Save GPRs. This is done as a PARALLEL if we are using
14231 the store-multiple instructions. */
14232 if (!WORLD_SAVE_P (info) && using_store_multiple)
14236 p = rtvec_alloc (32 - info->first_gp_reg_save);
14237 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
14239 rtx addr, reg, mem;
14240 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
14241 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14242 GEN_INT (info->gp_save_offset
14245 mem = gen_rtx_MEM (reg_mode, addr);
14246 set_mem_alias_set (mem, rs6000_sr_alias_set);
14248 RTVEC_ELT (p, i) = gen_rtx_SET (VOIDmode, mem, reg);
14250 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
14251 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
14252 NULL_RTX, NULL_RTX);
14254 else if (!WORLD_SAVE_P (info))
14257 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
14258 if ((regs_ever_live[info->first_gp_reg_save + i]
14259 && (!call_used_regs[info->first_gp_reg_save + i]
14260 || (i + info->first_gp_reg_save
14261 == RS6000_PIC_OFFSET_TABLE_REGNUM
14262 && TARGET_TOC && TARGET_MINIMAL_TOC)))
14263 || (i + info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
14264 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
14265 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
14267 rtx addr, reg, mem;
14268 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
14270 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
14272 int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
14275 if (!SPE_CONST_OFFSET_OK (offset))
14277 b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
14278 emit_move_insn (b, GEN_INT (offset));
14281 b = GEN_INT (offset);
14283 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
14284 mem = gen_rtx_MEM (V2SImode, addr);
14285 set_mem_alias_set (mem, rs6000_sr_alias_set);
14286 insn = emit_move_insn (mem, reg);
14288 if (GET_CODE (b) == CONST_INT)
14289 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
14290 NULL_RTX, NULL_RTX);
14292 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
14293 b, GEN_INT (offset));
14297 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14298 GEN_INT (info->gp_save_offset
14301 mem = gen_rtx_MEM (reg_mode, addr);
14302 set_mem_alias_set (mem, rs6000_sr_alias_set);
14304 insn = emit_move_insn (mem, reg);
14305 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
14306 NULL_RTX, NULL_RTX);
14311 /* ??? There's no need to emit actual instructions here, but it's the
14312 easiest way to get the frame unwind information emitted. */
14313 if (!WORLD_SAVE_P (info) && current_function_calls_eh_return)
14315 unsigned int i, regno;
14317 /* In AIX ABI we need to pretend we save r2 here. */
14320 rtx addr, reg, mem;
14322 reg = gen_rtx_REG (reg_mode, 2);
14323 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14324 GEN_INT (sp_offset + 5 * reg_size));
14325 mem = gen_rtx_MEM (reg_mode, addr);
14326 set_mem_alias_set (mem, rs6000_sr_alias_set);
14328 insn = emit_move_insn (mem, reg);
14329 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
14330 NULL_RTX, NULL_RTX);
14331 PATTERN (insn) = gen_blockage ();
14336 regno = EH_RETURN_DATA_REGNO (i);
14337 if (regno == INVALID_REGNUM)
14340 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, reg_mode, regno,
14341 info->ehrd_offset + sp_offset
14342 + reg_size * (int) i,
14347 /* Save lr if we used it. */
14348 if (!WORLD_SAVE_P (info) && info->lr_save_p)
14350 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14351 GEN_INT (info->lr_save_offset + sp_offset));
14352 rtx reg = gen_rtx_REG (Pmode, 0);
14353 rtx mem = gen_rtx_MEM (Pmode, addr);
14354 /* This should not be of rs6000_sr_alias_set, because of
14355 __builtin_return_address. */
14357 insn = emit_move_insn (mem, reg);
14358 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
14359 NULL_RTX, NULL_RTX);
14362 /* Save CR if we use any that must be preserved. */
14363 if (!WORLD_SAVE_P (info) && info->cr_save_p)
14365 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14366 GEN_INT (info->cr_save_offset + sp_offset));
14367 rtx mem = gen_rtx_MEM (SImode, addr);
14368 /* See the large comment above about why CR2_REGNO is used. */
14369 rtx magic_eh_cr_reg = gen_rtx_REG (SImode, CR2_REGNO);
14371 set_mem_alias_set (mem, rs6000_sr_alias_set);
14373 /* If r12 was used to hold the original sp, copy cr into r0 now
14375 if (REGNO (frame_reg_rtx) == 12)
14379 cr_save_rtx = gen_rtx_REG (SImode, 0);
14380 insn = emit_insn (gen_movesi_from_cr (cr_save_rtx));
14381 RTX_FRAME_RELATED_P (insn) = 1;
14382 set = gen_rtx_SET (VOIDmode, cr_save_rtx, magic_eh_cr_reg);
14383 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
14388 insn = emit_move_insn (mem, cr_save_rtx);
14390 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
14391 NULL_RTX, NULL_RTX);
14394 /* Update stack and set back pointer unless this is V.4,
14395 for which it was done previously. */
14396 if (!WORLD_SAVE_P (info) && info->push_p
14397 && !(DEFAULT_ABI == ABI_V4 || current_function_calls_eh_return))
14398 rs6000_emit_allocate_stack (info->total_size, FALSE);
14400 /* Set frame pointer, if needed. */
14401 if (frame_pointer_needed)
14403 insn = emit_move_insn (gen_rtx_REG (Pmode, HARD_FRAME_POINTER_REGNUM),
14405 RTX_FRAME_RELATED_P (insn) = 1;
14408 /* If we are using RS6000_PIC_OFFSET_TABLE_REGNUM, we need to set it up. */
14409 if ((TARGET_TOC && TARGET_MINIMAL_TOC && get_pool_size () != 0)
14410 || (DEFAULT_ABI == ABI_V4
14411 && (flag_pic == 1 || (flag_pic && TARGET_SECURE_PLT))
14412 && regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM]))
14414 /* If emit_load_toc_table will use the link register, we need to save
14415 it. We use R12 for this purpose because emit_load_toc_table
14416 can use register 0. This allows us to use a plain 'blr' to return
14417 from the procedure more often. */
14418 int save_LR_around_toc_setup = (TARGET_ELF
14419 && DEFAULT_ABI != ABI_AIX
14421 && ! info->lr_save_p
14422 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0);
14423 if (save_LR_around_toc_setup)
14425 rtx lr = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
14427 insn = emit_move_insn (frame_ptr_rtx, lr);
14428 rs6000_maybe_dead (insn);
14429 RTX_FRAME_RELATED_P (insn) = 1;
14431 rs6000_emit_load_toc_table (TRUE);
14433 insn = emit_move_insn (lr, frame_ptr_rtx);
14434 rs6000_maybe_dead (insn);
14435 RTX_FRAME_RELATED_P (insn) = 1;
14438 rs6000_emit_load_toc_table (TRUE);
14442 if (DEFAULT_ABI == ABI_DARWIN
14443 && flag_pic && current_function_uses_pic_offset_table)
14445 rtx lr = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
14446 rtx src = machopic_function_base_sym ();
14448 /* Save and restore LR locally around this call (in R0). */
14449 if (!info->lr_save_p)
14450 rs6000_maybe_dead (emit_move_insn (gen_rtx_REG (Pmode, 0), lr));
14452 rs6000_maybe_dead (emit_insn (gen_load_macho_picbase (lr, src)));
14454 insn = emit_move_insn (gen_rtx_REG (Pmode,
14455 RS6000_PIC_OFFSET_TABLE_REGNUM),
14457 rs6000_maybe_dead (insn);
14459 if (!info->lr_save_p)
14460 rs6000_maybe_dead (emit_move_insn (lr, gen_rtx_REG (Pmode, 0)));
14465 /* Write function prologue. */
14468 rs6000_output_function_prologue (FILE *file,
14469 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
14471 rs6000_stack_t *info = rs6000_stack_info ();
14473 if (TARGET_DEBUG_STACK)
14474 debug_stack_info (info);
14476 /* Write .extern for any function we will call to save and restore
14478 if (info->first_fp_reg_save < 64
14479 && !FP_SAVE_INLINE (info->first_fp_reg_save))
14480 fprintf (file, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
14481 SAVE_FP_PREFIX, info->first_fp_reg_save - 32, SAVE_FP_SUFFIX,
14482 RESTORE_FP_PREFIX, info->first_fp_reg_save - 32,
14483 RESTORE_FP_SUFFIX);
14485 /* Write .extern for AIX common mode routines, if needed. */
14486 if (! TARGET_POWER && ! TARGET_POWERPC && ! common_mode_defined)
14488 fputs ("\t.extern __mulh\n", file);
14489 fputs ("\t.extern __mull\n", file);
14490 fputs ("\t.extern __divss\n", file);
14491 fputs ("\t.extern __divus\n", file);
14492 fputs ("\t.extern __quoss\n", file);
14493 fputs ("\t.extern __quous\n", file);
14494 common_mode_defined = 1;
14497 if (! HAVE_prologue)
14501 /* A NOTE_INSN_DELETED is supposed to be at the start and end of
14502 the "toplevel" insn chain. */
14503 emit_note (NOTE_INSN_DELETED);
14504 rs6000_emit_prologue ();
14505 emit_note (NOTE_INSN_DELETED);
14507 /* Expand INSN_ADDRESSES so final() doesn't crash. */
14511 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
14513 INSN_ADDRESSES_NEW (insn, addr);
14518 if (TARGET_DEBUG_STACK)
14519 debug_rtx_list (get_insns (), 100);
14520 final (get_insns (), file, FALSE);
14524 rs6000_pic_labelno++;
14527 /* Emit function epilogue as insns.
14529 At present, dwarf2out_frame_debug_expr doesn't understand
14530 register restores, so we don't bother setting RTX_FRAME_RELATED_P
14531 anywhere in the epilogue. Most of the insns below would in any case
14532 need special notes to explain where r11 is in relation to the stack. */
14535 rs6000_emit_epilogue (int sibcall)
14537 rs6000_stack_t *info;
14538 int restoring_FPRs_inline;
14539 int using_load_multiple;
14540 int using_mfcr_multiple;
14541 int use_backchain_to_restore_sp;
14543 rtx sp_reg_rtx = gen_rtx_REG (Pmode, 1);
14544 rtx frame_reg_rtx = sp_reg_rtx;
14545 enum machine_mode reg_mode = Pmode;
14546 int reg_size = TARGET_32BIT ? 4 : 8;
14549 info = rs6000_stack_info ();
14551 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
14553 reg_mode = V2SImode;
14557 using_load_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
14558 && (!TARGET_SPE_ABI
14559 || info->spe_64bit_regs_used == 0)
14560 && info->first_gp_reg_save < 31
14561 && no_global_regs_above (info->first_gp_reg_save));
14562 restoring_FPRs_inline = (sibcall
14563 || current_function_calls_eh_return
14564 || info->first_fp_reg_save == 64
14565 || FP_SAVE_INLINE (info->first_fp_reg_save));
14566 use_backchain_to_restore_sp = (frame_pointer_needed
14567 || current_function_calls_alloca
14568 || info->total_size > 32767);
14569 using_mfcr_multiple = (rs6000_cpu == PROCESSOR_PPC601
14570 || rs6000_cpu == PROCESSOR_PPC603
14571 || rs6000_cpu == PROCESSOR_PPC750
14574 if (WORLD_SAVE_P (info))
14578 const char *alloc_rname;
14581 /* eh_rest_world_r10 will return to the location saved in the LR
14582 stack slot (which is not likely to be our caller.)
14583 Input: R10 -- stack adjustment. Clobbers R0, R11, R12, R7, R8.
14584 rest_world is similar, except any R10 parameter is ignored.
14585 The exception-handling stuff that was here in 2.95 is no
14586 longer necessary. */
14590 + 32 - info->first_gp_reg_save
14591 + LAST_ALTIVEC_REGNO + 1 - info->first_altivec_reg_save
14592 + 63 + 1 - info->first_fp_reg_save);
14594 strcpy (rname, ((current_function_calls_eh_return) ?
14595 "*eh_rest_world_r10" : "*rest_world"));
14596 alloc_rname = ggc_strdup (rname);
14599 RTVEC_ELT (p, j++) = gen_rtx_RETURN (VOIDmode);
14600 RTVEC_ELT (p, j++) = gen_rtx_USE (VOIDmode,
14601 gen_rtx_REG (Pmode,
14602 LINK_REGISTER_REGNUM));
14604 = gen_rtx_USE (VOIDmode, gen_rtx_SYMBOL_REF (Pmode, alloc_rname));
14605 /* The instruction pattern requires a clobber here;
14606 it is shared with the restVEC helper. */
14608 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 11));
14611 /* CR register traditionally saved as CR2. */
14612 rtx reg = gen_rtx_REG (reg_mode, CR2_REGNO);
14613 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14614 GEN_INT (info->cr_save_offset));
14615 rtx mem = gen_rtx_MEM (reg_mode, addr);
14616 set_mem_alias_set (mem, rs6000_sr_alias_set);
14618 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
14621 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
14623 rtx reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
14624 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14625 GEN_INT (info->gp_save_offset
14627 rtx mem = gen_rtx_MEM (reg_mode, addr);
14628 set_mem_alias_set (mem, rs6000_sr_alias_set);
14630 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
14632 for (i = 0; info->first_altivec_reg_save + i <= LAST_ALTIVEC_REGNO; i++)
14634 rtx reg = gen_rtx_REG (V4SImode, info->first_altivec_reg_save + i);
14635 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14636 GEN_INT (info->altivec_save_offset
14638 rtx mem = gen_rtx_MEM (V4SImode, addr);
14639 set_mem_alias_set (mem, rs6000_sr_alias_set);
14641 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
14643 for (i = 0; info->first_fp_reg_save + i <= 63; i++)
14645 rtx reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
14646 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14647 GEN_INT (info->fp_save_offset
14649 rtx mem = gen_rtx_MEM (DFmode, addr);
14650 set_mem_alias_set (mem, rs6000_sr_alias_set);
14652 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
14655 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 0));
14657 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, 12));
14659 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, 7));
14661 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, 8));
14663 = gen_rtx_USE (VOIDmode, gen_rtx_REG (SImode, 10));
14664 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
14669 /* If we have a frame pointer, a call to alloca, or a large stack
14670 frame, restore the old stack pointer using the backchain. Otherwise,
14671 we know what size to update it with. */
14672 if (use_backchain_to_restore_sp)
14674 /* Under V.4, don't reset the stack pointer until after we're done
14675 loading the saved registers. */
14676 if (DEFAULT_ABI == ABI_V4)
14677 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
14679 emit_move_insn (frame_reg_rtx,
14680 gen_rtx_MEM (Pmode, sp_reg_rtx));
14683 else if (info->push_p)
14685 if (DEFAULT_ABI == ABI_V4
14686 || current_function_calls_eh_return)
14687 sp_offset = info->total_size;
14690 emit_insn (TARGET_32BIT
14691 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
14692 GEN_INT (info->total_size))
14693 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
14694 GEN_INT (info->total_size)));
14698 /* Restore AltiVec registers if needed. */
14699 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
14703 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
14704 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
14706 rtx addr, areg, mem;
14708 areg = gen_rtx_REG (Pmode, 0);
14710 (areg, GEN_INT (info->altivec_save_offset
14712 + 16 * (i - info->first_altivec_reg_save)));
14714 /* AltiVec addressing mode is [reg+reg]. */
14715 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
14716 mem = gen_rtx_MEM (V4SImode, addr);
14717 set_mem_alias_set (mem, rs6000_sr_alias_set);
14719 emit_move_insn (gen_rtx_REG (V4SImode, i), mem);
14723 /* Restore VRSAVE if needed. */
14724 if (TARGET_ALTIVEC && TARGET_ALTIVEC_VRSAVE
14725 && info->vrsave_mask != 0)
14727 rtx addr, mem, reg;
14729 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14730 GEN_INT (info->vrsave_save_offset + sp_offset));
14731 mem = gen_rtx_MEM (SImode, addr);
14732 set_mem_alias_set (mem, rs6000_sr_alias_set);
14733 reg = gen_rtx_REG (SImode, 12);
14734 emit_move_insn (reg, mem);
14736 emit_insn (generate_set_vrsave (reg, info, 1));
14739 /* Get the old lr if we saved it. */
14740 if (info->lr_save_p)
14742 rtx mem = gen_frame_mem_offset (Pmode, frame_reg_rtx,
14743 info->lr_save_offset + sp_offset);
14745 set_mem_alias_set (mem, rs6000_sr_alias_set);
14747 emit_move_insn (gen_rtx_REG (Pmode, 0), mem);
14750 /* Get the old cr if we saved it. */
14751 if (info->cr_save_p)
14753 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14754 GEN_INT (info->cr_save_offset + sp_offset));
14755 rtx mem = gen_rtx_MEM (SImode, addr);
14757 set_mem_alias_set (mem, rs6000_sr_alias_set);
14759 emit_move_insn (gen_rtx_REG (SImode, 12), mem);
14762 /* Set LR here to try to overlap restores below. */
14763 if (info->lr_save_p)
14764 emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
14765 gen_rtx_REG (Pmode, 0));
14767 /* Load exception handler data registers, if needed. */
14768 if (current_function_calls_eh_return)
14770 unsigned int i, regno;
14774 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14775 GEN_INT (sp_offset + 5 * reg_size));
14776 rtx mem = gen_rtx_MEM (reg_mode, addr);
14778 set_mem_alias_set (mem, rs6000_sr_alias_set);
14780 emit_move_insn (gen_rtx_REG (reg_mode, 2), mem);
14787 regno = EH_RETURN_DATA_REGNO (i);
14788 if (regno == INVALID_REGNUM)
14791 mem = gen_frame_mem_offset (reg_mode, frame_reg_rtx,
14792 info->ehrd_offset + sp_offset
14793 + reg_size * (int) i);
14794 set_mem_alias_set (mem, rs6000_sr_alias_set);
14796 emit_move_insn (gen_rtx_REG (reg_mode, regno), mem);
14800 /* Restore GPRs. This is done as a PARALLEL if we are using
14801 the load-multiple instructions. */
14802 if (using_load_multiple)
14805 p = rtvec_alloc (32 - info->first_gp_reg_save);
14806 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
14808 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14809 GEN_INT (info->gp_save_offset
14812 rtx mem = gen_rtx_MEM (reg_mode, addr);
14814 set_mem_alias_set (mem, rs6000_sr_alias_set);
14817 gen_rtx_SET (VOIDmode,
14818 gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
14821 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
14824 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
14825 if ((regs_ever_live[info->first_gp_reg_save + i]
14826 && (!call_used_regs[info->first_gp_reg_save + i]
14827 || (i + info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
14828 && TARGET_TOC && TARGET_MINIMAL_TOC)))
14829 || (i + info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
14830 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
14831 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
14833 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14834 GEN_INT (info->gp_save_offset
14837 rtx mem = gen_rtx_MEM (reg_mode, addr);
14839 /* Restore 64-bit quantities for SPE. */
14840 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
14842 int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
14845 if (!SPE_CONST_OFFSET_OK (offset))
14847 b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
14848 emit_move_insn (b, GEN_INT (offset));
14851 b = GEN_INT (offset);
14853 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
14854 mem = gen_rtx_MEM (V2SImode, addr);
14857 set_mem_alias_set (mem, rs6000_sr_alias_set);
14859 emit_move_insn (gen_rtx_REG (reg_mode,
14860 info->first_gp_reg_save + i), mem);
14863 /* Restore fpr's if we need to do it without calling a function. */
14864 if (restoring_FPRs_inline)
14865 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
14866 if ((regs_ever_live[info->first_fp_reg_save+i]
14867 && ! call_used_regs[info->first_fp_reg_save+i]))
14870 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14871 GEN_INT (info->fp_save_offset
14874 mem = gen_rtx_MEM (DFmode, addr);
14875 set_mem_alias_set (mem, rs6000_sr_alias_set);
14877 emit_move_insn (gen_rtx_REG (DFmode,
14878 info->first_fp_reg_save + i),
14882 /* If we saved cr, restore it here. Just those that were used. */
14883 if (info->cr_save_p)
14885 rtx r12_rtx = gen_rtx_REG (SImode, 12);
14888 if (using_mfcr_multiple)
14890 for (i = 0; i < 8; i++)
14891 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
14893 gcc_assert (count);
14896 if (using_mfcr_multiple && count > 1)
14901 p = rtvec_alloc (count);
14904 for (i = 0; i < 8; i++)
14905 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
14907 rtvec r = rtvec_alloc (2);
14908 RTVEC_ELT (r, 0) = r12_rtx;
14909 RTVEC_ELT (r, 1) = GEN_INT (1 << (7-i));
14910 RTVEC_ELT (p, ndx) =
14911 gen_rtx_SET (VOIDmode, gen_rtx_REG (CCmode, CR0_REGNO+i),
14912 gen_rtx_UNSPEC (CCmode, r, UNSPEC_MOVESI_TO_CR));
14915 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
14916 gcc_assert (ndx == count);
14919 for (i = 0; i < 8; i++)
14920 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
14922 emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode,
14928 /* If this is V.4, unwind the stack pointer after all of the loads
14929 have been done. We need to emit a block here so that sched
14930 doesn't decide to move the sp change before the register restores
14931 (which may not have any obvious dependency on the stack). This
14932 doesn't hurt performance, because there is no scheduling that can
14933 be done after this point. */
14934 if (DEFAULT_ABI == ABI_V4
14935 || current_function_calls_eh_return)
14937 if (frame_reg_rtx != sp_reg_rtx)
14938 rs6000_emit_stack_tie ();
14940 if (use_backchain_to_restore_sp)
14942 emit_move_insn (sp_reg_rtx, frame_reg_rtx);
14944 else if (sp_offset != 0)
14946 emit_insn (TARGET_32BIT
14947 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
14948 GEN_INT (sp_offset))
14949 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
14950 GEN_INT (sp_offset)));
14954 if (current_function_calls_eh_return)
14956 rtx sa = EH_RETURN_STACKADJ_RTX;
14957 emit_insn (TARGET_32BIT
14958 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx, sa)
14959 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx, sa));
14965 if (! restoring_FPRs_inline)
14966 p = rtvec_alloc (3 + 64 - info->first_fp_reg_save);
14968 p = rtvec_alloc (2);
14970 RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
14971 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
14972 gen_rtx_REG (Pmode,
14973 LINK_REGISTER_REGNUM));
14975 /* If we have to restore more than two FP registers, branch to the
14976 restore function. It will return to our caller. */
14977 if (! restoring_FPRs_inline)
14981 const char *alloc_rname;
14983 sprintf (rname, "%s%d%s", RESTORE_FP_PREFIX,
14984 info->first_fp_reg_save - 32, RESTORE_FP_SUFFIX);
14985 alloc_rname = ggc_strdup (rname);
14986 RTVEC_ELT (p, 2) = gen_rtx_USE (VOIDmode,
14987 gen_rtx_SYMBOL_REF (Pmode,
14990 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
14993 addr = gen_rtx_PLUS (Pmode, sp_reg_rtx,
14994 GEN_INT (info->fp_save_offset + 8*i));
14995 mem = gen_rtx_MEM (DFmode, addr);
14996 set_mem_alias_set (mem, rs6000_sr_alias_set);
14998 RTVEC_ELT (p, i+3) =
14999 gen_rtx_SET (VOIDmode,
15000 gen_rtx_REG (DFmode, info->first_fp_reg_save + i),
15005 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
15009 /* Write function epilogue. */
15012 rs6000_output_function_epilogue (FILE *file,
15013 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
15015 rs6000_stack_t *info = rs6000_stack_info ();
15017 if (! HAVE_epilogue)
15019 rtx insn = get_last_insn ();
15020 /* If the last insn was a BARRIER, we don't have to write anything except
15021 the trace table. */
15022 if (GET_CODE (insn) == NOTE)
15023 insn = prev_nonnote_insn (insn);
15024 if (insn == 0 || GET_CODE (insn) != BARRIER)
15026 /* This is slightly ugly, but at least we don't have two
15027 copies of the epilogue-emitting code. */
15030 /* A NOTE_INSN_DELETED is supposed to be at the start
15031 and end of the "toplevel" insn chain. */
15032 emit_note (NOTE_INSN_DELETED);
15033 rs6000_emit_epilogue (FALSE);
15034 emit_note (NOTE_INSN_DELETED);
15036 /* Expand INSN_ADDRESSES so final() doesn't crash. */
15040 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
15042 INSN_ADDRESSES_NEW (insn, addr);
15047 if (TARGET_DEBUG_STACK)
15048 debug_rtx_list (get_insns (), 100);
15049 final (get_insns (), file, FALSE);
15055 macho_branch_islands ();
15056 /* Mach-O doesn't support labels at the end of objects, so if
15057 it looks like we might want one, insert a NOP. */
15059 rtx insn = get_last_insn ();
15062 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_DELETED_LABEL)
15063 insn = PREV_INSN (insn);
15067 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_DELETED_LABEL)))
15068 fputs ("\tnop\n", file);
15072 /* Output a traceback table here. See /usr/include/sys/debug.h for info
15075 We don't output a traceback table if -finhibit-size-directive was
15076 used. The documentation for -finhibit-size-directive reads
15077 ``don't output a @code{.size} assembler directive, or anything
15078 else that would cause trouble if the function is split in the
15079 middle, and the two halves are placed at locations far apart in
15080 memory.'' The traceback table has this property, since it
15081 includes the offset from the start of the function to the
15082 traceback table itself.
15084 System V.4 Powerpc's (and the embedded ABI derived from it) use a
15085 different traceback table. */
15086 if (DEFAULT_ABI == ABI_AIX && ! flag_inhibit_size_directive
15087 && rs6000_traceback != traceback_none)
15089 const char *fname = NULL;
15090 const char *language_string = lang_hooks.name;
15091 int fixed_parms = 0, float_parms = 0, parm_info = 0;
15093 int optional_tbtab;
15095 if (rs6000_traceback == traceback_full)
15096 optional_tbtab = 1;
15097 else if (rs6000_traceback == traceback_part)
15098 optional_tbtab = 0;
15100 optional_tbtab = !optimize_size && !TARGET_ELF;
15102 if (optional_tbtab)
15104 fname = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
15105 while (*fname == '.') /* V.4 encodes . in the name */
15108 /* Need label immediately before tbtab, so we can compute
15109 its offset from the function start. */
15110 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
15111 ASM_OUTPUT_LABEL (file, fname);
15114 /* The .tbtab pseudo-op can only be used for the first eight
15115 expressions, since it can't handle the possibly variable
15116 length fields that follow. However, if you omit the optional
15117 fields, the assembler outputs zeros for all optional fields
15118 anyways, giving each variable length field is minimum length
15119 (as defined in sys/debug.h). Thus we can not use the .tbtab
15120 pseudo-op at all. */
15122 /* An all-zero word flags the start of the tbtab, for debuggers
15123 that have to find it by searching forward from the entry
15124 point or from the current pc. */
15125 fputs ("\t.long 0\n", file);
15127 /* Tbtab format type. Use format type 0. */
15128 fputs ("\t.byte 0,", file);
15130 /* Language type. Unfortunately, there does not seem to be any
15131 official way to discover the language being compiled, so we
15132 use language_string.
15133 C is 0. Fortran is 1. Pascal is 2. Ada is 3. C++ is 9.
15134 Java is 13. Objective-C is 14. */
15135 if (! strcmp (language_string, "GNU C"))
15137 else if (! strcmp (language_string, "GNU F77")
15138 || ! strcmp (language_string, "GNU F95"))
15140 else if (! strcmp (language_string, "GNU Pascal"))
15142 else if (! strcmp (language_string, "GNU Ada"))
15144 else if (! strcmp (language_string, "GNU C++"))
15146 else if (! strcmp (language_string, "GNU Java"))
15148 else if (! strcmp (language_string, "GNU Objective-C"))
15151 gcc_unreachable ();
15152 fprintf (file, "%d,", i);
15154 /* 8 single bit fields: global linkage (not set for C extern linkage,
15155 apparently a PL/I convention?), out-of-line epilogue/prologue, offset
15156 from start of procedure stored in tbtab, internal function, function
15157 has controlled storage, function has no toc, function uses fp,
15158 function logs/aborts fp operations. */
15159 /* Assume that fp operations are used if any fp reg must be saved. */
15160 fprintf (file, "%d,",
15161 (optional_tbtab << 5) | ((info->first_fp_reg_save != 64) << 1));
15163 /* 6 bitfields: function is interrupt handler, name present in
15164 proc table, function calls alloca, on condition directives
15165 (controls stack walks, 3 bits), saves condition reg, saves
15167 /* The `function calls alloca' bit seems to be set whenever reg 31 is
15168 set up as a frame pointer, even when there is no alloca call. */
15169 fprintf (file, "%d,",
15170 ((optional_tbtab << 6)
15171 | ((optional_tbtab & frame_pointer_needed) << 5)
15172 | (info->cr_save_p << 1)
15173 | (info->lr_save_p)));
15175 /* 3 bitfields: saves backchain, fixup code, number of fpr saved
15177 fprintf (file, "%d,",
15178 (info->push_p << 7) | (64 - info->first_fp_reg_save));
15180 /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits). */
15181 fprintf (file, "%d,", (32 - first_reg_to_save ()));
15183 if (optional_tbtab)
15185 /* Compute the parameter info from the function decl argument
15188 int next_parm_info_bit = 31;
15190 for (decl = DECL_ARGUMENTS (current_function_decl);
15191 decl; decl = TREE_CHAIN (decl))
15193 rtx parameter = DECL_INCOMING_RTL (decl);
15194 enum machine_mode mode = GET_MODE (parameter);
15196 if (GET_CODE (parameter) == REG)
15198 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
15216 gcc_unreachable ();
15219 /* If only one bit will fit, don't or in this entry. */
15220 if (next_parm_info_bit > 0)
15221 parm_info |= (bits << (next_parm_info_bit - 1));
15222 next_parm_info_bit -= 2;
15226 fixed_parms += ((GET_MODE_SIZE (mode)
15227 + (UNITS_PER_WORD - 1))
15229 next_parm_info_bit -= 1;
15235 /* Number of fixed point parameters. */
15236 /* This is actually the number of words of fixed point parameters; thus
15237 an 8 byte struct counts as 2; and thus the maximum value is 8. */
15238 fprintf (file, "%d,", fixed_parms);
15240 /* 2 bitfields: number of floating point parameters (7 bits), parameters
15242 /* This is actually the number of fp registers that hold parameters;
15243 and thus the maximum value is 13. */
15244 /* Set parameters on stack bit if parameters are not in their original
15245 registers, regardless of whether they are on the stack? Xlc
15246 seems to set the bit when not optimizing. */
15247 fprintf (file, "%d\n", ((float_parms << 1) | (! optimize)));
15249 if (! optional_tbtab)
15252 /* Optional fields follow. Some are variable length. */
15254 /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
15255 11 double float. */
15256 /* There is an entry for each parameter in a register, in the order that
15257 they occur in the parameter list. Any intervening arguments on the
15258 stack are ignored. If the list overflows a long (max possible length
15259 34 bits) then completely leave off all elements that don't fit. */
15260 /* Only emit this long if there was at least one parameter. */
15261 if (fixed_parms || float_parms)
15262 fprintf (file, "\t.long %d\n", parm_info);
15264 /* Offset from start of code to tb table. */
15265 fputs ("\t.long ", file);
15266 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
15268 RS6000_OUTPUT_BASENAME (file, fname);
15270 assemble_name (file, fname);
15272 rs6000_output_function_entry (file, fname);
15275 /* Interrupt handler mask. */
15276 /* Omit this long, since we never set the interrupt handler bit
15279 /* Number of CTL (controlled storage) anchors. */
15280 /* Omit this long, since the has_ctl bit is never set above. */
15282 /* Displacement into stack of each CTL anchor. */
15283 /* Omit this list of longs, because there are no CTL anchors. */
15285 /* Length of function name. */
15288 fprintf (file, "\t.short %d\n", (int) strlen (fname));
15290 /* Function name. */
15291 assemble_string (fname, strlen (fname));
15293 /* Register for alloca automatic storage; this is always reg 31.
15294 Only emit this if the alloca bit was set above. */
15295 if (frame_pointer_needed)
15296 fputs ("\t.byte 31\n", file);
15298 fputs ("\t.align 2\n", file);
15302 /* A C compound statement that outputs the assembler code for a thunk
15303 function, used to implement C++ virtual function calls with
15304 multiple inheritance. The thunk acts as a wrapper around a virtual
15305 function, adjusting the implicit object parameter before handing
15306 control off to the real function.
15308 First, emit code to add the integer DELTA to the location that
15309 contains the incoming first argument. Assume that this argument
15310 contains a pointer, and is the one used to pass the `this' pointer
15311 in C++. This is the incoming argument *before* the function
15312 prologue, e.g. `%o0' on a sparc. The addition must preserve the
15313 values of all other incoming arguments.
15315 After the addition, emit code to jump to FUNCTION, which is a
15316 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does
15317 not touch the return address. Hence returning from FUNCTION will
15318 return to whoever called the current `thunk'.
15320 The effect must be as if FUNCTION had been called directly with the
15321 adjusted first argument. This macro is responsible for emitting
15322 all of the code for a thunk function; output_function_prologue()
15323 and output_function_epilogue() are not invoked.
15325 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already
15326 been extracted from it.) It might possibly be useful on some
15327 targets, but probably not.
15329 If you do not define this macro, the target-independent code in the
15330 C++ frontend will generate a less efficient heavyweight thunk that
15331 calls FUNCTION instead of jumping to it. The generic approach does
15332 not support varargs. */
15335 rs6000_output_mi_thunk (FILE *file, tree thunk_fndecl ATTRIBUTE_UNUSED,
15336 HOST_WIDE_INT delta, HOST_WIDE_INT vcall_offset,
15339 rtx this, insn, funexp;
15341 reload_completed = 1;
15342 epilogue_completed = 1;
15343 no_new_pseudos = 1;
15344 reset_block_changes ();
15346 /* Mark the end of the (empty) prologue. */
15347 emit_note (NOTE_INSN_PROLOGUE_END);
15349 /* Find the "this" pointer. If the function returns a structure,
15350 the structure return pointer is in r3. */
15351 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
15352 this = gen_rtx_REG (Pmode, 4);
15354 this = gen_rtx_REG (Pmode, 3);
15356 /* Apply the constant offset, if required. */
15359 rtx delta_rtx = GEN_INT (delta);
15360 emit_insn (TARGET_32BIT
15361 ? gen_addsi3 (this, this, delta_rtx)
15362 : gen_adddi3 (this, this, delta_rtx));
15365 /* Apply the offset from the vtable, if required. */
15368 rtx vcall_offset_rtx = GEN_INT (vcall_offset);
15369 rtx tmp = gen_rtx_REG (Pmode, 12);
15371 emit_move_insn (tmp, gen_rtx_MEM (Pmode, this));
15372 if (((unsigned HOST_WIDE_INT) vcall_offset) + 0x8000 >= 0x10000)
15374 emit_insn (TARGET_32BIT
15375 ? gen_addsi3 (tmp, tmp, vcall_offset_rtx)
15376 : gen_adddi3 (tmp, tmp, vcall_offset_rtx));
15377 emit_move_insn (tmp, gen_rtx_MEM (Pmode, tmp));
15381 rtx loc = gen_rtx_PLUS (Pmode, tmp, vcall_offset_rtx);
15383 emit_move_insn (tmp, gen_rtx_MEM (Pmode, loc));
15385 emit_insn (TARGET_32BIT
15386 ? gen_addsi3 (this, this, tmp)
15387 : gen_adddi3 (this, this, tmp));
15390 /* Generate a tail call to the target function. */
15391 if (!TREE_USED (function))
15393 assemble_external (function);
15394 TREE_USED (function) = 1;
15396 funexp = XEXP (DECL_RTL (function), 0);
15397 funexp = gen_rtx_MEM (FUNCTION_MODE, funexp);
15400 if (MACHOPIC_INDIRECT)
15401 funexp = machopic_indirect_call_target (funexp);
15404 /* gen_sibcall expects reload to convert scratch pseudo to LR so we must
15405 generate sibcall RTL explicitly. */
15406 insn = emit_call_insn (
15407 gen_rtx_PARALLEL (VOIDmode,
15409 gen_rtx_CALL (VOIDmode,
15410 funexp, const0_rtx),
15411 gen_rtx_USE (VOIDmode, const0_rtx),
15412 gen_rtx_USE (VOIDmode,
15413 gen_rtx_REG (SImode,
15414 LINK_REGISTER_REGNUM)),
15415 gen_rtx_RETURN (VOIDmode))));
15416 SIBLING_CALL_P (insn) = 1;
15419 /* Run just enough of rest_of_compilation to get the insns emitted.
15420 There's not really enough bulk here to make other passes such as
15421 instruction scheduling worth while. Note that use_thunk calls
15422 assemble_start_function and assemble_end_function. */
15423 insn = get_insns ();
15424 insn_locators_initialize ();
15425 shorten_branches (insn);
15426 final_start_function (insn, file, 1);
15427 final (insn, file, 1);
15428 final_end_function ();
15430 reload_completed = 0;
15431 epilogue_completed = 0;
15432 no_new_pseudos = 0;
15435 /* A quick summary of the various types of 'constant-pool tables'
15438 Target Flags Name One table per
15439 AIX (none) AIX TOC object file
15440 AIX -mfull-toc AIX TOC object file
15441 AIX -mminimal-toc AIX minimal TOC translation unit
15442 SVR4/EABI (none) SVR4 SDATA object file
15443 SVR4/EABI -fpic SVR4 pic object file
15444 SVR4/EABI -fPIC SVR4 PIC translation unit
15445 SVR4/EABI -mrelocatable EABI TOC function
15446 SVR4/EABI -maix AIX TOC object file
15447 SVR4/EABI -maix -mminimal-toc
15448 AIX minimal TOC translation unit
15450 Name Reg. Set by entries contains:
15451 made by addrs? fp? sum?
15453 AIX TOC 2 crt0 as Y option option
15454 AIX minimal TOC 30 prolog gcc Y Y option
15455 SVR4 SDATA 13 crt0 gcc N Y N
15456 SVR4 pic 30 prolog ld Y not yet N
15457 SVR4 PIC 30 prolog gcc Y option option
15458 EABI TOC 30 prolog gcc Y option option
15462 /* Hash functions for the hash table. */
15465 rs6000_hash_constant (rtx k)
15467 enum rtx_code code = GET_CODE (k);
15468 enum machine_mode mode = GET_MODE (k);
15469 unsigned result = (code << 3) ^ mode;
15470 const char *format;
15473 format = GET_RTX_FORMAT (code);
15474 flen = strlen (format);
15480 return result * 1231 + (unsigned) INSN_UID (XEXP (k, 0));
15483 if (mode != VOIDmode)
15484 return real_hash (CONST_DOUBLE_REAL_VALUE (k)) * result;
15496 for (; fidx < flen; fidx++)
15497 switch (format[fidx])
15502 const char *str = XSTR (k, fidx);
15503 len = strlen (str);
15504 result = result * 613 + len;
15505 for (i = 0; i < len; i++)
15506 result = result * 613 + (unsigned) str[i];
15511 result = result * 1231 + rs6000_hash_constant (XEXP (k, fidx));
15515 result = result * 613 + (unsigned) XINT (k, fidx);
15518 if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT))
15519 result = result * 613 + (unsigned) XWINT (k, fidx);
15523 for (i = 0; i < sizeof (HOST_WIDE_INT) / sizeof (unsigned); i++)
15524 result = result * 613 + (unsigned) (XWINT (k, fidx)
15531 gcc_unreachable ();
15538 toc_hash_function (const void *hash_entry)
15540 const struct toc_hash_struct *thc =
15541 (const struct toc_hash_struct *) hash_entry;
15542 return rs6000_hash_constant (thc->key) ^ thc->key_mode;
15545 /* Compare H1 and H2 for equivalence. */
15548 toc_hash_eq (const void *h1, const void *h2)
15550 rtx r1 = ((const struct toc_hash_struct *) h1)->key;
15551 rtx r2 = ((const struct toc_hash_struct *) h2)->key;
15553 if (((const struct toc_hash_struct *) h1)->key_mode
15554 != ((const struct toc_hash_struct *) h2)->key_mode)
15557 return rtx_equal_p (r1, r2);
15560 /* These are the names given by the C++ front-end to vtables, and
15561 vtable-like objects. Ideally, this logic should not be here;
15562 instead, there should be some programmatic way of inquiring as
15563 to whether or not an object is a vtable. */
15565 #define VTABLE_NAME_P(NAME) \
15566 (strncmp ("_vt.", name, strlen ("_vt.")) == 0 \
15567 || strncmp ("_ZTV", name, strlen ("_ZTV")) == 0 \
15568 || strncmp ("_ZTT", name, strlen ("_ZTT")) == 0 \
15569 || strncmp ("_ZTI", name, strlen ("_ZTI")) == 0 \
15570 || strncmp ("_ZTC", name, strlen ("_ZTC")) == 0)
15573 rs6000_output_symbol_ref (FILE *file, rtx x)
15575 /* Currently C++ toc references to vtables can be emitted before it
15576 is decided whether the vtable is public or private. If this is
15577 the case, then the linker will eventually complain that there is
15578 a reference to an unknown section. Thus, for vtables only,
15579 we emit the TOC reference to reference the symbol and not the
15581 const char *name = XSTR (x, 0);
15583 if (VTABLE_NAME_P (name))
15585 RS6000_OUTPUT_BASENAME (file, name);
15588 assemble_name (file, name);
15591 /* Output a TOC entry. We derive the entry name from what is being
15595 output_toc (FILE *file, rtx x, int labelno, enum machine_mode mode)
15598 const char *name = buf;
15599 const char *real_name;
15603 gcc_assert (!TARGET_NO_TOC);
15605 /* When the linker won't eliminate them, don't output duplicate
15606 TOC entries (this happens on AIX if there is any kind of TOC,
15607 and on SVR4 under -fPIC or -mrelocatable). Don't do this for
15609 if (TARGET_TOC && GET_CODE (x) != LABEL_REF)
15611 struct toc_hash_struct *h;
15614 /* Create toc_hash_table. This can't be done at OVERRIDE_OPTIONS
15615 time because GGC is not initialized at that point. */
15616 if (toc_hash_table == NULL)
15617 toc_hash_table = htab_create_ggc (1021, toc_hash_function,
15618 toc_hash_eq, NULL);
15620 h = ggc_alloc (sizeof (*h));
15622 h->key_mode = mode;
15623 h->labelno = labelno;
15625 found = htab_find_slot (toc_hash_table, h, 1);
15626 if (*found == NULL)
15628 else /* This is indeed a duplicate.
15629 Set this label equal to that label. */
15631 fputs ("\t.set ", file);
15632 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
15633 fprintf (file, "%d,", labelno);
15634 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
15635 fprintf (file, "%d\n", ((*(const struct toc_hash_struct **)
15641 /* If we're going to put a double constant in the TOC, make sure it's
15642 aligned properly when strict alignment is on. */
15643 if (GET_CODE (x) == CONST_DOUBLE
15644 && STRICT_ALIGNMENT
15645 && GET_MODE_BITSIZE (mode) >= 64
15646 && ! (TARGET_NO_FP_IN_TOC && ! TARGET_MINIMAL_TOC)) {
15647 ASM_OUTPUT_ALIGN (file, 3);
15650 (*targetm.asm_out.internal_label) (file, "LC", labelno);
15652 /* Handle FP constants specially. Note that if we have a minimal
15653 TOC, things we put here aren't actually in the TOC, so we can allow
15655 if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == TFmode)
15657 REAL_VALUE_TYPE rv;
15660 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
15661 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);
15665 if (TARGET_MINIMAL_TOC)
15666 fputs (DOUBLE_INT_ASM_OP, file);
15668 fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
15669 k[0] & 0xffffffff, k[1] & 0xffffffff,
15670 k[2] & 0xffffffff, k[3] & 0xffffffff);
15671 fprintf (file, "0x%lx%08lx,0x%lx%08lx\n",
15672 k[0] & 0xffffffff, k[1] & 0xffffffff,
15673 k[2] & 0xffffffff, k[3] & 0xffffffff);
15678 if (TARGET_MINIMAL_TOC)
15679 fputs ("\t.long ", file);
15681 fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
15682 k[0] & 0xffffffff, k[1] & 0xffffffff,
15683 k[2] & 0xffffffff, k[3] & 0xffffffff);
15684 fprintf (file, "0x%lx,0x%lx,0x%lx,0x%lx\n",
15685 k[0] & 0xffffffff, k[1] & 0xffffffff,
15686 k[2] & 0xffffffff, k[3] & 0xffffffff);
15690 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
15692 REAL_VALUE_TYPE rv;
15695 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
15696 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
15700 if (TARGET_MINIMAL_TOC)
15701 fputs (DOUBLE_INT_ASM_OP, file);
15703 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
15704 k[0] & 0xffffffff, k[1] & 0xffffffff);
15705 fprintf (file, "0x%lx%08lx\n",
15706 k[0] & 0xffffffff, k[1] & 0xffffffff);
15711 if (TARGET_MINIMAL_TOC)
15712 fputs ("\t.long ", file);
15714 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
15715 k[0] & 0xffffffff, k[1] & 0xffffffff);
15716 fprintf (file, "0x%lx,0x%lx\n",
15717 k[0] & 0xffffffff, k[1] & 0xffffffff);
15721 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
15723 REAL_VALUE_TYPE rv;
15726 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
15727 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
15731 if (TARGET_MINIMAL_TOC)
15732 fputs (DOUBLE_INT_ASM_OP, file);
15734 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
15735 fprintf (file, "0x%lx00000000\n", l & 0xffffffff);
15740 if (TARGET_MINIMAL_TOC)
15741 fputs ("\t.long ", file);
15743 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
15744 fprintf (file, "0x%lx\n", l & 0xffffffff);
15748 else if (GET_MODE (x) == VOIDmode
15749 && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
15751 unsigned HOST_WIDE_INT low;
15752 HOST_WIDE_INT high;
15754 if (GET_CODE (x) == CONST_DOUBLE)
15756 low = CONST_DOUBLE_LOW (x);
15757 high = CONST_DOUBLE_HIGH (x);
15760 #if HOST_BITS_PER_WIDE_INT == 32
15763 high = (low & 0x80000000) ? ~0 : 0;
15767 low = INTVAL (x) & 0xffffffff;
15768 high = (HOST_WIDE_INT) INTVAL (x) >> 32;
15772 /* TOC entries are always Pmode-sized, but since this
15773 is a bigendian machine then if we're putting smaller
15774 integer constants in the TOC we have to pad them.
15775 (This is still a win over putting the constants in
15776 a separate constant pool, because then we'd have
15777 to have both a TOC entry _and_ the actual constant.)
15779 For a 32-bit target, CONST_INT values are loaded and shifted
15780 entirely within `low' and can be stored in one TOC entry. */
15782 /* It would be easy to make this work, but it doesn't now. */
15783 gcc_assert (!TARGET_64BIT || POINTER_SIZE >= GET_MODE_BITSIZE (mode));
15785 if (POINTER_SIZE > GET_MODE_BITSIZE (mode))
15787 #if HOST_BITS_PER_WIDE_INT == 32
15788 lshift_double (low, high, POINTER_SIZE - GET_MODE_BITSIZE (mode),
15789 POINTER_SIZE, &low, &high, 0);
15792 low <<= POINTER_SIZE - GET_MODE_BITSIZE (mode);
15793 high = (HOST_WIDE_INT) low >> 32;
15800 if (TARGET_MINIMAL_TOC)
15801 fputs (DOUBLE_INT_ASM_OP, file);
15803 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
15804 (long) high & 0xffffffff, (long) low & 0xffffffff);
15805 fprintf (file, "0x%lx%08lx\n",
15806 (long) high & 0xffffffff, (long) low & 0xffffffff);
15811 if (POINTER_SIZE < GET_MODE_BITSIZE (mode))
15813 if (TARGET_MINIMAL_TOC)
15814 fputs ("\t.long ", file);
15816 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
15817 (long) high & 0xffffffff, (long) low & 0xffffffff);
15818 fprintf (file, "0x%lx,0x%lx\n",
15819 (long) high & 0xffffffff, (long) low & 0xffffffff);
15823 if (TARGET_MINIMAL_TOC)
15824 fputs ("\t.long ", file);
15826 fprintf (file, "\t.tc IS_%lx[TC],", (long) low & 0xffffffff);
15827 fprintf (file, "0x%lx\n", (long) low & 0xffffffff);
15833 if (GET_CODE (x) == CONST)
15835 gcc_assert (GET_CODE (XEXP (x, 0)) == PLUS);
15837 base = XEXP (XEXP (x, 0), 0);
15838 offset = INTVAL (XEXP (XEXP (x, 0), 1));
15841 switch (GET_CODE (base))
15844 name = XSTR (base, 0);
15848 ASM_GENERATE_INTERNAL_LABEL (buf, "L",
15849 CODE_LABEL_NUMBER (XEXP (base, 0)));
15853 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (base));
15857 gcc_unreachable ();
15860 real_name = (*targetm.strip_name_encoding) (name);
15861 if (TARGET_MINIMAL_TOC)
15862 fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
15865 fprintf (file, "\t.tc %s", real_name);
15868 fprintf (file, ".N%d", - offset);
15870 fprintf (file, ".P%d", offset);
15872 fputs ("[TC],", file);
15875 /* Currently C++ toc references to vtables can be emitted before it
15876 is decided whether the vtable is public or private. If this is
15877 the case, then the linker will eventually complain that there is
15878 a TOC reference to an unknown section. Thus, for vtables only,
15879 we emit the TOC reference to reference the symbol and not the
15881 if (VTABLE_NAME_P (name))
15883 RS6000_OUTPUT_BASENAME (file, name);
15885 fprintf (file, "%d", offset);
15886 else if (offset > 0)
15887 fprintf (file, "+%d", offset);
15890 output_addr_const (file, x);
15894 /* Output an assembler pseudo-op to write an ASCII string of N characters
15895 starting at P to FILE.
15897 On the RS/6000, we have to do this using the .byte operation and
15898 write out special characters outside the quoted string.
15899 Also, the assembler is broken; very long strings are truncated,
15900 so we must artificially break them up early. */
15903 output_ascii (FILE *file, const char *p, int n)
15906 int i, count_string;
15907 const char *for_string = "\t.byte \"";
15908 const char *for_decimal = "\t.byte ";
15909 const char *to_close = NULL;
15912 for (i = 0; i < n; i++)
15915 if (c >= ' ' && c < 0177)
15918 fputs (for_string, file);
15921 /* Write two quotes to get one. */
15929 for_decimal = "\"\n\t.byte ";
15933 if (count_string >= 512)
15935 fputs (to_close, file);
15937 for_string = "\t.byte \"";
15938 for_decimal = "\t.byte ";
15946 fputs (for_decimal, file);
15947 fprintf (file, "%d", c);
15949 for_string = "\n\t.byte \"";
15950 for_decimal = ", ";
15956 /* Now close the string if we have written one. Then end the line. */
15958 fputs (to_close, file);
15961 /* Generate a unique section name for FILENAME for a section type
15962 represented by SECTION_DESC. Output goes into BUF.
15964 SECTION_DESC can be any string, as long as it is different for each
15965 possible section type.
15967 We name the section in the same manner as xlc. The name begins with an
15968 underscore followed by the filename (after stripping any leading directory
15969 names) with the last period replaced by the string SECTION_DESC. If
15970 FILENAME does not contain a period, SECTION_DESC is appended to the end of
15974 rs6000_gen_section_name (char **buf, const char *filename,
15975 const char *section_desc)
15977 const char *q, *after_last_slash, *last_period = 0;
15981 after_last_slash = filename;
15982 for (q = filename; *q; q++)
15985 after_last_slash = q + 1;
15986 else if (*q == '.')
15990 len = strlen (after_last_slash) + strlen (section_desc) + 2;
15991 *buf = (char *) xmalloc (len);
15996 for (q = after_last_slash; *q; q++)
15998 if (q == last_period)
16000 strcpy (p, section_desc);
16001 p += strlen (section_desc);
16005 else if (ISALNUM (*q))
16009 if (last_period == 0)
16010 strcpy (p, section_desc);
16015 /* Emit profile function. */
16018 output_profile_hook (int labelno ATTRIBUTE_UNUSED)
16020 /* Non-standard profiling for kernels, which just saves LR then calls
16021 _mcount without worrying about arg saves. The idea is to change
16022 the function prologue as little as possible as it isn't easy to
16023 account for arg save/restore code added just for _mcount. */
16024 if (TARGET_PROFILE_KERNEL)
16027 if (DEFAULT_ABI == ABI_AIX)
16029 #ifndef NO_PROFILE_COUNTERS
16030 # define NO_PROFILE_COUNTERS 0
16032 if (NO_PROFILE_COUNTERS)
16033 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 0);
16037 const char *label_name;
16040 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
16041 label_name = (*targetm.strip_name_encoding) (ggc_strdup (buf));
16042 fun = gen_rtx_SYMBOL_REF (Pmode, label_name);
16044 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 1,
16048 else if (DEFAULT_ABI == ABI_DARWIN)
16050 const char *mcount_name = RS6000_MCOUNT;
16051 int caller_addr_regno = LINK_REGISTER_REGNUM;
16053 /* Be conservative and always set this, at least for now. */
16054 current_function_uses_pic_offset_table = 1;
16057 /* For PIC code, set up a stub and collect the caller's address
16058 from r0, which is where the prologue puts it. */
16059 if (MACHOPIC_INDIRECT
16060 && current_function_uses_pic_offset_table)
16061 caller_addr_regno = 0;
16063 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, mcount_name),
16065 gen_rtx_REG (Pmode, caller_addr_regno), Pmode);
16069 /* Write function profiler code. */
16072 output_function_profiler (FILE *file, int labelno)
16076 switch (DEFAULT_ABI)
16079 gcc_unreachable ();
16084 warning (0, "no profiling of 64-bit code for this ABI");
16087 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
16088 fprintf (file, "\tmflr %s\n", reg_names[0]);
16089 if (NO_PROFILE_COUNTERS)
16091 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
16092 reg_names[0], reg_names[1]);
16094 else if (TARGET_SECURE_PLT && flag_pic)
16096 asm_fprintf (file, "\tbcl 20,31,1f\n1:\n\t{st|stw} %s,4(%s)\n",
16097 reg_names[0], reg_names[1]);
16098 asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
16099 asm_fprintf (file, "\t{cau|addis} %s,%s,",
16100 reg_names[12], reg_names[12]);
16101 assemble_name (file, buf);
16102 asm_fprintf (file, "-1b@ha\n\t{cal|la} %s,", reg_names[0]);
16103 assemble_name (file, buf);
16104 asm_fprintf (file, "-1b@l(%s)\n", reg_names[12]);
16106 else if (flag_pic == 1)
16108 fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file);
16109 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
16110 reg_names[0], reg_names[1]);
16111 asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
16112 asm_fprintf (file, "\t{l|lwz} %s,", reg_names[0]);
16113 assemble_name (file, buf);
16114 asm_fprintf (file, "@got(%s)\n", reg_names[12]);
16116 else if (flag_pic > 1)
16118 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
16119 reg_names[0], reg_names[1]);
16120 /* Now, we need to get the address of the label. */
16121 fputs ("\tbcl 20,31,1f\n\t.long ", file);
16122 assemble_name (file, buf);
16123 fputs ("-.\n1:", file);
16124 asm_fprintf (file, "\tmflr %s\n", reg_names[11]);
16125 asm_fprintf (file, "\t{l|lwz} %s,0(%s)\n",
16126 reg_names[0], reg_names[11]);
16127 asm_fprintf (file, "\t{cax|add} %s,%s,%s\n",
16128 reg_names[0], reg_names[0], reg_names[11]);
16132 asm_fprintf (file, "\t{liu|lis} %s,", reg_names[12]);
16133 assemble_name (file, buf);
16134 fputs ("@ha\n", file);
16135 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
16136 reg_names[0], reg_names[1]);
16137 asm_fprintf (file, "\t{cal|la} %s,", reg_names[0]);
16138 assemble_name (file, buf);
16139 asm_fprintf (file, "@l(%s)\n", reg_names[12]);
16142 /* ABI_V4 saves the static chain reg with ASM_OUTPUT_REG_PUSH. */
16143 fprintf (file, "\tbl %s%s\n",
16144 RS6000_MCOUNT, flag_pic ? "@plt" : "");
16149 if (!TARGET_PROFILE_KERNEL)
16151 /* Don't do anything, done in output_profile_hook (). */
16155 gcc_assert (!TARGET_32BIT);
16157 asm_fprintf (file, "\tmflr %s\n", reg_names[0]);
16158 asm_fprintf (file, "\tstd %s,16(%s)\n", reg_names[0], reg_names[1]);
16160 if (cfun->static_chain_decl != NULL)
16162 asm_fprintf (file, "\tstd %s,24(%s)\n",
16163 reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
16164 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
16165 asm_fprintf (file, "\tld %s,24(%s)\n",
16166 reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
16169 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
16176 /* Power4 load update and store update instructions are cracked into a
16177 load or store and an integer insn which are executed in the same cycle.
16178 Branches have their own dispatch slot which does not count against the
16179 GCC issue rate, but it changes the program flow so there are no other
16180 instructions to issue in this cycle. */
16183 rs6000_variable_issue (FILE *stream ATTRIBUTE_UNUSED,
16184 int verbose ATTRIBUTE_UNUSED,
16185 rtx insn, int more)
16187 if (GET_CODE (PATTERN (insn)) == USE
16188 || GET_CODE (PATTERN (insn)) == CLOBBER)
16191 if (rs6000_sched_groups)
16193 if (is_microcoded_insn (insn))
16195 else if (is_cracked_insn (insn))
16196 return more > 2 ? more - 2 : 0;
16202 /* Adjust the cost of a scheduling dependency. Return the new cost of
16203 a dependency LINK or INSN on DEP_INSN. COST is the current cost. */
16206 rs6000_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost)
16208 if (! recog_memoized (insn))
16211 if (REG_NOTE_KIND (link) != 0)
16214 if (REG_NOTE_KIND (link) == 0)
16216 /* Data dependency; DEP_INSN writes a register that INSN reads
16217 some cycles later. */
16219 /* Separate a load from a narrower, dependent store. */
16220 if (rs6000_sched_groups
16221 && GET_CODE (PATTERN (insn)) == SET
16222 && GET_CODE (PATTERN (dep_insn)) == SET
16223 && GET_CODE (XEXP (PATTERN (insn), 1)) == MEM
16224 && GET_CODE (XEXP (PATTERN (dep_insn), 0)) == MEM
16225 && (GET_MODE_SIZE (GET_MODE (XEXP (PATTERN (insn), 1)))
16226 > GET_MODE_SIZE (GET_MODE (XEXP (PATTERN (dep_insn), 0)))))
16229 switch (get_attr_type (insn))
16232 /* Tell the first scheduling pass about the latency between
16233 a mtctr and bctr (and mtlr and br/blr). The first
16234 scheduling pass will not know about this latency since
16235 the mtctr instruction, which has the latency associated
16236 to it, will be generated by reload. */
16237 return TARGET_POWER ? 5 : 4;
16239 /* Leave some extra cycles between a compare and its
16240 dependent branch, to inhibit expensive mispredicts. */
16241 if ((rs6000_cpu_attr == CPU_PPC603
16242 || rs6000_cpu_attr == CPU_PPC604
16243 || rs6000_cpu_attr == CPU_PPC604E
16244 || rs6000_cpu_attr == CPU_PPC620
16245 || rs6000_cpu_attr == CPU_PPC630
16246 || rs6000_cpu_attr == CPU_PPC750
16247 || rs6000_cpu_attr == CPU_PPC7400
16248 || rs6000_cpu_attr == CPU_PPC7450
16249 || rs6000_cpu_attr == CPU_POWER4
16250 || rs6000_cpu_attr == CPU_POWER5)
16251 && recog_memoized (dep_insn)
16252 && (INSN_CODE (dep_insn) >= 0)
16253 && (get_attr_type (dep_insn) == TYPE_CMP
16254 || get_attr_type (dep_insn) == TYPE_COMPARE
16255 || get_attr_type (dep_insn) == TYPE_DELAYED_COMPARE
16256 || get_attr_type (dep_insn) == TYPE_IMUL_COMPARE
16257 || get_attr_type (dep_insn) == TYPE_LMUL_COMPARE
16258 || get_attr_type (dep_insn) == TYPE_FPCOMPARE
16259 || get_attr_type (dep_insn) == TYPE_CR_LOGICAL
16260 || get_attr_type (dep_insn) == TYPE_DELAYED_CR))
16265 /* Fall out to return default cost. */
16271 /* The function returns a true if INSN is microcoded.
16272 Return false otherwise. */
16275 is_microcoded_insn (rtx insn)
16277 if (!insn || !INSN_P (insn)
16278 || GET_CODE (PATTERN (insn)) == USE
16279 || GET_CODE (PATTERN (insn)) == CLOBBER)
16282 if (rs6000_sched_groups)
16284 enum attr_type type = get_attr_type (insn);
16285 if (type == TYPE_LOAD_EXT_U
16286 || type == TYPE_LOAD_EXT_UX
16287 || type == TYPE_LOAD_UX
16288 || type == TYPE_STORE_UX
16289 || type == TYPE_MFCR)
16296 /* The function returns a nonzero value if INSN can be scheduled only
16297 as the first insn in a dispatch group ("dispatch-slot restricted").
16298 In this case, the returned value indicates how many dispatch slots
16299 the insn occupies (at the beginning of the group).
16300 Return 0 otherwise. */
16303 is_dispatch_slot_restricted (rtx insn)
16305 enum attr_type type;
16307 if (!rs6000_sched_groups)
16311 || insn == NULL_RTX
16312 || GET_CODE (insn) == NOTE
16313 || GET_CODE (PATTERN (insn)) == USE
16314 || GET_CODE (PATTERN (insn)) == CLOBBER)
16317 type = get_attr_type (insn);
16324 case TYPE_DELAYED_CR:
16325 case TYPE_CR_LOGICAL:
16338 if (rs6000_cpu == PROCESSOR_POWER5
16339 && is_cracked_insn (insn))
16345 /* The function returns true if INSN is cracked into 2 instructions
16346 by the processor (and therefore occupies 2 issue slots). */
16349 is_cracked_insn (rtx insn)
16351 if (!insn || !INSN_P (insn)
16352 || GET_CODE (PATTERN (insn)) == USE
16353 || GET_CODE (PATTERN (insn)) == CLOBBER)
16356 if (rs6000_sched_groups)
16358 enum attr_type type = get_attr_type (insn);
16359 if (type == TYPE_LOAD_U || type == TYPE_STORE_U
16360 || type == TYPE_FPLOAD_U || type == TYPE_FPSTORE_U
16361 || type == TYPE_FPLOAD_UX || type == TYPE_FPSTORE_UX
16362 || type == TYPE_LOAD_EXT || type == TYPE_DELAYED_CR
16363 || type == TYPE_COMPARE || type == TYPE_DELAYED_COMPARE
16364 || type == TYPE_IMUL_COMPARE || type == TYPE_LMUL_COMPARE
16365 || type == TYPE_IDIV || type == TYPE_LDIV
16366 || type == TYPE_INSERT_WORD)
16373 /* The function returns true if INSN can be issued only from
16374 the branch slot. */
16377 is_branch_slot_insn (rtx insn)
16379 if (!insn || !INSN_P (insn)
16380 || GET_CODE (PATTERN (insn)) == USE
16381 || GET_CODE (PATTERN (insn)) == CLOBBER)
16384 if (rs6000_sched_groups)
16386 enum attr_type type = get_attr_type (insn);
16387 if (type == TYPE_BRANCH || type == TYPE_JMPREG)
16395 /* A C statement (sans semicolon) to update the integer scheduling
16396 priority INSN_PRIORITY (INSN). Increase the priority to execute the
16397 INSN earlier, reduce the priority to execute INSN later. Do not
16398 define this macro if you do not need to adjust the scheduling
16399 priorities of insns. */
16402 rs6000_adjust_priority (rtx insn ATTRIBUTE_UNUSED, int priority)
16404 /* On machines (like the 750) which have asymmetric integer units,
16405 where one integer unit can do multiply and divides and the other
16406 can't, reduce the priority of multiply/divide so it is scheduled
16407 before other integer operations. */
16410 if (! INSN_P (insn))
16413 if (GET_CODE (PATTERN (insn)) == USE)
16416 switch (rs6000_cpu_attr) {
16418 switch (get_attr_type (insn))
16425 fprintf (stderr, "priority was %#x (%d) before adjustment\n",
16426 priority, priority);
16427 if (priority >= 0 && priority < 0x01000000)
16434 if (is_dispatch_slot_restricted (insn)
16435 && reload_completed
16436 && current_sched_info->sched_max_insns_priority
16437 && rs6000_sched_restricted_insns_priority)
16440 /* Prioritize insns that can be dispatched only in the first
16442 if (rs6000_sched_restricted_insns_priority == 1)
16443 /* Attach highest priority to insn. This means that in
16444 haifa-sched.c:ready_sort(), dispatch-slot restriction considerations
16445 precede 'priority' (critical path) considerations. */
16446 return current_sched_info->sched_max_insns_priority;
16447 else if (rs6000_sched_restricted_insns_priority == 2)
16448 /* Increase priority of insn by a minimal amount. This means that in
16449 haifa-sched.c:ready_sort(), only 'priority' (critical path)
16450 considerations precede dispatch-slot restriction considerations. */
16451 return (priority + 1);
16457 /* Return how many instructions the machine can issue per cycle. */
16460 rs6000_issue_rate (void)
16462 /* Use issue rate of 1 for first scheduling pass to decrease degradation. */
16463 if (!reload_completed)
16466 switch (rs6000_cpu_attr) {
16467 case CPU_RIOS1: /* ? */
16469 case CPU_PPC601: /* ? */
16492 /* Return how many instructions to look ahead for better insn
16496 rs6000_use_sched_lookahead (void)
16498 if (rs6000_cpu_attr == CPU_PPC8540)
16503 /* Determine is PAT refers to memory. */
16506 is_mem_ref (rtx pat)
16512 if (GET_CODE (pat) == MEM)
16515 /* Recursively process the pattern. */
16516 fmt = GET_RTX_FORMAT (GET_CODE (pat));
16518 for (i = GET_RTX_LENGTH (GET_CODE (pat)) - 1; i >= 0 && !ret; i--)
16521 ret |= is_mem_ref (XEXP (pat, i));
16522 else if (fmt[i] == 'E')
16523 for (j = XVECLEN (pat, i) - 1; j >= 0; j--)
16524 ret |= is_mem_ref (XVECEXP (pat, i, j));
16530 /* Determine if PAT is a PATTERN of a load insn. */
16533 is_load_insn1 (rtx pat)
16535 if (!pat || pat == NULL_RTX)
16538 if (GET_CODE (pat) == SET)
16539 return is_mem_ref (SET_SRC (pat));
16541 if (GET_CODE (pat) == PARALLEL)
16545 for (i = 0; i < XVECLEN (pat, 0); i++)
16546 if (is_load_insn1 (XVECEXP (pat, 0, i)))
16553 /* Determine if INSN loads from memory. */
16556 is_load_insn (rtx insn)
16558 if (!insn || !INSN_P (insn))
16561 if (GET_CODE (insn) == CALL_INSN)
16564 return is_load_insn1 (PATTERN (insn));
16567 /* Determine if PAT is a PATTERN of a store insn. */
16570 is_store_insn1 (rtx pat)
16572 if (!pat || pat == NULL_RTX)
16575 if (GET_CODE (pat) == SET)
16576 return is_mem_ref (SET_DEST (pat));
16578 if (GET_CODE (pat) == PARALLEL)
16582 for (i = 0; i < XVECLEN (pat, 0); i++)
16583 if (is_store_insn1 (XVECEXP (pat, 0, i)))
16590 /* Determine if INSN stores to memory. */
16593 is_store_insn (rtx insn)
16595 if (!insn || !INSN_P (insn))
16598 return is_store_insn1 (PATTERN (insn));
16601 /* Returns whether the dependence between INSN and NEXT is considered
16602 costly by the given target. */
16605 rs6000_is_costly_dependence (rtx insn, rtx next, rtx link, int cost,
16608 /* If the flag is not enabled - no dependence is considered costly;
16609 allow all dependent insns in the same group.
16610 This is the most aggressive option. */
16611 if (rs6000_sched_costly_dep == no_dep_costly)
16614 /* If the flag is set to 1 - a dependence is always considered costly;
16615 do not allow dependent instructions in the same group.
16616 This is the most conservative option. */
16617 if (rs6000_sched_costly_dep == all_deps_costly)
16620 if (rs6000_sched_costly_dep == store_to_load_dep_costly
16621 && is_load_insn (next)
16622 && is_store_insn (insn))
16623 /* Prevent load after store in the same group. */
16626 if (rs6000_sched_costly_dep == true_store_to_load_dep_costly
16627 && is_load_insn (next)
16628 && is_store_insn (insn)
16629 && (!link || (int) REG_NOTE_KIND (link) == 0))
16630 /* Prevent load after store in the same group if it is a true
16634 /* The flag is set to X; dependences with latency >= X are considered costly,
16635 and will not be scheduled in the same group. */
16636 if (rs6000_sched_costly_dep <= max_dep_latency
16637 && ((cost - distance) >= (int)rs6000_sched_costly_dep))
16643 /* Return the next insn after INSN that is found before TAIL is reached,
16644 skipping any "non-active" insns - insns that will not actually occupy
16645 an issue slot. Return NULL_RTX if such an insn is not found. */
16648 get_next_active_insn (rtx insn, rtx tail)
16652 if (!insn || insn == tail)
16655 next_insn = NEXT_INSN (insn);
16658 && next_insn != tail
16659 && (GET_CODE (next_insn) == NOTE
16660 || GET_CODE (PATTERN (next_insn)) == USE
16661 || GET_CODE (PATTERN (next_insn)) == CLOBBER))
16663 next_insn = NEXT_INSN (next_insn);
16666 if (!next_insn || next_insn == tail)
16672 /* Return whether the presence of INSN causes a dispatch group termination
16673 of group WHICH_GROUP.
16675 If WHICH_GROUP == current_group, this function will return true if INSN
16676 causes the termination of the current group (i.e, the dispatch group to
16677 which INSN belongs). This means that INSN will be the last insn in the
16678 group it belongs to.
16680 If WHICH_GROUP == previous_group, this function will return true if INSN
16681 causes the termination of the previous group (i.e, the dispatch group that
16682 precedes the group to which INSN belongs). This means that INSN will be
16683 the first insn in the group it belongs to). */
16686 insn_terminates_group_p (rtx insn, enum group_termination which_group)
16688 enum attr_type type;
16693 type = get_attr_type (insn);
16695 if (is_microcoded_insn (insn))
16698 if (which_group == current_group)
16700 if (is_branch_slot_insn (insn))
16704 else if (which_group == previous_group)
16706 if (is_dispatch_slot_restricted (insn))
16714 /* Return true if it is recommended to keep NEXT_INSN "far" (in a separate
16715 dispatch group) from the insns in GROUP_INSNS. Return false otherwise. */
16718 is_costly_group (rtx *group_insns, rtx next_insn)
16723 int issue_rate = rs6000_issue_rate ();
16725 for (i = 0; i < issue_rate; i++)
16727 rtx insn = group_insns[i];
16730 for (link = INSN_DEPEND (insn); link != 0; link = XEXP (link, 1))
16732 rtx next = XEXP (link, 0);
16733 if (next == next_insn)
16735 cost = insn_cost (insn, link, next_insn);
16736 if (rs6000_is_costly_dependence (insn, next_insn, link, cost, 0))
16745 /* Utility of the function redefine_groups.
16746 Check if it is too costly to schedule NEXT_INSN together with GROUP_INSNS
16747 in the same dispatch group. If so, insert nops before NEXT_INSN, in order
16748 to keep it "far" (in a separate group) from GROUP_INSNS, following
16749 one of the following schemes, depending on the value of the flag
16750 -minsert_sched_nops = X:
16751 (1) X == sched_finish_regroup_exact: insert exactly as many nops as needed
16752 in order to force NEXT_INSN into a separate group.
16753 (2) X < sched_finish_regroup_exact: insert exactly X nops.
16754 GROUP_END, CAN_ISSUE_MORE and GROUP_COUNT record the state after nop
16755 insertion (has a group just ended, how many vacant issue slots remain in the
16756 last group, and how many dispatch groups were encountered so far). */
16759 force_new_group (int sched_verbose, FILE *dump, rtx *group_insns,
16760 rtx next_insn, bool *group_end, int can_issue_more,
16765 int issue_rate = rs6000_issue_rate ();
16766 bool end = *group_end;
16769 if (next_insn == NULL_RTX)
16770 return can_issue_more;
16772 if (rs6000_sched_insert_nops > sched_finish_regroup_exact)
16773 return can_issue_more;
16775 force = is_costly_group (group_insns, next_insn);
16777 return can_issue_more;
16779 if (sched_verbose > 6)
16780 fprintf (dump,"force: group count = %d, can_issue_more = %d\n",
16781 *group_count ,can_issue_more);
16783 if (rs6000_sched_insert_nops == sched_finish_regroup_exact)
16786 can_issue_more = 0;
16788 /* Since only a branch can be issued in the last issue_slot, it is
16789 sufficient to insert 'can_issue_more - 1' nops if next_insn is not
16790 a branch. If next_insn is a branch, we insert 'can_issue_more' nops;
16791 in this case the last nop will start a new group and the branch
16792 will be forced to the new group. */
16793 if (can_issue_more && !is_branch_slot_insn (next_insn))
16796 while (can_issue_more > 0)
16799 emit_insn_before (nop, next_insn);
16807 if (rs6000_sched_insert_nops < sched_finish_regroup_exact)
16809 int n_nops = rs6000_sched_insert_nops;
16811 /* Nops can't be issued from the branch slot, so the effective
16812 issue_rate for nops is 'issue_rate - 1'. */
16813 if (can_issue_more == 0)
16814 can_issue_more = issue_rate;
16816 if (can_issue_more == 0)
16818 can_issue_more = issue_rate - 1;
16821 for (i = 0; i < issue_rate; i++)
16823 group_insns[i] = 0;
16830 emit_insn_before (nop, next_insn);
16831 if (can_issue_more == issue_rate - 1) /* new group begins */
16834 if (can_issue_more == 0)
16836 can_issue_more = issue_rate - 1;
16839 for (i = 0; i < issue_rate; i++)
16841 group_insns[i] = 0;
16847 /* Scale back relative to 'issue_rate' (instead of 'issue_rate - 1'). */
16850 /* Is next_insn going to start a new group? */
16853 || (can_issue_more == 1 && !is_branch_slot_insn (next_insn))
16854 || (can_issue_more <= 2 && is_cracked_insn (next_insn))
16855 || (can_issue_more < issue_rate &&
16856 insn_terminates_group_p (next_insn, previous_group)));
16857 if (*group_end && end)
16860 if (sched_verbose > 6)
16861 fprintf (dump, "done force: group count = %d, can_issue_more = %d\n",
16862 *group_count, can_issue_more);
16863 return can_issue_more;
16866 return can_issue_more;
16869 /* This function tries to synch the dispatch groups that the compiler "sees"
16870 with the dispatch groups that the processor dispatcher is expected to
16871 form in practice. It tries to achieve this synchronization by forcing the
16872 estimated processor grouping on the compiler (as opposed to the function
16873 'pad_goups' which tries to force the scheduler's grouping on the processor).
16875 The function scans the insn sequence between PREV_HEAD_INSN and TAIL and
16876 examines the (estimated) dispatch groups that will be formed by the processor
16877 dispatcher. It marks these group boundaries to reflect the estimated
16878 processor grouping, overriding the grouping that the scheduler had marked.
16879 Depending on the value of the flag '-minsert-sched-nops' this function can
16880 force certain insns into separate groups or force a certain distance between
16881 them by inserting nops, for example, if there exists a "costly dependence"
16884 The function estimates the group boundaries that the processor will form as
16885 follows: It keeps track of how many vacant issue slots are available after
16886 each insn. A subsequent insn will start a new group if one of the following
16888 - no more vacant issue slots remain in the current dispatch group.
16889 - only the last issue slot, which is the branch slot, is vacant, but the next
16890 insn is not a branch.
16891 - only the last 2 or less issue slots, including the branch slot, are vacant,
16892 which means that a cracked insn (which occupies two issue slots) can't be
16893 issued in this group.
16894 - less than 'issue_rate' slots are vacant, and the next insn always needs to
16895 start a new group. */
16898 redefine_groups (FILE *dump, int sched_verbose, rtx prev_head_insn, rtx tail)
16900 rtx insn, next_insn;
16902 int can_issue_more;
16905 int group_count = 0;
16909 issue_rate = rs6000_issue_rate ();
16910 group_insns = alloca (issue_rate * sizeof (rtx));
16911 for (i = 0; i < issue_rate; i++)
16913 group_insns[i] = 0;
16915 can_issue_more = issue_rate;
16917 insn = get_next_active_insn (prev_head_insn, tail);
16920 while (insn != NULL_RTX)
16922 slot = (issue_rate - can_issue_more);
16923 group_insns[slot] = insn;
16925 rs6000_variable_issue (dump, sched_verbose, insn, can_issue_more);
16926 if (insn_terminates_group_p (insn, current_group))
16927 can_issue_more = 0;
16929 next_insn = get_next_active_insn (insn, tail);
16930 if (next_insn == NULL_RTX)
16931 return group_count + 1;
16933 /* Is next_insn going to start a new group? */
16935 = (can_issue_more == 0
16936 || (can_issue_more == 1 && !is_branch_slot_insn (next_insn))
16937 || (can_issue_more <= 2 && is_cracked_insn (next_insn))
16938 || (can_issue_more < issue_rate &&
16939 insn_terminates_group_p (next_insn, previous_group)));
16941 can_issue_more = force_new_group (sched_verbose, dump, group_insns,
16942 next_insn, &group_end, can_issue_more,
16948 can_issue_more = 0;
16949 for (i = 0; i < issue_rate; i++)
16951 group_insns[i] = 0;
16955 if (GET_MODE (next_insn) == TImode && can_issue_more)
16956 PUT_MODE (next_insn, VOIDmode);
16957 else if (!can_issue_more && GET_MODE (next_insn) != TImode)
16958 PUT_MODE (next_insn, TImode);
16961 if (can_issue_more == 0)
16962 can_issue_more = issue_rate;
16965 return group_count;
16968 /* Scan the insn sequence between PREV_HEAD_INSN and TAIL and examine the
16969 dispatch group boundaries that the scheduler had marked. Pad with nops
16970 any dispatch groups which have vacant issue slots, in order to force the
16971 scheduler's grouping on the processor dispatcher. The function
16972 returns the number of dispatch groups found. */
16975 pad_groups (FILE *dump, int sched_verbose, rtx prev_head_insn, rtx tail)
16977 rtx insn, next_insn;
16980 int can_issue_more;
16982 int group_count = 0;
16984 /* Initialize issue_rate. */
16985 issue_rate = rs6000_issue_rate ();
16986 can_issue_more = issue_rate;
16988 insn = get_next_active_insn (prev_head_insn, tail);
16989 next_insn = get_next_active_insn (insn, tail);
16991 while (insn != NULL_RTX)
16994 rs6000_variable_issue (dump, sched_verbose, insn, can_issue_more);
16996 group_end = (next_insn == NULL_RTX || GET_MODE (next_insn) == TImode);
16998 if (next_insn == NULL_RTX)
17003 /* If the scheduler had marked group termination at this location
17004 (between insn and next_indn), and neither insn nor next_insn will
17005 force group termination, pad the group with nops to force group
17008 && (rs6000_sched_insert_nops == sched_finish_pad_groups)
17009 && !insn_terminates_group_p (insn, current_group)
17010 && !insn_terminates_group_p (next_insn, previous_group))
17012 if (!is_branch_slot_insn (next_insn))
17015 while (can_issue_more)
17018 emit_insn_before (nop, next_insn);
17023 can_issue_more = issue_rate;
17028 next_insn = get_next_active_insn (insn, tail);
17031 return group_count;
17034 /* The following function is called at the end of scheduling BB.
17035 After reload, it inserts nops at insn group bundling. */
17038 rs6000_sched_finish (FILE *dump, int sched_verbose)
17043 fprintf (dump, "=== Finishing schedule.\n");
17045 if (reload_completed && rs6000_sched_groups)
17047 if (rs6000_sched_insert_nops == sched_finish_none)
17050 if (rs6000_sched_insert_nops == sched_finish_pad_groups)
17051 n_groups = pad_groups (dump, sched_verbose,
17052 current_sched_info->prev_head,
17053 current_sched_info->next_tail);
17055 n_groups = redefine_groups (dump, sched_verbose,
17056 current_sched_info->prev_head,
17057 current_sched_info->next_tail);
17059 if (sched_verbose >= 6)
17061 fprintf (dump, "ngroups = %d\n", n_groups);
17062 print_rtl (dump, current_sched_info->prev_head);
17063 fprintf (dump, "Done finish_sched\n");
17068 /* Length in units of the trampoline for entering a nested function. */
17071 rs6000_trampoline_size (void)
17075 switch (DEFAULT_ABI)
17078 gcc_unreachable ();
17081 ret = (TARGET_32BIT) ? 12 : 24;
17086 ret = (TARGET_32BIT) ? 40 : 48;
17093 /* Emit RTL insns to initialize the variable parts of a trampoline.
17094 FNADDR is an RTX for the address of the function's pure code.
17095 CXT is an RTX for the static chain value for the function. */
17098 rs6000_initialize_trampoline (rtx addr, rtx fnaddr, rtx cxt)
17100 enum machine_mode pmode = Pmode;
17101 int regsize = (TARGET_32BIT) ? 4 : 8;
17102 rtx ctx_reg = force_reg (pmode, cxt);
17104 switch (DEFAULT_ABI)
17107 gcc_unreachable ();
17109 /* Macros to shorten the code expansions below. */
17110 #define MEM_DEREF(addr) gen_rtx_MEM (pmode, memory_address (pmode, addr))
17111 #define MEM_PLUS(addr,offset) \
17112 gen_rtx_MEM (pmode, memory_address (pmode, plus_constant (addr, offset)))
17114 /* Under AIX, just build the 3 word function descriptor */
17117 rtx fn_reg = gen_reg_rtx (pmode);
17118 rtx toc_reg = gen_reg_rtx (pmode);
17119 emit_move_insn (fn_reg, MEM_DEREF (fnaddr));
17120 emit_move_insn (toc_reg, MEM_PLUS (fnaddr, regsize));
17121 emit_move_insn (MEM_DEREF (addr), fn_reg);
17122 emit_move_insn (MEM_PLUS (addr, regsize), toc_reg);
17123 emit_move_insn (MEM_PLUS (addr, 2*regsize), ctx_reg);
17127 /* Under V.4/eabi/darwin, __trampoline_setup does the real work. */
17130 emit_library_call (gen_rtx_SYMBOL_REF (SImode, "__trampoline_setup"),
17131 FALSE, VOIDmode, 4,
17133 GEN_INT (rs6000_trampoline_size ()), SImode,
17143 /* Table of valid machine attributes. */
17145 const struct attribute_spec rs6000_attribute_table[] =
17147 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
17148 { "altivec", 1, 1, false, true, false, rs6000_handle_altivec_attribute },
17149 { "longcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
17150 { "shortcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
17151 #ifdef SUBTARGET_ATTRIBUTE_TABLE
17152 SUBTARGET_ATTRIBUTE_TABLE,
17154 { NULL, 0, 0, false, false, false, NULL }
17157 /* Handle the "altivec" attribute. The attribute may have
17158 arguments as follows:
17160 __attribute__((altivec(vector__)))
17161 __attribute__((altivec(pixel__))) (always followed by 'unsigned short')
17162 __attribute__((altivec(bool__))) (always followed by 'unsigned')
17164 and may appear more than once (e.g., 'vector bool char') in a
17165 given declaration. */
17168 rs6000_handle_altivec_attribute (tree *node,
17169 tree name ATTRIBUTE_UNUSED,
17171 int flags ATTRIBUTE_UNUSED,
17172 bool *no_add_attrs)
17174 tree type = *node, result = NULL_TREE;
17175 enum machine_mode mode;
17178 = ((args && TREE_CODE (args) == TREE_LIST && TREE_VALUE (args)
17179 && TREE_CODE (TREE_VALUE (args)) == IDENTIFIER_NODE)
17180 ? *IDENTIFIER_POINTER (TREE_VALUE (args))
17183 while (POINTER_TYPE_P (type)
17184 || TREE_CODE (type) == FUNCTION_TYPE
17185 || TREE_CODE (type) == METHOD_TYPE
17186 || TREE_CODE (type) == ARRAY_TYPE)
17187 type = TREE_TYPE (type);
17189 mode = TYPE_MODE (type);
17191 /* Check for invalid AltiVec type qualifiers. */
17192 if (type == long_unsigned_type_node || type == long_integer_type_node)
17195 error ("use of %<long%> in AltiVec types is invalid for 64-bit code");
17196 else if (rs6000_warn_altivec_long)
17197 warning (0, "use of %<long%> in AltiVec types is deprecated; use %<int%>");
17199 else if (type == long_long_unsigned_type_node
17200 || type == long_long_integer_type_node)
17201 error ("use of %<long long%> in AltiVec types is invalid");
17202 else if (type == double_type_node)
17203 error ("use of %<double%> in AltiVec types is invalid");
17204 else if (type == long_double_type_node)
17205 error ("use of %<long double%> in AltiVec types is invalid");
17206 else if (type == boolean_type_node)
17207 error ("use of boolean types in AltiVec types is invalid");
17208 else if (TREE_CODE (type) == COMPLEX_TYPE)
17209 error ("use of %<complex%> in AltiVec types is invalid");
17211 switch (altivec_type)
17214 unsigned_p = TYPE_UNSIGNED (type);
17218 result = (unsigned_p ? unsigned_V4SI_type_node : V4SI_type_node);
17221 result = (unsigned_p ? unsigned_V8HI_type_node : V8HI_type_node);
17224 result = (unsigned_p ? unsigned_V16QI_type_node : V16QI_type_node);
17226 case SFmode: result = V4SF_type_node; break;
17227 /* If the user says 'vector int bool', we may be handed the 'bool'
17228 attribute _before_ the 'vector' attribute, and so select the
17229 proper type in the 'b' case below. */
17230 case V4SImode: case V8HImode: case V16QImode: case V4SFmode:
17238 case SImode: case V4SImode: result = bool_V4SI_type_node; break;
17239 case HImode: case V8HImode: result = bool_V8HI_type_node; break;
17240 case QImode: case V16QImode: result = bool_V16QI_type_node;
17247 case V8HImode: result = pixel_V8HI_type_node;
17253 if (result && result != type && TYPE_READONLY (type))
17254 result = build_qualified_type (result, TYPE_QUAL_CONST);
17256 *no_add_attrs = true; /* No need to hang on to the attribute. */
17259 *node = reconstruct_complex_type (*node, result);
17264 /* AltiVec defines four built-in scalar types that serve as vector
17265 elements; we must teach the compiler how to mangle them. */
17267 static const char *
17268 rs6000_mangle_fundamental_type (tree type)
17270 if (type == bool_char_type_node) return "U6__boolc";
17271 if (type == bool_short_type_node) return "U6__bools";
17272 if (type == pixel_type_node) return "u7__pixel";
17273 if (type == bool_int_type_node) return "U6__booli";
17275 /* For all other types, use normal C++ mangling. */
17279 /* Handle a "longcall" or "shortcall" attribute; arguments as in
17280 struct attribute_spec.handler. */
17283 rs6000_handle_longcall_attribute (tree *node, tree name,
17284 tree args ATTRIBUTE_UNUSED,
17285 int flags ATTRIBUTE_UNUSED,
17286 bool *no_add_attrs)
17288 if (TREE_CODE (*node) != FUNCTION_TYPE
17289 && TREE_CODE (*node) != FIELD_DECL
17290 && TREE_CODE (*node) != TYPE_DECL)
17292 warning (OPT_Wattributes, "%qs attribute only applies to functions",
17293 IDENTIFIER_POINTER (name));
17294 *no_add_attrs = true;
17300 /* Set longcall attributes on all functions declared when
17301 rs6000_default_long_calls is true. */
17303 rs6000_set_default_type_attributes (tree type)
17305 if (rs6000_default_long_calls
17306 && (TREE_CODE (type) == FUNCTION_TYPE
17307 || TREE_CODE (type) == METHOD_TYPE))
17308 TYPE_ATTRIBUTES (type) = tree_cons (get_identifier ("longcall"),
17310 TYPE_ATTRIBUTES (type));
17313 /* Return a reference suitable for calling a function with the
17314 longcall attribute. */
17317 rs6000_longcall_ref (rtx call_ref)
17319 const char *call_name;
17322 if (GET_CODE (call_ref) != SYMBOL_REF)
17325 /* System V adds '.' to the internal name, so skip them. */
17326 call_name = XSTR (call_ref, 0);
17327 if (*call_name == '.')
17329 while (*call_name == '.')
17332 node = get_identifier (call_name);
17333 call_ref = gen_rtx_SYMBOL_REF (VOIDmode, IDENTIFIER_POINTER (node));
17336 return force_reg (Pmode, call_ref);
17339 #ifdef USING_ELFOS_H
17341 /* A C statement or statements to switch to the appropriate section
17342 for output of RTX in mode MODE. You can assume that RTX is some
17343 kind of constant in RTL. The argument MODE is redundant except in
17344 the case of a `const_int' rtx. Select the section by calling
17345 `text_section' or one of the alternatives for other sections.
17347 Do not define this macro if you put all constants in the read-only
17351 rs6000_elf_select_rtx_section (enum machine_mode mode, rtx x,
17352 unsigned HOST_WIDE_INT align)
17354 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
17357 default_elf_select_rtx_section (mode, x, align);
17360 /* A C statement or statements to switch to the appropriate
17361 section for output of DECL. DECL is either a `VAR_DECL' node
17362 or a constant of some sort. RELOC indicates whether forming
17363 the initial value of DECL requires link-time relocations. */
17366 rs6000_elf_select_section (tree decl, int reloc,
17367 unsigned HOST_WIDE_INT align)
17369 /* Pretend that we're always building for a shared library when
17370 ABI_AIX, because otherwise we end up with dynamic relocations
17371 in read-only sections. This happens for function pointers,
17372 references to vtables in typeinfo, and probably other cases. */
17373 default_elf_select_section_1 (decl, reloc, align,
17374 flag_pic || DEFAULT_ABI == ABI_AIX);
17377 /* A C statement to build up a unique section name, expressed as a
17378 STRING_CST node, and assign it to DECL_SECTION_NAME (decl).
17379 RELOC indicates whether the initial value of EXP requires
17380 link-time relocations. If you do not define this macro, GCC will use
17381 the symbol name prefixed by `.' as the section name. Note - this
17382 macro can now be called for uninitialized data items as well as
17383 initialized data and functions. */
17386 rs6000_elf_unique_section (tree decl, int reloc)
17388 /* As above, pretend that we're always building for a shared library
17389 when ABI_AIX, to avoid dynamic relocations in read-only sections. */
17390 default_unique_section_1 (decl, reloc,
17391 flag_pic || DEFAULT_ABI == ABI_AIX);
17394 /* For a SYMBOL_REF, set generic flags and then perform some
17395 target-specific processing.
17397 When the AIX ABI is requested on a non-AIX system, replace the
17398 function name with the real name (with a leading .) rather than the
17399 function descriptor name. This saves a lot of overriding code to
17400 read the prefixes. */
17403 rs6000_elf_encode_section_info (tree decl, rtx rtl, int first)
17405 default_encode_section_info (decl, rtl, first);
17408 && TREE_CODE (decl) == FUNCTION_DECL
17410 && DEFAULT_ABI == ABI_AIX)
17412 rtx sym_ref = XEXP (rtl, 0);
17413 size_t len = strlen (XSTR (sym_ref, 0));
17414 char *str = alloca (len + 2);
17416 memcpy (str + 1, XSTR (sym_ref, 0), len + 1);
17417 XSTR (sym_ref, 0) = ggc_alloc_string (str, len + 1);
17422 rs6000_elf_in_small_data_p (tree decl)
17424 if (rs6000_sdata == SDATA_NONE)
17427 /* We want to merge strings, so we never consider them small data. */
17428 if (TREE_CODE (decl) == STRING_CST)
17431 /* Functions are never in the small data area. */
17432 if (TREE_CODE (decl) == FUNCTION_DECL)
17435 if (TREE_CODE (decl) == VAR_DECL && DECL_SECTION_NAME (decl))
17437 const char *section = TREE_STRING_POINTER (DECL_SECTION_NAME (decl));
17438 if (strcmp (section, ".sdata") == 0
17439 || strcmp (section, ".sdata2") == 0
17440 || strcmp (section, ".sbss") == 0
17441 || strcmp (section, ".sbss2") == 0
17442 || strcmp (section, ".PPC.EMB.sdata0") == 0
17443 || strcmp (section, ".PPC.EMB.sbss0") == 0)
17448 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
17451 && (unsigned HOST_WIDE_INT) size <= g_switch_value
17452 /* If it's not public, and we're not going to reference it there,
17453 there's no need to put it in the small data section. */
17454 && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)))
17461 #endif /* USING_ELFOS_H */
17464 /* Return a REG that occurs in ADDR with coefficient 1.
17465 ADDR can be effectively incremented by incrementing REG.
17467 r0 is special and we must not select it as an address
17468 register by this routine since our caller will try to
17469 increment the returned register via an "la" instruction. */
17472 find_addr_reg (rtx addr)
17474 while (GET_CODE (addr) == PLUS)
17476 if (GET_CODE (XEXP (addr, 0)) == REG
17477 && REGNO (XEXP (addr, 0)) != 0)
17478 addr = XEXP (addr, 0);
17479 else if (GET_CODE (XEXP (addr, 1)) == REG
17480 && REGNO (XEXP (addr, 1)) != 0)
17481 addr = XEXP (addr, 1);
17482 else if (CONSTANT_P (XEXP (addr, 0)))
17483 addr = XEXP (addr, 1);
17484 else if (CONSTANT_P (XEXP (addr, 1)))
17485 addr = XEXP (addr, 0);
17487 gcc_unreachable ();
17489 gcc_assert (GET_CODE (addr) == REG && REGNO (addr) != 0);
17494 rs6000_fatal_bad_address (rtx op)
17496 fatal_insn ("bad address", op);
17501 static tree branch_island_list = 0;
17503 /* Remember to generate a branch island for far calls to the given
17507 add_compiler_branch_island (tree label_name, tree function_name,
17510 tree branch_island = build_tree_list (function_name, label_name);
17511 TREE_TYPE (branch_island) = build_int_cst (NULL_TREE, line_number);
17512 TREE_CHAIN (branch_island) = branch_island_list;
17513 branch_island_list = branch_island;
17516 #define BRANCH_ISLAND_LABEL_NAME(BRANCH_ISLAND) TREE_VALUE (BRANCH_ISLAND)
17517 #define BRANCH_ISLAND_FUNCTION_NAME(BRANCH_ISLAND) TREE_PURPOSE (BRANCH_ISLAND)
17518 #define BRANCH_ISLAND_LINE_NUMBER(BRANCH_ISLAND) \
17519 TREE_INT_CST_LOW (TREE_TYPE (BRANCH_ISLAND))
17521 /* Generate far-jump branch islands for everything on the
17522 branch_island_list. Invoked immediately after the last instruction
17523 of the epilogue has been emitted; the branch-islands must be
17524 appended to, and contiguous with, the function body. Mach-O stubs
17525 are generated in machopic_output_stub(). */
17528 macho_branch_islands (void)
17531 tree branch_island;
17533 for (branch_island = branch_island_list;
17535 branch_island = TREE_CHAIN (branch_island))
17537 const char *label =
17538 IDENTIFIER_POINTER (BRANCH_ISLAND_LABEL_NAME (branch_island));
17540 IDENTIFIER_POINTER (BRANCH_ISLAND_FUNCTION_NAME (branch_island));
17541 char name_buf[512];
17542 /* Cheap copy of the details from the Darwin ASM_OUTPUT_LABELREF(). */
17543 if (name[0] == '*' || name[0] == '&')
17544 strcpy (name_buf, name+1);
17548 strcpy (name_buf+1, name);
17550 strcpy (tmp_buf, "\n");
17551 strcat (tmp_buf, label);
17552 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
17553 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
17554 dbxout_stabd (N_SLINE, BRANCH_ISLAND_LINE_NUMBER (branch_island));
17555 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
17558 strcat (tmp_buf, ":\n\tmflr r0\n\tbcl 20,31,");
17559 strcat (tmp_buf, label);
17560 strcat (tmp_buf, "_pic\n");
17561 strcat (tmp_buf, label);
17562 strcat (tmp_buf, "_pic:\n\tmflr r11\n");
17564 strcat (tmp_buf, "\taddis r11,r11,ha16(");
17565 strcat (tmp_buf, name_buf);
17566 strcat (tmp_buf, " - ");
17567 strcat (tmp_buf, label);
17568 strcat (tmp_buf, "_pic)\n");
17570 strcat (tmp_buf, "\tmtlr r0\n");
17572 strcat (tmp_buf, "\taddi r12,r11,lo16(");
17573 strcat (tmp_buf, name_buf);
17574 strcat (tmp_buf, " - ");
17575 strcat (tmp_buf, label);
17576 strcat (tmp_buf, "_pic)\n");
17578 strcat (tmp_buf, "\tmtctr r12\n\tbctr\n");
17582 strcat (tmp_buf, ":\nlis r12,hi16(");
17583 strcat (tmp_buf, name_buf);
17584 strcat (tmp_buf, ")\n\tori r12,r12,lo16(");
17585 strcat (tmp_buf, name_buf);
17586 strcat (tmp_buf, ")\n\tmtctr r12\n\tbctr");
17588 output_asm_insn (tmp_buf, 0);
17589 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
17590 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
17591 dbxout_stabd (N_SLINE, BRANCH_ISLAND_LINE_NUMBER (branch_island));
17592 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
17595 branch_island_list = 0;
17598 /* NO_PREVIOUS_DEF checks in the link list whether the function name is
17599 already there or not. */
17602 no_previous_def (tree function_name)
17604 tree branch_island;
17605 for (branch_island = branch_island_list;
17607 branch_island = TREE_CHAIN (branch_island))
17608 if (function_name == BRANCH_ISLAND_FUNCTION_NAME (branch_island))
17613 /* GET_PREV_LABEL gets the label name from the previous definition of
17617 get_prev_label (tree function_name)
17619 tree branch_island;
17620 for (branch_island = branch_island_list;
17622 branch_island = TREE_CHAIN (branch_island))
17623 if (function_name == BRANCH_ISLAND_FUNCTION_NAME (branch_island))
17624 return BRANCH_ISLAND_LABEL_NAME (branch_island);
17628 /* INSN is either a function call or a millicode call. It may have an
17629 unconditional jump in its delay slot.
17631 CALL_DEST is the routine we are calling. */
17634 output_call (rtx insn, rtx *operands, int dest_operand_number,
17635 int cookie_operand_number)
17637 static char buf[256];
17638 if (GET_CODE (operands[dest_operand_number]) == SYMBOL_REF
17639 && (INTVAL (operands[cookie_operand_number]) & CALL_LONG))
17642 tree funname = get_identifier (XSTR (operands[dest_operand_number], 0));
17644 if (no_previous_def (funname))
17646 int line_number = 0;
17647 rtx label_rtx = gen_label_rtx ();
17648 char *label_buf, temp_buf[256];
17649 ASM_GENERATE_INTERNAL_LABEL (temp_buf, "L",
17650 CODE_LABEL_NUMBER (label_rtx));
17651 label_buf = temp_buf[0] == '*' ? temp_buf + 1 : temp_buf;
17652 labelname = get_identifier (label_buf);
17653 for (; insn && GET_CODE (insn) != NOTE; insn = PREV_INSN (insn));
17655 line_number = NOTE_LINE_NUMBER (insn);
17656 add_compiler_branch_island (labelname, funname, line_number);
17659 labelname = get_prev_label (funname);
17661 /* "jbsr foo, L42" is Mach-O for "Link as 'bl foo' if a 'bl'
17662 instruction will reach 'foo', otherwise link as 'bl L42'".
17663 "L42" should be a 'branch island', that will do a far jump to
17664 'foo'. Branch islands are generated in
17665 macho_branch_islands(). */
17666 sprintf (buf, "jbsr %%z%d,%.246s",
17667 dest_operand_number, IDENTIFIER_POINTER (labelname));
17670 sprintf (buf, "bl %%z%d", dest_operand_number);
17674 /* Generate PIC and indirect symbol stubs. */
17677 machopic_output_stub (FILE *file, const char *symb, const char *stub)
17679 unsigned int length;
17680 char *symbol_name, *lazy_ptr_name;
17681 char *local_label_0;
17682 static int label = 0;
17684 /* Lose our funky encoding stuff so it doesn't contaminate the stub. */
17685 symb = (*targetm.strip_name_encoding) (symb);
17688 length = strlen (symb);
17689 symbol_name = alloca (length + 32);
17690 GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name, symb, length);
17692 lazy_ptr_name = alloca (length + 32);
17693 GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name, symb, length);
17696 machopic_picsymbol_stub1_section ();
17698 machopic_symbol_stub1_section ();
17702 fprintf (file, "\t.align 5\n");
17704 fprintf (file, "%s:\n", stub);
17705 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
17708 local_label_0 = alloca (sizeof ("\"L00000000000$spb\""));
17709 sprintf (local_label_0, "\"L%011d$spb\"", label);
17711 fprintf (file, "\tmflr r0\n");
17712 fprintf (file, "\tbcl 20,31,%s\n", local_label_0);
17713 fprintf (file, "%s:\n\tmflr r11\n", local_label_0);
17714 fprintf (file, "\taddis r11,r11,ha16(%s-%s)\n",
17715 lazy_ptr_name, local_label_0);
17716 fprintf (file, "\tmtlr r0\n");
17717 fprintf (file, "\t%s r12,lo16(%s-%s)(r11)\n",
17718 (TARGET_64BIT ? "ldu" : "lwzu"),
17719 lazy_ptr_name, local_label_0);
17720 fprintf (file, "\tmtctr r12\n");
17721 fprintf (file, "\tbctr\n");
17725 fprintf (file, "\t.align 4\n");
17727 fprintf (file, "%s:\n", stub);
17728 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
17730 fprintf (file, "\tlis r11,ha16(%s)\n", lazy_ptr_name);
17731 fprintf (file, "\t%s r12,lo16(%s)(r11)\n",
17732 (TARGET_64BIT ? "ldu" : "lwzu"),
17734 fprintf (file, "\tmtctr r12\n");
17735 fprintf (file, "\tbctr\n");
17738 machopic_lazy_symbol_ptr_section ();
17739 fprintf (file, "%s:\n", lazy_ptr_name);
17740 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
17741 fprintf (file, "%sdyld_stub_binding_helper\n",
17742 (TARGET_64BIT ? DOUBLE_INT_ASM_OP : "\t.long\t"));
17745 /* Legitimize PIC addresses. If the address is already
17746 position-independent, we return ORIG. Newly generated
17747 position-independent addresses go into a reg. This is REG if non
17748 zero, otherwise we allocate register(s) as necessary. */
17750 #define SMALL_INT(X) ((unsigned) (INTVAL (X) + 0x8000) < 0x10000)
17753 rs6000_machopic_legitimize_pic_address (rtx orig, enum machine_mode mode,
17758 if (reg == NULL && ! reload_in_progress && ! reload_completed)
17759 reg = gen_reg_rtx (Pmode);
17761 if (GET_CODE (orig) == CONST)
17765 if (GET_CODE (XEXP (orig, 0)) == PLUS
17766 && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
17769 gcc_assert (GET_CODE (XEXP (orig, 0)) == PLUS);
17771 /* Use a different reg for the intermediate value, as
17772 it will be marked UNCHANGING. */
17773 reg_temp = no_new_pseudos ? reg : gen_reg_rtx (Pmode);
17774 base = rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 0),
17777 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 1),
17780 if (GET_CODE (offset) == CONST_INT)
17782 if (SMALL_INT (offset))
17783 return plus_constant (base, INTVAL (offset));
17784 else if (! reload_in_progress && ! reload_completed)
17785 offset = force_reg (Pmode, offset);
17788 rtx mem = force_const_mem (Pmode, orig);
17789 return machopic_legitimize_pic_address (mem, Pmode, reg);
17792 return gen_rtx_PLUS (Pmode, base, offset);
17795 /* Fall back on generic machopic code. */
17796 return machopic_legitimize_pic_address (orig, mode, reg);
17799 /* This is just a placeholder to make linking work without having to
17800 add this to the generic Darwin EXTRA_SECTIONS. If -mcall-aix is
17801 ever needed for Darwin (not too likely!) this would have to get a
17802 real definition. */
17809 /* Output a .machine directive for the Darwin assembler, and call
17810 the generic start_file routine. */
17813 rs6000_darwin_file_start (void)
17815 static const struct
17821 { "ppc64", "ppc64", MASK_64BIT },
17822 { "970", "ppc970", MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64 },
17823 { "power4", "ppc970", 0 },
17824 { "G5", "ppc970", 0 },
17825 { "7450", "ppc7450", 0 },
17826 { "7400", "ppc7400", MASK_ALTIVEC },
17827 { "G4", "ppc7400", 0 },
17828 { "750", "ppc750", 0 },
17829 { "740", "ppc750", 0 },
17830 { "G3", "ppc750", 0 },
17831 { "604e", "ppc604e", 0 },
17832 { "604", "ppc604", 0 },
17833 { "603e", "ppc603", 0 },
17834 { "603", "ppc603", 0 },
17835 { "601", "ppc601", 0 },
17836 { NULL, "ppc", 0 } };
17837 const char *cpu_id = "";
17840 rs6000_file_start ();
17842 /* Determine the argument to -mcpu=. Default to G3 if not specified. */
17843 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
17844 if (rs6000_select[i].set_arch_p && rs6000_select[i].string
17845 && rs6000_select[i].string[0] != '\0')
17846 cpu_id = rs6000_select[i].string;
17848 /* Look through the mapping array. Pick the first name that either
17849 matches the argument, has a bit set in IF_SET that is also set
17850 in the target flags, or has a NULL name. */
17853 while (mapping[i].arg != NULL
17854 && strcmp (mapping[i].arg, cpu_id) != 0
17855 && (mapping[i].if_set & target_flags) == 0)
17858 fprintf (asm_out_file, "\t.machine %s\n", mapping[i].name);
17861 #endif /* TARGET_MACHO */
17864 static unsigned int
17865 rs6000_elf_section_type_flags (tree decl, const char *name, int reloc)
17867 return default_section_type_flags_1 (decl, name, reloc,
17868 flag_pic || DEFAULT_ABI == ABI_AIX);
17871 /* Record an element in the table of global constructors. SYMBOL is
17872 a SYMBOL_REF of the function to be called; PRIORITY is a number
17873 between 0 and MAX_INIT_PRIORITY.
17875 This differs from default_named_section_asm_out_constructor in
17876 that we have special handling for -mrelocatable. */
17879 rs6000_elf_asm_out_constructor (rtx symbol, int priority)
17881 const char *section = ".ctors";
17884 if (priority != DEFAULT_INIT_PRIORITY)
17886 sprintf (buf, ".ctors.%.5u",
17887 /* Invert the numbering so the linker puts us in the proper
17888 order; constructors are run from right to left, and the
17889 linker sorts in increasing order. */
17890 MAX_INIT_PRIORITY - priority);
17894 named_section_flags (section, SECTION_WRITE);
17895 assemble_align (POINTER_SIZE);
17897 if (TARGET_RELOCATABLE)
17899 fputs ("\t.long (", asm_out_file);
17900 output_addr_const (asm_out_file, symbol);
17901 fputs (")@fixup\n", asm_out_file);
17904 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
17908 rs6000_elf_asm_out_destructor (rtx symbol, int priority)
17910 const char *section = ".dtors";
17913 if (priority != DEFAULT_INIT_PRIORITY)
17915 sprintf (buf, ".dtors.%.5u",
17916 /* Invert the numbering so the linker puts us in the proper
17917 order; constructors are run from right to left, and the
17918 linker sorts in increasing order. */
17919 MAX_INIT_PRIORITY - priority);
17923 named_section_flags (section, SECTION_WRITE);
17924 assemble_align (POINTER_SIZE);
17926 if (TARGET_RELOCATABLE)
17928 fputs ("\t.long (", asm_out_file);
17929 output_addr_const (asm_out_file, symbol);
17930 fputs (")@fixup\n", asm_out_file);
17933 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
17937 rs6000_elf_declare_function_name (FILE *file, const char *name, tree decl)
17941 fputs ("\t.section\t\".opd\",\"aw\"\n\t.align 3\n", file);
17942 ASM_OUTPUT_LABEL (file, name);
17943 fputs (DOUBLE_INT_ASM_OP, file);
17944 rs6000_output_function_entry (file, name);
17945 fputs (",.TOC.@tocbase,0\n\t.previous\n", file);
17948 fputs ("\t.size\t", file);
17949 assemble_name (file, name);
17950 fputs (",24\n\t.type\t.", file);
17951 assemble_name (file, name);
17952 fputs (",@function\n", file);
17953 if (TREE_PUBLIC (decl) && ! DECL_WEAK (decl))
17955 fputs ("\t.globl\t.", file);
17956 assemble_name (file, name);
17961 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
17962 ASM_DECLARE_RESULT (file, DECL_RESULT (decl));
17963 rs6000_output_function_entry (file, name);
17964 fputs (":\n", file);
17968 if (TARGET_RELOCATABLE
17969 && !TARGET_SECURE_PLT
17970 && (get_pool_size () != 0 || current_function_profile)
17975 (*targetm.asm_out.internal_label) (file, "LCL", rs6000_pic_labelno);
17977 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
17978 fprintf (file, "\t.long ");
17979 assemble_name (file, buf);
17981 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
17982 assemble_name (file, buf);
17986 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
17987 ASM_DECLARE_RESULT (file, DECL_RESULT (decl));
17989 if (DEFAULT_ABI == ABI_AIX)
17991 const char *desc_name, *orig_name;
17993 orig_name = (*targetm.strip_name_encoding) (name);
17994 desc_name = orig_name;
17995 while (*desc_name == '.')
17998 if (TREE_PUBLIC (decl))
17999 fprintf (file, "\t.globl %s\n", desc_name);
18001 fprintf (file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
18002 fprintf (file, "%s:\n", desc_name);
18003 fprintf (file, "\t.long %s\n", orig_name);
18004 fputs ("\t.long _GLOBAL_OFFSET_TABLE_\n", file);
18005 if (DEFAULT_ABI == ABI_AIX)
18006 fputs ("\t.long 0\n", file);
18007 fprintf (file, "\t.previous\n");
18009 ASM_OUTPUT_LABEL (file, name);
18013 rs6000_elf_end_indicate_exec_stack (void)
18016 file_end_indicate_exec_stack ();
18022 rs6000_xcoff_asm_globalize_label (FILE *stream, const char *name)
18024 fputs (GLOBAL_ASM_OP, stream);
18025 RS6000_OUTPUT_BASENAME (stream, name);
18026 putc ('\n', stream);
18030 rs6000_xcoff_asm_named_section (const char *name, unsigned int flags,
18031 tree decl ATTRIBUTE_UNUSED)
18034 static const char * const suffix[3] = { "PR", "RO", "RW" };
18036 if (flags & SECTION_CODE)
18038 else if (flags & SECTION_WRITE)
18043 fprintf (asm_out_file, "\t.csect %s%s[%s],%u\n",
18044 (flags & SECTION_CODE) ? "." : "",
18045 name, suffix[smclass], flags & SECTION_ENTSIZE);
18049 rs6000_xcoff_select_section (tree decl, int reloc,
18050 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
18052 if (decl_readonly_section_1 (decl, reloc, 1))
18054 if (TREE_PUBLIC (decl))
18055 read_only_data_section ();
18057 read_only_private_data_section ();
18061 if (TREE_PUBLIC (decl))
18064 private_data_section ();
18069 rs6000_xcoff_unique_section (tree decl, int reloc ATTRIBUTE_UNUSED)
18073 /* Use select_section for private and uninitialized data. */
18074 if (!TREE_PUBLIC (decl)
18075 || DECL_COMMON (decl)
18076 || DECL_INITIAL (decl) == NULL_TREE
18077 || DECL_INITIAL (decl) == error_mark_node
18078 || (flag_zero_initialized_in_bss
18079 && initializer_zerop (DECL_INITIAL (decl))))
18082 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
18083 name = (*targetm.strip_name_encoding) (name);
18084 DECL_SECTION_NAME (decl) = build_string (strlen (name), name);
18087 /* Select section for constant in constant pool.
18089 On RS/6000, all constants are in the private read-only data area.
18090 However, if this is being placed in the TOC it must be output as a
18094 rs6000_xcoff_select_rtx_section (enum machine_mode mode, rtx x,
18095 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
18097 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
18100 read_only_private_data_section ();
18103 /* Remove any trailing [DS] or the like from the symbol name. */
18105 static const char *
18106 rs6000_xcoff_strip_name_encoding (const char *name)
18111 len = strlen (name);
18112 if (name[len - 1] == ']')
18113 return ggc_alloc_string (name, len - 4);
18118 /* Section attributes. AIX is always PIC. */
18120 static unsigned int
18121 rs6000_xcoff_section_type_flags (tree decl, const char *name, int reloc)
18123 unsigned int align;
18124 unsigned int flags = default_section_type_flags_1 (decl, name, reloc, 1);
18126 /* Align to at least UNIT size. */
18127 if (flags & SECTION_CODE)
18128 align = MIN_UNITS_PER_WORD;
18130 /* Increase alignment of large objects if not already stricter. */
18131 align = MAX ((DECL_ALIGN (decl) / BITS_PER_UNIT),
18132 int_size_in_bytes (TREE_TYPE (decl)) > MIN_UNITS_PER_WORD
18133 ? UNITS_PER_FP_WORD : MIN_UNITS_PER_WORD);
18135 return flags | (exact_log2 (align) & SECTION_ENTSIZE);
18138 /* Output at beginning of assembler file.
18140 Initialize the section names for the RS/6000 at this point.
18142 Specify filename, including full path, to assembler.
18144 We want to go into the TOC section so at least one .toc will be emitted.
18145 Also, in order to output proper .bs/.es pairs, we need at least one static
18146 [RW] section emitted.
18148 Finally, declare mcount when profiling to make the assembler happy. */
18151 rs6000_xcoff_file_start (void)
18153 rs6000_gen_section_name (&xcoff_bss_section_name,
18154 main_input_filename, ".bss_");
18155 rs6000_gen_section_name (&xcoff_private_data_section_name,
18156 main_input_filename, ".rw_");
18157 rs6000_gen_section_name (&xcoff_read_only_section_name,
18158 main_input_filename, ".ro_");
18160 fputs ("\t.file\t", asm_out_file);
18161 output_quoted_string (asm_out_file, main_input_filename);
18162 fputc ('\n', asm_out_file);
18163 if (write_symbols != NO_DEBUG)
18164 private_data_section ();
18167 fprintf (asm_out_file, "\t.extern %s\n", RS6000_MCOUNT);
18168 rs6000_file_start ();
18171 /* Output at end of assembler file.
18172 On the RS/6000, referencing data should automatically pull in text. */
18175 rs6000_xcoff_file_end (void)
18178 fputs ("_section_.text:\n", asm_out_file);
18180 fputs (TARGET_32BIT
18181 ? "\t.long _section_.text\n" : "\t.llong _section_.text\n",
18184 #endif /* TARGET_XCOFF */
18186 /* Compute a (partial) cost for rtx X. Return true if the complete
18187 cost has been computed, and false if subexpressions should be
18188 scanned. In either case, *TOTAL contains the cost result. */
18191 rs6000_rtx_costs (rtx x, int code, int outer_code, int *total)
18193 enum machine_mode mode = GET_MODE (x);
18197 /* On the RS/6000, if it is valid in the insn, it is free. */
18199 if (((outer_code == SET
18200 || outer_code == PLUS
18201 || outer_code == MINUS)
18202 && (CONST_OK_FOR_LETTER_P (INTVAL (x), 'I')
18203 || CONST_OK_FOR_LETTER_P (INTVAL (x), 'L')))
18204 || (outer_code == AND
18205 && (CONST_OK_FOR_LETTER_P (INTVAL (x), 'K')
18206 || (CONST_OK_FOR_LETTER_P (INTVAL (x),
18207 mode == SImode ? 'L' : 'J'))
18208 || mask_operand (x, VOIDmode)))
18209 || ((outer_code == IOR || outer_code == XOR)
18210 && (CONST_OK_FOR_LETTER_P (INTVAL (x), 'K')
18211 || (CONST_OK_FOR_LETTER_P (INTVAL (x),
18212 mode == SImode ? 'L' : 'J'))))
18213 || outer_code == ASHIFT
18214 || outer_code == ASHIFTRT
18215 || outer_code == LSHIFTRT
18216 || outer_code == ROTATE
18217 || outer_code == ROTATERT
18218 || outer_code == ZERO_EXTRACT
18219 || (outer_code == MULT
18220 && CONST_OK_FOR_LETTER_P (INTVAL (x), 'I'))
18221 || ((outer_code == DIV || outer_code == UDIV
18222 || outer_code == MOD || outer_code == UMOD)
18223 && exact_log2 (INTVAL (x)) >= 0)
18224 || (outer_code == COMPARE
18225 && (CONST_OK_FOR_LETTER_P (INTVAL (x), 'I')
18226 || CONST_OK_FOR_LETTER_P (INTVAL (x), 'K')))
18227 || (outer_code == EQ
18228 && (CONST_OK_FOR_LETTER_P (INTVAL (x), 'I')
18229 || CONST_OK_FOR_LETTER_P (INTVAL (x), 'K')
18230 || (CONST_OK_FOR_LETTER_P (INTVAL (x),
18231 mode == SImode ? 'L' : 'J'))))
18232 || (outer_code == GTU
18233 && CONST_OK_FOR_LETTER_P (INTVAL (x), 'I'))
18234 || (outer_code == LTU
18235 && CONST_OK_FOR_LETTER_P (INTVAL (x), 'P')))
18240 else if ((outer_code == PLUS
18241 && reg_or_add_cint_operand (x, VOIDmode))
18242 || (outer_code == MINUS
18243 && reg_or_sub_cint_operand (x, VOIDmode))
18244 || ((outer_code == SET
18245 || outer_code == IOR
18246 || outer_code == XOR)
18248 & ~ (unsigned HOST_WIDE_INT) 0xffffffff) == 0))
18250 *total = COSTS_N_INSNS (1);
18257 && ((outer_code == AND
18258 && (CONST_OK_FOR_LETTER_P (INTVAL (x), 'K')
18259 || CONST_OK_FOR_LETTER_P (INTVAL (x), 'L')
18260 || mask_operand (x, DImode)))
18261 || ((outer_code == IOR || outer_code == XOR)
18262 && CONST_DOUBLE_HIGH (x) == 0
18263 && (CONST_DOUBLE_LOW (x)
18264 & ~ (unsigned HOST_WIDE_INT) 0xffff) == 0)))
18269 else if (mode == DImode
18270 && (outer_code == SET
18271 || outer_code == IOR
18272 || outer_code == XOR)
18273 && CONST_DOUBLE_HIGH (x) == 0)
18275 *total = COSTS_N_INSNS (1);
18284 /* When optimizing for size, MEM should be slightly more expensive
18285 than generating address, e.g., (plus (reg) (const)).
18286 L1 cache latency is about two instructions. */
18287 *total = optimize_size ? COSTS_N_INSNS (1) + 1 : COSTS_N_INSNS (2);
18295 if (mode == DFmode)
18297 if (GET_CODE (XEXP (x, 0)) == MULT)
18299 /* FNMA accounted in outer NEG. */
18300 if (outer_code == NEG)
18301 *total = rs6000_cost->dmul - rs6000_cost->fp;
18303 *total = rs6000_cost->dmul;
18306 *total = rs6000_cost->fp;
18308 else if (mode == SFmode)
18310 /* FNMA accounted in outer NEG. */
18311 if (outer_code == NEG && GET_CODE (XEXP (x, 0)) == MULT)
18314 *total = rs6000_cost->fp;
18316 else if (GET_CODE (XEXP (x, 0)) == MULT)
18318 /* The rs6000 doesn't have shift-and-add instructions. */
18319 rs6000_rtx_costs (XEXP (x, 0), MULT, PLUS, total);
18320 *total += COSTS_N_INSNS (1);
18323 *total = COSTS_N_INSNS (1);
18327 if (mode == DFmode)
18329 if (GET_CODE (XEXP (x, 0)) == MULT)
18331 /* FNMA accounted in outer NEG. */
18332 if (outer_code == NEG)
18335 *total = rs6000_cost->dmul;
18338 *total = rs6000_cost->fp;
18340 else if (mode == SFmode)
18342 /* FNMA accounted in outer NEG. */
18343 if (outer_code == NEG && GET_CODE (XEXP (x, 0)) == MULT)
18346 *total = rs6000_cost->fp;
18348 else if (GET_CODE (XEXP (x, 0)) == MULT)
18350 /* The rs6000 doesn't have shift-and-sub instructions. */
18351 rs6000_rtx_costs (XEXP (x, 0), MULT, MINUS, total);
18352 *total += COSTS_N_INSNS (1);
18355 *total = COSTS_N_INSNS (1);
18359 if (GET_CODE (XEXP (x, 1)) == CONST_INT
18360 && CONST_OK_FOR_LETTER_P (INTVAL (XEXP (x, 1)), 'I'))
18362 if (INTVAL (XEXP (x, 1)) >= -256
18363 && INTVAL (XEXP (x, 1)) <= 255)
18364 *total = rs6000_cost->mulsi_const9;
18366 *total = rs6000_cost->mulsi_const;
18368 /* FMA accounted in outer PLUS/MINUS. */
18369 else if ((mode == DFmode || mode == SFmode)
18370 && (outer_code == PLUS || outer_code == MINUS))
18372 else if (mode == DFmode)
18373 *total = rs6000_cost->dmul;
18374 else if (mode == SFmode)
18375 *total = rs6000_cost->fp;
18376 else if (mode == DImode)
18377 *total = rs6000_cost->muldi;
18379 *total = rs6000_cost->mulsi;
18384 if (FLOAT_MODE_P (mode))
18386 *total = mode == DFmode ? rs6000_cost->ddiv
18387 : rs6000_cost->sdiv;
18394 if (GET_CODE (XEXP (x, 1)) == CONST_INT
18395 && exact_log2 (INTVAL (XEXP (x, 1))) >= 0)
18397 if (code == DIV || code == MOD)
18399 *total = COSTS_N_INSNS (2);
18402 *total = COSTS_N_INSNS (1);
18406 if (GET_MODE (XEXP (x, 1)) == DImode)
18407 *total = rs6000_cost->divdi;
18409 *total = rs6000_cost->divsi;
18411 /* Add in shift and subtract for MOD. */
18412 if (code == MOD || code == UMOD)
18413 *total += COSTS_N_INSNS (2);
18417 *total = COSTS_N_INSNS (4);
18421 if (outer_code == AND || outer_code == IOR || outer_code == XOR)
18432 *total = COSTS_N_INSNS (1);
18440 /* Handle mul_highpart. */
18441 if (outer_code == TRUNCATE
18442 && GET_CODE (XEXP (x, 0)) == MULT)
18444 if (mode == DImode)
18445 *total = rs6000_cost->muldi;
18447 *total = rs6000_cost->mulsi;
18450 else if (outer_code == AND)
18453 *total = COSTS_N_INSNS (1);
18458 if (GET_CODE (XEXP (x, 0)) == MEM)
18461 *total = COSTS_N_INSNS (1);
18467 if (!FLOAT_MODE_P (mode))
18469 *total = COSTS_N_INSNS (1);
18475 case UNSIGNED_FLOAT:
18478 case FLOAT_TRUNCATE:
18479 *total = rs6000_cost->fp;
18483 if (mode == DFmode)
18486 *total = rs6000_cost->fp;
18490 switch (XINT (x, 1))
18493 *total = rs6000_cost->fp;
18505 *total = COSTS_N_INSNS (1);
18508 else if (FLOAT_MODE_P (mode)
18509 && TARGET_PPC_GFXOPT && TARGET_HARD_FLOAT && TARGET_FPRS)
18511 *total = rs6000_cost->fp;
18519 /* Carry bit requires mode == Pmode.
18520 NEG or PLUS already counted so only add one. */
18522 && (outer_code == NEG || outer_code == PLUS))
18524 *total = COSTS_N_INSNS (1);
18527 if (outer_code == SET)
18529 if (XEXP (x, 1) == const0_rtx)
18531 *total = COSTS_N_INSNS (2);
18534 else if (mode == Pmode)
18536 *total = COSTS_N_INSNS (3);
18545 if (outer_code == SET && (XEXP (x, 1) == const0_rtx))
18547 *total = COSTS_N_INSNS (2);
18551 if (outer_code == COMPARE)
18565 /* A C expression returning the cost of moving data from a register of class
18566 CLASS1 to one of CLASS2. */
18569 rs6000_register_move_cost (enum machine_mode mode,
18570 enum reg_class from, enum reg_class to)
18572 /* Moves from/to GENERAL_REGS. */
18573 if (reg_classes_intersect_p (to, GENERAL_REGS)
18574 || reg_classes_intersect_p (from, GENERAL_REGS))
18576 if (! reg_classes_intersect_p (to, GENERAL_REGS))
18579 if (from == FLOAT_REGS || from == ALTIVEC_REGS)
18580 return (rs6000_memory_move_cost (mode, from, 0)
18581 + rs6000_memory_move_cost (mode, GENERAL_REGS, 0));
18583 /* It's more expensive to move CR_REGS than CR0_REGS because of the
18585 else if (from == CR_REGS)
18589 /* A move will cost one instruction per GPR moved. */
18590 return 2 * hard_regno_nregs[0][mode];
18593 /* Moving between two similar registers is just one instruction. */
18594 else if (reg_classes_intersect_p (to, from))
18595 return mode == TFmode ? 4 : 2;
18597 /* Everything else has to go through GENERAL_REGS. */
18599 return (rs6000_register_move_cost (mode, GENERAL_REGS, to)
18600 + rs6000_register_move_cost (mode, from, GENERAL_REGS));
18603 /* A C expressions returning the cost of moving data of MODE from a register to
18607 rs6000_memory_move_cost (enum machine_mode mode, enum reg_class class,
18608 int in ATTRIBUTE_UNUSED)
18610 if (reg_classes_intersect_p (class, GENERAL_REGS))
18611 return 4 * hard_regno_nregs[0][mode];
18612 else if (reg_classes_intersect_p (class, FLOAT_REGS))
18613 return 4 * hard_regno_nregs[32][mode];
18614 else if (reg_classes_intersect_p (class, ALTIVEC_REGS))
18615 return 4 * hard_regno_nregs[FIRST_ALTIVEC_REGNO][mode];
18617 return 4 + rs6000_register_move_cost (mode, class, GENERAL_REGS);
18620 /* Newton-Raphson approximation of single-precision floating point divide n/d.
18621 Assumes no trapping math and finite arguments. */
18624 rs6000_emit_swdivsf (rtx res, rtx n, rtx d)
18626 rtx x0, e0, e1, y1, u0, v0, one;
18628 x0 = gen_reg_rtx (SFmode);
18629 e0 = gen_reg_rtx (SFmode);
18630 e1 = gen_reg_rtx (SFmode);
18631 y1 = gen_reg_rtx (SFmode);
18632 u0 = gen_reg_rtx (SFmode);
18633 v0 = gen_reg_rtx (SFmode);
18634 one = force_reg (SFmode, CONST_DOUBLE_FROM_REAL_VALUE (dconst1, SFmode));
18636 /* x0 = 1./d estimate */
18637 emit_insn (gen_rtx_SET (VOIDmode, x0,
18638 gen_rtx_UNSPEC (SFmode, gen_rtvec (1, d),
18640 /* e0 = 1. - d * x0 */
18641 emit_insn (gen_rtx_SET (VOIDmode, e0,
18642 gen_rtx_MINUS (SFmode, one,
18643 gen_rtx_MULT (SFmode, d, x0))));
18644 /* e1 = e0 + e0 * e0 */
18645 emit_insn (gen_rtx_SET (VOIDmode, e1,
18646 gen_rtx_PLUS (SFmode,
18647 gen_rtx_MULT (SFmode, e0, e0), e0)));
18648 /* y1 = x0 + e1 * x0 */
18649 emit_insn (gen_rtx_SET (VOIDmode, y1,
18650 gen_rtx_PLUS (SFmode,
18651 gen_rtx_MULT (SFmode, e1, x0), x0)));
18653 emit_insn (gen_rtx_SET (VOIDmode, u0,
18654 gen_rtx_MULT (SFmode, n, y1)));
18655 /* v0 = n - d * u0 */
18656 emit_insn (gen_rtx_SET (VOIDmode, v0,
18657 gen_rtx_MINUS (SFmode, n,
18658 gen_rtx_MULT (SFmode, d, u0))));
18659 /* res = u0 + v0 * y1 */
18660 emit_insn (gen_rtx_SET (VOIDmode, res,
18661 gen_rtx_PLUS (SFmode,
18662 gen_rtx_MULT (SFmode, v0, y1), u0)));
18665 /* Newton-Raphson approximation of double-precision floating point divide n/d.
18666 Assumes no trapping math and finite arguments. */
18669 rs6000_emit_swdivdf (rtx res, rtx n, rtx d)
18671 rtx x0, e0, e1, e2, y1, y2, y3, u0, v0, one;
18673 x0 = gen_reg_rtx (DFmode);
18674 e0 = gen_reg_rtx (DFmode);
18675 e1 = gen_reg_rtx (DFmode);
18676 e2 = gen_reg_rtx (DFmode);
18677 y1 = gen_reg_rtx (DFmode);
18678 y2 = gen_reg_rtx (DFmode);
18679 y3 = gen_reg_rtx (DFmode);
18680 u0 = gen_reg_rtx (DFmode);
18681 v0 = gen_reg_rtx (DFmode);
18682 one = force_reg (DFmode, CONST_DOUBLE_FROM_REAL_VALUE (dconst1, DFmode));
18684 /* x0 = 1./d estimate */
18685 emit_insn (gen_rtx_SET (VOIDmode, x0,
18686 gen_rtx_UNSPEC (DFmode, gen_rtvec (1, d),
18688 /* e0 = 1. - d * x0 */
18689 emit_insn (gen_rtx_SET (VOIDmode, e0,
18690 gen_rtx_MINUS (DFmode, one,
18691 gen_rtx_MULT (SFmode, d, x0))));
18692 /* y1 = x0 + e0 * x0 */
18693 emit_insn (gen_rtx_SET (VOIDmode, y1,
18694 gen_rtx_PLUS (DFmode,
18695 gen_rtx_MULT (DFmode, e0, x0), x0)));
18697 emit_insn (gen_rtx_SET (VOIDmode, e1,
18698 gen_rtx_MULT (DFmode, e0, e0)));
18699 /* y2 = y1 + e1 * y1 */
18700 emit_insn (gen_rtx_SET (VOIDmode, y2,
18701 gen_rtx_PLUS (DFmode,
18702 gen_rtx_MULT (DFmode, e1, y1), y1)));
18704 emit_insn (gen_rtx_SET (VOIDmode, e2,
18705 gen_rtx_MULT (DFmode, e1, e1)));
18706 /* y3 = y2 + e2 * y2 */
18707 emit_insn (gen_rtx_SET (VOIDmode, y3,
18708 gen_rtx_PLUS (DFmode,
18709 gen_rtx_MULT (DFmode, e2, y2), y2)));
18711 emit_insn (gen_rtx_SET (VOIDmode, u0,
18712 gen_rtx_MULT (DFmode, n, y3)));
18713 /* v0 = n - d * u0 */
18714 emit_insn (gen_rtx_SET (VOIDmode, v0,
18715 gen_rtx_MINUS (DFmode, n,
18716 gen_rtx_MULT (DFmode, d, u0))));
18717 /* res = u0 + v0 * y3 */
18718 emit_insn (gen_rtx_SET (VOIDmode, res,
18719 gen_rtx_PLUS (DFmode,
18720 gen_rtx_MULT (DFmode, v0, y3), u0)));
18723 /* Return an RTX representing where to find the function value of a
18724 function returning MODE. */
18726 rs6000_complex_function_value (enum machine_mode mode)
18728 unsigned int regno;
18730 enum machine_mode inner = GET_MODE_INNER (mode);
18731 unsigned int inner_bytes = GET_MODE_SIZE (inner);
18733 if (FLOAT_MODE_P (mode) && TARGET_HARD_FLOAT && TARGET_FPRS)
18734 regno = FP_ARG_RETURN;
18737 regno = GP_ARG_RETURN;
18739 /* 32-bit is OK since it'll go in r3/r4. */
18740 if (TARGET_32BIT && inner_bytes >= 4)
18741 return gen_rtx_REG (mode, regno);
18744 if (inner_bytes >= 8)
18745 return gen_rtx_REG (mode, regno);
18747 r1 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno),
18749 r2 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno + 1),
18750 GEN_INT (inner_bytes));
18751 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
18754 /* Define how to find the value returned by a function.
18755 VALTYPE is the data type of the value (as a tree).
18756 If the precise function being called is known, FUNC is its FUNCTION_DECL;
18757 otherwise, FUNC is 0.
18759 On the SPE, both FPs and vectors are returned in r3.
18761 On RS/6000 an integer value is in r3 and a floating-point value is in
18762 fp1, unless -msoft-float. */
18765 rs6000_function_value (tree valtype, tree func ATTRIBUTE_UNUSED)
18767 enum machine_mode mode;
18768 unsigned int regno;
18770 /* Special handling for structs in darwin64. */
18771 if (rs6000_darwin64_abi
18772 && TYPE_MODE (valtype) == BLKmode
18773 && TREE_CODE (valtype) == RECORD_TYPE
18774 && int_size_in_bytes (valtype) > 0)
18776 CUMULATIVE_ARGS valcum;
18780 valcum.fregno = FP_ARG_MIN_REG;
18781 valcum.vregno = ALTIVEC_ARG_MIN_REG;
18782 /* Do a trial code generation as if this were going to be passed as
18783 an argument; if any part goes in memory, we return NULL. */
18784 valret = rs6000_darwin64_record_arg (&valcum, valtype, 1, true);
18787 /* Otherwise fall through to standard ABI rules. */
18790 if (TARGET_32BIT && TARGET_POWERPC64 && TYPE_MODE (valtype) == DImode)
18792 /* Long long return value need be split in -mpowerpc64, 32bit ABI. */
18793 return gen_rtx_PARALLEL (DImode,
18795 gen_rtx_EXPR_LIST (VOIDmode,
18796 gen_rtx_REG (SImode, GP_ARG_RETURN),
18798 gen_rtx_EXPR_LIST (VOIDmode,
18799 gen_rtx_REG (SImode,
18800 GP_ARG_RETURN + 1),
18804 if ((INTEGRAL_TYPE_P (valtype)
18805 && TYPE_PRECISION (valtype) < BITS_PER_WORD)
18806 || POINTER_TYPE_P (valtype))
18807 mode = TARGET_32BIT ? SImode : DImode;
18809 mode = TYPE_MODE (valtype);
18811 if (SCALAR_FLOAT_TYPE_P (valtype) && TARGET_HARD_FLOAT && TARGET_FPRS)
18812 regno = FP_ARG_RETURN;
18813 else if (TREE_CODE (valtype) == COMPLEX_TYPE
18814 && targetm.calls.split_complex_arg)
18815 return rs6000_complex_function_value (mode);
18816 else if (TREE_CODE (valtype) == VECTOR_TYPE
18817 && TARGET_ALTIVEC && TARGET_ALTIVEC_ABI
18818 && ALTIVEC_VECTOR_MODE (mode))
18819 regno = ALTIVEC_ARG_RETURN;
18820 else if (TARGET_E500_DOUBLE && TARGET_HARD_FLOAT
18821 && (mode == DFmode || mode == DCmode))
18822 return spe_build_register_parallel (mode, GP_ARG_RETURN);
18824 regno = GP_ARG_RETURN;
18826 return gen_rtx_REG (mode, regno);
18829 /* Define how to find the value returned by a library function
18830 assuming the value has mode MODE. */
18832 rs6000_libcall_value (enum machine_mode mode)
18834 unsigned int regno;
18836 if (TARGET_32BIT && TARGET_POWERPC64 && mode == DImode)
18838 /* Long long return value need be split in -mpowerpc64, 32bit ABI. */
18839 return gen_rtx_PARALLEL (DImode,
18841 gen_rtx_EXPR_LIST (VOIDmode,
18842 gen_rtx_REG (SImode, GP_ARG_RETURN),
18844 gen_rtx_EXPR_LIST (VOIDmode,
18845 gen_rtx_REG (SImode,
18846 GP_ARG_RETURN + 1),
18850 if (GET_MODE_CLASS (mode) == MODE_FLOAT
18851 && TARGET_HARD_FLOAT && TARGET_FPRS)
18852 regno = FP_ARG_RETURN;
18853 else if (ALTIVEC_VECTOR_MODE (mode)
18854 && TARGET_ALTIVEC && TARGET_ALTIVEC_ABI)
18855 regno = ALTIVEC_ARG_RETURN;
18856 else if (COMPLEX_MODE_P (mode) && targetm.calls.split_complex_arg)
18857 return rs6000_complex_function_value (mode);
18858 else if (TARGET_E500_DOUBLE && TARGET_HARD_FLOAT
18859 && (mode == DFmode || mode == DCmode))
18860 return spe_build_register_parallel (mode, GP_ARG_RETURN);
18862 regno = GP_ARG_RETURN;
18864 return gen_rtx_REG (mode, regno);
18867 /* Define the offset between two registers, FROM to be eliminated and its
18868 replacement TO, at the start of a routine. */
18870 rs6000_initial_elimination_offset (int from, int to)
18872 rs6000_stack_t *info = rs6000_stack_info ();
18873 HOST_WIDE_INT offset;
18875 if (from == HARD_FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
18876 offset = info->push_p ? 0 : -info->total_size;
18877 else if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
18879 offset = info->push_p ? 0 : -info->total_size;
18880 if (FRAME_GROWS_DOWNWARD)
18881 offset += info->fixed_size + info->vars_size + info->parm_size;
18883 else if (from == FRAME_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
18884 offset = FRAME_GROWS_DOWNWARD
18885 ? info->fixed_size + info->vars_size + info->parm_size
18887 else if (from == ARG_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
18888 offset = info->total_size;
18889 else if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
18890 offset = info->push_p ? info->total_size : 0;
18891 else if (from == RS6000_PIC_OFFSET_TABLE_REGNUM)
18894 gcc_unreachable ();
18899 /* Return true if TYPE is a SPE or AltiVec opaque type. */
18902 rs6000_is_opaque_type (tree type)
18904 return (type == opaque_V2SI_type_node
18905 || type == opaque_V2SF_type_node
18906 || type == opaque_p_V2SI_type_node
18907 || type == opaque_V4SI_type_node);
18911 rs6000_dwarf_register_span (rtx reg)
18916 && (SPE_VECTOR_MODE (GET_MODE (reg))
18917 || (TARGET_E500_DOUBLE && GET_MODE (reg) == DFmode)))
18922 regno = REGNO (reg);
18924 /* The duality of the SPE register size wreaks all kinds of havoc.
18925 This is a way of distinguishing r0 in 32-bits from r0 in
18928 gen_rtx_PARALLEL (VOIDmode,
18931 gen_rtx_REG (SImode, regno + 1200),
18932 gen_rtx_REG (SImode, regno))
18934 gen_rtx_REG (SImode, regno),
18935 gen_rtx_REG (SImode, regno + 1200)));
18938 /* Map internal gcc register numbers to DWARF2 register numbers. */
18941 rs6000_dbx_register_number (unsigned int regno)
18943 if (regno <= 63 || write_symbols != DWARF2_DEBUG)
18945 if (regno == MQ_REGNO)
18947 if (regno == LINK_REGISTER_REGNUM)
18949 if (regno == COUNT_REGISTER_REGNUM)
18951 if (CR_REGNO_P (regno))
18952 return regno - CR0_REGNO + 86;
18953 if (regno == XER_REGNO)
18955 if (ALTIVEC_REGNO_P (regno))
18956 return regno - FIRST_ALTIVEC_REGNO + 1124;
18957 if (regno == VRSAVE_REGNO)
18959 if (regno == VSCR_REGNO)
18961 if (regno == SPE_ACC_REGNO)
18963 if (regno == SPEFSCR_REGNO)
18965 /* SPE high reg number. We get these values of regno from
18966 rs6000_dwarf_register_span. */
18967 gcc_assert (regno >= 1200 && regno < 1232);
18971 /* target hook eh_return_filter_mode */
18972 static enum machine_mode
18973 rs6000_eh_return_filter_mode (void)
18975 return TARGET_32BIT ? SImode : word_mode;
18978 /* Target hook for vector_mode_supported_p. */
18980 rs6000_vector_mode_supported_p (enum machine_mode mode)
18983 if (TARGET_SPE && SPE_VECTOR_MODE (mode))
18986 else if (TARGET_ALTIVEC && ALTIVEC_VECTOR_MODE (mode))
18993 /* Target hook for invalid_arg_for_unprototyped_fn. */
18994 static const char *
18995 invalid_arg_for_unprototyped_fn (tree typelist, tree funcdecl, tree val)
18997 return (!rs6000_darwin64_abi
18999 && TREE_CODE (TREE_TYPE (val)) == VECTOR_TYPE
19000 && (funcdecl == NULL_TREE
19001 || (TREE_CODE (funcdecl) == FUNCTION_DECL
19002 && DECL_BUILT_IN_CLASS (funcdecl) != BUILT_IN_MD)))
19003 ? N_("AltiVec argument passed to unprototyped function")
19007 /* For TARGET_SECURE_PLT 32-bit PIC code we can save PIC register
19008 setup by using __stack_chk_fail_local hidden function instead of
19009 calling __stack_chk_fail directly. Otherwise it is better to call
19010 __stack_chk_fail directly. */
19013 rs6000_stack_protect_fail (void)
19015 return (DEFAULT_ABI == ABI_V4 && TARGET_SECURE_PLT && flag_pic)
19016 ? default_hidden_stack_protect_fail ()
19017 : default_external_stack_protect_fail ();
19020 #include "gt-rs6000.h"