1 /* Subroutines used for code generation on IBM RS/6000.
2 Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
4 Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it
9 under the terms of the GNU General Public License as published
10 by the Free Software Foundation; either version 2, or (at your
11 option) any later version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT
14 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
15 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
16 License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the
20 Free Software Foundation, 59 Temple Place - Suite 330, Boston,
21 MA 02111-1307, USA. */
25 #include "coretypes.h"
29 #include "hard-reg-set.h"
31 #include "insn-config.h"
32 #include "conditions.h"
33 #include "insn-attr.h"
43 #include "basic-block.h"
44 #include "integrate.h"
50 #include "target-def.h"
51 #include "langhooks.h"
53 #include "cfglayout.h"
54 #include "sched-int.h"
55 #include "tree-gimple.h"
58 #include "xcoffout.h" /* get declarations of xcoff_*_section_name */
61 #include "gstab.h" /* for N_SLINE */
64 #ifndef TARGET_NO_PROTOTYPE
65 #define TARGET_NO_PROTOTYPE 0
68 #define min(A,B) ((A) < (B) ? (A) : (B))
69 #define max(A,B) ((A) > (B) ? (A) : (B))
71 /* Structure used to define the rs6000 stack */
72 typedef struct rs6000_stack {
73 int first_gp_reg_save; /* first callee saved GP register used */
74 int first_fp_reg_save; /* first callee saved FP register used */
75 int first_altivec_reg_save; /* first callee saved AltiVec register used */
76 int lr_save_p; /* true if the link reg needs to be saved */
77 int cr_save_p; /* true if the CR reg needs to be saved */
78 unsigned int vrsave_mask; /* mask of vec registers to save */
79 int toc_save_p; /* true if the TOC needs to be saved */
80 int push_p; /* true if we need to allocate stack space */
81 int calls_p; /* true if the function makes any calls */
82 int world_save_p; /* true if we're saving *everything*:
83 r13-r31, cr, f14-f31, vrsave, v20-v31 */
84 enum rs6000_abi abi; /* which ABI to use */
85 int gp_save_offset; /* offset to save GP regs from initial SP */
86 int fp_save_offset; /* offset to save FP regs from initial SP */
87 int altivec_save_offset; /* offset to save AltiVec regs from initial SP */
88 int lr_save_offset; /* offset to save LR from initial SP */
89 int cr_save_offset; /* offset to save CR from initial SP */
90 int vrsave_save_offset; /* offset to save VRSAVE from initial SP */
91 int spe_gp_save_offset; /* offset to save spe 64-bit gprs */
92 int toc_save_offset; /* offset to save the TOC pointer */
93 int varargs_save_offset; /* offset to save the varargs registers */
94 int ehrd_offset; /* offset to EH return data */
95 int reg_size; /* register size (4 or 8) */
96 int varargs_size; /* size to hold V.4 args passed in regs */
97 HOST_WIDE_INT vars_size; /* variable save area size */
98 int parm_size; /* outgoing parameter size */
99 int save_size; /* save area size */
100 int fixed_size; /* fixed size of stack frame */
101 int gp_size; /* size of saved GP registers */
102 int fp_size; /* size of saved FP registers */
103 int altivec_size; /* size of saved AltiVec registers */
104 int cr_size; /* size to hold CR if not in save_size */
105 int lr_size; /* size to hold LR if not in save_size */
106 int vrsave_size; /* size to hold VRSAVE if not in save_size */
107 int altivec_padding_size; /* size of altivec alignment padding if
109 int spe_gp_size; /* size of 64-bit GPR save size for SPE */
110 int spe_padding_size;
111 int toc_size; /* size to hold TOC if not in save_size */
112 HOST_WIDE_INT total_size; /* total bytes allocated for stack */
113 int spe_64bit_regs_used;
116 /* Target cpu type */
118 enum processor_type rs6000_cpu;
119 struct rs6000_cpu_select rs6000_select[3] =
121 /* switch name, tune arch */
122 { (const char *)0, "--with-cpu=", 1, 1 },
123 { (const char *)0, "-mcpu=", 1, 1 },
124 { (const char *)0, "-mtune=", 1, 0 },
127 /* Always emit branch hint bits. */
128 static GTY(()) bool rs6000_always_hint;
130 /* Schedule instructions for group formation. */
131 static GTY(()) bool rs6000_sched_groups;
133 /* Support adjust_priority scheduler hook
134 and -mprioritize-restricted-insns= option. */
135 const char *rs6000_sched_restricted_insns_priority_str;
136 int rs6000_sched_restricted_insns_priority;
138 /* Support for -msched-costly-dep option. */
139 const char *rs6000_sched_costly_dep_str;
140 enum rs6000_dependence_cost rs6000_sched_costly_dep;
142 /* Support for -minsert-sched-nops option. */
143 const char *rs6000_sched_insert_nops_str;
144 enum rs6000_nop_insertion rs6000_sched_insert_nops;
146 /* Support targetm.vectorize.builtin_mask_for_load. */
147 static GTY(()) tree altivec_builtin_mask_for_load;
149 /* Size of long double */
150 const char *rs6000_long_double_size_string;
151 int rs6000_long_double_type_size;
153 /* Whether -mabi=altivec has appeared */
154 int rs6000_altivec_abi;
156 /* Whether VRSAVE instructions should be generated. */
157 int rs6000_altivec_vrsave;
159 /* String from -mvrsave= option. */
160 const char *rs6000_altivec_vrsave_string;
162 /* Nonzero if we want SPE ABI extensions. */
165 /* Whether isel instructions should be generated. */
168 /* Whether SPE simd instructions should be generated. */
171 /* Nonzero if floating point operations are done in the GPRs. */
172 int rs6000_float_gprs = 0;
174 /* Nonzero if we want Darwin's struct-by-value-in-regs ABI. */
175 int rs6000_darwin64_abi;
177 /* String from -mfloat-gprs=. */
178 const char *rs6000_float_gprs_string;
180 /* String from -misel=. */
181 const char *rs6000_isel_string;
183 /* String from -mspe=. */
184 const char *rs6000_spe_string;
186 /* Set to nonzero once AIX common-mode calls have been defined. */
187 static GTY(()) int common_mode_defined;
189 /* Save information from a "cmpxx" operation until the branch or scc is
191 rtx rs6000_compare_op0, rs6000_compare_op1;
192 int rs6000_compare_fp_p;
194 /* Label number of label created for -mrelocatable, to call to so we can
195 get the address of the GOT section */
196 int rs6000_pic_labelno;
199 /* Which abi to adhere to */
200 const char *rs6000_abi_name;
202 /* Semantics of the small data area */
203 enum rs6000_sdata_type rs6000_sdata = SDATA_DATA;
205 /* Which small data model to use */
206 const char *rs6000_sdata_name = (char *)0;
208 /* Counter for labels which are to be placed in .fixup. */
209 int fixuplabelno = 0;
212 /* Bit size of immediate TLS offsets and string from which it is decoded. */
213 int rs6000_tls_size = 32;
214 const char *rs6000_tls_size_string;
216 /* ABI enumeration available for subtarget to use. */
217 enum rs6000_abi rs6000_current_abi;
219 /* ABI string from -mabi= option. */
220 const char *rs6000_abi_string;
222 /* Whether to use variant of AIX ABI for PowerPC64 Linux. */
226 const char *rs6000_debug_name;
227 int rs6000_debug_stack; /* debug stack applications */
228 int rs6000_debug_arg; /* debug argument handling */
230 /* Value is TRUE if register/mode pair is acceptable. */
231 bool rs6000_hard_regno_mode_ok_p[NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
233 /* Built in types. */
235 tree rs6000_builtin_types[RS6000_BTI_MAX];
236 tree rs6000_builtin_decls[RS6000_BUILTIN_COUNT];
238 int rs6000_warn_altivec_long = 1; /* On by default. */
239 const char *rs6000_warn_altivec_long_switch;
241 const char *rs6000_traceback_name;
243 traceback_default = 0,
249 /* Flag to say the TOC is initialized */
251 char toc_label_name[10];
253 /* Alias set for saves and restores from the rs6000 stack. */
254 static GTY(()) int rs6000_sr_alias_set;
256 /* Call distance, overridden by -mlongcall and #pragma longcall(1).
257 The only place that looks at this is rs6000_set_default_type_attributes;
258 everywhere else should rely on the presence or absence of a longcall
259 attribute on the function declaration. Exception: init_cumulative_args
260 looks at it too, for libcalls. */
261 int rs6000_default_long_calls;
262 const char *rs6000_longcall_switch;
264 /* Control alignment for fields within structures. */
265 /* String from -malign-XXXXX. */
266 const char *rs6000_alignment_string;
267 int rs6000_alignment_flags;
269 struct builtin_description
271 /* mask is not const because we're going to alter it below. This
272 nonsense will go away when we rewrite the -march infrastructure
273 to give us more target flag bits. */
275 const enum insn_code icode;
276 const char *const name;
277 const enum rs6000_builtins code;
280 /* Target cpu costs. */
282 struct processor_costs {
283 const int mulsi; /* cost of SImode multiplication. */
284 const int mulsi_const; /* cost of SImode multiplication by constant. */
285 const int mulsi_const9; /* cost of SImode mult by short constant. */
286 const int muldi; /* cost of DImode multiplication. */
287 const int divsi; /* cost of SImode division. */
288 const int divdi; /* cost of DImode division. */
289 const int fp; /* cost of simple SFmode and DFmode insns. */
290 const int dmul; /* cost of DFmode multiplication (and fmadd). */
291 const int sdiv; /* cost of SFmode division (fdivs). */
292 const int ddiv; /* cost of DFmode division (fdiv). */
295 const struct processor_costs *rs6000_cost;
297 /* Processor costs (relative to an add) */
299 /* Instruction size costs on 32bit processors. */
301 struct processor_costs size32_cost = {
302 COSTS_N_INSNS (1), /* mulsi */
303 COSTS_N_INSNS (1), /* mulsi_const */
304 COSTS_N_INSNS (1), /* mulsi_const9 */
305 COSTS_N_INSNS (1), /* muldi */
306 COSTS_N_INSNS (1), /* divsi */
307 COSTS_N_INSNS (1), /* divdi */
308 COSTS_N_INSNS (1), /* fp */
309 COSTS_N_INSNS (1), /* dmul */
310 COSTS_N_INSNS (1), /* sdiv */
311 COSTS_N_INSNS (1), /* ddiv */
314 /* Instruction size costs on 64bit processors. */
316 struct processor_costs size64_cost = {
317 COSTS_N_INSNS (1), /* mulsi */
318 COSTS_N_INSNS (1), /* mulsi_const */
319 COSTS_N_INSNS (1), /* mulsi_const9 */
320 COSTS_N_INSNS (1), /* muldi */
321 COSTS_N_INSNS (1), /* divsi */
322 COSTS_N_INSNS (1), /* divdi */
323 COSTS_N_INSNS (1), /* fp */
324 COSTS_N_INSNS (1), /* dmul */
325 COSTS_N_INSNS (1), /* sdiv */
326 COSTS_N_INSNS (1), /* ddiv */
329 /* Instruction costs on RIOS1 processors. */
331 struct processor_costs rios1_cost = {
332 COSTS_N_INSNS (5), /* mulsi */
333 COSTS_N_INSNS (4), /* mulsi_const */
334 COSTS_N_INSNS (3), /* mulsi_const9 */
335 COSTS_N_INSNS (5), /* muldi */
336 COSTS_N_INSNS (19), /* divsi */
337 COSTS_N_INSNS (19), /* divdi */
338 COSTS_N_INSNS (2), /* fp */
339 COSTS_N_INSNS (2), /* dmul */
340 COSTS_N_INSNS (19), /* sdiv */
341 COSTS_N_INSNS (19), /* ddiv */
344 /* Instruction costs on RIOS2 processors. */
346 struct processor_costs rios2_cost = {
347 COSTS_N_INSNS (2), /* mulsi */
348 COSTS_N_INSNS (2), /* mulsi_const */
349 COSTS_N_INSNS (2), /* mulsi_const9 */
350 COSTS_N_INSNS (2), /* muldi */
351 COSTS_N_INSNS (13), /* divsi */
352 COSTS_N_INSNS (13), /* divdi */
353 COSTS_N_INSNS (2), /* fp */
354 COSTS_N_INSNS (2), /* dmul */
355 COSTS_N_INSNS (17), /* sdiv */
356 COSTS_N_INSNS (17), /* ddiv */
359 /* Instruction costs on RS64A processors. */
361 struct processor_costs rs64a_cost = {
362 COSTS_N_INSNS (20), /* mulsi */
363 COSTS_N_INSNS (12), /* mulsi_const */
364 COSTS_N_INSNS (8), /* mulsi_const9 */
365 COSTS_N_INSNS (34), /* muldi */
366 COSTS_N_INSNS (65), /* divsi */
367 COSTS_N_INSNS (67), /* divdi */
368 COSTS_N_INSNS (4), /* fp */
369 COSTS_N_INSNS (4), /* dmul */
370 COSTS_N_INSNS (31), /* sdiv */
371 COSTS_N_INSNS (31), /* ddiv */
374 /* Instruction costs on MPCCORE processors. */
376 struct processor_costs mpccore_cost = {
377 COSTS_N_INSNS (2), /* mulsi */
378 COSTS_N_INSNS (2), /* mulsi_const */
379 COSTS_N_INSNS (2), /* mulsi_const9 */
380 COSTS_N_INSNS (2), /* muldi */
381 COSTS_N_INSNS (6), /* divsi */
382 COSTS_N_INSNS (6), /* divdi */
383 COSTS_N_INSNS (4), /* fp */
384 COSTS_N_INSNS (5), /* dmul */
385 COSTS_N_INSNS (10), /* sdiv */
386 COSTS_N_INSNS (17), /* ddiv */
389 /* Instruction costs on PPC403 processors. */
391 struct processor_costs ppc403_cost = {
392 COSTS_N_INSNS (4), /* mulsi */
393 COSTS_N_INSNS (4), /* mulsi_const */
394 COSTS_N_INSNS (4), /* mulsi_const9 */
395 COSTS_N_INSNS (4), /* muldi */
396 COSTS_N_INSNS (33), /* divsi */
397 COSTS_N_INSNS (33), /* divdi */
398 COSTS_N_INSNS (11), /* fp */
399 COSTS_N_INSNS (11), /* dmul */
400 COSTS_N_INSNS (11), /* sdiv */
401 COSTS_N_INSNS (11), /* ddiv */
404 /* Instruction costs on PPC405 processors. */
406 struct processor_costs ppc405_cost = {
407 COSTS_N_INSNS (5), /* mulsi */
408 COSTS_N_INSNS (4), /* mulsi_const */
409 COSTS_N_INSNS (3), /* mulsi_const9 */
410 COSTS_N_INSNS (5), /* muldi */
411 COSTS_N_INSNS (35), /* divsi */
412 COSTS_N_INSNS (35), /* divdi */
413 COSTS_N_INSNS (11), /* fp */
414 COSTS_N_INSNS (11), /* dmul */
415 COSTS_N_INSNS (11), /* sdiv */
416 COSTS_N_INSNS (11), /* ddiv */
419 /* Instruction costs on PPC440 processors. */
421 struct processor_costs ppc440_cost = {
422 COSTS_N_INSNS (3), /* mulsi */
423 COSTS_N_INSNS (2), /* mulsi_const */
424 COSTS_N_INSNS (2), /* mulsi_const9 */
425 COSTS_N_INSNS (3), /* muldi */
426 COSTS_N_INSNS (34), /* divsi */
427 COSTS_N_INSNS (34), /* divdi */
428 COSTS_N_INSNS (5), /* fp */
429 COSTS_N_INSNS (5), /* dmul */
430 COSTS_N_INSNS (19), /* sdiv */
431 COSTS_N_INSNS (33), /* ddiv */
434 /* Instruction costs on PPC601 processors. */
436 struct processor_costs ppc601_cost = {
437 COSTS_N_INSNS (5), /* mulsi */
438 COSTS_N_INSNS (5), /* mulsi_const */
439 COSTS_N_INSNS (5), /* mulsi_const9 */
440 COSTS_N_INSNS (5), /* muldi */
441 COSTS_N_INSNS (36), /* divsi */
442 COSTS_N_INSNS (36), /* divdi */
443 COSTS_N_INSNS (4), /* fp */
444 COSTS_N_INSNS (5), /* dmul */
445 COSTS_N_INSNS (17), /* sdiv */
446 COSTS_N_INSNS (31), /* ddiv */
449 /* Instruction costs on PPC603 processors. */
451 struct processor_costs ppc603_cost = {
452 COSTS_N_INSNS (5), /* mulsi */
453 COSTS_N_INSNS (3), /* mulsi_const */
454 COSTS_N_INSNS (2), /* mulsi_const9 */
455 COSTS_N_INSNS (5), /* muldi */
456 COSTS_N_INSNS (37), /* divsi */
457 COSTS_N_INSNS (37), /* divdi */
458 COSTS_N_INSNS (3), /* fp */
459 COSTS_N_INSNS (4), /* dmul */
460 COSTS_N_INSNS (18), /* sdiv */
461 COSTS_N_INSNS (33), /* ddiv */
464 /* Instruction costs on PPC604 processors. */
466 struct processor_costs ppc604_cost = {
467 COSTS_N_INSNS (4), /* mulsi */
468 COSTS_N_INSNS (4), /* mulsi_const */
469 COSTS_N_INSNS (4), /* mulsi_const9 */
470 COSTS_N_INSNS (4), /* muldi */
471 COSTS_N_INSNS (20), /* divsi */
472 COSTS_N_INSNS (20), /* divdi */
473 COSTS_N_INSNS (3), /* fp */
474 COSTS_N_INSNS (3), /* dmul */
475 COSTS_N_INSNS (18), /* sdiv */
476 COSTS_N_INSNS (32), /* ddiv */
479 /* Instruction costs on PPC604e processors. */
481 struct processor_costs ppc604e_cost = {
482 COSTS_N_INSNS (2), /* mulsi */
483 COSTS_N_INSNS (2), /* mulsi_const */
484 COSTS_N_INSNS (2), /* mulsi_const9 */
485 COSTS_N_INSNS (2), /* muldi */
486 COSTS_N_INSNS (20), /* divsi */
487 COSTS_N_INSNS (20), /* divdi */
488 COSTS_N_INSNS (3), /* fp */
489 COSTS_N_INSNS (3), /* dmul */
490 COSTS_N_INSNS (18), /* sdiv */
491 COSTS_N_INSNS (32), /* ddiv */
494 /* Instruction costs on PPC620 processors. */
496 struct processor_costs ppc620_cost = {
497 COSTS_N_INSNS (5), /* mulsi */
498 COSTS_N_INSNS (4), /* mulsi_const */
499 COSTS_N_INSNS (3), /* mulsi_const9 */
500 COSTS_N_INSNS (7), /* muldi */
501 COSTS_N_INSNS (21), /* divsi */
502 COSTS_N_INSNS (37), /* divdi */
503 COSTS_N_INSNS (3), /* fp */
504 COSTS_N_INSNS (3), /* dmul */
505 COSTS_N_INSNS (18), /* sdiv */
506 COSTS_N_INSNS (32), /* ddiv */
509 /* Instruction costs on PPC630 processors. */
511 struct processor_costs ppc630_cost = {
512 COSTS_N_INSNS (5), /* mulsi */
513 COSTS_N_INSNS (4), /* mulsi_const */
514 COSTS_N_INSNS (3), /* mulsi_const9 */
515 COSTS_N_INSNS (7), /* muldi */
516 COSTS_N_INSNS (21), /* divsi */
517 COSTS_N_INSNS (37), /* divdi */
518 COSTS_N_INSNS (3), /* fp */
519 COSTS_N_INSNS (3), /* dmul */
520 COSTS_N_INSNS (17), /* sdiv */
521 COSTS_N_INSNS (21), /* ddiv */
524 /* Instruction costs on PPC750 and PPC7400 processors. */
526 struct processor_costs ppc750_cost = {
527 COSTS_N_INSNS (5), /* mulsi */
528 COSTS_N_INSNS (3), /* mulsi_const */
529 COSTS_N_INSNS (2), /* mulsi_const9 */
530 COSTS_N_INSNS (5), /* muldi */
531 COSTS_N_INSNS (17), /* divsi */
532 COSTS_N_INSNS (17), /* divdi */
533 COSTS_N_INSNS (3), /* fp */
534 COSTS_N_INSNS (3), /* dmul */
535 COSTS_N_INSNS (17), /* sdiv */
536 COSTS_N_INSNS (31), /* ddiv */
539 /* Instruction costs on PPC7450 processors. */
541 struct processor_costs ppc7450_cost = {
542 COSTS_N_INSNS (4), /* mulsi */
543 COSTS_N_INSNS (3), /* mulsi_const */
544 COSTS_N_INSNS (3), /* mulsi_const9 */
545 COSTS_N_INSNS (4), /* muldi */
546 COSTS_N_INSNS (23), /* divsi */
547 COSTS_N_INSNS (23), /* divdi */
548 COSTS_N_INSNS (5), /* fp */
549 COSTS_N_INSNS (5), /* dmul */
550 COSTS_N_INSNS (21), /* sdiv */
551 COSTS_N_INSNS (35), /* ddiv */
554 /* Instruction costs on PPC8540 processors. */
556 struct processor_costs ppc8540_cost = {
557 COSTS_N_INSNS (4), /* mulsi */
558 COSTS_N_INSNS (4), /* mulsi_const */
559 COSTS_N_INSNS (4), /* mulsi_const9 */
560 COSTS_N_INSNS (4), /* muldi */
561 COSTS_N_INSNS (19), /* divsi */
562 COSTS_N_INSNS (19), /* divdi */
563 COSTS_N_INSNS (4), /* fp */
564 COSTS_N_INSNS (4), /* dmul */
565 COSTS_N_INSNS (29), /* sdiv */
566 COSTS_N_INSNS (29), /* ddiv */
569 /* Instruction costs on POWER4 and POWER5 processors. */
571 struct processor_costs power4_cost = {
572 COSTS_N_INSNS (3), /* mulsi */
573 COSTS_N_INSNS (2), /* mulsi_const */
574 COSTS_N_INSNS (2), /* mulsi_const9 */
575 COSTS_N_INSNS (4), /* muldi */
576 COSTS_N_INSNS (18), /* divsi */
577 COSTS_N_INSNS (34), /* divdi */
578 COSTS_N_INSNS (3), /* fp */
579 COSTS_N_INSNS (3), /* dmul */
580 COSTS_N_INSNS (17), /* sdiv */
581 COSTS_N_INSNS (17), /* ddiv */
585 static bool rs6000_function_ok_for_sibcall (tree, tree);
586 static rtx rs6000_generate_compare (enum rtx_code);
587 static void rs6000_maybe_dead (rtx);
588 static void rs6000_emit_stack_tie (void);
589 static void rs6000_frame_related (rtx, rtx, HOST_WIDE_INT, rtx, rtx);
590 static rtx spe_synthesize_frame_save (rtx);
591 static bool spe_func_has_64bit_regs_p (void);
592 static void emit_frame_save (rtx, rtx, enum machine_mode, unsigned int,
594 static rtx gen_frame_mem_offset (enum machine_mode, rtx, int);
595 static void rs6000_emit_allocate_stack (HOST_WIDE_INT, int);
596 static unsigned rs6000_hash_constant (rtx);
597 static unsigned toc_hash_function (const void *);
598 static int toc_hash_eq (const void *, const void *);
599 static int constant_pool_expr_1 (rtx, int *, int *);
600 static bool constant_pool_expr_p (rtx);
601 static bool legitimate_small_data_p (enum machine_mode, rtx);
602 static bool legitimate_indexed_address_p (rtx, int);
603 static bool legitimate_lo_sum_address_p (enum machine_mode, rtx, int);
604 static struct machine_function * rs6000_init_machine_status (void);
605 static bool rs6000_assemble_integer (rtx, unsigned int, int);
606 #ifdef HAVE_GAS_HIDDEN
607 static void rs6000_assemble_visibility (tree, int);
609 static int rs6000_ra_ever_killed (void);
610 static tree rs6000_handle_longcall_attribute (tree *, tree, tree, int, bool *);
611 static tree rs6000_handle_altivec_attribute (tree *, tree, tree, int, bool *);
612 static void rs6000_eliminate_indexed_memrefs (rtx operands[2]);
613 static const char *rs6000_mangle_fundamental_type (tree);
614 extern const struct attribute_spec rs6000_attribute_table[];
615 static void rs6000_set_default_type_attributes (tree);
616 static void rs6000_output_function_prologue (FILE *, HOST_WIDE_INT);
617 static void rs6000_output_function_epilogue (FILE *, HOST_WIDE_INT);
618 static void rs6000_output_mi_thunk (FILE *, tree, HOST_WIDE_INT, HOST_WIDE_INT,
620 static rtx rs6000_emit_set_long_const (rtx, HOST_WIDE_INT, HOST_WIDE_INT);
621 static bool rs6000_return_in_memory (tree, tree);
622 static void rs6000_file_start (void);
624 static unsigned int rs6000_elf_section_type_flags (tree, const char *, int);
625 static void rs6000_elf_asm_out_constructor (rtx, int);
626 static void rs6000_elf_asm_out_destructor (rtx, int);
627 static void rs6000_elf_end_indicate_exec_stack (void) ATTRIBUTE_UNUSED;
628 static void rs6000_elf_select_section (tree, int, unsigned HOST_WIDE_INT);
629 static void rs6000_elf_unique_section (tree, int);
630 static void rs6000_elf_select_rtx_section (enum machine_mode, rtx,
631 unsigned HOST_WIDE_INT);
632 static void rs6000_elf_encode_section_info (tree, rtx, int)
634 static bool rs6000_elf_in_small_data_p (tree);
637 static void rs6000_xcoff_asm_globalize_label (FILE *, const char *);
638 static void rs6000_xcoff_asm_named_section (const char *, unsigned int, tree);
639 static void rs6000_xcoff_select_section (tree, int, unsigned HOST_WIDE_INT);
640 static void rs6000_xcoff_unique_section (tree, int);
641 static void rs6000_xcoff_select_rtx_section (enum machine_mode, rtx,
642 unsigned HOST_WIDE_INT);
643 static const char * rs6000_xcoff_strip_name_encoding (const char *);
644 static unsigned int rs6000_xcoff_section_type_flags (tree, const char *, int);
645 static void rs6000_xcoff_file_start (void);
646 static void rs6000_xcoff_file_end (void);
649 static bool rs6000_binds_local_p (tree);
651 static int rs6000_variable_issue (FILE *, int, rtx, int);
652 static bool rs6000_rtx_costs (rtx, int, int, int *);
653 static int rs6000_adjust_cost (rtx, rtx, rtx, int);
654 static bool is_microcoded_insn (rtx);
655 static int is_dispatch_slot_restricted (rtx);
656 static bool is_cracked_insn (rtx);
657 static bool is_branch_slot_insn (rtx);
658 static int rs6000_adjust_priority (rtx, int);
659 static int rs6000_issue_rate (void);
660 static bool rs6000_is_costly_dependence (rtx, rtx, rtx, int, int);
661 static rtx get_next_active_insn (rtx, rtx);
662 static bool insn_terminates_group_p (rtx , enum group_termination);
663 static bool is_costly_group (rtx *, rtx);
664 static int force_new_group (int, FILE *, rtx *, rtx, bool *, int, int *);
665 static int redefine_groups (FILE *, int, rtx, rtx);
666 static int pad_groups (FILE *, int, rtx, rtx);
667 static void rs6000_sched_finish (FILE *, int);
668 static int rs6000_use_sched_lookahead (void);
669 static tree rs6000_builtin_mask_for_load (void);
671 static void def_builtin (int, const char *, tree, int);
672 static void rs6000_init_builtins (void);
673 static rtx rs6000_expand_unop_builtin (enum insn_code, tree, rtx);
674 static rtx rs6000_expand_binop_builtin (enum insn_code, tree, rtx);
675 static rtx rs6000_expand_ternop_builtin (enum insn_code, tree, rtx);
676 static rtx rs6000_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
677 static void altivec_init_builtins (void);
678 static void rs6000_common_init_builtins (void);
679 static void rs6000_init_libfuncs (void);
681 static void enable_mask_for_builtins (struct builtin_description *, int,
682 enum rs6000_builtins,
683 enum rs6000_builtins);
684 static tree build_opaque_vector_type (tree, int);
685 static void spe_init_builtins (void);
686 static rtx spe_expand_builtin (tree, rtx, bool *);
687 static rtx spe_expand_stv_builtin (enum insn_code, tree);
688 static rtx spe_expand_predicate_builtin (enum insn_code, tree, rtx);
689 static rtx spe_expand_evsel_builtin (enum insn_code, tree, rtx);
690 static int rs6000_emit_int_cmove (rtx, rtx, rtx, rtx);
691 static rs6000_stack_t *rs6000_stack_info (void);
692 static void debug_stack_info (rs6000_stack_t *);
694 static rtx altivec_expand_builtin (tree, rtx, bool *);
695 static rtx altivec_expand_ld_builtin (tree, rtx, bool *);
696 static rtx altivec_expand_st_builtin (tree, rtx, bool *);
697 static rtx altivec_expand_dst_builtin (tree, rtx, bool *);
698 static rtx altivec_expand_abs_builtin (enum insn_code, tree, rtx);
699 static rtx altivec_expand_predicate_builtin (enum insn_code,
700 const char *, tree, rtx);
701 static rtx altivec_expand_lv_builtin (enum insn_code, tree, rtx);
702 static rtx altivec_expand_stv_builtin (enum insn_code, tree);
703 static void rs6000_parse_abi_options (void);
704 static void rs6000_parse_alignment_option (void);
705 static void rs6000_parse_tls_size_option (void);
706 static void rs6000_parse_yes_no_option (const char *, const char *, int *);
707 static void rs6000_parse_float_gprs_option (void);
708 static int first_altivec_reg_to_save (void);
709 static unsigned int compute_vrsave_mask (void);
710 static void compute_save_world_info (rs6000_stack_t *info_ptr);
711 static void is_altivec_return_reg (rtx, void *);
712 static rtx generate_set_vrsave (rtx, rs6000_stack_t *, int);
713 int easy_vector_constant (rtx, enum machine_mode);
714 static bool rs6000_is_opaque_type (tree);
715 static rtx rs6000_dwarf_register_span (rtx);
716 static rtx rs6000_legitimize_tls_address (rtx, enum tls_model);
717 static rtx rs6000_tls_get_addr (void);
718 static rtx rs6000_got_sym (void);
719 static int rs6000_tls_symbol_ref_1 (rtx *, void *);
720 static const char *rs6000_get_some_local_dynamic_name (void);
721 static int rs6000_get_some_local_dynamic_name_1 (rtx *, void *);
722 static rtx rs6000_complex_function_value (enum machine_mode);
723 static rtx rs6000_spe_function_arg (CUMULATIVE_ARGS *,
724 enum machine_mode, tree);
725 static void rs6000_darwin64_record_arg_advance_flush (CUMULATIVE_ARGS *,
727 static void rs6000_darwin64_record_arg_advance_recurse (CUMULATIVE_ARGS *,
728 tree, HOST_WIDE_INT);
729 static void rs6000_darwin64_record_arg_flush (CUMULATIVE_ARGS *,
732 static void rs6000_darwin64_record_arg_recurse (CUMULATIVE_ARGS *,
735 static rtx rs6000_darwin64_record_arg (CUMULATIVE_ARGS *, tree, int, bool);
736 static rtx rs6000_mixed_function_arg (enum machine_mode, tree, int);
737 static void rs6000_move_block_from_reg (int regno, rtx x, int nregs);
738 static void setup_incoming_varargs (CUMULATIVE_ARGS *,
739 enum machine_mode, tree,
741 static bool rs6000_pass_by_reference (CUMULATIVE_ARGS *, enum machine_mode,
743 static int rs6000_arg_partial_bytes (CUMULATIVE_ARGS *, enum machine_mode,
745 static const char *invalid_arg_for_unprototyped_fn (tree, tree, tree);
747 static void macho_branch_islands (void);
748 static void add_compiler_branch_island (tree, tree, int);
749 static int no_previous_def (tree function_name);
750 static tree get_prev_label (tree function_name);
751 static void rs6000_darwin_file_start (void);
754 static tree rs6000_build_builtin_va_list (void);
755 static tree rs6000_gimplify_va_arg (tree, tree, tree *, tree *);
756 static bool rs6000_must_pass_in_stack (enum machine_mode, tree);
757 static bool rs6000_vector_mode_supported_p (enum machine_mode);
758 static int get_vec_cmp_insn (enum rtx_code, enum machine_mode,
760 static rtx rs6000_emit_vector_compare (enum rtx_code, rtx, rtx,
762 static int get_vsel_insn (enum machine_mode);
763 static void rs6000_emit_vector_select (rtx, rtx, rtx, rtx);
766 const int INSN_NOT_AVAILABLE = -1;
767 static enum machine_mode rs6000_eh_return_filter_mode (void);
769 /* Hash table stuff for keeping track of TOC entries. */
771 struct toc_hash_struct GTY(())
773 /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
774 ASM_OUTPUT_SPECIAL_POOL_ENTRY_P. */
776 enum machine_mode key_mode;
780 static GTY ((param_is (struct toc_hash_struct))) htab_t toc_hash_table;
782 /* Default register names. */
783 char rs6000_reg_names[][8] =
785 "0", "1", "2", "3", "4", "5", "6", "7",
786 "8", "9", "10", "11", "12", "13", "14", "15",
787 "16", "17", "18", "19", "20", "21", "22", "23",
788 "24", "25", "26", "27", "28", "29", "30", "31",
789 "0", "1", "2", "3", "4", "5", "6", "7",
790 "8", "9", "10", "11", "12", "13", "14", "15",
791 "16", "17", "18", "19", "20", "21", "22", "23",
792 "24", "25", "26", "27", "28", "29", "30", "31",
793 "mq", "lr", "ctr","ap",
794 "0", "1", "2", "3", "4", "5", "6", "7",
796 /* AltiVec registers. */
797 "0", "1", "2", "3", "4", "5", "6", "7",
798 "8", "9", "10", "11", "12", "13", "14", "15",
799 "16", "17", "18", "19", "20", "21", "22", "23",
800 "24", "25", "26", "27", "28", "29", "30", "31",
806 #ifdef TARGET_REGNAMES
807 static const char alt_reg_names[][8] =
809 "%r0", "%r1", "%r2", "%r3", "%r4", "%r5", "%r6", "%r7",
810 "%r8", "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
811 "%r16", "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
812 "%r24", "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
813 "%f0", "%f1", "%f2", "%f3", "%f4", "%f5", "%f6", "%f7",
814 "%f8", "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
815 "%f16", "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
816 "%f24", "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
817 "mq", "lr", "ctr", "ap",
818 "%cr0", "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
820 /* AltiVec registers. */
821 "%v0", "%v1", "%v2", "%v3", "%v4", "%v5", "%v6", "%v7",
822 "%v8", "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
823 "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
824 "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
831 #ifndef MASK_STRICT_ALIGN
832 #define MASK_STRICT_ALIGN 0
834 #ifndef TARGET_PROFILE_KERNEL
835 #define TARGET_PROFILE_KERNEL 0
838 /* The VRSAVE bitmask puts bit %v0 as the most significant bit. */
839 #define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))
841 /* Initialize the GCC target structure. */
842 #undef TARGET_ATTRIBUTE_TABLE
843 #define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
844 #undef TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
845 #define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES rs6000_set_default_type_attributes
847 #undef TARGET_ASM_ALIGNED_DI_OP
848 #define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP
850 /* Default unaligned ops are only provided for ELF. Find the ops needed
851 for non-ELF systems. */
852 #ifndef OBJECT_FORMAT_ELF
854 /* For XCOFF. rs6000_assemble_integer will handle unaligned DIs on
856 #undef TARGET_ASM_UNALIGNED_HI_OP
857 #define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
858 #undef TARGET_ASM_UNALIGNED_SI_OP
859 #define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
860 #undef TARGET_ASM_UNALIGNED_DI_OP
861 #define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
864 #undef TARGET_ASM_UNALIGNED_HI_OP
865 #define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
866 #undef TARGET_ASM_UNALIGNED_SI_OP
867 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
868 #undef TARGET_ASM_UNALIGNED_DI_OP
869 #define TARGET_ASM_UNALIGNED_DI_OP "\t.quad\t"
870 #undef TARGET_ASM_ALIGNED_DI_OP
871 #define TARGET_ASM_ALIGNED_DI_OP "\t.quad\t"
875 /* This hook deals with fixups for relocatable code and DI-mode objects
877 #undef TARGET_ASM_INTEGER
878 #define TARGET_ASM_INTEGER rs6000_assemble_integer
880 #ifdef HAVE_GAS_HIDDEN
881 #undef TARGET_ASM_ASSEMBLE_VISIBILITY
882 #define TARGET_ASM_ASSEMBLE_VISIBILITY rs6000_assemble_visibility
885 #undef TARGET_HAVE_TLS
886 #define TARGET_HAVE_TLS HAVE_AS_TLS
888 #undef TARGET_CANNOT_FORCE_CONST_MEM
889 #define TARGET_CANNOT_FORCE_CONST_MEM rs6000_tls_referenced_p
891 #undef TARGET_ASM_FUNCTION_PROLOGUE
892 #define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
893 #undef TARGET_ASM_FUNCTION_EPILOGUE
894 #define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue
896 #undef TARGET_SCHED_VARIABLE_ISSUE
897 #define TARGET_SCHED_VARIABLE_ISSUE rs6000_variable_issue
899 #undef TARGET_SCHED_ISSUE_RATE
900 #define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
901 #undef TARGET_SCHED_ADJUST_COST
902 #define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
903 #undef TARGET_SCHED_ADJUST_PRIORITY
904 #define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority
905 #undef TARGET_SCHED_IS_COSTLY_DEPENDENCE
906 #define TARGET_SCHED_IS_COSTLY_DEPENDENCE rs6000_is_costly_dependence
907 #undef TARGET_SCHED_FINISH
908 #define TARGET_SCHED_FINISH rs6000_sched_finish
910 #undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
911 #define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD rs6000_use_sched_lookahead
913 #undef TARGET_VECTORIZE_BUILTIN_MASK_FOR_LOAD
914 #define TARGET_VECTORIZE_BUILTIN_MASK_FOR_LOAD rs6000_builtin_mask_for_load
916 #undef TARGET_INIT_BUILTINS
917 #define TARGET_INIT_BUILTINS rs6000_init_builtins
919 #undef TARGET_EXPAND_BUILTIN
920 #define TARGET_EXPAND_BUILTIN rs6000_expand_builtin
922 #undef TARGET_MANGLE_FUNDAMENTAL_TYPE
923 #define TARGET_MANGLE_FUNDAMENTAL_TYPE rs6000_mangle_fundamental_type
925 #undef TARGET_INIT_LIBFUNCS
926 #define TARGET_INIT_LIBFUNCS rs6000_init_libfuncs
929 #undef TARGET_BINDS_LOCAL_P
930 #define TARGET_BINDS_LOCAL_P rs6000_binds_local_p
933 #undef TARGET_ASM_OUTPUT_MI_THUNK
934 #define TARGET_ASM_OUTPUT_MI_THUNK rs6000_output_mi_thunk
936 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
937 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_tree_hwi_hwi_tree_true
939 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
940 #define TARGET_FUNCTION_OK_FOR_SIBCALL rs6000_function_ok_for_sibcall
942 #undef TARGET_RTX_COSTS
943 #define TARGET_RTX_COSTS rs6000_rtx_costs
944 #undef TARGET_ADDRESS_COST
945 #define TARGET_ADDRESS_COST hook_int_rtx_0
947 #undef TARGET_VECTOR_OPAQUE_P
948 #define TARGET_VECTOR_OPAQUE_P rs6000_is_opaque_type
950 #undef TARGET_DWARF_REGISTER_SPAN
951 #define TARGET_DWARF_REGISTER_SPAN rs6000_dwarf_register_span
953 /* On rs6000, function arguments are promoted, as are function return
955 #undef TARGET_PROMOTE_FUNCTION_ARGS
956 #define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_tree_true
957 #undef TARGET_PROMOTE_FUNCTION_RETURN
958 #define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_tree_true
960 #undef TARGET_RETURN_IN_MEMORY
961 #define TARGET_RETURN_IN_MEMORY rs6000_return_in_memory
963 #undef TARGET_SETUP_INCOMING_VARARGS
964 #define TARGET_SETUP_INCOMING_VARARGS setup_incoming_varargs
966 /* Always strict argument naming on rs6000. */
967 #undef TARGET_STRICT_ARGUMENT_NAMING
968 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
969 #undef TARGET_PRETEND_OUTGOING_VARARGS_NAMED
970 #define TARGET_PRETEND_OUTGOING_VARARGS_NAMED hook_bool_CUMULATIVE_ARGS_true
971 #undef TARGET_SPLIT_COMPLEX_ARG
972 #define TARGET_SPLIT_COMPLEX_ARG hook_bool_tree_true
973 #undef TARGET_MUST_PASS_IN_STACK
974 #define TARGET_MUST_PASS_IN_STACK rs6000_must_pass_in_stack
975 #undef TARGET_PASS_BY_REFERENCE
976 #define TARGET_PASS_BY_REFERENCE rs6000_pass_by_reference
977 #undef TARGET_ARG_PARTIAL_BYTES
978 #define TARGET_ARG_PARTIAL_BYTES rs6000_arg_partial_bytes
980 #undef TARGET_BUILD_BUILTIN_VA_LIST
981 #define TARGET_BUILD_BUILTIN_VA_LIST rs6000_build_builtin_va_list
983 #undef TARGET_GIMPLIFY_VA_ARG_EXPR
984 #define TARGET_GIMPLIFY_VA_ARG_EXPR rs6000_gimplify_va_arg
986 #undef TARGET_EH_RETURN_FILTER_MODE
987 #define TARGET_EH_RETURN_FILTER_MODE rs6000_eh_return_filter_mode
989 #undef TARGET_VECTOR_MODE_SUPPORTED_P
990 #define TARGET_VECTOR_MODE_SUPPORTED_P rs6000_vector_mode_supported_p
992 #undef TARGET_INVALID_ARG_FOR_UNPROTOTYPED_FN
993 #define TARGET_INVALID_ARG_FOR_UNPROTOTYPED_FN invalid_arg_for_unprototyped_fn
995 /* MPC604EUM 3.5.2 Weak Consistency between Multiple Processors
996 The PowerPC architecture requires only weak consistency among
997 processors--that is, memory accesses between processors need not be
998 sequentially consistent and memory accesses among processors can occur
999 in any order. The ability to order memory accesses weakly provides
1000 opportunities for more efficient use of the system bus. Unless a
1001 dependency exists, the 604e allows read operations to precede store
1003 #undef TARGET_RELAXED_ORDERING
1004 #define TARGET_RELAXED_ORDERING true
1006 struct gcc_target targetm = TARGET_INITIALIZER;
1009 /* Value is 1 if hard register REGNO can hold a value of machine-mode
1012 rs6000_hard_regno_mode_ok (int regno, enum machine_mode mode)
1014 /* The GPRs can hold any mode, but values bigger than one register
1015 cannot go past R31. */
1016 if (INT_REGNO_P (regno))
1017 return INT_REGNO_P (regno + HARD_REGNO_NREGS (regno, mode) - 1);
1019 /* The float registers can only hold floating modes and DImode. */
1020 if (FP_REGNO_P (regno))
1022 (GET_MODE_CLASS (mode) == MODE_FLOAT
1023 && FP_REGNO_P (regno + HARD_REGNO_NREGS (regno, mode) - 1))
1024 || (GET_MODE_CLASS (mode) == MODE_INT
1025 && GET_MODE_SIZE (mode) == UNITS_PER_FP_WORD);
1027 /* The CR register can only hold CC modes. */
1028 if (CR_REGNO_P (regno))
1029 return GET_MODE_CLASS (mode) == MODE_CC;
1031 if (XER_REGNO_P (regno))
1032 return mode == PSImode;
1034 /* AltiVec only in AldyVec registers. */
1035 if (ALTIVEC_REGNO_P (regno))
1036 return ALTIVEC_VECTOR_MODE (mode);
1038 /* ...but GPRs can hold SIMD data on the SPE in one register. */
1039 if (SPE_SIMD_REGNO_P (regno) && TARGET_SPE && SPE_VECTOR_MODE (mode))
1042 /* We cannot put TImode anywhere except general register and it must be
1043 able to fit within the register set. */
1045 return GET_MODE_SIZE (mode) <= UNITS_PER_WORD;
1048 /* Initialize rs6000_hard_regno_mode_ok_p table. */
1050 rs6000_init_hard_regno_mode_ok (void)
1054 for (r = 0; r < FIRST_PSEUDO_REGISTER; ++r)
1055 for (m = 0; m < NUM_MACHINE_MODES; ++m)
1056 if (rs6000_hard_regno_mode_ok (r, m))
1057 rs6000_hard_regno_mode_ok_p[m][r] = true;
1060 /* If not otherwise specified by a target, make 'long double' equivalent to
1063 #ifndef RS6000_DEFAULT_LONG_DOUBLE_SIZE
1064 #define RS6000_DEFAULT_LONG_DOUBLE_SIZE 64
1067 /* Override command line options. Mostly we process the processor
1068 type and sometimes adjust other TARGET_ options. */
1071 rs6000_override_options (const char *default_cpu)
1074 struct rs6000_cpu_select *ptr;
1077 /* Simplifications for entries below. */
1080 POWERPC_BASE_MASK = MASK_POWERPC | MASK_NEW_MNEMONICS,
1081 POWERPC_7400_MASK = POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_ALTIVEC
1084 /* This table occasionally claims that a processor does not support
1085 a particular feature even though it does, but the feature is slower
1086 than the alternative. Thus, it shouldn't be relied on as a
1087 complete description of the processor's support.
1089 Please keep this list in order, and don't forget to update the
1090 documentation in invoke.texi when adding a new processor or
1094 const char *const name; /* Canonical processor name. */
1095 const enum processor_type processor; /* Processor type enum value. */
1096 const int target_enable; /* Target flags to enable. */
1097 } const processor_target_table[]
1098 = {{"401", PROCESSOR_PPC403, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1099 {"403", PROCESSOR_PPC403,
1100 POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_STRICT_ALIGN},
1101 {"405", PROCESSOR_PPC405, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1102 {"405fp", PROCESSOR_PPC405, POWERPC_BASE_MASK},
1103 {"440", PROCESSOR_PPC440, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1104 {"440fp", PROCESSOR_PPC440, POWERPC_BASE_MASK},
1105 {"505", PROCESSOR_MPCCORE, POWERPC_BASE_MASK},
1106 {"601", PROCESSOR_PPC601,
1107 MASK_POWER | POWERPC_BASE_MASK | MASK_MULTIPLE | MASK_STRING},
1108 {"602", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1109 {"603", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1110 {"603e", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1111 {"604", PROCESSOR_PPC604, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1112 {"604e", PROCESSOR_PPC604e, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1113 {"620", PROCESSOR_PPC620,
1114 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
1115 {"630", PROCESSOR_PPC630,
1116 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
1117 {"740", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1118 {"7400", PROCESSOR_PPC7400, POWERPC_7400_MASK},
1119 {"7450", PROCESSOR_PPC7450, POWERPC_7400_MASK},
1120 {"750", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1121 {"801", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1122 {"821", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1123 {"823", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1124 {"8540", PROCESSOR_PPC8540, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1125 /* 8548 has a dummy entry for now. */
1126 {"8548", PROCESSOR_PPC8540, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1127 {"860", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1128 {"970", PROCESSOR_POWER4,
1129 POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
1130 {"common", PROCESSOR_COMMON, MASK_NEW_MNEMONICS},
1131 {"ec603e", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1132 {"G3", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1133 {"G4", PROCESSOR_PPC7450, POWERPC_7400_MASK},
1134 {"G5", PROCESSOR_POWER4,
1135 POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
1136 {"power", PROCESSOR_POWER, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1137 {"power2", PROCESSOR_POWER,
1138 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING},
1139 {"power3", PROCESSOR_PPC630,
1140 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
1141 {"power4", PROCESSOR_POWER4,
1142 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_MFCRF | MASK_POWERPC64},
1143 {"power5", PROCESSOR_POWER5,
1144 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_MFCRF | MASK_POWERPC64},
1145 {"powerpc", PROCESSOR_POWERPC, POWERPC_BASE_MASK},
1146 {"powerpc64", PROCESSOR_POWERPC64,
1147 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
1148 {"rios", PROCESSOR_RIOS1, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1149 {"rios1", PROCESSOR_RIOS1, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1150 {"rios2", PROCESSOR_RIOS2,
1151 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING},
1152 {"rsc", PROCESSOR_PPC601, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1153 {"rsc1", PROCESSOR_PPC601, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1154 {"rs64", PROCESSOR_RS64A,
1155 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64}
1158 const size_t ptt_size = ARRAY_SIZE (processor_target_table);
1160 /* Some OSs don't support saving the high part of 64-bit registers on
1161 context switch. Other OSs don't support saving Altivec registers.
1162 On those OSs, we don't touch the MASK_POWERPC64 or MASK_ALTIVEC
1163 settings; if the user wants either, the user must explicitly specify
1164 them and we won't interfere with the user's specification. */
1167 POWER_MASKS = MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING,
1168 POWERPC_MASKS = (POWERPC_BASE_MASK | MASK_PPC_GPOPT
1169 | MASK_PPC_GFXOPT | MASK_POWERPC64 | MASK_ALTIVEC
1173 rs6000_init_hard_regno_mode_ok ();
1175 set_masks = POWER_MASKS | POWERPC_MASKS | MASK_SOFT_FLOAT;
1176 #ifdef OS_MISSING_POWERPC64
1177 if (OS_MISSING_POWERPC64)
1178 set_masks &= ~MASK_POWERPC64;
1180 #ifdef OS_MISSING_ALTIVEC
1181 if (OS_MISSING_ALTIVEC)
1182 set_masks &= ~MASK_ALTIVEC;
1185 /* Don't override by the processor default if given explicitly. */
1186 set_masks &= ~target_flags_explicit;
1188 /* Identify the processor type. */
1189 rs6000_select[0].string = default_cpu;
1190 rs6000_cpu = TARGET_POWERPC64 ? PROCESSOR_DEFAULT64 : PROCESSOR_DEFAULT;
1192 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
1194 ptr = &rs6000_select[i];
1195 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
1197 for (j = 0; j < ptt_size; j++)
1198 if (! strcmp (ptr->string, processor_target_table[j].name))
1200 if (ptr->set_tune_p)
1201 rs6000_cpu = processor_target_table[j].processor;
1203 if (ptr->set_arch_p)
1205 target_flags &= ~set_masks;
1206 target_flags |= (processor_target_table[j].target_enable
1213 error ("bad value (%s) for %s switch", ptr->string, ptr->name);
1220 /* If we are optimizing big endian systems for space, use the load/store
1221 multiple and string instructions. */
1222 if (BYTES_BIG_ENDIAN && optimize_size)
1223 target_flags |= ~target_flags_explicit & (MASK_MULTIPLE | MASK_STRING);
1225 /* Don't allow -mmultiple or -mstring on little endian systems
1226 unless the cpu is a 750, because the hardware doesn't support the
1227 instructions used in little endian mode, and causes an alignment
1228 trap. The 750 does not cause an alignment trap (except when the
1229 target is unaligned). */
1231 if (!BYTES_BIG_ENDIAN && rs6000_cpu != PROCESSOR_PPC750)
1233 if (TARGET_MULTIPLE)
1235 target_flags &= ~MASK_MULTIPLE;
1236 if ((target_flags_explicit & MASK_MULTIPLE) != 0)
1237 warning (0, "-mmultiple is not supported on little endian systems");
1242 target_flags &= ~MASK_STRING;
1243 if ((target_flags_explicit & MASK_STRING) != 0)
1244 warning (0, "-mstring is not supported on little endian systems");
1248 /* Set debug flags */
1249 if (rs6000_debug_name)
1251 if (! strcmp (rs6000_debug_name, "all"))
1252 rs6000_debug_stack = rs6000_debug_arg = 1;
1253 else if (! strcmp (rs6000_debug_name, "stack"))
1254 rs6000_debug_stack = 1;
1255 else if (! strcmp (rs6000_debug_name, "arg"))
1256 rs6000_debug_arg = 1;
1258 error ("unknown -mdebug-%s switch", rs6000_debug_name);
1261 if (rs6000_traceback_name)
1263 if (! strncmp (rs6000_traceback_name, "full", 4))
1264 rs6000_traceback = traceback_full;
1265 else if (! strncmp (rs6000_traceback_name, "part", 4))
1266 rs6000_traceback = traceback_part;
1267 else if (! strncmp (rs6000_traceback_name, "no", 2))
1268 rs6000_traceback = traceback_none;
1270 error ("unknown -mtraceback arg %qs; expecting %<full%>, %<partial%> or %<none%>",
1271 rs6000_traceback_name);
1274 /* Set size of long double */
1275 rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
1276 if (rs6000_long_double_size_string)
1279 int size = strtol (rs6000_long_double_size_string, &tail, 10);
1280 if (*tail != '\0' || (size != 64 && size != 128))
1281 error ("Unknown switch -mlong-double-%s",
1282 rs6000_long_double_size_string);
1284 rs6000_long_double_type_size = size;
1287 /* Set Altivec ABI as default for powerpc64 linux. */
1288 if (TARGET_ELF && TARGET_64BIT)
1290 rs6000_altivec_abi = 1;
1291 rs6000_altivec_vrsave = 1;
1294 /* Set the Darwin64 ABI as default for 64-bit Darwin. */
1295 if (DEFAULT_ABI == ABI_DARWIN && TARGET_64BIT)
1297 rs6000_darwin64_abi = 1;
1299 darwin_one_byte_bool = 1;
1301 /* Default to natural alignment, for better performance. */
1302 rs6000_alignment_flags = MASK_ALIGN_NATURAL;
1305 /* Handle -mabi= options. */
1306 rs6000_parse_abi_options ();
1308 /* Handle -malign-XXXXX option. */
1309 rs6000_parse_alignment_option ();
1311 rs6000_parse_float_gprs_option ();
1313 /* Handle generic -mFOO=YES/NO options. */
1314 rs6000_parse_yes_no_option ("vrsave", rs6000_altivec_vrsave_string,
1315 &rs6000_altivec_vrsave);
1316 rs6000_parse_yes_no_option ("isel", rs6000_isel_string,
1318 rs6000_parse_yes_no_option ("spe", rs6000_spe_string, &rs6000_spe);
1320 /* Handle -mtls-size option. */
1321 rs6000_parse_tls_size_option ();
1323 #ifdef SUBTARGET_OVERRIDE_OPTIONS
1324 SUBTARGET_OVERRIDE_OPTIONS;
1326 #ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
1327 SUBSUBTARGET_OVERRIDE_OPTIONS;
1329 #ifdef SUB3TARGET_OVERRIDE_OPTIONS
1330 SUB3TARGET_OVERRIDE_OPTIONS;
1336 error ("AltiVec and E500 instructions cannot coexist");
1338 /* The e500 does not have string instructions, and we set
1339 MASK_STRING above when optimizing for size. */
1340 if ((target_flags & MASK_STRING) != 0)
1341 target_flags = target_flags & ~MASK_STRING;
1343 /* No SPE means 64-bit long doubles, even if an E500. */
1344 if (rs6000_spe_string != 0
1345 && !strcmp (rs6000_spe_string, "no"))
1346 rs6000_long_double_type_size = 64;
1348 else if (rs6000_select[1].string != NULL)
1350 /* For the powerpc-eabispe configuration, we set all these by
1351 default, so let's unset them if we manually set another
1352 CPU that is not the E500. */
1353 if (rs6000_abi_string == 0)
1355 if (rs6000_spe_string == 0)
1357 if (rs6000_float_gprs_string == 0)
1358 rs6000_float_gprs = 0;
1359 if (rs6000_isel_string == 0)
1361 if (rs6000_long_double_size_string == 0)
1362 rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
1365 rs6000_always_hint = (rs6000_cpu != PROCESSOR_POWER4
1366 && rs6000_cpu != PROCESSOR_POWER5);
1367 rs6000_sched_groups = (rs6000_cpu == PROCESSOR_POWER4
1368 || rs6000_cpu == PROCESSOR_POWER5);
1370 /* Handle -m(no-)longcall option. This is a bit of a cheap hack,
1371 using TARGET_OPTIONS to handle a toggle switch, but we're out of
1372 bits in target_flags so TARGET_SWITCHES cannot be used.
1373 Assumption here is that rs6000_longcall_switch points into the
1374 text of the complete option, rather than being a copy, so we can
1375 scan back for the presence or absence of the no- modifier. */
1376 if (rs6000_longcall_switch)
1378 const char *base = rs6000_longcall_switch;
1379 while (base[-1] != 'm') base--;
1381 if (*rs6000_longcall_switch != '\0')
1382 error ("invalid option %qs", base);
1383 rs6000_default_long_calls = (base[0] != 'n');
1386 /* Handle -m(no-)warn-altivec-long similarly. */
1387 if (rs6000_warn_altivec_long_switch)
1389 const char *base = rs6000_warn_altivec_long_switch;
1390 while (base[-1] != 'm') base--;
1392 if (*rs6000_warn_altivec_long_switch != '\0')
1393 error ("invalid option %qs", base);
1394 rs6000_warn_altivec_long = (base[0] != 'n');
1397 /* Handle -mprioritize-restricted-insns option. */
1398 rs6000_sched_restricted_insns_priority
1399 = (rs6000_sched_groups ? 1 : 0);
1400 if (rs6000_sched_restricted_insns_priority_str)
1401 rs6000_sched_restricted_insns_priority =
1402 atoi (rs6000_sched_restricted_insns_priority_str);
1404 /* Handle -msched-costly-dep option. */
1405 rs6000_sched_costly_dep
1406 = (rs6000_sched_groups ? store_to_load_dep_costly : no_dep_costly);
1407 if (rs6000_sched_costly_dep_str)
1409 if (! strcmp (rs6000_sched_costly_dep_str, "no"))
1410 rs6000_sched_costly_dep = no_dep_costly;
1411 else if (! strcmp (rs6000_sched_costly_dep_str, "all"))
1412 rs6000_sched_costly_dep = all_deps_costly;
1413 else if (! strcmp (rs6000_sched_costly_dep_str, "true_store_to_load"))
1414 rs6000_sched_costly_dep = true_store_to_load_dep_costly;
1415 else if (! strcmp (rs6000_sched_costly_dep_str, "store_to_load"))
1416 rs6000_sched_costly_dep = store_to_load_dep_costly;
1418 rs6000_sched_costly_dep = atoi (rs6000_sched_costly_dep_str);
1421 /* Handle -minsert-sched-nops option. */
1422 rs6000_sched_insert_nops
1423 = (rs6000_sched_groups ? sched_finish_regroup_exact : sched_finish_none);
1424 if (rs6000_sched_insert_nops_str)
1426 if (! strcmp (rs6000_sched_insert_nops_str, "no"))
1427 rs6000_sched_insert_nops = sched_finish_none;
1428 else if (! strcmp (rs6000_sched_insert_nops_str, "pad"))
1429 rs6000_sched_insert_nops = sched_finish_pad_groups;
1430 else if (! strcmp (rs6000_sched_insert_nops_str, "regroup_exact"))
1431 rs6000_sched_insert_nops = sched_finish_regroup_exact;
1433 rs6000_sched_insert_nops = atoi (rs6000_sched_insert_nops_str);
1436 #ifdef TARGET_REGNAMES
1437 /* If the user desires alternate register names, copy in the
1438 alternate names now. */
1439 if (TARGET_REGNAMES)
1440 memcpy (rs6000_reg_names, alt_reg_names, sizeof (rs6000_reg_names));
1443 /* Set TARGET_AIX_STRUCT_RET last, after the ABI is determined.
1444 If -maix-struct-return or -msvr4-struct-return was explicitly
1445 used, don't override with the ABI default. */
1446 if ((target_flags_explicit & MASK_AIX_STRUCT_RET) == 0)
1448 if (DEFAULT_ABI == ABI_V4 && !DRAFT_V4_STRUCT_RET)
1449 target_flags = (target_flags & ~MASK_AIX_STRUCT_RET);
1451 target_flags |= MASK_AIX_STRUCT_RET;
1454 if (TARGET_LONG_DOUBLE_128
1455 && (DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_DARWIN))
1456 REAL_MODE_FORMAT (TFmode) = &ibm_extended_format;
1458 /* Allocate an alias set for register saves & restores from stack. */
1459 rs6000_sr_alias_set = new_alias_set ();
1462 ASM_GENERATE_INTERNAL_LABEL (toc_label_name, "LCTOC", 1);
1464 /* We can only guarantee the availability of DI pseudo-ops when
1465 assembling for 64-bit targets. */
1468 targetm.asm_out.aligned_op.di = NULL;
1469 targetm.asm_out.unaligned_op.di = NULL;
1472 /* Set branch target alignment, if not optimizing for size. */
1475 if (rs6000_sched_groups)
1477 if (align_functions <= 0)
1478 align_functions = 16;
1479 if (align_jumps <= 0)
1481 if (align_loops <= 0)
1484 if (align_jumps_max_skip <= 0)
1485 align_jumps_max_skip = 15;
1486 if (align_loops_max_skip <= 0)
1487 align_loops_max_skip = 15;
1490 /* Arrange to save and restore machine status around nested functions. */
1491 init_machine_status = rs6000_init_machine_status;
1493 /* We should always be splitting complex arguments, but we can't break
1494 Linux and Darwin ABIs at the moment. For now, only AIX is fixed. */
1495 if (DEFAULT_ABI != ABI_AIX)
1496 targetm.calls.split_complex_arg = NULL;
1498 /* Initialize rs6000_cost with the appropriate target costs. */
1500 rs6000_cost = TARGET_POWERPC64 ? &size64_cost : &size32_cost;
1504 case PROCESSOR_RIOS1:
1505 rs6000_cost = &rios1_cost;
1508 case PROCESSOR_RIOS2:
1509 rs6000_cost = &rios2_cost;
1512 case PROCESSOR_RS64A:
1513 rs6000_cost = &rs64a_cost;
1516 case PROCESSOR_MPCCORE:
1517 rs6000_cost = &mpccore_cost;
1520 case PROCESSOR_PPC403:
1521 rs6000_cost = &ppc403_cost;
1524 case PROCESSOR_PPC405:
1525 rs6000_cost = &ppc405_cost;
1528 case PROCESSOR_PPC440:
1529 rs6000_cost = &ppc440_cost;
1532 case PROCESSOR_PPC601:
1533 rs6000_cost = &ppc601_cost;
1536 case PROCESSOR_PPC603:
1537 rs6000_cost = &ppc603_cost;
1540 case PROCESSOR_PPC604:
1541 rs6000_cost = &ppc604_cost;
1544 case PROCESSOR_PPC604e:
1545 rs6000_cost = &ppc604e_cost;
1548 case PROCESSOR_PPC620:
1549 rs6000_cost = &ppc620_cost;
1552 case PROCESSOR_PPC630:
1553 rs6000_cost = &ppc630_cost;
1556 case PROCESSOR_PPC750:
1557 case PROCESSOR_PPC7400:
1558 rs6000_cost = &ppc750_cost;
1561 case PROCESSOR_PPC7450:
1562 rs6000_cost = &ppc7450_cost;
1565 case PROCESSOR_PPC8540:
1566 rs6000_cost = &ppc8540_cost;
1569 case PROCESSOR_POWER4:
1570 case PROCESSOR_POWER5:
1571 rs6000_cost = &power4_cost;
1579 /* Implement targetm.vectorize.builtin_mask_for_load. */
1581 rs6000_builtin_mask_for_load (void)
1584 return altivec_builtin_mask_for_load;
1589 /* Handle generic options of the form -mfoo=yes/no.
1590 NAME is the option name.
1591 VALUE is the option value.
1592 FLAG is the pointer to the flag where to store a 1 or 0, depending on
1593 whether the option value is 'yes' or 'no' respectively. */
1595 rs6000_parse_yes_no_option (const char *name, const char *value, int *flag)
1599 else if (!strcmp (value, "yes"))
1601 else if (!strcmp (value, "no"))
1604 error ("unknown -m%s= option specified: '%s'", name, value);
1607 /* Handle -mabi= options. */
1609 rs6000_parse_abi_options (void)
1611 if (rs6000_abi_string == 0)
1613 else if (! strcmp (rs6000_abi_string, "altivec"))
1615 rs6000_altivec_abi = 1;
1618 else if (! strcmp (rs6000_abi_string, "no-altivec"))
1619 rs6000_altivec_abi = 0;
1620 else if (! strcmp (rs6000_abi_string, "spe"))
1623 rs6000_altivec_abi = 0;
1624 if (!TARGET_SPE_ABI)
1625 error ("not configured for ABI: '%s'", rs6000_abi_string);
1628 /* These are here for testing during development only, do not
1629 document in the manual please. */
1630 else if (! strcmp (rs6000_abi_string, "d64"))
1632 rs6000_darwin64_abi = 1;
1633 warning (0, "Using darwin64 ABI");
1635 else if (! strcmp (rs6000_abi_string, "d32"))
1637 rs6000_darwin64_abi = 0;
1638 warning (0, "Using old darwin ABI");
1641 else if (! strcmp (rs6000_abi_string, "no-spe"))
1644 error ("unknown ABI specified: '%s'", rs6000_abi_string);
1647 /* Handle -mfloat-gprs= options. */
1649 rs6000_parse_float_gprs_option (void)
1651 if (rs6000_float_gprs_string == 0)
1653 else if (! strcmp (rs6000_float_gprs_string, "yes")
1654 || ! strcmp (rs6000_float_gprs_string, "single"))
1655 rs6000_float_gprs = 1;
1656 else if (! strcmp (rs6000_float_gprs_string, "double"))
1657 rs6000_float_gprs = 2;
1658 else if (! strcmp (rs6000_float_gprs_string, "no"))
1659 rs6000_float_gprs = 0;
1661 error ("invalid option for -mfloat-gprs");
1664 /* Handle -malign-XXXXXX options. */
1666 rs6000_parse_alignment_option (void)
1668 if (rs6000_alignment_string == 0)
1670 else if (! strcmp (rs6000_alignment_string, "power"))
1672 /* On 64-bit Darwin, power alignment is ABI-incompatible with
1673 some C library functions, so warn about it. The flag may be
1674 useful for performance studies from time to time though, so
1675 don't disable it entirely. */
1676 if (DEFAULT_ABI == ABI_DARWIN && TARGET_64BIT)
1677 warning (0, "-malign-power is not supported for 64-bit Darwin;"
1678 " it is incompatible with the installed C and C++ libraries");
1679 rs6000_alignment_flags = MASK_ALIGN_POWER;
1681 else if (! strcmp (rs6000_alignment_string, "natural"))
1682 rs6000_alignment_flags = MASK_ALIGN_NATURAL;
1684 error ("unknown -malign-XXXXX option specified: '%s'",
1685 rs6000_alignment_string);
1688 /* Validate and record the size specified with the -mtls-size option. */
1691 rs6000_parse_tls_size_option (void)
1693 if (rs6000_tls_size_string == 0)
1695 else if (strcmp (rs6000_tls_size_string, "16") == 0)
1696 rs6000_tls_size = 16;
1697 else if (strcmp (rs6000_tls_size_string, "32") == 0)
1698 rs6000_tls_size = 32;
1699 else if (strcmp (rs6000_tls_size_string, "64") == 0)
1700 rs6000_tls_size = 64;
1702 error ("bad value %qs for -mtls-size switch", rs6000_tls_size_string);
1706 optimization_options (int level ATTRIBUTE_UNUSED, int size ATTRIBUTE_UNUSED)
1710 /* Do anything needed at the start of the asm file. */
1713 rs6000_file_start (void)
1717 const char *start = buffer;
1718 struct rs6000_cpu_select *ptr;
1719 const char *default_cpu = TARGET_CPU_DEFAULT;
1720 FILE *file = asm_out_file;
1722 default_file_start ();
1724 #ifdef TARGET_BI_ARCH
1725 if ((TARGET_DEFAULT ^ target_flags) & MASK_64BIT)
1729 if (flag_verbose_asm)
1731 sprintf (buffer, "\n%s rs6000/powerpc options:", ASM_COMMENT_START);
1732 rs6000_select[0].string = default_cpu;
1734 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
1736 ptr = &rs6000_select[i];
1737 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
1739 fprintf (file, "%s %s%s", start, ptr->name, ptr->string);
1744 #ifdef USING_ELFOS_H
1745 switch (rs6000_sdata)
1747 case SDATA_NONE: fprintf (file, "%s -msdata=none", start); start = ""; break;
1748 case SDATA_DATA: fprintf (file, "%s -msdata=data", start); start = ""; break;
1749 case SDATA_SYSV: fprintf (file, "%s -msdata=sysv", start); start = ""; break;
1750 case SDATA_EABI: fprintf (file, "%s -msdata=eabi", start); start = ""; break;
1753 if (rs6000_sdata && g_switch_value)
1755 fprintf (file, "%s -G " HOST_WIDE_INT_PRINT_UNSIGNED, start,
1765 if (DEFAULT_ABI == ABI_AIX || (TARGET_ELF && flag_pic == 2))
1773 /* Return nonzero if this function is known to have a null epilogue. */
1776 direct_return (void)
1778 if (reload_completed)
1780 rs6000_stack_t *info = rs6000_stack_info ();
1782 if (info->first_gp_reg_save == 32
1783 && info->first_fp_reg_save == 64
1784 && info->first_altivec_reg_save == LAST_ALTIVEC_REGNO + 1
1785 && ! info->lr_save_p
1786 && ! info->cr_save_p
1787 && info->vrsave_mask == 0
1795 /* Return the number of instructions it takes to form a constant in an
1796 integer register. */
1799 num_insns_constant_wide (HOST_WIDE_INT value)
1801 /* signed constant loadable with {cal|addi} */
1802 if (CONST_OK_FOR_LETTER_P (value, 'I'))
1805 /* constant loadable with {cau|addis} */
1806 else if (CONST_OK_FOR_LETTER_P (value, 'L'))
1809 #if HOST_BITS_PER_WIDE_INT == 64
1810 else if (TARGET_POWERPC64)
1812 HOST_WIDE_INT low = ((value & 0xffffffff) ^ 0x80000000) - 0x80000000;
1813 HOST_WIDE_INT high = value >> 31;
1815 if (high == 0 || high == -1)
1821 return num_insns_constant_wide (high) + 1;
1823 return (num_insns_constant_wide (high)
1824 + num_insns_constant_wide (low) + 1);
1833 num_insns_constant (rtx op, enum machine_mode mode)
1836 switch (GET_CODE (op))
1838 HOST_WIDE_INT low, high;
1840 switch (GET_CODE (op))
1844 #if HOST_BITS_PER_WIDE_INT == 64
1845 if ((INTVAL (op) >> 31) != 0 && (INTVAL (op) >> 31) != -1
1846 && mask64_operand (op, mode))
1850 return num_insns_constant_wide (INTVAL (op));
1858 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1859 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
1860 return num_insns_constant_wide ((HOST_WIDE_INT) l);
1869 int endian = (WORDS_BIG_ENDIAN == 0);
1871 if (mode == VOIDmode || mode == DImode)
1873 high = CONST_DOUBLE_HIGH (op);
1874 low = CONST_DOUBLE_LOW (op);
1878 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1879 REAL_VALUE_TO_TARGET_DOUBLE (rv, l);
1881 low = l[1 - endian];
1885 return (num_insns_constant_wide (low)
1886 + num_insns_constant_wide (high));
1890 if (high == 0 && low >= 0)
1891 return num_insns_constant_wide (low);
1893 else if (high == -1 && low < 0)
1894 return num_insns_constant_wide (low);
1896 else if (mask64_operand (op, mode))
1900 return num_insns_constant_wide (high) + 1;
1903 return (num_insns_constant_wide (high)
1904 + num_insns_constant_wide (low) + 1);
1911 if (mode == VOIDmode || mode == DImode)
1913 high = CONST_DOUBLE_HIGH (op);
1914 low = CONST_DOUBLE_LOW (op);
1921 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1922 REAL_VALUE_TO_TARGET_DOUBLE (rv, l);
1923 high = l[WORDS_BIG_ENDIAN == 0];
1924 low = l[WORDS_BIG_ENDIAN != 0];
1928 return (num_insns_constant_wide (low)
1929 + num_insns_constant_wide (high));
1932 if ((high == 0 && low >= 0)
1933 || (high == -1 && low < 0))
1934 return num_insns_constant_wide (low);
1936 else if (mask64_operand (op, mode))
1940 return num_insns_constant_wide (high) + 1;
1943 return (num_insns_constant_wide (high)
1944 + num_insns_constant_wide (low) + 1);
1953 /* Returns the constant for the splat instruction, if exists. */
1956 easy_vector_splat_const (int cst, enum machine_mode mode)
1961 if (EASY_VECTOR_15 (cst)
1962 || EASY_VECTOR_15_ADD_SELF (cst))
1964 if ((cst & 0xffff) != ((cst >> 16) & 0xffff))
1970 if (EASY_VECTOR_15 (cst)
1971 || EASY_VECTOR_15_ADD_SELF (cst))
1973 if ((cst & 0xff) != ((cst >> 8) & 0xff))
1979 if (EASY_VECTOR_15 (cst)
1980 || EASY_VECTOR_15_ADD_SELF (cst))
1988 /* Return nonzero if all elements of a vector have the same value. */
1991 easy_vector_same (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1995 units = CONST_VECTOR_NUNITS (op);
1997 cst = INTVAL (CONST_VECTOR_ELT (op, 0));
1998 for (i = 1; i < units; ++i)
1999 if (INTVAL (CONST_VECTOR_ELT (op, i)) != cst)
2001 if (i == units && easy_vector_splat_const (cst, mode))
2006 /* Generate easy_vector_constant out of a easy_vector_constant_add_self. */
2009 gen_easy_vector_constant_add_self (rtx op)
2013 units = GET_MODE_NUNITS (GET_MODE (op));
2014 v = rtvec_alloc (units);
2016 for (i = 0; i < units; i++)
2018 GEN_INT (INTVAL (CONST_VECTOR_ELT (op, i)) >> 1);
2019 return gen_rtx_raw_CONST_VECTOR (GET_MODE (op), v);
2023 output_vec_const_move (rtx *operands)
2026 enum machine_mode mode;
2032 cst = INTVAL (CONST_VECTOR_ELT (vec, 0));
2033 cst2 = INTVAL (CONST_VECTOR_ELT (vec, 1));
2034 mode = GET_MODE (dest);
2038 if (zero_constant (vec, mode))
2039 return "vxor %0,%0,%0";
2041 gcc_assert (easy_vector_constant (vec, mode));
2043 operands[1] = GEN_INT (cst);
2047 if (EASY_VECTOR_15 (cst))
2049 operands[1] = GEN_INT (cst);
2050 return "vspltisw %0,%1";
2052 else if (EASY_VECTOR_15_ADD_SELF (cst))
2058 if (EASY_VECTOR_15 (cst))
2060 operands[1] = GEN_INT (cst);
2061 return "vspltish %0,%1";
2063 else if (EASY_VECTOR_15_ADD_SELF (cst))
2069 if (EASY_VECTOR_15 (cst))
2071 operands[1] = GEN_INT (cst);
2072 return "vspltisb %0,%1";
2074 else if (EASY_VECTOR_15_ADD_SELF (cst))
2082 gcc_assert (TARGET_SPE);
2084 /* Vector constant 0 is handled as a splitter of V2SI, and in the
2085 pattern of V1DI, V4HI, and V2SF.
2087 FIXME: We should probably return # and add post reload
2088 splitters for these, but this way is so easy ;-). */
2089 operands[1] = GEN_INT (cst);
2090 operands[2] = GEN_INT (cst2);
2092 return "li %0,%1\n\tevmergelo %0,%0,%0";
2094 return "li %0,%1\n\tevmergelo %0,%0,%0\n\tli %0,%2";
2098 mask64_1or2_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED,
2101 if (GET_CODE (op) == CONST_INT)
2103 HOST_WIDE_INT c, lsb;
2108 /* Disallow all zeros. */
2112 /* We can use a single rlwinm insn if no upper bits of C are set
2113 AND there are zero, one or two transitions in the _whole_ of
2115 one_ok = !(c & ~(HOST_WIDE_INT)0xffffffff);
2117 /* We don't change the number of transitions by inverting,
2118 so make sure we start with the LS bit zero. */
2122 /* Find the first transition. */
2125 /* Invert to look for a second transition. */
2128 /* Erase first transition. */
2131 /* Find the second transition. */
2134 /* Invert to look for a third transition. */
2137 /* Erase second transition. */
2140 if (one_ok && !(allow_one || c))
2143 /* Find the third transition (if any). */
2146 /* Match if all the bits above are 1's (or c is zero). */
2152 /* Generates shifts and masks for a pair of rldicl or rldicr insns to
2153 implement ANDing by the mask IN. */
2155 build_mask64_2_operands (rtx in, rtx *out)
2157 #if HOST_BITS_PER_WIDE_INT >= 64
2158 unsigned HOST_WIDE_INT c, lsb, m1, m2;
2161 gcc_assert (GET_CODE (in) == CONST_INT);
2166 /* Assume c initially something like 0x00fff000000fffff. The idea
2167 is to rotate the word so that the middle ^^^^^^ group of zeros
2168 is at the MS end and can be cleared with an rldicl mask. We then
2169 rotate back and clear off the MS ^^ group of zeros with a
2171 c = ~c; /* c == 0xff000ffffff00000 */
2172 lsb = c & -c; /* lsb == 0x0000000000100000 */
2173 m1 = -lsb; /* m1 == 0xfffffffffff00000 */
2174 c = ~c; /* c == 0x00fff000000fffff */
2175 c &= -lsb; /* c == 0x00fff00000000000 */
2176 lsb = c & -c; /* lsb == 0x0000100000000000 */
2177 c = ~c; /* c == 0xff000fffffffffff */
2178 c &= -lsb; /* c == 0xff00000000000000 */
2180 while ((lsb >>= 1) != 0)
2181 shift++; /* shift == 44 on exit from loop */
2182 m1 <<= 64 - shift; /* m1 == 0xffffff0000000000 */
2183 m1 = ~m1; /* m1 == 0x000000ffffffffff */
2184 m2 = ~c; /* m2 == 0x00ffffffffffffff */
2188 /* Assume c initially something like 0xff000f0000000000. The idea
2189 is to rotate the word so that the ^^^ middle group of zeros
2190 is at the LS end and can be cleared with an rldicr mask. We then
2191 rotate back and clear off the LS group of ^^^^^^^^^^ zeros with
2193 lsb = c & -c; /* lsb == 0x0000010000000000 */
2194 m2 = -lsb; /* m2 == 0xffffff0000000000 */
2195 c = ~c; /* c == 0x00fff0ffffffffff */
2196 c &= -lsb; /* c == 0x00fff00000000000 */
2197 lsb = c & -c; /* lsb == 0x0000100000000000 */
2198 c = ~c; /* c == 0xff000fffffffffff */
2199 c &= -lsb; /* c == 0xff00000000000000 */
2201 while ((lsb >>= 1) != 0)
2202 shift++; /* shift == 44 on exit from loop */
2203 m1 = ~c; /* m1 == 0x00ffffffffffffff */
2204 m1 >>= shift; /* m1 == 0x0000000000000fff */
2205 m1 = ~m1; /* m1 == 0xfffffffffffff000 */
2208 /* Note that when we only have two 0->1 and 1->0 transitions, one of the
2209 masks will be all 1's. We are guaranteed more than one transition. */
2210 out[0] = GEN_INT (64 - shift);
2211 out[1] = GEN_INT (m1);
2212 out[2] = GEN_INT (shift);
2213 out[3] = GEN_INT (m2);
2221 /* Return TRUE if OP is an invalid SUBREG operation on the e500. */
2224 invalid_e500_subreg (rtx op, enum machine_mode mode)
2226 /* Reject (subreg:SI (reg:DF)). */
2227 if (GET_CODE (op) == SUBREG
2229 && REG_P (SUBREG_REG (op))
2230 && GET_MODE (SUBREG_REG (op)) == DFmode)
2233 /* Reject (subreg:DF (reg:DI)). */
2234 if (GET_CODE (op) == SUBREG
2236 && REG_P (SUBREG_REG (op))
2237 && GET_MODE (SUBREG_REG (op)) == DImode)
2243 /* Darwin, AIX increases natural record alignment to doubleword if the first
2244 field is an FP double while the FP fields remain word aligned. */
2247 rs6000_special_round_type_align (tree type, int computed, int specified)
2249 tree field = TYPE_FIELDS (type);
2251 /* Skip all non field decls */
2252 while (field != NULL && TREE_CODE (field) != FIELD_DECL)
2253 field = TREE_CHAIN (field);
2255 if (field == NULL || field == type || DECL_MODE (field) != DFmode)
2256 return MAX (computed, specified);
2258 return MAX (MAX (computed, specified), 64);
2261 /* Return 1 for an operand in small memory on V.4/eabi. */
2264 small_data_operand (rtx op ATTRIBUTE_UNUSED,
2265 enum machine_mode mode ATTRIBUTE_UNUSED)
2270 if (rs6000_sdata == SDATA_NONE || rs6000_sdata == SDATA_DATA)
2273 if (DEFAULT_ABI != ABI_V4)
2276 if (GET_CODE (op) == SYMBOL_REF)
2279 else if (GET_CODE (op) != CONST
2280 || GET_CODE (XEXP (op, 0)) != PLUS
2281 || GET_CODE (XEXP (XEXP (op, 0), 0)) != SYMBOL_REF
2282 || GET_CODE (XEXP (XEXP (op, 0), 1)) != CONST_INT)
2287 rtx sum = XEXP (op, 0);
2288 HOST_WIDE_INT summand;
2290 /* We have to be careful here, because it is the referenced address
2291 that must be 32k from _SDA_BASE_, not just the symbol. */
2292 summand = INTVAL (XEXP (sum, 1));
2293 if (summand < 0 || (unsigned HOST_WIDE_INT) summand > g_switch_value)
2296 sym_ref = XEXP (sum, 0);
2299 return SYMBOL_REF_SMALL_P (sym_ref);
2305 /* Return true if either operand is a general purpose register. */
2308 gpr_or_gpr_p (rtx op0, rtx op1)
2310 return ((REG_P (op0) && INT_REGNO_P (REGNO (op0)))
2311 || (REG_P (op1) && INT_REGNO_P (REGNO (op1))));
2315 /* Subroutines of rs6000_legitimize_address and rs6000_legitimate_address. */
2318 constant_pool_expr_1 (rtx op, int *have_sym, int *have_toc)
2320 switch (GET_CODE (op))
2323 if (RS6000_SYMBOL_REF_TLS_P (op))
2325 else if (CONSTANT_POOL_ADDRESS_P (op))
2327 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (op), Pmode))
2335 else if (! strcmp (XSTR (op, 0), toc_label_name))
2344 return (constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc)
2345 && constant_pool_expr_1 (XEXP (op, 1), have_sym, have_toc));
2347 return constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc);
2356 constant_pool_expr_p (rtx op)
2360 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_sym;
2364 toc_relative_expr_p (rtx op)
2368 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_toc;
2372 legitimate_constant_pool_address_p (rtx x)
2375 && GET_CODE (x) == PLUS
2376 && GET_CODE (XEXP (x, 0)) == REG
2377 && (TARGET_MINIMAL_TOC || REGNO (XEXP (x, 0)) == TOC_REGISTER)
2378 && constant_pool_expr_p (XEXP (x, 1)));
2382 legitimate_small_data_p (enum machine_mode mode, rtx x)
2384 return (DEFAULT_ABI == ABI_V4
2385 && !flag_pic && !TARGET_TOC
2386 && (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST)
2387 && small_data_operand (x, mode));
2390 /* SPE offset addressing is limited to 5-bits worth of double words. */
2391 #define SPE_CONST_OFFSET_OK(x) (((x) & ~0xf8) == 0)
2394 rs6000_legitimate_offset_address_p (enum machine_mode mode, rtx x, int strict)
2396 unsigned HOST_WIDE_INT offset, extra;
2398 if (GET_CODE (x) != PLUS)
2400 if (GET_CODE (XEXP (x, 0)) != REG)
2402 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
2404 if (legitimate_constant_pool_address_p (x))
2406 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
2409 offset = INTVAL (XEXP (x, 1));
2417 /* AltiVec vector modes. Only reg+reg addressing is valid here,
2418 which leaves the only valid constant offset of zero, which by
2419 canonicalization rules is also invalid. */
2426 /* SPE vector modes. */
2427 return SPE_CONST_OFFSET_OK (offset);
2430 if (TARGET_E500_DOUBLE)
2431 return SPE_CONST_OFFSET_OK (offset);
2434 /* On e500v2, we may have:
2436 (subreg:DF (mem:DI (plus (reg) (const_int))) 0).
2438 Which gets addressed with evldd instructions. */
2439 if (TARGET_E500_DOUBLE)
2440 return SPE_CONST_OFFSET_OK (offset);
2442 if (mode == DFmode || !TARGET_POWERPC64)
2444 else if (offset & 3)
2450 if (mode == TFmode || !TARGET_POWERPC64)
2452 else if (offset & 3)
2463 return (offset < 0x10000) && (offset + extra < 0x10000);
2467 legitimate_indexed_address_p (rtx x, int strict)
2471 if (GET_CODE (x) != PLUS)
2477 if (!REG_P (op0) || !REG_P (op1))
2480 return ((INT_REG_OK_FOR_BASE_P (op0, strict)
2481 && INT_REG_OK_FOR_INDEX_P (op1, strict))
2482 || (INT_REG_OK_FOR_BASE_P (op1, strict)
2483 && INT_REG_OK_FOR_INDEX_P (op0, strict)));
2487 legitimate_indirect_address_p (rtx x, int strict)
2489 return GET_CODE (x) == REG && INT_REG_OK_FOR_BASE_P (x, strict);
2493 macho_lo_sum_memory_operand (rtx x, enum machine_mode mode)
2495 if (!TARGET_MACHO || !flag_pic
2496 || mode != SImode || GET_CODE (x) != MEM)
2500 if (GET_CODE (x) != LO_SUM)
2502 if (GET_CODE (XEXP (x, 0)) != REG)
2504 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), 0))
2508 return CONSTANT_P (x);
2512 legitimate_lo_sum_address_p (enum machine_mode mode, rtx x, int strict)
2514 if (GET_CODE (x) != LO_SUM)
2516 if (GET_CODE (XEXP (x, 0)) != REG)
2518 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
2520 /* Restrict addressing for DI because of our SUBREG hackery. */
2521 if (TARGET_E500_DOUBLE && (mode == DFmode || mode == DImode))
2525 if (TARGET_ELF || TARGET_MACHO)
2527 if (DEFAULT_ABI != ABI_AIX && DEFAULT_ABI != ABI_DARWIN && flag_pic)
2531 if (GET_MODE_NUNITS (mode) != 1)
2533 if (GET_MODE_BITSIZE (mode) > 64
2534 || (GET_MODE_BITSIZE (mode) > 32 && !TARGET_POWERPC64
2535 && !(TARGET_HARD_FLOAT && TARGET_FPRS && mode == DFmode)))
2538 return CONSTANT_P (x);
2545 /* Try machine-dependent ways of modifying an illegitimate address
2546 to be legitimate. If we find one, return the new, valid address.
2547 This is used from only one place: `memory_address' in explow.c.
2549 OLDX is the address as it was before break_out_memory_refs was
2550 called. In some cases it is useful to look at this to decide what
2553 MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.
2555 It is always safe for this function to do nothing. It exists to
2556 recognize opportunities to optimize the output.
2558 On RS/6000, first check for the sum of a register with a constant
2559 integer that is out of range. If so, generate code to add the
2560 constant with the low-order 16 bits masked to the register and force
2561 this result into another register (this can be done with `cau').
2562 Then generate an address of REG+(CONST&0xffff), allowing for the
2563 possibility of bit 16 being a one.
2565 Then check for the sum of a register and something not constant, try to
2566 load the other things into a register and return the sum. */
2569 rs6000_legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
2570 enum machine_mode mode)
2572 if (GET_CODE (x) == SYMBOL_REF)
2574 enum tls_model model = SYMBOL_REF_TLS_MODEL (x);
2576 return rs6000_legitimize_tls_address (x, model);
2579 if (GET_CODE (x) == PLUS
2580 && GET_CODE (XEXP (x, 0)) == REG
2581 && GET_CODE (XEXP (x, 1)) == CONST_INT
2582 && (unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1)) + 0x8000) >= 0x10000)
2584 HOST_WIDE_INT high_int, low_int;
2586 low_int = ((INTVAL (XEXP (x, 1)) & 0xffff) ^ 0x8000) - 0x8000;
2587 high_int = INTVAL (XEXP (x, 1)) - low_int;
2588 sum = force_operand (gen_rtx_PLUS (Pmode, XEXP (x, 0),
2589 GEN_INT (high_int)), 0);
2590 return gen_rtx_PLUS (Pmode, sum, GEN_INT (low_int));
2592 else if (GET_CODE (x) == PLUS
2593 && GET_CODE (XEXP (x, 0)) == REG
2594 && GET_CODE (XEXP (x, 1)) != CONST_INT
2595 && GET_MODE_NUNITS (mode) == 1
2596 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
2598 || (((mode != DImode && mode != DFmode) || TARGET_E500_DOUBLE)
2600 && (TARGET_POWERPC64 || mode != DImode)
2603 return gen_rtx_PLUS (Pmode, XEXP (x, 0),
2604 force_reg (Pmode, force_operand (XEXP (x, 1), 0)));
2606 else if (ALTIVEC_VECTOR_MODE (mode))
2610 /* Make sure both operands are registers. */
2611 if (GET_CODE (x) == PLUS)
2612 return gen_rtx_PLUS (Pmode, force_reg (Pmode, XEXP (x, 0)),
2613 force_reg (Pmode, XEXP (x, 1)));
2615 reg = force_reg (Pmode, x);
2618 else if (SPE_VECTOR_MODE (mode)
2619 || (TARGET_E500_DOUBLE && (mode == DFmode
2620 || mode == DImode)))
2624 /* We accept [reg + reg] and [reg + OFFSET]. */
2626 if (GET_CODE (x) == PLUS)
2628 rtx op1 = XEXP (x, 0);
2629 rtx op2 = XEXP (x, 1);
2631 op1 = force_reg (Pmode, op1);
2633 if (GET_CODE (op2) != REG
2634 && (GET_CODE (op2) != CONST_INT
2635 || !SPE_CONST_OFFSET_OK (INTVAL (op2))))
2636 op2 = force_reg (Pmode, op2);
2638 return gen_rtx_PLUS (Pmode, op1, op2);
2641 return force_reg (Pmode, x);
2647 && GET_CODE (x) != CONST_INT
2648 && GET_CODE (x) != CONST_DOUBLE
2650 && GET_MODE_NUNITS (mode) == 1
2651 && (GET_MODE_BITSIZE (mode) <= 32
2652 || ((TARGET_HARD_FLOAT && TARGET_FPRS) && mode == DFmode)))
2654 rtx reg = gen_reg_rtx (Pmode);
2655 emit_insn (gen_elf_high (reg, x));
2656 return gen_rtx_LO_SUM (Pmode, reg, x);
2658 else if (TARGET_MACHO && TARGET_32BIT && TARGET_NO_TOC
2661 && ! MACHO_DYNAMIC_NO_PIC_P
2663 && GET_CODE (x) != CONST_INT
2664 && GET_CODE (x) != CONST_DOUBLE
2666 && ((TARGET_HARD_FLOAT && TARGET_FPRS) || mode != DFmode)
2670 rtx reg = gen_reg_rtx (Pmode);
2671 emit_insn (gen_macho_high (reg, x));
2672 return gen_rtx_LO_SUM (Pmode, reg, x);
2675 && constant_pool_expr_p (x)
2676 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), Pmode))
2678 return create_TOC_reference (x);
2684 /* This is called from dwarf2out.c via ASM_OUTPUT_DWARF_DTPREL.
2685 We need to emit DTP-relative relocations. */
2688 rs6000_output_dwarf_dtprel (FILE *file, int size, rtx x)
2693 fputs ("\t.long\t", file);
2696 fputs (DOUBLE_INT_ASM_OP, file);
2701 output_addr_const (file, x);
2702 fputs ("@dtprel+0x8000", file);
2705 /* Construct the SYMBOL_REF for the tls_get_addr function. */
2707 static GTY(()) rtx rs6000_tls_symbol;
2709 rs6000_tls_get_addr (void)
2711 if (!rs6000_tls_symbol)
2712 rs6000_tls_symbol = init_one_libfunc ("__tls_get_addr");
2714 return rs6000_tls_symbol;
2717 /* Construct the SYMBOL_REF for TLS GOT references. */
2719 static GTY(()) rtx rs6000_got_symbol;
2721 rs6000_got_sym (void)
2723 if (!rs6000_got_symbol)
2725 rs6000_got_symbol = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
2726 SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_LOCAL;
2727 SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_EXTERNAL;
2730 return rs6000_got_symbol;
2733 /* ADDR contains a thread-local SYMBOL_REF. Generate code to compute
2734 this (thread-local) address. */
2737 rs6000_legitimize_tls_address (rtx addr, enum tls_model model)
2741 dest = gen_reg_rtx (Pmode);
2742 if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 16)
2748 tlsreg = gen_rtx_REG (Pmode, 13);
2749 insn = gen_tls_tprel_64 (dest, tlsreg, addr);
2753 tlsreg = gen_rtx_REG (Pmode, 2);
2754 insn = gen_tls_tprel_32 (dest, tlsreg, addr);
2758 else if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 32)
2762 tmp = gen_reg_rtx (Pmode);
2765 tlsreg = gen_rtx_REG (Pmode, 13);
2766 insn = gen_tls_tprel_ha_64 (tmp, tlsreg, addr);
2770 tlsreg = gen_rtx_REG (Pmode, 2);
2771 insn = gen_tls_tprel_ha_32 (tmp, tlsreg, addr);
2775 insn = gen_tls_tprel_lo_64 (dest, tmp, addr);
2777 insn = gen_tls_tprel_lo_32 (dest, tmp, addr);
2782 rtx r3, got, tga, tmp1, tmp2, eqv;
2785 got = gen_rtx_REG (Pmode, TOC_REGISTER);
2789 got = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
2792 rtx gsym = rs6000_got_sym ();
2793 got = gen_reg_rtx (Pmode);
2795 rs6000_emit_move (got, gsym, Pmode);
2798 rtx tempLR, tmp3, mem;
2801 tempLR = gen_reg_rtx (Pmode);
2802 tmp1 = gen_reg_rtx (Pmode);
2803 tmp2 = gen_reg_rtx (Pmode);
2804 tmp3 = gen_reg_rtx (Pmode);
2805 mem = gen_const_mem (Pmode, tmp1);
2807 first = emit_insn (gen_load_toc_v4_PIC_1b (tempLR, gsym));
2808 emit_move_insn (tmp1, tempLR);
2809 emit_move_insn (tmp2, mem);
2810 emit_insn (gen_addsi3 (tmp3, tmp1, tmp2));
2811 last = emit_move_insn (got, tmp3);
2812 REG_NOTES (last) = gen_rtx_EXPR_LIST (REG_EQUAL, gsym,
2814 REG_NOTES (first) = gen_rtx_INSN_LIST (REG_LIBCALL, last,
2816 REG_NOTES (last) = gen_rtx_INSN_LIST (REG_RETVAL, first,
2822 if (model == TLS_MODEL_GLOBAL_DYNAMIC)
2824 r3 = gen_rtx_REG (Pmode, 3);
2826 insn = gen_tls_gd_64 (r3, got, addr);
2828 insn = gen_tls_gd_32 (r3, got, addr);
2831 tga = gen_rtx_MEM (Pmode, rs6000_tls_get_addr ());
2832 insn = gen_call_value (r3, tga, const0_rtx, const0_rtx);
2833 insn = emit_call_insn (insn);
2834 CONST_OR_PURE_CALL_P (insn) = 1;
2835 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
2836 insn = get_insns ();
2838 emit_libcall_block (insn, dest, r3, addr);
2840 else if (model == TLS_MODEL_LOCAL_DYNAMIC)
2842 r3 = gen_rtx_REG (Pmode, 3);
2844 insn = gen_tls_ld_64 (r3, got);
2846 insn = gen_tls_ld_32 (r3, got);
2849 tga = gen_rtx_MEM (Pmode, rs6000_tls_get_addr ());
2850 insn = gen_call_value (r3, tga, const0_rtx, const0_rtx);
2851 insn = emit_call_insn (insn);
2852 CONST_OR_PURE_CALL_P (insn) = 1;
2853 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
2854 insn = get_insns ();
2856 tmp1 = gen_reg_rtx (Pmode);
2857 eqv = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx),
2859 emit_libcall_block (insn, tmp1, r3, eqv);
2860 if (rs6000_tls_size == 16)
2863 insn = gen_tls_dtprel_64 (dest, tmp1, addr);
2865 insn = gen_tls_dtprel_32 (dest, tmp1, addr);
2867 else if (rs6000_tls_size == 32)
2869 tmp2 = gen_reg_rtx (Pmode);
2871 insn = gen_tls_dtprel_ha_64 (tmp2, tmp1, addr);
2873 insn = gen_tls_dtprel_ha_32 (tmp2, tmp1, addr);
2876 insn = gen_tls_dtprel_lo_64 (dest, tmp2, addr);
2878 insn = gen_tls_dtprel_lo_32 (dest, tmp2, addr);
2882 tmp2 = gen_reg_rtx (Pmode);
2884 insn = gen_tls_got_dtprel_64 (tmp2, got, addr);
2886 insn = gen_tls_got_dtprel_32 (tmp2, got, addr);
2888 insn = gen_rtx_SET (Pmode, dest,
2889 gen_rtx_PLUS (Pmode, tmp2, tmp1));
2895 /* IE, or 64 bit offset LE. */
2896 tmp2 = gen_reg_rtx (Pmode);
2898 insn = gen_tls_got_tprel_64 (tmp2, got, addr);
2900 insn = gen_tls_got_tprel_32 (tmp2, got, addr);
2903 insn = gen_tls_tls_64 (dest, tmp2, addr);
2905 insn = gen_tls_tls_32 (dest, tmp2, addr);
2913 /* Return 1 if X contains a thread-local symbol. */
2916 rs6000_tls_referenced_p (rtx x)
2918 if (! TARGET_HAVE_TLS)
2921 return for_each_rtx (&x, &rs6000_tls_symbol_ref_1, 0);
2924 /* Return 1 if *X is a thread-local symbol. This is the same as
2925 rs6000_tls_symbol_ref except for the type of the unused argument. */
2928 rs6000_tls_symbol_ref_1 (rtx *x, void *data ATTRIBUTE_UNUSED)
2930 return RS6000_SYMBOL_REF_TLS_P (*x);
2933 /* The convention appears to be to define this wherever it is used.
2934 With legitimize_reload_address now defined here, REG_MODE_OK_FOR_BASE_P
2935 is now used here. */
2936 #ifndef REG_MODE_OK_FOR_BASE_P
2937 #define REG_MODE_OK_FOR_BASE_P(REGNO, MODE) REG_OK_FOR_BASE_P (REGNO)
2940 /* Our implementation of LEGITIMIZE_RELOAD_ADDRESS. Returns a value to
2941 replace the input X, or the original X if no replacement is called for.
2942 The output parameter *WIN is 1 if the calling macro should goto WIN,
2945 For RS/6000, we wish to handle large displacements off a base
2946 register by splitting the addend across an addiu/addis and the mem insn.
2947 This cuts number of extra insns needed from 3 to 1.
2949 On Darwin, we use this to generate code for floating point constants.
2950 A movsf_low is generated so we wind up with 2 instructions rather than 3.
2951 The Darwin code is inside #if TARGET_MACHO because only then is
2952 machopic_function_base_name() defined. */
2954 rs6000_legitimize_reload_address (rtx x, enum machine_mode mode,
2955 int opnum, int type,
2956 int ind_levels ATTRIBUTE_UNUSED, int *win)
2958 /* We must recognize output that we have already generated ourselves. */
2959 if (GET_CODE (x) == PLUS
2960 && GET_CODE (XEXP (x, 0)) == PLUS
2961 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
2962 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2963 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2965 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
2966 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
2967 opnum, (enum reload_type)type);
2973 if (DEFAULT_ABI == ABI_DARWIN && flag_pic
2974 && GET_CODE (x) == LO_SUM
2975 && GET_CODE (XEXP (x, 0)) == PLUS
2976 && XEXP (XEXP (x, 0), 0) == pic_offset_table_rtx
2977 && GET_CODE (XEXP (XEXP (x, 0), 1)) == HIGH
2978 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 1), 0)) == CONST
2979 && XEXP (XEXP (XEXP (x, 0), 1), 0) == XEXP (x, 1)
2980 && GET_CODE (XEXP (XEXP (x, 1), 0)) == MINUS
2981 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == SYMBOL_REF
2982 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == SYMBOL_REF)
2984 /* Result of previous invocation of this function on Darwin
2985 floating point constant. */
2986 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
2987 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
2988 opnum, (enum reload_type)type);
2994 /* Force ld/std non-word aligned offset into base register by wrapping
2996 if (GET_CODE (x) == PLUS
2997 && GET_CODE (XEXP (x, 0)) == REG
2998 && REGNO (XEXP (x, 0)) < 32
2999 && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
3000 && GET_CODE (XEXP (x, 1)) == CONST_INT
3001 && (INTVAL (XEXP (x, 1)) & 3) != 0
3002 && !ALTIVEC_VECTOR_MODE (mode)
3003 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD
3004 && TARGET_POWERPC64)
3006 x = gen_rtx_PLUS (GET_MODE (x), x, GEN_INT (0));
3007 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
3008 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
3009 opnum, (enum reload_type) type);
3014 if (GET_CODE (x) == PLUS
3015 && GET_CODE (XEXP (x, 0)) == REG
3016 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
3017 && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
3018 && GET_CODE (XEXP (x, 1)) == CONST_INT
3019 && !SPE_VECTOR_MODE (mode)
3020 && !(TARGET_E500_DOUBLE && (mode == DFmode
3022 && !ALTIVEC_VECTOR_MODE (mode))
3024 HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
3025 HOST_WIDE_INT low = ((val & 0xffff) ^ 0x8000) - 0x8000;
3027 = (((val - low) & 0xffffffff) ^ 0x80000000) - 0x80000000;
3029 /* Check for 32-bit overflow. */
3030 if (high + low != val)
3036 /* Reload the high part into a base reg; leave the low part
3037 in the mem directly. */
3039 x = gen_rtx_PLUS (GET_MODE (x),
3040 gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0),
3044 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
3045 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
3046 opnum, (enum reload_type)type);
3052 if (GET_CODE (x) == SYMBOL_REF
3053 && DEFAULT_ABI == ABI_DARWIN
3054 && !ALTIVEC_VECTOR_MODE (mode)
3055 && (flag_pic || MACHO_DYNAMIC_NO_PIC_P)
3056 /* Don't do this for TFmode, since the result isn't offsettable.
3057 The same goes for DImode without 64-bit gprs. */
3059 && (mode != DImode || TARGET_POWERPC64))
3063 rtx offset = gen_rtx_CONST (Pmode,
3064 gen_rtx_MINUS (Pmode, x,
3065 machopic_function_base_sym ()));
3066 x = gen_rtx_LO_SUM (GET_MODE (x),
3067 gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
3068 gen_rtx_HIGH (Pmode, offset)), offset);
3071 x = gen_rtx_LO_SUM (GET_MODE (x),
3072 gen_rtx_HIGH (Pmode, x), x);
3074 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
3075 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
3076 opnum, (enum reload_type)type);
3083 && constant_pool_expr_p (x)
3084 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), mode))
3086 (x) = create_TOC_reference (x);
3094 /* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
3095 that is a valid memory address for an instruction.
3096 The MODE argument is the machine mode for the MEM expression
3097 that wants to use this address.
3099 On the RS/6000, there are four valid address: a SYMBOL_REF that
3100 refers to a constant pool entry of an address (or the sum of it
3101 plus a constant), a short (16-bit signed) constant plus a register,
3102 the sum of two registers, or a register indirect, possibly with an
3103 auto-increment. For DFmode and DImode with a constant plus register,
3104 we must ensure that both words are addressable or PowerPC64 with offset
3107 For modes spanning multiple registers (DFmode in 32-bit GPRs,
3108 32-bit DImode, TImode, TFmode), indexed addressing cannot be used because
3109 adjacent memory cells are accessed by adding word-sized offsets
3110 during assembly output. */
3112 rs6000_legitimate_address (enum machine_mode mode, rtx x, int reg_ok_strict)
3114 /* If this is an unaligned stvx/ldvx type address, discard the outer AND. */
3116 && ALTIVEC_VECTOR_MODE (mode)
3117 && GET_CODE (x) == AND
3118 && GET_CODE (XEXP (x, 1)) == CONST_INT
3119 && INTVAL (XEXP (x, 1)) == -16)
3122 if (RS6000_SYMBOL_REF_TLS_P (x))
3124 if (legitimate_indirect_address_p (x, reg_ok_strict))
3126 if ((GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
3127 && !ALTIVEC_VECTOR_MODE (mode)
3128 && !SPE_VECTOR_MODE (mode)
3129 /* Restrict addressing for DI because of our SUBREG hackery. */
3130 && !(TARGET_E500_DOUBLE && (mode == DFmode || mode == DImode))
3132 && legitimate_indirect_address_p (XEXP (x, 0), reg_ok_strict))
3134 if (legitimate_small_data_p (mode, x))
3136 if (legitimate_constant_pool_address_p (x))
3138 /* If not REG_OK_STRICT (before reload) let pass any stack offset. */
3140 && GET_CODE (x) == PLUS
3141 && GET_CODE (XEXP (x, 0)) == REG
3142 && (XEXP (x, 0) == virtual_stack_vars_rtx
3143 || XEXP (x, 0) == arg_pointer_rtx)
3144 && GET_CODE (XEXP (x, 1)) == CONST_INT)
3146 if (rs6000_legitimate_offset_address_p (mode, x, reg_ok_strict))
3150 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
3152 || ((mode != DFmode || TARGET_E500_DOUBLE) && mode != TFmode))
3153 && (TARGET_POWERPC64 || mode != DImode)
3154 && legitimate_indexed_address_p (x, reg_ok_strict))
3156 if (legitimate_lo_sum_address_p (mode, x, reg_ok_strict))
3161 /* Go to LABEL if ADDR (a legitimate address expression)
3162 has an effect that depends on the machine mode it is used for.
3164 On the RS/6000 this is true of all integral offsets (since AltiVec
3165 modes don't allow them) or is a pre-increment or decrement.
3167 ??? Except that due to conceptual problems in offsettable_address_p
3168 we can't really report the problems of integral offsets. So leave
3169 this assuming that the adjustable offset must be valid for the
3170 sub-words of a TFmode operand, which is what we had before. */
3173 rs6000_mode_dependent_address (rtx addr)
3175 switch (GET_CODE (addr))
3178 if (GET_CODE (XEXP (addr, 1)) == CONST_INT)
3180 unsigned HOST_WIDE_INT val = INTVAL (XEXP (addr, 1));
3181 return val + 12 + 0x8000 >= 0x10000;
3190 return TARGET_UPDATE;
3199 /* Return number of consecutive hard regs needed starting at reg REGNO
3200 to hold something of mode MODE.
3201 This is ordinarily the length in words of a value of mode MODE
3202 but can be less for certain modes in special long registers.
3204 For the SPE, GPRs are 64 bits but only 32 bits are visible in
3205 scalar instructions. The upper 32 bits are only available to the
3208 POWER and PowerPC GPRs hold 32 bits worth;
3209 PowerPC64 GPRs and FPRs point register holds 64 bits worth. */
3212 rs6000_hard_regno_nregs (int regno, enum machine_mode mode)
3214 if (FP_REGNO_P (regno))
3215 return (GET_MODE_SIZE (mode) + UNITS_PER_FP_WORD - 1) / UNITS_PER_FP_WORD;
3217 if (TARGET_E500_DOUBLE && mode == DFmode)
3220 if (SPE_SIMD_REGNO_P (regno) && TARGET_SPE && SPE_VECTOR_MODE (mode))
3221 return (GET_MODE_SIZE (mode) + UNITS_PER_SPE_WORD - 1) / UNITS_PER_SPE_WORD;
3223 if (ALTIVEC_REGNO_P (regno))
3225 (GET_MODE_SIZE (mode) + UNITS_PER_ALTIVEC_WORD - 1) / UNITS_PER_ALTIVEC_WORD;
3227 return (GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
3230 /* Change register usage conditional on target flags. */
3232 rs6000_conditional_register_usage (void)
3236 /* Set MQ register fixed (already call_used) if not POWER
3237 architecture (RIOS1, RIOS2, RSC, and PPC601) so that it will not
3242 /* 64-bit AIX reserves GPR13 for thread-private data. */
3244 fixed_regs[13] = call_used_regs[13]
3245 = call_really_used_regs[13] = 1;
3247 /* Conditionally disable FPRs. */
3248 if (TARGET_SOFT_FLOAT || !TARGET_FPRS)
3249 for (i = 32; i < 64; i++)
3250 fixed_regs[i] = call_used_regs[i]
3251 = call_really_used_regs[i] = 1;
3253 if (DEFAULT_ABI == ABI_V4
3254 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
3256 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
3258 if (DEFAULT_ABI == ABI_V4
3259 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
3261 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
3262 = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
3263 = call_really_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
3265 if (DEFAULT_ABI == ABI_DARWIN
3266 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
3267 global_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
3268 = fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
3269 = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
3270 = call_really_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
3272 if (TARGET_TOC && TARGET_MINIMAL_TOC)
3273 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
3274 = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
3277 global_regs[VSCR_REGNO] = 1;
3281 global_regs[SPEFSCR_REGNO] = 1;
3282 fixed_regs[FIXED_SCRATCH]
3283 = call_used_regs[FIXED_SCRATCH]
3284 = call_really_used_regs[FIXED_SCRATCH] = 1;
3287 if (! TARGET_ALTIVEC)
3289 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
3290 fixed_regs[i] = call_used_regs[i] = call_really_used_regs[i] = 1;
3291 call_really_used_regs[VRSAVE_REGNO] = 1;
3294 if (TARGET_ALTIVEC_ABI)
3295 for (i = FIRST_ALTIVEC_REGNO; i < FIRST_ALTIVEC_REGNO + 20; ++i)
3296 call_used_regs[i] = call_really_used_regs[i] = 1;
3299 /* Try to output insns to set TARGET equal to the constant C if it can
3300 be done in less than N insns. Do all computations in MODE.
3301 Returns the place where the output has been placed if it can be
3302 done and the insns have been emitted. If it would take more than N
3303 insns, zero is returned and no insns and emitted. */
3306 rs6000_emit_set_const (rtx dest, enum machine_mode mode,
3307 rtx source, int n ATTRIBUTE_UNUSED)
3309 rtx result, insn, set;
3310 HOST_WIDE_INT c0, c1;
3317 dest = gen_reg_rtx (mode);
3318 emit_insn (gen_rtx_SET (VOIDmode, dest, source));
3322 result = no_new_pseudos ? dest : gen_reg_rtx (SImode);
3324 emit_insn (gen_rtx_SET (VOIDmode, result,
3325 GEN_INT (INTVAL (source)
3326 & (~ (HOST_WIDE_INT) 0xffff))));
3327 emit_insn (gen_rtx_SET (VOIDmode, dest,
3328 gen_rtx_IOR (SImode, result,
3329 GEN_INT (INTVAL (source) & 0xffff))));
3334 switch (GET_CODE (source))
3339 switch (GET_CODE (source))
3343 c0 = INTVAL (source);
3348 #if HOST_BITS_PER_WIDE_INT >= 64
3349 c0 = CONST_DOUBLE_LOW (source);
3352 c0 = CONST_DOUBLE_LOW (source);
3353 c1 = CONST_DOUBLE_HIGH (source);
3361 result = rs6000_emit_set_long_const (dest, c0, c1);
3368 insn = get_last_insn ();
3369 set = single_set (insn);
3370 if (! CONSTANT_P (SET_SRC (set)))
3371 set_unique_reg_note (insn, REG_EQUAL, source);
3376 /* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
3377 fall back to a straight forward decomposition. We do this to avoid
3378 exponential run times encountered when looking for longer sequences
3379 with rs6000_emit_set_const. */
3381 rs6000_emit_set_long_const (rtx dest, HOST_WIDE_INT c1, HOST_WIDE_INT c2)
3383 if (!TARGET_POWERPC64)
3385 rtx operand1, operand2;
3387 operand1 = operand_subword_force (dest, WORDS_BIG_ENDIAN == 0,
3389 operand2 = operand_subword_force (dest, WORDS_BIG_ENDIAN != 0,
3391 emit_move_insn (operand1, GEN_INT (c1));
3392 emit_move_insn (operand2, GEN_INT (c2));
3396 HOST_WIDE_INT ud1, ud2, ud3, ud4;
3399 ud2 = (c1 & 0xffff0000) >> 16;
3400 #if HOST_BITS_PER_WIDE_INT >= 64
3404 ud4 = (c2 & 0xffff0000) >> 16;
3406 if ((ud4 == 0xffff && ud3 == 0xffff && ud2 == 0xffff && (ud1 & 0x8000))
3407 || (ud4 == 0 && ud3 == 0 && ud2 == 0 && ! (ud1 & 0x8000)))
3410 emit_move_insn (dest, GEN_INT (((ud1 ^ 0x8000) - 0x8000)));
3412 emit_move_insn (dest, GEN_INT (ud1));
3415 else if ((ud4 == 0xffff && ud3 == 0xffff && (ud2 & 0x8000))
3416 || (ud4 == 0 && ud3 == 0 && ! (ud2 & 0x8000)))
3419 emit_move_insn (dest, GEN_INT (((ud2 << 16) ^ 0x80000000)
3422 emit_move_insn (dest, GEN_INT (ud2 << 16));
3424 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
3426 else if ((ud4 == 0xffff && (ud3 & 0x8000))
3427 || (ud4 == 0 && ! (ud3 & 0x8000)))
3430 emit_move_insn (dest, GEN_INT (((ud3 << 16) ^ 0x80000000)
3433 emit_move_insn (dest, GEN_INT (ud3 << 16));
3436 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud2)));
3437 emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (16)));
3439 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
3444 emit_move_insn (dest, GEN_INT (((ud4 << 16) ^ 0x80000000)
3447 emit_move_insn (dest, GEN_INT (ud4 << 16));
3450 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud3)));
3452 emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (32)));
3454 emit_move_insn (dest, gen_rtx_IOR (DImode, dest,
3455 GEN_INT (ud2 << 16)));
3457 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
3463 /* Helper for the following. Get rid of [r+r] memory refs
3464 in cases where it won't work (TImode, TFmode). */
3467 rs6000_eliminate_indexed_memrefs (rtx operands[2])
3469 if (GET_CODE (operands[0]) == MEM
3470 && GET_CODE (XEXP (operands[0], 0)) != REG
3471 && ! legitimate_constant_pool_address_p (XEXP (operands[0], 0))
3472 && ! reload_in_progress)
3474 = replace_equiv_address (operands[0],
3475 copy_addr_to_reg (XEXP (operands[0], 0)));
3477 if (GET_CODE (operands[1]) == MEM
3478 && GET_CODE (XEXP (operands[1], 0)) != REG
3479 && ! legitimate_constant_pool_address_p (XEXP (operands[1], 0))
3480 && ! reload_in_progress)
3482 = replace_equiv_address (operands[1],
3483 copy_addr_to_reg (XEXP (operands[1], 0)));
3486 /* Emit a move from SOURCE to DEST in mode MODE. */
3488 rs6000_emit_move (rtx dest, rtx source, enum machine_mode mode)
3492 operands[1] = source;
3494 /* Sanity checks. Check that we get CONST_DOUBLE only when we should. */
3495 if (GET_CODE (operands[1]) == CONST_DOUBLE
3496 && ! FLOAT_MODE_P (mode)
3497 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3499 /* FIXME. This should never happen. */
3500 /* Since it seems that it does, do the safe thing and convert
3502 operands[1] = gen_int_mode (CONST_DOUBLE_LOW (operands[1]), mode);
3504 gcc_assert (GET_CODE (operands[1]) != CONST_DOUBLE
3505 || FLOAT_MODE_P (mode)
3506 || ((CONST_DOUBLE_HIGH (operands[1]) != 0
3507 || CONST_DOUBLE_LOW (operands[1]) < 0)
3508 && (CONST_DOUBLE_HIGH (operands[1]) != -1
3509 || CONST_DOUBLE_LOW (operands[1]) >= 0)));
3511 /* Check if GCC is setting up a block move that will end up using FP
3512 registers as temporaries. We must make sure this is acceptable. */
3513 if (GET_CODE (operands[0]) == MEM
3514 && GET_CODE (operands[1]) == MEM
3516 && (SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[0]))
3517 || SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[1])))
3518 && ! (SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[0]) > 32
3519 ? 32 : MEM_ALIGN (operands[0])))
3520 || SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[1]) > 32
3522 : MEM_ALIGN (operands[1]))))
3523 && ! MEM_VOLATILE_P (operands [0])
3524 && ! MEM_VOLATILE_P (operands [1]))
3526 emit_move_insn (adjust_address (operands[0], SImode, 0),
3527 adjust_address (operands[1], SImode, 0));
3528 emit_move_insn (adjust_address (operands[0], SImode, 4),
3529 adjust_address (operands[1], SImode, 4));
3533 if (!no_new_pseudos && GET_CODE (operands[0]) == MEM
3534 && !gpc_reg_operand (operands[1], mode))
3535 operands[1] = force_reg (mode, operands[1]);
3537 if (mode == SFmode && ! TARGET_POWERPC
3538 && TARGET_HARD_FLOAT && TARGET_FPRS
3539 && GET_CODE (operands[0]) == MEM)
3543 if (reload_in_progress || reload_completed)
3544 regnum = true_regnum (operands[1]);
3545 else if (GET_CODE (operands[1]) == REG)
3546 regnum = REGNO (operands[1]);
3550 /* If operands[1] is a register, on POWER it may have
3551 double-precision data in it, so truncate it to single
3553 if (FP_REGNO_P (regnum) || regnum >= FIRST_PSEUDO_REGISTER)
3556 newreg = (no_new_pseudos ? operands[1] : gen_reg_rtx (mode));
3557 emit_insn (gen_aux_truncdfsf2 (newreg, operands[1]));
3558 operands[1] = newreg;
3562 /* Recognize the case where operand[1] is a reference to thread-local
3563 data and load its address to a register. */
3564 if (GET_CODE (operands[1]) == SYMBOL_REF)
3566 enum tls_model model = SYMBOL_REF_TLS_MODEL (operands[1]);
3568 operands[1] = rs6000_legitimize_tls_address (operands[1], model);
3571 /* Handle the case where reload calls us with an invalid address. */
3572 if (reload_in_progress && mode == Pmode
3573 && (! general_operand (operands[1], mode)
3574 || ! nonimmediate_operand (operands[0], mode)))
3577 /* 128-bit constant floating-point values on Darwin should really be
3578 loaded as two parts. */
3579 if ((DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_DARWIN)
3580 && TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_LONG_DOUBLE_128
3581 && mode == TFmode && GET_CODE (operands[1]) == CONST_DOUBLE)
3583 /* DImode is used, not DFmode, because simplify_gen_subreg doesn't
3584 know how to get a DFmode SUBREG of a TFmode. */
3585 rs6000_emit_move (simplify_gen_subreg (DImode, operands[0], mode, 0),
3586 simplify_gen_subreg (DImode, operands[1], mode, 0),
3588 rs6000_emit_move (simplify_gen_subreg (DImode, operands[0], mode,
3589 GET_MODE_SIZE (DImode)),
3590 simplify_gen_subreg (DImode, operands[1], mode,
3591 GET_MODE_SIZE (DImode)),
3596 /* FIXME: In the long term, this switch statement should go away
3597 and be replaced by a sequence of tests based on things like
3603 if (CONSTANT_P (operands[1])
3604 && GET_CODE (operands[1]) != CONST_INT)
3605 operands[1] = force_const_mem (mode, operands[1]);
3609 rs6000_eliminate_indexed_memrefs (operands);
3614 if (CONSTANT_P (operands[1])
3615 && ! easy_fp_constant (operands[1], mode))
3616 operands[1] = force_const_mem (mode, operands[1]);
3627 if (CONSTANT_P (operands[1])
3628 && !easy_vector_constant (operands[1], mode))
3629 operands[1] = force_const_mem (mode, operands[1]);
3634 /* Use default pattern for address of ELF small data */
3637 && DEFAULT_ABI == ABI_V4
3638 && (GET_CODE (operands[1]) == SYMBOL_REF
3639 || GET_CODE (operands[1]) == CONST)
3640 && small_data_operand (operands[1], mode))
3642 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
3646 if (DEFAULT_ABI == ABI_V4
3647 && mode == Pmode && mode == SImode
3648 && flag_pic == 1 && got_operand (operands[1], mode))
3650 emit_insn (gen_movsi_got (operands[0], operands[1]));
3654 if ((TARGET_ELF || DEFAULT_ABI == ABI_DARWIN)
3658 && CONSTANT_P (operands[1])
3659 && GET_CODE (operands[1]) != HIGH
3660 && GET_CODE (operands[1]) != CONST_INT)
3662 rtx target = (no_new_pseudos ? operands[0] : gen_reg_rtx (mode));
3664 /* If this is a function address on -mcall-aixdesc,
3665 convert it to the address of the descriptor. */
3666 if (DEFAULT_ABI == ABI_AIX
3667 && GET_CODE (operands[1]) == SYMBOL_REF
3668 && XSTR (operands[1], 0)[0] == '.')
3670 const char *name = XSTR (operands[1], 0);
3672 while (*name == '.')
3674 new_ref = gen_rtx_SYMBOL_REF (Pmode, name);
3675 CONSTANT_POOL_ADDRESS_P (new_ref)
3676 = CONSTANT_POOL_ADDRESS_P (operands[1]);
3677 SYMBOL_REF_FLAGS (new_ref) = SYMBOL_REF_FLAGS (operands[1]);
3678 SYMBOL_REF_USED (new_ref) = SYMBOL_REF_USED (operands[1]);
3679 SYMBOL_REF_DECL (new_ref) = SYMBOL_REF_DECL (operands[1]);
3680 operands[1] = new_ref;
3683 if (DEFAULT_ABI == ABI_DARWIN)
3686 if (MACHO_DYNAMIC_NO_PIC_P)
3688 /* Take care of any required data indirection. */
3689 operands[1] = rs6000_machopic_legitimize_pic_address (
3690 operands[1], mode, operands[0]);
3691 if (operands[0] != operands[1])
3692 emit_insn (gen_rtx_SET (VOIDmode,
3693 operands[0], operands[1]));
3697 emit_insn (gen_macho_high (target, operands[1]));
3698 emit_insn (gen_macho_low (operands[0], target, operands[1]));
3702 emit_insn (gen_elf_high (target, operands[1]));
3703 emit_insn (gen_elf_low (operands[0], target, operands[1]));
3707 /* If this is a SYMBOL_REF that refers to a constant pool entry,
3708 and we have put it in the TOC, we just need to make a TOC-relative
3711 && GET_CODE (operands[1]) == SYMBOL_REF
3712 && constant_pool_expr_p (operands[1])
3713 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands[1]),
3714 get_pool_mode (operands[1])))
3716 operands[1] = create_TOC_reference (operands[1]);
3718 else if (mode == Pmode
3719 && CONSTANT_P (operands[1])
3720 && ((GET_CODE (operands[1]) != CONST_INT
3721 && ! easy_fp_constant (operands[1], mode))
3722 || (GET_CODE (operands[1]) == CONST_INT
3723 && num_insns_constant (operands[1], mode) > 2)
3724 || (GET_CODE (operands[0]) == REG
3725 && FP_REGNO_P (REGNO (operands[0]))))
3726 && GET_CODE (operands[1]) != HIGH
3727 && ! legitimate_constant_pool_address_p (operands[1])
3728 && ! toc_relative_expr_p (operands[1]))
3730 /* Emit a USE operation so that the constant isn't deleted if
3731 expensive optimizations are turned on because nobody
3732 references it. This should only be done for operands that
3733 contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
3734 This should not be done for operands that contain LABEL_REFs.
3735 For now, we just handle the obvious case. */
3736 if (GET_CODE (operands[1]) != LABEL_REF)
3737 emit_insn (gen_rtx_USE (VOIDmode, operands[1]));
3740 /* Darwin uses a special PIC legitimizer. */
3741 if (DEFAULT_ABI == ABI_DARWIN && MACHOPIC_INDIRECT)
3744 rs6000_machopic_legitimize_pic_address (operands[1], mode,
3746 if (operands[0] != operands[1])
3747 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
3752 /* If we are to limit the number of things we put in the TOC and
3753 this is a symbol plus a constant we can add in one insn,
3754 just put the symbol in the TOC and add the constant. Don't do
3755 this if reload is in progress. */
3756 if (GET_CODE (operands[1]) == CONST
3757 && TARGET_NO_SUM_IN_TOC && ! reload_in_progress
3758 && GET_CODE (XEXP (operands[1], 0)) == PLUS
3759 && add_operand (XEXP (XEXP (operands[1], 0), 1), mode)
3760 && (GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == LABEL_REF
3761 || GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF)
3762 && ! side_effects_p (operands[0]))
3765 force_const_mem (mode, XEXP (XEXP (operands[1], 0), 0));
3766 rtx other = XEXP (XEXP (operands[1], 0), 1);
3768 sym = force_reg (mode, sym);
3770 emit_insn (gen_addsi3 (operands[0], sym, other));
3772 emit_insn (gen_adddi3 (operands[0], sym, other));
3776 operands[1] = force_const_mem (mode, operands[1]);
3779 && constant_pool_expr_p (XEXP (operands[1], 0))
3780 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
3781 get_pool_constant (XEXP (operands[1], 0)),
3782 get_pool_mode (XEXP (operands[1], 0))))
3785 = gen_const_mem (mode,
3786 create_TOC_reference (XEXP (operands[1], 0)));
3787 set_mem_alias_set (operands[1], get_TOC_alias_set ());
3793 rs6000_eliminate_indexed_memrefs (operands);
3797 emit_insn (gen_rtx_PARALLEL (VOIDmode,
3799 gen_rtx_SET (VOIDmode,
3800 operands[0], operands[1]),
3801 gen_rtx_CLOBBER (VOIDmode,
3802 gen_rtx_SCRATCH (SImode)))));
3811 /* Above, we may have called force_const_mem which may have returned
3812 an invalid address. If we can, fix this up; otherwise, reload will
3813 have to deal with it. */
3814 if (GET_CODE (operands[1]) == MEM && ! reload_in_progress)
3815 operands[1] = validize_mem (operands[1]);
3818 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
3821 /* Nonzero if we can use a floating-point register to pass this arg. */
3822 #define USE_FP_FOR_ARG_P(CUM,MODE,TYPE) \
3823 (GET_MODE_CLASS (MODE) == MODE_FLOAT \
3824 && (CUM)->fregno <= FP_ARG_MAX_REG \
3825 && TARGET_HARD_FLOAT && TARGET_FPRS)
3827 /* Nonzero if we can use an AltiVec register to pass this arg. */
3828 #define USE_ALTIVEC_FOR_ARG_P(CUM,MODE,TYPE,NAMED) \
3829 (ALTIVEC_VECTOR_MODE (MODE) \
3830 && (CUM)->vregno <= ALTIVEC_ARG_MAX_REG \
3831 && TARGET_ALTIVEC_ABI \
3834 /* Return a nonzero value to say to return the function value in
3835 memory, just as large structures are always returned. TYPE will be
3836 the data type of the value, and FNTYPE will be the type of the
3837 function doing the returning, or @code{NULL} for libcalls.
3839 The AIX ABI for the RS/6000 specifies that all structures are
3840 returned in memory. The Darwin ABI does the same. The SVR4 ABI
3841 specifies that structures <= 8 bytes are returned in r3/r4, but a
3842 draft put them in memory, and GCC used to implement the draft
3843 instead of the final standard. Therefore, TARGET_AIX_STRUCT_RET
3844 controls this instead of DEFAULT_ABI; V.4 targets needing backward
3845 compatibility can change DRAFT_V4_STRUCT_RET to override the
3846 default, and -m switches get the final word. See
3847 rs6000_override_options for more details.
3849 The PPC32 SVR4 ABI uses IEEE double extended for long double, if 128-bit
3850 long double support is enabled. These values are returned in memory.
3852 int_size_in_bytes returns -1 for variable size objects, which go in
3853 memory always. The cast to unsigned makes -1 > 8. */
3856 rs6000_return_in_memory (tree type, tree fntype ATTRIBUTE_UNUSED)
3858 /* In the darwin64 abi, try to use registers for larger structs
3860 if (rs6000_darwin64_abi
3861 && TREE_CODE (type) == RECORD_TYPE
3862 && int_size_in_bytes (type) > 0)
3864 CUMULATIVE_ARGS valcum;
3868 valcum.fregno = FP_ARG_MIN_REG;
3869 valcum.vregno = ALTIVEC_ARG_MIN_REG;
3870 /* Do a trial code generation as if this were going to be passed
3871 as an argument; if any part goes in memory, we return NULL. */
3872 valret = rs6000_darwin64_record_arg (&valcum, type, 1, true);
3875 /* Otherwise fall through to more conventional ABI rules. */
3878 if (AGGREGATE_TYPE_P (type)
3879 && (TARGET_AIX_STRUCT_RET
3880 || (unsigned HOST_WIDE_INT) int_size_in_bytes (type) > 8))
3883 /* Allow -maltivec -mabi=no-altivec without warning. Altivec vector
3884 modes only exist for GCC vector types if -maltivec. */
3885 if (TARGET_32BIT && !TARGET_ALTIVEC_ABI
3886 && ALTIVEC_VECTOR_MODE (TYPE_MODE (type)))
3889 /* Return synthetic vectors in memory. */
3890 if (TREE_CODE (type) == VECTOR_TYPE
3891 && int_size_in_bytes (type) > (TARGET_ALTIVEC_ABI ? 16 : 8))
3893 static bool warned_for_return_big_vectors = false;
3894 if (!warned_for_return_big_vectors)
3896 warning (0, "GCC vector returned by reference: "
3897 "non-standard ABI extension with no compatibility guarantee");
3898 warned_for_return_big_vectors = true;
3903 if (DEFAULT_ABI == ABI_V4 && TYPE_MODE (type) == TFmode)
3909 /* Initialize a variable CUM of type CUMULATIVE_ARGS
3910 for a call to a function whose data type is FNTYPE.
3911 For a library call, FNTYPE is 0.
3913 For incoming args we set the number of arguments in the prototype large
3914 so we never return a PARALLEL. */
3917 init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype,
3918 rtx libname ATTRIBUTE_UNUSED, int incoming,
3919 int libcall, int n_named_args)
3921 static CUMULATIVE_ARGS zero_cumulative;
3923 *cum = zero_cumulative;
3925 cum->fregno = FP_ARG_MIN_REG;
3926 cum->vregno = ALTIVEC_ARG_MIN_REG;
3927 cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
3928 cum->call_cookie = ((DEFAULT_ABI == ABI_V4 && libcall)
3929 ? CALL_LIBCALL : CALL_NORMAL);
3930 cum->sysv_gregno = GP_ARG_MIN_REG;
3931 cum->stdarg = fntype
3932 && (TYPE_ARG_TYPES (fntype) != 0
3933 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
3934 != void_type_node));
3936 cum->nargs_prototype = 0;
3937 if (incoming || cum->prototype)
3938 cum->nargs_prototype = n_named_args;
3940 /* Check for a longcall attribute. */
3941 if ((!fntype && rs6000_default_long_calls)
3943 && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype))
3944 && !lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype))))
3945 cum->call_cookie |= CALL_LONG;
3947 if (TARGET_DEBUG_ARG)
3949 fprintf (stderr, "\ninit_cumulative_args:");
3952 tree ret_type = TREE_TYPE (fntype);
3953 fprintf (stderr, " ret code = %s,",
3954 tree_code_name[ (int)TREE_CODE (ret_type) ]);
3957 if (cum->call_cookie & CALL_LONG)
3958 fprintf (stderr, " longcall,");
3960 fprintf (stderr, " proto = %d, nargs = %d\n",
3961 cum->prototype, cum->nargs_prototype);
3966 && TARGET_ALTIVEC_ABI
3967 && ALTIVEC_VECTOR_MODE (TYPE_MODE (TREE_TYPE (fntype))))
3969 error ("Cannot return value in vector register because"
3970 " altivec instructions are disabled, use -maltivec"
3971 " to enable them.");
3975 /* Return true if TYPE must be passed on the stack and not in registers. */
3978 rs6000_must_pass_in_stack (enum machine_mode mode, tree type)
3980 if (DEFAULT_ABI == ABI_AIX || TARGET_64BIT)
3981 return must_pass_in_stack_var_size (mode, type);
3983 return must_pass_in_stack_var_size_or_pad (mode, type);
3986 /* If defined, a C expression which determines whether, and in which
3987 direction, to pad out an argument with extra space. The value
3988 should be of type `enum direction': either `upward' to pad above
3989 the argument, `downward' to pad below, or `none' to inhibit
3992 For the AIX ABI structs are always stored left shifted in their
3996 function_arg_padding (enum machine_mode mode, tree type)
3998 #ifndef AGGREGATE_PADDING_FIXED
3999 #define AGGREGATE_PADDING_FIXED 0
4001 #ifndef AGGREGATES_PAD_UPWARD_ALWAYS
4002 #define AGGREGATES_PAD_UPWARD_ALWAYS 0
4005 if (!AGGREGATE_PADDING_FIXED)
4007 /* GCC used to pass structures of the same size as integer types as
4008 if they were in fact integers, ignoring FUNCTION_ARG_PADDING.
4009 i.e. Structures of size 1 or 2 (or 4 when TARGET_64BIT) were
4010 passed padded downward, except that -mstrict-align further
4011 muddied the water in that multi-component structures of 2 and 4
4012 bytes in size were passed padded upward.
4014 The following arranges for best compatibility with previous
4015 versions of gcc, but removes the -mstrict-align dependency. */
4016 if (BYTES_BIG_ENDIAN)
4018 HOST_WIDE_INT size = 0;
4020 if (mode == BLKmode)
4022 if (type && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
4023 size = int_size_in_bytes (type);
4026 size = GET_MODE_SIZE (mode);
4028 if (size == 1 || size == 2 || size == 4)
4034 if (AGGREGATES_PAD_UPWARD_ALWAYS)
4036 if (type != 0 && AGGREGATE_TYPE_P (type))
4040 /* Fall back to the default. */
4041 return DEFAULT_FUNCTION_ARG_PADDING (mode, type);
4044 /* If defined, a C expression that gives the alignment boundary, in bits,
4045 of an argument with the specified mode and type. If it is not defined,
4046 PARM_BOUNDARY is used for all arguments.
4048 V.4 wants long longs to be double word aligned.
4049 Doubleword align SPE vectors.
4050 Quadword align Altivec vectors.
4051 Quadword align large synthetic vector types. */
4054 function_arg_boundary (enum machine_mode mode, tree type)
4056 if (DEFAULT_ABI == ABI_V4 && GET_MODE_SIZE (mode) == 8)
4058 else if (SPE_VECTOR_MODE (mode)
4059 || (type && TREE_CODE (type) == VECTOR_TYPE
4060 && int_size_in_bytes (type) >= 8
4061 && int_size_in_bytes (type) < 16))
4063 else if (ALTIVEC_VECTOR_MODE (mode)
4064 || (type && TREE_CODE (type) == VECTOR_TYPE
4065 && int_size_in_bytes (type) >= 16))
4067 else if (rs6000_darwin64_abi && mode == BLKmode
4068 && type && TYPE_ALIGN (type) > 64)
4071 return PARM_BOUNDARY;
4074 /* For a function parm of MODE and TYPE, return the starting word in
4075 the parameter area. NWORDS of the parameter area are already used. */
4078 rs6000_parm_start (enum machine_mode mode, tree type, unsigned int nwords)
4081 unsigned int parm_offset;
4083 align = function_arg_boundary (mode, type) / PARM_BOUNDARY - 1;
4084 parm_offset = DEFAULT_ABI == ABI_V4 ? 2 : 6;
4085 return nwords + (-(parm_offset + nwords) & align);
4088 /* Compute the size (in words) of a function argument. */
4090 static unsigned long
4091 rs6000_arg_size (enum machine_mode mode, tree type)
4095 if (mode != BLKmode)
4096 size = GET_MODE_SIZE (mode);
4098 size = int_size_in_bytes (type);
4101 return (size + 3) >> 2;
4103 return (size + 7) >> 3;
4106 /* Use this to flush pending int fields. */
4109 rs6000_darwin64_record_arg_advance_flush (CUMULATIVE_ARGS *cum,
4110 HOST_WIDE_INT bitpos)
4112 unsigned int startbit, endbit;
4113 int intregs, intoffset;
4114 enum machine_mode mode;
4116 if (cum->intoffset == -1)
4119 intoffset = cum->intoffset;
4120 cum->intoffset = -1;
4122 if (intoffset % BITS_PER_WORD != 0)
4124 mode = mode_for_size (BITS_PER_WORD - intoffset % BITS_PER_WORD,
4126 if (mode == BLKmode)
4128 /* We couldn't find an appropriate mode, which happens,
4129 e.g., in packed structs when there are 3 bytes to load.
4130 Back intoffset back to the beginning of the word in this
4132 intoffset = intoffset & -BITS_PER_WORD;
4136 startbit = intoffset & -BITS_PER_WORD;
4137 endbit = (bitpos + BITS_PER_WORD - 1) & -BITS_PER_WORD;
4138 intregs = (endbit - startbit) / BITS_PER_WORD;
4139 cum->words += intregs;
4142 /* The darwin64 ABI calls for us to recurse down through structs,
4143 looking for elements passed in registers. Unfortunately, we have
4144 to track int register count here also because of misalignments
4145 in powerpc alignment mode. */
4148 rs6000_darwin64_record_arg_advance_recurse (CUMULATIVE_ARGS *cum,
4150 HOST_WIDE_INT startbitpos)
4154 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
4155 if (TREE_CODE (f) == FIELD_DECL)
4157 HOST_WIDE_INT bitpos = startbitpos;
4158 tree ftype = TREE_TYPE (f);
4159 enum machine_mode mode = TYPE_MODE (ftype);
4161 if (DECL_SIZE (f) != 0
4162 && host_integerp (bit_position (f), 1))
4163 bitpos += int_bit_position (f);
4165 /* ??? FIXME: else assume zero offset. */
4167 if (TREE_CODE (ftype) == RECORD_TYPE)
4168 rs6000_darwin64_record_arg_advance_recurse (cum, ftype, bitpos);
4169 else if (USE_FP_FOR_ARG_P (cum, mode, ftype))
4171 rs6000_darwin64_record_arg_advance_flush (cum, bitpos);
4172 cum->fregno += (GET_MODE_SIZE (mode) + 7) >> 3;
4173 cum->words += (GET_MODE_SIZE (mode) + 7) >> 3;
4175 else if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, 1))
4177 rs6000_darwin64_record_arg_advance_flush (cum, bitpos);
4181 else if (cum->intoffset == -1)
4182 cum->intoffset = bitpos;
4186 /* Update the data in CUM to advance over an argument
4187 of mode MODE and data type TYPE.
4188 (TYPE is null for libcalls where that information may not be available.)
4190 Note that for args passed by reference, function_arg will be called
4191 with MODE and TYPE set to that of the pointer to the arg, not the arg
4195 function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
4196 tree type, int named, int depth)
4200 /* Only tick off an argument if we're not recursing. */
4202 cum->nargs_prototype--;
4204 if (TARGET_ALTIVEC_ABI
4205 && (ALTIVEC_VECTOR_MODE (mode)
4206 || (type && TREE_CODE (type) == VECTOR_TYPE
4207 && int_size_in_bytes (type) == 16)))
4211 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named))
4214 if (!TARGET_ALTIVEC)
4215 error ("Cannot pass argument in vector register because"
4216 " altivec instructions are disabled, use -maltivec"
4217 " to enable them.");
4219 /* PowerPC64 Linux and AIX allocate GPRs for a vector argument
4220 even if it is going to be passed in a vector register.
4221 Darwin does the same for variable-argument functions. */
4222 if ((DEFAULT_ABI == ABI_AIX && TARGET_64BIT)
4223 || (cum->stdarg && DEFAULT_ABI != ABI_V4))
4233 /* Vector parameters must be 16-byte aligned. This places
4234 them at 2 mod 4 in terms of words in 32-bit mode, since
4235 the parameter save area starts at offset 24 from the
4236 stack. In 64-bit mode, they just have to start on an
4237 even word, since the parameter save area is 16-byte
4238 aligned. Space for GPRs is reserved even if the argument
4239 will be passed in memory. */
4241 align = (2 - cum->words) & 3;
4243 align = cum->words & 1;
4244 cum->words += align + rs6000_arg_size (mode, type);
4246 if (TARGET_DEBUG_ARG)
4248 fprintf (stderr, "function_adv: words = %2d, align=%d, ",
4250 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s\n",
4251 cum->nargs_prototype, cum->prototype,
4252 GET_MODE_NAME (mode));
4256 else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode)
4258 && cum->sysv_gregno <= GP_ARG_MAX_REG)
4261 else if (rs6000_darwin64_abi
4263 && TREE_CODE (type) == RECORD_TYPE
4264 && (size = int_size_in_bytes (type)) > 0)
4266 /* Variable sized types have size == -1 and are
4267 treated as if consisting entirely of ints.
4268 Pad to 16 byte boundary if needed. */
4269 if (TYPE_ALIGN (type) >= 2 * BITS_PER_WORD
4270 && (cum->words % 2) != 0)
4272 /* For varargs, we can just go up by the size of the struct. */
4274 cum->words += (size + 7) / 8;
4277 /* It is tempting to say int register count just goes up by
4278 sizeof(type)/8, but this is wrong in a case such as
4279 { int; double; int; } [powerpc alignment]. We have to
4280 grovel through the fields for these too. */
4282 rs6000_darwin64_record_arg_advance_recurse (cum, type, 0);
4283 rs6000_darwin64_record_arg_advance_flush (cum,
4284 size * BITS_PER_UNIT);
4287 else if (DEFAULT_ABI == ABI_V4)
4289 if (TARGET_HARD_FLOAT && TARGET_FPRS
4290 && (mode == SFmode || mode == DFmode))
4292 if (cum->fregno <= FP_ARG_V4_MAX_REG)
4297 cum->words += cum->words & 1;
4298 cum->words += rs6000_arg_size (mode, type);
4303 int n_words = rs6000_arg_size (mode, type);
4304 int gregno = cum->sysv_gregno;
4306 /* Long long and SPE vectors are put in (r3,r4), (r5,r6),
4307 (r7,r8) or (r9,r10). As does any other 2 word item such
4308 as complex int due to a historical mistake. */
4310 gregno += (1 - gregno) & 1;
4312 /* Multi-reg args are not split between registers and stack. */
4313 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
4315 /* Long long and SPE vectors are aligned on the stack.
4316 So are other 2 word items such as complex int due to
4317 a historical mistake. */
4319 cum->words += cum->words & 1;
4320 cum->words += n_words;
4323 /* Note: continuing to accumulate gregno past when we've started
4324 spilling to the stack indicates the fact that we've started
4325 spilling to the stack to expand_builtin_saveregs. */
4326 cum->sysv_gregno = gregno + n_words;
4329 if (TARGET_DEBUG_ARG)
4331 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
4332 cum->words, cum->fregno);
4333 fprintf (stderr, "gregno = %2d, nargs = %4d, proto = %d, ",
4334 cum->sysv_gregno, cum->nargs_prototype, cum->prototype);
4335 fprintf (stderr, "mode = %4s, named = %d\n",
4336 GET_MODE_NAME (mode), named);
4341 int n_words = rs6000_arg_size (mode, type);
4342 int start_words = cum->words;
4343 int align_words = rs6000_parm_start (mode, type, start_words);
4345 cum->words = align_words + n_words;
4347 if (GET_MODE_CLASS (mode) == MODE_FLOAT
4348 && TARGET_HARD_FLOAT && TARGET_FPRS)
4349 cum->fregno += (GET_MODE_SIZE (mode) + 7) >> 3;
4351 if (TARGET_DEBUG_ARG)
4353 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
4354 cum->words, cum->fregno);
4355 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s, ",
4356 cum->nargs_prototype, cum->prototype, GET_MODE_NAME (mode));
4357 fprintf (stderr, "named = %d, align = %d, depth = %d\n",
4358 named, align_words - start_words, depth);
4364 spe_build_register_parallel (enum machine_mode mode, int gregno)
4371 r1 = gen_rtx_REG (DImode, gregno);
4372 r1 = gen_rtx_EXPR_LIST (VOIDmode, r1, const0_rtx);
4373 return gen_rtx_PARALLEL (mode, gen_rtvec (1, r1));
4376 r1 = gen_rtx_REG (DImode, gregno);
4377 r1 = gen_rtx_EXPR_LIST (VOIDmode, r1, const0_rtx);
4378 r3 = gen_rtx_REG (DImode, gregno + 2);
4379 r3 = gen_rtx_EXPR_LIST (VOIDmode, r3, GEN_INT (8));
4380 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r3));
4387 /* Determine where to put a SIMD argument on the SPE. */
4389 rs6000_spe_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
4392 int gregno = cum->sysv_gregno;
4394 /* On E500 v2, double arithmetic is done on the full 64-bit GPR, but
4395 are passed and returned in a pair of GPRs for ABI compatibility. */
4396 if (TARGET_E500_DOUBLE && (mode == DFmode || mode == DCmode))
4398 int n_words = rs6000_arg_size (mode, type);
4400 /* Doubles go in an odd/even register pair (r5/r6, etc). */
4402 gregno += (1 - gregno) & 1;
4404 /* Multi-reg args are not split between registers and stack. */
4405 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
4408 return spe_build_register_parallel (mode, gregno);
4412 int n_words = rs6000_arg_size (mode, type);
4414 /* SPE vectors are put in odd registers. */
4415 if (n_words == 2 && (gregno & 1) == 0)
4418 if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
4421 enum machine_mode m = SImode;
4423 r1 = gen_rtx_REG (m, gregno);
4424 r1 = gen_rtx_EXPR_LIST (m, r1, const0_rtx);
4425 r2 = gen_rtx_REG (m, gregno + 1);
4426 r2 = gen_rtx_EXPR_LIST (m, r2, GEN_INT (4));
4427 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
4434 if (gregno <= GP_ARG_MAX_REG)
4435 return gen_rtx_REG (mode, gregno);
4441 /* A subroutine of rs6000_darwin64_record_arg. Assign the bits of the
4442 structure between cum->intoffset and bitpos to integer registers. */
4445 rs6000_darwin64_record_arg_flush (CUMULATIVE_ARGS *cum,
4446 HOST_WIDE_INT bitpos, rtx rvec[], int *k)
4448 enum machine_mode mode;
4450 unsigned int startbit, endbit;
4451 int this_regno, intregs, intoffset;
4454 if (cum->intoffset == -1)
4457 intoffset = cum->intoffset;
4458 cum->intoffset = -1;
4460 /* If this is the trailing part of a word, try to only load that
4461 much into the register. Otherwise load the whole register. Note
4462 that in the latter case we may pick up unwanted bits. It's not a
4463 problem at the moment but may wish to revisit. */
4465 if (intoffset % BITS_PER_WORD != 0)
4467 mode = mode_for_size (BITS_PER_WORD - intoffset % BITS_PER_WORD,
4469 if (mode == BLKmode)
4471 /* We couldn't find an appropriate mode, which happens,
4472 e.g., in packed structs when there are 3 bytes to load.
4473 Back intoffset back to the beginning of the word in this
4475 intoffset = intoffset & -BITS_PER_WORD;
4482 startbit = intoffset & -BITS_PER_WORD;
4483 endbit = (bitpos + BITS_PER_WORD - 1) & -BITS_PER_WORD;
4484 intregs = (endbit - startbit) / BITS_PER_WORD;
4485 this_regno = cum->words + intoffset / BITS_PER_WORD;
4487 if (intregs > 0 && intregs > GP_ARG_NUM_REG - this_regno)
4490 intregs = MIN (intregs, GP_ARG_NUM_REG - this_regno);
4494 intoffset /= BITS_PER_UNIT;
4497 regno = GP_ARG_MIN_REG + this_regno;
4498 reg = gen_rtx_REG (mode, regno);
4500 gen_rtx_EXPR_LIST (VOIDmode, reg, GEN_INT (intoffset));
4503 intoffset = (intoffset | (UNITS_PER_WORD-1)) + 1;
4507 while (intregs > 0);
4510 /* Recursive workhorse for the following. */
4513 rs6000_darwin64_record_arg_recurse (CUMULATIVE_ARGS *cum, tree type,
4514 HOST_WIDE_INT startbitpos, rtx rvec[],
4519 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
4520 if (TREE_CODE (f) == FIELD_DECL)
4522 HOST_WIDE_INT bitpos = startbitpos;
4523 tree ftype = TREE_TYPE (f);
4524 enum machine_mode mode = TYPE_MODE (ftype);
4526 if (DECL_SIZE (f) != 0
4527 && host_integerp (bit_position (f), 1))
4528 bitpos += int_bit_position (f);
4530 /* ??? FIXME: else assume zero offset. */
4532 if (TREE_CODE (ftype) == RECORD_TYPE)
4533 rs6000_darwin64_record_arg_recurse (cum, ftype, bitpos, rvec, k);
4534 else if (cum->named && USE_FP_FOR_ARG_P (cum, mode, ftype))
4539 case SCmode: mode = SFmode; break;
4540 case DCmode: mode = DFmode; break;
4541 case TCmode: mode = TFmode; break;
4545 rs6000_darwin64_record_arg_flush (cum, bitpos, rvec, k);
4547 = gen_rtx_EXPR_LIST (VOIDmode,
4548 gen_rtx_REG (mode, cum->fregno++),
4549 GEN_INT (bitpos / BITS_PER_UNIT));
4553 else if (cum->named && USE_ALTIVEC_FOR_ARG_P (cum, mode, ftype, 1))
4555 rs6000_darwin64_record_arg_flush (cum, bitpos, rvec, k);
4557 = gen_rtx_EXPR_LIST (VOIDmode,
4558 gen_rtx_REG (mode, cum->vregno++),
4559 GEN_INT (bitpos / BITS_PER_UNIT));
4561 else if (cum->intoffset == -1)
4562 cum->intoffset = bitpos;
4566 /* For the darwin64 ABI, we want to construct a PARALLEL consisting of
4567 the register(s) to be used for each field and subfield of a struct
4568 being passed by value, along with the offset of where the
4569 register's value may be found in the block. FP fields go in FP
4570 register, vector fields go in vector registers, and everything
4571 else goes in int registers, packed as in memory.
4573 This code is also used for function return values. RETVAL indicates
4574 whether this is the case.
4576 Much of this is taken from the Sparc V9 port, which has a similar
4577 calling convention. */
4580 rs6000_darwin64_record_arg (CUMULATIVE_ARGS *orig_cum, tree type,
4581 int named, bool retval)
4583 rtx rvec[FIRST_PSEUDO_REGISTER];
4584 int k = 1, kbase = 1;
4585 HOST_WIDE_INT typesize = int_size_in_bytes (type);
4586 /* This is a copy; modifications are not visible to our caller. */
4587 CUMULATIVE_ARGS copy_cum = *orig_cum;
4588 CUMULATIVE_ARGS *cum = ©_cum;
4590 /* Pad to 16 byte boundary if needed. */
4591 if (!retval && TYPE_ALIGN (type) >= 2 * BITS_PER_WORD
4592 && (cum->words % 2) != 0)
4599 /* Put entries into rvec[] for individual FP and vector fields, and
4600 for the chunks of memory that go in int regs. Note we start at
4601 element 1; 0 is reserved for an indication of using memory, and
4602 may or may not be filled in below. */
4603 rs6000_darwin64_record_arg_recurse (cum, type, 0, rvec, &k);
4604 rs6000_darwin64_record_arg_flush (cum, typesize * BITS_PER_UNIT, rvec, &k);
4606 /* If any part of the struct went on the stack put all of it there.
4607 This hack is because the generic code for
4608 FUNCTION_ARG_PARTIAL_NREGS cannot handle cases where the register
4609 parts of the struct are not at the beginning. */
4613 return NULL_RTX; /* doesn't go in registers at all */
4615 rvec[0] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
4617 if (k > 1 || cum->use_stack)
4618 return gen_rtx_PARALLEL (BLKmode, gen_rtvec_v (k - kbase, &rvec[kbase]));
4623 /* Determine where to place an argument in 64-bit mode with 32-bit ABI. */
4626 rs6000_mixed_function_arg (enum machine_mode mode, tree type, int align_words)
4630 rtx rvec[GP_ARG_NUM_REG + 1];
4632 if (align_words >= GP_ARG_NUM_REG)
4635 n_units = rs6000_arg_size (mode, type);
4637 /* Optimize the simple case where the arg fits in one gpr, except in
4638 the case of BLKmode due to assign_parms assuming that registers are
4639 BITS_PER_WORD wide. */
4641 || (n_units == 1 && mode != BLKmode))
4642 return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
4645 if (align_words + n_units > GP_ARG_NUM_REG)
4646 /* Not all of the arg fits in gprs. Say that it goes in memory too,
4647 using a magic NULL_RTX component.
4648 FIXME: This is not strictly correct. Only some of the arg
4649 belongs in memory, not all of it. However, there isn't any way
4650 to do this currently, apart from building rtx descriptions for
4651 the pieces of memory we want stored. Due to bugs in the generic
4652 code we can't use the normal function_arg_partial_nregs scheme
4653 with the PARALLEL arg description we emit here.
4654 In any case, the code to store the whole arg to memory is often
4655 more efficient than code to store pieces, and we know that space
4656 is available in the right place for the whole arg. */
4657 /* FIXME: This should be fixed since the conversion to
4658 TARGET_ARG_PARTIAL_BYTES. */
4659 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
4664 rtx r = gen_rtx_REG (SImode, GP_ARG_MIN_REG + align_words);
4665 rtx off = GEN_INT (i++ * 4);
4666 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, off);
4668 while (++align_words < GP_ARG_NUM_REG && --n_units != 0);
4670 return gen_rtx_PARALLEL (mode, gen_rtvec_v (k, rvec));
4673 /* Determine where to put an argument to a function.
4674 Value is zero to push the argument on the stack,
4675 or a hard register in which to store the argument.
4677 MODE is the argument's machine mode.
4678 TYPE is the data type of the argument (as a tree).
4679 This is null for libcalls where that information may
4681 CUM is a variable of type CUMULATIVE_ARGS which gives info about
4682 the preceding args and about the function being called. It is
4683 not modified in this routine.
4684 NAMED is nonzero if this argument is a named parameter
4685 (otherwise it is an extra parameter matching an ellipsis).
4687 On RS/6000 the first eight words of non-FP are normally in registers
4688 and the rest are pushed. Under AIX, the first 13 FP args are in registers.
4689 Under V.4, the first 8 FP args are in registers.
4691 If this is floating-point and no prototype is specified, we use
4692 both an FP and integer register (or possibly FP reg and stack). Library
4693 functions (when CALL_LIBCALL is set) always have the proper types for args,
4694 so we can pass the FP value just in one register. emit_library_function
4695 doesn't support PARALLEL anyway.
4697 Note that for args passed by reference, function_arg will be called
4698 with MODE and TYPE set to that of the pointer to the arg, not the arg
4702 function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
4703 tree type, int named)
4705 enum rs6000_abi abi = DEFAULT_ABI;
4707 /* Return a marker to indicate whether CR1 needs to set or clear the
4708 bit that V.4 uses to say fp args were passed in registers.
4709 Assume that we don't need the marker for software floating point,
4710 or compiler generated library calls. */
4711 if (mode == VOIDmode)
4714 && cum->nargs_prototype < 0
4715 && (cum->call_cookie & CALL_LIBCALL) == 0
4716 && (cum->prototype || TARGET_NO_PROTOTYPE))
4718 /* For the SPE, we need to crxor CR6 always. */
4720 return GEN_INT (cum->call_cookie | CALL_V4_SET_FP_ARGS);
4721 else if (TARGET_HARD_FLOAT && TARGET_FPRS)
4722 return GEN_INT (cum->call_cookie
4723 | ((cum->fregno == FP_ARG_MIN_REG)
4724 ? CALL_V4_SET_FP_ARGS
4725 : CALL_V4_CLEAR_FP_ARGS));
4728 return GEN_INT (cum->call_cookie);
4731 if (rs6000_darwin64_abi && mode == BLKmode
4732 && TREE_CODE (type) == RECORD_TYPE)
4734 rtx rslt = rs6000_darwin64_record_arg (cum, type, named, false);
4735 if (rslt != NULL_RTX)
4737 /* Else fall through to usual handling. */
4740 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named))
4741 if (TARGET_64BIT && ! cum->prototype)
4743 /* Vector parameters get passed in vector register
4744 and also in GPRs or memory, in absence of prototype. */
4747 align_words = (cum->words + 1) & ~1;
4749 if (align_words >= GP_ARG_NUM_REG)
4755 slot = gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
4757 return gen_rtx_PARALLEL (mode,
4759 gen_rtx_EXPR_LIST (VOIDmode,
4761 gen_rtx_EXPR_LIST (VOIDmode,
4762 gen_rtx_REG (mode, cum->vregno),
4766 return gen_rtx_REG (mode, cum->vregno);
4767 else if (TARGET_ALTIVEC_ABI
4768 && (ALTIVEC_VECTOR_MODE (mode)
4769 || (type && TREE_CODE (type) == VECTOR_TYPE
4770 && int_size_in_bytes (type) == 16)))
4772 if (named || abi == ABI_V4)
4776 /* Vector parameters to varargs functions under AIX or Darwin
4777 get passed in memory and possibly also in GPRs. */
4778 int align, align_words, n_words;
4779 enum machine_mode part_mode;
4781 /* Vector parameters must be 16-byte aligned. This places them at
4782 2 mod 4 in terms of words in 32-bit mode, since the parameter
4783 save area starts at offset 24 from the stack. In 64-bit mode,
4784 they just have to start on an even word, since the parameter
4785 save area is 16-byte aligned. */
4787 align = (2 - cum->words) & 3;
4789 align = cum->words & 1;
4790 align_words = cum->words + align;
4792 /* Out of registers? Memory, then. */
4793 if (align_words >= GP_ARG_NUM_REG)
4796 if (TARGET_32BIT && TARGET_POWERPC64)
4797 return rs6000_mixed_function_arg (mode, type, align_words);
4799 /* The vector value goes in GPRs. Only the part of the
4800 value in GPRs is reported here. */
4802 n_words = rs6000_arg_size (mode, type);
4803 if (align_words + n_words > GP_ARG_NUM_REG)
4804 /* Fortunately, there are only two possibilities, the value
4805 is either wholly in GPRs or half in GPRs and half not. */
4808 return gen_rtx_REG (part_mode, GP_ARG_MIN_REG + align_words);
4811 else if (TARGET_SPE_ABI && TARGET_SPE
4812 && (SPE_VECTOR_MODE (mode)
4813 || (TARGET_E500_DOUBLE && (mode == DFmode
4814 || mode == DCmode))))
4815 return rs6000_spe_function_arg (cum, mode, type);
4817 else if (abi == ABI_V4)
4819 if (TARGET_HARD_FLOAT && TARGET_FPRS
4820 && (mode == SFmode || mode == DFmode))
4822 if (cum->fregno <= FP_ARG_V4_MAX_REG)
4823 return gen_rtx_REG (mode, cum->fregno);
4829 int n_words = rs6000_arg_size (mode, type);
4830 int gregno = cum->sysv_gregno;
4832 /* Long long and SPE vectors are put in (r3,r4), (r5,r6),
4833 (r7,r8) or (r9,r10). As does any other 2 word item such
4834 as complex int due to a historical mistake. */
4836 gregno += (1 - gregno) & 1;
4838 /* Multi-reg args are not split between registers and stack. */
4839 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
4842 if (TARGET_32BIT && TARGET_POWERPC64)
4843 return rs6000_mixed_function_arg (mode, type,
4844 gregno - GP_ARG_MIN_REG);
4845 return gen_rtx_REG (mode, gregno);
4850 int align_words = rs6000_parm_start (mode, type, cum->words);
4852 if (USE_FP_FOR_ARG_P (cum, mode, type))
4854 rtx rvec[GP_ARG_NUM_REG + 1];
4858 enum machine_mode fmode = mode;
4859 unsigned long n_fpreg = (GET_MODE_SIZE (mode) + 7) >> 3;
4861 if (cum->fregno + n_fpreg > FP_ARG_MAX_REG + 1)
4863 /* Currently, we only ever need one reg here because complex
4864 doubles are split. */
4865 gcc_assert (cum->fregno == FP_ARG_MAX_REG && fmode == TFmode);
4867 /* Long double split over regs and memory. */
4871 /* Do we also need to pass this arg in the parameter save
4874 && (cum->nargs_prototype <= 0
4875 || (DEFAULT_ABI == ABI_AIX
4877 && align_words >= GP_ARG_NUM_REG)));
4879 if (!needs_psave && mode == fmode)
4880 return gen_rtx_REG (fmode, cum->fregno);
4885 /* Describe the part that goes in gprs or the stack.
4886 This piece must come first, before the fprs. */
4887 if (align_words < GP_ARG_NUM_REG)
4889 unsigned long n_words = rs6000_arg_size (mode, type);
4891 if (align_words + n_words > GP_ARG_NUM_REG
4892 || (TARGET_32BIT && TARGET_POWERPC64))
4894 /* If this is partially on the stack, then we only
4895 include the portion actually in registers here. */
4896 enum machine_mode rmode = TARGET_32BIT ? SImode : DImode;
4899 if (align_words + n_words > GP_ARG_NUM_REG
4900 && (TARGET_32BIT && TARGET_POWERPC64))
4901 /* Not all of the arg fits in gprs. Say that it
4902 goes in memory too, using a magic NULL_RTX
4903 component. Also see comment in
4904 rs6000_mixed_function_arg for why the normal
4905 function_arg_partial_nregs scheme doesn't work
4907 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX,
4911 r = gen_rtx_REG (rmode,
4912 GP_ARG_MIN_REG + align_words);
4913 off = GEN_INT (i++ * GET_MODE_SIZE (rmode));
4914 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, off);
4916 while (++align_words < GP_ARG_NUM_REG && --n_words != 0);
4920 /* The whole arg fits in gprs. */
4921 r = gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
4922 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, const0_rtx);
4926 /* It's entirely in memory. */
4927 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
4930 /* Describe where this piece goes in the fprs. */
4931 r = gen_rtx_REG (fmode, cum->fregno);
4932 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, const0_rtx);
4934 return gen_rtx_PARALLEL (mode, gen_rtvec_v (k, rvec));
4936 else if (align_words < GP_ARG_NUM_REG)
4938 if (TARGET_32BIT && TARGET_POWERPC64)
4939 return rs6000_mixed_function_arg (mode, type, align_words);
4941 if (mode == BLKmode)
4944 return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
4951 /* For an arg passed partly in registers and partly in memory, this is
4952 the number of bytes passed in registers. For args passed entirely in
4953 registers or entirely in memory, zero. When an arg is described by a
4954 PARALLEL, perhaps using more than one register type, this function
4955 returns the number of bytes used by the first element of the PARALLEL. */
4958 rs6000_arg_partial_bytes (CUMULATIVE_ARGS *cum, enum machine_mode mode,
4959 tree type, bool named)
4964 if (DEFAULT_ABI == ABI_V4)
4967 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named)
4968 && cum->nargs_prototype >= 0)
4971 /* In this complicated case we just disable the partial_nregs code. */
4972 if (rs6000_darwin64_abi && mode == BLKmode
4973 && TREE_CODE (type) == RECORD_TYPE
4974 && int_size_in_bytes (type) > 0)
4977 align_words = rs6000_parm_start (mode, type, cum->words);
4979 if (USE_FP_FOR_ARG_P (cum, mode, type)
4980 /* If we are passing this arg in the fixed parameter save area
4981 (gprs or memory) as well as fprs, then this function should
4982 return the number of bytes passed in the parameter save area
4983 rather than bytes passed in fprs. */
4985 && (cum->nargs_prototype <= 0
4986 || (DEFAULT_ABI == ABI_AIX
4988 && align_words >= GP_ARG_NUM_REG))))
4990 if (cum->fregno + ((GET_MODE_SIZE (mode) + 7) >> 3) > FP_ARG_MAX_REG + 1)
4991 ret = (FP_ARG_MAX_REG + 1 - cum->fregno) * 8;
4992 else if (cum->nargs_prototype >= 0)
4996 if (align_words < GP_ARG_NUM_REG
4997 && GP_ARG_NUM_REG < align_words + rs6000_arg_size (mode, type))
4998 ret = (GP_ARG_NUM_REG - align_words) * (TARGET_32BIT ? 4 : 8);
5000 if (ret != 0 && TARGET_DEBUG_ARG)
5001 fprintf (stderr, "rs6000_arg_partial_bytes: %d\n", ret);
5006 /* A C expression that indicates when an argument must be passed by
5007 reference. If nonzero for an argument, a copy of that argument is
5008 made in memory and a pointer to the argument is passed instead of
5009 the argument itself. The pointer is passed in whatever way is
5010 appropriate for passing a pointer to that type.
5012 Under V.4, aggregates and long double are passed by reference.
5014 As an extension to all 32-bit ABIs, AltiVec vectors are passed by
5015 reference unless the AltiVec vector extension ABI is in force.
5017 As an extension to all ABIs, variable sized types are passed by
5021 rs6000_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
5022 enum machine_mode mode, tree type,
5023 bool named ATTRIBUTE_UNUSED)
5025 if (DEFAULT_ABI == ABI_V4 && mode == TFmode)
5027 if (TARGET_DEBUG_ARG)
5028 fprintf (stderr, "function_arg_pass_by_reference: V4 long double\n");
5035 if (DEFAULT_ABI == ABI_V4 && AGGREGATE_TYPE_P (type))
5037 if (TARGET_DEBUG_ARG)
5038 fprintf (stderr, "function_arg_pass_by_reference: V4 aggregate\n");
5042 if (int_size_in_bytes (type) < 0)
5044 if (TARGET_DEBUG_ARG)
5045 fprintf (stderr, "function_arg_pass_by_reference: variable size\n");
5049 /* Allow -maltivec -mabi=no-altivec without warning. Altivec vector
5050 modes only exist for GCC vector types if -maltivec. */
5051 if (TARGET_32BIT && !TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
5053 if (TARGET_DEBUG_ARG)
5054 fprintf (stderr, "function_arg_pass_by_reference: AltiVec\n");
5058 /* Pass synthetic vectors in memory. */
5059 if (TREE_CODE (type) == VECTOR_TYPE
5060 && int_size_in_bytes (type) > (TARGET_ALTIVEC_ABI ? 16 : 8))
5062 static bool warned_for_pass_big_vectors = false;
5063 if (TARGET_DEBUG_ARG)
5064 fprintf (stderr, "function_arg_pass_by_reference: synthetic vector\n");
5065 if (!warned_for_pass_big_vectors)
5067 warning (0, "GCC vector passed by reference: "
5068 "non-standard ABI extension with no compatibility guarantee");
5069 warned_for_pass_big_vectors = true;
5078 rs6000_move_block_from_reg (int regno, rtx x, int nregs)
5081 enum machine_mode reg_mode = TARGET_32BIT ? SImode : DImode;
5086 for (i = 0; i < nregs; i++)
5088 rtx tem = adjust_address_nv (x, reg_mode, i * GET_MODE_SIZE (reg_mode));
5089 if (reload_completed)
5091 if (! strict_memory_address_p (reg_mode, XEXP (tem, 0)))
5094 tem = simplify_gen_subreg (reg_mode, x, BLKmode,
5095 i * GET_MODE_SIZE (reg_mode));
5098 tem = replace_equiv_address (tem, XEXP (tem, 0));
5102 emit_move_insn (tem, gen_rtx_REG (reg_mode, regno + i));
5106 /* Perform any needed actions needed for a function that is receiving a
5107 variable number of arguments.
5111 MODE and TYPE are the mode and type of the current parameter.
5113 PRETEND_SIZE is a variable that should be set to the amount of stack
5114 that must be pushed by the prolog to pretend that our caller pushed
5117 Normally, this macro will push all remaining incoming registers on the
5118 stack and set PRETEND_SIZE to the length of the registers pushed. */
5121 setup_incoming_varargs (CUMULATIVE_ARGS *cum, enum machine_mode mode,
5122 tree type, int *pretend_size ATTRIBUTE_UNUSED,
5125 CUMULATIVE_ARGS next_cum;
5126 int reg_size = TARGET_32BIT ? 4 : 8;
5127 rtx save_area = NULL_RTX, mem;
5128 int first_reg_offset, set;
5130 /* Skip the last named argument. */
5132 function_arg_advance (&next_cum, mode, type, 1, 0);
5134 if (DEFAULT_ABI == ABI_V4)
5137 save_area = plus_constant (virtual_stack_vars_rtx,
5138 - RS6000_VARARGS_SIZE);
5140 first_reg_offset = next_cum.sysv_gregno - GP_ARG_MIN_REG;
5144 first_reg_offset = next_cum.words;
5145 save_area = virtual_incoming_args_rtx;
5147 if (targetm.calls.must_pass_in_stack (mode, type))
5148 first_reg_offset += rs6000_arg_size (TYPE_MODE (type), type);
5151 set = get_varargs_alias_set ();
5152 if (! no_rtl && first_reg_offset < GP_ARG_NUM_REG
5153 && cfun->va_list_gpr_size)
5155 int nregs = GP_ARG_NUM_REG - first_reg_offset;
5157 if (va_list_gpr_counter_field)
5159 /* V4 va_list_gpr_size counts number of registers needed. */
5160 if (nregs > cfun->va_list_gpr_size)
5161 nregs = cfun->va_list_gpr_size;
5165 /* char * va_list instead counts number of bytes needed. */
5166 if (nregs > cfun->va_list_gpr_size / reg_size)
5167 nregs = cfun->va_list_gpr_size / reg_size;
5170 mem = gen_rtx_MEM (BLKmode,
5171 plus_constant (save_area,
5172 first_reg_offset * reg_size)),
5173 set_mem_alias_set (mem, set);
5174 set_mem_align (mem, BITS_PER_WORD);
5176 rs6000_move_block_from_reg (GP_ARG_MIN_REG + first_reg_offset, mem,
5180 /* Save FP registers if needed. */
5181 if (DEFAULT_ABI == ABI_V4
5182 && TARGET_HARD_FLOAT && TARGET_FPRS
5184 && next_cum.fregno <= FP_ARG_V4_MAX_REG
5185 && cfun->va_list_fpr_size)
5187 int fregno = next_cum.fregno, nregs;
5188 rtx cr1 = gen_rtx_REG (CCmode, CR1_REGNO);
5189 rtx lab = gen_label_rtx ();
5190 int off = (GP_ARG_NUM_REG * reg_size) + ((fregno - FP_ARG_MIN_REG) * 8);
5193 (gen_rtx_SET (VOIDmode,
5195 gen_rtx_IF_THEN_ELSE (VOIDmode,
5196 gen_rtx_NE (VOIDmode, cr1,
5198 gen_rtx_LABEL_REF (VOIDmode, lab),
5202 fregno <= FP_ARG_V4_MAX_REG && nregs < cfun->va_list_fpr_size;
5203 fregno++, off += 8, nregs++)
5205 mem = gen_rtx_MEM (DFmode, plus_constant (save_area, off));
5206 set_mem_alias_set (mem, set);
5207 set_mem_align (mem, GET_MODE_ALIGNMENT (DFmode));
5208 emit_move_insn (mem, gen_rtx_REG (DFmode, fregno));
5215 /* Create the va_list data type. */
5218 rs6000_build_builtin_va_list (void)
5220 tree f_gpr, f_fpr, f_res, f_ovf, f_sav, record, type_decl;
5222 /* For AIX, prefer 'char *' because that's what the system
5223 header files like. */
5224 if (DEFAULT_ABI != ABI_V4)
5225 return build_pointer_type (char_type_node);
5227 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
5228 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
5230 f_gpr = build_decl (FIELD_DECL, get_identifier ("gpr"),
5231 unsigned_char_type_node);
5232 f_fpr = build_decl (FIELD_DECL, get_identifier ("fpr"),
5233 unsigned_char_type_node);
5234 /* Give the two bytes of padding a name, so that -Wpadded won't warn on
5236 f_res = build_decl (FIELD_DECL, get_identifier ("reserved"),
5237 short_unsigned_type_node);
5238 f_ovf = build_decl (FIELD_DECL, get_identifier ("overflow_arg_area"),
5240 f_sav = build_decl (FIELD_DECL, get_identifier ("reg_save_area"),
5243 va_list_gpr_counter_field = f_gpr;
5244 va_list_fpr_counter_field = f_fpr;
5246 DECL_FIELD_CONTEXT (f_gpr) = record;
5247 DECL_FIELD_CONTEXT (f_fpr) = record;
5248 DECL_FIELD_CONTEXT (f_res) = record;
5249 DECL_FIELD_CONTEXT (f_ovf) = record;
5250 DECL_FIELD_CONTEXT (f_sav) = record;
5252 TREE_CHAIN (record) = type_decl;
5253 TYPE_NAME (record) = type_decl;
5254 TYPE_FIELDS (record) = f_gpr;
5255 TREE_CHAIN (f_gpr) = f_fpr;
5256 TREE_CHAIN (f_fpr) = f_res;
5257 TREE_CHAIN (f_res) = f_ovf;
5258 TREE_CHAIN (f_ovf) = f_sav;
5260 layout_type (record);
5262 /* The correct type is an array type of one element. */
5263 return build_array_type (record, build_index_type (size_zero_node));
5266 /* Implement va_start. */
5269 rs6000_va_start (tree valist, rtx nextarg)
5271 HOST_WIDE_INT words, n_gpr, n_fpr;
5272 tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
5273 tree gpr, fpr, ovf, sav, t;
5275 /* Only SVR4 needs something special. */
5276 if (DEFAULT_ABI != ABI_V4)
5278 std_expand_builtin_va_start (valist, nextarg);
5282 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
5283 f_fpr = TREE_CHAIN (f_gpr);
5284 f_res = TREE_CHAIN (f_fpr);
5285 f_ovf = TREE_CHAIN (f_res);
5286 f_sav = TREE_CHAIN (f_ovf);
5288 valist = build_va_arg_indirect_ref (valist);
5289 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
5290 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr, NULL_TREE);
5291 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf, NULL_TREE);
5292 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav, NULL_TREE);
5294 /* Count number of gp and fp argument registers used. */
5295 words = current_function_args_info.words;
5296 n_gpr = MIN (current_function_args_info.sysv_gregno - GP_ARG_MIN_REG,
5298 n_fpr = MIN (current_function_args_info.fregno - FP_ARG_MIN_REG,
5301 if (TARGET_DEBUG_ARG)
5302 fprintf (stderr, "va_start: words = "HOST_WIDE_INT_PRINT_DEC", n_gpr = "
5303 HOST_WIDE_INT_PRINT_DEC", n_fpr = "HOST_WIDE_INT_PRINT_DEC"\n",
5304 words, n_gpr, n_fpr);
5306 if (cfun->va_list_gpr_size)
5308 t = build (MODIFY_EXPR, TREE_TYPE (gpr), gpr,
5309 build_int_cst (NULL_TREE, n_gpr));
5310 TREE_SIDE_EFFECTS (t) = 1;
5311 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5314 if (cfun->va_list_fpr_size)
5316 t = build (MODIFY_EXPR, TREE_TYPE (fpr), fpr,
5317 build_int_cst (NULL_TREE, n_fpr));
5318 TREE_SIDE_EFFECTS (t) = 1;
5319 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5322 /* Find the overflow area. */
5323 t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
5325 t = build (PLUS_EXPR, TREE_TYPE (ovf), t,
5326 build_int_cst (NULL_TREE, words * UNITS_PER_WORD));
5327 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
5328 TREE_SIDE_EFFECTS (t) = 1;
5329 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5331 /* If there were no va_arg invocations, don't set up the register
5333 if (!cfun->va_list_gpr_size
5334 && !cfun->va_list_fpr_size
5335 && n_gpr < GP_ARG_NUM_REG
5336 && n_fpr < FP_ARG_V4_MAX_REG)
5339 /* Find the register save area. */
5340 t = make_tree (TREE_TYPE (sav), virtual_stack_vars_rtx);
5341 t = build (PLUS_EXPR, TREE_TYPE (sav), t,
5342 build_int_cst (NULL_TREE, -RS6000_VARARGS_SIZE));
5343 t = build (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
5344 TREE_SIDE_EFFECTS (t) = 1;
5345 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5348 /* Implement va_arg. */
5351 rs6000_gimplify_va_arg (tree valist, tree type, tree *pre_p, tree *post_p)
5353 tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
5354 tree gpr, fpr, ovf, sav, reg, t, u;
5355 int size, rsize, n_reg, sav_ofs, sav_scale;
5356 tree lab_false, lab_over, addr;
5358 tree ptrtype = build_pointer_type (type);
5360 if (pass_by_reference (NULL, TYPE_MODE (type), type, false))
5362 t = rs6000_gimplify_va_arg (valist, ptrtype, pre_p, post_p);
5363 return build_va_arg_indirect_ref (t);
5366 if (DEFAULT_ABI != ABI_V4)
5368 if (targetm.calls.split_complex_arg && TREE_CODE (type) == COMPLEX_TYPE)
5370 tree elem_type = TREE_TYPE (type);
5371 enum machine_mode elem_mode = TYPE_MODE (elem_type);
5372 int elem_size = GET_MODE_SIZE (elem_mode);
5374 if (elem_size < UNITS_PER_WORD)
5376 tree real_part, imag_part;
5377 tree post = NULL_TREE;
5379 real_part = rs6000_gimplify_va_arg (valist, elem_type, pre_p,
5381 /* Copy the value into a temporary, lest the formal temporary
5382 be reused out from under us. */
5383 real_part = get_initialized_tmp_var (real_part, pre_p, &post);
5384 append_to_statement_list (post, pre_p);
5386 imag_part = rs6000_gimplify_va_arg (valist, elem_type, pre_p,
5389 return build (COMPLEX_EXPR, type, real_part, imag_part);
5393 return std_gimplify_va_arg_expr (valist, type, pre_p, post_p);
5396 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
5397 f_fpr = TREE_CHAIN (f_gpr);
5398 f_res = TREE_CHAIN (f_fpr);
5399 f_ovf = TREE_CHAIN (f_res);
5400 f_sav = TREE_CHAIN (f_ovf);
5402 valist = build_va_arg_indirect_ref (valist);
5403 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
5404 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr, NULL_TREE);
5405 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf, NULL_TREE);
5406 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav, NULL_TREE);
5408 size = int_size_in_bytes (type);
5409 rsize = (size + 3) / 4;
5412 if (TARGET_HARD_FLOAT && TARGET_FPRS
5413 && (TYPE_MODE (type) == SFmode || TYPE_MODE (type) == DFmode))
5415 /* FP args go in FP registers, if present. */
5420 if (TYPE_MODE (type) == DFmode)
5425 /* Otherwise into GP registers. */
5434 /* Pull the value out of the saved registers.... */
5437 addr = create_tmp_var (ptr_type_node, "addr");
5438 DECL_POINTER_ALIAS_SET (addr) = get_varargs_alias_set ();
5440 /* AltiVec vectors never go in registers when -mabi=altivec. */
5441 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (TYPE_MODE (type)))
5445 lab_false = create_artificial_label ();
5446 lab_over = create_artificial_label ();
5448 /* Long long and SPE vectors are aligned in the registers.
5449 As are any other 2 gpr item such as complex int due to a
5450 historical mistake. */
5454 u = build2 (BIT_AND_EXPR, TREE_TYPE (reg), reg,
5455 size_int (n_reg - 1));
5456 u = build2 (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg, u);
5459 t = fold_convert (TREE_TYPE (reg), size_int (8 - n_reg + 1));
5460 t = build2 (GE_EXPR, boolean_type_node, u, t);
5461 u = build1 (GOTO_EXPR, void_type_node, lab_false);
5462 t = build3 (COND_EXPR, void_type_node, t, u, NULL_TREE);
5463 gimplify_and_add (t, pre_p);
5467 t = build2 (PLUS_EXPR, ptr_type_node, sav, size_int (sav_ofs));
5469 u = build2 (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg, size_int (n_reg));
5470 u = build1 (CONVERT_EXPR, integer_type_node, u);
5471 u = build2 (MULT_EXPR, integer_type_node, u, size_int (sav_scale));
5472 t = build2 (PLUS_EXPR, ptr_type_node, t, u);
5474 t = build2 (MODIFY_EXPR, void_type_node, addr, t);
5475 gimplify_and_add (t, pre_p);
5477 t = build1 (GOTO_EXPR, void_type_node, lab_over);
5478 gimplify_and_add (t, pre_p);
5480 t = build1 (LABEL_EXPR, void_type_node, lab_false);
5481 append_to_statement_list (t, pre_p);
5485 /* Ensure that we don't find any more args in regs.
5486 Alignment has taken care of the n_reg == 2 case. */
5487 t = build (MODIFY_EXPR, TREE_TYPE (reg), reg, size_int (8));
5488 gimplify_and_add (t, pre_p);
5492 /* ... otherwise out of the overflow area. */
5494 /* Care for on-stack alignment if needed. */
5498 t = build2 (PLUS_EXPR, TREE_TYPE (t), t, size_int (align - 1));
5499 t = build2 (BIT_AND_EXPR, TREE_TYPE (t), t,
5500 build_int_cst (NULL_TREE, -align));
5502 gimplify_expr (&t, pre_p, NULL, is_gimple_val, fb_rvalue);
5504 u = build2 (MODIFY_EXPR, void_type_node, addr, t);
5505 gimplify_and_add (u, pre_p);
5507 t = build2 (PLUS_EXPR, TREE_TYPE (t), t, size_int (size));
5508 t = build2 (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
5509 gimplify_and_add (t, pre_p);
5513 t = build1 (LABEL_EXPR, void_type_node, lab_over);
5514 append_to_statement_list (t, pre_p);
5517 addr = fold_convert (ptrtype, addr);
5518 return build_va_arg_indirect_ref (addr);
5524 def_builtin (int mask, const char *name, tree type, int code)
5526 if (mask & target_flags)
5528 if (rs6000_builtin_decls[code])
5531 rs6000_builtin_decls[code] =
5532 lang_hooks.builtin_function (name, type, code, BUILT_IN_MD,
5537 /* Simple ternary operations: VECd = foo (VECa, VECb, VECc). */
5539 static const struct builtin_description bdesc_3arg[] =
5541 { MASK_ALTIVEC, CODE_FOR_altivec_vmaddfp, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP },
5542 { MASK_ALTIVEC, CODE_FOR_altivec_vmhaddshs, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS },
5543 { MASK_ALTIVEC, CODE_FOR_altivec_vmhraddshs, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS },
5544 { MASK_ALTIVEC, CODE_FOR_altivec_vmladduhm, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM},
5545 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumubm, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM },
5546 { MASK_ALTIVEC, CODE_FOR_altivec_vmsummbm, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM },
5547 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhm, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM },
5548 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshm, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM },
5549 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhs, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS },
5550 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshs, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS },
5551 { MASK_ALTIVEC, CODE_FOR_altivec_vnmsubfp, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP },
5552 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v4sf, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF },
5553 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v4si, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI },
5554 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v8hi, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI },
5555 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v16qi, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI },
5556 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v4sf, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF },
5557 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v4si, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI },
5558 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v8hi, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI },
5559 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v16qi, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI },
5560 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v16qi, "__builtin_altivec_vsldoi_16qi", ALTIVEC_BUILTIN_VSLDOI_16QI },
5561 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v8hi, "__builtin_altivec_vsldoi_8hi", ALTIVEC_BUILTIN_VSLDOI_8HI },
5562 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v4si, "__builtin_altivec_vsldoi_4si", ALTIVEC_BUILTIN_VSLDOI_4SI },
5563 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v4sf, "__builtin_altivec_vsldoi_4sf", ALTIVEC_BUILTIN_VSLDOI_4SF },
5565 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_madd", ALTIVEC_BUILTIN_VEC_MADD },
5566 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_madds", ALTIVEC_BUILTIN_VEC_MADDS },
5567 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mladd", ALTIVEC_BUILTIN_VEC_MLADD },
5568 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mradds", ALTIVEC_BUILTIN_VEC_MRADDS },
5569 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_msum", ALTIVEC_BUILTIN_VEC_MSUM },
5570 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumshm", ALTIVEC_BUILTIN_VEC_VMSUMSHM },
5571 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumuhm", ALTIVEC_BUILTIN_VEC_VMSUMUHM },
5572 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsummbm", ALTIVEC_BUILTIN_VEC_VMSUMMBM },
5573 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumubm", ALTIVEC_BUILTIN_VEC_VMSUMUBM },
5574 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_msums", ALTIVEC_BUILTIN_VEC_MSUMS },
5575 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumshs", ALTIVEC_BUILTIN_VEC_VMSUMSHS },
5576 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumuhs", ALTIVEC_BUILTIN_VEC_VMSUMUHS },
5577 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_nmsub", ALTIVEC_BUILTIN_VEC_NMSUB },
5578 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_perm", ALTIVEC_BUILTIN_VEC_PERM },
5579 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sel", ALTIVEC_BUILTIN_VEC_SEL },
5582 /* DST operations: void foo (void *, const int, const char). */
5584 static const struct builtin_description bdesc_dst[] =
5586 { MASK_ALTIVEC, CODE_FOR_altivec_dst, "__builtin_altivec_dst", ALTIVEC_BUILTIN_DST },
5587 { MASK_ALTIVEC, CODE_FOR_altivec_dstt, "__builtin_altivec_dstt", ALTIVEC_BUILTIN_DSTT },
5588 { MASK_ALTIVEC, CODE_FOR_altivec_dstst, "__builtin_altivec_dstst", ALTIVEC_BUILTIN_DSTST },
5589 { MASK_ALTIVEC, CODE_FOR_altivec_dststt, "__builtin_altivec_dststt", ALTIVEC_BUILTIN_DSTSTT },
5591 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dst", ALTIVEC_BUILTIN_VEC_DST },
5592 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dstt", ALTIVEC_BUILTIN_VEC_DSTT },
5593 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dstst", ALTIVEC_BUILTIN_VEC_DSTST },
5594 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dststt", ALTIVEC_BUILTIN_VEC_DSTSTT }
5597 /* Simple binary operations: VECc = foo (VECa, VECb). */
5599 static struct builtin_description bdesc_2arg[] =
5601 { MASK_ALTIVEC, CODE_FOR_addv16qi3, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM },
5602 { MASK_ALTIVEC, CODE_FOR_addv8hi3, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM },
5603 { MASK_ALTIVEC, CODE_FOR_addv4si3, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM },
5604 { MASK_ALTIVEC, CODE_FOR_addv4sf3, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP },
5605 { MASK_ALTIVEC, CODE_FOR_altivec_vaddcuw, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW },
5606 { MASK_ALTIVEC, CODE_FOR_altivec_vaddubs, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS },
5607 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsbs, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS },
5608 { MASK_ALTIVEC, CODE_FOR_altivec_vadduhs, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS },
5609 { MASK_ALTIVEC, CODE_FOR_altivec_vaddshs, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS },
5610 { MASK_ALTIVEC, CODE_FOR_altivec_vadduws, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS },
5611 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsws, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS },
5612 { MASK_ALTIVEC, CODE_FOR_andv4si3, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND },
5613 { MASK_ALTIVEC, CODE_FOR_andcv4si3, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC },
5614 { MASK_ALTIVEC, CODE_FOR_altivec_vavgub, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB },
5615 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsb, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB },
5616 { MASK_ALTIVEC, CODE_FOR_altivec_vavguh, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH },
5617 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsh, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH },
5618 { MASK_ALTIVEC, CODE_FOR_altivec_vavguw, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW },
5619 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsw, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW },
5620 { MASK_ALTIVEC, CODE_FOR_altivec_vcfux, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX },
5621 { MASK_ALTIVEC, CODE_FOR_altivec_vcfsx, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX },
5622 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpbfp, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP },
5623 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequb, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB },
5624 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequh, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH },
5625 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequw, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW },
5626 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpeqfp, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP },
5627 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgefp, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP },
5628 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtub, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB },
5629 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsb, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB },
5630 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuh, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH },
5631 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsh, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH },
5632 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuw, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW },
5633 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsw, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW },
5634 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtfp, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP },
5635 { MASK_ALTIVEC, CODE_FOR_altivec_vctsxs, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS },
5636 { MASK_ALTIVEC, CODE_FOR_altivec_vctuxs, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS },
5637 { MASK_ALTIVEC, CODE_FOR_umaxv16qi3, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB },
5638 { MASK_ALTIVEC, CODE_FOR_smaxv16qi3, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB },
5639 { MASK_ALTIVEC, CODE_FOR_umaxv8hi3, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH },
5640 { MASK_ALTIVEC, CODE_FOR_smaxv8hi3, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH },
5641 { MASK_ALTIVEC, CODE_FOR_umaxv4si3, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW },
5642 { MASK_ALTIVEC, CODE_FOR_smaxv4si3, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW },
5643 { MASK_ALTIVEC, CODE_FOR_smaxv4sf3, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP },
5644 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghb, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB },
5645 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghh, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH },
5646 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghw, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW },
5647 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglb, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB },
5648 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglh, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH },
5649 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglw, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW },
5650 { MASK_ALTIVEC, CODE_FOR_uminv16qi3, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB },
5651 { MASK_ALTIVEC, CODE_FOR_sminv16qi3, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB },
5652 { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH },
5653 { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH },
5654 { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW },
5655 { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW },
5656 { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP },
5657 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleub, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB },
5658 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesb, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB },
5659 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleuh, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH },
5660 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesh, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH },
5661 { MASK_ALTIVEC, CODE_FOR_altivec_vmuloub, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB },
5662 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosb, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB },
5663 { MASK_ALTIVEC, CODE_FOR_altivec_vmulouh, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH },
5664 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosh, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH },
5665 { MASK_ALTIVEC, CODE_FOR_altivec_norv4si3, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR },
5666 { MASK_ALTIVEC, CODE_FOR_iorv4si3, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR },
5667 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhum, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM },
5668 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwum, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM },
5669 { MASK_ALTIVEC, CODE_FOR_altivec_vpkpx, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX },
5670 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhss, "__builtin_altivec_vpkuhss", ALTIVEC_BUILTIN_VPKUHSS },
5671 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshss, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS },
5672 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwss, "__builtin_altivec_vpkuwss", ALTIVEC_BUILTIN_VPKUWSS },
5673 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswss, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS },
5674 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhus, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS },
5675 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshus, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS },
5676 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwus, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS },
5677 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswus, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS },
5678 { MASK_ALTIVEC, CODE_FOR_altivec_vrlb, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB },
5679 { MASK_ALTIVEC, CODE_FOR_altivec_vrlh, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH },
5680 { MASK_ALTIVEC, CODE_FOR_altivec_vrlw, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW },
5681 { MASK_ALTIVEC, CODE_FOR_altivec_vslb, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB },
5682 { MASK_ALTIVEC, CODE_FOR_altivec_vslh, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH },
5683 { MASK_ALTIVEC, CODE_FOR_altivec_vslw, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW },
5684 { MASK_ALTIVEC, CODE_FOR_altivec_vsl, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL },
5685 { MASK_ALTIVEC, CODE_FOR_altivec_vslo, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO },
5686 { MASK_ALTIVEC, CODE_FOR_altivec_vspltb, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB },
5687 { MASK_ALTIVEC, CODE_FOR_altivec_vsplth, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH },
5688 { MASK_ALTIVEC, CODE_FOR_altivec_vspltw, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW },
5689 { MASK_ALTIVEC, CODE_FOR_lshrv16qi3, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB },
5690 { MASK_ALTIVEC, CODE_FOR_lshrv8hi3, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH },
5691 { MASK_ALTIVEC, CODE_FOR_lshrv4si3, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW },
5692 { MASK_ALTIVEC, CODE_FOR_ashrv16qi3, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB },
5693 { MASK_ALTIVEC, CODE_FOR_ashrv8hi3, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH },
5694 { MASK_ALTIVEC, CODE_FOR_ashrv4si3, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW },
5695 { MASK_ALTIVEC, CODE_FOR_altivec_vsr, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR },
5696 { MASK_ALTIVEC, CODE_FOR_altivec_vsro, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO },
5697 { MASK_ALTIVEC, CODE_FOR_subv16qi3, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM },
5698 { MASK_ALTIVEC, CODE_FOR_subv8hi3, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM },
5699 { MASK_ALTIVEC, CODE_FOR_subv4si3, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM },
5700 { MASK_ALTIVEC, CODE_FOR_subv4sf3, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP },
5701 { MASK_ALTIVEC, CODE_FOR_altivec_vsubcuw, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW },
5702 { MASK_ALTIVEC, CODE_FOR_altivec_vsububs, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS },
5703 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsbs, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS },
5704 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuhs, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS },
5705 { MASK_ALTIVEC, CODE_FOR_altivec_vsubshs, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS },
5706 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuws, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS },
5707 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsws, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS },
5708 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4ubs, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS },
5709 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4sbs, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS },
5710 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4shs, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS },
5711 { MASK_ALTIVEC, CODE_FOR_altivec_vsum2sws, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS },
5712 { MASK_ALTIVEC, CODE_FOR_altivec_vsumsws, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS },
5713 { MASK_ALTIVEC, CODE_FOR_xorv4si3, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR },
5715 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_add", ALTIVEC_BUILTIN_VEC_ADD },
5716 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddfp", ALTIVEC_BUILTIN_VEC_VADDFP },
5717 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduwm", ALTIVEC_BUILTIN_VEC_VADDUWM },
5718 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduhm", ALTIVEC_BUILTIN_VEC_VADDUHM },
5719 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddubm", ALTIVEC_BUILTIN_VEC_VADDUBM },
5720 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_addc", ALTIVEC_BUILTIN_VEC_ADDC },
5721 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_adds", ALTIVEC_BUILTIN_VEC_ADDS },
5722 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddsws", ALTIVEC_BUILTIN_VEC_VADDSWS },
5723 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduws", ALTIVEC_BUILTIN_VEC_VADDUWS },
5724 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddshs", ALTIVEC_BUILTIN_VEC_VADDSHS },
5725 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduhs", ALTIVEC_BUILTIN_VEC_VADDUHS },
5726 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddsbs", ALTIVEC_BUILTIN_VEC_VADDSBS },
5727 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddubs", ALTIVEC_BUILTIN_VEC_VADDUBS },
5728 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_and", ALTIVEC_BUILTIN_VEC_AND },
5729 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_andc", ALTIVEC_BUILTIN_VEC_ANDC },
5730 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_avg", ALTIVEC_BUILTIN_VEC_AVG },
5731 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgsw", ALTIVEC_BUILTIN_VEC_VAVGSW },
5732 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavguw", ALTIVEC_BUILTIN_VEC_VAVGUW },
5733 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgsh", ALTIVEC_BUILTIN_VEC_VAVGSH },
5734 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavguh", ALTIVEC_BUILTIN_VEC_VAVGUH },
5735 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgsb", ALTIVEC_BUILTIN_VEC_VAVGSB },
5736 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgub", ALTIVEC_BUILTIN_VEC_VAVGUB },
5737 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpb", ALTIVEC_BUILTIN_VEC_CMPB },
5738 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpeq", ALTIVEC_BUILTIN_VEC_CMPEQ },
5739 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpeqfp", ALTIVEC_BUILTIN_VEC_VCMPEQFP },
5740 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpequw", ALTIVEC_BUILTIN_VEC_VCMPEQUW },
5741 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpequh", ALTIVEC_BUILTIN_VEC_VCMPEQUH },
5742 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpequb", ALTIVEC_BUILTIN_VEC_VCMPEQUB },
5743 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpge", ALTIVEC_BUILTIN_VEC_CMPGE },
5744 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpgt", ALTIVEC_BUILTIN_VEC_CMPGT },
5745 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtfp", ALTIVEC_BUILTIN_VEC_VCMPGTFP },
5746 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtsw", ALTIVEC_BUILTIN_VEC_VCMPGTSW },
5747 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtuw", ALTIVEC_BUILTIN_VEC_VCMPGTUW },
5748 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtsh", ALTIVEC_BUILTIN_VEC_VCMPGTSH },
5749 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtuh", ALTIVEC_BUILTIN_VEC_VCMPGTUH },
5750 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtsb", ALTIVEC_BUILTIN_VEC_VCMPGTSB },
5751 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtub", ALTIVEC_BUILTIN_VEC_VCMPGTUB },
5752 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmple", ALTIVEC_BUILTIN_VEC_CMPLE },
5753 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmplt", ALTIVEC_BUILTIN_VEC_CMPLT },
5754 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_max", ALTIVEC_BUILTIN_VEC_MAX },
5755 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxfp", ALTIVEC_BUILTIN_VEC_VMAXFP },
5756 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxsw", ALTIVEC_BUILTIN_VEC_VMAXSW },
5757 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxuw", ALTIVEC_BUILTIN_VEC_VMAXUW },
5758 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxsh", ALTIVEC_BUILTIN_VEC_VMAXSH },
5759 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxuh", ALTIVEC_BUILTIN_VEC_VMAXUH },
5760 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxsb", ALTIVEC_BUILTIN_VEC_VMAXSB },
5761 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxub", ALTIVEC_BUILTIN_VEC_VMAXUB },
5762 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mergeh", ALTIVEC_BUILTIN_VEC_MERGEH },
5763 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrghw", ALTIVEC_BUILTIN_VEC_VMRGHW },
5764 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrghh", ALTIVEC_BUILTIN_VEC_VMRGHH },
5765 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrghb", ALTIVEC_BUILTIN_VEC_VMRGHB },
5766 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mergel", ALTIVEC_BUILTIN_VEC_MERGEL },
5767 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrglw", ALTIVEC_BUILTIN_VEC_VMRGLW },
5768 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrglh", ALTIVEC_BUILTIN_VEC_VMRGLH },
5769 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrglb", ALTIVEC_BUILTIN_VEC_VMRGLB },
5770 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_min", ALTIVEC_BUILTIN_VEC_MIN },
5771 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminfp", ALTIVEC_BUILTIN_VEC_VMINFP },
5772 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminsw", ALTIVEC_BUILTIN_VEC_VMINSW },
5773 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminuw", ALTIVEC_BUILTIN_VEC_VMINUW },
5774 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminsh", ALTIVEC_BUILTIN_VEC_VMINSH },
5775 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminuh", ALTIVEC_BUILTIN_VEC_VMINUH },
5776 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminsb", ALTIVEC_BUILTIN_VEC_VMINSB },
5777 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminub", ALTIVEC_BUILTIN_VEC_VMINUB },
5778 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mule", ALTIVEC_BUILTIN_VEC_MULE },
5779 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmuleub", ALTIVEC_BUILTIN_VEC_VMULEUB },
5780 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulesb", ALTIVEC_BUILTIN_VEC_VMULESB },
5781 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmuleuh", ALTIVEC_BUILTIN_VEC_VMULEUH },
5782 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulesh", ALTIVEC_BUILTIN_VEC_VMULESH },
5783 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mulo", ALTIVEC_BUILTIN_VEC_MULO },
5784 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulosh", ALTIVEC_BUILTIN_VEC_VMULOSH },
5785 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulouh", ALTIVEC_BUILTIN_VEC_VMULOUH },
5786 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulosb", ALTIVEC_BUILTIN_VEC_VMULOSB },
5787 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmuloub", ALTIVEC_BUILTIN_VEC_VMULOUB },
5788 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_nor", ALTIVEC_BUILTIN_VEC_NOR },
5789 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_or", ALTIVEC_BUILTIN_VEC_OR },
5790 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_pack", ALTIVEC_BUILTIN_VEC_PACK },
5791 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuwum", ALTIVEC_BUILTIN_VEC_VPKUWUM },
5792 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuhum", ALTIVEC_BUILTIN_VEC_VPKUHUM },
5793 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_packpx", ALTIVEC_BUILTIN_VEC_PACKPX },
5794 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_packs", ALTIVEC_BUILTIN_VEC_PACKS },
5795 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkswss", ALTIVEC_BUILTIN_VEC_VPKSWSS },
5796 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuwus", ALTIVEC_BUILTIN_VEC_VPKUWUS },
5797 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkshss", ALTIVEC_BUILTIN_VEC_VPKSHSS },
5798 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuhus", ALTIVEC_BUILTIN_VEC_VPKUHUS },
5799 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_packsu", ALTIVEC_BUILTIN_VEC_PACKSU },
5800 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkswus", ALTIVEC_BUILTIN_VEC_VPKSWUS },
5801 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkshus", ALTIVEC_BUILTIN_VEC_VPKSHUS },
5802 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_rl", ALTIVEC_BUILTIN_VEC_RL },
5803 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vrlw", ALTIVEC_BUILTIN_VEC_VRLW },
5804 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vrlh", ALTIVEC_BUILTIN_VEC_VRLH },
5805 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vrlb", ALTIVEC_BUILTIN_VEC_VRLB },
5806 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sl", ALTIVEC_BUILTIN_VEC_SL },
5807 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vslw", ALTIVEC_BUILTIN_VEC_VSLW },
5808 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vslh", ALTIVEC_BUILTIN_VEC_VSLH },
5809 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vslb", ALTIVEC_BUILTIN_VEC_VSLB },
5810 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sll", ALTIVEC_BUILTIN_VEC_SLL },
5811 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_slo", ALTIVEC_BUILTIN_VEC_SLO },
5812 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sr", ALTIVEC_BUILTIN_VEC_SR },
5813 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrw", ALTIVEC_BUILTIN_VEC_VSRW },
5814 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrh", ALTIVEC_BUILTIN_VEC_VSRH },
5815 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrb", ALTIVEC_BUILTIN_VEC_VSRB },
5816 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sra", ALTIVEC_BUILTIN_VEC_SRA },
5817 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsraw", ALTIVEC_BUILTIN_VEC_VSRAW },
5818 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrah", ALTIVEC_BUILTIN_VEC_VSRAH },
5819 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrab", ALTIVEC_BUILTIN_VEC_VSRAB },
5820 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_srl", ALTIVEC_BUILTIN_VEC_SRL },
5821 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sro", ALTIVEC_BUILTIN_VEC_SRO },
5822 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sub", ALTIVEC_BUILTIN_VEC_SUB },
5823 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubfp", ALTIVEC_BUILTIN_VEC_VSUBFP },
5824 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuwm", ALTIVEC_BUILTIN_VEC_VSUBUWM },
5825 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuhm", ALTIVEC_BUILTIN_VEC_VSUBUHM },
5826 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsububm", ALTIVEC_BUILTIN_VEC_VSUBUBM },
5827 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_subc", ALTIVEC_BUILTIN_VEC_SUBC },
5828 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_subs", ALTIVEC_BUILTIN_VEC_SUBS },
5829 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubsws", ALTIVEC_BUILTIN_VEC_VSUBSWS },
5830 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuws", ALTIVEC_BUILTIN_VEC_VSUBUWS },
5831 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubshs", ALTIVEC_BUILTIN_VEC_VSUBSHS },
5832 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuhs", ALTIVEC_BUILTIN_VEC_VSUBUHS },
5833 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubsbs", ALTIVEC_BUILTIN_VEC_VSUBSBS },
5834 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsububs", ALTIVEC_BUILTIN_VEC_VSUBUBS },
5835 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sum4s", ALTIVEC_BUILTIN_VEC_SUM4S },
5836 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsum4shs", ALTIVEC_BUILTIN_VEC_VSUM4SHS },
5837 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsum4sbs", ALTIVEC_BUILTIN_VEC_VSUM4SBS },
5838 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsum4ubs", ALTIVEC_BUILTIN_VEC_VSUM4UBS },
5839 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sum2s", ALTIVEC_BUILTIN_VEC_SUM2S },
5840 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sums", ALTIVEC_BUILTIN_VEC_SUMS },
5841 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_xor", ALTIVEC_BUILTIN_VEC_XOR },
5843 /* Place holder, leave as first spe builtin. */
5844 { 0, CODE_FOR_spe_evaddw, "__builtin_spe_evaddw", SPE_BUILTIN_EVADDW },
5845 { 0, CODE_FOR_spe_evand, "__builtin_spe_evand", SPE_BUILTIN_EVAND },
5846 { 0, CODE_FOR_spe_evandc, "__builtin_spe_evandc", SPE_BUILTIN_EVANDC },
5847 { 0, CODE_FOR_spe_evdivws, "__builtin_spe_evdivws", SPE_BUILTIN_EVDIVWS },
5848 { 0, CODE_FOR_spe_evdivwu, "__builtin_spe_evdivwu", SPE_BUILTIN_EVDIVWU },
5849 { 0, CODE_FOR_spe_eveqv, "__builtin_spe_eveqv", SPE_BUILTIN_EVEQV },
5850 { 0, CODE_FOR_spe_evfsadd, "__builtin_spe_evfsadd", SPE_BUILTIN_EVFSADD },
5851 { 0, CODE_FOR_spe_evfsdiv, "__builtin_spe_evfsdiv", SPE_BUILTIN_EVFSDIV },
5852 { 0, CODE_FOR_spe_evfsmul, "__builtin_spe_evfsmul", SPE_BUILTIN_EVFSMUL },
5853 { 0, CODE_FOR_spe_evfssub, "__builtin_spe_evfssub", SPE_BUILTIN_EVFSSUB },
5854 { 0, CODE_FOR_spe_evmergehi, "__builtin_spe_evmergehi", SPE_BUILTIN_EVMERGEHI },
5855 { 0, CODE_FOR_spe_evmergehilo, "__builtin_spe_evmergehilo", SPE_BUILTIN_EVMERGEHILO },
5856 { 0, CODE_FOR_spe_evmergelo, "__builtin_spe_evmergelo", SPE_BUILTIN_EVMERGELO },
5857 { 0, CODE_FOR_spe_evmergelohi, "__builtin_spe_evmergelohi", SPE_BUILTIN_EVMERGELOHI },
5858 { 0, CODE_FOR_spe_evmhegsmfaa, "__builtin_spe_evmhegsmfaa", SPE_BUILTIN_EVMHEGSMFAA },
5859 { 0, CODE_FOR_spe_evmhegsmfan, "__builtin_spe_evmhegsmfan", SPE_BUILTIN_EVMHEGSMFAN },
5860 { 0, CODE_FOR_spe_evmhegsmiaa, "__builtin_spe_evmhegsmiaa", SPE_BUILTIN_EVMHEGSMIAA },
5861 { 0, CODE_FOR_spe_evmhegsmian, "__builtin_spe_evmhegsmian", SPE_BUILTIN_EVMHEGSMIAN },
5862 { 0, CODE_FOR_spe_evmhegumiaa, "__builtin_spe_evmhegumiaa", SPE_BUILTIN_EVMHEGUMIAA },
5863 { 0, CODE_FOR_spe_evmhegumian, "__builtin_spe_evmhegumian", SPE_BUILTIN_EVMHEGUMIAN },
5864 { 0, CODE_FOR_spe_evmhesmf, "__builtin_spe_evmhesmf", SPE_BUILTIN_EVMHESMF },
5865 { 0, CODE_FOR_spe_evmhesmfa, "__builtin_spe_evmhesmfa", SPE_BUILTIN_EVMHESMFA },
5866 { 0, CODE_FOR_spe_evmhesmfaaw, "__builtin_spe_evmhesmfaaw", SPE_BUILTIN_EVMHESMFAAW },
5867 { 0, CODE_FOR_spe_evmhesmfanw, "__builtin_spe_evmhesmfanw", SPE_BUILTIN_EVMHESMFANW },
5868 { 0, CODE_FOR_spe_evmhesmi, "__builtin_spe_evmhesmi", SPE_BUILTIN_EVMHESMI },
5869 { 0, CODE_FOR_spe_evmhesmia, "__builtin_spe_evmhesmia", SPE_BUILTIN_EVMHESMIA },
5870 { 0, CODE_FOR_spe_evmhesmiaaw, "__builtin_spe_evmhesmiaaw", SPE_BUILTIN_EVMHESMIAAW },
5871 { 0, CODE_FOR_spe_evmhesmianw, "__builtin_spe_evmhesmianw", SPE_BUILTIN_EVMHESMIANW },
5872 { 0, CODE_FOR_spe_evmhessf, "__builtin_spe_evmhessf", SPE_BUILTIN_EVMHESSF },
5873 { 0, CODE_FOR_spe_evmhessfa, "__builtin_spe_evmhessfa", SPE_BUILTIN_EVMHESSFA },
5874 { 0, CODE_FOR_spe_evmhessfaaw, "__builtin_spe_evmhessfaaw", SPE_BUILTIN_EVMHESSFAAW },
5875 { 0, CODE_FOR_spe_evmhessfanw, "__builtin_spe_evmhessfanw", SPE_BUILTIN_EVMHESSFANW },
5876 { 0, CODE_FOR_spe_evmhessiaaw, "__builtin_spe_evmhessiaaw", SPE_BUILTIN_EVMHESSIAAW },
5877 { 0, CODE_FOR_spe_evmhessianw, "__builtin_spe_evmhessianw", SPE_BUILTIN_EVMHESSIANW },
5878 { 0, CODE_FOR_spe_evmheumi, "__builtin_spe_evmheumi", SPE_BUILTIN_EVMHEUMI },
5879 { 0, CODE_FOR_spe_evmheumia, "__builtin_spe_evmheumia", SPE_BUILTIN_EVMHEUMIA },
5880 { 0, CODE_FOR_spe_evmheumiaaw, "__builtin_spe_evmheumiaaw", SPE_BUILTIN_EVMHEUMIAAW },
5881 { 0, CODE_FOR_spe_evmheumianw, "__builtin_spe_evmheumianw", SPE_BUILTIN_EVMHEUMIANW },
5882 { 0, CODE_FOR_spe_evmheusiaaw, "__builtin_spe_evmheusiaaw", SPE_BUILTIN_EVMHEUSIAAW },
5883 { 0, CODE_FOR_spe_evmheusianw, "__builtin_spe_evmheusianw", SPE_BUILTIN_EVMHEUSIANW },
5884 { 0, CODE_FOR_spe_evmhogsmfaa, "__builtin_spe_evmhogsmfaa", SPE_BUILTIN_EVMHOGSMFAA },
5885 { 0, CODE_FOR_spe_evmhogsmfan, "__builtin_spe_evmhogsmfan", SPE_BUILTIN_EVMHOGSMFAN },
5886 { 0, CODE_FOR_spe_evmhogsmiaa, "__builtin_spe_evmhogsmiaa", SPE_BUILTIN_EVMHOGSMIAA },
5887 { 0, CODE_FOR_spe_evmhogsmian, "__builtin_spe_evmhogsmian", SPE_BUILTIN_EVMHOGSMIAN },
5888 { 0, CODE_FOR_spe_evmhogumiaa, "__builtin_spe_evmhogumiaa", SPE_BUILTIN_EVMHOGUMIAA },
5889 { 0, CODE_FOR_spe_evmhogumian, "__builtin_spe_evmhogumian", SPE_BUILTIN_EVMHOGUMIAN },
5890 { 0, CODE_FOR_spe_evmhosmf, "__builtin_spe_evmhosmf", SPE_BUILTIN_EVMHOSMF },
5891 { 0, CODE_FOR_spe_evmhosmfa, "__builtin_spe_evmhosmfa", SPE_BUILTIN_EVMHOSMFA },
5892 { 0, CODE_FOR_spe_evmhosmfaaw, "__builtin_spe_evmhosmfaaw", SPE_BUILTIN_EVMHOSMFAAW },
5893 { 0, CODE_FOR_spe_evmhosmfanw, "__builtin_spe_evmhosmfanw", SPE_BUILTIN_EVMHOSMFANW },
5894 { 0, CODE_FOR_spe_evmhosmi, "__builtin_spe_evmhosmi", SPE_BUILTIN_EVMHOSMI },
5895 { 0, CODE_FOR_spe_evmhosmia, "__builtin_spe_evmhosmia", SPE_BUILTIN_EVMHOSMIA },
5896 { 0, CODE_FOR_spe_evmhosmiaaw, "__builtin_spe_evmhosmiaaw", SPE_BUILTIN_EVMHOSMIAAW },
5897 { 0, CODE_FOR_spe_evmhosmianw, "__builtin_spe_evmhosmianw", SPE_BUILTIN_EVMHOSMIANW },
5898 { 0, CODE_FOR_spe_evmhossf, "__builtin_spe_evmhossf", SPE_BUILTIN_EVMHOSSF },
5899 { 0, CODE_FOR_spe_evmhossfa, "__builtin_spe_evmhossfa", SPE_BUILTIN_EVMHOSSFA },
5900 { 0, CODE_FOR_spe_evmhossfaaw, "__builtin_spe_evmhossfaaw", SPE_BUILTIN_EVMHOSSFAAW },
5901 { 0, CODE_FOR_spe_evmhossfanw, "__builtin_spe_evmhossfanw", SPE_BUILTIN_EVMHOSSFANW },
5902 { 0, CODE_FOR_spe_evmhossiaaw, "__builtin_spe_evmhossiaaw", SPE_BUILTIN_EVMHOSSIAAW },
5903 { 0, CODE_FOR_spe_evmhossianw, "__builtin_spe_evmhossianw", SPE_BUILTIN_EVMHOSSIANW },
5904 { 0, CODE_FOR_spe_evmhoumi, "__builtin_spe_evmhoumi", SPE_BUILTIN_EVMHOUMI },
5905 { 0, CODE_FOR_spe_evmhoumia, "__builtin_spe_evmhoumia", SPE_BUILTIN_EVMHOUMIA },
5906 { 0, CODE_FOR_spe_evmhoumiaaw, "__builtin_spe_evmhoumiaaw", SPE_BUILTIN_EVMHOUMIAAW },
5907 { 0, CODE_FOR_spe_evmhoumianw, "__builtin_spe_evmhoumianw", SPE_BUILTIN_EVMHOUMIANW },
5908 { 0, CODE_FOR_spe_evmhousiaaw, "__builtin_spe_evmhousiaaw", SPE_BUILTIN_EVMHOUSIAAW },
5909 { 0, CODE_FOR_spe_evmhousianw, "__builtin_spe_evmhousianw", SPE_BUILTIN_EVMHOUSIANW },
5910 { 0, CODE_FOR_spe_evmwhsmf, "__builtin_spe_evmwhsmf", SPE_BUILTIN_EVMWHSMF },
5911 { 0, CODE_FOR_spe_evmwhsmfa, "__builtin_spe_evmwhsmfa", SPE_BUILTIN_EVMWHSMFA },
5912 { 0, CODE_FOR_spe_evmwhsmi, "__builtin_spe_evmwhsmi", SPE_BUILTIN_EVMWHSMI },
5913 { 0, CODE_FOR_spe_evmwhsmia, "__builtin_spe_evmwhsmia", SPE_BUILTIN_EVMWHSMIA },
5914 { 0, CODE_FOR_spe_evmwhssf, "__builtin_spe_evmwhssf", SPE_BUILTIN_EVMWHSSF },
5915 { 0, CODE_FOR_spe_evmwhssfa, "__builtin_spe_evmwhssfa", SPE_BUILTIN_EVMWHSSFA },
5916 { 0, CODE_FOR_spe_evmwhumi, "__builtin_spe_evmwhumi", SPE_BUILTIN_EVMWHUMI },
5917 { 0, CODE_FOR_spe_evmwhumia, "__builtin_spe_evmwhumia", SPE_BUILTIN_EVMWHUMIA },
5918 { 0, CODE_FOR_spe_evmwlsmiaaw, "__builtin_spe_evmwlsmiaaw", SPE_BUILTIN_EVMWLSMIAAW },
5919 { 0, CODE_FOR_spe_evmwlsmianw, "__builtin_spe_evmwlsmianw", SPE_BUILTIN_EVMWLSMIANW },
5920 { 0, CODE_FOR_spe_evmwlssiaaw, "__builtin_spe_evmwlssiaaw", SPE_BUILTIN_EVMWLSSIAAW },
5921 { 0, CODE_FOR_spe_evmwlssianw, "__builtin_spe_evmwlssianw", SPE_BUILTIN_EVMWLSSIANW },
5922 { 0, CODE_FOR_spe_evmwlumi, "__builtin_spe_evmwlumi", SPE_BUILTIN_EVMWLUMI },
5923 { 0, CODE_FOR_spe_evmwlumia, "__builtin_spe_evmwlumia", SPE_BUILTIN_EVMWLUMIA },
5924 { 0, CODE_FOR_spe_evmwlumiaaw, "__builtin_spe_evmwlumiaaw", SPE_BUILTIN_EVMWLUMIAAW },
5925 { 0, CODE_FOR_spe_evmwlumianw, "__builtin_spe_evmwlumianw", SPE_BUILTIN_EVMWLUMIANW },
5926 { 0, CODE_FOR_spe_evmwlusiaaw, "__builtin_spe_evmwlusiaaw", SPE_BUILTIN_EVMWLUSIAAW },
5927 { 0, CODE_FOR_spe_evmwlusianw, "__builtin_spe_evmwlusianw", SPE_BUILTIN_EVMWLUSIANW },
5928 { 0, CODE_FOR_spe_evmwsmf, "__builtin_spe_evmwsmf", SPE_BUILTIN_EVMWSMF },
5929 { 0, CODE_FOR_spe_evmwsmfa, "__builtin_spe_evmwsmfa", SPE_BUILTIN_EVMWSMFA },
5930 { 0, CODE_FOR_spe_evmwsmfaa, "__builtin_spe_evmwsmfaa", SPE_BUILTIN_EVMWSMFAA },
5931 { 0, CODE_FOR_spe_evmwsmfan, "__builtin_spe_evmwsmfan", SPE_BUILTIN_EVMWSMFAN },
5932 { 0, CODE_FOR_spe_evmwsmi, "__builtin_spe_evmwsmi", SPE_BUILTIN_EVMWSMI },
5933 { 0, CODE_FOR_spe_evmwsmia, "__builtin_spe_evmwsmia", SPE_BUILTIN_EVMWSMIA },
5934 { 0, CODE_FOR_spe_evmwsmiaa, "__builtin_spe_evmwsmiaa", SPE_BUILTIN_EVMWSMIAA },
5935 { 0, CODE_FOR_spe_evmwsmian, "__builtin_spe_evmwsmian", SPE_BUILTIN_EVMWSMIAN },
5936 { 0, CODE_FOR_spe_evmwssf, "__builtin_spe_evmwssf", SPE_BUILTIN_EVMWSSF },
5937 { 0, CODE_FOR_spe_evmwssfa, "__builtin_spe_evmwssfa", SPE_BUILTIN_EVMWSSFA },
5938 { 0, CODE_FOR_spe_evmwssfaa, "__builtin_spe_evmwssfaa", SPE_BUILTIN_EVMWSSFAA },
5939 { 0, CODE_FOR_spe_evmwssfan, "__builtin_spe_evmwssfan", SPE_BUILTIN_EVMWSSFAN },
5940 { 0, CODE_FOR_spe_evmwumi, "__builtin_spe_evmwumi", SPE_BUILTIN_EVMWUMI },
5941 { 0, CODE_FOR_spe_evmwumia, "__builtin_spe_evmwumia", SPE_BUILTIN_EVMWUMIA },
5942 { 0, CODE_FOR_spe_evmwumiaa, "__builtin_spe_evmwumiaa", SPE_BUILTIN_EVMWUMIAA },
5943 { 0, CODE_FOR_spe_evmwumian, "__builtin_spe_evmwumian", SPE_BUILTIN_EVMWUMIAN },
5944 { 0, CODE_FOR_spe_evnand, "__builtin_spe_evnand", SPE_BUILTIN_EVNAND },
5945 { 0, CODE_FOR_spe_evnor, "__builtin_spe_evnor", SPE_BUILTIN_EVNOR },
5946 { 0, CODE_FOR_spe_evor, "__builtin_spe_evor", SPE_BUILTIN_EVOR },
5947 { 0, CODE_FOR_spe_evorc, "__builtin_spe_evorc", SPE_BUILTIN_EVORC },
5948 { 0, CODE_FOR_spe_evrlw, "__builtin_spe_evrlw", SPE_BUILTIN_EVRLW },
5949 { 0, CODE_FOR_spe_evslw, "__builtin_spe_evslw", SPE_BUILTIN_EVSLW },
5950 { 0, CODE_FOR_spe_evsrws, "__builtin_spe_evsrws", SPE_BUILTIN_EVSRWS },
5951 { 0, CODE_FOR_spe_evsrwu, "__builtin_spe_evsrwu", SPE_BUILTIN_EVSRWU },
5952 { 0, CODE_FOR_spe_evsubfw, "__builtin_spe_evsubfw", SPE_BUILTIN_EVSUBFW },
5954 /* SPE binary operations expecting a 5-bit unsigned literal. */
5955 { 0, CODE_FOR_spe_evaddiw, "__builtin_spe_evaddiw", SPE_BUILTIN_EVADDIW },
5957 { 0, CODE_FOR_spe_evrlwi, "__builtin_spe_evrlwi", SPE_BUILTIN_EVRLWI },
5958 { 0, CODE_FOR_spe_evslwi, "__builtin_spe_evslwi", SPE_BUILTIN_EVSLWI },
5959 { 0, CODE_FOR_spe_evsrwis, "__builtin_spe_evsrwis", SPE_BUILTIN_EVSRWIS },
5960 { 0, CODE_FOR_spe_evsrwiu, "__builtin_spe_evsrwiu", SPE_BUILTIN_EVSRWIU },
5961 { 0, CODE_FOR_spe_evsubifw, "__builtin_spe_evsubifw", SPE_BUILTIN_EVSUBIFW },
5962 { 0, CODE_FOR_spe_evmwhssfaa, "__builtin_spe_evmwhssfaa", SPE_BUILTIN_EVMWHSSFAA },
5963 { 0, CODE_FOR_spe_evmwhssmaa, "__builtin_spe_evmwhssmaa", SPE_BUILTIN_EVMWHSSMAA },
5964 { 0, CODE_FOR_spe_evmwhsmfaa, "__builtin_spe_evmwhsmfaa", SPE_BUILTIN_EVMWHSMFAA },
5965 { 0, CODE_FOR_spe_evmwhsmiaa, "__builtin_spe_evmwhsmiaa", SPE_BUILTIN_EVMWHSMIAA },
5966 { 0, CODE_FOR_spe_evmwhusiaa, "__builtin_spe_evmwhusiaa", SPE_BUILTIN_EVMWHUSIAA },
5967 { 0, CODE_FOR_spe_evmwhumiaa, "__builtin_spe_evmwhumiaa", SPE_BUILTIN_EVMWHUMIAA },
5968 { 0, CODE_FOR_spe_evmwhssfan, "__builtin_spe_evmwhssfan", SPE_BUILTIN_EVMWHSSFAN },
5969 { 0, CODE_FOR_spe_evmwhssian, "__builtin_spe_evmwhssian", SPE_BUILTIN_EVMWHSSIAN },
5970 { 0, CODE_FOR_spe_evmwhsmfan, "__builtin_spe_evmwhsmfan", SPE_BUILTIN_EVMWHSMFAN },
5971 { 0, CODE_FOR_spe_evmwhsmian, "__builtin_spe_evmwhsmian", SPE_BUILTIN_EVMWHSMIAN },
5972 { 0, CODE_FOR_spe_evmwhusian, "__builtin_spe_evmwhusian", SPE_BUILTIN_EVMWHUSIAN },
5973 { 0, CODE_FOR_spe_evmwhumian, "__builtin_spe_evmwhumian", SPE_BUILTIN_EVMWHUMIAN },
5974 { 0, CODE_FOR_spe_evmwhgssfaa, "__builtin_spe_evmwhgssfaa", SPE_BUILTIN_EVMWHGSSFAA },
5975 { 0, CODE_FOR_spe_evmwhgsmfaa, "__builtin_spe_evmwhgsmfaa", SPE_BUILTIN_EVMWHGSMFAA },
5976 { 0, CODE_FOR_spe_evmwhgsmiaa, "__builtin_spe_evmwhgsmiaa", SPE_BUILTIN_EVMWHGSMIAA },
5977 { 0, CODE_FOR_spe_evmwhgumiaa, "__builtin_spe_evmwhgumiaa", SPE_BUILTIN_EVMWHGUMIAA },
5978 { 0, CODE_FOR_spe_evmwhgssfan, "__builtin_spe_evmwhgssfan", SPE_BUILTIN_EVMWHGSSFAN },
5979 { 0, CODE_FOR_spe_evmwhgsmfan, "__builtin_spe_evmwhgsmfan", SPE_BUILTIN_EVMWHGSMFAN },
5980 { 0, CODE_FOR_spe_evmwhgsmian, "__builtin_spe_evmwhgsmian", SPE_BUILTIN_EVMWHGSMIAN },
5981 { 0, CODE_FOR_spe_evmwhgumian, "__builtin_spe_evmwhgumian", SPE_BUILTIN_EVMWHGUMIAN },
5982 { 0, CODE_FOR_spe_brinc, "__builtin_spe_brinc", SPE_BUILTIN_BRINC },
5984 /* Place-holder. Leave as last binary SPE builtin. */
5985 { 0, CODE_FOR_xorv2si3, "__builtin_spe_evxor", SPE_BUILTIN_EVXOR }
5988 /* AltiVec predicates. */
5990 struct builtin_description_predicates
5992 const unsigned int mask;
5993 const enum insn_code icode;
5995 const char *const name;
5996 const enum rs6000_builtins code;
5999 static const struct builtin_description_predicates bdesc_altivec_preds[] =
6001 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpbfp.", "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P },
6002 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpeqfp.", "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P },
6003 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgefp.", "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P },
6004 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgtfp.", "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P },
6005 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpequw.", "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P },
6006 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtsw.", "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P },
6007 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtuw.", "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P },
6008 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtuh.", "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P },
6009 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtsh.", "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P },
6010 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpequh.", "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P },
6011 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpequb.", "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P },
6012 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtsb.", "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P },
6013 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P },
6015 { MASK_ALTIVEC, 0, NULL, "__builtin_vec_vcmpeq_p", ALTIVEC_BUILTIN_VCMPEQ_P },
6016 { MASK_ALTIVEC, 0, NULL, "__builtin_vec_vcmpgt_p", ALTIVEC_BUILTIN_VCMPGT_P },
6017 { MASK_ALTIVEC, 0, NULL, "__builtin_vec_vcmpge_p", ALTIVEC_BUILTIN_VCMPGE_P }
6020 /* SPE predicates. */
6021 static struct builtin_description bdesc_spe_predicates[] =
6023 /* Place-holder. Leave as first. */
6024 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evcmpeq", SPE_BUILTIN_EVCMPEQ },
6025 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evcmpgts", SPE_BUILTIN_EVCMPGTS },
6026 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evcmpgtu", SPE_BUILTIN_EVCMPGTU },
6027 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evcmplts", SPE_BUILTIN_EVCMPLTS },
6028 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evcmpltu", SPE_BUILTIN_EVCMPLTU },
6029 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evfscmpeq", SPE_BUILTIN_EVFSCMPEQ },
6030 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evfscmpgt", SPE_BUILTIN_EVFSCMPGT },
6031 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evfscmplt", SPE_BUILTIN_EVFSCMPLT },
6032 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evfststeq", SPE_BUILTIN_EVFSTSTEQ },
6033 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evfststgt", SPE_BUILTIN_EVFSTSTGT },
6034 /* Place-holder. Leave as last. */
6035 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evfststlt", SPE_BUILTIN_EVFSTSTLT },
6038 /* SPE evsel predicates. */
6039 static struct builtin_description bdesc_spe_evsel[] =
6041 /* Place-holder. Leave as first. */
6042 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evsel_gts", SPE_BUILTIN_EVSEL_CMPGTS },
6043 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evsel_gtu", SPE_BUILTIN_EVSEL_CMPGTU },
6044 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evsel_lts", SPE_BUILTIN_EVSEL_CMPLTS },
6045 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evsel_ltu", SPE_BUILTIN_EVSEL_CMPLTU },
6046 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evsel_eq", SPE_BUILTIN_EVSEL_CMPEQ },
6047 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evsel_fsgt", SPE_BUILTIN_EVSEL_FSCMPGT },
6048 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evsel_fslt", SPE_BUILTIN_EVSEL_FSCMPLT },
6049 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evsel_fseq", SPE_BUILTIN_EVSEL_FSCMPEQ },
6050 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evsel_fststgt", SPE_BUILTIN_EVSEL_FSTSTGT },
6051 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evsel_fststlt", SPE_BUILTIN_EVSEL_FSTSTLT },
6052 /* Place-holder. Leave as last. */
6053 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evsel_fststeq", SPE_BUILTIN_EVSEL_FSTSTEQ },
6056 /* ABS* operations. */
6058 static const struct builtin_description bdesc_abs[] =
6060 { MASK_ALTIVEC, CODE_FOR_absv4si2, "__builtin_altivec_abs_v4si", ALTIVEC_BUILTIN_ABS_V4SI },
6061 { MASK_ALTIVEC, CODE_FOR_absv8hi2, "__builtin_altivec_abs_v8hi", ALTIVEC_BUILTIN_ABS_V8HI },
6062 { MASK_ALTIVEC, CODE_FOR_absv4sf2, "__builtin_altivec_abs_v4sf", ALTIVEC_BUILTIN_ABS_V4SF },
6063 { MASK_ALTIVEC, CODE_FOR_absv16qi2, "__builtin_altivec_abs_v16qi", ALTIVEC_BUILTIN_ABS_V16QI },
6064 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v4si, "__builtin_altivec_abss_v4si", ALTIVEC_BUILTIN_ABSS_V4SI },
6065 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v8hi, "__builtin_altivec_abss_v8hi", ALTIVEC_BUILTIN_ABSS_V8HI },
6066 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v16qi, "__builtin_altivec_abss_v16qi", ALTIVEC_BUILTIN_ABSS_V16QI }
6069 /* Simple unary operations: VECb = foo (unsigned literal) or VECb =
6072 static struct builtin_description bdesc_1arg[] =
6074 { MASK_ALTIVEC, CODE_FOR_altivec_vexptefp, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP },
6075 { MASK_ALTIVEC, CODE_FOR_altivec_vlogefp, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP },
6076 { MASK_ALTIVEC, CODE_FOR_altivec_vrefp, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP },
6077 { MASK_ALTIVEC, CODE_FOR_altivec_vrfim, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM },
6078 { MASK_ALTIVEC, CODE_FOR_altivec_vrfin, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN },
6079 { MASK_ALTIVEC, CODE_FOR_altivec_vrfip, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP },
6080 { MASK_ALTIVEC, CODE_FOR_ftruncv4sf2, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ },
6081 { MASK_ALTIVEC, CODE_FOR_altivec_vrsqrtefp, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP },
6082 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisb, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB },
6083 { MASK_ALTIVEC, CODE_FOR_altivec_vspltish, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH },
6084 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisw, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW },
6085 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsb, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB },
6086 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhpx, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX },
6087 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsh, "__builtin_altivec_vupkhsh", ALTIVEC_BUILTIN_VUPKHSH },
6088 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsb, "__builtin_altivec_vupklsb", ALTIVEC_BUILTIN_VUPKLSB },
6089 { MASK_ALTIVEC, CODE_FOR_altivec_vupklpx, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX },
6090 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsh, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH },
6092 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_abs", ALTIVEC_BUILTIN_VEC_ABS },
6093 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_abss", ALTIVEC_BUILTIN_VEC_ABSS },
6094 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_ceil", ALTIVEC_BUILTIN_VEC_CEIL },
6095 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_expte", ALTIVEC_BUILTIN_VEC_EXPTE },
6096 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_floor", ALTIVEC_BUILTIN_VEC_FLOOR },
6097 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_loge", ALTIVEC_BUILTIN_VEC_LOGE },
6098 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mtvscr", ALTIVEC_BUILTIN_VEC_MTVSCR },
6099 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_re", ALTIVEC_BUILTIN_VEC_RE },
6100 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_round", ALTIVEC_BUILTIN_VEC_ROUND },
6101 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_rsqrte", ALTIVEC_BUILTIN_VEC_RSQRTE },
6102 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_trunc", ALTIVEC_BUILTIN_VEC_TRUNC },
6103 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_unpackh", ALTIVEC_BUILTIN_VEC_UNPACKH },
6104 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupkhsh", ALTIVEC_BUILTIN_VEC_VUPKHSH },
6105 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupkhpx", ALTIVEC_BUILTIN_VEC_VUPKHPX },
6106 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupkhsb", ALTIVEC_BUILTIN_VEC_VUPKHSB },
6107 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_unpackl", ALTIVEC_BUILTIN_VEC_UNPACKL },
6108 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupklpx", ALTIVEC_BUILTIN_VEC_VUPKLPX },
6109 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupklsh", ALTIVEC_BUILTIN_VEC_VUPKLSH },
6110 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupklsb", ALTIVEC_BUILTIN_VEC_VUPKLSB },
6112 /* The SPE unary builtins must start with SPE_BUILTIN_EVABS and
6113 end with SPE_BUILTIN_EVSUBFUSIAAW. */
6114 { 0, CODE_FOR_spe_evabs, "__builtin_spe_evabs", SPE_BUILTIN_EVABS },
6115 { 0, CODE_FOR_spe_evaddsmiaaw, "__builtin_spe_evaddsmiaaw", SPE_BUILTIN_EVADDSMIAAW },
6116 { 0, CODE_FOR_spe_evaddssiaaw, "__builtin_spe_evaddssiaaw", SPE_BUILTIN_EVADDSSIAAW },
6117 { 0, CODE_FOR_spe_evaddumiaaw, "__builtin_spe_evaddumiaaw", SPE_BUILTIN_EVADDUMIAAW },
6118 { 0, CODE_FOR_spe_evaddusiaaw, "__builtin_spe_evaddusiaaw", SPE_BUILTIN_EVADDUSIAAW },
6119 { 0, CODE_FOR_spe_evcntlsw, "__builtin_spe_evcntlsw", SPE_BUILTIN_EVCNTLSW },
6120 { 0, CODE_FOR_spe_evcntlzw, "__builtin_spe_evcntlzw", SPE_BUILTIN_EVCNTLZW },
6121 { 0, CODE_FOR_spe_evextsb, "__builtin_spe_evextsb", SPE_BUILTIN_EVEXTSB },
6122 { 0, CODE_FOR_spe_evextsh, "__builtin_spe_evextsh", SPE_BUILTIN_EVEXTSH },
6123 { 0, CODE_FOR_spe_evfsabs, "__builtin_spe_evfsabs", SPE_BUILTIN_EVFSABS },
6124 { 0, CODE_FOR_spe_evfscfsf, "__builtin_spe_evfscfsf", SPE_BUILTIN_EVFSCFSF },
6125 { 0, CODE_FOR_spe_evfscfsi, "__builtin_spe_evfscfsi", SPE_BUILTIN_EVFSCFSI },
6126 { 0, CODE_FOR_spe_evfscfuf, "__builtin_spe_evfscfuf", SPE_BUILTIN_EVFSCFUF },
6127 { 0, CODE_FOR_spe_evfscfui, "__builtin_spe_evfscfui", SPE_BUILTIN_EVFSCFUI },
6128 { 0, CODE_FOR_spe_evfsctsf, "__builtin_spe_evfsctsf", SPE_BUILTIN_EVFSCTSF },
6129 { 0, CODE_FOR_spe_evfsctsi, "__builtin_spe_evfsctsi", SPE_BUILTIN_EVFSCTSI },
6130 { 0, CODE_FOR_spe_evfsctsiz, "__builtin_spe_evfsctsiz", SPE_BUILTIN_EVFSCTSIZ },
6131 { 0, CODE_FOR_spe_evfsctuf, "__builtin_spe_evfsctuf", SPE_BUILTIN_EVFSCTUF },
6132 { 0, CODE_FOR_spe_evfsctui, "__builtin_spe_evfsctui", SPE_BUILTIN_EVFSCTUI },
6133 { 0, CODE_FOR_spe_evfsctuiz, "__builtin_spe_evfsctuiz", SPE_BUILTIN_EVFSCTUIZ },
6134 { 0, CODE_FOR_spe_evfsnabs, "__builtin_spe_evfsnabs", SPE_BUILTIN_EVFSNABS },
6135 { 0, CODE_FOR_spe_evfsneg, "__builtin_spe_evfsneg", SPE_BUILTIN_EVFSNEG },
6136 { 0, CODE_FOR_spe_evmra, "__builtin_spe_evmra", SPE_BUILTIN_EVMRA },
6137 { 0, CODE_FOR_negv2si2, "__builtin_spe_evneg", SPE_BUILTIN_EVNEG },
6138 { 0, CODE_FOR_spe_evrndw, "__builtin_spe_evrndw", SPE_BUILTIN_EVRNDW },
6139 { 0, CODE_FOR_spe_evsubfsmiaaw, "__builtin_spe_evsubfsmiaaw", SPE_BUILTIN_EVSUBFSMIAAW },
6140 { 0, CODE_FOR_spe_evsubfssiaaw, "__builtin_spe_evsubfssiaaw", SPE_BUILTIN_EVSUBFSSIAAW },
6141 { 0, CODE_FOR_spe_evsubfumiaaw, "__builtin_spe_evsubfumiaaw", SPE_BUILTIN_EVSUBFUMIAAW },
6143 /* Place-holder. Leave as last unary SPE builtin. */
6144 { 0, CODE_FOR_spe_evsubfusiaaw, "__builtin_spe_evsubfusiaaw", SPE_BUILTIN_EVSUBFUSIAAW }
6148 rs6000_expand_unop_builtin (enum insn_code icode, tree arglist, rtx target)
6151 tree arg0 = TREE_VALUE (arglist);
6152 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6153 enum machine_mode tmode = insn_data[icode].operand[0].mode;
6154 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
6156 if (icode == CODE_FOR_nothing)
6157 /* Builtin not supported on this processor. */
6160 /* If we got invalid arguments bail out before generating bad rtl. */
6161 if (arg0 == error_mark_node)
6164 if (icode == CODE_FOR_altivec_vspltisb
6165 || icode == CODE_FOR_altivec_vspltish
6166 || icode == CODE_FOR_altivec_vspltisw
6167 || icode == CODE_FOR_spe_evsplatfi
6168 || icode == CODE_FOR_spe_evsplati)
6170 /* Only allow 5-bit *signed* literals. */
6171 if (GET_CODE (op0) != CONST_INT
6172 || INTVAL (op0) > 0x1f
6173 || INTVAL (op0) < -0x1f)
6175 error ("argument 1 must be a 5-bit signed literal");
6181 || GET_MODE (target) != tmode
6182 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6183 target = gen_reg_rtx (tmode);
6185 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6186 op0 = copy_to_mode_reg (mode0, op0);
6188 pat = GEN_FCN (icode) (target, op0);
6197 altivec_expand_abs_builtin (enum insn_code icode, tree arglist, rtx target)
6199 rtx pat, scratch1, scratch2;
6200 tree arg0 = TREE_VALUE (arglist);
6201 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6202 enum machine_mode tmode = insn_data[icode].operand[0].mode;
6203 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
6205 /* If we have invalid arguments, bail out before generating bad rtl. */
6206 if (arg0 == error_mark_node)
6210 || GET_MODE (target) != tmode
6211 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6212 target = gen_reg_rtx (tmode);
6214 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6215 op0 = copy_to_mode_reg (mode0, op0);
6217 scratch1 = gen_reg_rtx (mode0);
6218 scratch2 = gen_reg_rtx (mode0);
6220 pat = GEN_FCN (icode) (target, op0, scratch1, scratch2);
6229 rs6000_expand_binop_builtin (enum insn_code icode, tree arglist, rtx target)
6232 tree arg0 = TREE_VALUE (arglist);
6233 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
6234 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6235 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6236 enum machine_mode tmode = insn_data[icode].operand[0].mode;
6237 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
6238 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
6240 if (icode == CODE_FOR_nothing)
6241 /* Builtin not supported on this processor. */
6244 /* If we got invalid arguments bail out before generating bad rtl. */
6245 if (arg0 == error_mark_node || arg1 == error_mark_node)
6248 if (icode == CODE_FOR_altivec_vcfux
6249 || icode == CODE_FOR_altivec_vcfsx
6250 || icode == CODE_FOR_altivec_vctsxs
6251 || icode == CODE_FOR_altivec_vctuxs
6252 || icode == CODE_FOR_altivec_vspltb
6253 || icode == CODE_FOR_altivec_vsplth
6254 || icode == CODE_FOR_altivec_vspltw
6255 || icode == CODE_FOR_spe_evaddiw
6256 || icode == CODE_FOR_spe_evldd
6257 || icode == CODE_FOR_spe_evldh
6258 || icode == CODE_FOR_spe_evldw
6259 || icode == CODE_FOR_spe_evlhhesplat
6260 || icode == CODE_FOR_spe_evlhhossplat
6261 || icode == CODE_FOR_spe_evlhhousplat
6262 || icode == CODE_FOR_spe_evlwhe
6263 || icode == CODE_FOR_spe_evlwhos
6264 || icode == CODE_FOR_spe_evlwhou
6265 || icode == CODE_FOR_spe_evlwhsplat
6266 || icode == CODE_FOR_spe_evlwwsplat
6267 || icode == CODE_FOR_spe_evrlwi
6268 || icode == CODE_FOR_spe_evslwi
6269 || icode == CODE_FOR_spe_evsrwis
6270 || icode == CODE_FOR_spe_evsubifw
6271 || icode == CODE_FOR_spe_evsrwiu)
6273 /* Only allow 5-bit unsigned literals. */
6275 if (TREE_CODE (arg1) != INTEGER_CST
6276 || TREE_INT_CST_LOW (arg1) & ~0x1f)
6278 error ("argument 2 must be a 5-bit unsigned literal");
6284 || GET_MODE (target) != tmode
6285 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6286 target = gen_reg_rtx (tmode);
6288 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6289 op0 = copy_to_mode_reg (mode0, op0);
6290 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
6291 op1 = copy_to_mode_reg (mode1, op1);
6293 pat = GEN_FCN (icode) (target, op0, op1);
6302 altivec_expand_predicate_builtin (enum insn_code icode, const char *opcode,
6303 tree arglist, rtx target)
6306 tree cr6_form = TREE_VALUE (arglist);
6307 tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
6308 tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6309 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6310 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6311 enum machine_mode tmode = SImode;
6312 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
6313 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
6316 if (TREE_CODE (cr6_form) != INTEGER_CST)
6318 error ("argument 1 of __builtin_altivec_predicate must be a constant");
6322 cr6_form_int = TREE_INT_CST_LOW (cr6_form);
6324 gcc_assert (mode0 == mode1);
6326 /* If we have invalid arguments, bail out before generating bad rtl. */
6327 if (arg0 == error_mark_node || arg1 == error_mark_node)
6331 || GET_MODE (target) != tmode
6332 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6333 target = gen_reg_rtx (tmode);
6335 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6336 op0 = copy_to_mode_reg (mode0, op0);
6337 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
6338 op1 = copy_to_mode_reg (mode1, op1);
6340 scratch = gen_reg_rtx (mode0);
6342 pat = GEN_FCN (icode) (scratch, op0, op1,
6343 gen_rtx_SYMBOL_REF (Pmode, opcode));
6348 /* The vec_any* and vec_all* predicates use the same opcodes for two
6349 different operations, but the bits in CR6 will be different
6350 depending on what information we want. So we have to play tricks
6351 with CR6 to get the right bits out.
6353 If you think this is disgusting, look at the specs for the
6354 AltiVec predicates. */
6356 switch (cr6_form_int)
6359 emit_insn (gen_cr6_test_for_zero (target));
6362 emit_insn (gen_cr6_test_for_zero_reverse (target));
6365 emit_insn (gen_cr6_test_for_lt (target));
6368 emit_insn (gen_cr6_test_for_lt_reverse (target));
6371 error ("argument 1 of __builtin_altivec_predicate is out of range");
6379 altivec_expand_lv_builtin (enum insn_code icode, tree arglist, rtx target)
6382 tree arg0 = TREE_VALUE (arglist);
6383 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
6384 enum machine_mode tmode = insn_data[icode].operand[0].mode;
6385 enum machine_mode mode0 = Pmode;
6386 enum machine_mode mode1 = Pmode;
6387 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6388 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6390 if (icode == CODE_FOR_nothing)
6391 /* Builtin not supported on this processor. */
6394 /* If we got invalid arguments bail out before generating bad rtl. */
6395 if (arg0 == error_mark_node || arg1 == error_mark_node)
6399 || GET_MODE (target) != tmode
6400 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6401 target = gen_reg_rtx (tmode);
6403 op1 = copy_to_mode_reg (mode1, op1);
6405 if (op0 == const0_rtx)
6407 addr = gen_rtx_MEM (tmode, op1);
6411 op0 = copy_to_mode_reg (mode0, op0);
6412 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op0, op1));
6415 pat = GEN_FCN (icode) (target, addr);
6425 spe_expand_stv_builtin (enum insn_code icode, tree arglist)
6427 tree arg0 = TREE_VALUE (arglist);
6428 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
6429 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6430 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6431 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6432 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
6434 enum machine_mode mode0 = insn_data[icode].operand[0].mode;
6435 enum machine_mode mode1 = insn_data[icode].operand[1].mode;
6436 enum machine_mode mode2 = insn_data[icode].operand[2].mode;
6438 /* Invalid arguments. Bail before doing anything stoopid! */
6439 if (arg0 == error_mark_node
6440 || arg1 == error_mark_node
6441 || arg2 == error_mark_node)
6444 if (! (*insn_data[icode].operand[2].predicate) (op0, mode2))
6445 op0 = copy_to_mode_reg (mode2, op0);
6446 if (! (*insn_data[icode].operand[0].predicate) (op1, mode0))
6447 op1 = copy_to_mode_reg (mode0, op1);
6448 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
6449 op2 = copy_to_mode_reg (mode1, op2);
6451 pat = GEN_FCN (icode) (op1, op2, op0);
6458 altivec_expand_stv_builtin (enum insn_code icode, tree arglist)
6460 tree arg0 = TREE_VALUE (arglist);
6461 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
6462 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6463 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6464 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6465 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
6467 enum machine_mode tmode = insn_data[icode].operand[0].mode;
6468 enum machine_mode mode1 = Pmode;
6469 enum machine_mode mode2 = Pmode;
6471 /* Invalid arguments. Bail before doing anything stoopid! */
6472 if (arg0 == error_mark_node
6473 || arg1 == error_mark_node
6474 || arg2 == error_mark_node)
6477 if (! (*insn_data[icode].operand[1].predicate) (op0, tmode))
6478 op0 = copy_to_mode_reg (tmode, op0);
6480 op2 = copy_to_mode_reg (mode2, op2);
6482 if (op1 == const0_rtx)
6484 addr = gen_rtx_MEM (tmode, op2);
6488 op1 = copy_to_mode_reg (mode1, op1);
6489 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op1, op2));
6492 pat = GEN_FCN (icode) (addr, op0);
6499 rs6000_expand_ternop_builtin (enum insn_code icode, tree arglist, rtx target)
6502 tree arg0 = TREE_VALUE (arglist);
6503 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
6504 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6505 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6506 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6507 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
6508 enum machine_mode tmode = insn_data[icode].operand[0].mode;
6509 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
6510 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
6511 enum machine_mode mode2 = insn_data[icode].operand[3].mode;
6513 if (icode == CODE_FOR_nothing)
6514 /* Builtin not supported on this processor. */
6517 /* If we got invalid arguments bail out before generating bad rtl. */
6518 if (arg0 == error_mark_node
6519 || arg1 == error_mark_node
6520 || arg2 == error_mark_node)
6523 if (icode == CODE_FOR_altivec_vsldoi_v4sf
6524 || icode == CODE_FOR_altivec_vsldoi_v4si
6525 || icode == CODE_FOR_altivec_vsldoi_v8hi
6526 || icode == CODE_FOR_altivec_vsldoi_v16qi)
6528 /* Only allow 4-bit unsigned literals. */
6530 if (TREE_CODE (arg2) != INTEGER_CST
6531 || TREE_INT_CST_LOW (arg2) & ~0xf)
6533 error ("argument 3 must be a 4-bit unsigned literal");
6539 || GET_MODE (target) != tmode
6540 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6541 target = gen_reg_rtx (tmode);
6543 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6544 op0 = copy_to_mode_reg (mode0, op0);
6545 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
6546 op1 = copy_to_mode_reg (mode1, op1);
6547 if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
6548 op2 = copy_to_mode_reg (mode2, op2);
6550 pat = GEN_FCN (icode) (target, op0, op1, op2);
6558 /* Expand the lvx builtins. */
6560 altivec_expand_ld_builtin (tree exp, rtx target, bool *expandedp)
6562 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6563 tree arglist = TREE_OPERAND (exp, 1);
6564 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
6566 enum machine_mode tmode, mode0;
6568 enum insn_code icode;
6572 case ALTIVEC_BUILTIN_LD_INTERNAL_16qi:
6573 icode = CODE_FOR_altivec_lvx_v16qi;
6575 case ALTIVEC_BUILTIN_LD_INTERNAL_8hi:
6576 icode = CODE_FOR_altivec_lvx_v8hi;
6578 case ALTIVEC_BUILTIN_LD_INTERNAL_4si:
6579 icode = CODE_FOR_altivec_lvx_v4si;
6581 case ALTIVEC_BUILTIN_LD_INTERNAL_4sf:
6582 icode = CODE_FOR_altivec_lvx_v4sf;
6591 arg0 = TREE_VALUE (arglist);
6592 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6593 tmode = insn_data[icode].operand[0].mode;
6594 mode0 = insn_data[icode].operand[1].mode;
6597 || GET_MODE (target) != tmode
6598 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6599 target = gen_reg_rtx (tmode);
6601 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6602 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
6604 pat = GEN_FCN (icode) (target, op0);
6611 /* Expand the stvx builtins. */
6613 altivec_expand_st_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
6616 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6617 tree arglist = TREE_OPERAND (exp, 1);
6618 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
6620 enum machine_mode mode0, mode1;
6622 enum insn_code icode;
6626 case ALTIVEC_BUILTIN_ST_INTERNAL_16qi:
6627 icode = CODE_FOR_altivec_stvx_v16qi;
6629 case ALTIVEC_BUILTIN_ST_INTERNAL_8hi:
6630 icode = CODE_FOR_altivec_stvx_v8hi;
6632 case ALTIVEC_BUILTIN_ST_INTERNAL_4si:
6633 icode = CODE_FOR_altivec_stvx_v4si;
6635 case ALTIVEC_BUILTIN_ST_INTERNAL_4sf:
6636 icode = CODE_FOR_altivec_stvx_v4sf;
6643 arg0 = TREE_VALUE (arglist);
6644 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
6645 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6646 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6647 mode0 = insn_data[icode].operand[0].mode;
6648 mode1 = insn_data[icode].operand[1].mode;
6650 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
6651 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
6652 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
6653 op1 = copy_to_mode_reg (mode1, op1);
6655 pat = GEN_FCN (icode) (op0, op1);
6663 /* Expand the dst builtins. */
6665 altivec_expand_dst_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
6668 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6669 tree arglist = TREE_OPERAND (exp, 1);
6670 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
6671 tree arg0, arg1, arg2;
6672 enum machine_mode mode0, mode1, mode2;
6673 rtx pat, op0, op1, op2;
6674 struct builtin_description *d;
6679 /* Handle DST variants. */
6680 d = (struct builtin_description *) bdesc_dst;
6681 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
6682 if (d->code == fcode)
6684 arg0 = TREE_VALUE (arglist);
6685 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
6686 arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6687 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6688 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6689 op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
6690 mode0 = insn_data[d->icode].operand[0].mode;
6691 mode1 = insn_data[d->icode].operand[1].mode;
6692 mode2 = insn_data[d->icode].operand[2].mode;
6694 /* Invalid arguments, bail out before generating bad rtl. */
6695 if (arg0 == error_mark_node
6696 || arg1 == error_mark_node
6697 || arg2 == error_mark_node)
6702 if (TREE_CODE (arg2) != INTEGER_CST
6703 || TREE_INT_CST_LOW (arg2) & ~0x3)
6705 error ("argument to %qs must be a 2-bit unsigned literal", d->name);
6709 if (! (*insn_data[d->icode].operand[0].predicate) (op0, mode0))
6710 op0 = copy_to_mode_reg (Pmode, op0);
6711 if (! (*insn_data[d->icode].operand[1].predicate) (op1, mode1))
6712 op1 = copy_to_mode_reg (mode1, op1);
6714 pat = GEN_FCN (d->icode) (op0, op1, op2);
6724 /* Expand the builtin in EXP and store the result in TARGET. Store
6725 true in *EXPANDEDP if we found a builtin to expand. */
6727 altivec_expand_builtin (tree exp, rtx target, bool *expandedp)
6729 struct builtin_description *d;
6730 struct builtin_description_predicates *dp;
6732 enum insn_code icode;
6733 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6734 tree arglist = TREE_OPERAND (exp, 1);
6737 enum machine_mode tmode, mode0;
6738 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
6740 if (fcode >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
6741 && fcode <= ALTIVEC_BUILTIN_OVERLOADED_LAST)
6744 error ("unresolved overload for Altivec builtin %qE", fndecl);
6748 target = altivec_expand_ld_builtin (exp, target, expandedp);
6752 target = altivec_expand_st_builtin (exp, target, expandedp);
6756 target = altivec_expand_dst_builtin (exp, target, expandedp);
6764 case ALTIVEC_BUILTIN_STVX:
6765 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx, arglist);
6766 case ALTIVEC_BUILTIN_STVEBX:
6767 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx, arglist);
6768 case ALTIVEC_BUILTIN_STVEHX:
6769 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx, arglist);
6770 case ALTIVEC_BUILTIN_STVEWX:
6771 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvewx, arglist);
6772 case ALTIVEC_BUILTIN_STVXL:
6773 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvxl, arglist);
6775 case ALTIVEC_BUILTIN_MFVSCR:
6776 icode = CODE_FOR_altivec_mfvscr;
6777 tmode = insn_data[icode].operand[0].mode;
6780 || GET_MODE (target) != tmode
6781 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6782 target = gen_reg_rtx (tmode);
6784 pat = GEN_FCN (icode) (target);
6790 case ALTIVEC_BUILTIN_MTVSCR:
6791 icode = CODE_FOR_altivec_mtvscr;
6792 arg0 = TREE_VALUE (arglist);
6793 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6794 mode0 = insn_data[icode].operand[0].mode;
6796 /* If we got invalid arguments bail out before generating bad rtl. */
6797 if (arg0 == error_mark_node)
6800 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
6801 op0 = copy_to_mode_reg (mode0, op0);
6803 pat = GEN_FCN (icode) (op0);
6808 case ALTIVEC_BUILTIN_DSSALL:
6809 emit_insn (gen_altivec_dssall ());
6812 case ALTIVEC_BUILTIN_DSS:
6813 icode = CODE_FOR_altivec_dss;
6814 arg0 = TREE_VALUE (arglist);
6816 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6817 mode0 = insn_data[icode].operand[0].mode;
6819 /* If we got invalid arguments bail out before generating bad rtl. */
6820 if (arg0 == error_mark_node)
6823 if (TREE_CODE (arg0) != INTEGER_CST
6824 || TREE_INT_CST_LOW (arg0) & ~0x3)
6826 error ("argument to dss must be a 2-bit unsigned literal");
6830 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
6831 op0 = copy_to_mode_reg (mode0, op0);
6833 emit_insn (gen_altivec_dss (op0));
6837 /* Expand abs* operations. */
6838 d = (struct builtin_description *) bdesc_abs;
6839 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
6840 if (d->code == fcode)
6841 return altivec_expand_abs_builtin (d->icode, arglist, target);
6843 /* Expand the AltiVec predicates. */
6844 dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
6845 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
6846 if (dp->code == fcode)
6847 return altivec_expand_predicate_builtin (dp->icode, dp->opcode,
6850 /* LV* are funky. We initialized them differently. */
6853 case ALTIVEC_BUILTIN_LVSL:
6854 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvsl,
6856 case ALTIVEC_BUILTIN_LVSR:
6857 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvsr,
6859 case ALTIVEC_BUILTIN_LVEBX:
6860 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvebx,
6862 case ALTIVEC_BUILTIN_LVEHX:
6863 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvehx,
6865 case ALTIVEC_BUILTIN_LVEWX:
6866 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvewx,
6868 case ALTIVEC_BUILTIN_LVXL:
6869 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvxl,
6871 case ALTIVEC_BUILTIN_LVX:
6872 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvx,
6883 /* Binops that need to be initialized manually, but can be expanded
6884 automagically by rs6000_expand_binop_builtin. */
6885 static struct builtin_description bdesc_2arg_spe[] =
6887 { 0, CODE_FOR_spe_evlddx, "__builtin_spe_evlddx", SPE_BUILTIN_EVLDDX },
6888 { 0, CODE_FOR_spe_evldwx, "__builtin_spe_evldwx", SPE_BUILTIN_EVLDWX },
6889 { 0, CODE_FOR_spe_evldhx, "__builtin_spe_evldhx", SPE_BUILTIN_EVLDHX },
6890 { 0, CODE_FOR_spe_evlwhex, "__builtin_spe_evlwhex", SPE_BUILTIN_EVLWHEX },
6891 { 0, CODE_FOR_spe_evlwhoux, "__builtin_spe_evlwhoux", SPE_BUILTIN_EVLWHOUX },
6892 { 0, CODE_FOR_spe_evlwhosx, "__builtin_spe_evlwhosx", SPE_BUILTIN_EVLWHOSX },
6893 { 0, CODE_FOR_spe_evlwwsplatx, "__builtin_spe_evlwwsplatx", SPE_BUILTIN_EVLWWSPLATX },
6894 { 0, CODE_FOR_spe_evlwhsplatx, "__builtin_spe_evlwhsplatx", SPE_BUILTIN_EVLWHSPLATX },
6895 { 0, CODE_FOR_spe_evlhhesplatx, "__builtin_spe_evlhhesplatx", SPE_BUILTIN_EVLHHESPLATX },
6896 { 0, CODE_FOR_spe_evlhhousplatx, "__builtin_spe_evlhhousplatx", SPE_BUILTIN_EVLHHOUSPLATX },
6897 { 0, CODE_FOR_spe_evlhhossplatx, "__builtin_spe_evlhhossplatx", SPE_BUILTIN_EVLHHOSSPLATX },
6898 { 0, CODE_FOR_spe_evldd, "__builtin_spe_evldd", SPE_BUILTIN_EVLDD },
6899 { 0, CODE_FOR_spe_evldw, "__builtin_spe_evldw", SPE_BUILTIN_EVLDW },
6900 { 0, CODE_FOR_spe_evldh, "__builtin_spe_evldh", SPE_BUILTIN_EVLDH },
6901 { 0, CODE_FOR_spe_evlwhe, "__builtin_spe_evlwhe", SPE_BUILTIN_EVLWHE },
6902 { 0, CODE_FOR_spe_evlwhou, "__builtin_spe_evlwhou", SPE_BUILTIN_EVLWHOU },
6903 { 0, CODE_FOR_spe_evlwhos, "__builtin_spe_evlwhos", SPE_BUILTIN_EVLWHOS },
6904 { 0, CODE_FOR_spe_evlwwsplat, "__builtin_spe_evlwwsplat", SPE_BUILTIN_EVLWWSPLAT },
6905 { 0, CODE_FOR_spe_evlwhsplat, "__builtin_spe_evlwhsplat", SPE_BUILTIN_EVLWHSPLAT },
6906 { 0, CODE_FOR_spe_evlhhesplat, "__builtin_spe_evlhhesplat", SPE_BUILTIN_EVLHHESPLAT },
6907 { 0, CODE_FOR_spe_evlhhousplat, "__builtin_spe_evlhhousplat", SPE_BUILTIN_EVLHHOUSPLAT },
6908 { 0, CODE_FOR_spe_evlhhossplat, "__builtin_spe_evlhhossplat", SPE_BUILTIN_EVLHHOSSPLAT }
6911 /* Expand the builtin in EXP and store the result in TARGET. Store
6912 true in *EXPANDEDP if we found a builtin to expand.
6914 This expands the SPE builtins that are not simple unary and binary
6917 spe_expand_builtin (tree exp, rtx target, bool *expandedp)
6919 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6920 tree arglist = TREE_OPERAND (exp, 1);
6922 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
6923 enum insn_code icode;
6924 enum machine_mode tmode, mode0;
6926 struct builtin_description *d;
6931 /* Syntax check for a 5-bit unsigned immediate. */
6934 case SPE_BUILTIN_EVSTDD:
6935 case SPE_BUILTIN_EVSTDH:
6936 case SPE_BUILTIN_EVSTDW:
6937 case SPE_BUILTIN_EVSTWHE:
6938 case SPE_BUILTIN_EVSTWHO:
6939 case SPE_BUILTIN_EVSTWWE:
6940 case SPE_BUILTIN_EVSTWWO:
6941 arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6942 if (TREE_CODE (arg1) != INTEGER_CST
6943 || TREE_INT_CST_LOW (arg1) & ~0x1f)
6945 error ("argument 2 must be a 5-bit unsigned literal");
6953 /* The evsplat*i instructions are not quite generic. */
6956 case SPE_BUILTIN_EVSPLATFI:
6957 return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplatfi,
6959 case SPE_BUILTIN_EVSPLATI:
6960 return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplati,
6966 d = (struct builtin_description *) bdesc_2arg_spe;
6967 for (i = 0; i < ARRAY_SIZE (bdesc_2arg_spe); ++i, ++d)
6968 if (d->code == fcode)
6969 return rs6000_expand_binop_builtin (d->icode, arglist, target);
6971 d = (struct builtin_description *) bdesc_spe_predicates;
6972 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, ++d)
6973 if (d->code == fcode)
6974 return spe_expand_predicate_builtin (d->icode, arglist, target);
6976 d = (struct builtin_description *) bdesc_spe_evsel;
6977 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, ++d)
6978 if (d->code == fcode)
6979 return spe_expand_evsel_builtin (d->icode, arglist, target);
6983 case SPE_BUILTIN_EVSTDDX:
6984 return spe_expand_stv_builtin (CODE_FOR_spe_evstddx, arglist);
6985 case SPE_BUILTIN_EVSTDHX:
6986 return spe_expand_stv_builtin (CODE_FOR_spe_evstdhx, arglist);
6987 case SPE_BUILTIN_EVSTDWX:
6988 return spe_expand_stv_builtin (CODE_FOR_spe_evstdwx, arglist);
6989 case SPE_BUILTIN_EVSTWHEX:
6990 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhex, arglist);
6991 case SPE_BUILTIN_EVSTWHOX:
6992 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhox, arglist);
6993 case SPE_BUILTIN_EVSTWWEX:
6994 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwex, arglist);
6995 case SPE_BUILTIN_EVSTWWOX:
6996 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwox, arglist);
6997 case SPE_BUILTIN_EVSTDD:
6998 return spe_expand_stv_builtin (CODE_FOR_spe_evstdd, arglist);
6999 case SPE_BUILTIN_EVSTDH:
7000 return spe_expand_stv_builtin (CODE_FOR_spe_evstdh, arglist);
7001 case SPE_BUILTIN_EVSTDW:
7002 return spe_expand_stv_builtin (CODE_FOR_spe_evstdw, arglist);
7003 case SPE_BUILTIN_EVSTWHE:
7004 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhe, arglist);
7005 case SPE_BUILTIN_EVSTWHO:
7006 return spe_expand_stv_builtin (CODE_FOR_spe_evstwho, arglist);
7007 case SPE_BUILTIN_EVSTWWE:
7008 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwe, arglist);
7009 case SPE_BUILTIN_EVSTWWO:
7010 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwo, arglist);
7011 case SPE_BUILTIN_MFSPEFSCR:
7012 icode = CODE_FOR_spe_mfspefscr;
7013 tmode = insn_data[icode].operand[0].mode;
7016 || GET_MODE (target) != tmode
7017 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7018 target = gen_reg_rtx (tmode);
7020 pat = GEN_FCN (icode) (target);
7025 case SPE_BUILTIN_MTSPEFSCR:
7026 icode = CODE_FOR_spe_mtspefscr;
7027 arg0 = TREE_VALUE (arglist);
7028 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
7029 mode0 = insn_data[icode].operand[0].mode;
7031 if (arg0 == error_mark_node)
7034 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
7035 op0 = copy_to_mode_reg (mode0, op0);
7037 pat = GEN_FCN (icode) (op0);
7050 spe_expand_predicate_builtin (enum insn_code icode, tree arglist, rtx target)
7052 rtx pat, scratch, tmp;
7053 tree form = TREE_VALUE (arglist);
7054 tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
7055 tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
7056 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
7057 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
7058 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7059 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
7063 if (TREE_CODE (form) != INTEGER_CST)
7065 error ("argument 1 of __builtin_spe_predicate must be a constant");
7069 form_int = TREE_INT_CST_LOW (form);
7071 gcc_assert (mode0 == mode1);
7073 if (arg0 == error_mark_node || arg1 == error_mark_node)
7077 || GET_MODE (target) != SImode
7078 || ! (*insn_data[icode].operand[0].predicate) (target, SImode))
7079 target = gen_reg_rtx (SImode);
7081 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7082 op0 = copy_to_mode_reg (mode0, op0);
7083 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
7084 op1 = copy_to_mode_reg (mode1, op1);
7086 scratch = gen_reg_rtx (CCmode);
7088 pat = GEN_FCN (icode) (scratch, op0, op1);
7093 /* There are 4 variants for each predicate: _any_, _all_, _upper_,
7094 _lower_. We use one compare, but look in different bits of the
7095 CR for each variant.
7097 There are 2 elements in each SPE simd type (upper/lower). The CR
7098 bits are set as follows:
7100 BIT0 | BIT 1 | BIT 2 | BIT 3
7101 U | L | (U | L) | (U & L)
7103 So, for an "all" relationship, BIT 3 would be set.
7104 For an "any" relationship, BIT 2 would be set. Etc.
7106 Following traditional nomenclature, these bits map to:
7108 BIT0 | BIT 1 | BIT 2 | BIT 3
7111 Later, we will generate rtl to look in the LT/EQ/EQ/OV bits.
7116 /* All variant. OV bit. */
7118 /* We need to get to the OV bit, which is the ORDERED bit. We
7119 could generate (ordered:SI (reg:CC xx) (const_int 0)), but
7120 that's ugly and will make validate_condition_mode die.
7121 So let's just use another pattern. */
7122 emit_insn (gen_move_from_CR_ov_bit (target, scratch));
7124 /* Any variant. EQ bit. */
7128 /* Upper variant. LT bit. */
7132 /* Lower variant. GT bit. */
7137 error ("argument 1 of __builtin_spe_predicate is out of range");
7141 tmp = gen_rtx_fmt_ee (code, SImode, scratch, const0_rtx);
7142 emit_move_insn (target, tmp);
7147 /* The evsel builtins look like this:
7149 e = __builtin_spe_evsel_OP (a, b, c, d);
7153 e[upper] = a[upper] *OP* b[upper] ? c[upper] : d[upper];
7154 e[lower] = a[lower] *OP* b[lower] ? c[lower] : d[lower];
7158 spe_expand_evsel_builtin (enum insn_code icode, tree arglist, rtx target)
7161 tree arg0 = TREE_VALUE (arglist);
7162 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
7163 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
7164 tree arg3 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arglist))));
7165 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
7166 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
7167 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
7168 rtx op3 = expand_expr (arg3, NULL_RTX, VOIDmode, 0);
7169 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7170 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
7172 gcc_assert (mode0 == mode1);
7174 if (arg0 == error_mark_node || arg1 == error_mark_node
7175 || arg2 == error_mark_node || arg3 == error_mark_node)
7179 || GET_MODE (target) != mode0
7180 || ! (*insn_data[icode].operand[0].predicate) (target, mode0))
7181 target = gen_reg_rtx (mode0);
7183 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7184 op0 = copy_to_mode_reg (mode0, op0);
7185 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
7186 op1 = copy_to_mode_reg (mode0, op1);
7187 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
7188 op2 = copy_to_mode_reg (mode0, op2);
7189 if (! (*insn_data[icode].operand[1].predicate) (op3, mode1))
7190 op3 = copy_to_mode_reg (mode0, op3);
7192 /* Generate the compare. */
7193 scratch = gen_reg_rtx (CCmode);
7194 pat = GEN_FCN (icode) (scratch, op0, op1);
7199 if (mode0 == V2SImode)
7200 emit_insn (gen_spe_evsel (target, op2, op3, scratch));
7202 emit_insn (gen_spe_evsel_fs (target, op2, op3, scratch));
7207 /* Expand an expression EXP that calls a built-in function,
7208 with result going to TARGET if that's convenient
7209 (and in mode MODE if that's convenient).
7210 SUBTARGET may be used as the target for computing one of EXP's operands.
7211 IGNORE is nonzero if the value is to be ignored. */
7214 rs6000_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
7215 enum machine_mode mode ATTRIBUTE_UNUSED,
7216 int ignore ATTRIBUTE_UNUSED)
7218 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7219 tree arglist = TREE_OPERAND (exp, 1);
7220 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
7221 struct builtin_description *d;
7226 if (fcode == ALTIVEC_BUILTIN_MASK_FOR_LOAD
7227 || fcode == ALTIVEC_BUILTIN_MASK_FOR_STORE)
7229 int icode = (int) CODE_FOR_altivec_lvsr;
7230 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7231 enum machine_mode mode = insn_data[icode].operand[1].mode;
7235 gcc_assert (TARGET_ALTIVEC);
7237 arg = TREE_VALUE (arglist);
7238 gcc_assert (TREE_CODE (TREE_TYPE (arg)) == POINTER_TYPE);
7239 op = expand_expr (arg, NULL_RTX, Pmode, EXPAND_NORMAL);
7240 addr = memory_address (mode, op);
7241 if (fcode == ALTIVEC_BUILTIN_MASK_FOR_STORE)
7245 /* For the load case need to negate the address. */
7246 op = gen_reg_rtx (GET_MODE (addr));
7247 emit_insn (gen_rtx_SET (VOIDmode, op,
7248 gen_rtx_NEG (GET_MODE (addr), addr)));
7250 op = gen_rtx_MEM (mode, op);
7253 || GET_MODE (target) != tmode
7254 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7255 target = gen_reg_rtx (tmode);
7257 /*pat = gen_altivec_lvsr (target, op);*/
7258 pat = GEN_FCN (icode) (target, op);
7268 ret = altivec_expand_builtin (exp, target, &success);
7275 ret = spe_expand_builtin (exp, target, &success);
7281 gcc_assert (TARGET_ALTIVEC || TARGET_SPE);
7283 /* Handle simple unary operations. */
7284 d = (struct builtin_description *) bdesc_1arg;
7285 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
7286 if (d->code == fcode)
7287 return rs6000_expand_unop_builtin (d->icode, arglist, target);
7289 /* Handle simple binary operations. */
7290 d = (struct builtin_description *) bdesc_2arg;
7291 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
7292 if (d->code == fcode)
7293 return rs6000_expand_binop_builtin (d->icode, arglist, target);
7296 /* Handle simple ternary operations. */
7297 d = (struct builtin_description *) bdesc_3arg;
7298 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
7299 if (d->code == fcode)
7300 return rs6000_expand_ternop_builtin (d->icode, arglist, target);
7305 /* Handle simple ternary operations. */
7306 d = (struct builtin_description *) bdesc_3arg;
7307 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
7308 if (d->code == fcode)
7309 return rs6000_expand_ternop_builtin (d->icode, arglist, target);
7316 build_opaque_vector_type (tree node, int nunits)
7318 node = copy_node (node);
7319 TYPE_MAIN_VARIANT (node) = node;
7320 return build_vector_type (node, nunits);
7324 rs6000_init_builtins (void)
7326 V2SI_type_node = build_vector_type (intSI_type_node, 2);
7327 V2SF_type_node = build_vector_type (float_type_node, 2);
7328 V4HI_type_node = build_vector_type (intHI_type_node, 4);
7329 V4SI_type_node = build_vector_type (intSI_type_node, 4);
7330 V4SF_type_node = build_vector_type (float_type_node, 4);
7331 V8HI_type_node = build_vector_type (intHI_type_node, 8);
7332 V16QI_type_node = build_vector_type (intQI_type_node, 16);
7334 unsigned_V16QI_type_node = build_vector_type (unsigned_intQI_type_node, 16);
7335 unsigned_V8HI_type_node = build_vector_type (unsigned_intHI_type_node, 8);
7336 unsigned_V4SI_type_node = build_vector_type (unsigned_intSI_type_node, 4);
7338 opaque_V2SF_type_node = build_opaque_vector_type (float_type_node, 2);
7339 opaque_V2SI_type_node = build_opaque_vector_type (intSI_type_node, 2);
7340 opaque_p_V2SI_type_node = build_pointer_type (opaque_V2SI_type_node);
7341 opaque_V4SI_type_node = copy_node (V4SI_type_node);
7343 /* The 'vector bool ...' types must be kept distinct from 'vector unsigned ...'
7344 types, especially in C++ land. Similarly, 'vector pixel' is distinct from
7345 'vector unsigned short'. */
7347 bool_char_type_node = build_distinct_type_copy (unsigned_intQI_type_node);
7348 bool_short_type_node = build_distinct_type_copy (unsigned_intHI_type_node);
7349 bool_int_type_node = build_distinct_type_copy (unsigned_intSI_type_node);
7350 pixel_type_node = build_distinct_type_copy (unsigned_intHI_type_node);
7352 long_integer_type_internal_node = long_integer_type_node;
7353 long_unsigned_type_internal_node = long_unsigned_type_node;
7354 intQI_type_internal_node = intQI_type_node;
7355 uintQI_type_internal_node = unsigned_intQI_type_node;
7356 intHI_type_internal_node = intHI_type_node;
7357 uintHI_type_internal_node = unsigned_intHI_type_node;
7358 intSI_type_internal_node = intSI_type_node;
7359 uintSI_type_internal_node = unsigned_intSI_type_node;
7360 float_type_internal_node = float_type_node;
7361 void_type_internal_node = void_type_node;
7363 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7364 get_identifier ("__bool char"),
7365 bool_char_type_node));
7366 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7367 get_identifier ("__bool short"),
7368 bool_short_type_node));
7369 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7370 get_identifier ("__bool int"),
7371 bool_int_type_node));
7372 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7373 get_identifier ("__pixel"),
7376 bool_V16QI_type_node = build_vector_type (bool_char_type_node, 16);
7377 bool_V8HI_type_node = build_vector_type (bool_short_type_node, 8);
7378 bool_V4SI_type_node = build_vector_type (bool_int_type_node, 4);
7379 pixel_V8HI_type_node = build_vector_type (pixel_type_node, 8);
7381 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7382 get_identifier ("__vector unsigned char"),
7383 unsigned_V16QI_type_node));
7384 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7385 get_identifier ("__vector signed char"),
7387 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7388 get_identifier ("__vector __bool char"),
7389 bool_V16QI_type_node));
7391 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7392 get_identifier ("__vector unsigned short"),
7393 unsigned_V8HI_type_node));
7394 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7395 get_identifier ("__vector signed short"),
7397 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7398 get_identifier ("__vector __bool short"),
7399 bool_V8HI_type_node));
7401 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7402 get_identifier ("__vector unsigned int"),
7403 unsigned_V4SI_type_node));
7404 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7405 get_identifier ("__vector signed int"),
7407 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7408 get_identifier ("__vector __bool int"),
7409 bool_V4SI_type_node));
7411 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7412 get_identifier ("__vector float"),
7414 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7415 get_identifier ("__vector __pixel"),
7416 pixel_V8HI_type_node));
7419 spe_init_builtins ();
7421 altivec_init_builtins ();
7422 if (TARGET_ALTIVEC || TARGET_SPE)
7423 rs6000_common_init_builtins ();
7426 /* Search through a set of builtins and enable the mask bits.
7427 DESC is an array of builtins.
7428 SIZE is the total number of builtins.
7429 START is the builtin enum at which to start.
7430 END is the builtin enum at which to end. */
7432 enable_mask_for_builtins (struct builtin_description *desc, int size,
7433 enum rs6000_builtins start,
7434 enum rs6000_builtins end)
7438 for (i = 0; i < size; ++i)
7439 if (desc[i].code == start)
7445 for (; i < size; ++i)
7447 /* Flip all the bits on. */
7448 desc[i].mask = target_flags;
7449 if (desc[i].code == end)
7455 spe_init_builtins (void)
7457 tree endlink = void_list_node;
7458 tree puint_type_node = build_pointer_type (unsigned_type_node);
7459 tree pushort_type_node = build_pointer_type (short_unsigned_type_node);
7460 struct builtin_description *d;
7463 tree v2si_ftype_4_v2si
7464 = build_function_type
7465 (opaque_V2SI_type_node,
7466 tree_cons (NULL_TREE, opaque_V2SI_type_node,
7467 tree_cons (NULL_TREE, opaque_V2SI_type_node,
7468 tree_cons (NULL_TREE, opaque_V2SI_type_node,
7469 tree_cons (NULL_TREE, opaque_V2SI_type_node,
7472 tree v2sf_ftype_4_v2sf
7473 = build_function_type
7474 (opaque_V2SF_type_node,
7475 tree_cons (NULL_TREE, opaque_V2SF_type_node,
7476 tree_cons (NULL_TREE, opaque_V2SF_type_node,
7477 tree_cons (NULL_TREE, opaque_V2SF_type_node,
7478 tree_cons (NULL_TREE, opaque_V2SF_type_node,
7481 tree int_ftype_int_v2si_v2si
7482 = build_function_type
7484 tree_cons (NULL_TREE, integer_type_node,
7485 tree_cons (NULL_TREE, opaque_V2SI_type_node,
7486 tree_cons (NULL_TREE, opaque_V2SI_type_node,
7489 tree int_ftype_int_v2sf_v2sf
7490 = build_function_type
7492 tree_cons (NULL_TREE, integer_type_node,
7493 tree_cons (NULL_TREE, opaque_V2SF_type_node,
7494 tree_cons (NULL_TREE, opaque_V2SF_type_node,
7497 tree void_ftype_v2si_puint_int
7498 = build_function_type (void_type_node,
7499 tree_cons (NULL_TREE, opaque_V2SI_type_node,
7500 tree_cons (NULL_TREE, puint_type_node,
7501 tree_cons (NULL_TREE,
7505 tree void_ftype_v2si_puint_char
7506 = build_function_type (void_type_node,
7507 tree_cons (NULL_TREE, opaque_V2SI_type_node,
7508 tree_cons (NULL_TREE, puint_type_node,
7509 tree_cons (NULL_TREE,
7513 tree void_ftype_v2si_pv2si_int
7514 = build_function_type (void_type_node,
7515 tree_cons (NULL_TREE, opaque_V2SI_type_node,
7516 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
7517 tree_cons (NULL_TREE,
7521 tree void_ftype_v2si_pv2si_char
7522 = build_function_type (void_type_node,
7523 tree_cons (NULL_TREE, opaque_V2SI_type_node,
7524 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
7525 tree_cons (NULL_TREE,
7530 = build_function_type (void_type_node,
7531 tree_cons (NULL_TREE, integer_type_node, endlink));
7534 = build_function_type (integer_type_node, endlink);
7536 tree v2si_ftype_pv2si_int
7537 = build_function_type (opaque_V2SI_type_node,
7538 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
7539 tree_cons (NULL_TREE, integer_type_node,
7542 tree v2si_ftype_puint_int
7543 = build_function_type (opaque_V2SI_type_node,
7544 tree_cons (NULL_TREE, puint_type_node,
7545 tree_cons (NULL_TREE, integer_type_node,
7548 tree v2si_ftype_pushort_int
7549 = build_function_type (opaque_V2SI_type_node,
7550 tree_cons (NULL_TREE, pushort_type_node,
7551 tree_cons (NULL_TREE, integer_type_node,
7554 tree v2si_ftype_signed_char
7555 = build_function_type (opaque_V2SI_type_node,
7556 tree_cons (NULL_TREE, signed_char_type_node,
7559 /* The initialization of the simple binary and unary builtins is
7560 done in rs6000_common_init_builtins, but we have to enable the
7561 mask bits here manually because we have run out of `target_flags'
7562 bits. We really need to redesign this mask business. */
7564 enable_mask_for_builtins ((struct builtin_description *) bdesc_2arg,
7565 ARRAY_SIZE (bdesc_2arg),
7568 enable_mask_for_builtins ((struct builtin_description *) bdesc_1arg,
7569 ARRAY_SIZE (bdesc_1arg),
7571 SPE_BUILTIN_EVSUBFUSIAAW);
7572 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_predicates,
7573 ARRAY_SIZE (bdesc_spe_predicates),
7574 SPE_BUILTIN_EVCMPEQ,
7575 SPE_BUILTIN_EVFSTSTLT);
7576 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_evsel,
7577 ARRAY_SIZE (bdesc_spe_evsel),
7578 SPE_BUILTIN_EVSEL_CMPGTS,
7579 SPE_BUILTIN_EVSEL_FSTSTEQ);
7581 (*lang_hooks.decls.pushdecl)
7582 (build_decl (TYPE_DECL, get_identifier ("__ev64_opaque__"),
7583 opaque_V2SI_type_node));
7585 /* Initialize irregular SPE builtins. */
7587 def_builtin (target_flags, "__builtin_spe_mtspefscr", void_ftype_int, SPE_BUILTIN_MTSPEFSCR);
7588 def_builtin (target_flags, "__builtin_spe_mfspefscr", int_ftype_void, SPE_BUILTIN_MFSPEFSCR);
7589 def_builtin (target_flags, "__builtin_spe_evstddx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDDX);
7590 def_builtin (target_flags, "__builtin_spe_evstdhx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDHX);
7591 def_builtin (target_flags, "__builtin_spe_evstdwx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDWX);
7592 def_builtin (target_flags, "__builtin_spe_evstwhex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHEX);
7593 def_builtin (target_flags, "__builtin_spe_evstwhox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHOX);
7594 def_builtin (target_flags, "__builtin_spe_evstwwex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWEX);
7595 def_builtin (target_flags, "__builtin_spe_evstwwox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWOX);
7596 def_builtin (target_flags, "__builtin_spe_evstdd", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDD);
7597 def_builtin (target_flags, "__builtin_spe_evstdh", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDH);
7598 def_builtin (target_flags, "__builtin_spe_evstdw", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDW);
7599 def_builtin (target_flags, "__builtin_spe_evstwhe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHE);
7600 def_builtin (target_flags, "__builtin_spe_evstwho", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHO);
7601 def_builtin (target_flags, "__builtin_spe_evstwwe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWE);
7602 def_builtin (target_flags, "__builtin_spe_evstwwo", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWO);
7603 def_builtin (target_flags, "__builtin_spe_evsplatfi", v2si_ftype_signed_char, SPE_BUILTIN_EVSPLATFI);
7604 def_builtin (target_flags, "__builtin_spe_evsplati", v2si_ftype_signed_char, SPE_BUILTIN_EVSPLATI);
7607 def_builtin (target_flags, "__builtin_spe_evlddx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDDX);
7608 def_builtin (target_flags, "__builtin_spe_evldwx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDWX);
7609 def_builtin (target_flags, "__builtin_spe_evldhx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDHX);
7610 def_builtin (target_flags, "__builtin_spe_evlwhex", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHEX);
7611 def_builtin (target_flags, "__builtin_spe_evlwhoux", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOUX);
7612 def_builtin (target_flags, "__builtin_spe_evlwhosx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOSX);
7613 def_builtin (target_flags, "__builtin_spe_evlwwsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLATX);
7614 def_builtin (target_flags, "__builtin_spe_evlwhsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLATX);
7615 def_builtin (target_flags, "__builtin_spe_evlhhesplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLATX);
7616 def_builtin (target_flags, "__builtin_spe_evlhhousplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLATX);
7617 def_builtin (target_flags, "__builtin_spe_evlhhossplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLATX);
7618 def_builtin (target_flags, "__builtin_spe_evldd", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDD);
7619 def_builtin (target_flags, "__builtin_spe_evldw", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDW);
7620 def_builtin (target_flags, "__builtin_spe_evldh", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDH);
7621 def_builtin (target_flags, "__builtin_spe_evlhhesplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLAT);
7622 def_builtin (target_flags, "__builtin_spe_evlhhossplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLAT);
7623 def_builtin (target_flags, "__builtin_spe_evlhhousplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLAT);
7624 def_builtin (target_flags, "__builtin_spe_evlwhe", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHE);
7625 def_builtin (target_flags, "__builtin_spe_evlwhos", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOS);
7626 def_builtin (target_flags, "__builtin_spe_evlwhou", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOU);
7627 def_builtin (target_flags, "__builtin_spe_evlwhsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLAT);
7628 def_builtin (target_flags, "__builtin_spe_evlwwsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLAT);
7631 d = (struct builtin_description *) bdesc_spe_predicates;
7632 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, d++)
7636 switch (insn_data[d->icode].operand[1].mode)
7639 type = int_ftype_int_v2si_v2si;
7642 type = int_ftype_int_v2sf_v2sf;
7648 def_builtin (d->mask, d->name, type, d->code);
7651 /* Evsel predicates. */
7652 d = (struct builtin_description *) bdesc_spe_evsel;
7653 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, d++)
7657 switch (insn_data[d->icode].operand[1].mode)
7660 type = v2si_ftype_4_v2si;
7663 type = v2sf_ftype_4_v2sf;
7669 def_builtin (d->mask, d->name, type, d->code);
7674 altivec_init_builtins (void)
7676 struct builtin_description *d;
7677 struct builtin_description_predicates *dp;
7679 tree pfloat_type_node = build_pointer_type (float_type_node);
7680 tree pint_type_node = build_pointer_type (integer_type_node);
7681 tree pshort_type_node = build_pointer_type (short_integer_type_node);
7682 tree pchar_type_node = build_pointer_type (char_type_node);
7684 tree pvoid_type_node = build_pointer_type (void_type_node);
7686 tree pcfloat_type_node = build_pointer_type (build_qualified_type (float_type_node, TYPE_QUAL_CONST));
7687 tree pcint_type_node = build_pointer_type (build_qualified_type (integer_type_node, TYPE_QUAL_CONST));
7688 tree pcshort_type_node = build_pointer_type (build_qualified_type (short_integer_type_node, TYPE_QUAL_CONST));
7689 tree pcchar_type_node = build_pointer_type (build_qualified_type (char_type_node, TYPE_QUAL_CONST));
7691 tree pcvoid_type_node = build_pointer_type (build_qualified_type (void_type_node, TYPE_QUAL_CONST));
7693 tree int_ftype_opaque
7694 = build_function_type_list (integer_type_node,
7695 opaque_V4SI_type_node, NULL_TREE);
7697 tree opaque_ftype_opaque_int
7698 = build_function_type_list (opaque_V4SI_type_node,
7699 opaque_V4SI_type_node, integer_type_node, NULL_TREE);
7700 tree opaque_ftype_opaque_opaque_int
7701 = build_function_type_list (opaque_V4SI_type_node,
7702 opaque_V4SI_type_node, opaque_V4SI_type_node,
7703 integer_type_node, NULL_TREE);
7704 tree int_ftype_int_opaque_opaque
7705 = build_function_type_list (integer_type_node,
7706 integer_type_node, opaque_V4SI_type_node,
7707 opaque_V4SI_type_node, NULL_TREE);
7708 tree int_ftype_int_v4si_v4si
7709 = build_function_type_list (integer_type_node,
7710 integer_type_node, V4SI_type_node,
7711 V4SI_type_node, NULL_TREE);
7712 tree v4sf_ftype_pcfloat
7713 = build_function_type_list (V4SF_type_node, pcfloat_type_node, NULL_TREE);
7714 tree void_ftype_pfloat_v4sf
7715 = build_function_type_list (void_type_node,
7716 pfloat_type_node, V4SF_type_node, NULL_TREE);
7717 tree v4si_ftype_pcint
7718 = build_function_type_list (V4SI_type_node, pcint_type_node, NULL_TREE);
7719 tree void_ftype_pint_v4si
7720 = build_function_type_list (void_type_node,
7721 pint_type_node, V4SI_type_node, NULL_TREE);
7722 tree v8hi_ftype_pcshort
7723 = build_function_type_list (V8HI_type_node, pcshort_type_node, NULL_TREE);
7724 tree void_ftype_pshort_v8hi
7725 = build_function_type_list (void_type_node,
7726 pshort_type_node, V8HI_type_node, NULL_TREE);
7727 tree v16qi_ftype_pcchar
7728 = build_function_type_list (V16QI_type_node, pcchar_type_node, NULL_TREE);
7729 tree void_ftype_pchar_v16qi
7730 = build_function_type_list (void_type_node,
7731 pchar_type_node, V16QI_type_node, NULL_TREE);
7732 tree void_ftype_v4si
7733 = build_function_type_list (void_type_node, V4SI_type_node, NULL_TREE);
7734 tree v8hi_ftype_void
7735 = build_function_type (V8HI_type_node, void_list_node);
7736 tree void_ftype_void
7737 = build_function_type (void_type_node, void_list_node);
7739 = build_function_type_list (void_type_node, integer_type_node, NULL_TREE);
7741 tree opaque_ftype_long_pcvoid
7742 = build_function_type_list (opaque_V4SI_type_node,
7743 long_integer_type_node, pcvoid_type_node, NULL_TREE);
7744 tree v16qi_ftype_long_pcvoid
7745 = build_function_type_list (V16QI_type_node,
7746 long_integer_type_node, pcvoid_type_node, NULL_TREE);
7747 tree v8hi_ftype_long_pcvoid
7748 = build_function_type_list (V8HI_type_node,
7749 long_integer_type_node, pcvoid_type_node, NULL_TREE);
7750 tree v4si_ftype_long_pcvoid
7751 = build_function_type_list (V4SI_type_node,
7752 long_integer_type_node, pcvoid_type_node, NULL_TREE);
7754 tree void_ftype_opaque_long_pvoid
7755 = build_function_type_list (void_type_node,
7756 opaque_V4SI_type_node, long_integer_type_node,
7757 pvoid_type_node, NULL_TREE);
7758 tree void_ftype_v4si_long_pvoid
7759 = build_function_type_list (void_type_node,
7760 V4SI_type_node, long_integer_type_node,
7761 pvoid_type_node, NULL_TREE);
7762 tree void_ftype_v16qi_long_pvoid
7763 = build_function_type_list (void_type_node,
7764 V16QI_type_node, long_integer_type_node,
7765 pvoid_type_node, NULL_TREE);
7766 tree void_ftype_v8hi_long_pvoid
7767 = build_function_type_list (void_type_node,
7768 V8HI_type_node, long_integer_type_node,
7769 pvoid_type_node, NULL_TREE);
7770 tree int_ftype_int_v8hi_v8hi
7771 = build_function_type_list (integer_type_node,
7772 integer_type_node, V8HI_type_node,
7773 V8HI_type_node, NULL_TREE);
7774 tree int_ftype_int_v16qi_v16qi
7775 = build_function_type_list (integer_type_node,
7776 integer_type_node, V16QI_type_node,
7777 V16QI_type_node, NULL_TREE);
7778 tree int_ftype_int_v4sf_v4sf
7779 = build_function_type_list (integer_type_node,
7780 integer_type_node, V4SF_type_node,
7781 V4SF_type_node, NULL_TREE);
7782 tree v4si_ftype_v4si
7783 = build_function_type_list (V4SI_type_node, V4SI_type_node, NULL_TREE);
7784 tree v8hi_ftype_v8hi
7785 = build_function_type_list (V8HI_type_node, V8HI_type_node, NULL_TREE);
7786 tree v16qi_ftype_v16qi
7787 = build_function_type_list (V16QI_type_node, V16QI_type_node, NULL_TREE);
7788 tree v4sf_ftype_v4sf
7789 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
7790 tree void_ftype_pcvoid_int_int
7791 = build_function_type_list (void_type_node,
7792 pcvoid_type_node, integer_type_node,
7793 integer_type_node, NULL_TREE);
7795 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pcfloat,
7796 ALTIVEC_BUILTIN_LD_INTERNAL_4sf);
7797 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf,
7798 ALTIVEC_BUILTIN_ST_INTERNAL_4sf);
7799 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4si", v4si_ftype_pcint,
7800 ALTIVEC_BUILTIN_LD_INTERNAL_4si);
7801 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si,
7802 ALTIVEC_BUILTIN_ST_INTERNAL_4si);
7803 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pcshort,
7804 ALTIVEC_BUILTIN_LD_INTERNAL_8hi);
7805 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi,
7806 ALTIVEC_BUILTIN_ST_INTERNAL_8hi);
7807 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pcchar,
7808 ALTIVEC_BUILTIN_LD_INTERNAL_16qi);
7809 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi,
7810 ALTIVEC_BUILTIN_ST_INTERNAL_16qi);
7811 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mtvscr", void_ftype_v4si, ALTIVEC_BUILTIN_MTVSCR);
7812 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mfvscr", v8hi_ftype_void, ALTIVEC_BUILTIN_MFVSCR);
7813 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dssall", void_ftype_void, ALTIVEC_BUILTIN_DSSALL);
7814 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dss", void_ftype_int, ALTIVEC_BUILTIN_DSS);
7815 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVSL);
7816 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsr", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVSR);
7817 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvebx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEBX);
7818 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvehx", v8hi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEHX);
7819 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvewx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEWX);
7820 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvxl", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVXL);
7821 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVX);
7822 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvx", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVX);
7823 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvewx", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVEWX);
7824 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvxl", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVXL);
7825 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvebx", void_ftype_v16qi_long_pvoid, ALTIVEC_BUILTIN_STVEBX);
7826 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvehx", void_ftype_v8hi_long_pvoid, ALTIVEC_BUILTIN_STVEHX);
7827 def_builtin (MASK_ALTIVEC, "__builtin_vec_ld", opaque_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LD);
7828 def_builtin (MASK_ALTIVEC, "__builtin_vec_lde", opaque_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LDE);
7829 def_builtin (MASK_ALTIVEC, "__builtin_vec_ldl", opaque_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LDL);
7830 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvsl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVSL);
7831 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvsr", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVSR);
7832 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvebx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVEBX);
7833 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvehx", v8hi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVEHX);
7834 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvewx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVEWX);
7835 def_builtin (MASK_ALTIVEC, "__builtin_vec_st", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_ST);
7836 def_builtin (MASK_ALTIVEC, "__builtin_vec_ste", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STE);
7837 def_builtin (MASK_ALTIVEC, "__builtin_vec_stl", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STL);
7838 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvewx", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STVEWX);
7839 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvebx", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STVEBX);
7840 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvehx", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STVEHX);
7842 def_builtin (MASK_ALTIVEC, "__builtin_vec_step", int_ftype_opaque, ALTIVEC_BUILTIN_VEC_STEP);
7844 def_builtin (MASK_ALTIVEC, "__builtin_vec_sld", opaque_ftype_opaque_opaque_int, ALTIVEC_BUILTIN_VEC_SLD);
7845 def_builtin (MASK_ALTIVEC, "__builtin_vec_splat", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_SPLAT);
7846 def_builtin (MASK_ALTIVEC, "__builtin_vec_vspltw", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VSPLTW);
7847 def_builtin (MASK_ALTIVEC, "__builtin_vec_vsplth", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VSPLTH);
7848 def_builtin (MASK_ALTIVEC, "__builtin_vec_vspltb", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VSPLTB);
7849 def_builtin (MASK_ALTIVEC, "__builtin_vec_ctf", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_CTF);
7850 def_builtin (MASK_ALTIVEC, "__builtin_vec_vcfsx", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VCFSX);
7851 def_builtin (MASK_ALTIVEC, "__builtin_vec_vcfux", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VCFUX);
7852 def_builtin (MASK_ALTIVEC, "__builtin_vec_cts", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_CTS);
7853 def_builtin (MASK_ALTIVEC, "__builtin_vec_ctu", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_CTU);
7855 /* Add the DST variants. */
7856 d = (struct builtin_description *) bdesc_dst;
7857 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
7858 def_builtin (d->mask, d->name, void_ftype_pcvoid_int_int, d->code);
7860 /* Initialize the predicates. */
7861 dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
7862 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
7864 enum machine_mode mode1;
7866 bool is_overloaded = dp->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
7867 && dp->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
7872 mode1 = insn_data[dp->icode].operand[1].mode;
7877 type = int_ftype_int_opaque_opaque;
7880 type = int_ftype_int_v4si_v4si;
7883 type = int_ftype_int_v8hi_v8hi;
7886 type = int_ftype_int_v16qi_v16qi;
7889 type = int_ftype_int_v4sf_v4sf;
7895 def_builtin (dp->mask, dp->name, type, dp->code);
7898 /* Initialize the abs* operators. */
7899 d = (struct builtin_description *) bdesc_abs;
7900 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
7902 enum machine_mode mode0;
7905 mode0 = insn_data[d->icode].operand[0].mode;
7910 type = v4si_ftype_v4si;
7913 type = v8hi_ftype_v8hi;
7916 type = v16qi_ftype_v16qi;
7919 type = v4sf_ftype_v4sf;
7925 def_builtin (d->mask, d->name, type, d->code);
7932 /* Initialize target builtin that implements
7933 targetm.vectorize.builtin_mask_for_load. */
7935 decl = lang_hooks.builtin_function ("__builtin_altivec_mask_for_load",
7936 v16qi_ftype_long_pcvoid,
7937 ALTIVEC_BUILTIN_MASK_FOR_LOAD,
7939 tree_cons (get_identifier ("const"),
7940 NULL_TREE, NULL_TREE));
7941 /* Record the decl. Will be used by rs6000_builtin_mask_for_load. */
7942 altivec_builtin_mask_for_load = decl;
7947 rs6000_common_init_builtins (void)
7949 struct builtin_description *d;
7952 tree v4sf_ftype_v4sf_v4sf_v16qi
7953 = build_function_type_list (V4SF_type_node,
7954 V4SF_type_node, V4SF_type_node,
7955 V16QI_type_node, NULL_TREE);
7956 tree v4si_ftype_v4si_v4si_v16qi
7957 = build_function_type_list (V4SI_type_node,
7958 V4SI_type_node, V4SI_type_node,
7959 V16QI_type_node, NULL_TREE);
7960 tree v8hi_ftype_v8hi_v8hi_v16qi
7961 = build_function_type_list (V8HI_type_node,
7962 V8HI_type_node, V8HI_type_node,
7963 V16QI_type_node, NULL_TREE);
7964 tree v16qi_ftype_v16qi_v16qi_v16qi
7965 = build_function_type_list (V16QI_type_node,
7966 V16QI_type_node, V16QI_type_node,
7967 V16QI_type_node, NULL_TREE);
7969 = build_function_type_list (V4SI_type_node, integer_type_node, NULL_TREE);
7971 = build_function_type_list (V8HI_type_node, integer_type_node, NULL_TREE);
7972 tree v16qi_ftype_int
7973 = build_function_type_list (V16QI_type_node, integer_type_node, NULL_TREE);
7974 tree v8hi_ftype_v16qi
7975 = build_function_type_list (V8HI_type_node, V16QI_type_node, NULL_TREE);
7976 tree v4sf_ftype_v4sf
7977 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
7979 tree v2si_ftype_v2si_v2si
7980 = build_function_type_list (opaque_V2SI_type_node,
7981 opaque_V2SI_type_node,
7982 opaque_V2SI_type_node, NULL_TREE);
7984 tree v2sf_ftype_v2sf_v2sf
7985 = build_function_type_list (opaque_V2SF_type_node,
7986 opaque_V2SF_type_node,
7987 opaque_V2SF_type_node, NULL_TREE);
7989 tree v2si_ftype_int_int
7990 = build_function_type_list (opaque_V2SI_type_node,
7991 integer_type_node, integer_type_node,
7994 tree opaque_ftype_opaque
7995 = build_function_type_list (opaque_V4SI_type_node,
7996 opaque_V4SI_type_node, NULL_TREE);
7998 tree v2si_ftype_v2si
7999 = build_function_type_list (opaque_V2SI_type_node,
8000 opaque_V2SI_type_node, NULL_TREE);
8002 tree v2sf_ftype_v2sf
8003 = build_function_type_list (opaque_V2SF_type_node,
8004 opaque_V2SF_type_node, NULL_TREE);
8006 tree v2sf_ftype_v2si
8007 = build_function_type_list (opaque_V2SF_type_node,
8008 opaque_V2SI_type_node, NULL_TREE);
8010 tree v2si_ftype_v2sf
8011 = build_function_type_list (opaque_V2SI_type_node,
8012 opaque_V2SF_type_node, NULL_TREE);
8014 tree v2si_ftype_v2si_char
8015 = build_function_type_list (opaque_V2SI_type_node,
8016 opaque_V2SI_type_node,
8017 char_type_node, NULL_TREE);
8019 tree v2si_ftype_int_char
8020 = build_function_type_list (opaque_V2SI_type_node,
8021 integer_type_node, char_type_node, NULL_TREE);
8023 tree v2si_ftype_char
8024 = build_function_type_list (opaque_V2SI_type_node,
8025 char_type_node, NULL_TREE);
8027 tree int_ftype_int_int
8028 = build_function_type_list (integer_type_node,
8029 integer_type_node, integer_type_node,
8032 tree opaque_ftype_opaque_opaque
8033 = build_function_type_list (opaque_V4SI_type_node,
8034 opaque_V4SI_type_node, opaque_V4SI_type_node, NULL_TREE);
8035 tree v4si_ftype_v4si_v4si
8036 = build_function_type_list (V4SI_type_node,
8037 V4SI_type_node, V4SI_type_node, NULL_TREE);
8038 tree v4sf_ftype_v4si_int
8039 = build_function_type_list (V4SF_type_node,
8040 V4SI_type_node, integer_type_node, NULL_TREE);
8041 tree v4si_ftype_v4sf_int
8042 = build_function_type_list (V4SI_type_node,
8043 V4SF_type_node, integer_type_node, NULL_TREE);
8044 tree v4si_ftype_v4si_int
8045 = build_function_type_list (V4SI_type_node,
8046 V4SI_type_node, integer_type_node, NULL_TREE);
8047 tree v8hi_ftype_v8hi_int
8048 = build_function_type_list (V8HI_type_node,
8049 V8HI_type_node, integer_type_node, NULL_TREE);
8050 tree v16qi_ftype_v16qi_int
8051 = build_function_type_list (V16QI_type_node,
8052 V16QI_type_node, integer_type_node, NULL_TREE);
8053 tree v16qi_ftype_v16qi_v16qi_int
8054 = build_function_type_list (V16QI_type_node,
8055 V16QI_type_node, V16QI_type_node,
8056 integer_type_node, NULL_TREE);
8057 tree v8hi_ftype_v8hi_v8hi_int
8058 = build_function_type_list (V8HI_type_node,
8059 V8HI_type_node, V8HI_type_node,
8060 integer_type_node, NULL_TREE);
8061 tree v4si_ftype_v4si_v4si_int
8062 = build_function_type_list (V4SI_type_node,
8063 V4SI_type_node, V4SI_type_node,
8064 integer_type_node, NULL_TREE);
8065 tree v4sf_ftype_v4sf_v4sf_int
8066 = build_function_type_list (V4SF_type_node,
8067 V4SF_type_node, V4SF_type_node,
8068 integer_type_node, NULL_TREE);
8069 tree v4sf_ftype_v4sf_v4sf
8070 = build_function_type_list (V4SF_type_node,
8071 V4SF_type_node, V4SF_type_node, NULL_TREE);
8072 tree opaque_ftype_opaque_opaque_opaque
8073 = build_function_type_list (opaque_V4SI_type_node,
8074 opaque_V4SI_type_node, opaque_V4SI_type_node,
8075 opaque_V4SI_type_node, NULL_TREE);
8076 tree v4sf_ftype_v4sf_v4sf_v4si
8077 = build_function_type_list (V4SF_type_node,
8078 V4SF_type_node, V4SF_type_node,
8079 V4SI_type_node, NULL_TREE);
8080 tree v4sf_ftype_v4sf_v4sf_v4sf
8081 = build_function_type_list (V4SF_type_node,
8082 V4SF_type_node, V4SF_type_node,
8083 V4SF_type_node, NULL_TREE);
8084 tree v4si_ftype_v4si_v4si_v4si
8085 = build_function_type_list (V4SI_type_node,
8086 V4SI_type_node, V4SI_type_node,
8087 V4SI_type_node, NULL_TREE);
8088 tree v8hi_ftype_v8hi_v8hi
8089 = build_function_type_list (V8HI_type_node,
8090 V8HI_type_node, V8HI_type_node, NULL_TREE);
8091 tree v8hi_ftype_v8hi_v8hi_v8hi
8092 = build_function_type_list (V8HI_type_node,
8093 V8HI_type_node, V8HI_type_node,
8094 V8HI_type_node, NULL_TREE);
8095 tree v4si_ftype_v8hi_v8hi_v4si
8096 = build_function_type_list (V4SI_type_node,
8097 V8HI_type_node, V8HI_type_node,
8098 V4SI_type_node, NULL_TREE);
8099 tree v4si_ftype_v16qi_v16qi_v4si
8100 = build_function_type_list (V4SI_type_node,
8101 V16QI_type_node, V16QI_type_node,
8102 V4SI_type_node, NULL_TREE);
8103 tree v16qi_ftype_v16qi_v16qi
8104 = build_function_type_list (V16QI_type_node,
8105 V16QI_type_node, V16QI_type_node, NULL_TREE);
8106 tree v4si_ftype_v4sf_v4sf
8107 = build_function_type_list (V4SI_type_node,
8108 V4SF_type_node, V4SF_type_node, NULL_TREE);
8109 tree v8hi_ftype_v16qi_v16qi
8110 = build_function_type_list (V8HI_type_node,
8111 V16QI_type_node, V16QI_type_node, NULL_TREE);
8112 tree v4si_ftype_v8hi_v8hi
8113 = build_function_type_list (V4SI_type_node,
8114 V8HI_type_node, V8HI_type_node, NULL_TREE);
8115 tree v8hi_ftype_v4si_v4si
8116 = build_function_type_list (V8HI_type_node,
8117 V4SI_type_node, V4SI_type_node, NULL_TREE);
8118 tree v16qi_ftype_v8hi_v8hi
8119 = build_function_type_list (V16QI_type_node,
8120 V8HI_type_node, V8HI_type_node, NULL_TREE);
8121 tree v4si_ftype_v16qi_v4si
8122 = build_function_type_list (V4SI_type_node,
8123 V16QI_type_node, V4SI_type_node, NULL_TREE);
8124 tree v4si_ftype_v16qi_v16qi
8125 = build_function_type_list (V4SI_type_node,
8126 V16QI_type_node, V16QI_type_node, NULL_TREE);
8127 tree v4si_ftype_v8hi_v4si
8128 = build_function_type_list (V4SI_type_node,
8129 V8HI_type_node, V4SI_type_node, NULL_TREE);
8130 tree v4si_ftype_v8hi
8131 = build_function_type_list (V4SI_type_node, V8HI_type_node, NULL_TREE);
8132 tree int_ftype_v4si_v4si
8133 = build_function_type_list (integer_type_node,
8134 V4SI_type_node, V4SI_type_node, NULL_TREE);
8135 tree int_ftype_v4sf_v4sf
8136 = build_function_type_list (integer_type_node,
8137 V4SF_type_node, V4SF_type_node, NULL_TREE);
8138 tree int_ftype_v16qi_v16qi
8139 = build_function_type_list (integer_type_node,
8140 V16QI_type_node, V16QI_type_node, NULL_TREE);
8141 tree int_ftype_v8hi_v8hi
8142 = build_function_type_list (integer_type_node,
8143 V8HI_type_node, V8HI_type_node, NULL_TREE);
8145 /* Add the simple ternary operators. */
8146 d = (struct builtin_description *) bdesc_3arg;
8147 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
8149 enum machine_mode mode0, mode1, mode2, mode3;
8151 bool is_overloaded = d->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
8152 && d->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
8163 if (d->name == 0 || d->icode == CODE_FOR_nothing)
8166 mode0 = insn_data[d->icode].operand[0].mode;
8167 mode1 = insn_data[d->icode].operand[1].mode;
8168 mode2 = insn_data[d->icode].operand[2].mode;
8169 mode3 = insn_data[d->icode].operand[3].mode;
8172 /* When all four are of the same mode. */
8173 if (mode0 == mode1 && mode1 == mode2 && mode2 == mode3)
8178 type = opaque_ftype_opaque_opaque_opaque;
8181 type = v4si_ftype_v4si_v4si_v4si;
8184 type = v4sf_ftype_v4sf_v4sf_v4sf;
8187 type = v8hi_ftype_v8hi_v8hi_v8hi;
8190 type = v16qi_ftype_v16qi_v16qi_v16qi;
8196 else if (mode0 == mode1 && mode1 == mode2 && mode3 == V16QImode)
8201 type = v4si_ftype_v4si_v4si_v16qi;
8204 type = v4sf_ftype_v4sf_v4sf_v16qi;
8207 type = v8hi_ftype_v8hi_v8hi_v16qi;
8210 type = v16qi_ftype_v16qi_v16qi_v16qi;
8216 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode
8217 && mode3 == V4SImode)
8218 type = v4si_ftype_v16qi_v16qi_v4si;
8219 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode
8220 && mode3 == V4SImode)
8221 type = v4si_ftype_v8hi_v8hi_v4si;
8222 else if (mode0 == V4SFmode && mode1 == V4SFmode && mode2 == V4SFmode
8223 && mode3 == V4SImode)
8224 type = v4sf_ftype_v4sf_v4sf_v4si;
8226 /* vchar, vchar, vchar, 4 bit literal. */
8227 else if (mode0 == V16QImode && mode1 == mode0 && mode2 == mode0
8229 type = v16qi_ftype_v16qi_v16qi_int;
8231 /* vshort, vshort, vshort, 4 bit literal. */
8232 else if (mode0 == V8HImode && mode1 == mode0 && mode2 == mode0
8234 type = v8hi_ftype_v8hi_v8hi_int;
8236 /* vint, vint, vint, 4 bit literal. */
8237 else if (mode0 == V4SImode && mode1 == mode0 && mode2 == mode0
8239 type = v4si_ftype_v4si_v4si_int;
8241 /* vfloat, vfloat, vfloat, 4 bit literal. */
8242 else if (mode0 == V4SFmode && mode1 == mode0 && mode2 == mode0
8244 type = v4sf_ftype_v4sf_v4sf_int;
8249 def_builtin (d->mask, d->name, type, d->code);
8252 /* Add the simple binary operators. */
8253 d = (struct builtin_description *) bdesc_2arg;
8254 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
8256 enum machine_mode mode0, mode1, mode2;
8258 bool is_overloaded = d->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
8259 && d->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
8269 if (d->name == 0 || d->icode == CODE_FOR_nothing)
8272 mode0 = insn_data[d->icode].operand[0].mode;
8273 mode1 = insn_data[d->icode].operand[1].mode;
8274 mode2 = insn_data[d->icode].operand[2].mode;
8277 /* When all three operands are of the same mode. */
8278 if (mode0 == mode1 && mode1 == mode2)
8283 type = opaque_ftype_opaque_opaque;
8286 type = v4sf_ftype_v4sf_v4sf;
8289 type = v4si_ftype_v4si_v4si;
8292 type = v16qi_ftype_v16qi_v16qi;
8295 type = v8hi_ftype_v8hi_v8hi;
8298 type = v2si_ftype_v2si_v2si;
8301 type = v2sf_ftype_v2sf_v2sf;
8304 type = int_ftype_int_int;
8311 /* A few other combos we really don't want to do manually. */
8313 /* vint, vfloat, vfloat. */
8314 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == V4SFmode)
8315 type = v4si_ftype_v4sf_v4sf;
8317 /* vshort, vchar, vchar. */
8318 else if (mode0 == V8HImode && mode1 == V16QImode && mode2 == V16QImode)
8319 type = v8hi_ftype_v16qi_v16qi;
8321 /* vint, vshort, vshort. */
8322 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode)
8323 type = v4si_ftype_v8hi_v8hi;
8325 /* vshort, vint, vint. */
8326 else if (mode0 == V8HImode && mode1 == V4SImode && mode2 == V4SImode)
8327 type = v8hi_ftype_v4si_v4si;
8329 /* vchar, vshort, vshort. */
8330 else if (mode0 == V16QImode && mode1 == V8HImode && mode2 == V8HImode)
8331 type = v16qi_ftype_v8hi_v8hi;
8333 /* vint, vchar, vint. */
8334 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V4SImode)
8335 type = v4si_ftype_v16qi_v4si;
8337 /* vint, vchar, vchar. */
8338 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode)
8339 type = v4si_ftype_v16qi_v16qi;
8341 /* vint, vshort, vint. */
8342 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V4SImode)
8343 type = v4si_ftype_v8hi_v4si;
8345 /* vint, vint, 5 bit literal. */
8346 else if (mode0 == V4SImode && mode1 == V4SImode && mode2 == QImode)
8347 type = v4si_ftype_v4si_int;
8349 /* vshort, vshort, 5 bit literal. */
8350 else if (mode0 == V8HImode && mode1 == V8HImode && mode2 == QImode)
8351 type = v8hi_ftype_v8hi_int;
8353 /* vchar, vchar, 5 bit literal. */
8354 else if (mode0 == V16QImode && mode1 == V16QImode && mode2 == QImode)
8355 type = v16qi_ftype_v16qi_int;
8357 /* vfloat, vint, 5 bit literal. */
8358 else if (mode0 == V4SFmode && mode1 == V4SImode && mode2 == QImode)
8359 type = v4sf_ftype_v4si_int;
8361 /* vint, vfloat, 5 bit literal. */
8362 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == QImode)
8363 type = v4si_ftype_v4sf_int;
8365 else if (mode0 == V2SImode && mode1 == SImode && mode2 == SImode)
8366 type = v2si_ftype_int_int;
8368 else if (mode0 == V2SImode && mode1 == V2SImode && mode2 == QImode)
8369 type = v2si_ftype_v2si_char;
8371 else if (mode0 == V2SImode && mode1 == SImode && mode2 == QImode)
8372 type = v2si_ftype_int_char;
8377 gcc_assert (mode0 == SImode);
8381 type = int_ftype_v4si_v4si;
8384 type = int_ftype_v4sf_v4sf;
8387 type = int_ftype_v16qi_v16qi;
8390 type = int_ftype_v8hi_v8hi;
8397 def_builtin (d->mask, d->name, type, d->code);
8400 /* Add the simple unary operators. */
8401 d = (struct builtin_description *) bdesc_1arg;
8402 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
8404 enum machine_mode mode0, mode1;
8406 bool is_overloaded = d->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
8407 && d->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
8416 if (d->name == 0 || d->icode == CODE_FOR_nothing)
8419 mode0 = insn_data[d->icode].operand[0].mode;
8420 mode1 = insn_data[d->icode].operand[1].mode;
8423 if (mode0 == V4SImode && mode1 == QImode)
8424 type = v4si_ftype_int;
8425 else if (mode0 == V8HImode && mode1 == QImode)
8426 type = v8hi_ftype_int;
8427 else if (mode0 == V16QImode && mode1 == QImode)
8428 type = v16qi_ftype_int;
8429 else if (mode0 == VOIDmode && mode1 == VOIDmode)
8430 type = opaque_ftype_opaque;
8431 else if (mode0 == V4SFmode && mode1 == V4SFmode)
8432 type = v4sf_ftype_v4sf;
8433 else if (mode0 == V8HImode && mode1 == V16QImode)
8434 type = v8hi_ftype_v16qi;
8435 else if (mode0 == V4SImode && mode1 == V8HImode)
8436 type = v4si_ftype_v8hi;
8437 else if (mode0 == V2SImode && mode1 == V2SImode)
8438 type = v2si_ftype_v2si;
8439 else if (mode0 == V2SFmode && mode1 == V2SFmode)
8440 type = v2sf_ftype_v2sf;
8441 else if (mode0 == V2SFmode && mode1 == V2SImode)
8442 type = v2sf_ftype_v2si;
8443 else if (mode0 == V2SImode && mode1 == V2SFmode)
8444 type = v2si_ftype_v2sf;
8445 else if (mode0 == V2SImode && mode1 == QImode)
8446 type = v2si_ftype_char;
8450 def_builtin (d->mask, d->name, type, d->code);
8455 rs6000_init_libfuncs (void)
8457 if (!TARGET_HARD_FLOAT)
8460 if (DEFAULT_ABI != ABI_V4)
8462 if (TARGET_XCOFF && ! TARGET_POWER2 && ! TARGET_POWERPC)
8464 /* AIX library routines for float->int conversion. */
8465 set_conv_libfunc (sfix_optab, SImode, DFmode, "__itrunc");
8466 set_conv_libfunc (ufix_optab, SImode, DFmode, "__uitrunc");
8467 set_conv_libfunc (sfix_optab, SImode, TFmode, "_qitrunc");
8468 set_conv_libfunc (ufix_optab, SImode, TFmode, "_quitrunc");
8471 /* AIX/Darwin/64-bit Linux quad floating point routines. */
8472 if (!TARGET_XL_COMPAT)
8474 set_optab_libfunc (add_optab, TFmode, "__gcc_qadd");
8475 set_optab_libfunc (sub_optab, TFmode, "__gcc_qsub");
8476 set_optab_libfunc (smul_optab, TFmode, "__gcc_qmul");
8477 set_optab_libfunc (sdiv_optab, TFmode, "__gcc_qdiv");
8481 set_optab_libfunc (add_optab, TFmode, "_xlqadd");
8482 set_optab_libfunc (sub_optab, TFmode, "_xlqsub");
8483 set_optab_libfunc (smul_optab, TFmode, "_xlqmul");
8484 set_optab_libfunc (sdiv_optab, TFmode, "_xlqdiv");
8489 /* 32-bit SVR4 quad floating point routines. */
8491 set_optab_libfunc (add_optab, TFmode, "_q_add");
8492 set_optab_libfunc (sub_optab, TFmode, "_q_sub");
8493 set_optab_libfunc (neg_optab, TFmode, "_q_neg");
8494 set_optab_libfunc (smul_optab, TFmode, "_q_mul");
8495 set_optab_libfunc (sdiv_optab, TFmode, "_q_div");
8496 if (TARGET_PPC_GPOPT || TARGET_POWER2)
8497 set_optab_libfunc (sqrt_optab, TFmode, "_q_sqrt");
8499 set_optab_libfunc (eq_optab, TFmode, "_q_feq");
8500 set_optab_libfunc (ne_optab, TFmode, "_q_fne");
8501 set_optab_libfunc (gt_optab, TFmode, "_q_fgt");
8502 set_optab_libfunc (ge_optab, TFmode, "_q_fge");
8503 set_optab_libfunc (lt_optab, TFmode, "_q_flt");
8504 set_optab_libfunc (le_optab, TFmode, "_q_fle");
8506 set_conv_libfunc (sext_optab, TFmode, SFmode, "_q_stoq");
8507 set_conv_libfunc (sext_optab, TFmode, DFmode, "_q_dtoq");
8508 set_conv_libfunc (trunc_optab, SFmode, TFmode, "_q_qtos");
8509 set_conv_libfunc (trunc_optab, DFmode, TFmode, "_q_qtod");
8510 set_conv_libfunc (sfix_optab, SImode, TFmode, "_q_qtoi");
8511 set_conv_libfunc (ufix_optab, SImode, TFmode, "_q_qtou");
8512 set_conv_libfunc (sfloat_optab, TFmode, SImode, "_q_itoq");
8517 /* Expand a block clear operation, and return 1 if successful. Return 0
8518 if we should let the compiler generate normal code.
8520 operands[0] is the destination
8521 operands[1] is the length
8522 operands[2] is the alignment */
8525 expand_block_clear (rtx operands[])
8527 rtx orig_dest = operands[0];
8528 rtx bytes_rtx = operands[1];
8529 rtx align_rtx = operands[2];
8530 bool constp = (GET_CODE (bytes_rtx) == CONST_INT);
8531 HOST_WIDE_INT align;
8532 HOST_WIDE_INT bytes;
8537 /* If this is not a fixed size move, just call memcpy */
8541 /* This must be a fixed size alignment */
8542 gcc_assert (GET_CODE (align_rtx) == CONST_INT);
8543 align = INTVAL (align_rtx) * BITS_PER_UNIT;
8545 /* Anything to clear? */
8546 bytes = INTVAL (bytes_rtx);
8550 /* Use the builtin memset after a point, to avoid huge code bloat.
8551 When optimize_size, avoid any significant code bloat; calling
8552 memset is about 4 instructions, so allow for one instruction to
8553 load zero and three to do clearing. */
8554 if (TARGET_ALTIVEC && align >= 128)
8556 else if (TARGET_POWERPC64 && align >= 32)
8561 if (optimize_size && bytes > 3 * clear_step)
8563 if (! optimize_size && bytes > 8 * clear_step)
8566 for (offset = 0; bytes > 0; offset += clear_bytes, bytes -= clear_bytes)
8568 enum machine_mode mode = BLKmode;
8571 if (bytes >= 16 && TARGET_ALTIVEC && align >= 128)
8576 else if (bytes >= 8 && TARGET_POWERPC64
8577 /* 64-bit loads and stores require word-aligned
8579 && (align >= 64 || (!STRICT_ALIGNMENT && align >= 32)))
8584 else if (bytes >= 4 && (align >= 32 || !STRICT_ALIGNMENT))
8585 { /* move 4 bytes */
8589 else if (bytes == 2 && (align >= 16 || !STRICT_ALIGNMENT))
8590 { /* move 2 bytes */
8594 else /* move 1 byte at a time */
8600 dest = adjust_address (orig_dest, mode, offset);
8602 emit_move_insn (dest, CONST0_RTX (mode));
8609 /* Expand a block move operation, and return 1 if successful. Return 0
8610 if we should let the compiler generate normal code.
8612 operands[0] is the destination
8613 operands[1] is the source
8614 operands[2] is the length
8615 operands[3] is the alignment */
8617 #define MAX_MOVE_REG 4
8620 expand_block_move (rtx operands[])
8622 rtx orig_dest = operands[0];
8623 rtx orig_src = operands[1];
8624 rtx bytes_rtx = operands[2];
8625 rtx align_rtx = operands[3];
8626 int constp = (GET_CODE (bytes_rtx) == CONST_INT);
8631 rtx stores[MAX_MOVE_REG];
8634 /* If this is not a fixed size move, just call memcpy */
8638 /* This must be a fixed size alignment */
8639 gcc_assert (GET_CODE (align_rtx) == CONST_INT);
8640 align = INTVAL (align_rtx) * BITS_PER_UNIT;
8642 /* Anything to move? */
8643 bytes = INTVAL (bytes_rtx);
8647 /* store_one_arg depends on expand_block_move to handle at least the size of
8648 reg_parm_stack_space. */
8649 if (bytes > (TARGET_POWERPC64 ? 64 : 32))
8652 for (offset = 0; bytes > 0; offset += move_bytes, bytes -= move_bytes)
8655 rtx (*movmemsi) (rtx, rtx, rtx, rtx);
8656 rtx (*mov) (rtx, rtx);
8658 enum machine_mode mode = BLKmode;
8661 /* Altivec first, since it will be faster than a string move
8662 when it applies, and usually not significantly larger. */
8663 if (TARGET_ALTIVEC && bytes >= 16 && align >= 128)
8667 gen_func.mov = gen_movv4si;
8669 else if (TARGET_STRING
8670 && bytes > 24 /* move up to 32 bytes at a time */
8678 && ! fixed_regs[12])
8680 move_bytes = (bytes > 32) ? 32 : bytes;
8681 gen_func.movmemsi = gen_movmemsi_8reg;
8683 else if (TARGET_STRING
8684 && bytes > 16 /* move up to 24 bytes at a time */
8690 && ! fixed_regs[10])
8692 move_bytes = (bytes > 24) ? 24 : bytes;
8693 gen_func.movmemsi = gen_movmemsi_6reg;
8695 else if (TARGET_STRING
8696 && bytes > 8 /* move up to 16 bytes at a time */
8702 move_bytes = (bytes > 16) ? 16 : bytes;
8703 gen_func.movmemsi = gen_movmemsi_4reg;
8705 else if (bytes >= 8 && TARGET_POWERPC64
8706 /* 64-bit loads and stores require word-aligned
8708 && (align >= 64 || (!STRICT_ALIGNMENT && align >= 32)))
8712 gen_func.mov = gen_movdi;
8714 else if (TARGET_STRING && bytes > 4 && !TARGET_POWERPC64)
8715 { /* move up to 8 bytes at a time */
8716 move_bytes = (bytes > 8) ? 8 : bytes;
8717 gen_func.movmemsi = gen_movmemsi_2reg;
8719 else if (bytes >= 4 && (align >= 32 || !STRICT_ALIGNMENT))
8720 { /* move 4 bytes */
8723 gen_func.mov = gen_movsi;
8725 else if (bytes == 2 && (align >= 16 || !STRICT_ALIGNMENT))
8726 { /* move 2 bytes */
8729 gen_func.mov = gen_movhi;
8731 else if (TARGET_STRING && bytes > 1)
8732 { /* move up to 4 bytes at a time */
8733 move_bytes = (bytes > 4) ? 4 : bytes;
8734 gen_func.movmemsi = gen_movmemsi_1reg;
8736 else /* move 1 byte at a time */
8740 gen_func.mov = gen_movqi;
8743 src = adjust_address (orig_src, mode, offset);
8744 dest = adjust_address (orig_dest, mode, offset);
8746 if (mode != BLKmode)
8748 rtx tmp_reg = gen_reg_rtx (mode);
8750 emit_insn ((*gen_func.mov) (tmp_reg, src));
8751 stores[num_reg++] = (*gen_func.mov) (dest, tmp_reg);
8754 if (mode == BLKmode || num_reg >= MAX_MOVE_REG || bytes == move_bytes)
8757 for (i = 0; i < num_reg; i++)
8758 emit_insn (stores[i]);
8762 if (mode == BLKmode)
8764 /* Move the address into scratch registers. The movmemsi
8765 patterns require zero offset. */
8766 if (!REG_P (XEXP (src, 0)))
8768 rtx src_reg = copy_addr_to_reg (XEXP (src, 0));
8769 src = replace_equiv_address (src, src_reg);
8771 set_mem_size (src, GEN_INT (move_bytes));
8773 if (!REG_P (XEXP (dest, 0)))
8775 rtx dest_reg = copy_addr_to_reg (XEXP (dest, 0));
8776 dest = replace_equiv_address (dest, dest_reg);
8778 set_mem_size (dest, GEN_INT (move_bytes));
8780 emit_insn ((*gen_func.movmemsi) (dest, src,
8781 GEN_INT (move_bytes & 31),
8790 /* Return a string to perform a load_multiple operation.
8791 operands[0] is the vector.
8792 operands[1] is the source address.
8793 operands[2] is the first destination register. */
8796 rs6000_output_load_multiple (rtx operands[3])
8798 /* We have to handle the case where the pseudo used to contain the address
8799 is assigned to one of the output registers. */
8801 int words = XVECLEN (operands[0], 0);
8804 if (XVECLEN (operands[0], 0) == 1)
8805 return "{l|lwz} %2,0(%1)";
8807 for (i = 0; i < words; i++)
8808 if (refers_to_regno_p (REGNO (operands[2]) + i,
8809 REGNO (operands[2]) + i + 1, operands[1], 0))
8813 xop[0] = GEN_INT (4 * (words-1));
8814 xop[1] = operands[1];
8815 xop[2] = operands[2];
8816 output_asm_insn ("{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,%0(%1)", xop);
8821 xop[0] = GEN_INT (4 * (words-1));
8822 xop[1] = operands[1];
8823 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
8824 output_asm_insn ("{cal %1,4(%1)|addi %1,%1,4}\n\t{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,-4(%1)", xop);
8829 for (j = 0; j < words; j++)
8832 xop[0] = GEN_INT (j * 4);
8833 xop[1] = operands[1];
8834 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + j);
8835 output_asm_insn ("{l|lwz} %2,%0(%1)", xop);
8837 xop[0] = GEN_INT (i * 4);
8838 xop[1] = operands[1];
8839 output_asm_insn ("{l|lwz} %1,%0(%1)", xop);
8844 return "{lsi|lswi} %2,%1,%N0";
8848 /* A validation routine: say whether CODE, a condition code, and MODE
8849 match. The other alternatives either don't make sense or should
8850 never be generated. */
8853 validate_condition_mode (enum rtx_code code, enum machine_mode mode)
8855 gcc_assert ((GET_RTX_CLASS (code) == RTX_COMPARE
8856 || GET_RTX_CLASS (code) == RTX_COMM_COMPARE)
8857 && GET_MODE_CLASS (mode) == MODE_CC);
8859 /* These don't make sense. */
8860 gcc_assert ((code != GT && code != LT && code != GE && code != LE)
8861 || mode != CCUNSmode);
8863 gcc_assert ((code != GTU && code != LTU && code != GEU && code != LEU)
8864 || mode == CCUNSmode);
8866 gcc_assert (mode == CCFPmode
8867 || (code != ORDERED && code != UNORDERED
8868 && code != UNEQ && code != LTGT
8869 && code != UNGT && code != UNLT
8870 && code != UNGE && code != UNLE));
8872 /* These should never be generated except for
8873 flag_finite_math_only. */
8874 gcc_assert (mode != CCFPmode
8875 || flag_finite_math_only
8876 || (code != LE && code != GE
8877 && code != UNEQ && code != LTGT
8878 && code != UNGT && code != UNLT));
8880 /* These are invalid; the information is not there. */
8881 gcc_assert (mode != CCEQmode || code == EQ || code == NE);
8885 /* Return 1 if ANDOP is a mask that has no bits on that are not in the
8886 mask required to convert the result of a rotate insn into a shift
8887 left insn of SHIFTOP bits. Both are known to be SImode CONST_INT. */
8890 includes_lshift_p (rtx shiftop, rtx andop)
8892 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
8894 shift_mask <<= INTVAL (shiftop);
8896 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
8899 /* Similar, but for right shift. */
8902 includes_rshift_p (rtx shiftop, rtx andop)
8904 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
8906 shift_mask >>= INTVAL (shiftop);
8908 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
8911 /* Return 1 if ANDOP is a mask suitable for use with an rldic insn
8912 to perform a left shift. It must have exactly SHIFTOP least
8913 significant 0's, then one or more 1's, then zero or more 0's. */
8916 includes_rldic_lshift_p (rtx shiftop, rtx andop)
8918 if (GET_CODE (andop) == CONST_INT)
8920 HOST_WIDE_INT c, lsb, shift_mask;
8923 if (c == 0 || c == ~0)
8927 shift_mask <<= INTVAL (shiftop);
8929 /* Find the least significant one bit. */
8932 /* It must coincide with the LSB of the shift mask. */
8933 if (-lsb != shift_mask)
8936 /* Invert to look for the next transition (if any). */
8939 /* Remove the low group of ones (originally low group of zeros). */
8942 /* Again find the lsb, and check we have all 1's above. */
8946 else if (GET_CODE (andop) == CONST_DOUBLE
8947 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
8949 HOST_WIDE_INT low, high, lsb;
8950 HOST_WIDE_INT shift_mask_low, shift_mask_high;
8952 low = CONST_DOUBLE_LOW (andop);
8953 if (HOST_BITS_PER_WIDE_INT < 64)
8954 high = CONST_DOUBLE_HIGH (andop);
8956 if ((low == 0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == 0))
8957 || (low == ~0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0)))
8960 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
8962 shift_mask_high = ~0;
8963 if (INTVAL (shiftop) > 32)
8964 shift_mask_high <<= INTVAL (shiftop) - 32;
8968 if (-lsb != shift_mask_high || INTVAL (shiftop) < 32)
8975 return high == -lsb;
8978 shift_mask_low = ~0;
8979 shift_mask_low <<= INTVAL (shiftop);
8983 if (-lsb != shift_mask_low)
8986 if (HOST_BITS_PER_WIDE_INT < 64)
8991 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
8994 return high == -lsb;
8998 return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
9004 /* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
9005 to perform a left shift. It must have SHIFTOP or more least
9006 significant 0's, with the remainder of the word 1's. */
9009 includes_rldicr_lshift_p (rtx shiftop, rtx andop)
9011 if (GET_CODE (andop) == CONST_INT)
9013 HOST_WIDE_INT c, lsb, shift_mask;
9016 shift_mask <<= INTVAL (shiftop);
9019 /* Find the least significant one bit. */
9022 /* It must be covered by the shift mask.
9023 This test also rejects c == 0. */
9024 if ((lsb & shift_mask) == 0)
9027 /* Check we have all 1's above the transition, and reject all 1's. */
9028 return c == -lsb && lsb != 1;
9030 else if (GET_CODE (andop) == CONST_DOUBLE
9031 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
9033 HOST_WIDE_INT low, lsb, shift_mask_low;
9035 low = CONST_DOUBLE_LOW (andop);
9037 if (HOST_BITS_PER_WIDE_INT < 64)
9039 HOST_WIDE_INT high, shift_mask_high;
9041 high = CONST_DOUBLE_HIGH (andop);
9045 shift_mask_high = ~0;
9046 if (INTVAL (shiftop) > 32)
9047 shift_mask_high <<= INTVAL (shiftop) - 32;
9051 if ((lsb & shift_mask_high) == 0)
9054 return high == -lsb;
9060 shift_mask_low = ~0;
9061 shift_mask_low <<= INTVAL (shiftop);
9065 if ((lsb & shift_mask_low) == 0)
9068 return low == -lsb && lsb != 1;
9074 /* Return 1 if operands will generate a valid arguments to rlwimi
9075 instruction for insert with right shift in 64-bit mode. The mask may
9076 not start on the first bit or stop on the last bit because wrap-around
9077 effects of instruction do not correspond to semantics of RTL insn. */
9080 insvdi_rshift_rlwimi_p (rtx sizeop, rtx startop, rtx shiftop)
9082 if (INTVAL (startop) < 64
9083 && INTVAL (startop) > 32
9084 && (INTVAL (sizeop) + INTVAL (startop) < 64)
9085 && (INTVAL (sizeop) + INTVAL (startop) > 33)
9086 && (INTVAL (sizeop) + INTVAL (startop) + INTVAL (shiftop) < 96)
9087 && (INTVAL (sizeop) + INTVAL (startop) + INTVAL (shiftop) >= 64)
9088 && (64 - (INTVAL (shiftop) & 63)) >= INTVAL (sizeop))
9094 /* Return 1 if REGNO (reg1) == REGNO (reg2) - 1 making them candidates
9095 for lfq and stfq insns iff the registers are hard registers. */
9098 registers_ok_for_quad_peep (rtx reg1, rtx reg2)
9100 /* We might have been passed a SUBREG. */
9101 if (GET_CODE (reg1) != REG || GET_CODE (reg2) != REG)
9104 /* We might have been passed non floating point registers. */
9105 if (!FP_REGNO_P (REGNO (reg1))
9106 || !FP_REGNO_P (REGNO (reg2)))
9109 return (REGNO (reg1) == REGNO (reg2) - 1);
9112 /* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
9113 addr1 and addr2 must be in consecutive memory locations
9114 (addr2 == addr1 + 8). */
9117 mems_ok_for_quad_peep (rtx mem1, rtx mem2)
9123 /* The mems cannot be volatile. */
9124 if (MEM_VOLATILE_P (mem1) || MEM_VOLATILE_P (mem2))
9127 addr1 = XEXP (mem1, 0);
9128 addr2 = XEXP (mem2, 0);
9130 /* Extract an offset (if used) from the first addr. */
9131 if (GET_CODE (addr1) == PLUS)
9133 /* If not a REG, return zero. */
9134 if (GET_CODE (XEXP (addr1, 0)) != REG)
9138 reg1 = REGNO (XEXP (addr1, 0));
9139 /* The offset must be constant! */
9140 if (GET_CODE (XEXP (addr1, 1)) != CONST_INT)
9142 offset1 = INTVAL (XEXP (addr1, 1));
9145 else if (GET_CODE (addr1) != REG)
9149 reg1 = REGNO (addr1);
9150 /* This was a simple (mem (reg)) expression. Offset is 0. */
9154 /* Make sure the second address is a (mem (plus (reg) (const_int)))
9155 or if it is (mem (reg)) then make sure that offset1 is -8 and the same
9156 register as addr1. */
9157 if (offset1 == -8 && GET_CODE (addr2) == REG && reg1 == REGNO (addr2))
9159 if (GET_CODE (addr2) != PLUS)
9162 if (GET_CODE (XEXP (addr2, 0)) != REG
9163 || GET_CODE (XEXP (addr2, 1)) != CONST_INT)
9166 if (reg1 != REGNO (XEXP (addr2, 0)))
9169 /* The offset for the second addr must be 8 more than the first addr. */
9170 if (INTVAL (XEXP (addr2, 1)) != offset1 + 8)
9173 /* All the tests passed. addr1 and addr2 are valid for lfq or stfq
9178 /* Return the register class of a scratch register needed to copy IN into
9179 or out of a register in CLASS in MODE. If it can be done directly,
9180 NO_REGS is returned. */
9183 secondary_reload_class (enum reg_class class,
9184 enum machine_mode mode ATTRIBUTE_UNUSED,
9189 if (TARGET_ELF || (DEFAULT_ABI == ABI_DARWIN
9191 && MACHOPIC_INDIRECT
9195 /* We cannot copy a symbolic operand directly into anything
9196 other than BASE_REGS for TARGET_ELF. So indicate that a
9197 register from BASE_REGS is needed as an intermediate
9200 On Darwin, pic addresses require a load from memory, which
9201 needs a base register. */
9202 if (class != BASE_REGS
9203 && (GET_CODE (in) == SYMBOL_REF
9204 || GET_CODE (in) == HIGH
9205 || GET_CODE (in) == LABEL_REF
9206 || GET_CODE (in) == CONST))
9210 if (GET_CODE (in) == REG)
9213 if (regno >= FIRST_PSEUDO_REGISTER)
9215 regno = true_regnum (in);
9216 if (regno >= FIRST_PSEUDO_REGISTER)
9220 else if (GET_CODE (in) == SUBREG)
9222 regno = true_regnum (in);
9223 if (regno >= FIRST_PSEUDO_REGISTER)
9229 /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
9231 if (class == GENERAL_REGS || class == BASE_REGS
9232 || (regno >= 0 && INT_REGNO_P (regno)))
9235 /* Constants, memory, and FP registers can go into FP registers. */
9236 if ((regno == -1 || FP_REGNO_P (regno))
9237 && (class == FLOAT_REGS || class == NON_SPECIAL_REGS))
9240 /* Memory, and AltiVec registers can go into AltiVec registers. */
9241 if ((regno == -1 || ALTIVEC_REGNO_P (regno))
9242 && class == ALTIVEC_REGS)
9245 /* We can copy among the CR registers. */
9246 if ((class == CR_REGS || class == CR0_REGS)
9247 && regno >= 0 && CR_REGNO_P (regno))
9250 /* Otherwise, we need GENERAL_REGS. */
9251 return GENERAL_REGS;
9254 /* Given a comparison operation, return the bit number in CCR to test. We
9255 know this is a valid comparison.
9257 SCC_P is 1 if this is for an scc. That means that %D will have been
9258 used instead of %C, so the bits will be in different places.
9260 Return -1 if OP isn't a valid comparison for some reason. */
9263 ccr_bit (rtx op, int scc_p)
9265 enum rtx_code code = GET_CODE (op);
9266 enum machine_mode cc_mode;
9271 if (!COMPARISON_P (op))
9276 gcc_assert (GET_CODE (reg) == REG && CR_REGNO_P (REGNO (reg)));
9278 cc_mode = GET_MODE (reg);
9279 cc_regnum = REGNO (reg);
9280 base_bit = 4 * (cc_regnum - CR0_REGNO);
9282 validate_condition_mode (code, cc_mode);
9284 /* When generating a sCOND operation, only positive conditions are
9287 || code == EQ || code == GT || code == LT || code == UNORDERED
9288 || code == GTU || code == LTU);
9293 return scc_p ? base_bit + 3 : base_bit + 2;
9295 return base_bit + 2;
9296 case GT: case GTU: case UNLE:
9297 return base_bit + 1;
9298 case LT: case LTU: case UNGE:
9300 case ORDERED: case UNORDERED:
9301 return base_bit + 3;
9304 /* If scc, we will have done a cror to put the bit in the
9305 unordered position. So test that bit. For integer, this is ! LT
9306 unless this is an scc insn. */
9307 return scc_p ? base_bit + 3 : base_bit;
9310 return scc_p ? base_bit + 3 : base_bit + 1;
9317 /* Return the GOT register. */
9320 rs6000_got_register (rtx value ATTRIBUTE_UNUSED)
9322 /* The second flow pass currently (June 1999) can't update
9323 regs_ever_live without disturbing other parts of the compiler, so
9324 update it here to make the prolog/epilogue code happy. */
9325 if (no_new_pseudos && ! regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM])
9326 regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
9328 current_function_uses_pic_offset_table = 1;
9330 return pic_offset_table_rtx;
9333 /* Function to init struct machine_function.
9334 This will be called, via a pointer variable,
9335 from push_function_context. */
9337 static struct machine_function *
9338 rs6000_init_machine_status (void)
9340 return ggc_alloc_cleared (sizeof (machine_function));
9343 /* These macros test for integers and extract the low-order bits. */
9345 ((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE) \
9346 && GET_MODE (X) == VOIDmode)
9348 #define INT_LOWPART(X) \
9349 (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
9355 unsigned long val = INT_LOWPART (op);
9357 /* If the high bit is zero, the value is the first 1 bit we find
9359 if ((val & 0x80000000) == 0)
9361 gcc_assert (val & 0xffffffff);
9364 while (((val <<= 1) & 0x80000000) == 0)
9369 /* If the high bit is set and the low bit is not, or the mask is all
9370 1's, the value is zero. */
9371 if ((val & 1) == 0 || (val & 0xffffffff) == 0xffffffff)
9374 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
9377 while (((val >>= 1) & 1) != 0)
9387 unsigned long val = INT_LOWPART (op);
9389 /* If the low bit is zero, the value is the first 1 bit we find from
9393 gcc_assert (val & 0xffffffff);
9396 while (((val >>= 1) & 1) == 0)
9402 /* If the low bit is set and the high bit is not, or the mask is all
9403 1's, the value is 31. */
9404 if ((val & 0x80000000) == 0 || (val & 0xffffffff) == 0xffffffff)
9407 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
9410 while (((val <<= 1) & 0x80000000) != 0)
9416 /* Locate some local-dynamic symbol still in use by this function
9417 so that we can print its name in some tls_ld pattern. */
9420 rs6000_get_some_local_dynamic_name (void)
9424 if (cfun->machine->some_ld_name)
9425 return cfun->machine->some_ld_name;
9427 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
9429 && for_each_rtx (&PATTERN (insn),
9430 rs6000_get_some_local_dynamic_name_1, 0))
9431 return cfun->machine->some_ld_name;
9436 /* Helper function for rs6000_get_some_local_dynamic_name. */
9439 rs6000_get_some_local_dynamic_name_1 (rtx *px, void *data ATTRIBUTE_UNUSED)
9443 if (GET_CODE (x) == SYMBOL_REF)
9445 const char *str = XSTR (x, 0);
9446 if (SYMBOL_REF_TLS_MODEL (x) == TLS_MODEL_LOCAL_DYNAMIC)
9448 cfun->machine->some_ld_name = str;
9456 /* Write out a function code label. */
9459 rs6000_output_function_entry (FILE *file, const char *fname)
9461 if (fname[0] != '.')
9463 switch (DEFAULT_ABI)
9472 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "L.");
9481 RS6000_OUTPUT_BASENAME (file, fname);
9483 assemble_name (file, fname);
9486 /* Print an operand. Recognize special options, documented below. */
9489 #define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
9490 #define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
9492 #define SMALL_DATA_RELOC "sda21"
9493 #define SMALL_DATA_REG 0
9497 print_operand (FILE *file, rtx x, int code)
9501 unsigned HOST_WIDE_INT uval;
9506 /* Write out an instruction after the call which may be replaced
9507 with glue code by the loader. This depends on the AIX version. */
9508 asm_fprintf (file, RS6000_CALL_GLUE);
9511 /* %a is output_address. */
9514 /* If X is a constant integer whose low-order 5 bits are zero,
9515 write 'l'. Otherwise, write 'r'. This is a kludge to fix a bug
9516 in the AIX assembler where "sri" with a zero shift count
9517 writes a trash instruction. */
9518 if (GET_CODE (x) == CONST_INT && (INTVAL (x) & 31) == 0)
9525 /* If constant, low-order 16 bits of constant, unsigned.
9526 Otherwise, write normally. */
9528 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 0xffff);
9530 print_operand (file, x, 0);
9534 /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
9535 for 64-bit mask direction. */
9536 putc (((INT_LOWPART (x) & 1) == 0 ? 'r' : 'l'), file);
9539 /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
9543 /* X is a CR register. Print the number of the GT bit of the CR. */
9544 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
9545 output_operand_lossage ("invalid %%E value");
9547 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 1);
9551 /* Like 'J' but get to the EQ bit. */
9552 gcc_assert (GET_CODE (x) == REG);
9554 /* Bit 1 is EQ bit. */
9555 i = 4 * (REGNO (x) - CR0_REGNO) + 2;
9557 fprintf (file, "%d", i);
9561 /* X is a CR register. Print the number of the EQ bit of the CR */
9562 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
9563 output_operand_lossage ("invalid %%E value");
9565 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 2);
9569 /* X is a CR register. Print the shift count needed to move it
9570 to the high-order four bits. */
9571 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
9572 output_operand_lossage ("invalid %%f value");
9574 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO));
9578 /* Similar, but print the count for the rotate in the opposite
9580 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
9581 output_operand_lossage ("invalid %%F value");
9583 fprintf (file, "%d", 32 - 4 * (REGNO (x) - CR0_REGNO));
9587 /* X is a constant integer. If it is negative, print "m",
9588 otherwise print "z". This is to make an aze or ame insn. */
9589 if (GET_CODE (x) != CONST_INT)
9590 output_operand_lossage ("invalid %%G value");
9591 else if (INTVAL (x) >= 0)
9598 /* If constant, output low-order five bits. Otherwise, write
9601 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 31);
9603 print_operand (file, x, 0);
9607 /* If constant, output low-order six bits. Otherwise, write
9610 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 63);
9612 print_operand (file, x, 0);
9616 /* Print `i' if this is a constant, else nothing. */
9622 /* Write the bit number in CCR for jump. */
9625 output_operand_lossage ("invalid %%j code");
9627 fprintf (file, "%d", i);
9631 /* Similar, but add one for shift count in rlinm for scc and pass
9632 scc flag to `ccr_bit'. */
9635 output_operand_lossage ("invalid %%J code");
9637 /* If we want bit 31, write a shift count of zero, not 32. */
9638 fprintf (file, "%d", i == 31 ? 0 : i + 1);
9642 /* X must be a constant. Write the 1's complement of the
9645 output_operand_lossage ("invalid %%k value");
9647 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~ INT_LOWPART (x));
9651 /* X must be a symbolic constant on ELF. Write an
9652 expression suitable for an 'addi' that adds in the low 16
9654 if (GET_CODE (x) != CONST)
9656 print_operand_address (file, x);
9661 if (GET_CODE (XEXP (x, 0)) != PLUS
9662 || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
9663 && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
9664 || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
9665 output_operand_lossage ("invalid %%K value");
9666 print_operand_address (file, XEXP (XEXP (x, 0), 0));
9668 /* For GNU as, there must be a non-alphanumeric character
9669 between 'l' and the number. The '-' is added by
9670 print_operand() already. */
9671 if (INTVAL (XEXP (XEXP (x, 0), 1)) >= 0)
9673 print_operand (file, XEXP (XEXP (x, 0), 1), 0);
9677 /* %l is output_asm_label. */
9680 /* Write second word of DImode or DFmode reference. Works on register
9681 or non-indexed memory only. */
9682 if (GET_CODE (x) == REG)
9683 fputs (reg_names[REGNO (x) + 1], file);
9684 else if (GET_CODE (x) == MEM)
9686 /* Handle possible auto-increment. Since it is pre-increment and
9687 we have already done it, we can just use an offset of word. */
9688 if (GET_CODE (XEXP (x, 0)) == PRE_INC
9689 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
9690 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
9693 output_address (XEXP (adjust_address_nv (x, SImode,
9697 if (small_data_operand (x, GET_MODE (x)))
9698 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
9699 reg_names[SMALL_DATA_REG]);
9704 /* MB value for a mask operand. */
9705 if (! mask_operand (x, SImode))
9706 output_operand_lossage ("invalid %%m value");
9708 fprintf (file, "%d", extract_MB (x));
9712 /* ME value for a mask operand. */
9713 if (! mask_operand (x, SImode))
9714 output_operand_lossage ("invalid %%M value");
9716 fprintf (file, "%d", extract_ME (x));
9719 /* %n outputs the negative of its operand. */
9722 /* Write the number of elements in the vector times 4. */
9723 if (GET_CODE (x) != PARALLEL)
9724 output_operand_lossage ("invalid %%N value");
9726 fprintf (file, "%d", XVECLEN (x, 0) * 4);
9730 /* Similar, but subtract 1 first. */
9731 if (GET_CODE (x) != PARALLEL)
9732 output_operand_lossage ("invalid %%O value");
9734 fprintf (file, "%d", (XVECLEN (x, 0) - 1) * 4);
9738 /* X is a CONST_INT that is a power of two. Output the logarithm. */
9740 || INT_LOWPART (x) < 0
9741 || (i = exact_log2 (INT_LOWPART (x))) < 0)
9742 output_operand_lossage ("invalid %%p value");
9744 fprintf (file, "%d", i);
9748 /* The operand must be an indirect memory reference. The result
9749 is the register name. */
9750 if (GET_CODE (x) != MEM || GET_CODE (XEXP (x, 0)) != REG
9751 || REGNO (XEXP (x, 0)) >= 32)
9752 output_operand_lossage ("invalid %%P value");
9754 fputs (reg_names[REGNO (XEXP (x, 0))], file);
9758 /* This outputs the logical code corresponding to a boolean
9759 expression. The expression may have one or both operands
9760 negated (if one, only the first one). For condition register
9761 logical operations, it will also treat the negated
9762 CR codes as NOTs, but not handle NOTs of them. */
9764 const char *const *t = 0;
9766 enum rtx_code code = GET_CODE (x);
9767 static const char * const tbl[3][3] = {
9768 { "and", "andc", "nor" },
9769 { "or", "orc", "nand" },
9770 { "xor", "eqv", "xor" } };
9774 else if (code == IOR)
9776 else if (code == XOR)
9779 output_operand_lossage ("invalid %%q value");
9781 if (GET_CODE (XEXP (x, 0)) != NOT)
9785 if (GET_CODE (XEXP (x, 1)) == NOT)
9803 /* X is a CR register. Print the mask for `mtcrf'. */
9804 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
9805 output_operand_lossage ("invalid %%R value");
9807 fprintf (file, "%d", 128 >> (REGNO (x) - CR0_REGNO));
9811 /* Low 5 bits of 32 - value */
9813 output_operand_lossage ("invalid %%s value");
9815 fprintf (file, HOST_WIDE_INT_PRINT_DEC, (32 - INT_LOWPART (x)) & 31);
9819 /* PowerPC64 mask position. All 0's is excluded.
9820 CONST_INT 32-bit mask is considered sign-extended so any
9821 transition must occur within the CONST_INT, not on the boundary. */
9822 if (! mask64_operand (x, DImode))
9823 output_operand_lossage ("invalid %%S value");
9825 uval = INT_LOWPART (x);
9827 if (uval & 1) /* Clear Left */
9829 #if HOST_BITS_PER_WIDE_INT > 64
9830 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
9834 else /* Clear Right */
9837 #if HOST_BITS_PER_WIDE_INT > 64
9838 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
9844 gcc_assert (i >= 0);
9845 fprintf (file, "%d", i);
9849 /* Like 'J' but get to the OVERFLOW/UNORDERED bit. */
9850 gcc_assert (GET_CODE (x) == REG && GET_MODE (x) == CCmode);
9852 /* Bit 3 is OV bit. */
9853 i = 4 * (REGNO (x) - CR0_REGNO) + 3;
9855 /* If we want bit 31, write a shift count of zero, not 32. */
9856 fprintf (file, "%d", i == 31 ? 0 : i + 1);
9860 /* Print the symbolic name of a branch target register. */
9861 if (GET_CODE (x) != REG || (REGNO (x) != LINK_REGISTER_REGNUM
9862 && REGNO (x) != COUNT_REGISTER_REGNUM))
9863 output_operand_lossage ("invalid %%T value");
9864 else if (REGNO (x) == LINK_REGISTER_REGNUM)
9865 fputs (TARGET_NEW_MNEMONICS ? "lr" : "r", file);
9867 fputs ("ctr", file);
9871 /* High-order 16 bits of constant for use in unsigned operand. */
9873 output_operand_lossage ("invalid %%u value");
9875 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
9876 (INT_LOWPART (x) >> 16) & 0xffff);
9880 /* High-order 16 bits of constant for use in signed operand. */
9882 output_operand_lossage ("invalid %%v value");
9884 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
9885 (INT_LOWPART (x) >> 16) & 0xffff);
9889 /* Print `u' if this has an auto-increment or auto-decrement. */
9890 if (GET_CODE (x) == MEM
9891 && (GET_CODE (XEXP (x, 0)) == PRE_INC
9892 || GET_CODE (XEXP (x, 0)) == PRE_DEC))
9897 /* Print the trap code for this operand. */
9898 switch (GET_CODE (x))
9901 fputs ("eq", file); /* 4 */
9904 fputs ("ne", file); /* 24 */
9907 fputs ("lt", file); /* 16 */
9910 fputs ("le", file); /* 20 */
9913 fputs ("gt", file); /* 8 */
9916 fputs ("ge", file); /* 12 */
9919 fputs ("llt", file); /* 2 */
9922 fputs ("lle", file); /* 6 */
9925 fputs ("lgt", file); /* 1 */
9928 fputs ("lge", file); /* 5 */
9936 /* If constant, low-order 16 bits of constant, signed. Otherwise, write
9939 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
9940 ((INT_LOWPART (x) & 0xffff) ^ 0x8000) - 0x8000);
9942 print_operand (file, x, 0);
9946 /* MB value for a PowerPC64 rldic operand. */
9947 val = (GET_CODE (x) == CONST_INT
9948 ? INTVAL (x) : CONST_DOUBLE_HIGH (x));
9953 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
9954 if ((val <<= 1) < 0)
9957 #if HOST_BITS_PER_WIDE_INT == 32
9958 if (GET_CODE (x) == CONST_INT && i >= 0)
9959 i += 32; /* zero-extend high-part was all 0's */
9960 else if (GET_CODE (x) == CONST_DOUBLE && i == 32)
9962 val = CONST_DOUBLE_LOW (x);
9968 for ( ; i < 64; i++)
9969 if ((val <<= 1) < 0)
9974 fprintf (file, "%d", i + 1);
9978 if (GET_CODE (x) == MEM
9979 && legitimate_indexed_address_p (XEXP (x, 0), 0))
9984 /* Like 'L', for third word of TImode */
9985 if (GET_CODE (x) == REG)
9986 fputs (reg_names[REGNO (x) + 2], file);
9987 else if (GET_CODE (x) == MEM)
9989 if (GET_CODE (XEXP (x, 0)) == PRE_INC
9990 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
9991 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
9993 output_address (XEXP (adjust_address_nv (x, SImode, 8), 0));
9994 if (small_data_operand (x, GET_MODE (x)))
9995 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
9996 reg_names[SMALL_DATA_REG]);
10001 /* X is a SYMBOL_REF. Write out the name preceded by a
10002 period and without any trailing data in brackets. Used for function
10003 names. If we are configured for System V (or the embedded ABI) on
10004 the PowerPC, do not emit the period, since those systems do not use
10005 TOCs and the like. */
10006 gcc_assert (GET_CODE (x) == SYMBOL_REF);
10008 /* Mark the decl as referenced so that cgraph will output the
10010 if (SYMBOL_REF_DECL (x))
10011 mark_decl_referenced (SYMBOL_REF_DECL (x));
10013 /* For macho, check to see if we need a stub. */
10016 const char *name = XSTR (x, 0);
10018 if (MACHOPIC_INDIRECT
10019 && machopic_classify_symbol (x) == MACHOPIC_UNDEFINED_FUNCTION)
10020 name = machopic_indirection_name (x, /*stub_p=*/true);
10022 assemble_name (file, name);
10024 else if (!DOT_SYMBOLS)
10025 assemble_name (file, XSTR (x, 0));
10027 rs6000_output_function_entry (file, XSTR (x, 0));
10031 /* Like 'L', for last word of TImode. */
10032 if (GET_CODE (x) == REG)
10033 fputs (reg_names[REGNO (x) + 3], file);
10034 else if (GET_CODE (x) == MEM)
10036 if (GET_CODE (XEXP (x, 0)) == PRE_INC
10037 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
10038 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
10040 output_address (XEXP (adjust_address_nv (x, SImode, 12), 0));
10041 if (small_data_operand (x, GET_MODE (x)))
10042 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
10043 reg_names[SMALL_DATA_REG]);
10047 /* Print AltiVec or SPE memory operand. */
10052 gcc_assert (GET_CODE (x) == MEM);
10058 /* Handle [reg]. */
10059 if (GET_CODE (tmp) == REG)
10061 fprintf (file, "0(%s)", reg_names[REGNO (tmp)]);
10064 /* Handle [reg+UIMM]. */
10065 else if (GET_CODE (tmp) == PLUS &&
10066 GET_CODE (XEXP (tmp, 1)) == CONST_INT)
10070 gcc_assert (GET_CODE (XEXP (tmp, 0)) == REG);
10072 x = INTVAL (XEXP (tmp, 1));
10073 fprintf (file, "%d(%s)", x, reg_names[REGNO (XEXP (tmp, 0))]);
10077 /* Fall through. Must be [reg+reg]. */
10080 && GET_CODE (tmp) == AND
10081 && GET_CODE (XEXP (tmp, 1)) == CONST_INT
10082 && INTVAL (XEXP (tmp, 1)) == -16)
10083 tmp = XEXP (tmp, 0);
10084 if (GET_CODE (tmp) == REG)
10085 fprintf (file, "0,%s", reg_names[REGNO (tmp)]);
10088 gcc_assert (GET_CODE (tmp) == PLUS
10089 && GET_CODE (XEXP (tmp, 1)) == REG);
10091 if (REGNO (XEXP (tmp, 0)) == 0)
10092 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 1)) ],
10093 reg_names[ REGNO (XEXP (tmp, 0)) ]);
10095 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 0)) ],
10096 reg_names[ REGNO (XEXP (tmp, 1)) ]);
10102 if (GET_CODE (x) == REG)
10103 fprintf (file, "%s", reg_names[REGNO (x)]);
10104 else if (GET_CODE (x) == MEM)
10106 /* We need to handle PRE_INC and PRE_DEC here, since we need to
10107 know the width from the mode. */
10108 if (GET_CODE (XEXP (x, 0)) == PRE_INC)
10109 fprintf (file, "%d(%s)", GET_MODE_SIZE (GET_MODE (x)),
10110 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
10111 else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
10112 fprintf (file, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x)),
10113 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
10115 output_address (XEXP (x, 0));
10118 output_addr_const (file, x);
10122 assemble_name (file, rs6000_get_some_local_dynamic_name ());
10126 output_operand_lossage ("invalid %%xn code");
10130 /* Print the address of an operand. */
10133 print_operand_address (FILE *file, rtx x)
10135 if (GET_CODE (x) == REG)
10136 fprintf (file, "0(%s)", reg_names[ REGNO (x) ]);
10137 else if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST
10138 || GET_CODE (x) == LABEL_REF)
10140 output_addr_const (file, x);
10141 if (small_data_operand (x, GET_MODE (x)))
10142 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
10143 reg_names[SMALL_DATA_REG]);
10145 gcc_assert (!TARGET_TOC);
10147 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG)
10149 if (REGNO (XEXP (x, 0)) == 0)
10150 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 1)) ],
10151 reg_names[ REGNO (XEXP (x, 0)) ]);
10153 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 0)) ],
10154 reg_names[ REGNO (XEXP (x, 1)) ]);
10156 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
10157 fprintf (file, HOST_WIDE_INT_PRINT_DEC "(%s)",
10158 INTVAL (XEXP (x, 1)), reg_names[ REGNO (XEXP (x, 0)) ]);
10160 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
10161 && CONSTANT_P (XEXP (x, 1)))
10163 output_addr_const (file, XEXP (x, 1));
10164 fprintf (file, "@l(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
10168 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
10169 && CONSTANT_P (XEXP (x, 1)))
10171 fprintf (file, "lo16(");
10172 output_addr_const (file, XEXP (x, 1));
10173 fprintf (file, ")(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
10176 else if (legitimate_constant_pool_address_p (x))
10178 if (TARGET_AIX && (!TARGET_ELF || !TARGET_MINIMAL_TOC))
10180 rtx contains_minus = XEXP (x, 1);
10184 /* Find the (minus (sym) (toc)) buried in X, and temporarily
10185 turn it into (sym) for output_addr_const. */
10186 while (GET_CODE (XEXP (contains_minus, 0)) != MINUS)
10187 contains_minus = XEXP (contains_minus, 0);
10189 minus = XEXP (contains_minus, 0);
10190 symref = XEXP (minus, 0);
10191 XEXP (contains_minus, 0) = symref;
10196 name = XSTR (symref, 0);
10197 newname = alloca (strlen (name) + sizeof ("@toc"));
10198 strcpy (newname, name);
10199 strcat (newname, "@toc");
10200 XSTR (symref, 0) = newname;
10202 output_addr_const (file, XEXP (x, 1));
10204 XSTR (symref, 0) = name;
10205 XEXP (contains_minus, 0) = minus;
10208 output_addr_const (file, XEXP (x, 1));
10210 fprintf (file, "(%s)", reg_names[REGNO (XEXP (x, 0))]);
10213 gcc_unreachable ();
10216 /* Target hook for assembling integer objects. The PowerPC version has
10217 to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
10218 is defined. It also needs to handle DI-mode objects on 64-bit
10222 rs6000_assemble_integer (rtx x, unsigned int size, int aligned_p)
10224 #ifdef RELOCATABLE_NEEDS_FIXUP
10225 /* Special handling for SI values. */
10226 if (RELOCATABLE_NEEDS_FIXUP && size == 4 && aligned_p)
10228 extern int in_toc_section (void);
10229 static int recurse = 0;
10231 /* For -mrelocatable, we mark all addresses that need to be fixed up
10232 in the .fixup section. */
10233 if (TARGET_RELOCATABLE
10234 && !in_toc_section ()
10235 && !in_text_section ()
10236 && !in_unlikely_text_section ()
10238 && GET_CODE (x) != CONST_INT
10239 && GET_CODE (x) != CONST_DOUBLE
10245 ASM_GENERATE_INTERNAL_LABEL (buf, "LCP", fixuplabelno);
10247 ASM_OUTPUT_LABEL (asm_out_file, buf);
10248 fprintf (asm_out_file, "\t.long\t(");
10249 output_addr_const (asm_out_file, x);
10250 fprintf (asm_out_file, ")@fixup\n");
10251 fprintf (asm_out_file, "\t.section\t\".fixup\",\"aw\"\n");
10252 ASM_OUTPUT_ALIGN (asm_out_file, 2);
10253 fprintf (asm_out_file, "\t.long\t");
10254 assemble_name (asm_out_file, buf);
10255 fprintf (asm_out_file, "\n\t.previous\n");
10259 /* Remove initial .'s to turn a -mcall-aixdesc function
10260 address into the address of the descriptor, not the function
10262 else if (GET_CODE (x) == SYMBOL_REF
10263 && XSTR (x, 0)[0] == '.'
10264 && DEFAULT_ABI == ABI_AIX)
10266 const char *name = XSTR (x, 0);
10267 while (*name == '.')
10270 fprintf (asm_out_file, "\t.long\t%s\n", name);
10274 #endif /* RELOCATABLE_NEEDS_FIXUP */
10275 return default_assemble_integer (x, size, aligned_p);
10278 #ifdef HAVE_GAS_HIDDEN
10279 /* Emit an assembler directive to set symbol visibility for DECL to
10280 VISIBILITY_TYPE. */
10283 rs6000_assemble_visibility (tree decl, int vis)
10285 /* Functions need to have their entry point symbol visibility set as
10286 well as their descriptor symbol visibility. */
10287 if (DEFAULT_ABI == ABI_AIX
10289 && TREE_CODE (decl) == FUNCTION_DECL)
10291 static const char * const visibility_types[] = {
10292 NULL, "internal", "hidden", "protected"
10295 const char *name, *type;
10297 name = ((* targetm.strip_name_encoding)
10298 (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl))));
10299 type = visibility_types[vis];
10301 fprintf (asm_out_file, "\t.%s\t%s\n", type, name);
10302 fprintf (asm_out_file, "\t.%s\t.%s\n", type, name);
10305 default_assemble_visibility (decl, vis);
10310 rs6000_reverse_condition (enum machine_mode mode, enum rtx_code code)
10312 /* Reversal of FP compares takes care -- an ordered compare
10313 becomes an unordered compare and vice versa. */
10314 if (mode == CCFPmode
10315 && (!flag_finite_math_only
10316 || code == UNLT || code == UNLE || code == UNGT || code == UNGE
10317 || code == UNEQ || code == LTGT))
10318 return reverse_condition_maybe_unordered (code);
10320 return reverse_condition (code);
10323 /* Generate a compare for CODE. Return a brand-new rtx that
10324 represents the result of the compare. */
10327 rs6000_generate_compare (enum rtx_code code)
10329 enum machine_mode comp_mode;
10330 rtx compare_result;
10332 if (rs6000_compare_fp_p)
10333 comp_mode = CCFPmode;
10334 else if (code == GTU || code == LTU
10335 || code == GEU || code == LEU)
10336 comp_mode = CCUNSmode;
10337 else if ((code == EQ || code == NE)
10338 && GET_CODE (rs6000_compare_op0) == SUBREG
10339 && GET_CODE (rs6000_compare_op1) == SUBREG
10340 && SUBREG_PROMOTED_UNSIGNED_P (rs6000_compare_op0)
10341 && SUBREG_PROMOTED_UNSIGNED_P (rs6000_compare_op1))
10342 /* These are unsigned values, perhaps there will be a later
10343 ordering compare that can be shared with this one.
10344 Unfortunately we cannot detect the signedness of the operands
10345 for non-subregs. */
10346 comp_mode = CCUNSmode;
10348 comp_mode = CCmode;
10350 /* First, the compare. */
10351 compare_result = gen_reg_rtx (comp_mode);
10353 /* SPE FP compare instructions on the GPRs. Yuck! */
10354 if ((TARGET_E500 && !TARGET_FPRS && TARGET_HARD_FLOAT)
10355 && rs6000_compare_fp_p)
10357 rtx cmp, or_result, compare_result2;
10358 enum machine_mode op_mode = GET_MODE (rs6000_compare_op0);
10360 if (op_mode == VOIDmode)
10361 op_mode = GET_MODE (rs6000_compare_op1);
10363 /* Note: The E500 comparison instructions set the GT bit (x +
10364 1), on success. This explains the mess. */
10368 case EQ: case UNEQ: case NE: case LTGT:
10372 cmp = flag_unsafe_math_optimizations
10373 ? gen_tstsfeq_gpr (compare_result, rs6000_compare_op0,
10374 rs6000_compare_op1)
10375 : gen_cmpsfeq_gpr (compare_result, rs6000_compare_op0,
10376 rs6000_compare_op1);
10380 cmp = flag_unsafe_math_optimizations
10381 ? gen_tstdfeq_gpr (compare_result, rs6000_compare_op0,
10382 rs6000_compare_op1)
10383 : gen_cmpdfeq_gpr (compare_result, rs6000_compare_op0,
10384 rs6000_compare_op1);
10388 gcc_unreachable ();
10392 case GT: case GTU: case UNGT: case UNGE: case GE: case GEU:
10396 cmp = flag_unsafe_math_optimizations
10397 ? gen_tstsfgt_gpr (compare_result, rs6000_compare_op0,
10398 rs6000_compare_op1)
10399 : gen_cmpsfgt_gpr (compare_result, rs6000_compare_op0,
10400 rs6000_compare_op1);
10404 cmp = flag_unsafe_math_optimizations
10405 ? gen_tstdfgt_gpr (compare_result, rs6000_compare_op0,
10406 rs6000_compare_op1)
10407 : gen_cmpdfgt_gpr (compare_result, rs6000_compare_op0,
10408 rs6000_compare_op1);
10412 gcc_unreachable ();
10416 case LT: case LTU: case UNLT: case UNLE: case LE: case LEU:
10420 cmp = flag_unsafe_math_optimizations
10421 ? gen_tstsflt_gpr (compare_result, rs6000_compare_op0,
10422 rs6000_compare_op1)
10423 : gen_cmpsflt_gpr (compare_result, rs6000_compare_op0,
10424 rs6000_compare_op1);
10428 cmp = flag_unsafe_math_optimizations
10429 ? gen_tstdflt_gpr (compare_result, rs6000_compare_op0,
10430 rs6000_compare_op1)
10431 : gen_cmpdflt_gpr (compare_result, rs6000_compare_op0,
10432 rs6000_compare_op1);
10436 gcc_unreachable ();
10440 gcc_unreachable ();
10443 /* Synthesize LE and GE from LT/GT || EQ. */
10444 if (code == LE || code == GE || code == LEU || code == GEU)
10450 case LE: code = LT; break;
10451 case GE: code = GT; break;
10452 case LEU: code = LT; break;
10453 case GEU: code = GT; break;
10454 default: gcc_unreachable ();
10457 compare_result2 = gen_reg_rtx (CCFPmode);
10463 cmp = flag_unsafe_math_optimizations
10464 ? gen_tstsfeq_gpr (compare_result2, rs6000_compare_op0,
10465 rs6000_compare_op1)
10466 : gen_cmpsfeq_gpr (compare_result2, rs6000_compare_op0,
10467 rs6000_compare_op1);
10471 cmp = flag_unsafe_math_optimizations
10472 ? gen_tstdfeq_gpr (compare_result2, rs6000_compare_op0,
10473 rs6000_compare_op1)
10474 : gen_cmpdfeq_gpr (compare_result2, rs6000_compare_op0,
10475 rs6000_compare_op1);
10479 gcc_unreachable ();
10483 /* OR them together. */
10484 or_result = gen_reg_rtx (CCFPmode);
10485 cmp = gen_e500_cr_ior_compare (or_result, compare_result,
10487 compare_result = or_result;
10492 if (code == NE || code == LTGT)
10502 /* Generate XLC-compatible TFmode compare as PARALLEL with extra
10503 CLOBBERs to match cmptf_internal2 pattern. */
10504 if (comp_mode == CCFPmode && TARGET_XL_COMPAT
10505 && GET_MODE (rs6000_compare_op0) == TFmode
10506 && (DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_DARWIN)
10507 && TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_LONG_DOUBLE_128)
10508 emit_insn (gen_rtx_PARALLEL (VOIDmode,
10510 gen_rtx_SET (VOIDmode,
10512 gen_rtx_COMPARE (comp_mode,
10513 rs6000_compare_op0,
10514 rs6000_compare_op1)),
10515 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
10516 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
10517 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
10518 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
10519 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
10520 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
10521 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
10522 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)))));
10524 emit_insn (gen_rtx_SET (VOIDmode, compare_result,
10525 gen_rtx_COMPARE (comp_mode,
10526 rs6000_compare_op0,
10527 rs6000_compare_op1)));
10530 /* Some kinds of FP comparisons need an OR operation;
10531 under flag_finite_math_only we don't bother. */
10532 if (rs6000_compare_fp_p
10533 && !flag_finite_math_only
10534 && !(TARGET_HARD_FLOAT && TARGET_E500 && !TARGET_FPRS)
10535 && (code == LE || code == GE
10536 || code == UNEQ || code == LTGT
10537 || code == UNGT || code == UNLT))
10539 enum rtx_code or1, or2;
10540 rtx or1_rtx, or2_rtx, compare2_rtx;
10541 rtx or_result = gen_reg_rtx (CCEQmode);
10545 case LE: or1 = LT; or2 = EQ; break;
10546 case GE: or1 = GT; or2 = EQ; break;
10547 case UNEQ: or1 = UNORDERED; or2 = EQ; break;
10548 case LTGT: or1 = LT; or2 = GT; break;
10549 case UNGT: or1 = UNORDERED; or2 = GT; break;
10550 case UNLT: or1 = UNORDERED; or2 = LT; break;
10551 default: gcc_unreachable ();
10553 validate_condition_mode (or1, comp_mode);
10554 validate_condition_mode (or2, comp_mode);
10555 or1_rtx = gen_rtx_fmt_ee (or1, SImode, compare_result, const0_rtx);
10556 or2_rtx = gen_rtx_fmt_ee (or2, SImode, compare_result, const0_rtx);
10557 compare2_rtx = gen_rtx_COMPARE (CCEQmode,
10558 gen_rtx_IOR (SImode, or1_rtx, or2_rtx),
10560 emit_insn (gen_rtx_SET (VOIDmode, or_result, compare2_rtx));
10562 compare_result = or_result;
10566 validate_condition_mode (code, GET_MODE (compare_result));
10568 return gen_rtx_fmt_ee (code, VOIDmode, compare_result, const0_rtx);
10572 /* Emit the RTL for an sCOND pattern. */
10575 rs6000_emit_sCOND (enum rtx_code code, rtx result)
10578 enum machine_mode op_mode;
10579 enum rtx_code cond_code;
10581 condition_rtx = rs6000_generate_compare (code);
10582 cond_code = GET_CODE (condition_rtx);
10584 if (TARGET_E500 && rs6000_compare_fp_p
10585 && !TARGET_FPRS && TARGET_HARD_FLOAT)
10589 PUT_MODE (condition_rtx, SImode);
10590 t = XEXP (condition_rtx, 0);
10592 gcc_assert (cond_code == NE || cond_code == EQ);
10594 if (cond_code == NE)
10595 emit_insn (gen_e500_flip_gt_bit (t, t));
10597 emit_insn (gen_move_from_CR_gt_bit (result, t));
10601 if (cond_code == NE
10602 || cond_code == GE || cond_code == LE
10603 || cond_code == GEU || cond_code == LEU
10604 || cond_code == ORDERED || cond_code == UNGE || cond_code == UNLE)
10606 rtx not_result = gen_reg_rtx (CCEQmode);
10607 rtx not_op, rev_cond_rtx;
10608 enum machine_mode cc_mode;
10610 cc_mode = GET_MODE (XEXP (condition_rtx, 0));
10612 rev_cond_rtx = gen_rtx_fmt_ee (rs6000_reverse_condition (cc_mode, cond_code),
10613 SImode, XEXP (condition_rtx, 0), const0_rtx);
10614 not_op = gen_rtx_COMPARE (CCEQmode, rev_cond_rtx, const0_rtx);
10615 emit_insn (gen_rtx_SET (VOIDmode, not_result, not_op));
10616 condition_rtx = gen_rtx_EQ (VOIDmode, not_result, const0_rtx);
10619 op_mode = GET_MODE (rs6000_compare_op0);
10620 if (op_mode == VOIDmode)
10621 op_mode = GET_MODE (rs6000_compare_op1);
10623 if (TARGET_POWERPC64 && (op_mode == DImode || rs6000_compare_fp_p))
10625 PUT_MODE (condition_rtx, DImode);
10626 convert_move (result, condition_rtx, 0);
10630 PUT_MODE (condition_rtx, SImode);
10631 emit_insn (gen_rtx_SET (VOIDmode, result, condition_rtx));
10635 /* Emit a branch of kind CODE to location LOC. */
10638 rs6000_emit_cbranch (enum rtx_code code, rtx loc)
10640 rtx condition_rtx, loc_ref;
10642 condition_rtx = rs6000_generate_compare (code);
10643 loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
10644 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
10645 gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
10646 loc_ref, pc_rtx)));
10649 /* Return the string to output a conditional branch to LABEL, which is
10650 the operand number of the label, or -1 if the branch is really a
10651 conditional return.
10653 OP is the conditional expression. XEXP (OP, 0) is assumed to be a
10654 condition code register and its mode specifies what kind of
10655 comparison we made.
10657 REVERSED is nonzero if we should reverse the sense of the comparison.
10659 INSN is the insn. */
10662 output_cbranch (rtx op, const char *label, int reversed, rtx insn)
10664 static char string[64];
10665 enum rtx_code code = GET_CODE (op);
10666 rtx cc_reg = XEXP (op, 0);
10667 enum machine_mode mode = GET_MODE (cc_reg);
10668 int cc_regno = REGNO (cc_reg) - CR0_REGNO;
10669 int need_longbranch = label != NULL && get_attr_length (insn) == 8;
10670 int really_reversed = reversed ^ need_longbranch;
10676 validate_condition_mode (code, mode);
10678 /* Work out which way this really branches. We could use
10679 reverse_condition_maybe_unordered here always but this
10680 makes the resulting assembler clearer. */
10681 if (really_reversed)
10683 /* Reversal of FP compares takes care -- an ordered compare
10684 becomes an unordered compare and vice versa. */
10685 if (mode == CCFPmode)
10686 code = reverse_condition_maybe_unordered (code);
10688 code = reverse_condition (code);
10691 if ((TARGET_E500 && !TARGET_FPRS && TARGET_HARD_FLOAT) && mode == CCFPmode)
10693 /* The efscmp/tst* instructions twiddle bit 2, which maps nicely
10698 /* Opposite of GT. */
10707 gcc_unreachable ();
10713 /* Not all of these are actually distinct opcodes, but
10714 we distinguish them for clarity of the resulting assembler. */
10715 case NE: case LTGT:
10716 ccode = "ne"; break;
10717 case EQ: case UNEQ:
10718 ccode = "eq"; break;
10720 ccode = "ge"; break;
10721 case GT: case GTU: case UNGT:
10722 ccode = "gt"; break;
10724 ccode = "le"; break;
10725 case LT: case LTU: case UNLT:
10726 ccode = "lt"; break;
10727 case UNORDERED: ccode = "un"; break;
10728 case ORDERED: ccode = "nu"; break;
10729 case UNGE: ccode = "nl"; break;
10730 case UNLE: ccode = "ng"; break;
10732 gcc_unreachable ();
10735 /* Maybe we have a guess as to how likely the branch is.
10736 The old mnemonics don't have a way to specify this information. */
10738 note = find_reg_note (insn, REG_BR_PROB, NULL_RTX);
10739 if (note != NULL_RTX)
10741 /* PROB is the difference from 50%. */
10742 int prob = INTVAL (XEXP (note, 0)) - REG_BR_PROB_BASE / 2;
10744 /* Only hint for highly probable/improbable branches on newer
10745 cpus as static prediction overrides processor dynamic
10746 prediction. For older cpus we may as well always hint, but
10747 assume not taken for branches that are very close to 50% as a
10748 mispredicted taken branch is more expensive than a
10749 mispredicted not-taken branch. */
10750 if (rs6000_always_hint
10751 || abs (prob) > REG_BR_PROB_BASE / 100 * 48)
10753 if (abs (prob) > REG_BR_PROB_BASE / 20
10754 && ((prob > 0) ^ need_longbranch))
10762 s += sprintf (s, "{b%sr|b%slr%s} ", ccode, ccode, pred);
10764 s += sprintf (s, "{b%s|b%s%s} ", ccode, ccode, pred);
10766 /* We need to escape any '%' characters in the reg_names string.
10767 Assume they'd only be the first character.... */
10768 if (reg_names[cc_regno + CR0_REGNO][0] == '%')
10770 s += sprintf (s, "%s", reg_names[cc_regno + CR0_REGNO]);
10774 /* If the branch distance was too far, we may have to use an
10775 unconditional branch to go the distance. */
10776 if (need_longbranch)
10777 s += sprintf (s, ",$+8\n\tb %s", label);
10779 s += sprintf (s, ",%s", label);
10785 /* Return the string to flip the GT bit on a CR. */
10787 output_e500_flip_gt_bit (rtx dst, rtx src)
10789 static char string[64];
10792 gcc_assert (GET_CODE (dst) == REG && CR_REGNO_P (REGNO (dst))
10793 && GET_CODE (src) == REG && CR_REGNO_P (REGNO (src)));
10796 a = 4 * (REGNO (dst) - CR0_REGNO) + 1;
10797 b = 4 * (REGNO (src) - CR0_REGNO) + 1;
10799 sprintf (string, "crnot %d,%d", a, b);
10803 /* Return insn index for the vector compare instruction for given CODE,
10804 and DEST_MODE, OP_MODE. Return INSN_NOT_AVAILABLE if valid insn is
10808 get_vec_cmp_insn (enum rtx_code code,
10809 enum machine_mode dest_mode,
10810 enum machine_mode op_mode)
10812 if (!TARGET_ALTIVEC)
10813 return INSN_NOT_AVAILABLE;
10818 if (dest_mode == V16QImode && op_mode == V16QImode)
10819 return UNSPEC_VCMPEQUB;
10820 if (dest_mode == V8HImode && op_mode == V8HImode)
10821 return UNSPEC_VCMPEQUH;
10822 if (dest_mode == V4SImode && op_mode == V4SImode)
10823 return UNSPEC_VCMPEQUW;
10824 if (dest_mode == V4SImode && op_mode == V4SFmode)
10825 return UNSPEC_VCMPEQFP;
10828 if (dest_mode == V4SImode && op_mode == V4SFmode)
10829 return UNSPEC_VCMPGEFP;
10831 if (dest_mode == V16QImode && op_mode == V16QImode)
10832 return UNSPEC_VCMPGTSB;
10833 if (dest_mode == V8HImode && op_mode == V8HImode)
10834 return UNSPEC_VCMPGTSH;
10835 if (dest_mode == V4SImode && op_mode == V4SImode)
10836 return UNSPEC_VCMPGTSW;
10837 if (dest_mode == V4SImode && op_mode == V4SFmode)
10838 return UNSPEC_VCMPGTFP;
10841 if (dest_mode == V16QImode && op_mode == V16QImode)
10842 return UNSPEC_VCMPGTUB;
10843 if (dest_mode == V8HImode && op_mode == V8HImode)
10844 return UNSPEC_VCMPGTUH;
10845 if (dest_mode == V4SImode && op_mode == V4SImode)
10846 return UNSPEC_VCMPGTUW;
10851 return INSN_NOT_AVAILABLE;
10854 /* Emit vector compare for operands OP0 and OP1 using code RCODE.
10855 DMODE is expected destination mode. This is a recursive function. */
10858 rs6000_emit_vector_compare (enum rtx_code rcode,
10860 enum machine_mode dmode)
10864 enum machine_mode dest_mode;
10865 enum machine_mode op_mode = GET_MODE (op1);
10867 gcc_assert (TARGET_ALTIVEC);
10868 gcc_assert (GET_MODE (op0) == GET_MODE (op1));
10870 /* Floating point vector compare instructions uses destination V4SImode.
10871 Move destination to appropriate mode later. */
10872 if (dmode == V4SFmode)
10873 dest_mode = V4SImode;
10877 mask = gen_reg_rtx (dest_mode);
10878 vec_cmp_insn = get_vec_cmp_insn (rcode, dest_mode, op_mode);
10880 if (vec_cmp_insn == INSN_NOT_AVAILABLE)
10882 bool swap_operands = false;
10883 bool try_again = false;
10888 swap_operands = true;
10893 swap_operands = true;
10897 /* Treat A != B as ~(A==B). */
10899 enum insn_code nor_code;
10900 rtx eq_rtx = rs6000_emit_vector_compare (EQ, op0, op1,
10903 nor_code = one_cmpl_optab->handlers[(int)dest_mode].insn_code;
10904 gcc_assert (nor_code != CODE_FOR_nothing);
10905 emit_insn (GEN_FCN (nor_code) (mask, eq_rtx));
10907 if (dmode != dest_mode)
10909 rtx temp = gen_reg_rtx (dest_mode);
10910 convert_move (temp, mask, 0);
10920 /* Try GT/GTU/LT/LTU OR EQ */
10923 enum insn_code ior_code;
10924 enum rtx_code new_code;
10945 gcc_unreachable ();
10948 c_rtx = rs6000_emit_vector_compare (new_code,
10949 op0, op1, dest_mode);
10950 eq_rtx = rs6000_emit_vector_compare (EQ, op0, op1,
10953 ior_code = ior_optab->handlers[(int)dest_mode].insn_code;
10954 gcc_assert (ior_code != CODE_FOR_nothing);
10955 emit_insn (GEN_FCN (ior_code) (mask, c_rtx, eq_rtx));
10956 if (dmode != dest_mode)
10958 rtx temp = gen_reg_rtx (dest_mode);
10959 convert_move (temp, mask, 0);
10966 gcc_unreachable ();
10971 vec_cmp_insn = get_vec_cmp_insn (rcode, dest_mode, op_mode);
10972 /* You only get two chances. */
10973 gcc_assert (vec_cmp_insn != INSN_NOT_AVAILABLE);
10985 emit_insn (gen_rtx_SET (VOIDmode, mask,
10986 gen_rtx_UNSPEC (dest_mode,
10987 gen_rtvec (2, op0, op1),
10989 if (dmode != dest_mode)
10991 rtx temp = gen_reg_rtx (dest_mode);
10992 convert_move (temp, mask, 0);
10998 /* Return vector select instruction for MODE. Return INSN_NOT_AVAILABLE, if
10999 valid insn doesn exist for given mode. */
11002 get_vsel_insn (enum machine_mode mode)
11007 return UNSPEC_VSEL4SI;
11010 return UNSPEC_VSEL4SF;
11013 return UNSPEC_VSEL8HI;
11016 return UNSPEC_VSEL16QI;
11019 return INSN_NOT_AVAILABLE;
11022 return INSN_NOT_AVAILABLE;
11025 /* Emit vector select insn where DEST is destination using
11026 operands OP1, OP2 and MASK. */
11029 rs6000_emit_vector_select (rtx dest, rtx op1, rtx op2, rtx mask)
11032 enum machine_mode dest_mode = GET_MODE (dest);
11033 int vsel_insn_index = get_vsel_insn (GET_MODE (dest));
11035 temp = gen_reg_rtx (dest_mode);
11037 /* For each vector element, select op1 when mask is 1 otherwise
11039 t = gen_rtx_SET (VOIDmode, temp,
11040 gen_rtx_UNSPEC (dest_mode,
11041 gen_rtvec (3, op2, op1, mask),
11044 emit_move_insn (dest, temp);
11048 /* Emit vector conditional expression.
11049 DEST is destination. OP1 and OP2 are two VEC_COND_EXPR operands.
11050 CC_OP0 and CC_OP1 are the two operands for the relation operation COND. */
11053 rs6000_emit_vector_cond_expr (rtx dest, rtx op1, rtx op2,
11054 rtx cond, rtx cc_op0, rtx cc_op1)
11056 enum machine_mode dest_mode = GET_MODE (dest);
11057 enum rtx_code rcode = GET_CODE (cond);
11060 if (!TARGET_ALTIVEC)
11063 /* Get the vector mask for the given relational operations. */
11064 mask = rs6000_emit_vector_compare (rcode, cc_op0, cc_op1, dest_mode);
11066 rs6000_emit_vector_select (dest, op1, op2, mask);
11071 /* Emit a conditional move: move TRUE_COND to DEST if OP of the
11072 operands of the last comparison is nonzero/true, FALSE_COND if it
11073 is zero/false. Return 0 if the hardware has no such operation. */
11076 rs6000_emit_cmove (rtx dest, rtx op, rtx true_cond, rtx false_cond)
11078 enum rtx_code code = GET_CODE (op);
11079 rtx op0 = rs6000_compare_op0;
11080 rtx op1 = rs6000_compare_op1;
11081 REAL_VALUE_TYPE c1;
11082 enum machine_mode compare_mode = GET_MODE (op0);
11083 enum machine_mode result_mode = GET_MODE (dest);
11085 bool is_against_zero;
11087 /* These modes should always match. */
11088 if (GET_MODE (op1) != compare_mode
11089 /* In the isel case however, we can use a compare immediate, so
11090 op1 may be a small constant. */
11091 && (!TARGET_ISEL || !short_cint_operand (op1, VOIDmode)))
11093 if (GET_MODE (true_cond) != result_mode)
11095 if (GET_MODE (false_cond) != result_mode)
11098 /* First, work out if the hardware can do this at all, or
11099 if it's too slow.... */
11100 if (! rs6000_compare_fp_p)
11103 return rs6000_emit_int_cmove (dest, op, true_cond, false_cond);
11106 else if (TARGET_E500 && TARGET_HARD_FLOAT && !TARGET_FPRS
11107 && GET_MODE_CLASS (compare_mode) == MODE_FLOAT)
11110 is_against_zero = op1 == CONST0_RTX (compare_mode);
11112 /* A floating-point subtract might overflow, underflow, or produce
11113 an inexact result, thus changing the floating-point flags, so it
11114 can't be generated if we care about that. It's safe if one side
11115 of the construct is zero, since then no subtract will be
11117 if (GET_MODE_CLASS (compare_mode) == MODE_FLOAT
11118 && flag_trapping_math && ! is_against_zero)
11121 /* Eliminate half of the comparisons by switching operands, this
11122 makes the remaining code simpler. */
11123 if (code == UNLT || code == UNGT || code == UNORDERED || code == NE
11124 || code == LTGT || code == LT || code == UNLE)
11126 code = reverse_condition_maybe_unordered (code);
11128 true_cond = false_cond;
11132 /* UNEQ and LTGT take four instructions for a comparison with zero,
11133 it'll probably be faster to use a branch here too. */
11134 if (code == UNEQ && HONOR_NANS (compare_mode))
11137 if (GET_CODE (op1) == CONST_DOUBLE)
11138 REAL_VALUE_FROM_CONST_DOUBLE (c1, op1);
11140 /* We're going to try to implement comparisons by performing
11141 a subtract, then comparing against zero. Unfortunately,
11142 Inf - Inf is NaN which is not zero, and so if we don't
11143 know that the operand is finite and the comparison
11144 would treat EQ different to UNORDERED, we can't do it. */
11145 if (HONOR_INFINITIES (compare_mode)
11146 && code != GT && code != UNGE
11147 && (GET_CODE (op1) != CONST_DOUBLE || real_isinf (&c1))
11148 /* Constructs of the form (a OP b ? a : b) are safe. */
11149 && ((! rtx_equal_p (op0, false_cond) && ! rtx_equal_p (op1, false_cond))
11150 || (! rtx_equal_p (op0, true_cond)
11151 && ! rtx_equal_p (op1, true_cond))))
11154 /* At this point we know we can use fsel. */
11156 /* Reduce the comparison to a comparison against zero. */
11157 if (! is_against_zero)
11159 temp = gen_reg_rtx (compare_mode);
11160 emit_insn (gen_rtx_SET (VOIDmode, temp,
11161 gen_rtx_MINUS (compare_mode, op0, op1)));
11163 op1 = CONST0_RTX (compare_mode);
11166 /* If we don't care about NaNs we can reduce some of the comparisons
11167 down to faster ones. */
11168 if (! HONOR_NANS (compare_mode))
11174 true_cond = false_cond;
11187 /* Now, reduce everything down to a GE. */
11194 temp = gen_reg_rtx (compare_mode);
11195 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
11200 temp = gen_reg_rtx (compare_mode);
11201 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_ABS (compare_mode, op0)));
11206 temp = gen_reg_rtx (compare_mode);
11207 emit_insn (gen_rtx_SET (VOIDmode, temp,
11208 gen_rtx_NEG (compare_mode,
11209 gen_rtx_ABS (compare_mode, op0))));
11214 /* a UNGE 0 <-> (a GE 0 || -a UNLT 0) */
11215 temp = gen_reg_rtx (result_mode);
11216 emit_insn (gen_rtx_SET (VOIDmode, temp,
11217 gen_rtx_IF_THEN_ELSE (result_mode,
11218 gen_rtx_GE (VOIDmode,
11220 true_cond, false_cond)));
11221 false_cond = true_cond;
11224 temp = gen_reg_rtx (compare_mode);
11225 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
11230 /* a GT 0 <-> (a GE 0 && -a UNLT 0) */
11231 temp = gen_reg_rtx (result_mode);
11232 emit_insn (gen_rtx_SET (VOIDmode, temp,
11233 gen_rtx_IF_THEN_ELSE (result_mode,
11234 gen_rtx_GE (VOIDmode,
11236 true_cond, false_cond)));
11237 true_cond = false_cond;
11240 temp = gen_reg_rtx (compare_mode);
11241 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
11246 gcc_unreachable ();
11249 emit_insn (gen_rtx_SET (VOIDmode, dest,
11250 gen_rtx_IF_THEN_ELSE (result_mode,
11251 gen_rtx_GE (VOIDmode,
11253 true_cond, false_cond)));
11257 /* Same as above, but for ints (isel). */
11260 rs6000_emit_int_cmove (rtx dest, rtx op, rtx true_cond, rtx false_cond)
11262 rtx condition_rtx, cr;
11264 /* All isel implementations thus far are 32-bits. */
11265 if (GET_MODE (rs6000_compare_op0) != SImode)
11268 /* We still have to do the compare, because isel doesn't do a
11269 compare, it just looks at the CRx bits set by a previous compare
11271 condition_rtx = rs6000_generate_compare (GET_CODE (op));
11272 cr = XEXP (condition_rtx, 0);
11274 if (GET_MODE (cr) == CCmode)
11275 emit_insn (gen_isel_signed (dest, condition_rtx,
11276 true_cond, false_cond, cr));
11278 emit_insn (gen_isel_unsigned (dest, condition_rtx,
11279 true_cond, false_cond, cr));
11285 output_isel (rtx *operands)
11287 enum rtx_code code;
11289 code = GET_CODE (operands[1]);
11290 if (code == GE || code == GEU || code == LE || code == LEU || code == NE)
11292 PUT_CODE (operands[1], reverse_condition (code));
11293 return "isel %0,%3,%2,%j1";
11296 return "isel %0,%2,%3,%j1";
11300 rs6000_emit_minmax (rtx dest, enum rtx_code code, rtx op0, rtx op1)
11302 enum machine_mode mode = GET_MODE (op0);
11306 if (code == SMAX || code == SMIN)
11311 if (code == SMAX || code == UMAX)
11312 target = emit_conditional_move (dest, c, op0, op1, mode,
11313 op0, op1, mode, 0);
11315 target = emit_conditional_move (dest, c, op0, op1, mode,
11316 op1, op0, mode, 0);
11317 gcc_assert (target);
11318 if (target != dest)
11319 emit_move_insn (dest, target);
11322 /* Emit instructions to perform a load-reserved/store-conditional operation.
11323 The operation performed is an atomic
11324 (set M (CODE:MODE M OP))
11325 If not NULL, BEFORE is atomically set to M before the operation, and
11326 AFTER is set to M after the operation (that is, (CODE:MODE M OP)).
11327 If SYNC_P then a memory barrier is emitted before the operation.
11328 Either OP or M may be wrapped in a NOT operation. */
11331 rs6000_emit_sync (enum rtx_code code, enum machine_mode mode,
11332 rtx m, rtx op, rtx before_param, rtx after_param,
11335 enum machine_mode used_mode;
11336 rtx the_op, set_before, set_after, set_atomic, cc_scratch, before, after;
11339 HOST_WIDE_INT imask = GET_MODE_MASK (mode);
11340 rtx shift = NULL_RTX;
11343 emit_insn (gen_memory_barrier ());
11345 if (GET_CODE (m) == NOT)
11346 used_m = XEXP (m, 0);
11350 /* If this is smaller than SImode, we'll have to use SImode with
11352 if (mode == QImode || mode == HImode)
11356 if (MEM_ALIGN (used_m) >= 32)
11359 if (BYTES_BIG_ENDIAN)
11360 ishift = GET_MODE_BITSIZE (SImode) - GET_MODE_BITSIZE (mode);
11362 shift = GEN_INT (ishift);
11366 rtx addrSI, aligned_addr;
11368 addrSI = force_reg (SImode, gen_lowpart_common (SImode,
11369 XEXP (used_m, 0)));
11370 shift = gen_reg_rtx (SImode);
11372 emit_insn (gen_rlwinm (shift, addrSI, GEN_INT (3),
11375 aligned_addr = expand_binop (Pmode, and_optab,
11377 GEN_INT (-4), NULL_RTX,
11378 1, OPTAB_LIB_WIDEN);
11379 used_m = change_address (used_m, SImode, aligned_addr);
11380 set_mem_align (used_m, 32);
11381 /* It's safe to keep the old alias set of USED_M, because
11382 the operation is atomic and only affects the original
11384 if (GET_CODE (m) == NOT)
11385 m = gen_rtx_NOT (SImode, used_m);
11390 if (GET_CODE (op) == NOT)
11392 oldop = lowpart_subreg (SImode, XEXP (op, 0), mode);
11393 oldop = gen_rtx_NOT (SImode, oldop);
11396 oldop = lowpart_subreg (SImode, op, mode);
11401 newop = expand_binop (SImode, and_optab,
11402 oldop, GEN_INT (imask), NULL_RTX,
11403 1, OPTAB_LIB_WIDEN);
11404 emit_insn (gen_ashlsi3 (newop, newop, shift));
11408 newop = expand_binop (SImode, ior_optab,
11409 oldop, GEN_INT (~imask), NULL_RTX,
11410 1, OPTAB_LIB_WIDEN);
11411 emit_insn (gen_ashlsi3 (newop, newop, shift));
11418 newop = expand_binop (SImode, and_optab,
11419 oldop, GEN_INT (imask), NULL_RTX,
11420 1, OPTAB_LIB_WIDEN);
11421 emit_insn (gen_ashlsi3 (newop, newop, shift));
11423 mask = gen_reg_rtx (SImode);
11424 emit_move_insn (mask, GEN_INT (imask));
11425 emit_insn (gen_ashlsi3 (mask, mask, shift));
11427 newop = gen_rtx_AND (SImode, gen_rtx_PLUS (SImode, m, newop),
11429 newop = gen_rtx_IOR (SImode, newop,
11430 gen_rtx_AND (SImode,
11431 gen_rtx_NOT (SImode, mask),
11437 gcc_unreachable ();
11441 used_mode = SImode;
11442 before = gen_reg_rtx (used_mode);
11443 after = gen_reg_rtx (used_mode);
11448 before = before_param;
11449 after = after_param;
11451 if (before == NULL_RTX)
11452 before = gen_reg_rtx (used_mode);
11453 if (after == NULL_RTX)
11454 after = gen_reg_rtx (used_mode);
11457 if (code == PLUS && used_mode != mode)
11458 the_op = op; /* Computed above. */
11459 else if (GET_CODE (op) == NOT && GET_CODE (m) != NOT)
11460 the_op = gen_rtx_fmt_ee (code, used_mode, op, m);
11462 the_op = gen_rtx_fmt_ee (code, used_mode, m, op);
11464 set_after = gen_rtx_SET (VOIDmode, after, the_op);
11465 set_before = gen_rtx_SET (VOIDmode, before, used_m);
11466 set_atomic = gen_rtx_SET (VOIDmode, used_m,
11467 gen_rtx_UNSPEC (used_mode, gen_rtvec (1, the_op),
11469 cc_scratch = gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (CCmode));
11471 if (code == PLUS && used_mode != mode)
11472 vec = gen_rtvec (5, set_after, set_before, set_atomic, cc_scratch,
11473 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (SImode)));
11475 vec = gen_rtvec (4, set_after, set_before, set_atomic, cc_scratch);
11476 emit_insn (gen_rtx_PARALLEL (VOIDmode, vec));
11478 /* Shift and mask the return values properly. */
11479 if (used_mode != mode && before_param)
11481 emit_insn (gen_lshrsi3 (before, before, shift));
11482 convert_move (before_param, before, 1);
11485 if (used_mode != mode && after_param)
11487 emit_insn (gen_lshrsi3 (after, after, shift));
11488 convert_move (after_param, after, 1);
11491 /* The previous sequence will end with a branch that's dependent on
11492 the conditional store, so placing an isync will ensure that no
11493 other instructions (especially, no load or store instructions)
11494 can start before the atomic operation completes. */
11496 emit_insn (gen_isync ());
11499 /* Emit instructions to move SRC to DST. Called by splitters for
11500 multi-register moves. It will emit at most one instruction for
11501 each register that is accessed; that is, it won't emit li/lis pairs
11502 (or equivalent for 64-bit code). One of SRC or DST must be a hard
11506 rs6000_split_multireg_move (rtx dst, rtx src)
11508 /* The register number of the first register being moved. */
11510 /* The mode that is to be moved. */
11511 enum machine_mode mode;
11512 /* The mode that the move is being done in, and its size. */
11513 enum machine_mode reg_mode;
11515 /* The number of registers that will be moved. */
11518 reg = REG_P (dst) ? REGNO (dst) : REGNO (src);
11519 mode = GET_MODE (dst);
11520 nregs = HARD_REGNO_NREGS (reg, mode);
11521 if (FP_REGNO_P (reg))
11523 else if (ALTIVEC_REGNO_P (reg))
11524 reg_mode = V16QImode;
11526 reg_mode = word_mode;
11527 reg_mode_size = GET_MODE_SIZE (reg_mode);
11529 gcc_assert (reg_mode_size * nregs == GET_MODE_SIZE (mode));
11531 if (REG_P (src) && REG_P (dst) && (REGNO (src) < REGNO (dst)))
11533 /* Move register range backwards, if we might have destructive
11536 for (i = nregs - 1; i >= 0; i--)
11537 emit_insn (gen_rtx_SET (VOIDmode,
11538 simplify_gen_subreg (reg_mode, dst, mode,
11539 i * reg_mode_size),
11540 simplify_gen_subreg (reg_mode, src, mode,
11541 i * reg_mode_size)));
11547 bool used_update = false;
11549 if (MEM_P (src) && INT_REGNO_P (reg))
11553 if (GET_CODE (XEXP (src, 0)) == PRE_INC
11554 || GET_CODE (XEXP (src, 0)) == PRE_DEC)
11557 breg = XEXP (XEXP (src, 0), 0);
11558 delta_rtx = (GET_CODE (XEXP (src, 0)) == PRE_INC
11559 ? GEN_INT (GET_MODE_SIZE (GET_MODE (src)))
11560 : GEN_INT (-GET_MODE_SIZE (GET_MODE (src))));
11561 emit_insn (TARGET_32BIT
11562 ? gen_addsi3 (breg, breg, delta_rtx)
11563 : gen_adddi3 (breg, breg, delta_rtx));
11564 src = gen_rtx_MEM (mode, breg);
11566 else if (! offsettable_memref_p (src))
11568 rtx newsrc, basereg;
11569 basereg = gen_rtx_REG (Pmode, reg);
11570 emit_insn (gen_rtx_SET (VOIDmode, basereg, XEXP (src, 0)));
11571 newsrc = gen_rtx_MEM (GET_MODE (src), basereg);
11572 MEM_COPY_ATTRIBUTES (newsrc, src);
11576 breg = XEXP (src, 0);
11577 if (GET_CODE (breg) == PLUS || GET_CODE (breg) == LO_SUM)
11578 breg = XEXP (breg, 0);
11580 /* If the base register we are using to address memory is
11581 also a destination reg, then change that register last. */
11583 && REGNO (breg) >= REGNO (dst)
11584 && REGNO (breg) < REGNO (dst) + nregs)
11585 j = REGNO (breg) - REGNO (dst);
11588 if (GET_CODE (dst) == MEM && INT_REGNO_P (reg))
11592 if (GET_CODE (XEXP (dst, 0)) == PRE_INC
11593 || GET_CODE (XEXP (dst, 0)) == PRE_DEC)
11596 breg = XEXP (XEXP (dst, 0), 0);
11597 delta_rtx = (GET_CODE (XEXP (dst, 0)) == PRE_INC
11598 ? GEN_INT (GET_MODE_SIZE (GET_MODE (dst)))
11599 : GEN_INT (-GET_MODE_SIZE (GET_MODE (dst))));
11601 /* We have to update the breg before doing the store.
11602 Use store with update, if available. */
11606 rtx nsrc = simplify_gen_subreg (reg_mode, src, mode, 0);
11607 emit_insn (TARGET_32BIT
11608 ? (TARGET_POWERPC64
11609 ? gen_movdi_si_update (breg, breg, delta_rtx, nsrc)
11610 : gen_movsi_update (breg, breg, delta_rtx, nsrc))
11611 : gen_movdi_di_update (breg, breg, delta_rtx, nsrc));
11612 used_update = true;
11615 emit_insn (TARGET_32BIT
11616 ? gen_addsi3 (breg, breg, delta_rtx)
11617 : gen_adddi3 (breg, breg, delta_rtx));
11618 dst = gen_rtx_MEM (mode, breg);
11621 gcc_assert (offsettable_memref_p (dst));
11624 for (i = 0; i < nregs; i++)
11626 /* Calculate index to next subword. */
11631 /* If compiler already emitted move of first word by
11632 store with update, no need to do anything. */
11633 if (j == 0 && used_update)
11636 emit_insn (gen_rtx_SET (VOIDmode,
11637 simplify_gen_subreg (reg_mode, dst, mode,
11638 j * reg_mode_size),
11639 simplify_gen_subreg (reg_mode, src, mode,
11640 j * reg_mode_size)));
11646 /* This page contains routines that are used to determine what the
11647 function prologue and epilogue code will do and write them out. */
11649 /* Return the first fixed-point register that is required to be
11650 saved. 32 if none. */
11653 first_reg_to_save (void)
11657 /* Find lowest numbered live register. */
11658 for (first_reg = 13; first_reg <= 31; first_reg++)
11659 if (regs_ever_live[first_reg]
11660 && (! call_used_regs[first_reg]
11661 || (first_reg == RS6000_PIC_OFFSET_TABLE_REGNUM
11662 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
11663 || (DEFAULT_ABI == ABI_DARWIN && flag_pic)
11664 || (TARGET_TOC && TARGET_MINIMAL_TOC)))))
11669 && current_function_uses_pic_offset_table
11670 && first_reg > RS6000_PIC_OFFSET_TABLE_REGNUM)
11671 return RS6000_PIC_OFFSET_TABLE_REGNUM;
11677 /* Similar, for FP regs. */
11680 first_fp_reg_to_save (void)
11684 /* Find lowest numbered live register. */
11685 for (first_reg = 14 + 32; first_reg <= 63; first_reg++)
11686 if (regs_ever_live[first_reg])
11692 /* Similar, for AltiVec regs. */
11695 first_altivec_reg_to_save (void)
11699 /* Stack frame remains as is unless we are in AltiVec ABI. */
11700 if (! TARGET_ALTIVEC_ABI)
11701 return LAST_ALTIVEC_REGNO + 1;
11703 /* Find lowest numbered live register. */
11704 for (i = FIRST_ALTIVEC_REGNO + 20; i <= LAST_ALTIVEC_REGNO; ++i)
11705 if (regs_ever_live[i])
11711 /* Return a 32-bit mask of the AltiVec registers we need to set in
11712 VRSAVE. Bit n of the return value is 1 if Vn is live. The MSB in
11713 the 32-bit word is 0. */
11715 static unsigned int
11716 compute_vrsave_mask (void)
11718 unsigned int i, mask = 0;
11720 /* First, find out if we use _any_ altivec registers. */
11721 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
11722 if (regs_ever_live[i])
11723 mask |= ALTIVEC_REG_BIT (i);
11728 /* Next, remove the argument registers from the set. These must
11729 be in the VRSAVE mask set by the caller, so we don't need to add
11730 them in again. More importantly, the mask we compute here is
11731 used to generate CLOBBERs in the set_vrsave insn, and we do not
11732 wish the argument registers to die. */
11733 for (i = cfun->args_info.vregno - 1; i >= ALTIVEC_ARG_MIN_REG; --i)
11734 mask &= ~ALTIVEC_REG_BIT (i);
11736 /* Similarly, remove the return value from the set. */
11739 diddle_return_value (is_altivec_return_reg, &yes);
11741 mask &= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN);
11747 /* For a very restricted set of circumstances, we can cut down the
11748 size of prologues/epilogues by calling our own save/restore-the-world
11752 compute_save_world_info (rs6000_stack_t *info_ptr)
11754 info_ptr->world_save_p = 1;
11755 info_ptr->world_save_p
11756 = (WORLD_SAVE_P (info_ptr)
11757 && DEFAULT_ABI == ABI_DARWIN
11758 && ! (current_function_calls_setjmp && flag_exceptions)
11759 && info_ptr->first_fp_reg_save == FIRST_SAVED_FP_REGNO
11760 && info_ptr->first_gp_reg_save == FIRST_SAVED_GP_REGNO
11761 && info_ptr->first_altivec_reg_save == FIRST_SAVED_ALTIVEC_REGNO
11762 && info_ptr->cr_save_p);
11764 /* This will not work in conjunction with sibcalls. Make sure there
11765 are none. (This check is expensive, but seldom executed.) */
11766 if (WORLD_SAVE_P (info_ptr))
11769 for ( insn = get_last_insn_anywhere (); insn; insn = PREV_INSN (insn))
11770 if ( GET_CODE (insn) == CALL_INSN
11771 && SIBLING_CALL_P (insn))
11773 info_ptr->world_save_p = 0;
11778 if (WORLD_SAVE_P (info_ptr))
11780 /* Even if we're not touching VRsave, make sure there's room on the
11781 stack for it, if it looks like we're calling SAVE_WORLD, which
11782 will attempt to save it. */
11783 info_ptr->vrsave_size = 4;
11785 /* "Save" the VRsave register too if we're saving the world. */
11786 if (info_ptr->vrsave_mask == 0)
11787 info_ptr->vrsave_mask = compute_vrsave_mask ();
11789 /* Because the Darwin register save/restore routines only handle
11790 F14 .. F31 and V20 .. V31 as per the ABI, perform a consistency
11792 gcc_assert (info_ptr->first_fp_reg_save >= FIRST_SAVED_FP_REGNO
11793 && (info_ptr->first_altivec_reg_save
11794 >= FIRST_SAVED_ALTIVEC_REGNO));
11801 is_altivec_return_reg (rtx reg, void *xyes)
11803 bool *yes = (bool *) xyes;
11804 if (REGNO (reg) == ALTIVEC_ARG_RETURN)
11809 /* Calculate the stack information for the current function. This is
11810 complicated by having two separate calling sequences, the AIX calling
11811 sequence and the V.4 calling sequence.
11813 AIX (and Darwin/Mac OS X) stack frames look like:
11815 SP----> +---------------------------------------+
11816 | back chain to caller | 0 0
11817 +---------------------------------------+
11818 | saved CR | 4 8 (8-11)
11819 +---------------------------------------+
11821 +---------------------------------------+
11822 | reserved for compilers | 12 24
11823 +---------------------------------------+
11824 | reserved for binders | 16 32
11825 +---------------------------------------+
11826 | saved TOC pointer | 20 40
11827 +---------------------------------------+
11828 | Parameter save area (P) | 24 48
11829 +---------------------------------------+
11830 | Alloca space (A) | 24+P etc.
11831 +---------------------------------------+
11832 | Local variable space (L) | 24+P+A
11833 +---------------------------------------+
11834 | Float/int conversion temporary (X) | 24+P+A+L
11835 +---------------------------------------+
11836 | Save area for AltiVec registers (W) | 24+P+A+L+X
11837 +---------------------------------------+
11838 | AltiVec alignment padding (Y) | 24+P+A+L+X+W
11839 +---------------------------------------+
11840 | Save area for VRSAVE register (Z) | 24+P+A+L+X+W+Y
11841 +---------------------------------------+
11842 | Save area for GP registers (G) | 24+P+A+X+L+X+W+Y+Z
11843 +---------------------------------------+
11844 | Save area for FP registers (F) | 24+P+A+X+L+X+W+Y+Z+G
11845 +---------------------------------------+
11846 old SP->| back chain to caller's caller |
11847 +---------------------------------------+
11849 The required alignment for AIX configurations is two words (i.e., 8
11853 V.4 stack frames look like:
11855 SP----> +---------------------------------------+
11856 | back chain to caller | 0
11857 +---------------------------------------+
11858 | caller's saved LR | 4
11859 +---------------------------------------+
11860 | Parameter save area (P) | 8
11861 +---------------------------------------+
11862 | Alloca space (A) | 8+P
11863 +---------------------------------------+
11864 | Varargs save area (V) | 8+P+A
11865 +---------------------------------------+
11866 | Local variable space (L) | 8+P+A+V
11867 +---------------------------------------+
11868 | Float/int conversion temporary (X) | 8+P+A+V+L
11869 +---------------------------------------+
11870 | Save area for AltiVec registers (W) | 8+P+A+V+L+X
11871 +---------------------------------------+
11872 | AltiVec alignment padding (Y) | 8+P+A+V+L+X+W
11873 +---------------------------------------+
11874 | Save area for VRSAVE register (Z) | 8+P+A+V+L+X+W+Y
11875 +---------------------------------------+
11876 | SPE: area for 64-bit GP registers |
11877 +---------------------------------------+
11878 | SPE alignment padding |
11879 +---------------------------------------+
11880 | saved CR (C) | 8+P+A+V+L+X+W+Y+Z
11881 +---------------------------------------+
11882 | Save area for GP registers (G) | 8+P+A+V+L+X+W+Y+Z+C
11883 +---------------------------------------+
11884 | Save area for FP registers (F) | 8+P+A+V+L+X+W+Y+Z+C+G
11885 +---------------------------------------+
11886 old SP->| back chain to caller's caller |
11887 +---------------------------------------+
11889 The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
11890 given. (But note below and in sysv4.h that we require only 8 and
11891 may round up the size of our stack frame anyways. The historical
11892 reason is early versions of powerpc-linux which didn't properly
11893 align the stack at program startup. A happy side-effect is that
11894 -mno-eabi libraries can be used with -meabi programs.)
11896 The EABI configuration defaults to the V.4 layout. However,
11897 the stack alignment requirements may differ. If -mno-eabi is not
11898 given, the required stack alignment is 8 bytes; if -mno-eabi is
11899 given, the required alignment is 16 bytes. (But see V.4 comment
11902 #ifndef ABI_STACK_BOUNDARY
11903 #define ABI_STACK_BOUNDARY STACK_BOUNDARY
11906 static rs6000_stack_t *
11907 rs6000_stack_info (void)
11909 static rs6000_stack_t info, zero_info;
11910 rs6000_stack_t *info_ptr = &info;
11911 int reg_size = TARGET_32BIT ? 4 : 8;
11914 HOST_WIDE_INT non_fixed_size;
11916 /* Zero all fields portably. */
11921 /* Cache value so we don't rescan instruction chain over and over. */
11922 if (cfun->machine->insn_chain_scanned_p == 0)
11923 cfun->machine->insn_chain_scanned_p
11924 = spe_func_has_64bit_regs_p () + 1;
11925 info_ptr->spe_64bit_regs_used = cfun->machine->insn_chain_scanned_p - 1;
11928 /* Select which calling sequence. */
11929 info_ptr->abi = DEFAULT_ABI;
11931 /* Calculate which registers need to be saved & save area size. */
11932 info_ptr->first_gp_reg_save = first_reg_to_save ();
11933 /* Assume that we will have to save RS6000_PIC_OFFSET_TABLE_REGNUM,
11934 even if it currently looks like we won't. */
11935 if (((TARGET_TOC && TARGET_MINIMAL_TOC)
11936 || (flag_pic == 1 && DEFAULT_ABI == ABI_V4)
11937 || (flag_pic && DEFAULT_ABI == ABI_DARWIN))
11938 && info_ptr->first_gp_reg_save > RS6000_PIC_OFFSET_TABLE_REGNUM)
11939 info_ptr->gp_size = reg_size * (32 - RS6000_PIC_OFFSET_TABLE_REGNUM);
11941 info_ptr->gp_size = reg_size * (32 - info_ptr->first_gp_reg_save);
11943 /* For the SPE, we have an additional upper 32-bits on each GPR.
11944 Ideally we should save the entire 64-bits only when the upper
11945 half is used in SIMD instructions. Since we only record
11946 registers live (not the size they are used in), this proves
11947 difficult because we'd have to traverse the instruction chain at
11948 the right time, taking reload into account. This is a real pain,
11949 so we opt to save the GPRs in 64-bits always if but one register
11950 gets used in 64-bits. Otherwise, all the registers in the frame
11951 get saved in 32-bits.
11953 So... since when we save all GPRs (except the SP) in 64-bits, the
11954 traditional GP save area will be empty. */
11955 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
11956 info_ptr->gp_size = 0;
11958 info_ptr->first_fp_reg_save = first_fp_reg_to_save ();
11959 info_ptr->fp_size = 8 * (64 - info_ptr->first_fp_reg_save);
11961 info_ptr->first_altivec_reg_save = first_altivec_reg_to_save ();
11962 info_ptr->altivec_size = 16 * (LAST_ALTIVEC_REGNO + 1
11963 - info_ptr->first_altivec_reg_save);
11965 /* Does this function call anything? */
11966 info_ptr->calls_p = (! current_function_is_leaf
11967 || cfun->machine->ra_needs_full_frame);
11969 /* Determine if we need to save the link register. */
11970 if (rs6000_ra_ever_killed ()
11971 || (DEFAULT_ABI == ABI_AIX
11972 && current_function_profile
11973 && !TARGET_PROFILE_KERNEL)
11974 #ifdef TARGET_RELOCATABLE
11975 || (TARGET_RELOCATABLE && (get_pool_size () != 0))
11977 || (info_ptr->first_fp_reg_save != 64
11978 && !FP_SAVE_INLINE (info_ptr->first_fp_reg_save))
11979 || info_ptr->first_altivec_reg_save <= LAST_ALTIVEC_REGNO
11980 || (DEFAULT_ABI == ABI_V4 && current_function_calls_alloca)
11981 || (DEFAULT_ABI == ABI_DARWIN
11983 && current_function_uses_pic_offset_table)
11984 || info_ptr->calls_p)
11986 info_ptr->lr_save_p = 1;
11987 regs_ever_live[LINK_REGISTER_REGNUM] = 1;
11990 /* Determine if we need to save the condition code registers. */
11991 if (regs_ever_live[CR2_REGNO]
11992 || regs_ever_live[CR3_REGNO]
11993 || regs_ever_live[CR4_REGNO])
11995 info_ptr->cr_save_p = 1;
11996 if (DEFAULT_ABI == ABI_V4)
11997 info_ptr->cr_size = reg_size;
12000 /* If the current function calls __builtin_eh_return, then we need
12001 to allocate stack space for registers that will hold data for
12002 the exception handler. */
12003 if (current_function_calls_eh_return)
12006 for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM; ++i)
12009 /* SPE saves EH registers in 64-bits. */
12010 ehrd_size = i * (TARGET_SPE_ABI
12011 && info_ptr->spe_64bit_regs_used != 0
12012 ? UNITS_PER_SPE_WORD : UNITS_PER_WORD);
12017 /* Determine various sizes. */
12018 info_ptr->reg_size = reg_size;
12019 info_ptr->fixed_size = RS6000_SAVE_AREA;
12020 info_ptr->varargs_size = RS6000_VARARGS_AREA;
12021 info_ptr->vars_size = RS6000_ALIGN (get_frame_size (), 8);
12022 info_ptr->parm_size = RS6000_ALIGN (current_function_outgoing_args_size,
12023 TARGET_ALTIVEC ? 16 : 8);
12025 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
12026 info_ptr->spe_gp_size = 8 * (32 - info_ptr->first_gp_reg_save);
12028 info_ptr->spe_gp_size = 0;
12030 if (TARGET_ALTIVEC_ABI)
12031 info_ptr->vrsave_mask = compute_vrsave_mask ();
12033 info_ptr->vrsave_mask = 0;
12035 if (TARGET_ALTIVEC_VRSAVE && info_ptr->vrsave_mask)
12036 info_ptr->vrsave_size = 4;
12038 info_ptr->vrsave_size = 0;
12040 compute_save_world_info (info_ptr);
12042 /* Calculate the offsets. */
12043 switch (DEFAULT_ABI)
12047 gcc_unreachable ();
12051 info_ptr->fp_save_offset = - info_ptr->fp_size;
12052 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
12054 if (TARGET_ALTIVEC_ABI)
12056 info_ptr->vrsave_save_offset
12057 = info_ptr->gp_save_offset - info_ptr->vrsave_size;
12059 /* Align stack so vector save area is on a quadword boundary. */
12060 if (info_ptr->altivec_size != 0)
12061 info_ptr->altivec_padding_size
12062 = 16 - (-info_ptr->vrsave_save_offset % 16);
12064 info_ptr->altivec_padding_size = 0;
12066 info_ptr->altivec_save_offset
12067 = info_ptr->vrsave_save_offset
12068 - info_ptr->altivec_padding_size
12069 - info_ptr->altivec_size;
12071 /* Adjust for AltiVec case. */
12072 info_ptr->ehrd_offset = info_ptr->altivec_save_offset - ehrd_size;
12075 info_ptr->ehrd_offset = info_ptr->gp_save_offset - ehrd_size;
12076 info_ptr->cr_save_offset = reg_size; /* first word when 64-bit. */
12077 info_ptr->lr_save_offset = 2*reg_size;
12081 info_ptr->fp_save_offset = - info_ptr->fp_size;
12082 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
12083 info_ptr->cr_save_offset = info_ptr->gp_save_offset - info_ptr->cr_size;
12085 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
12087 /* Align stack so SPE GPR save area is aligned on a
12088 double-word boundary. */
12089 if (info_ptr->spe_gp_size != 0)
12090 info_ptr->spe_padding_size
12091 = 8 - (-info_ptr->cr_save_offset % 8);
12093 info_ptr->spe_padding_size = 0;
12095 info_ptr->spe_gp_save_offset
12096 = info_ptr->cr_save_offset
12097 - info_ptr->spe_padding_size
12098 - info_ptr->spe_gp_size;
12100 /* Adjust for SPE case. */
12101 info_ptr->toc_save_offset
12102 = info_ptr->spe_gp_save_offset - info_ptr->toc_size;
12104 else if (TARGET_ALTIVEC_ABI)
12106 info_ptr->vrsave_save_offset
12107 = info_ptr->cr_save_offset - info_ptr->vrsave_size;
12109 /* Align stack so vector save area is on a quadword boundary. */
12110 if (info_ptr->altivec_size != 0)
12111 info_ptr->altivec_padding_size
12112 = 16 - (-info_ptr->vrsave_save_offset % 16);
12114 info_ptr->altivec_padding_size = 0;
12116 info_ptr->altivec_save_offset
12117 = info_ptr->vrsave_save_offset
12118 - info_ptr->altivec_padding_size
12119 - info_ptr->altivec_size;
12121 /* Adjust for AltiVec case. */
12122 info_ptr->toc_save_offset
12123 = info_ptr->altivec_save_offset - info_ptr->toc_size;
12126 info_ptr->toc_save_offset = info_ptr->cr_save_offset - info_ptr->toc_size;
12127 info_ptr->ehrd_offset = info_ptr->toc_save_offset - ehrd_size;
12128 info_ptr->lr_save_offset = reg_size;
12132 save_align = (TARGET_ALTIVEC_ABI || DEFAULT_ABI == ABI_DARWIN) ? 16 : 8;
12133 info_ptr->save_size = RS6000_ALIGN (info_ptr->fp_size
12134 + info_ptr->gp_size
12135 + info_ptr->altivec_size
12136 + info_ptr->altivec_padding_size
12137 + info_ptr->spe_gp_size
12138 + info_ptr->spe_padding_size
12140 + info_ptr->cr_size
12141 + info_ptr->lr_size
12142 + info_ptr->vrsave_size
12143 + info_ptr->toc_size,
12146 non_fixed_size = (info_ptr->vars_size
12147 + info_ptr->parm_size
12148 + info_ptr->save_size
12149 + info_ptr->varargs_size);
12151 info_ptr->total_size = RS6000_ALIGN (non_fixed_size + info_ptr->fixed_size,
12152 ABI_STACK_BOUNDARY / BITS_PER_UNIT);
12154 /* Determine if we need to allocate any stack frame:
12156 For AIX we need to push the stack if a frame pointer is needed
12157 (because the stack might be dynamically adjusted), if we are
12158 debugging, if we make calls, or if the sum of fp_save, gp_save,
12159 and local variables are more than the space needed to save all
12160 non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
12161 + 18*8 = 288 (GPR13 reserved).
12163 For V.4 we don't have the stack cushion that AIX uses, but assume
12164 that the debugger can handle stackless frames. */
12166 if (info_ptr->calls_p)
12167 info_ptr->push_p = 1;
12169 else if (DEFAULT_ABI == ABI_V4)
12170 info_ptr->push_p = non_fixed_size != 0;
12172 else if (frame_pointer_needed)
12173 info_ptr->push_p = 1;
12175 else if (TARGET_XCOFF && write_symbols != NO_DEBUG)
12176 info_ptr->push_p = 1;
12179 info_ptr->push_p = non_fixed_size > (TARGET_32BIT ? 220 : 288);
12181 /* Zero offsets if we're not saving those registers. */
12182 if (info_ptr->fp_size == 0)
12183 info_ptr->fp_save_offset = 0;
12185 if (info_ptr->gp_size == 0)
12186 info_ptr->gp_save_offset = 0;
12188 if (! TARGET_ALTIVEC_ABI || info_ptr->altivec_size == 0)
12189 info_ptr->altivec_save_offset = 0;
12191 if (! TARGET_ALTIVEC_ABI || info_ptr->vrsave_mask == 0)
12192 info_ptr->vrsave_save_offset = 0;
12194 if (! TARGET_SPE_ABI
12195 || info_ptr->spe_64bit_regs_used == 0
12196 || info_ptr->spe_gp_size == 0)
12197 info_ptr->spe_gp_save_offset = 0;
12199 if (! info_ptr->lr_save_p)
12200 info_ptr->lr_save_offset = 0;
12202 if (! info_ptr->cr_save_p)
12203 info_ptr->cr_save_offset = 0;
12205 if (! info_ptr->toc_save_p)
12206 info_ptr->toc_save_offset = 0;
12211 /* Return true if the current function uses any GPRs in 64-bit SIMD
12215 spe_func_has_64bit_regs_p (void)
12219 /* Functions that save and restore all the call-saved registers will
12220 need to save/restore the registers in 64-bits. */
12221 if (current_function_calls_eh_return
12222 || current_function_calls_setjmp
12223 || current_function_has_nonlocal_goto)
12226 insns = get_insns ();
12228 for (insn = NEXT_INSN (insns); insn != NULL_RTX; insn = NEXT_INSN (insn))
12234 /* FIXME: This should be implemented with attributes...
12236 (set_attr "spe64" "true")....then,
12237 if (get_spe64(insn)) return true;
12239 It's the only reliable way to do the stuff below. */
12241 i = PATTERN (insn);
12242 if (GET_CODE (i) == SET)
12244 enum machine_mode mode = GET_MODE (SET_SRC (i));
12246 if (SPE_VECTOR_MODE (mode))
12248 if (TARGET_E500_DOUBLE && mode == DFmode)
12258 debug_stack_info (rs6000_stack_t *info)
12260 const char *abi_string;
12263 info = rs6000_stack_info ();
12265 fprintf (stderr, "\nStack information for function %s:\n",
12266 ((current_function_decl && DECL_NAME (current_function_decl))
12267 ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
12272 default: abi_string = "Unknown"; break;
12273 case ABI_NONE: abi_string = "NONE"; break;
12274 case ABI_AIX: abi_string = "AIX"; break;
12275 case ABI_DARWIN: abi_string = "Darwin"; break;
12276 case ABI_V4: abi_string = "V.4"; break;
12279 fprintf (stderr, "\tABI = %5s\n", abi_string);
12281 if (TARGET_ALTIVEC_ABI)
12282 fprintf (stderr, "\tALTIVEC ABI extensions enabled.\n");
12284 if (TARGET_SPE_ABI)
12285 fprintf (stderr, "\tSPE ABI extensions enabled.\n");
12287 if (info->first_gp_reg_save != 32)
12288 fprintf (stderr, "\tfirst_gp_reg_save = %5d\n", info->first_gp_reg_save);
12290 if (info->first_fp_reg_save != 64)
12291 fprintf (stderr, "\tfirst_fp_reg_save = %5d\n", info->first_fp_reg_save);
12293 if (info->first_altivec_reg_save <= LAST_ALTIVEC_REGNO)
12294 fprintf (stderr, "\tfirst_altivec_reg_save = %5d\n",
12295 info->first_altivec_reg_save);
12297 if (info->lr_save_p)
12298 fprintf (stderr, "\tlr_save_p = %5d\n", info->lr_save_p);
12300 if (info->cr_save_p)
12301 fprintf (stderr, "\tcr_save_p = %5d\n", info->cr_save_p);
12303 if (info->toc_save_p)
12304 fprintf (stderr, "\ttoc_save_p = %5d\n", info->toc_save_p);
12306 if (info->vrsave_mask)
12307 fprintf (stderr, "\tvrsave_mask = 0x%x\n", info->vrsave_mask);
12310 fprintf (stderr, "\tpush_p = %5d\n", info->push_p);
12313 fprintf (stderr, "\tcalls_p = %5d\n", info->calls_p);
12315 if (info->gp_save_offset)
12316 fprintf (stderr, "\tgp_save_offset = %5d\n", info->gp_save_offset);
12318 if (info->fp_save_offset)
12319 fprintf (stderr, "\tfp_save_offset = %5d\n", info->fp_save_offset);
12321 if (info->altivec_save_offset)
12322 fprintf (stderr, "\taltivec_save_offset = %5d\n",
12323 info->altivec_save_offset);
12325 if (info->spe_gp_save_offset)
12326 fprintf (stderr, "\tspe_gp_save_offset = %5d\n",
12327 info->spe_gp_save_offset);
12329 if (info->vrsave_save_offset)
12330 fprintf (stderr, "\tvrsave_save_offset = %5d\n",
12331 info->vrsave_save_offset);
12333 if (info->lr_save_offset)
12334 fprintf (stderr, "\tlr_save_offset = %5d\n", info->lr_save_offset);
12336 if (info->cr_save_offset)
12337 fprintf (stderr, "\tcr_save_offset = %5d\n", info->cr_save_offset);
12339 if (info->toc_save_offset)
12340 fprintf (stderr, "\ttoc_save_offset = %5d\n", info->toc_save_offset);
12342 if (info->varargs_save_offset)
12343 fprintf (stderr, "\tvarargs_save_offset = %5d\n", info->varargs_save_offset);
12345 if (info->total_size)
12346 fprintf (stderr, "\ttotal_size = "HOST_WIDE_INT_PRINT_DEC"\n",
12349 if (info->varargs_size)
12350 fprintf (stderr, "\tvarargs_size = %5d\n", info->varargs_size);
12352 if (info->vars_size)
12353 fprintf (stderr, "\tvars_size = "HOST_WIDE_INT_PRINT_DEC"\n",
12356 if (info->parm_size)
12357 fprintf (stderr, "\tparm_size = %5d\n", info->parm_size);
12359 if (info->fixed_size)
12360 fprintf (stderr, "\tfixed_size = %5d\n", info->fixed_size);
12363 fprintf (stderr, "\tgp_size = %5d\n", info->gp_size);
12365 if (info->spe_gp_size)
12366 fprintf (stderr, "\tspe_gp_size = %5d\n", info->spe_gp_size);
12369 fprintf (stderr, "\tfp_size = %5d\n", info->fp_size);
12371 if (info->altivec_size)
12372 fprintf (stderr, "\taltivec_size = %5d\n", info->altivec_size);
12374 if (info->vrsave_size)
12375 fprintf (stderr, "\tvrsave_size = %5d\n", info->vrsave_size);
12377 if (info->altivec_padding_size)
12378 fprintf (stderr, "\taltivec_padding_size= %5d\n",
12379 info->altivec_padding_size);
12381 if (info->spe_padding_size)
12382 fprintf (stderr, "\tspe_padding_size = %5d\n",
12383 info->spe_padding_size);
12386 fprintf (stderr, "\tlr_size = %5d\n", info->lr_size);
12389 fprintf (stderr, "\tcr_size = %5d\n", info->cr_size);
12391 if (info->toc_size)
12392 fprintf (stderr, "\ttoc_size = %5d\n", info->toc_size);
12394 if (info->save_size)
12395 fprintf (stderr, "\tsave_size = %5d\n", info->save_size);
12397 if (info->reg_size != 4)
12398 fprintf (stderr, "\treg_size = %5d\n", info->reg_size);
12400 fprintf (stderr, "\n");
12404 rs6000_return_addr (int count, rtx frame)
12406 /* Currently we don't optimize very well between prolog and body
12407 code and for PIC code the code can be actually quite bad, so
12408 don't try to be too clever here. */
12409 if (count != 0 || (DEFAULT_ABI != ABI_AIX && flag_pic))
12411 cfun->machine->ra_needs_full_frame = 1;
12418 plus_constant (copy_to_reg
12419 (gen_rtx_MEM (Pmode,
12420 memory_address (Pmode, frame))),
12421 RETURN_ADDRESS_OFFSET)));
12424 cfun->machine->ra_need_lr = 1;
12425 return get_hard_reg_initial_val (Pmode, LINK_REGISTER_REGNUM);
12428 /* Say whether a function is a candidate for sibcall handling or not.
12429 We do not allow indirect calls to be optimized into sibling calls.
12430 Also, we can't do it if there are any vector parameters; there's
12431 nowhere to put the VRsave code so it works; note that functions with
12432 vector parameters are required to have a prototype, so the argument
12433 type info must be available here. (The tail recursion case can work
12434 with vector parameters, but there's no way to distinguish here.) */
12436 rs6000_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
12441 if (TARGET_ALTIVEC_VRSAVE)
12443 for (type = TYPE_ARG_TYPES (TREE_TYPE (decl));
12444 type; type = TREE_CHAIN (type))
12446 if (TREE_CODE (TREE_VALUE (type)) == VECTOR_TYPE)
12450 if (DEFAULT_ABI == ABI_DARWIN
12451 || (*targetm.binds_local_p) (decl))
12453 tree attr_list = TYPE_ATTRIBUTES (TREE_TYPE (decl));
12455 if (!lookup_attribute ("longcall", attr_list)
12456 || lookup_attribute ("shortcall", attr_list))
12464 rs6000_ra_ever_killed (void)
12470 if (current_function_is_thunk)
12473 /* regs_ever_live has LR marked as used if any sibcalls are present,
12474 but this should not force saving and restoring in the
12475 pro/epilogue. Likewise, reg_set_between_p thinks a sibcall
12476 clobbers LR, so that is inappropriate. */
12478 /* Also, the prologue can generate a store into LR that
12479 doesn't really count, like this:
12482 bcl to set PIC register
12486 When we're called from the epilogue, we need to avoid counting
12487 this as a store. */
12489 push_topmost_sequence ();
12490 top = get_insns ();
12491 pop_topmost_sequence ();
12492 reg = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
12494 for (insn = NEXT_INSN (top); insn != NULL_RTX; insn = NEXT_INSN (insn))
12498 if (FIND_REG_INC_NOTE (insn, reg))
12500 else if (GET_CODE (insn) == CALL_INSN
12501 && !SIBLING_CALL_P (insn))
12503 else if (set_of (reg, insn) != NULL_RTX
12504 && !prologue_epilogue_contains (insn))
12511 /* Add a REG_MAYBE_DEAD note to the insn. */
12513 rs6000_maybe_dead (rtx insn)
12515 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD,
12520 /* Emit instructions needed to load the TOC register.
12521 This is only needed when TARGET_TOC, TARGET_MINIMAL_TOC, and there is
12522 a constant pool; or for SVR4 -fpic. */
12525 rs6000_emit_load_toc_table (int fromprolog)
12528 dest = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
12530 if (TARGET_ELF && DEFAULT_ABI == ABI_V4 && flag_pic == 1)
12532 rtx temp = (fromprolog
12533 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
12534 : gen_reg_rtx (Pmode));
12535 insn = emit_insn (gen_load_toc_v4_pic_si (temp));
12537 rs6000_maybe_dead (insn);
12538 insn = emit_move_insn (dest, temp);
12540 rs6000_maybe_dead (insn);
12542 else if (TARGET_ELF && DEFAULT_ABI != ABI_AIX && flag_pic == 2)
12545 rtx tempLR = (fromprolog
12546 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
12547 : gen_reg_rtx (Pmode));
12548 rtx temp0 = (fromprolog
12549 ? gen_rtx_REG (Pmode, 0)
12550 : gen_reg_rtx (Pmode));
12556 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
12557 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
12559 ASM_GENERATE_INTERNAL_LABEL (buf, "LCL", rs6000_pic_labelno);
12560 symL = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
12562 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1 (tempLR,
12564 rs6000_maybe_dead (emit_move_insn (dest, tempLR));
12565 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_2 (temp0, dest,
12573 tocsym = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
12574 emit_insn (gen_load_toc_v4_PIC_1b (tempLR, tocsym));
12575 emit_move_insn (dest, tempLR);
12576 emit_move_insn (temp0, gen_rtx_MEM (Pmode, dest));
12578 insn = emit_insn (gen_addsi3 (dest, temp0, dest));
12580 rs6000_maybe_dead (insn);
12582 else if (TARGET_ELF && !TARGET_AIX && flag_pic == 0 && TARGET_MINIMAL_TOC)
12584 /* This is for AIX code running in non-PIC ELF32. */
12587 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
12588 realsym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
12590 insn = emit_insn (gen_elf_high (dest, realsym));
12592 rs6000_maybe_dead (insn);
12593 insn = emit_insn (gen_elf_low (dest, dest, realsym));
12595 rs6000_maybe_dead (insn);
12599 gcc_assert (DEFAULT_ABI == ABI_AIX);
12602 insn = emit_insn (gen_load_toc_aix_si (dest));
12604 insn = emit_insn (gen_load_toc_aix_di (dest));
12606 rs6000_maybe_dead (insn);
12610 /* Emit instructions to restore the link register after determining where
12611 its value has been stored. */
12614 rs6000_emit_eh_reg_restore (rtx source, rtx scratch)
12616 rs6000_stack_t *info = rs6000_stack_info ();
12619 operands[0] = source;
12620 operands[1] = scratch;
12622 if (info->lr_save_p)
12624 rtx frame_rtx = stack_pointer_rtx;
12625 HOST_WIDE_INT sp_offset = 0;
12628 if (frame_pointer_needed
12629 || current_function_calls_alloca
12630 || info->total_size > 32767)
12632 emit_move_insn (operands[1], gen_rtx_MEM (Pmode, frame_rtx));
12633 frame_rtx = operands[1];
12635 else if (info->push_p)
12636 sp_offset = info->total_size;
12638 tmp = plus_constant (frame_rtx, info->lr_save_offset + sp_offset);
12639 tmp = gen_rtx_MEM (Pmode, tmp);
12640 emit_move_insn (tmp, operands[0]);
12643 emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM), operands[0]);
12646 static GTY(()) int set = -1;
12649 get_TOC_alias_set (void)
12652 set = new_alias_set ();
12656 /* This returns nonzero if the current function uses the TOC. This is
12657 determined by the presence of (use (unspec ... UNSPEC_TOC)), which
12658 is generated by the ABI_V4 load_toc_* patterns. */
12665 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
12668 rtx pat = PATTERN (insn);
12671 if (GET_CODE (pat) == PARALLEL)
12672 for (i = 0; i < XVECLEN (pat, 0); i++)
12674 rtx sub = XVECEXP (pat, 0, i);
12675 if (GET_CODE (sub) == USE)
12677 sub = XEXP (sub, 0);
12678 if (GET_CODE (sub) == UNSPEC
12679 && XINT (sub, 1) == UNSPEC_TOC)
12689 create_TOC_reference (rtx symbol)
12691 return gen_rtx_PLUS (Pmode,
12692 gen_rtx_REG (Pmode, TOC_REGISTER),
12693 gen_rtx_CONST (Pmode,
12694 gen_rtx_MINUS (Pmode, symbol,
12695 gen_rtx_SYMBOL_REF (Pmode, toc_label_name))));
12698 /* If _Unwind_* has been called from within the same module,
12699 toc register is not guaranteed to be saved to 40(1) on function
12700 entry. Save it there in that case. */
12703 rs6000_aix_emit_builtin_unwind_init (void)
12706 rtx stack_top = gen_reg_rtx (Pmode);
12707 rtx opcode_addr = gen_reg_rtx (Pmode);
12708 rtx opcode = gen_reg_rtx (SImode);
12709 rtx tocompare = gen_reg_rtx (SImode);
12710 rtx no_toc_save_needed = gen_label_rtx ();
12712 mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
12713 emit_move_insn (stack_top, mem);
12715 mem = gen_rtx_MEM (Pmode,
12716 gen_rtx_PLUS (Pmode, stack_top,
12717 GEN_INT (2 * GET_MODE_SIZE (Pmode))));
12718 emit_move_insn (opcode_addr, mem);
12719 emit_move_insn (opcode, gen_rtx_MEM (SImode, opcode_addr));
12720 emit_move_insn (tocompare, gen_int_mode (TARGET_32BIT ? 0x80410014
12721 : 0xE8410028, SImode));
12723 do_compare_rtx_and_jump (opcode, tocompare, EQ, 1,
12724 SImode, NULL_RTX, NULL_RTX,
12725 no_toc_save_needed);
12727 mem = gen_rtx_MEM (Pmode,
12728 gen_rtx_PLUS (Pmode, stack_top,
12729 GEN_INT (5 * GET_MODE_SIZE (Pmode))));
12730 emit_move_insn (mem, gen_rtx_REG (Pmode, 2));
12731 emit_label (no_toc_save_needed);
12734 /* This ties together stack memory (MEM with an alias set of
12735 rs6000_sr_alias_set) and the change to the stack pointer. */
12738 rs6000_emit_stack_tie (void)
12740 rtx mem = gen_rtx_MEM (BLKmode, gen_rtx_REG (Pmode, STACK_POINTER_REGNUM));
12742 set_mem_alias_set (mem, rs6000_sr_alias_set);
12743 emit_insn (gen_stack_tie (mem));
12746 /* Emit the correct code for allocating stack space, as insns.
12747 If COPY_R12, make sure a copy of the old frame is left in r12.
12748 The generated code may use hard register 0 as a temporary. */
12751 rs6000_emit_allocate_stack (HOST_WIDE_INT size, int copy_r12)
12754 rtx stack_reg = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
12755 rtx tmp_reg = gen_rtx_REG (Pmode, 0);
12756 rtx todec = gen_int_mode (-size, Pmode);
12758 if (INTVAL (todec) != -size)
12760 warning (0, "stack frame too large");
12761 emit_insn (gen_trap ());
12765 if (current_function_limit_stack)
12767 if (REG_P (stack_limit_rtx)
12768 && REGNO (stack_limit_rtx) > 1
12769 && REGNO (stack_limit_rtx) <= 31)
12771 emit_insn (TARGET_32BIT
12772 ? gen_addsi3 (tmp_reg,
12775 : gen_adddi3 (tmp_reg,
12779 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
12782 else if (GET_CODE (stack_limit_rtx) == SYMBOL_REF
12784 && DEFAULT_ABI == ABI_V4)
12786 rtx toload = gen_rtx_CONST (VOIDmode,
12787 gen_rtx_PLUS (Pmode,
12791 emit_insn (gen_elf_high (tmp_reg, toload));
12792 emit_insn (gen_elf_low (tmp_reg, tmp_reg, toload));
12793 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
12797 warning (0, "stack limit expression is not supported");
12800 if (copy_r12 || ! TARGET_UPDATE)
12801 emit_move_insn (gen_rtx_REG (Pmode, 12), stack_reg);
12807 /* Need a note here so that try_split doesn't get confused. */
12808 if (get_last_insn () == NULL_RTX)
12809 emit_note (NOTE_INSN_DELETED);
12810 insn = emit_move_insn (tmp_reg, todec);
12811 try_split (PATTERN (insn), insn, 0);
12815 insn = emit_insn (TARGET_32BIT
12816 ? gen_movsi_update (stack_reg, stack_reg,
12818 : gen_movdi_di_update (stack_reg, stack_reg,
12819 todec, stack_reg));
12823 insn = emit_insn (TARGET_32BIT
12824 ? gen_addsi3 (stack_reg, stack_reg, todec)
12825 : gen_adddi3 (stack_reg, stack_reg, todec));
12826 emit_move_insn (gen_rtx_MEM (Pmode, stack_reg),
12827 gen_rtx_REG (Pmode, 12));
12830 RTX_FRAME_RELATED_P (insn) = 1;
12832 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
12833 gen_rtx_SET (VOIDmode, stack_reg,
12834 gen_rtx_PLUS (Pmode, stack_reg,
12839 /* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
12840 with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
12841 is not NULL. It would be nice if dwarf2out_frame_debug_expr could
12842 deduce these equivalences by itself so it wasn't necessary to hold
12843 its hand so much. */
12846 rs6000_frame_related (rtx insn, rtx reg, HOST_WIDE_INT val,
12847 rtx reg2, rtx rreg)
12851 /* copy_rtx will not make unique copies of registers, so we need to
12852 ensure we don't have unwanted sharing here. */
12854 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
12857 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
12859 real = copy_rtx (PATTERN (insn));
12861 if (reg2 != NULL_RTX)
12862 real = replace_rtx (real, reg2, rreg);
12864 real = replace_rtx (real, reg,
12865 gen_rtx_PLUS (Pmode, gen_rtx_REG (Pmode,
12866 STACK_POINTER_REGNUM),
12869 /* We expect that 'real' is either a SET or a PARALLEL containing
12870 SETs (and possibly other stuff). In a PARALLEL, all the SETs
12871 are important so they all have to be marked RTX_FRAME_RELATED_P. */
12873 if (GET_CODE (real) == SET)
12877 temp = simplify_rtx (SET_SRC (set));
12879 SET_SRC (set) = temp;
12880 temp = simplify_rtx (SET_DEST (set));
12882 SET_DEST (set) = temp;
12883 if (GET_CODE (SET_DEST (set)) == MEM)
12885 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
12887 XEXP (SET_DEST (set), 0) = temp;
12894 gcc_assert (GET_CODE (real) == PARALLEL);
12895 for (i = 0; i < XVECLEN (real, 0); i++)
12896 if (GET_CODE (XVECEXP (real, 0, i)) == SET)
12898 rtx set = XVECEXP (real, 0, i);
12900 temp = simplify_rtx (SET_SRC (set));
12902 SET_SRC (set) = temp;
12903 temp = simplify_rtx (SET_DEST (set));
12905 SET_DEST (set) = temp;
12906 if (GET_CODE (SET_DEST (set)) == MEM)
12908 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
12910 XEXP (SET_DEST (set), 0) = temp;
12912 RTX_FRAME_RELATED_P (set) = 1;
12917 real = spe_synthesize_frame_save (real);
12919 RTX_FRAME_RELATED_P (insn) = 1;
12920 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
12925 /* Given an SPE frame note, return a PARALLEL of SETs with the
12926 original note, plus a synthetic register save. */
12929 spe_synthesize_frame_save (rtx real)
12931 rtx synth, offset, reg, real2;
12933 if (GET_CODE (real) != SET
12934 || GET_MODE (SET_SRC (real)) != V2SImode)
12937 /* For the SPE, registers saved in 64-bits, get a PARALLEL for their
12938 frame related note. The parallel contains a set of the register
12939 being saved, and another set to a synthetic register (n+1200).
12940 This is so we can differentiate between 64-bit and 32-bit saves.
12941 Words cannot describe this nastiness. */
12943 gcc_assert (GET_CODE (SET_DEST (real)) == MEM
12944 && GET_CODE (XEXP (SET_DEST (real), 0)) == PLUS
12945 && GET_CODE (SET_SRC (real)) == REG);
12948 (set (mem (plus (reg x) (const y)))
12951 (set (mem (plus (reg x) (const y+4)))
12955 real2 = copy_rtx (real);
12956 PUT_MODE (SET_DEST (real2), SImode);
12957 reg = SET_SRC (real2);
12958 real2 = replace_rtx (real2, reg, gen_rtx_REG (SImode, REGNO (reg)));
12959 synth = copy_rtx (real2);
12961 if (BYTES_BIG_ENDIAN)
12963 offset = XEXP (XEXP (SET_DEST (real2), 0), 1);
12964 real2 = replace_rtx (real2, offset, GEN_INT (INTVAL (offset) + 4));
12967 reg = SET_SRC (synth);
12969 synth = replace_rtx (synth, reg,
12970 gen_rtx_REG (SImode, REGNO (reg) + 1200));
12972 offset = XEXP (XEXP (SET_DEST (synth), 0), 1);
12973 synth = replace_rtx (synth, offset,
12974 GEN_INT (INTVAL (offset)
12975 + (BYTES_BIG_ENDIAN ? 0 : 4)));
12977 RTX_FRAME_RELATED_P (synth) = 1;
12978 RTX_FRAME_RELATED_P (real2) = 1;
12979 if (BYTES_BIG_ENDIAN)
12980 real = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, synth, real2));
12982 real = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, real2, synth));
12987 /* Returns an insn that has a vrsave set operation with the
12988 appropriate CLOBBERs. */
12991 generate_set_vrsave (rtx reg, rs6000_stack_t *info, int epiloguep)
12994 rtx insn, clobs[TOTAL_ALTIVEC_REGS + 1];
12995 rtx vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
12998 = gen_rtx_SET (VOIDmode,
13000 gen_rtx_UNSPEC_VOLATILE (SImode,
13001 gen_rtvec (2, reg, vrsave),
13006 /* We need to clobber the registers in the mask so the scheduler
13007 does not move sets to VRSAVE before sets of AltiVec registers.
13009 However, if the function receives nonlocal gotos, reload will set
13010 all call saved registers live. We will end up with:
13012 (set (reg 999) (mem))
13013 (parallel [ (set (reg vrsave) (unspec blah))
13014 (clobber (reg 999))])
13016 The clobber will cause the store into reg 999 to be dead, and
13017 flow will attempt to delete an epilogue insn. In this case, we
13018 need an unspec use/set of the register. */
13020 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
13021 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
13023 if (!epiloguep || call_used_regs [i])
13024 clobs[nclobs++] = gen_rtx_CLOBBER (VOIDmode,
13025 gen_rtx_REG (V4SImode, i));
13028 rtx reg = gen_rtx_REG (V4SImode, i);
13031 = gen_rtx_SET (VOIDmode,
13033 gen_rtx_UNSPEC (V4SImode,
13034 gen_rtvec (1, reg), 27));
13038 insn = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nclobs));
13040 for (i = 0; i < nclobs; ++i)
13041 XVECEXP (insn, 0, i) = clobs[i];
13046 /* Save a register into the frame, and emit RTX_FRAME_RELATED_P notes.
13047 Save REGNO into [FRAME_REG + OFFSET] in mode MODE. */
13050 emit_frame_save (rtx frame_reg, rtx frame_ptr, enum machine_mode mode,
13051 unsigned int regno, int offset, HOST_WIDE_INT total_size)
13053 rtx reg, offset_rtx, insn, mem, addr, int_rtx;
13054 rtx replacea, replaceb;
13056 int_rtx = GEN_INT (offset);
13058 /* Some cases that need register indexed addressing. */
13059 if ((TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
13060 || (TARGET_E500_DOUBLE && mode == DFmode)
13062 && SPE_VECTOR_MODE (mode)
13063 && !SPE_CONST_OFFSET_OK (offset)))
13065 /* Whomever calls us must make sure r11 is available in the
13066 flow path of instructions in the prologue. */
13067 offset_rtx = gen_rtx_REG (Pmode, 11);
13068 emit_move_insn (offset_rtx, int_rtx);
13070 replacea = offset_rtx;
13071 replaceb = int_rtx;
13075 offset_rtx = int_rtx;
13076 replacea = NULL_RTX;
13077 replaceb = NULL_RTX;
13080 reg = gen_rtx_REG (mode, regno);
13081 addr = gen_rtx_PLUS (Pmode, frame_reg, offset_rtx);
13082 mem = gen_rtx_MEM (mode, addr);
13083 set_mem_alias_set (mem, rs6000_sr_alias_set);
13085 insn = emit_move_insn (mem, reg);
13087 rs6000_frame_related (insn, frame_ptr, total_size, replacea, replaceb);
13090 /* Emit an offset memory reference suitable for a frame store, while
13091 converting to a valid addressing mode. */
13094 gen_frame_mem_offset (enum machine_mode mode, rtx reg, int offset)
13096 rtx int_rtx, offset_rtx;
13098 int_rtx = GEN_INT (offset);
13100 if ((TARGET_SPE_ABI && SPE_VECTOR_MODE (mode))
13101 || (TARGET_E500_DOUBLE && mode == DFmode))
13103 offset_rtx = gen_rtx_REG (Pmode, FIXED_SCRATCH);
13104 emit_move_insn (offset_rtx, int_rtx);
13107 offset_rtx = int_rtx;
13109 return gen_rtx_MEM (mode, gen_rtx_PLUS (Pmode, reg, offset_rtx));
13112 #ifndef TARGET_FIX_AND_CONTINUE
13113 #define TARGET_FIX_AND_CONTINUE 0
13116 /* Emit function prologue as insns. */
13119 rs6000_emit_prologue (void)
13121 rs6000_stack_t *info = rs6000_stack_info ();
13122 enum machine_mode reg_mode = Pmode;
13123 int reg_size = TARGET_32BIT ? 4 : 8;
13124 rtx sp_reg_rtx = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
13125 rtx frame_ptr_rtx = gen_rtx_REG (Pmode, 12);
13126 rtx frame_reg_rtx = sp_reg_rtx;
13127 rtx cr_save_rtx = NULL_RTX;
13129 int saving_FPRs_inline;
13130 int using_store_multiple;
13131 HOST_WIDE_INT sp_offset = 0;
13133 if (TARGET_FIX_AND_CONTINUE)
13135 /* gdb on darwin arranges to forward a function from the old
13136 address by modifying the first 5 instructions of the function
13137 to branch to the overriding function. This is necessary to
13138 permit function pointers that point to the old function to
13139 actually forward to the new function. */
13140 emit_insn (gen_nop ());
13141 emit_insn (gen_nop ());
13142 emit_insn (gen_nop ());
13143 emit_insn (gen_nop ());
13144 emit_insn (gen_nop ());
13147 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
13149 reg_mode = V2SImode;
13153 using_store_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
13154 && (!TARGET_SPE_ABI
13155 || info->spe_64bit_regs_used == 0)
13156 && info->first_gp_reg_save < 31);
13157 saving_FPRs_inline = (info->first_fp_reg_save == 64
13158 || FP_SAVE_INLINE (info->first_fp_reg_save)
13159 || current_function_calls_eh_return
13160 || cfun->machine->ra_need_lr);
13162 /* For V.4, update stack before we do any saving and set back pointer. */
13164 && (DEFAULT_ABI == ABI_V4
13165 || current_function_calls_eh_return))
13167 if (info->total_size < 32767)
13168 sp_offset = info->total_size;
13170 frame_reg_rtx = frame_ptr_rtx;
13171 rs6000_emit_allocate_stack (info->total_size,
13172 (frame_reg_rtx != sp_reg_rtx
13173 && (info->cr_save_p
13175 || info->first_fp_reg_save < 64
13176 || info->first_gp_reg_save < 32
13178 if (frame_reg_rtx != sp_reg_rtx)
13179 rs6000_emit_stack_tie ();
13182 /* Handle world saves specially here. */
13183 if (WORLD_SAVE_P (info))
13189 /* save_world expects lr in r0. */
13190 if (info->lr_save_p)
13192 insn = emit_move_insn (gen_rtx_REG (Pmode, 0),
13193 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
13194 RTX_FRAME_RELATED_P (insn) = 1;
13197 /* The SAVE_WORLD and RESTORE_WORLD routines make a number of
13198 assumptions about the offsets of various bits of the stack
13200 gcc_assert (info->gp_save_offset == -220
13201 && info->fp_save_offset == -144
13202 && info->lr_save_offset == 8
13203 && info->cr_save_offset == 4
13206 && (!current_function_calls_eh_return
13207 || info->ehrd_offset == -432)
13208 && info->vrsave_save_offset == -224
13209 && info->altivec_save_offset == (-224 -16 -192));
13211 treg = gen_rtx_REG (SImode, 11);
13212 emit_move_insn (treg, GEN_INT (-info->total_size));
13214 /* SAVE_WORLD takes the caller's LR in R0 and the frame size
13215 in R11. It also clobbers R12, so beware! */
13217 /* Preserve CR2 for save_world prologues */
13219 sz += 32 - info->first_gp_reg_save;
13220 sz += 64 - info->first_fp_reg_save;
13221 sz += LAST_ALTIVEC_REGNO - info->first_altivec_reg_save + 1;
13222 p = rtvec_alloc (sz);
13224 RTVEC_ELT (p, j++) = gen_rtx_CLOBBER (VOIDmode,
13225 gen_rtx_REG (Pmode,
13226 LINK_REGISTER_REGNUM));
13227 RTVEC_ELT (p, j++) = gen_rtx_USE (VOIDmode,
13228 gen_rtx_SYMBOL_REF (Pmode,
13230 /* We do floats first so that the instruction pattern matches
13232 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
13234 rtx reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
13235 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
13236 GEN_INT (info->fp_save_offset
13237 + sp_offset + 8 * i));
13238 rtx mem = gen_rtx_MEM (DFmode, addr);
13239 set_mem_alias_set (mem, rs6000_sr_alias_set);
13241 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
13243 for (i = 0; info->first_altivec_reg_save + i <= LAST_ALTIVEC_REGNO; i++)
13245 rtx reg = gen_rtx_REG (V4SImode, info->first_altivec_reg_save + i);
13246 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
13247 GEN_INT (info->altivec_save_offset
13248 + sp_offset + 16 * i));
13249 rtx mem = gen_rtx_MEM (V4SImode, addr);
13250 set_mem_alias_set (mem, rs6000_sr_alias_set);
13252 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
13254 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
13256 rtx reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
13257 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
13258 GEN_INT (info->gp_save_offset
13259 + sp_offset + reg_size * i));
13260 rtx mem = gen_rtx_MEM (reg_mode, addr);
13261 set_mem_alias_set (mem, rs6000_sr_alias_set);
13263 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
13267 /* CR register traditionally saved as CR2. */
13268 rtx reg = gen_rtx_REG (reg_mode, CR2_REGNO);
13269 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
13270 GEN_INT (info->cr_save_offset
13272 rtx mem = gen_rtx_MEM (reg_mode, addr);
13273 set_mem_alias_set (mem, rs6000_sr_alias_set);
13275 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
13277 /* Prevent any attempt to delete the setting of r0 and treg! */
13278 RTVEC_ELT (p, j++) = gen_rtx_USE (VOIDmode, gen_rtx_REG (Pmode, 0));
13279 RTVEC_ELT (p, j++) = gen_rtx_USE (VOIDmode, treg);
13280 RTVEC_ELT (p, j++) = gen_rtx_CLOBBER (VOIDmode, sp_reg_rtx);
13282 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
13283 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
13284 NULL_RTX, NULL_RTX);
13286 if (current_function_calls_eh_return)
13291 unsigned int regno = EH_RETURN_DATA_REGNO (i);
13292 if (regno == INVALID_REGNUM)
13294 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, reg_mode, regno,
13295 info->ehrd_offset + sp_offset
13296 + reg_size * (int) i,
13302 /* Save AltiVec registers if needed. */
13303 if (!WORLD_SAVE_P (info) && TARGET_ALTIVEC_ABI && info->altivec_size != 0)
13307 /* There should be a non inline version of this, for when we
13308 are saving lots of vector registers. */
13309 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
13310 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
13312 rtx areg, savereg, mem;
13315 offset = info->altivec_save_offset + sp_offset
13316 + 16 * (i - info->first_altivec_reg_save);
13318 savereg = gen_rtx_REG (V4SImode, i);
13320 areg = gen_rtx_REG (Pmode, 0);
13321 emit_move_insn (areg, GEN_INT (offset));
13323 /* AltiVec addressing mode is [reg+reg]. */
13324 mem = gen_rtx_MEM (V4SImode,
13325 gen_rtx_PLUS (Pmode, frame_reg_rtx, areg));
13327 set_mem_alias_set (mem, rs6000_sr_alias_set);
13329 insn = emit_move_insn (mem, savereg);
13331 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
13332 areg, GEN_INT (offset));
13336 /* VRSAVE is a bit vector representing which AltiVec registers
13337 are used. The OS uses this to determine which vector
13338 registers to save on a context switch. We need to save
13339 VRSAVE on the stack frame, add whatever AltiVec registers we
13340 used in this function, and do the corresponding magic in the
13343 if (TARGET_ALTIVEC && TARGET_ALTIVEC_VRSAVE
13344 && !WORLD_SAVE_P (info) && info->vrsave_mask != 0)
13346 rtx reg, mem, vrsave;
13349 /* Get VRSAVE onto a GPR. Note that ABI_V4 might be using r12
13350 as frame_reg_rtx and r11 as the static chain pointer for
13351 nested functions. */
13352 reg = gen_rtx_REG (SImode, 0);
13353 vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
13355 emit_insn (gen_get_vrsave_internal (reg));
13357 emit_insn (gen_rtx_SET (VOIDmode, reg, vrsave));
13360 offset = info->vrsave_save_offset + sp_offset;
13362 = gen_rtx_MEM (SImode,
13363 gen_rtx_PLUS (Pmode, frame_reg_rtx, GEN_INT (offset)));
13364 set_mem_alias_set (mem, rs6000_sr_alias_set);
13365 insn = emit_move_insn (mem, reg);
13367 /* Include the registers in the mask. */
13368 emit_insn (gen_iorsi3 (reg, reg, GEN_INT ((int) info->vrsave_mask)));
13370 insn = emit_insn (generate_set_vrsave (reg, info, 0));
13373 /* If we use the link register, get it into r0. */
13374 if (!WORLD_SAVE_P (info) && info->lr_save_p)
13376 insn = emit_move_insn (gen_rtx_REG (Pmode, 0),
13377 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
13378 RTX_FRAME_RELATED_P (insn) = 1;
13381 /* If we need to save CR, put it into r12. */
13382 if (!WORLD_SAVE_P (info) && info->cr_save_p && frame_reg_rtx != frame_ptr_rtx)
13386 cr_save_rtx = gen_rtx_REG (SImode, 12);
13387 insn = emit_insn (gen_movesi_from_cr (cr_save_rtx));
13388 RTX_FRAME_RELATED_P (insn) = 1;
13389 /* Now, there's no way that dwarf2out_frame_debug_expr is going
13390 to understand '(unspec:SI [(reg:CC 68) ...] UNSPEC_MOVESI_FROM_CR)'.
13391 But that's OK. All we have to do is specify that _one_ condition
13392 code register is saved in this stack slot. The thrower's epilogue
13393 will then restore all the call-saved registers.
13394 We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux. */
13395 set = gen_rtx_SET (VOIDmode, cr_save_rtx,
13396 gen_rtx_REG (SImode, CR2_REGNO));
13397 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
13402 /* Do any required saving of fpr's. If only one or two to save, do
13403 it ourselves. Otherwise, call function. */
13404 if (!WORLD_SAVE_P (info) && saving_FPRs_inline)
13407 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
13408 if ((regs_ever_live[info->first_fp_reg_save+i]
13409 && ! call_used_regs[info->first_fp_reg_save+i]))
13410 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, DFmode,
13411 info->first_fp_reg_save + i,
13412 info->fp_save_offset + sp_offset + 8 * i,
13415 else if (!WORLD_SAVE_P (info) && info->first_fp_reg_save != 64)
13419 const char *alloc_rname;
13421 p = rtvec_alloc (2 + 64 - info->first_fp_reg_save);
13423 RTVEC_ELT (p, 0) = gen_rtx_CLOBBER (VOIDmode,
13424 gen_rtx_REG (Pmode,
13425 LINK_REGISTER_REGNUM));
13426 sprintf (rname, "%s%d%s", SAVE_FP_PREFIX,
13427 info->first_fp_reg_save - 32, SAVE_FP_SUFFIX);
13428 alloc_rname = ggc_strdup (rname);
13429 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
13430 gen_rtx_SYMBOL_REF (Pmode,
13432 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
13434 rtx addr, reg, mem;
13435 reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
13436 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
13437 GEN_INT (info->fp_save_offset
13438 + sp_offset + 8*i));
13439 mem = gen_rtx_MEM (DFmode, addr);
13440 set_mem_alias_set (mem, rs6000_sr_alias_set);
13442 RTVEC_ELT (p, i + 2) = gen_rtx_SET (VOIDmode, mem, reg);
13444 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
13445 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
13446 NULL_RTX, NULL_RTX);
13449 /* Save GPRs. This is done as a PARALLEL if we are using
13450 the store-multiple instructions. */
13451 if (!WORLD_SAVE_P (info) && using_store_multiple)
13455 p = rtvec_alloc (32 - info->first_gp_reg_save);
13456 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
13458 rtx addr, reg, mem;
13459 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
13460 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
13461 GEN_INT (info->gp_save_offset
13464 mem = gen_rtx_MEM (reg_mode, addr);
13465 set_mem_alias_set (mem, rs6000_sr_alias_set);
13467 RTVEC_ELT (p, i) = gen_rtx_SET (VOIDmode, mem, reg);
13469 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
13470 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
13471 NULL_RTX, NULL_RTX);
13473 else if (!WORLD_SAVE_P (info))
13476 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
13477 if ((regs_ever_live[info->first_gp_reg_save+i]
13478 && (! call_used_regs[info->first_gp_reg_save+i]
13479 || (i+info->first_gp_reg_save
13480 == RS6000_PIC_OFFSET_TABLE_REGNUM
13481 && TARGET_TOC && TARGET_MINIMAL_TOC)))
13482 || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
13483 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
13484 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
13486 rtx addr, reg, mem;
13487 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
13489 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
13491 int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
13494 if (!SPE_CONST_OFFSET_OK (offset))
13496 b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
13497 emit_move_insn (b, GEN_INT (offset));
13500 b = GEN_INT (offset);
13502 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
13503 mem = gen_rtx_MEM (V2SImode, addr);
13504 set_mem_alias_set (mem, rs6000_sr_alias_set);
13505 insn = emit_move_insn (mem, reg);
13507 if (GET_CODE (b) == CONST_INT)
13508 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
13509 NULL_RTX, NULL_RTX);
13511 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
13512 b, GEN_INT (offset));
13516 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
13517 GEN_INT (info->gp_save_offset
13520 mem = gen_rtx_MEM (reg_mode, addr);
13521 set_mem_alias_set (mem, rs6000_sr_alias_set);
13523 insn = emit_move_insn (mem, reg);
13524 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
13525 NULL_RTX, NULL_RTX);
13530 /* ??? There's no need to emit actual instructions here, but it's the
13531 easiest way to get the frame unwind information emitted. */
13532 if (!WORLD_SAVE_P (info) && current_function_calls_eh_return)
13534 unsigned int i, regno;
13536 /* In AIX ABI we need to pretend we save r2 here. */
13539 rtx addr, reg, mem;
13541 reg = gen_rtx_REG (reg_mode, 2);
13542 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
13543 GEN_INT (sp_offset + 5 * reg_size));
13544 mem = gen_rtx_MEM (reg_mode, addr);
13545 set_mem_alias_set (mem, rs6000_sr_alias_set);
13547 insn = emit_move_insn (mem, reg);
13548 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
13549 NULL_RTX, NULL_RTX);
13550 PATTERN (insn) = gen_blockage ();
13555 regno = EH_RETURN_DATA_REGNO (i);
13556 if (regno == INVALID_REGNUM)
13559 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, reg_mode, regno,
13560 info->ehrd_offset + sp_offset
13561 + reg_size * (int) i,
13566 /* Save lr if we used it. */
13567 if (!WORLD_SAVE_P (info) && info->lr_save_p)
13569 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
13570 GEN_INT (info->lr_save_offset + sp_offset));
13571 rtx reg = gen_rtx_REG (Pmode, 0);
13572 rtx mem = gen_rtx_MEM (Pmode, addr);
13573 /* This should not be of rs6000_sr_alias_set, because of
13574 __builtin_return_address. */
13576 insn = emit_move_insn (mem, reg);
13577 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
13578 NULL_RTX, NULL_RTX);
13581 /* Save CR if we use any that must be preserved. */
13582 if (!WORLD_SAVE_P (info) && info->cr_save_p)
13584 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
13585 GEN_INT (info->cr_save_offset + sp_offset));
13586 rtx mem = gen_rtx_MEM (SImode, addr);
13587 /* See the large comment above about why CR2_REGNO is used. */
13588 rtx magic_eh_cr_reg = gen_rtx_REG (SImode, CR2_REGNO);
13590 set_mem_alias_set (mem, rs6000_sr_alias_set);
13592 /* If r12 was used to hold the original sp, copy cr into r0 now
13594 if (REGNO (frame_reg_rtx) == 12)
13598 cr_save_rtx = gen_rtx_REG (SImode, 0);
13599 insn = emit_insn (gen_movesi_from_cr (cr_save_rtx));
13600 RTX_FRAME_RELATED_P (insn) = 1;
13601 set = gen_rtx_SET (VOIDmode, cr_save_rtx, magic_eh_cr_reg);
13602 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
13607 insn = emit_move_insn (mem, cr_save_rtx);
13609 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
13610 NULL_RTX, NULL_RTX);
13613 /* Update stack and set back pointer unless this is V.4,
13614 for which it was done previously. */
13615 if (!WORLD_SAVE_P (info) && info->push_p
13616 && !(DEFAULT_ABI == ABI_V4 || current_function_calls_eh_return))
13617 rs6000_emit_allocate_stack (info->total_size, FALSE);
13619 /* Set frame pointer, if needed. */
13620 if (frame_pointer_needed)
13622 insn = emit_move_insn (gen_rtx_REG (Pmode, FRAME_POINTER_REGNUM),
13624 RTX_FRAME_RELATED_P (insn) = 1;
13627 /* If we are using RS6000_PIC_OFFSET_TABLE_REGNUM, we need to set it up. */
13628 if ((TARGET_TOC && TARGET_MINIMAL_TOC && get_pool_size () != 0)
13629 || (DEFAULT_ABI == ABI_V4 && flag_pic == 1
13630 && regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM]))
13632 /* If emit_load_toc_table will use the link register, we need to save
13633 it. We use R12 for this purpose because emit_load_toc_table
13634 can use register 0. This allows us to use a plain 'blr' to return
13635 from the procedure more often. */
13636 int save_LR_around_toc_setup = (TARGET_ELF
13637 && DEFAULT_ABI != ABI_AIX
13639 && ! info->lr_save_p
13640 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0);
13641 if (save_LR_around_toc_setup)
13643 rtx lr = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
13645 insn = emit_move_insn (frame_ptr_rtx, lr);
13646 rs6000_maybe_dead (insn);
13647 RTX_FRAME_RELATED_P (insn) = 1;
13649 rs6000_emit_load_toc_table (TRUE);
13651 insn = emit_move_insn (lr, frame_ptr_rtx);
13652 rs6000_maybe_dead (insn);
13653 RTX_FRAME_RELATED_P (insn) = 1;
13656 rs6000_emit_load_toc_table (TRUE);
13660 if (DEFAULT_ABI == ABI_DARWIN
13661 && flag_pic && current_function_uses_pic_offset_table)
13663 rtx lr = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
13664 rtx src = machopic_function_base_sym ();
13666 rs6000_maybe_dead (emit_insn (gen_load_macho_picbase (lr, src)));
13668 insn = emit_move_insn (gen_rtx_REG (Pmode,
13669 RS6000_PIC_OFFSET_TABLE_REGNUM),
13671 rs6000_maybe_dead (insn);
13676 /* Write function prologue. */
13679 rs6000_output_function_prologue (FILE *file,
13680 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
13682 rs6000_stack_t *info = rs6000_stack_info ();
13684 if (TARGET_DEBUG_STACK)
13685 debug_stack_info (info);
13687 /* Write .extern for any function we will call to save and restore
13689 if (info->first_fp_reg_save < 64
13690 && !FP_SAVE_INLINE (info->first_fp_reg_save))
13691 fprintf (file, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
13692 SAVE_FP_PREFIX, info->first_fp_reg_save - 32, SAVE_FP_SUFFIX,
13693 RESTORE_FP_PREFIX, info->first_fp_reg_save - 32,
13694 RESTORE_FP_SUFFIX);
13696 /* Write .extern for AIX common mode routines, if needed. */
13697 if (! TARGET_POWER && ! TARGET_POWERPC && ! common_mode_defined)
13699 fputs ("\t.extern __mulh\n", file);
13700 fputs ("\t.extern __mull\n", file);
13701 fputs ("\t.extern __divss\n", file);
13702 fputs ("\t.extern __divus\n", file);
13703 fputs ("\t.extern __quoss\n", file);
13704 fputs ("\t.extern __quous\n", file);
13705 common_mode_defined = 1;
13708 if (! HAVE_prologue)
13712 /* A NOTE_INSN_DELETED is supposed to be at the start and end of
13713 the "toplevel" insn chain. */
13714 emit_note (NOTE_INSN_DELETED);
13715 rs6000_emit_prologue ();
13716 emit_note (NOTE_INSN_DELETED);
13718 /* Expand INSN_ADDRESSES so final() doesn't crash. */
13722 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
13724 INSN_ADDRESSES_NEW (insn, addr);
13729 if (TARGET_DEBUG_STACK)
13730 debug_rtx_list (get_insns (), 100);
13731 final (get_insns (), file, FALSE);
13735 rs6000_pic_labelno++;
13738 /* Emit function epilogue as insns.
13740 At present, dwarf2out_frame_debug_expr doesn't understand
13741 register restores, so we don't bother setting RTX_FRAME_RELATED_P
13742 anywhere in the epilogue. Most of the insns below would in any case
13743 need special notes to explain where r11 is in relation to the stack. */
13746 rs6000_emit_epilogue (int sibcall)
13748 rs6000_stack_t *info;
13749 int restoring_FPRs_inline;
13750 int using_load_multiple;
13751 int using_mfcr_multiple;
13752 int use_backchain_to_restore_sp;
13754 rtx sp_reg_rtx = gen_rtx_REG (Pmode, 1);
13755 rtx frame_reg_rtx = sp_reg_rtx;
13756 enum machine_mode reg_mode = Pmode;
13757 int reg_size = TARGET_32BIT ? 4 : 8;
13760 info = rs6000_stack_info ();
13762 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
13764 reg_mode = V2SImode;
13768 using_load_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
13769 && (!TARGET_SPE_ABI
13770 || info->spe_64bit_regs_used == 0)
13771 && info->first_gp_reg_save < 31);
13772 restoring_FPRs_inline = (sibcall
13773 || current_function_calls_eh_return
13774 || info->first_fp_reg_save == 64
13775 || FP_SAVE_INLINE (info->first_fp_reg_save));
13776 use_backchain_to_restore_sp = (frame_pointer_needed
13777 || current_function_calls_alloca
13778 || info->total_size > 32767);
13779 using_mfcr_multiple = (rs6000_cpu == PROCESSOR_PPC601
13780 || rs6000_cpu == PROCESSOR_PPC603
13781 || rs6000_cpu == PROCESSOR_PPC750
13784 if (WORLD_SAVE_P (info))
13788 const char *alloc_rname;
13791 /* eh_rest_world_r10 will return to the location saved in the LR
13792 stack slot (which is not likely to be our caller.)
13793 Input: R10 -- stack adjustment. Clobbers R0, R11, R12, R7, R8.
13794 rest_world is similar, except any R10 parameter is ignored.
13795 The exception-handling stuff that was here in 2.95 is no
13796 longer necessary. */
13800 + 32 - info->first_gp_reg_save
13801 + LAST_ALTIVEC_REGNO + 1 - info->first_altivec_reg_save
13802 + 63 + 1 - info->first_fp_reg_save);
13804 strcpy (rname, ((current_function_calls_eh_return) ?
13805 "*eh_rest_world_r10" : "*rest_world"));
13806 alloc_rname = ggc_strdup (rname);
13809 RTVEC_ELT (p, j++) = gen_rtx_RETURN (VOIDmode);
13810 RTVEC_ELT (p, j++) = gen_rtx_USE (VOIDmode,
13811 gen_rtx_REG (Pmode,
13812 LINK_REGISTER_REGNUM));
13814 = gen_rtx_USE (VOIDmode, gen_rtx_SYMBOL_REF (Pmode, alloc_rname));
13815 /* The instruction pattern requires a clobber here;
13816 it is shared with the restVEC helper. */
13818 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 11));
13821 /* CR register traditionally saved as CR2. */
13822 rtx reg = gen_rtx_REG (reg_mode, CR2_REGNO);
13823 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
13824 GEN_INT (info->cr_save_offset));
13825 rtx mem = gen_rtx_MEM (reg_mode, addr);
13826 set_mem_alias_set (mem, rs6000_sr_alias_set);
13828 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
13831 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
13833 rtx reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
13834 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
13835 GEN_INT (info->gp_save_offset
13837 rtx mem = gen_rtx_MEM (reg_mode, addr);
13838 set_mem_alias_set (mem, rs6000_sr_alias_set);
13840 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
13842 for (i = 0; info->first_altivec_reg_save + i <= LAST_ALTIVEC_REGNO; i++)
13844 rtx reg = gen_rtx_REG (V4SImode, info->first_altivec_reg_save + i);
13845 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
13846 GEN_INT (info->altivec_save_offset
13848 rtx mem = gen_rtx_MEM (V4SImode, addr);
13849 set_mem_alias_set (mem, rs6000_sr_alias_set);
13851 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
13853 for (i = 0; info->first_fp_reg_save + i <= 63; i++)
13855 rtx reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
13856 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
13857 GEN_INT (info->fp_save_offset
13859 rtx mem = gen_rtx_MEM (DFmode, addr);
13860 set_mem_alias_set (mem, rs6000_sr_alias_set);
13862 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
13865 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 0));
13867 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, 12));
13869 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, 7));
13871 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, 8));
13873 = gen_rtx_USE (VOIDmode, gen_rtx_REG (SImode, 10));
13874 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
13879 /* If we have a frame pointer, a call to alloca, or a large stack
13880 frame, restore the old stack pointer using the backchain. Otherwise,
13881 we know what size to update it with. */
13882 if (use_backchain_to_restore_sp)
13884 /* Under V.4, don't reset the stack pointer until after we're done
13885 loading the saved registers. */
13886 if (DEFAULT_ABI == ABI_V4)
13887 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
13889 emit_move_insn (frame_reg_rtx,
13890 gen_rtx_MEM (Pmode, sp_reg_rtx));
13893 else if (info->push_p)
13895 if (DEFAULT_ABI == ABI_V4
13896 || current_function_calls_eh_return)
13897 sp_offset = info->total_size;
13900 emit_insn (TARGET_32BIT
13901 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
13902 GEN_INT (info->total_size))
13903 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
13904 GEN_INT (info->total_size)));
13908 /* Restore AltiVec registers if needed. */
13909 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
13913 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
13914 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
13916 rtx addr, areg, mem;
13918 areg = gen_rtx_REG (Pmode, 0);
13920 (areg, GEN_INT (info->altivec_save_offset
13922 + 16 * (i - info->first_altivec_reg_save)));
13924 /* AltiVec addressing mode is [reg+reg]. */
13925 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
13926 mem = gen_rtx_MEM (V4SImode, addr);
13927 set_mem_alias_set (mem, rs6000_sr_alias_set);
13929 emit_move_insn (gen_rtx_REG (V4SImode, i), mem);
13933 /* Restore VRSAVE if needed. */
13934 if (TARGET_ALTIVEC && TARGET_ALTIVEC_VRSAVE
13935 && info->vrsave_mask != 0)
13937 rtx addr, mem, reg;
13939 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
13940 GEN_INT (info->vrsave_save_offset + sp_offset));
13941 mem = gen_rtx_MEM (SImode, addr);
13942 set_mem_alias_set (mem, rs6000_sr_alias_set);
13943 reg = gen_rtx_REG (SImode, 12);
13944 emit_move_insn (reg, mem);
13946 emit_insn (generate_set_vrsave (reg, info, 1));
13949 /* Get the old lr if we saved it. */
13950 if (info->lr_save_p)
13952 rtx mem = gen_frame_mem_offset (Pmode, frame_reg_rtx,
13953 info->lr_save_offset + sp_offset);
13955 set_mem_alias_set (mem, rs6000_sr_alias_set);
13957 emit_move_insn (gen_rtx_REG (Pmode, 0), mem);
13960 /* Get the old cr if we saved it. */
13961 if (info->cr_save_p)
13963 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
13964 GEN_INT (info->cr_save_offset + sp_offset));
13965 rtx mem = gen_rtx_MEM (SImode, addr);
13967 set_mem_alias_set (mem, rs6000_sr_alias_set);
13969 emit_move_insn (gen_rtx_REG (SImode, 12), mem);
13972 /* Set LR here to try to overlap restores below. */
13973 if (info->lr_save_p)
13974 emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
13975 gen_rtx_REG (Pmode, 0));
13977 /* Load exception handler data registers, if needed. */
13978 if (current_function_calls_eh_return)
13980 unsigned int i, regno;
13984 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
13985 GEN_INT (sp_offset + 5 * reg_size));
13986 rtx mem = gen_rtx_MEM (reg_mode, addr);
13988 set_mem_alias_set (mem, rs6000_sr_alias_set);
13990 emit_move_insn (gen_rtx_REG (reg_mode, 2), mem);
13997 regno = EH_RETURN_DATA_REGNO (i);
13998 if (regno == INVALID_REGNUM)
14001 mem = gen_frame_mem_offset (reg_mode, frame_reg_rtx,
14002 info->ehrd_offset + sp_offset
14003 + reg_size * (int) i);
14004 set_mem_alias_set (mem, rs6000_sr_alias_set);
14006 emit_move_insn (gen_rtx_REG (reg_mode, regno), mem);
14010 /* Restore GPRs. This is done as a PARALLEL if we are using
14011 the load-multiple instructions. */
14012 if (using_load_multiple)
14015 p = rtvec_alloc (32 - info->first_gp_reg_save);
14016 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
14018 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14019 GEN_INT (info->gp_save_offset
14022 rtx mem = gen_rtx_MEM (reg_mode, addr);
14024 set_mem_alias_set (mem, rs6000_sr_alias_set);
14027 gen_rtx_SET (VOIDmode,
14028 gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
14031 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
14034 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
14035 if ((regs_ever_live[info->first_gp_reg_save+i]
14036 && (! call_used_regs[info->first_gp_reg_save+i]
14037 || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
14038 && TARGET_TOC && TARGET_MINIMAL_TOC)))
14039 || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
14040 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
14041 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
14043 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14044 GEN_INT (info->gp_save_offset
14047 rtx mem = gen_rtx_MEM (reg_mode, addr);
14049 /* Restore 64-bit quantities for SPE. */
14050 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
14052 int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
14055 if (!SPE_CONST_OFFSET_OK (offset))
14057 b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
14058 emit_move_insn (b, GEN_INT (offset));
14061 b = GEN_INT (offset);
14063 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
14064 mem = gen_rtx_MEM (V2SImode, addr);
14067 set_mem_alias_set (mem, rs6000_sr_alias_set);
14069 emit_move_insn (gen_rtx_REG (reg_mode,
14070 info->first_gp_reg_save + i), mem);
14073 /* Restore fpr's if we need to do it without calling a function. */
14074 if (restoring_FPRs_inline)
14075 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
14076 if ((regs_ever_live[info->first_fp_reg_save+i]
14077 && ! call_used_regs[info->first_fp_reg_save+i]))
14080 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14081 GEN_INT (info->fp_save_offset
14084 mem = gen_rtx_MEM (DFmode, addr);
14085 set_mem_alias_set (mem, rs6000_sr_alias_set);
14087 emit_move_insn (gen_rtx_REG (DFmode,
14088 info->first_fp_reg_save + i),
14092 /* If we saved cr, restore it here. Just those that were used. */
14093 if (info->cr_save_p)
14095 rtx r12_rtx = gen_rtx_REG (SImode, 12);
14098 if (using_mfcr_multiple)
14100 for (i = 0; i < 8; i++)
14101 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
14103 gcc_assert (count);
14106 if (using_mfcr_multiple && count > 1)
14111 p = rtvec_alloc (count);
14114 for (i = 0; i < 8; i++)
14115 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
14117 rtvec r = rtvec_alloc (2);
14118 RTVEC_ELT (r, 0) = r12_rtx;
14119 RTVEC_ELT (r, 1) = GEN_INT (1 << (7-i));
14120 RTVEC_ELT (p, ndx) =
14121 gen_rtx_SET (VOIDmode, gen_rtx_REG (CCmode, CR0_REGNO+i),
14122 gen_rtx_UNSPEC (CCmode, r, UNSPEC_MOVESI_TO_CR));
14125 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
14126 gcc_assert (ndx == count);
14129 for (i = 0; i < 8; i++)
14130 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
14132 emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode,
14138 /* If this is V.4, unwind the stack pointer after all of the loads
14139 have been done. We need to emit a block here so that sched
14140 doesn't decide to move the sp change before the register restores
14141 (which may not have any obvious dependency on the stack). This
14142 doesn't hurt performance, because there is no scheduling that can
14143 be done after this point. */
14144 if (DEFAULT_ABI == ABI_V4
14145 || current_function_calls_eh_return)
14147 if (frame_reg_rtx != sp_reg_rtx)
14148 rs6000_emit_stack_tie ();
14150 if (use_backchain_to_restore_sp)
14152 emit_move_insn (sp_reg_rtx, frame_reg_rtx);
14154 else if (sp_offset != 0)
14156 emit_insn (TARGET_32BIT
14157 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
14158 GEN_INT (sp_offset))
14159 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
14160 GEN_INT (sp_offset)));
14164 if (current_function_calls_eh_return)
14166 rtx sa = EH_RETURN_STACKADJ_RTX;
14167 emit_insn (TARGET_32BIT
14168 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx, sa)
14169 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx, sa));
14175 if (! restoring_FPRs_inline)
14176 p = rtvec_alloc (3 + 64 - info->first_fp_reg_save);
14178 p = rtvec_alloc (2);
14180 RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
14181 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
14182 gen_rtx_REG (Pmode,
14183 LINK_REGISTER_REGNUM));
14185 /* If we have to restore more than two FP registers, branch to the
14186 restore function. It will return to our caller. */
14187 if (! restoring_FPRs_inline)
14191 const char *alloc_rname;
14193 sprintf (rname, "%s%d%s", RESTORE_FP_PREFIX,
14194 info->first_fp_reg_save - 32, RESTORE_FP_SUFFIX);
14195 alloc_rname = ggc_strdup (rname);
14196 RTVEC_ELT (p, 2) = gen_rtx_USE (VOIDmode,
14197 gen_rtx_SYMBOL_REF (Pmode,
14200 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
14203 addr = gen_rtx_PLUS (Pmode, sp_reg_rtx,
14204 GEN_INT (info->fp_save_offset + 8*i));
14205 mem = gen_rtx_MEM (DFmode, addr);
14206 set_mem_alias_set (mem, rs6000_sr_alias_set);
14208 RTVEC_ELT (p, i+3) =
14209 gen_rtx_SET (VOIDmode,
14210 gen_rtx_REG (DFmode, info->first_fp_reg_save + i),
14215 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
14219 /* Write function epilogue. */
14222 rs6000_output_function_epilogue (FILE *file,
14223 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
14225 rs6000_stack_t *info = rs6000_stack_info ();
14227 if (! HAVE_epilogue)
14229 rtx insn = get_last_insn ();
14230 /* If the last insn was a BARRIER, we don't have to write anything except
14231 the trace table. */
14232 if (GET_CODE (insn) == NOTE)
14233 insn = prev_nonnote_insn (insn);
14234 if (insn == 0 || GET_CODE (insn) != BARRIER)
14236 /* This is slightly ugly, but at least we don't have two
14237 copies of the epilogue-emitting code. */
14240 /* A NOTE_INSN_DELETED is supposed to be at the start
14241 and end of the "toplevel" insn chain. */
14242 emit_note (NOTE_INSN_DELETED);
14243 rs6000_emit_epilogue (FALSE);
14244 emit_note (NOTE_INSN_DELETED);
14246 /* Expand INSN_ADDRESSES so final() doesn't crash. */
14250 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
14252 INSN_ADDRESSES_NEW (insn, addr);
14257 if (TARGET_DEBUG_STACK)
14258 debug_rtx_list (get_insns (), 100);
14259 final (get_insns (), file, FALSE);
14265 macho_branch_islands ();
14266 /* Mach-O doesn't support labels at the end of objects, so if
14267 it looks like we might want one, insert a NOP. */
14269 rtx insn = get_last_insn ();
14272 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_DELETED_LABEL)
14273 insn = PREV_INSN (insn);
14277 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_DELETED_LABEL)))
14278 fputs ("\tnop\n", file);
14282 /* Output a traceback table here. See /usr/include/sys/debug.h for info
14285 We don't output a traceback table if -finhibit-size-directive was
14286 used. The documentation for -finhibit-size-directive reads
14287 ``don't output a @code{.size} assembler directive, or anything
14288 else that would cause trouble if the function is split in the
14289 middle, and the two halves are placed at locations far apart in
14290 memory.'' The traceback table has this property, since it
14291 includes the offset from the start of the function to the
14292 traceback table itself.
14294 System V.4 Powerpc's (and the embedded ABI derived from it) use a
14295 different traceback table. */
14296 if (DEFAULT_ABI == ABI_AIX && ! flag_inhibit_size_directive
14297 && rs6000_traceback != traceback_none)
14299 const char *fname = NULL;
14300 const char *language_string = lang_hooks.name;
14301 int fixed_parms = 0, float_parms = 0, parm_info = 0;
14303 int optional_tbtab;
14305 if (rs6000_traceback == traceback_full)
14306 optional_tbtab = 1;
14307 else if (rs6000_traceback == traceback_part)
14308 optional_tbtab = 0;
14310 optional_tbtab = !optimize_size && !TARGET_ELF;
14312 if (optional_tbtab)
14314 fname = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
14315 while (*fname == '.') /* V.4 encodes . in the name */
14318 /* Need label immediately before tbtab, so we can compute
14319 its offset from the function start. */
14320 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
14321 ASM_OUTPUT_LABEL (file, fname);
14324 /* The .tbtab pseudo-op can only be used for the first eight
14325 expressions, since it can't handle the possibly variable
14326 length fields that follow. However, if you omit the optional
14327 fields, the assembler outputs zeros for all optional fields
14328 anyways, giving each variable length field is minimum length
14329 (as defined in sys/debug.h). Thus we can not use the .tbtab
14330 pseudo-op at all. */
14332 /* An all-zero word flags the start of the tbtab, for debuggers
14333 that have to find it by searching forward from the entry
14334 point or from the current pc. */
14335 fputs ("\t.long 0\n", file);
14337 /* Tbtab format type. Use format type 0. */
14338 fputs ("\t.byte 0,", file);
14340 /* Language type. Unfortunately, there does not seem to be any
14341 official way to discover the language being compiled, so we
14342 use language_string.
14343 C is 0. Fortran is 1. Pascal is 2. Ada is 3. C++ is 9.
14344 Java is 13. Objective-C is 14. */
14345 if (! strcmp (language_string, "GNU C"))
14347 else if (! strcmp (language_string, "GNU F77")
14348 || ! strcmp (language_string, "GNU F95"))
14350 else if (! strcmp (language_string, "GNU Pascal"))
14352 else if (! strcmp (language_string, "GNU Ada"))
14354 else if (! strcmp (language_string, "GNU C++"))
14356 else if (! strcmp (language_string, "GNU Java"))
14358 else if (! strcmp (language_string, "GNU Objective-C"))
14361 gcc_unreachable ();
14362 fprintf (file, "%d,", i);
14364 /* 8 single bit fields: global linkage (not set for C extern linkage,
14365 apparently a PL/I convention?), out-of-line epilogue/prologue, offset
14366 from start of procedure stored in tbtab, internal function, function
14367 has controlled storage, function has no toc, function uses fp,
14368 function logs/aborts fp operations. */
14369 /* Assume that fp operations are used if any fp reg must be saved. */
14370 fprintf (file, "%d,",
14371 (optional_tbtab << 5) | ((info->first_fp_reg_save != 64) << 1));
14373 /* 6 bitfields: function is interrupt handler, name present in
14374 proc table, function calls alloca, on condition directives
14375 (controls stack walks, 3 bits), saves condition reg, saves
14377 /* The `function calls alloca' bit seems to be set whenever reg 31 is
14378 set up as a frame pointer, even when there is no alloca call. */
14379 fprintf (file, "%d,",
14380 ((optional_tbtab << 6)
14381 | ((optional_tbtab & frame_pointer_needed) << 5)
14382 | (info->cr_save_p << 1)
14383 | (info->lr_save_p)));
14385 /* 3 bitfields: saves backchain, fixup code, number of fpr saved
14387 fprintf (file, "%d,",
14388 (info->push_p << 7) | (64 - info->first_fp_reg_save));
14390 /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits). */
14391 fprintf (file, "%d,", (32 - first_reg_to_save ()));
14393 if (optional_tbtab)
14395 /* Compute the parameter info from the function decl argument
14398 int next_parm_info_bit = 31;
14400 for (decl = DECL_ARGUMENTS (current_function_decl);
14401 decl; decl = TREE_CHAIN (decl))
14403 rtx parameter = DECL_INCOMING_RTL (decl);
14404 enum machine_mode mode = GET_MODE (parameter);
14406 if (GET_CODE (parameter) == REG)
14408 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
14426 gcc_unreachable ();
14429 /* If only one bit will fit, don't or in this entry. */
14430 if (next_parm_info_bit > 0)
14431 parm_info |= (bits << (next_parm_info_bit - 1));
14432 next_parm_info_bit -= 2;
14436 fixed_parms += ((GET_MODE_SIZE (mode)
14437 + (UNITS_PER_WORD - 1))
14439 next_parm_info_bit -= 1;
14445 /* Number of fixed point parameters. */
14446 /* This is actually the number of words of fixed point parameters; thus
14447 an 8 byte struct counts as 2; and thus the maximum value is 8. */
14448 fprintf (file, "%d,", fixed_parms);
14450 /* 2 bitfields: number of floating point parameters (7 bits), parameters
14452 /* This is actually the number of fp registers that hold parameters;
14453 and thus the maximum value is 13. */
14454 /* Set parameters on stack bit if parameters are not in their original
14455 registers, regardless of whether they are on the stack? Xlc
14456 seems to set the bit when not optimizing. */
14457 fprintf (file, "%d\n", ((float_parms << 1) | (! optimize)));
14459 if (! optional_tbtab)
14462 /* Optional fields follow. Some are variable length. */
14464 /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
14465 11 double float. */
14466 /* There is an entry for each parameter in a register, in the order that
14467 they occur in the parameter list. Any intervening arguments on the
14468 stack are ignored. If the list overflows a long (max possible length
14469 34 bits) then completely leave off all elements that don't fit. */
14470 /* Only emit this long if there was at least one parameter. */
14471 if (fixed_parms || float_parms)
14472 fprintf (file, "\t.long %d\n", parm_info);
14474 /* Offset from start of code to tb table. */
14475 fputs ("\t.long ", file);
14476 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
14478 RS6000_OUTPUT_BASENAME (file, fname);
14480 assemble_name (file, fname);
14482 rs6000_output_function_entry (file, fname);
14485 /* Interrupt handler mask. */
14486 /* Omit this long, since we never set the interrupt handler bit
14489 /* Number of CTL (controlled storage) anchors. */
14490 /* Omit this long, since the has_ctl bit is never set above. */
14492 /* Displacement into stack of each CTL anchor. */
14493 /* Omit this list of longs, because there are no CTL anchors. */
14495 /* Length of function name. */
14498 fprintf (file, "\t.short %d\n", (int) strlen (fname));
14500 /* Function name. */
14501 assemble_string (fname, strlen (fname));
14503 /* Register for alloca automatic storage; this is always reg 31.
14504 Only emit this if the alloca bit was set above. */
14505 if (frame_pointer_needed)
14506 fputs ("\t.byte 31\n", file);
14508 fputs ("\t.align 2\n", file);
14512 /* A C compound statement that outputs the assembler code for a thunk
14513 function, used to implement C++ virtual function calls with
14514 multiple inheritance. The thunk acts as a wrapper around a virtual
14515 function, adjusting the implicit object parameter before handing
14516 control off to the real function.
14518 First, emit code to add the integer DELTA to the location that
14519 contains the incoming first argument. Assume that this argument
14520 contains a pointer, and is the one used to pass the `this' pointer
14521 in C++. This is the incoming argument *before* the function
14522 prologue, e.g. `%o0' on a sparc. The addition must preserve the
14523 values of all other incoming arguments.
14525 After the addition, emit code to jump to FUNCTION, which is a
14526 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does
14527 not touch the return address. Hence returning from FUNCTION will
14528 return to whoever called the current `thunk'.
14530 The effect must be as if FUNCTION had been called directly with the
14531 adjusted first argument. This macro is responsible for emitting
14532 all of the code for a thunk function; output_function_prologue()
14533 and output_function_epilogue() are not invoked.
14535 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already
14536 been extracted from it.) It might possibly be useful on some
14537 targets, but probably not.
14539 If you do not define this macro, the target-independent code in the
14540 C++ frontend will generate a less efficient heavyweight thunk that
14541 calls FUNCTION instead of jumping to it. The generic approach does
14542 not support varargs. */
14545 rs6000_output_mi_thunk (FILE *file, tree thunk_fndecl ATTRIBUTE_UNUSED,
14546 HOST_WIDE_INT delta, HOST_WIDE_INT vcall_offset,
14549 rtx this, insn, funexp;
14551 reload_completed = 1;
14552 epilogue_completed = 1;
14553 no_new_pseudos = 1;
14554 reset_block_changes ();
14556 /* Mark the end of the (empty) prologue. */
14557 emit_note (NOTE_INSN_PROLOGUE_END);
14559 /* Find the "this" pointer. If the function returns a structure,
14560 the structure return pointer is in r3. */
14561 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
14562 this = gen_rtx_REG (Pmode, 4);
14564 this = gen_rtx_REG (Pmode, 3);
14566 /* Apply the constant offset, if required. */
14569 rtx delta_rtx = GEN_INT (delta);
14570 emit_insn (TARGET_32BIT
14571 ? gen_addsi3 (this, this, delta_rtx)
14572 : gen_adddi3 (this, this, delta_rtx));
14575 /* Apply the offset from the vtable, if required. */
14578 rtx vcall_offset_rtx = GEN_INT (vcall_offset);
14579 rtx tmp = gen_rtx_REG (Pmode, 12);
14581 emit_move_insn (tmp, gen_rtx_MEM (Pmode, this));
14582 if (((unsigned HOST_WIDE_INT) vcall_offset) + 0x8000 >= 0x10000)
14584 emit_insn (TARGET_32BIT
14585 ? gen_addsi3 (tmp, tmp, vcall_offset_rtx)
14586 : gen_adddi3 (tmp, tmp, vcall_offset_rtx));
14587 emit_move_insn (tmp, gen_rtx_MEM (Pmode, tmp));
14591 rtx loc = gen_rtx_PLUS (Pmode, tmp, vcall_offset_rtx);
14593 emit_move_insn (tmp, gen_rtx_MEM (Pmode, loc));
14595 emit_insn (TARGET_32BIT
14596 ? gen_addsi3 (this, this, tmp)
14597 : gen_adddi3 (this, this, tmp));
14600 /* Generate a tail call to the target function. */
14601 if (!TREE_USED (function))
14603 assemble_external (function);
14604 TREE_USED (function) = 1;
14606 funexp = XEXP (DECL_RTL (function), 0);
14607 funexp = gen_rtx_MEM (FUNCTION_MODE, funexp);
14610 if (MACHOPIC_INDIRECT)
14611 funexp = machopic_indirect_call_target (funexp);
14614 /* gen_sibcall expects reload to convert scratch pseudo to LR so we must
14615 generate sibcall RTL explicitly. */
14616 insn = emit_call_insn (
14617 gen_rtx_PARALLEL (VOIDmode,
14619 gen_rtx_CALL (VOIDmode,
14620 funexp, const0_rtx),
14621 gen_rtx_USE (VOIDmode, const0_rtx),
14622 gen_rtx_USE (VOIDmode,
14623 gen_rtx_REG (SImode,
14624 LINK_REGISTER_REGNUM)),
14625 gen_rtx_RETURN (VOIDmode))));
14626 SIBLING_CALL_P (insn) = 1;
14629 /* Run just enough of rest_of_compilation to get the insns emitted.
14630 There's not really enough bulk here to make other passes such as
14631 instruction scheduling worth while. Note that use_thunk calls
14632 assemble_start_function and assemble_end_function. */
14633 insn = get_insns ();
14634 insn_locators_initialize ();
14635 shorten_branches (insn);
14636 final_start_function (insn, file, 1);
14637 final (insn, file, 1);
14638 final_end_function ();
14640 reload_completed = 0;
14641 epilogue_completed = 0;
14642 no_new_pseudos = 0;
14645 /* A quick summary of the various types of 'constant-pool tables'
14648 Target Flags Name One table per
14649 AIX (none) AIX TOC object file
14650 AIX -mfull-toc AIX TOC object file
14651 AIX -mminimal-toc AIX minimal TOC translation unit
14652 SVR4/EABI (none) SVR4 SDATA object file
14653 SVR4/EABI -fpic SVR4 pic object file
14654 SVR4/EABI -fPIC SVR4 PIC translation unit
14655 SVR4/EABI -mrelocatable EABI TOC function
14656 SVR4/EABI -maix AIX TOC object file
14657 SVR4/EABI -maix -mminimal-toc
14658 AIX minimal TOC translation unit
14660 Name Reg. Set by entries contains:
14661 made by addrs? fp? sum?
14663 AIX TOC 2 crt0 as Y option option
14664 AIX minimal TOC 30 prolog gcc Y Y option
14665 SVR4 SDATA 13 crt0 gcc N Y N
14666 SVR4 pic 30 prolog ld Y not yet N
14667 SVR4 PIC 30 prolog gcc Y option option
14668 EABI TOC 30 prolog gcc Y option option
14672 /* Hash functions for the hash table. */
14675 rs6000_hash_constant (rtx k)
14677 enum rtx_code code = GET_CODE (k);
14678 enum machine_mode mode = GET_MODE (k);
14679 unsigned result = (code << 3) ^ mode;
14680 const char *format;
14683 format = GET_RTX_FORMAT (code);
14684 flen = strlen (format);
14690 return result * 1231 + (unsigned) INSN_UID (XEXP (k, 0));
14693 if (mode != VOIDmode)
14694 return real_hash (CONST_DOUBLE_REAL_VALUE (k)) * result;
14706 for (; fidx < flen; fidx++)
14707 switch (format[fidx])
14712 const char *str = XSTR (k, fidx);
14713 len = strlen (str);
14714 result = result * 613 + len;
14715 for (i = 0; i < len; i++)
14716 result = result * 613 + (unsigned) str[i];
14721 result = result * 1231 + rs6000_hash_constant (XEXP (k, fidx));
14725 result = result * 613 + (unsigned) XINT (k, fidx);
14728 if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT))
14729 result = result * 613 + (unsigned) XWINT (k, fidx);
14733 for (i = 0; i < sizeof (HOST_WIDE_INT) / sizeof (unsigned); i++)
14734 result = result * 613 + (unsigned) (XWINT (k, fidx)
14741 gcc_unreachable ();
14748 toc_hash_function (const void *hash_entry)
14750 const struct toc_hash_struct *thc =
14751 (const struct toc_hash_struct *) hash_entry;
14752 return rs6000_hash_constant (thc->key) ^ thc->key_mode;
14755 /* Compare H1 and H2 for equivalence. */
14758 toc_hash_eq (const void *h1, const void *h2)
14760 rtx r1 = ((const struct toc_hash_struct *) h1)->key;
14761 rtx r2 = ((const struct toc_hash_struct *) h2)->key;
14763 if (((const struct toc_hash_struct *) h1)->key_mode
14764 != ((const struct toc_hash_struct *) h2)->key_mode)
14767 return rtx_equal_p (r1, r2);
14770 /* These are the names given by the C++ front-end to vtables, and
14771 vtable-like objects. Ideally, this logic should not be here;
14772 instead, there should be some programmatic way of inquiring as
14773 to whether or not an object is a vtable. */
14775 #define VTABLE_NAME_P(NAME) \
14776 (strncmp ("_vt.", name, strlen ("_vt.")) == 0 \
14777 || strncmp ("_ZTV", name, strlen ("_ZTV")) == 0 \
14778 || strncmp ("_ZTT", name, strlen ("_ZTT")) == 0 \
14779 || strncmp ("_ZTI", name, strlen ("_ZTI")) == 0 \
14780 || strncmp ("_ZTC", name, strlen ("_ZTC")) == 0)
14783 rs6000_output_symbol_ref (FILE *file, rtx x)
14785 /* Currently C++ toc references to vtables can be emitted before it
14786 is decided whether the vtable is public or private. If this is
14787 the case, then the linker will eventually complain that there is
14788 a reference to an unknown section. Thus, for vtables only,
14789 we emit the TOC reference to reference the symbol and not the
14791 const char *name = XSTR (x, 0);
14793 if (VTABLE_NAME_P (name))
14795 RS6000_OUTPUT_BASENAME (file, name);
14798 assemble_name (file, name);
14801 /* Output a TOC entry. We derive the entry name from what is being
14805 output_toc (FILE *file, rtx x, int labelno, enum machine_mode mode)
14808 const char *name = buf;
14809 const char *real_name;
14813 gcc_assert (!TARGET_NO_TOC);
14815 /* When the linker won't eliminate them, don't output duplicate
14816 TOC entries (this happens on AIX if there is any kind of TOC,
14817 and on SVR4 under -fPIC or -mrelocatable). Don't do this for
14819 if (TARGET_TOC && GET_CODE (x) != LABEL_REF)
14821 struct toc_hash_struct *h;
14824 /* Create toc_hash_table. This can't be done at OVERRIDE_OPTIONS
14825 time because GGC is not initialized at that point. */
14826 if (toc_hash_table == NULL)
14827 toc_hash_table = htab_create_ggc (1021, toc_hash_function,
14828 toc_hash_eq, NULL);
14830 h = ggc_alloc (sizeof (*h));
14832 h->key_mode = mode;
14833 h->labelno = labelno;
14835 found = htab_find_slot (toc_hash_table, h, 1);
14836 if (*found == NULL)
14838 else /* This is indeed a duplicate.
14839 Set this label equal to that label. */
14841 fputs ("\t.set ", file);
14842 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
14843 fprintf (file, "%d,", labelno);
14844 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
14845 fprintf (file, "%d\n", ((*(const struct toc_hash_struct **)
14851 /* If we're going to put a double constant in the TOC, make sure it's
14852 aligned properly when strict alignment is on. */
14853 if (GET_CODE (x) == CONST_DOUBLE
14854 && STRICT_ALIGNMENT
14855 && GET_MODE_BITSIZE (mode) >= 64
14856 && ! (TARGET_NO_FP_IN_TOC && ! TARGET_MINIMAL_TOC)) {
14857 ASM_OUTPUT_ALIGN (file, 3);
14860 (*targetm.asm_out.internal_label) (file, "LC", labelno);
14862 /* Handle FP constants specially. Note that if we have a minimal
14863 TOC, things we put here aren't actually in the TOC, so we can allow
14865 if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == TFmode)
14867 REAL_VALUE_TYPE rv;
14870 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
14871 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);
14875 if (TARGET_MINIMAL_TOC)
14876 fputs (DOUBLE_INT_ASM_OP, file);
14878 fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
14879 k[0] & 0xffffffff, k[1] & 0xffffffff,
14880 k[2] & 0xffffffff, k[3] & 0xffffffff);
14881 fprintf (file, "0x%lx%08lx,0x%lx%08lx\n",
14882 k[0] & 0xffffffff, k[1] & 0xffffffff,
14883 k[2] & 0xffffffff, k[3] & 0xffffffff);
14888 if (TARGET_MINIMAL_TOC)
14889 fputs ("\t.long ", file);
14891 fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
14892 k[0] & 0xffffffff, k[1] & 0xffffffff,
14893 k[2] & 0xffffffff, k[3] & 0xffffffff);
14894 fprintf (file, "0x%lx,0x%lx,0x%lx,0x%lx\n",
14895 k[0] & 0xffffffff, k[1] & 0xffffffff,
14896 k[2] & 0xffffffff, k[3] & 0xffffffff);
14900 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
14902 REAL_VALUE_TYPE rv;
14905 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
14906 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
14910 if (TARGET_MINIMAL_TOC)
14911 fputs (DOUBLE_INT_ASM_OP, file);
14913 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
14914 k[0] & 0xffffffff, k[1] & 0xffffffff);
14915 fprintf (file, "0x%lx%08lx\n",
14916 k[0] & 0xffffffff, k[1] & 0xffffffff);
14921 if (TARGET_MINIMAL_TOC)
14922 fputs ("\t.long ", file);
14924 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
14925 k[0] & 0xffffffff, k[1] & 0xffffffff);
14926 fprintf (file, "0x%lx,0x%lx\n",
14927 k[0] & 0xffffffff, k[1] & 0xffffffff);
14931 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
14933 REAL_VALUE_TYPE rv;
14936 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
14937 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
14941 if (TARGET_MINIMAL_TOC)
14942 fputs (DOUBLE_INT_ASM_OP, file);
14944 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
14945 fprintf (file, "0x%lx00000000\n", l & 0xffffffff);
14950 if (TARGET_MINIMAL_TOC)
14951 fputs ("\t.long ", file);
14953 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
14954 fprintf (file, "0x%lx\n", l & 0xffffffff);
14958 else if (GET_MODE (x) == VOIDmode
14959 && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
14961 unsigned HOST_WIDE_INT low;
14962 HOST_WIDE_INT high;
14964 if (GET_CODE (x) == CONST_DOUBLE)
14966 low = CONST_DOUBLE_LOW (x);
14967 high = CONST_DOUBLE_HIGH (x);
14970 #if HOST_BITS_PER_WIDE_INT == 32
14973 high = (low & 0x80000000) ? ~0 : 0;
14977 low = INTVAL (x) & 0xffffffff;
14978 high = (HOST_WIDE_INT) INTVAL (x) >> 32;
14982 /* TOC entries are always Pmode-sized, but since this
14983 is a bigendian machine then if we're putting smaller
14984 integer constants in the TOC we have to pad them.
14985 (This is still a win over putting the constants in
14986 a separate constant pool, because then we'd have
14987 to have both a TOC entry _and_ the actual constant.)
14989 For a 32-bit target, CONST_INT values are loaded and shifted
14990 entirely within `low' and can be stored in one TOC entry. */
14992 /* It would be easy to make this work, but it doesn't now. */
14993 gcc_assert (!TARGET_64BIT || POINTER_SIZE >= GET_MODE_BITSIZE (mode));
14995 if (POINTER_SIZE > GET_MODE_BITSIZE (mode))
14997 #if HOST_BITS_PER_WIDE_INT == 32
14998 lshift_double (low, high, POINTER_SIZE - GET_MODE_BITSIZE (mode),
14999 POINTER_SIZE, &low, &high, 0);
15002 low <<= POINTER_SIZE - GET_MODE_BITSIZE (mode);
15003 high = (HOST_WIDE_INT) low >> 32;
15010 if (TARGET_MINIMAL_TOC)
15011 fputs (DOUBLE_INT_ASM_OP, file);
15013 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
15014 (long) high & 0xffffffff, (long) low & 0xffffffff);
15015 fprintf (file, "0x%lx%08lx\n",
15016 (long) high & 0xffffffff, (long) low & 0xffffffff);
15021 if (POINTER_SIZE < GET_MODE_BITSIZE (mode))
15023 if (TARGET_MINIMAL_TOC)
15024 fputs ("\t.long ", file);
15026 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
15027 (long) high & 0xffffffff, (long) low & 0xffffffff);
15028 fprintf (file, "0x%lx,0x%lx\n",
15029 (long) high & 0xffffffff, (long) low & 0xffffffff);
15033 if (TARGET_MINIMAL_TOC)
15034 fputs ("\t.long ", file);
15036 fprintf (file, "\t.tc IS_%lx[TC],", (long) low & 0xffffffff);
15037 fprintf (file, "0x%lx\n", (long) low & 0xffffffff);
15043 if (GET_CODE (x) == CONST)
15045 gcc_assert (GET_CODE (XEXP (x, 0)) == PLUS);
15047 base = XEXP (XEXP (x, 0), 0);
15048 offset = INTVAL (XEXP (XEXP (x, 0), 1));
15051 switch (GET_CODE (base))
15054 name = XSTR (base, 0);
15058 ASM_GENERATE_INTERNAL_LABEL (buf, "L",
15059 CODE_LABEL_NUMBER (XEXP (base, 0)));
15063 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (base));
15067 gcc_unreachable ();
15070 real_name = (*targetm.strip_name_encoding) (name);
15071 if (TARGET_MINIMAL_TOC)
15072 fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
15075 fprintf (file, "\t.tc %s", real_name);
15078 fprintf (file, ".N%d", - offset);
15080 fprintf (file, ".P%d", offset);
15082 fputs ("[TC],", file);
15085 /* Currently C++ toc references to vtables can be emitted before it
15086 is decided whether the vtable is public or private. If this is
15087 the case, then the linker will eventually complain that there is
15088 a TOC reference to an unknown section. Thus, for vtables only,
15089 we emit the TOC reference to reference the symbol and not the
15091 if (VTABLE_NAME_P (name))
15093 RS6000_OUTPUT_BASENAME (file, name);
15095 fprintf (file, "%d", offset);
15096 else if (offset > 0)
15097 fprintf (file, "+%d", offset);
15100 output_addr_const (file, x);
15104 /* Output an assembler pseudo-op to write an ASCII string of N characters
15105 starting at P to FILE.
15107 On the RS/6000, we have to do this using the .byte operation and
15108 write out special characters outside the quoted string.
15109 Also, the assembler is broken; very long strings are truncated,
15110 so we must artificially break them up early. */
15113 output_ascii (FILE *file, const char *p, int n)
15116 int i, count_string;
15117 const char *for_string = "\t.byte \"";
15118 const char *for_decimal = "\t.byte ";
15119 const char *to_close = NULL;
15122 for (i = 0; i < n; i++)
15125 if (c >= ' ' && c < 0177)
15128 fputs (for_string, file);
15131 /* Write two quotes to get one. */
15139 for_decimal = "\"\n\t.byte ";
15143 if (count_string >= 512)
15145 fputs (to_close, file);
15147 for_string = "\t.byte \"";
15148 for_decimal = "\t.byte ";
15156 fputs (for_decimal, file);
15157 fprintf (file, "%d", c);
15159 for_string = "\n\t.byte \"";
15160 for_decimal = ", ";
15166 /* Now close the string if we have written one. Then end the line. */
15168 fputs (to_close, file);
15171 /* Generate a unique section name for FILENAME for a section type
15172 represented by SECTION_DESC. Output goes into BUF.
15174 SECTION_DESC can be any string, as long as it is different for each
15175 possible section type.
15177 We name the section in the same manner as xlc. The name begins with an
15178 underscore followed by the filename (after stripping any leading directory
15179 names) with the last period replaced by the string SECTION_DESC. If
15180 FILENAME does not contain a period, SECTION_DESC is appended to the end of
15184 rs6000_gen_section_name (char **buf, const char *filename,
15185 const char *section_desc)
15187 const char *q, *after_last_slash, *last_period = 0;
15191 after_last_slash = filename;
15192 for (q = filename; *q; q++)
15195 after_last_slash = q + 1;
15196 else if (*q == '.')
15200 len = strlen (after_last_slash) + strlen (section_desc) + 2;
15201 *buf = (char *) xmalloc (len);
15206 for (q = after_last_slash; *q; q++)
15208 if (q == last_period)
15210 strcpy (p, section_desc);
15211 p += strlen (section_desc);
15215 else if (ISALNUM (*q))
15219 if (last_period == 0)
15220 strcpy (p, section_desc);
15225 /* Emit profile function. */
15228 output_profile_hook (int labelno ATTRIBUTE_UNUSED)
15230 if (TARGET_PROFILE_KERNEL)
15233 if (DEFAULT_ABI == ABI_AIX)
15235 #ifndef NO_PROFILE_COUNTERS
15236 # define NO_PROFILE_COUNTERS 0
15238 if (NO_PROFILE_COUNTERS)
15239 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 0);
15243 const char *label_name;
15246 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
15247 label_name = (*targetm.strip_name_encoding) (ggc_strdup (buf));
15248 fun = gen_rtx_SYMBOL_REF (Pmode, label_name);
15250 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 1,
15254 else if (DEFAULT_ABI == ABI_DARWIN)
15256 const char *mcount_name = RS6000_MCOUNT;
15257 int caller_addr_regno = LINK_REGISTER_REGNUM;
15259 /* Be conservative and always set this, at least for now. */
15260 current_function_uses_pic_offset_table = 1;
15263 /* For PIC code, set up a stub and collect the caller's address
15264 from r0, which is where the prologue puts it. */
15265 if (MACHOPIC_INDIRECT
15266 && current_function_uses_pic_offset_table)
15267 caller_addr_regno = 0;
15269 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, mcount_name),
15271 gen_rtx_REG (Pmode, caller_addr_regno), Pmode);
15275 /* Write function profiler code. */
15278 output_function_profiler (FILE *file, int labelno)
15283 switch (DEFAULT_ABI)
15286 gcc_unreachable ();
15292 warning (0, "no profiling of 64-bit code for this ABI");
15295 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
15296 fprintf (file, "\tmflr %s\n", reg_names[0]);
15299 fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file);
15300 asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
15301 reg_names[0], save_lr, reg_names[1]);
15302 asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
15303 asm_fprintf (file, "\t{l|lwz} %s,", reg_names[0]);
15304 assemble_name (file, buf);
15305 asm_fprintf (file, "@got(%s)\n", reg_names[12]);
15307 else if (flag_pic > 1)
15309 asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
15310 reg_names[0], save_lr, reg_names[1]);
15311 /* Now, we need to get the address of the label. */
15312 fputs ("\tbl 1f\n\t.long ", file);
15313 assemble_name (file, buf);
15314 fputs ("-.\n1:", file);
15315 asm_fprintf (file, "\tmflr %s\n", reg_names[11]);
15316 asm_fprintf (file, "\t{l|lwz} %s,0(%s)\n",
15317 reg_names[0], reg_names[11]);
15318 asm_fprintf (file, "\t{cax|add} %s,%s,%s\n",
15319 reg_names[0], reg_names[0], reg_names[11]);
15323 asm_fprintf (file, "\t{liu|lis} %s,", reg_names[12]);
15324 assemble_name (file, buf);
15325 fputs ("@ha\n", file);
15326 asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
15327 reg_names[0], save_lr, reg_names[1]);
15328 asm_fprintf (file, "\t{cal|la} %s,", reg_names[0]);
15329 assemble_name (file, buf);
15330 asm_fprintf (file, "@l(%s)\n", reg_names[12]);
15333 /* ABI_V4 saves the static chain reg with ASM_OUTPUT_REG_PUSH. */
15334 fprintf (file, "\tbl %s%s\n",
15335 RS6000_MCOUNT, flag_pic ? "@plt" : "");
15340 if (!TARGET_PROFILE_KERNEL)
15342 /* Don't do anything, done in output_profile_hook (). */
15346 gcc_assert (!TARGET_32BIT);
15348 asm_fprintf (file, "\tmflr %s\n", reg_names[0]);
15349 asm_fprintf (file, "\tstd %s,16(%s)\n", reg_names[0], reg_names[1]);
15351 if (cfun->static_chain_decl != NULL)
15353 asm_fprintf (file, "\tstd %s,24(%s)\n",
15354 reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
15355 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
15356 asm_fprintf (file, "\tld %s,24(%s)\n",
15357 reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
15360 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
15367 /* Power4 load update and store update instructions are cracked into a
15368 load or store and an integer insn which are executed in the same cycle.
15369 Branches have their own dispatch slot which does not count against the
15370 GCC issue rate, but it changes the program flow so there are no other
15371 instructions to issue in this cycle. */
15374 rs6000_variable_issue (FILE *stream ATTRIBUTE_UNUSED,
15375 int verbose ATTRIBUTE_UNUSED,
15376 rtx insn, int more)
15378 if (GET_CODE (PATTERN (insn)) == USE
15379 || GET_CODE (PATTERN (insn)) == CLOBBER)
15382 if (rs6000_sched_groups)
15384 if (is_microcoded_insn (insn))
15386 else if (is_cracked_insn (insn))
15387 return more > 2 ? more - 2 : 0;
15393 /* Adjust the cost of a scheduling dependency. Return the new cost of
15394 a dependency LINK or INSN on DEP_INSN. COST is the current cost. */
15397 rs6000_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost)
15399 if (! recog_memoized (insn))
15402 if (REG_NOTE_KIND (link) != 0)
15405 if (REG_NOTE_KIND (link) == 0)
15407 /* Data dependency; DEP_INSN writes a register that INSN reads
15408 some cycles later. */
15410 /* Separate a load from a narrower, dependent store. */
15411 if (rs6000_sched_groups
15412 && GET_CODE (PATTERN (insn)) == SET
15413 && GET_CODE (PATTERN (dep_insn)) == SET
15414 && GET_CODE (XEXP (PATTERN (insn), 1)) == MEM
15415 && GET_CODE (XEXP (PATTERN (dep_insn), 0)) == MEM
15416 && (GET_MODE_SIZE (GET_MODE (XEXP (PATTERN (insn), 1)))
15417 > GET_MODE_SIZE (GET_MODE (XEXP (PATTERN (dep_insn), 0)))))
15420 switch (get_attr_type (insn))
15423 /* Tell the first scheduling pass about the latency between
15424 a mtctr and bctr (and mtlr and br/blr). The first
15425 scheduling pass will not know about this latency since
15426 the mtctr instruction, which has the latency associated
15427 to it, will be generated by reload. */
15428 return TARGET_POWER ? 5 : 4;
15430 /* Leave some extra cycles between a compare and its
15431 dependent branch, to inhibit expensive mispredicts. */
15432 if ((rs6000_cpu_attr == CPU_PPC603
15433 || rs6000_cpu_attr == CPU_PPC604
15434 || rs6000_cpu_attr == CPU_PPC604E
15435 || rs6000_cpu_attr == CPU_PPC620
15436 || rs6000_cpu_attr == CPU_PPC630
15437 || rs6000_cpu_attr == CPU_PPC750
15438 || rs6000_cpu_attr == CPU_PPC7400
15439 || rs6000_cpu_attr == CPU_PPC7450
15440 || rs6000_cpu_attr == CPU_POWER4
15441 || rs6000_cpu_attr == CPU_POWER5)
15442 && recog_memoized (dep_insn)
15443 && (INSN_CODE (dep_insn) >= 0)
15444 && (get_attr_type (dep_insn) == TYPE_CMP
15445 || get_attr_type (dep_insn) == TYPE_COMPARE
15446 || get_attr_type (dep_insn) == TYPE_DELAYED_COMPARE
15447 || get_attr_type (dep_insn) == TYPE_IMUL_COMPARE
15448 || get_attr_type (dep_insn) == TYPE_LMUL_COMPARE
15449 || get_attr_type (dep_insn) == TYPE_FPCOMPARE
15450 || get_attr_type (dep_insn) == TYPE_CR_LOGICAL
15451 || get_attr_type (dep_insn) == TYPE_DELAYED_CR))
15456 /* Fall out to return default cost. */
15462 /* The function returns a true if INSN is microcoded.
15463 Return false otherwise. */
15466 is_microcoded_insn (rtx insn)
15468 if (!insn || !INSN_P (insn)
15469 || GET_CODE (PATTERN (insn)) == USE
15470 || GET_CODE (PATTERN (insn)) == CLOBBER)
15473 if (rs6000_sched_groups)
15475 enum attr_type type = get_attr_type (insn);
15476 if (type == TYPE_LOAD_EXT_U
15477 || type == TYPE_LOAD_EXT_UX
15478 || type == TYPE_LOAD_UX
15479 || type == TYPE_STORE_UX
15480 || type == TYPE_MFCR)
15487 /* The function returns a nonzero value if INSN can be scheduled only
15488 as the first insn in a dispatch group ("dispatch-slot restricted").
15489 In this case, the returned value indicates how many dispatch slots
15490 the insn occupies (at the beginning of the group).
15491 Return 0 otherwise. */
15494 is_dispatch_slot_restricted (rtx insn)
15496 enum attr_type type;
15498 if (!rs6000_sched_groups)
15502 || insn == NULL_RTX
15503 || GET_CODE (insn) == NOTE
15504 || GET_CODE (PATTERN (insn)) == USE
15505 || GET_CODE (PATTERN (insn)) == CLOBBER)
15508 type = get_attr_type (insn);
15515 case TYPE_DELAYED_CR:
15516 case TYPE_CR_LOGICAL:
15524 if (rs6000_cpu == PROCESSOR_POWER5
15525 && is_cracked_insn (insn))
15531 /* The function returns true if INSN is cracked into 2 instructions
15532 by the processor (and therefore occupies 2 issue slots). */
15535 is_cracked_insn (rtx insn)
15537 if (!insn || !INSN_P (insn)
15538 || GET_CODE (PATTERN (insn)) == USE
15539 || GET_CODE (PATTERN (insn)) == CLOBBER)
15542 if (rs6000_sched_groups)
15544 enum attr_type type = get_attr_type (insn);
15545 if (type == TYPE_LOAD_U || type == TYPE_STORE_U
15546 || type == TYPE_FPLOAD_U || type == TYPE_FPSTORE_U
15547 || type == TYPE_FPLOAD_UX || type == TYPE_FPSTORE_UX
15548 || type == TYPE_LOAD_EXT || type == TYPE_DELAYED_CR
15549 || type == TYPE_COMPARE || type == TYPE_DELAYED_COMPARE
15550 || type == TYPE_IMUL_COMPARE || type == TYPE_LMUL_COMPARE
15551 || type == TYPE_IDIV || type == TYPE_LDIV
15552 || type == TYPE_INSERT_WORD)
15559 /* The function returns true if INSN can be issued only from
15560 the branch slot. */
15563 is_branch_slot_insn (rtx insn)
15565 if (!insn || !INSN_P (insn)
15566 || GET_CODE (PATTERN (insn)) == USE
15567 || GET_CODE (PATTERN (insn)) == CLOBBER)
15570 if (rs6000_sched_groups)
15572 enum attr_type type = get_attr_type (insn);
15573 if (type == TYPE_BRANCH || type == TYPE_JMPREG)
15581 /* A C statement (sans semicolon) to update the integer scheduling
15582 priority INSN_PRIORITY (INSN). Increase the priority to execute the
15583 INSN earlier, reduce the priority to execute INSN later. Do not
15584 define this macro if you do not need to adjust the scheduling
15585 priorities of insns. */
15588 rs6000_adjust_priority (rtx insn ATTRIBUTE_UNUSED, int priority)
15590 /* On machines (like the 750) which have asymmetric integer units,
15591 where one integer unit can do multiply and divides and the other
15592 can't, reduce the priority of multiply/divide so it is scheduled
15593 before other integer operations. */
15596 if (! INSN_P (insn))
15599 if (GET_CODE (PATTERN (insn)) == USE)
15602 switch (rs6000_cpu_attr) {
15604 switch (get_attr_type (insn))
15611 fprintf (stderr, "priority was %#x (%d) before adjustment\n",
15612 priority, priority);
15613 if (priority >= 0 && priority < 0x01000000)
15620 if (is_dispatch_slot_restricted (insn)
15621 && reload_completed
15622 && current_sched_info->sched_max_insns_priority
15623 && rs6000_sched_restricted_insns_priority)
15626 /* Prioritize insns that can be dispatched only in the first
15628 if (rs6000_sched_restricted_insns_priority == 1)
15629 /* Attach highest priority to insn. This means that in
15630 haifa-sched.c:ready_sort(), dispatch-slot restriction considerations
15631 precede 'priority' (critical path) considerations. */
15632 return current_sched_info->sched_max_insns_priority;
15633 else if (rs6000_sched_restricted_insns_priority == 2)
15634 /* Increase priority of insn by a minimal amount. This means that in
15635 haifa-sched.c:ready_sort(), only 'priority' (critical path)
15636 considerations precede dispatch-slot restriction considerations. */
15637 return (priority + 1);
15643 /* Return how many instructions the machine can issue per cycle. */
15646 rs6000_issue_rate (void)
15648 /* Use issue rate of 1 for first scheduling pass to decrease degradation. */
15649 if (!reload_completed)
15652 switch (rs6000_cpu_attr) {
15653 case CPU_RIOS1: /* ? */
15655 case CPU_PPC601: /* ? */
15678 /* Return how many instructions to look ahead for better insn
15682 rs6000_use_sched_lookahead (void)
15684 if (rs6000_cpu_attr == CPU_PPC8540)
15689 /* Determine is PAT refers to memory. */
15692 is_mem_ref (rtx pat)
15698 if (GET_CODE (pat) == MEM)
15701 /* Recursively process the pattern. */
15702 fmt = GET_RTX_FORMAT (GET_CODE (pat));
15704 for (i = GET_RTX_LENGTH (GET_CODE (pat)) - 1; i >= 0 && !ret; i--)
15707 ret |= is_mem_ref (XEXP (pat, i));
15708 else if (fmt[i] == 'E')
15709 for (j = XVECLEN (pat, i) - 1; j >= 0; j--)
15710 ret |= is_mem_ref (XVECEXP (pat, i, j));
15716 /* Determine if PAT is a PATTERN of a load insn. */
15719 is_load_insn1 (rtx pat)
15721 if (!pat || pat == NULL_RTX)
15724 if (GET_CODE (pat) == SET)
15725 return is_mem_ref (SET_SRC (pat));
15727 if (GET_CODE (pat) == PARALLEL)
15731 for (i = 0; i < XVECLEN (pat, 0); i++)
15732 if (is_load_insn1 (XVECEXP (pat, 0, i)))
15739 /* Determine if INSN loads from memory. */
15742 is_load_insn (rtx insn)
15744 if (!insn || !INSN_P (insn))
15747 if (GET_CODE (insn) == CALL_INSN)
15750 return is_load_insn1 (PATTERN (insn));
15753 /* Determine if PAT is a PATTERN of a store insn. */
15756 is_store_insn1 (rtx pat)
15758 if (!pat || pat == NULL_RTX)
15761 if (GET_CODE (pat) == SET)
15762 return is_mem_ref (SET_DEST (pat));
15764 if (GET_CODE (pat) == PARALLEL)
15768 for (i = 0; i < XVECLEN (pat, 0); i++)
15769 if (is_store_insn1 (XVECEXP (pat, 0, i)))
15776 /* Determine if INSN stores to memory. */
15779 is_store_insn (rtx insn)
15781 if (!insn || !INSN_P (insn))
15784 return is_store_insn1 (PATTERN (insn));
15787 /* Returns whether the dependence between INSN and NEXT is considered
15788 costly by the given target. */
15791 rs6000_is_costly_dependence (rtx insn, rtx next, rtx link, int cost,
15794 /* If the flag is not enabled - no dependence is considered costly;
15795 allow all dependent insns in the same group.
15796 This is the most aggressive option. */
15797 if (rs6000_sched_costly_dep == no_dep_costly)
15800 /* If the flag is set to 1 - a dependence is always considered costly;
15801 do not allow dependent instructions in the same group.
15802 This is the most conservative option. */
15803 if (rs6000_sched_costly_dep == all_deps_costly)
15806 if (rs6000_sched_costly_dep == store_to_load_dep_costly
15807 && is_load_insn (next)
15808 && is_store_insn (insn))
15809 /* Prevent load after store in the same group. */
15812 if (rs6000_sched_costly_dep == true_store_to_load_dep_costly
15813 && is_load_insn (next)
15814 && is_store_insn (insn)
15815 && (!link || (int) REG_NOTE_KIND (link) == 0))
15816 /* Prevent load after store in the same group if it is a true
15820 /* The flag is set to X; dependences with latency >= X are considered costly,
15821 and will not be scheduled in the same group. */
15822 if (rs6000_sched_costly_dep <= max_dep_latency
15823 && ((cost - distance) >= (int)rs6000_sched_costly_dep))
15829 /* Return the next insn after INSN that is found before TAIL is reached,
15830 skipping any "non-active" insns - insns that will not actually occupy
15831 an issue slot. Return NULL_RTX if such an insn is not found. */
15834 get_next_active_insn (rtx insn, rtx tail)
15838 if (!insn || insn == tail)
15841 next_insn = NEXT_INSN (insn);
15844 && next_insn != tail
15845 && (GET_CODE (next_insn) == NOTE
15846 || GET_CODE (PATTERN (next_insn)) == USE
15847 || GET_CODE (PATTERN (next_insn)) == CLOBBER))
15849 next_insn = NEXT_INSN (next_insn);
15852 if (!next_insn || next_insn == tail)
15858 /* Return whether the presence of INSN causes a dispatch group termination
15859 of group WHICH_GROUP.
15861 If WHICH_GROUP == current_group, this function will return true if INSN
15862 causes the termination of the current group (i.e, the dispatch group to
15863 which INSN belongs). This means that INSN will be the last insn in the
15864 group it belongs to.
15866 If WHICH_GROUP == previous_group, this function will return true if INSN
15867 causes the termination of the previous group (i.e, the dispatch group that
15868 precedes the group to which INSN belongs). This means that INSN will be
15869 the first insn in the group it belongs to). */
15872 insn_terminates_group_p (rtx insn, enum group_termination which_group)
15874 enum attr_type type;
15879 type = get_attr_type (insn);
15881 if (is_microcoded_insn (insn))
15884 if (which_group == current_group)
15886 if (is_branch_slot_insn (insn))
15890 else if (which_group == previous_group)
15892 if (is_dispatch_slot_restricted (insn))
15900 /* Return true if it is recommended to keep NEXT_INSN "far" (in a separate
15901 dispatch group) from the insns in GROUP_INSNS. Return false otherwise. */
15904 is_costly_group (rtx *group_insns, rtx next_insn)
15909 int issue_rate = rs6000_issue_rate ();
15911 for (i = 0; i < issue_rate; i++)
15913 rtx insn = group_insns[i];
15916 for (link = INSN_DEPEND (insn); link != 0; link = XEXP (link, 1))
15918 rtx next = XEXP (link, 0);
15919 if (next == next_insn)
15921 cost = insn_cost (insn, link, next_insn);
15922 if (rs6000_is_costly_dependence (insn, next_insn, link, cost, 0))
15931 /* Utility of the function redefine_groups.
15932 Check if it is too costly to schedule NEXT_INSN together with GROUP_INSNS
15933 in the same dispatch group. If so, insert nops before NEXT_INSN, in order
15934 to keep it "far" (in a separate group) from GROUP_INSNS, following
15935 one of the following schemes, depending on the value of the flag
15936 -minsert_sched_nops = X:
15937 (1) X == sched_finish_regroup_exact: insert exactly as many nops as needed
15938 in order to force NEXT_INSN into a separate group.
15939 (2) X < sched_finish_regroup_exact: insert exactly X nops.
15940 GROUP_END, CAN_ISSUE_MORE and GROUP_COUNT record the state after nop
15941 insertion (has a group just ended, how many vacant issue slots remain in the
15942 last group, and how many dispatch groups were encountered so far). */
15945 force_new_group (int sched_verbose, FILE *dump, rtx *group_insns,
15946 rtx next_insn, bool *group_end, int can_issue_more,
15951 int issue_rate = rs6000_issue_rate ();
15952 bool end = *group_end;
15955 if (next_insn == NULL_RTX)
15956 return can_issue_more;
15958 if (rs6000_sched_insert_nops > sched_finish_regroup_exact)
15959 return can_issue_more;
15961 force = is_costly_group (group_insns, next_insn);
15963 return can_issue_more;
15965 if (sched_verbose > 6)
15966 fprintf (dump,"force: group count = %d, can_issue_more = %d\n",
15967 *group_count ,can_issue_more);
15969 if (rs6000_sched_insert_nops == sched_finish_regroup_exact)
15972 can_issue_more = 0;
15974 /* Since only a branch can be issued in the last issue_slot, it is
15975 sufficient to insert 'can_issue_more - 1' nops if next_insn is not
15976 a branch. If next_insn is a branch, we insert 'can_issue_more' nops;
15977 in this case the last nop will start a new group and the branch
15978 will be forced to the new group. */
15979 if (can_issue_more && !is_branch_slot_insn (next_insn))
15982 while (can_issue_more > 0)
15985 emit_insn_before (nop, next_insn);
15993 if (rs6000_sched_insert_nops < sched_finish_regroup_exact)
15995 int n_nops = rs6000_sched_insert_nops;
15997 /* Nops can't be issued from the branch slot, so the effective
15998 issue_rate for nops is 'issue_rate - 1'. */
15999 if (can_issue_more == 0)
16000 can_issue_more = issue_rate;
16002 if (can_issue_more == 0)
16004 can_issue_more = issue_rate - 1;
16007 for (i = 0; i < issue_rate; i++)
16009 group_insns[i] = 0;
16016 emit_insn_before (nop, next_insn);
16017 if (can_issue_more == issue_rate - 1) /* new group begins */
16020 if (can_issue_more == 0)
16022 can_issue_more = issue_rate - 1;
16025 for (i = 0; i < issue_rate; i++)
16027 group_insns[i] = 0;
16033 /* Scale back relative to 'issue_rate' (instead of 'issue_rate - 1'). */
16036 /* Is next_insn going to start a new group? */
16039 || (can_issue_more == 1 && !is_branch_slot_insn (next_insn))
16040 || (can_issue_more <= 2 && is_cracked_insn (next_insn))
16041 || (can_issue_more < issue_rate &&
16042 insn_terminates_group_p (next_insn, previous_group)));
16043 if (*group_end && end)
16046 if (sched_verbose > 6)
16047 fprintf (dump, "done force: group count = %d, can_issue_more = %d\n",
16048 *group_count, can_issue_more);
16049 return can_issue_more;
16052 return can_issue_more;
16055 /* This function tries to synch the dispatch groups that the compiler "sees"
16056 with the dispatch groups that the processor dispatcher is expected to
16057 form in practice. It tries to achieve this synchronization by forcing the
16058 estimated processor grouping on the compiler (as opposed to the function
16059 'pad_goups' which tries to force the scheduler's grouping on the processor).
16061 The function scans the insn sequence between PREV_HEAD_INSN and TAIL and
16062 examines the (estimated) dispatch groups that will be formed by the processor
16063 dispatcher. It marks these group boundaries to reflect the estimated
16064 processor grouping, overriding the grouping that the scheduler had marked.
16065 Depending on the value of the flag '-minsert-sched-nops' this function can
16066 force certain insns into separate groups or force a certain distance between
16067 them by inserting nops, for example, if there exists a "costly dependence"
16070 The function estimates the group boundaries that the processor will form as
16071 folllows: It keeps track of how many vacant issue slots are available after
16072 each insn. A subsequent insn will start a new group if one of the following
16074 - no more vacant issue slots remain in the current dispatch group.
16075 - only the last issue slot, which is the branch slot, is vacant, but the next
16076 insn is not a branch.
16077 - only the last 2 or less issue slots, including the branch slot, are vacant,
16078 which means that a cracked insn (which occupies two issue slots) can't be
16079 issued in this group.
16080 - less than 'issue_rate' slots are vacant, and the next insn always needs to
16081 start a new group. */
16084 redefine_groups (FILE *dump, int sched_verbose, rtx prev_head_insn, rtx tail)
16086 rtx insn, next_insn;
16088 int can_issue_more;
16091 int group_count = 0;
16095 issue_rate = rs6000_issue_rate ();
16096 group_insns = alloca (issue_rate * sizeof (rtx));
16097 for (i = 0; i < issue_rate; i++)
16099 group_insns[i] = 0;
16101 can_issue_more = issue_rate;
16103 insn = get_next_active_insn (prev_head_insn, tail);
16106 while (insn != NULL_RTX)
16108 slot = (issue_rate - can_issue_more);
16109 group_insns[slot] = insn;
16111 rs6000_variable_issue (dump, sched_verbose, insn, can_issue_more);
16112 if (insn_terminates_group_p (insn, current_group))
16113 can_issue_more = 0;
16115 next_insn = get_next_active_insn (insn, tail);
16116 if (next_insn == NULL_RTX)
16117 return group_count + 1;
16119 /* Is next_insn going to start a new group? */
16121 = (can_issue_more == 0
16122 || (can_issue_more == 1 && !is_branch_slot_insn (next_insn))
16123 || (can_issue_more <= 2 && is_cracked_insn (next_insn))
16124 || (can_issue_more < issue_rate &&
16125 insn_terminates_group_p (next_insn, previous_group)));
16127 can_issue_more = force_new_group (sched_verbose, dump, group_insns,
16128 next_insn, &group_end, can_issue_more,
16134 can_issue_more = 0;
16135 for (i = 0; i < issue_rate; i++)
16137 group_insns[i] = 0;
16141 if (GET_MODE (next_insn) == TImode && can_issue_more)
16142 PUT_MODE (next_insn, VOIDmode);
16143 else if (!can_issue_more && GET_MODE (next_insn) != TImode)
16144 PUT_MODE (next_insn, TImode);
16147 if (can_issue_more == 0)
16148 can_issue_more = issue_rate;
16151 return group_count;
16154 /* Scan the insn sequence between PREV_HEAD_INSN and TAIL and examine the
16155 dispatch group boundaries that the scheduler had marked. Pad with nops
16156 any dispatch groups which have vacant issue slots, in order to force the
16157 scheduler's grouping on the processor dispatcher. The function
16158 returns the number of dispatch groups found. */
16161 pad_groups (FILE *dump, int sched_verbose, rtx prev_head_insn, rtx tail)
16163 rtx insn, next_insn;
16166 int can_issue_more;
16168 int group_count = 0;
16170 /* Initialize issue_rate. */
16171 issue_rate = rs6000_issue_rate ();
16172 can_issue_more = issue_rate;
16174 insn = get_next_active_insn (prev_head_insn, tail);
16175 next_insn = get_next_active_insn (insn, tail);
16177 while (insn != NULL_RTX)
16180 rs6000_variable_issue (dump, sched_verbose, insn, can_issue_more);
16182 group_end = (next_insn == NULL_RTX || GET_MODE (next_insn) == TImode);
16184 if (next_insn == NULL_RTX)
16189 /* If the scheduler had marked group termination at this location
16190 (between insn and next_indn), and neither insn nor next_insn will
16191 force group termination, pad the group with nops to force group
16194 && (rs6000_sched_insert_nops == sched_finish_pad_groups)
16195 && !insn_terminates_group_p (insn, current_group)
16196 && !insn_terminates_group_p (next_insn, previous_group))
16198 if (!is_branch_slot_insn (next_insn))
16201 while (can_issue_more)
16204 emit_insn_before (nop, next_insn);
16209 can_issue_more = issue_rate;
16214 next_insn = get_next_active_insn (insn, tail);
16217 return group_count;
16220 /* The following function is called at the end of scheduling BB.
16221 After reload, it inserts nops at insn group bundling. */
16224 rs6000_sched_finish (FILE *dump, int sched_verbose)
16229 fprintf (dump, "=== Finishing schedule.\n");
16231 if (reload_completed && rs6000_sched_groups)
16233 if (rs6000_sched_insert_nops == sched_finish_none)
16236 if (rs6000_sched_insert_nops == sched_finish_pad_groups)
16237 n_groups = pad_groups (dump, sched_verbose,
16238 current_sched_info->prev_head,
16239 current_sched_info->next_tail);
16241 n_groups = redefine_groups (dump, sched_verbose,
16242 current_sched_info->prev_head,
16243 current_sched_info->next_tail);
16245 if (sched_verbose >= 6)
16247 fprintf (dump, "ngroups = %d\n", n_groups);
16248 print_rtl (dump, current_sched_info->prev_head);
16249 fprintf (dump, "Done finish_sched\n");
16254 /* Length in units of the trampoline for entering a nested function. */
16257 rs6000_trampoline_size (void)
16261 switch (DEFAULT_ABI)
16264 gcc_unreachable ();
16267 ret = (TARGET_32BIT) ? 12 : 24;
16272 ret = (TARGET_32BIT) ? 40 : 48;
16279 /* Emit RTL insns to initialize the variable parts of a trampoline.
16280 FNADDR is an RTX for the address of the function's pure code.
16281 CXT is an RTX for the static chain value for the function. */
16284 rs6000_initialize_trampoline (rtx addr, rtx fnaddr, rtx cxt)
16286 enum machine_mode pmode = Pmode;
16287 int regsize = (TARGET_32BIT) ? 4 : 8;
16288 rtx ctx_reg = force_reg (pmode, cxt);
16290 switch (DEFAULT_ABI)
16293 gcc_unreachable ();
16295 /* Macros to shorten the code expansions below. */
16296 #define MEM_DEREF(addr) gen_rtx_MEM (pmode, memory_address (pmode, addr))
16297 #define MEM_PLUS(addr,offset) \
16298 gen_rtx_MEM (pmode, memory_address (pmode, plus_constant (addr, offset)))
16300 /* Under AIX, just build the 3 word function descriptor */
16303 rtx fn_reg = gen_reg_rtx (pmode);
16304 rtx toc_reg = gen_reg_rtx (pmode);
16305 emit_move_insn (fn_reg, MEM_DEREF (fnaddr));
16306 emit_move_insn (toc_reg, MEM_PLUS (fnaddr, regsize));
16307 emit_move_insn (MEM_DEREF (addr), fn_reg);
16308 emit_move_insn (MEM_PLUS (addr, regsize), toc_reg);
16309 emit_move_insn (MEM_PLUS (addr, 2*regsize), ctx_reg);
16313 /* Under V.4/eabi/darwin, __trampoline_setup does the real work. */
16316 emit_library_call (gen_rtx_SYMBOL_REF (SImode, "__trampoline_setup"),
16317 FALSE, VOIDmode, 4,
16319 GEN_INT (rs6000_trampoline_size ()), SImode,
16329 /* Table of valid machine attributes. */
16331 const struct attribute_spec rs6000_attribute_table[] =
16333 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
16334 { "altivec", 1, 1, false, true, false, rs6000_handle_altivec_attribute },
16335 { "longcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
16336 { "shortcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
16337 #ifdef SUBTARGET_ATTRIBUTE_TABLE
16338 SUBTARGET_ATTRIBUTE_TABLE,
16340 { NULL, 0, 0, false, false, false, NULL }
16343 /* Handle the "altivec" attribute. The attribute may have
16344 arguments as follows:
16346 __attribute__((altivec(vector__)))
16347 __attribute__((altivec(pixel__))) (always followed by 'unsigned short')
16348 __attribute__((altivec(bool__))) (always followed by 'unsigned')
16350 and may appear more than once (e.g., 'vector bool char') in a
16351 given declaration. */
16354 rs6000_handle_altivec_attribute (tree *node,
16355 tree name ATTRIBUTE_UNUSED,
16357 int flags ATTRIBUTE_UNUSED,
16358 bool *no_add_attrs)
16360 tree type = *node, result = NULL_TREE;
16361 enum machine_mode mode;
16364 = ((args && TREE_CODE (args) == TREE_LIST && TREE_VALUE (args)
16365 && TREE_CODE (TREE_VALUE (args)) == IDENTIFIER_NODE)
16366 ? *IDENTIFIER_POINTER (TREE_VALUE (args))
16369 while (POINTER_TYPE_P (type)
16370 || TREE_CODE (type) == FUNCTION_TYPE
16371 || TREE_CODE (type) == METHOD_TYPE
16372 || TREE_CODE (type) == ARRAY_TYPE)
16373 type = TREE_TYPE (type);
16375 mode = TYPE_MODE (type);
16377 /* Check for invalid AltiVec type qualifiers. */
16378 if (type == long_unsigned_type_node || type == long_integer_type_node)
16381 error ("use of %<long%> in AltiVec types is invalid for 64-bit code");
16382 else if (rs6000_warn_altivec_long)
16383 warning (0, "use of %<long%> in AltiVec types is deprecated; use %<int%>");
16385 else if (type == long_long_unsigned_type_node
16386 || type == long_long_integer_type_node)
16387 error ("use of %<long long%> in AltiVec types is invalid");
16388 else if (type == double_type_node)
16389 error ("use of %<double%> in AltiVec types is invalid");
16390 else if (type == long_double_type_node)
16391 error ("use of %<long double%> in AltiVec types is invalid");
16392 else if (type == boolean_type_node)
16393 error ("use of boolean types in AltiVec types is invalid");
16394 else if (TREE_CODE (type) == COMPLEX_TYPE)
16395 error ("use of %<complex%> in AltiVec types is invalid");
16397 switch (altivec_type)
16400 unsigned_p = TYPE_UNSIGNED (type);
16404 result = (unsigned_p ? unsigned_V4SI_type_node : V4SI_type_node);
16407 result = (unsigned_p ? unsigned_V8HI_type_node : V8HI_type_node);
16410 result = (unsigned_p ? unsigned_V16QI_type_node : V16QI_type_node);
16412 case SFmode: result = V4SF_type_node; break;
16413 /* If the user says 'vector int bool', we may be handed the 'bool'
16414 attribute _before_ the 'vector' attribute, and so select the
16415 proper type in the 'b' case below. */
16416 case V4SImode: case V8HImode: case V16QImode: case V4SFmode:
16424 case SImode: case V4SImode: result = bool_V4SI_type_node; break;
16425 case HImode: case V8HImode: result = bool_V8HI_type_node; break;
16426 case QImode: case V16QImode: result = bool_V16QI_type_node;
16433 case V8HImode: result = pixel_V8HI_type_node;
16439 if (result && result != type && TYPE_READONLY (type))
16440 result = build_qualified_type (result, TYPE_QUAL_CONST);
16442 *no_add_attrs = true; /* No need to hang on to the attribute. */
16445 *node = reconstruct_complex_type (*node, result);
16450 /* AltiVec defines four built-in scalar types that serve as vector
16451 elements; we must teach the compiler how to mangle them. */
16453 static const char *
16454 rs6000_mangle_fundamental_type (tree type)
16456 if (type == bool_char_type_node) return "U6__boolc";
16457 if (type == bool_short_type_node) return "U6__bools";
16458 if (type == pixel_type_node) return "u7__pixel";
16459 if (type == bool_int_type_node) return "U6__booli";
16461 /* For all other types, use normal C++ mangling. */
16465 /* Handle a "longcall" or "shortcall" attribute; arguments as in
16466 struct attribute_spec.handler. */
16469 rs6000_handle_longcall_attribute (tree *node, tree name,
16470 tree args ATTRIBUTE_UNUSED,
16471 int flags ATTRIBUTE_UNUSED,
16472 bool *no_add_attrs)
16474 if (TREE_CODE (*node) != FUNCTION_TYPE
16475 && TREE_CODE (*node) != FIELD_DECL
16476 && TREE_CODE (*node) != TYPE_DECL)
16478 warning (0, "%qs attribute only applies to functions",
16479 IDENTIFIER_POINTER (name));
16480 *no_add_attrs = true;
16486 /* Set longcall attributes on all functions declared when
16487 rs6000_default_long_calls is true. */
16489 rs6000_set_default_type_attributes (tree type)
16491 if (rs6000_default_long_calls
16492 && (TREE_CODE (type) == FUNCTION_TYPE
16493 || TREE_CODE (type) == METHOD_TYPE))
16494 TYPE_ATTRIBUTES (type) = tree_cons (get_identifier ("longcall"),
16496 TYPE_ATTRIBUTES (type));
16499 /* Return a reference suitable for calling a function with the
16500 longcall attribute. */
16503 rs6000_longcall_ref (rtx call_ref)
16505 const char *call_name;
16508 if (GET_CODE (call_ref) != SYMBOL_REF)
16511 /* System V adds '.' to the internal name, so skip them. */
16512 call_name = XSTR (call_ref, 0);
16513 if (*call_name == '.')
16515 while (*call_name == '.')
16518 node = get_identifier (call_name);
16519 call_ref = gen_rtx_SYMBOL_REF (VOIDmode, IDENTIFIER_POINTER (node));
16522 return force_reg (Pmode, call_ref);
16525 #ifdef USING_ELFOS_H
16527 /* A C statement or statements to switch to the appropriate section
16528 for output of RTX in mode MODE. You can assume that RTX is some
16529 kind of constant in RTL. The argument MODE is redundant except in
16530 the case of a `const_int' rtx. Select the section by calling
16531 `text_section' or one of the alternatives for other sections.
16533 Do not define this macro if you put all constants in the read-only
16537 rs6000_elf_select_rtx_section (enum machine_mode mode, rtx x,
16538 unsigned HOST_WIDE_INT align)
16540 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
16543 default_elf_select_rtx_section (mode, x, align);
16546 /* A C statement or statements to switch to the appropriate
16547 section for output of DECL. DECL is either a `VAR_DECL' node
16548 or a constant of some sort. RELOC indicates whether forming
16549 the initial value of DECL requires link-time relocations. */
16552 rs6000_elf_select_section (tree decl, int reloc,
16553 unsigned HOST_WIDE_INT align)
16555 /* Pretend that we're always building for a shared library when
16556 ABI_AIX, because otherwise we end up with dynamic relocations
16557 in read-only sections. This happens for function pointers,
16558 references to vtables in typeinfo, and probably other cases. */
16559 default_elf_select_section_1 (decl, reloc, align,
16560 flag_pic || DEFAULT_ABI == ABI_AIX);
16563 /* A C statement to build up a unique section name, expressed as a
16564 STRING_CST node, and assign it to DECL_SECTION_NAME (decl).
16565 RELOC indicates whether the initial value of EXP requires
16566 link-time relocations. If you do not define this macro, GCC will use
16567 the symbol name prefixed by `.' as the section name. Note - this
16568 macro can now be called for uninitialized data items as well as
16569 initialized data and functions. */
16572 rs6000_elf_unique_section (tree decl, int reloc)
16574 /* As above, pretend that we're always building for a shared library
16575 when ABI_AIX, to avoid dynamic relocations in read-only sections. */
16576 default_unique_section_1 (decl, reloc,
16577 flag_pic || DEFAULT_ABI == ABI_AIX);
16580 /* For a SYMBOL_REF, set generic flags and then perform some
16581 target-specific processing.
16583 When the AIX ABI is requested on a non-AIX system, replace the
16584 function name with the real name (with a leading .) rather than the
16585 function descriptor name. This saves a lot of overriding code to
16586 read the prefixes. */
16589 rs6000_elf_encode_section_info (tree decl, rtx rtl, int first)
16591 default_encode_section_info (decl, rtl, first);
16594 && TREE_CODE (decl) == FUNCTION_DECL
16596 && DEFAULT_ABI == ABI_AIX)
16598 rtx sym_ref = XEXP (rtl, 0);
16599 size_t len = strlen (XSTR (sym_ref, 0));
16600 char *str = alloca (len + 2);
16602 memcpy (str + 1, XSTR (sym_ref, 0), len + 1);
16603 XSTR (sym_ref, 0) = ggc_alloc_string (str, len + 1);
16608 rs6000_elf_in_small_data_p (tree decl)
16610 if (rs6000_sdata == SDATA_NONE)
16613 /* We want to merge strings, so we never consider them small data. */
16614 if (TREE_CODE (decl) == STRING_CST)
16617 /* Functions are never in the small data area. */
16618 if (TREE_CODE (decl) == FUNCTION_DECL)
16621 if (TREE_CODE (decl) == VAR_DECL && DECL_SECTION_NAME (decl))
16623 const char *section = TREE_STRING_POINTER (DECL_SECTION_NAME (decl));
16624 if (strcmp (section, ".sdata") == 0
16625 || strcmp (section, ".sdata2") == 0
16626 || strcmp (section, ".sbss") == 0
16627 || strcmp (section, ".sbss2") == 0
16628 || strcmp (section, ".PPC.EMB.sdata0") == 0
16629 || strcmp (section, ".PPC.EMB.sbss0") == 0)
16634 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
16637 && (unsigned HOST_WIDE_INT) size <= g_switch_value
16638 /* If it's not public, and we're not going to reference it there,
16639 there's no need to put it in the small data section. */
16640 && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)))
16647 #endif /* USING_ELFOS_H */
16650 /* Return a REG that occurs in ADDR with coefficient 1.
16651 ADDR can be effectively incremented by incrementing REG.
16653 r0 is special and we must not select it as an address
16654 register by this routine since our caller will try to
16655 increment the returned register via an "la" instruction. */
16658 find_addr_reg (rtx addr)
16660 while (GET_CODE (addr) == PLUS)
16662 if (GET_CODE (XEXP (addr, 0)) == REG
16663 && REGNO (XEXP (addr, 0)) != 0)
16664 addr = XEXP (addr, 0);
16665 else if (GET_CODE (XEXP (addr, 1)) == REG
16666 && REGNO (XEXP (addr, 1)) != 0)
16667 addr = XEXP (addr, 1);
16668 else if (CONSTANT_P (XEXP (addr, 0)))
16669 addr = XEXP (addr, 1);
16670 else if (CONSTANT_P (XEXP (addr, 1)))
16671 addr = XEXP (addr, 0);
16673 gcc_unreachable ();
16675 gcc_assert (GET_CODE (addr) == REG && REGNO (addr) != 0);
16680 rs6000_fatal_bad_address (rtx op)
16682 fatal_insn ("bad address", op);
16687 static tree branch_island_list = 0;
16689 /* Remember to generate a branch island for far calls to the given
16693 add_compiler_branch_island (tree label_name, tree function_name,
16696 tree branch_island = build_tree_list (function_name, label_name);
16697 TREE_TYPE (branch_island) = build_int_cst (NULL_TREE, line_number);
16698 TREE_CHAIN (branch_island) = branch_island_list;
16699 branch_island_list = branch_island;
16702 #define BRANCH_ISLAND_LABEL_NAME(BRANCH_ISLAND) TREE_VALUE (BRANCH_ISLAND)
16703 #define BRANCH_ISLAND_FUNCTION_NAME(BRANCH_ISLAND) TREE_PURPOSE (BRANCH_ISLAND)
16704 #define BRANCH_ISLAND_LINE_NUMBER(BRANCH_ISLAND) \
16705 TREE_INT_CST_LOW (TREE_TYPE (BRANCH_ISLAND))
16707 /* Generate far-jump branch islands for everything on the
16708 branch_island_list. Invoked immediately after the last instruction
16709 of the epilogue has been emitted; the branch-islands must be
16710 appended to, and contiguous with, the function body. Mach-O stubs
16711 are generated in machopic_output_stub(). */
16714 macho_branch_islands (void)
16717 tree branch_island;
16719 for (branch_island = branch_island_list;
16721 branch_island = TREE_CHAIN (branch_island))
16723 const char *label =
16724 IDENTIFIER_POINTER (BRANCH_ISLAND_LABEL_NAME (branch_island));
16726 IDENTIFIER_POINTER (BRANCH_ISLAND_FUNCTION_NAME (branch_island));
16727 char name_buf[512];
16728 /* Cheap copy of the details from the Darwin ASM_OUTPUT_LABELREF(). */
16729 if (name[0] == '*' || name[0] == '&')
16730 strcpy (name_buf, name+1);
16734 strcpy (name_buf+1, name);
16736 strcpy (tmp_buf, "\n");
16737 strcat (tmp_buf, label);
16738 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
16739 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
16740 dbxout_stabd (N_SLINE, BRANCH_ISLAND_LINE_NUMBER (branch_island));
16741 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
16744 strcat (tmp_buf, ":\n\tmflr r0\n\tbcl 20,31,");
16745 strcat (tmp_buf, label);
16746 strcat (tmp_buf, "_pic\n");
16747 strcat (tmp_buf, label);
16748 strcat (tmp_buf, "_pic:\n\tmflr r11\n");
16750 strcat (tmp_buf, "\taddis r11,r11,ha16(");
16751 strcat (tmp_buf, name_buf);
16752 strcat (tmp_buf, " - ");
16753 strcat (tmp_buf, label);
16754 strcat (tmp_buf, "_pic)\n");
16756 strcat (tmp_buf, "\tmtlr r0\n");
16758 strcat (tmp_buf, "\taddi r12,r11,lo16(");
16759 strcat (tmp_buf, name_buf);
16760 strcat (tmp_buf, " - ");
16761 strcat (tmp_buf, label);
16762 strcat (tmp_buf, "_pic)\n");
16764 strcat (tmp_buf, "\tmtctr r12\n\tbctr\n");
16768 strcat (tmp_buf, ":\nlis r12,hi16(");
16769 strcat (tmp_buf, name_buf);
16770 strcat (tmp_buf, ")\n\tori r12,r12,lo16(");
16771 strcat (tmp_buf, name_buf);
16772 strcat (tmp_buf, ")\n\tmtctr r12\n\tbctr");
16774 output_asm_insn (tmp_buf, 0);
16775 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
16776 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
16777 dbxout_stabd (N_SLINE, BRANCH_ISLAND_LINE_NUMBER (branch_island));
16778 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
16781 branch_island_list = 0;
16784 /* NO_PREVIOUS_DEF checks in the link list whether the function name is
16785 already there or not. */
16788 no_previous_def (tree function_name)
16790 tree branch_island;
16791 for (branch_island = branch_island_list;
16793 branch_island = TREE_CHAIN (branch_island))
16794 if (function_name == BRANCH_ISLAND_FUNCTION_NAME (branch_island))
16799 /* GET_PREV_LABEL gets the label name from the previous definition of
16803 get_prev_label (tree function_name)
16805 tree branch_island;
16806 for (branch_island = branch_island_list;
16808 branch_island = TREE_CHAIN (branch_island))
16809 if (function_name == BRANCH_ISLAND_FUNCTION_NAME (branch_island))
16810 return BRANCH_ISLAND_LABEL_NAME (branch_island);
16814 /* INSN is either a function call or a millicode call. It may have an
16815 unconditional jump in its delay slot.
16817 CALL_DEST is the routine we are calling. */
16820 output_call (rtx insn, rtx *operands, int dest_operand_number,
16821 int cookie_operand_number)
16823 static char buf[256];
16824 if (GET_CODE (operands[dest_operand_number]) == SYMBOL_REF
16825 && (INTVAL (operands[cookie_operand_number]) & CALL_LONG))
16828 tree funname = get_identifier (XSTR (operands[dest_operand_number], 0));
16830 if (no_previous_def (funname))
16832 int line_number = 0;
16833 rtx label_rtx = gen_label_rtx ();
16834 char *label_buf, temp_buf[256];
16835 ASM_GENERATE_INTERNAL_LABEL (temp_buf, "L",
16836 CODE_LABEL_NUMBER (label_rtx));
16837 label_buf = temp_buf[0] == '*' ? temp_buf + 1 : temp_buf;
16838 labelname = get_identifier (label_buf);
16839 for (; insn && GET_CODE (insn) != NOTE; insn = PREV_INSN (insn));
16841 line_number = NOTE_LINE_NUMBER (insn);
16842 add_compiler_branch_island (labelname, funname, line_number);
16845 labelname = get_prev_label (funname);
16847 /* "jbsr foo, L42" is Mach-O for "Link as 'bl foo' if a 'bl'
16848 instruction will reach 'foo', otherwise link as 'bl L42'".
16849 "L42" should be a 'branch island', that will do a far jump to
16850 'foo'. Branch islands are generated in
16851 macho_branch_islands(). */
16852 sprintf (buf, "jbsr %%z%d,%.246s",
16853 dest_operand_number, IDENTIFIER_POINTER (labelname));
16856 sprintf (buf, "bl %%z%d", dest_operand_number);
16860 /* Generate PIC and indirect symbol stubs. */
16863 machopic_output_stub (FILE *file, const char *symb, const char *stub)
16865 unsigned int length;
16866 char *symbol_name, *lazy_ptr_name;
16867 char *local_label_0;
16868 static int label = 0;
16870 /* Lose our funky encoding stuff so it doesn't contaminate the stub. */
16871 symb = (*targetm.strip_name_encoding) (symb);
16874 length = strlen (symb);
16875 symbol_name = alloca (length + 32);
16876 GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name, symb, length);
16878 lazy_ptr_name = alloca (length + 32);
16879 GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name, symb, length);
16882 machopic_picsymbol_stub1_section ();
16884 machopic_symbol_stub1_section ();
16888 fprintf (file, "\t.align 5\n");
16890 fprintf (file, "%s:\n", stub);
16891 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
16894 local_label_0 = alloca (sizeof ("\"L00000000000$spb\""));
16895 sprintf (local_label_0, "\"L%011d$spb\"", label);
16897 fprintf (file, "\tmflr r0\n");
16898 fprintf (file, "\tbcl 20,31,%s\n", local_label_0);
16899 fprintf (file, "%s:\n\tmflr r11\n", local_label_0);
16900 fprintf (file, "\taddis r11,r11,ha16(%s-%s)\n",
16901 lazy_ptr_name, local_label_0);
16902 fprintf (file, "\tmtlr r0\n");
16903 fprintf (file, "\t%s r12,lo16(%s-%s)(r11)\n",
16904 (TARGET_64BIT ? "ldu" : "lwzu"),
16905 lazy_ptr_name, local_label_0);
16906 fprintf (file, "\tmtctr r12\n");
16907 fprintf (file, "\tbctr\n");
16911 fprintf (file, "\t.align 4\n");
16913 fprintf (file, "%s:\n", stub);
16914 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
16916 fprintf (file, "\tlis r11,ha16(%s)\n", lazy_ptr_name);
16917 fprintf (file, "\t%s r12,lo16(%s)(r11)\n",
16918 (TARGET_64BIT ? "ldu" : "lwzu"),
16920 fprintf (file, "\tmtctr r12\n");
16921 fprintf (file, "\tbctr\n");
16924 machopic_lazy_symbol_ptr_section ();
16925 fprintf (file, "%s:\n", lazy_ptr_name);
16926 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
16927 fprintf (file, "%sdyld_stub_binding_helper\n",
16928 (TARGET_64BIT ? DOUBLE_INT_ASM_OP : "\t.long\t"));
16931 /* Legitimize PIC addresses. If the address is already
16932 position-independent, we return ORIG. Newly generated
16933 position-independent addresses go into a reg. This is REG if non
16934 zero, otherwise we allocate register(s) as necessary. */
16936 #define SMALL_INT(X) ((unsigned) (INTVAL (X) + 0x8000) < 0x10000)
16939 rs6000_machopic_legitimize_pic_address (rtx orig, enum machine_mode mode,
16944 if (reg == NULL && ! reload_in_progress && ! reload_completed)
16945 reg = gen_reg_rtx (Pmode);
16947 if (GET_CODE (orig) == CONST)
16951 if (GET_CODE (XEXP (orig, 0)) == PLUS
16952 && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
16956 gcc_assert (GET_CODE (XEXP (orig, 0)) == PLUS);
16958 /* Use a different reg for the intermediate value, as
16959 it will be marked UNCHANGING. */
16960 rtx reg_temp = no_new_pseudos ? reg : gen_reg_rtx (Pmode);
16962 base = rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 0),
16965 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 1),
16969 gcc_assert (GET_CODE (XEXP (orig, 0)) == PLUS);
16971 /* Use a different reg for the intermediate value, as
16972 it will be marked UNCHANGING. */
16973 reg_temp = no_new_pseudos ? reg : gen_reg_rtx (Pmode);
16974 base = rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 0),
16977 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 1),
16981 if (GET_CODE (offset) == CONST_INT)
16983 if (SMALL_INT (offset))
16984 return plus_constant (base, INTVAL (offset));
16985 else if (! reload_in_progress && ! reload_completed)
16986 offset = force_reg (Pmode, offset);
16989 rtx mem = force_const_mem (Pmode, orig);
16990 return machopic_legitimize_pic_address (mem, Pmode, reg);
16993 return gen_rtx_PLUS (Pmode, base, offset);
16996 /* Fall back on generic machopic code. */
16997 return machopic_legitimize_pic_address (orig, mode, reg);
17000 /* This is just a placeholder to make linking work without having to
17001 add this to the generic Darwin EXTRA_SECTIONS. If -mcall-aix is
17002 ever needed for Darwin (not too likely!) this would have to get a
17003 real definition. */
17010 /* Output a .machine directive for the Darwin assembler, and call
17011 the generic start_file routine. */
17014 rs6000_darwin_file_start (void)
17016 static const struct
17022 { "ppc64", "ppc64", MASK_64BIT },
17023 { "970", "ppc970", MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64 },
17024 { "power4", "ppc970", 0 },
17025 { "G5", "ppc970", 0 },
17026 { "7450", "ppc7450", 0 },
17027 { "7400", "ppc7400", MASK_ALTIVEC },
17028 { "G4", "ppc7400", 0 },
17029 { "750", "ppc750", 0 },
17030 { "740", "ppc750", 0 },
17031 { "G3", "ppc750", 0 },
17032 { "604e", "ppc604e", 0 },
17033 { "604", "ppc604", 0 },
17034 { "603e", "ppc603", 0 },
17035 { "603", "ppc603", 0 },
17036 { "601", "ppc601", 0 },
17037 { NULL, "ppc", 0 } };
17038 const char *cpu_id = "";
17041 rs6000_file_start ();
17043 /* Determine the argument to -mcpu=. Default to G3 if not specified. */
17044 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
17045 if (rs6000_select[i].set_arch_p && rs6000_select[i].string
17046 && rs6000_select[i].string[0] != '\0')
17047 cpu_id = rs6000_select[i].string;
17049 /* Look through the mapping array. Pick the first name that either
17050 matches the argument, has a bit set in IF_SET that is also set
17051 in the target flags, or has a NULL name. */
17054 while (mapping[i].arg != NULL
17055 && strcmp (mapping[i].arg, cpu_id) != 0
17056 && (mapping[i].if_set & target_flags) == 0)
17059 fprintf (asm_out_file, "\t.machine %s\n", mapping[i].name);
17062 #endif /* TARGET_MACHO */
17065 static unsigned int
17066 rs6000_elf_section_type_flags (tree decl, const char *name, int reloc)
17068 return default_section_type_flags_1 (decl, name, reloc,
17069 flag_pic || DEFAULT_ABI == ABI_AIX);
17072 /* Record an element in the table of global constructors. SYMBOL is
17073 a SYMBOL_REF of the function to be called; PRIORITY is a number
17074 between 0 and MAX_INIT_PRIORITY.
17076 This differs from default_named_section_asm_out_constructor in
17077 that we have special handling for -mrelocatable. */
17080 rs6000_elf_asm_out_constructor (rtx symbol, int priority)
17082 const char *section = ".ctors";
17085 if (priority != DEFAULT_INIT_PRIORITY)
17087 sprintf (buf, ".ctors.%.5u",
17088 /* Invert the numbering so the linker puts us in the proper
17089 order; constructors are run from right to left, and the
17090 linker sorts in increasing order. */
17091 MAX_INIT_PRIORITY - priority);
17095 named_section_flags (section, SECTION_WRITE);
17096 assemble_align (POINTER_SIZE);
17098 if (TARGET_RELOCATABLE)
17100 fputs ("\t.long (", asm_out_file);
17101 output_addr_const (asm_out_file, symbol);
17102 fputs (")@fixup\n", asm_out_file);
17105 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
17109 rs6000_elf_asm_out_destructor (rtx symbol, int priority)
17111 const char *section = ".dtors";
17114 if (priority != DEFAULT_INIT_PRIORITY)
17116 sprintf (buf, ".dtors.%.5u",
17117 /* Invert the numbering so the linker puts us in the proper
17118 order; constructors are run from right to left, and the
17119 linker sorts in increasing order. */
17120 MAX_INIT_PRIORITY - priority);
17124 named_section_flags (section, SECTION_WRITE);
17125 assemble_align (POINTER_SIZE);
17127 if (TARGET_RELOCATABLE)
17129 fputs ("\t.long (", asm_out_file);
17130 output_addr_const (asm_out_file, symbol);
17131 fputs (")@fixup\n", asm_out_file);
17134 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
17138 rs6000_elf_declare_function_name (FILE *file, const char *name, tree decl)
17142 fputs ("\t.section\t\".opd\",\"aw\"\n\t.align 3\n", file);
17143 ASM_OUTPUT_LABEL (file, name);
17144 fputs (DOUBLE_INT_ASM_OP, file);
17145 rs6000_output_function_entry (file, name);
17146 fputs (",.TOC.@tocbase,0\n\t.previous\n", file);
17149 fputs ("\t.size\t", file);
17150 assemble_name (file, name);
17151 fputs (",24\n\t.type\t.", file);
17152 assemble_name (file, name);
17153 fputs (",@function\n", file);
17154 if (TREE_PUBLIC (decl) && ! DECL_WEAK (decl))
17156 fputs ("\t.globl\t.", file);
17157 assemble_name (file, name);
17162 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
17163 ASM_DECLARE_RESULT (file, DECL_RESULT (decl));
17164 rs6000_output_function_entry (file, name);
17165 fputs (":\n", file);
17169 if (TARGET_RELOCATABLE
17170 && (get_pool_size () != 0 || current_function_profile)
17175 (*targetm.asm_out.internal_label) (file, "LCL", rs6000_pic_labelno);
17177 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
17178 fprintf (file, "\t.long ");
17179 assemble_name (file, buf);
17181 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
17182 assemble_name (file, buf);
17186 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
17187 ASM_DECLARE_RESULT (file, DECL_RESULT (decl));
17189 if (DEFAULT_ABI == ABI_AIX)
17191 const char *desc_name, *orig_name;
17193 orig_name = (*targetm.strip_name_encoding) (name);
17194 desc_name = orig_name;
17195 while (*desc_name == '.')
17198 if (TREE_PUBLIC (decl))
17199 fprintf (file, "\t.globl %s\n", desc_name);
17201 fprintf (file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
17202 fprintf (file, "%s:\n", desc_name);
17203 fprintf (file, "\t.long %s\n", orig_name);
17204 fputs ("\t.long _GLOBAL_OFFSET_TABLE_\n", file);
17205 if (DEFAULT_ABI == ABI_AIX)
17206 fputs ("\t.long 0\n", file);
17207 fprintf (file, "\t.previous\n");
17209 ASM_OUTPUT_LABEL (file, name);
17213 rs6000_elf_end_indicate_exec_stack (void)
17216 file_end_indicate_exec_stack ();
17222 rs6000_xcoff_asm_globalize_label (FILE *stream, const char *name)
17224 fputs (GLOBAL_ASM_OP, stream);
17225 RS6000_OUTPUT_BASENAME (stream, name);
17226 putc ('\n', stream);
17230 rs6000_xcoff_asm_named_section (const char *name, unsigned int flags,
17231 tree decl ATTRIBUTE_UNUSED)
17234 static const char * const suffix[3] = { "PR", "RO", "RW" };
17236 if (flags & SECTION_CODE)
17238 else if (flags & SECTION_WRITE)
17243 fprintf (asm_out_file, "\t.csect %s%s[%s],%u\n",
17244 (flags & SECTION_CODE) ? "." : "",
17245 name, suffix[smclass], flags & SECTION_ENTSIZE);
17249 rs6000_xcoff_select_section (tree decl, int reloc,
17250 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
17252 if (decl_readonly_section_1 (decl, reloc, 1))
17254 if (TREE_PUBLIC (decl))
17255 read_only_data_section ();
17257 read_only_private_data_section ();
17261 if (TREE_PUBLIC (decl))
17264 private_data_section ();
17269 rs6000_xcoff_unique_section (tree decl, int reloc ATTRIBUTE_UNUSED)
17273 /* Use select_section for private and uninitialized data. */
17274 if (!TREE_PUBLIC (decl)
17275 || DECL_COMMON (decl)
17276 || DECL_INITIAL (decl) == NULL_TREE
17277 || DECL_INITIAL (decl) == error_mark_node
17278 || (flag_zero_initialized_in_bss
17279 && initializer_zerop (DECL_INITIAL (decl))))
17282 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
17283 name = (*targetm.strip_name_encoding) (name);
17284 DECL_SECTION_NAME (decl) = build_string (strlen (name), name);
17287 /* Select section for constant in constant pool.
17289 On RS/6000, all constants are in the private read-only data area.
17290 However, if this is being placed in the TOC it must be output as a
17294 rs6000_xcoff_select_rtx_section (enum machine_mode mode, rtx x,
17295 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
17297 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
17300 read_only_private_data_section ();
17303 /* Remove any trailing [DS] or the like from the symbol name. */
17305 static const char *
17306 rs6000_xcoff_strip_name_encoding (const char *name)
17311 len = strlen (name);
17312 if (name[len - 1] == ']')
17313 return ggc_alloc_string (name, len - 4);
17318 /* Section attributes. AIX is always PIC. */
17320 static unsigned int
17321 rs6000_xcoff_section_type_flags (tree decl, const char *name, int reloc)
17323 unsigned int align;
17324 unsigned int flags = default_section_type_flags_1 (decl, name, reloc, 1);
17326 /* Align to at least UNIT size. */
17327 if (flags & SECTION_CODE)
17328 align = MIN_UNITS_PER_WORD;
17330 /* Increase alignment of large objects if not already stricter. */
17331 align = MAX ((DECL_ALIGN (decl) / BITS_PER_UNIT),
17332 int_size_in_bytes (TREE_TYPE (decl)) > MIN_UNITS_PER_WORD
17333 ? UNITS_PER_FP_WORD : MIN_UNITS_PER_WORD);
17335 return flags | (exact_log2 (align) & SECTION_ENTSIZE);
17338 /* Output at beginning of assembler file.
17340 Initialize the section names for the RS/6000 at this point.
17342 Specify filename, including full path, to assembler.
17344 We want to go into the TOC section so at least one .toc will be emitted.
17345 Also, in order to output proper .bs/.es pairs, we need at least one static
17346 [RW] section emitted.
17348 Finally, declare mcount when profiling to make the assembler happy. */
17351 rs6000_xcoff_file_start (void)
17353 rs6000_gen_section_name (&xcoff_bss_section_name,
17354 main_input_filename, ".bss_");
17355 rs6000_gen_section_name (&xcoff_private_data_section_name,
17356 main_input_filename, ".rw_");
17357 rs6000_gen_section_name (&xcoff_read_only_section_name,
17358 main_input_filename, ".ro_");
17360 fputs ("\t.file\t", asm_out_file);
17361 output_quoted_string (asm_out_file, main_input_filename);
17362 fputc ('\n', asm_out_file);
17363 if (write_symbols != NO_DEBUG)
17364 private_data_section ();
17367 fprintf (asm_out_file, "\t.extern %s\n", RS6000_MCOUNT);
17368 rs6000_file_start ();
17371 /* Output at end of assembler file.
17372 On the RS/6000, referencing data should automatically pull in text. */
17375 rs6000_xcoff_file_end (void)
17378 fputs ("_section_.text:\n", asm_out_file);
17380 fputs (TARGET_32BIT
17381 ? "\t.long _section_.text\n" : "\t.llong _section_.text\n",
17384 #endif /* TARGET_XCOFF */
17387 /* Cross-module name binding. Darwin does not support overriding
17388 functions at dynamic-link time. */
17391 rs6000_binds_local_p (tree decl)
17393 return default_binds_local_p_1 (decl, 0);
17397 /* Compute a (partial) cost for rtx X. Return true if the complete
17398 cost has been computed, and false if subexpressions should be
17399 scanned. In either case, *TOTAL contains the cost result. */
17402 rs6000_rtx_costs (rtx x, int code, int outer_code, int *total)
17404 enum machine_mode mode = GET_MODE (x);
17408 /* On the RS/6000, if it is valid in the insn, it is free. */
17410 if (((outer_code == SET
17411 || outer_code == PLUS
17412 || outer_code == MINUS)
17413 && (CONST_OK_FOR_LETTER_P (INTVAL (x), 'I')
17414 || CONST_OK_FOR_LETTER_P (INTVAL (x), 'L')))
17415 || (outer_code == AND
17416 && (CONST_OK_FOR_LETTER_P (INTVAL (x), 'K')
17417 || (CONST_OK_FOR_LETTER_P (INTVAL (x),
17418 mode == SImode ? 'L' : 'J'))
17419 || mask_operand (x, VOIDmode)))
17420 || ((outer_code == IOR || outer_code == XOR)
17421 && (CONST_OK_FOR_LETTER_P (INTVAL (x), 'K')
17422 || (CONST_OK_FOR_LETTER_P (INTVAL (x),
17423 mode == SImode ? 'L' : 'J'))))
17424 || outer_code == ASHIFT
17425 || outer_code == ASHIFTRT
17426 || outer_code == LSHIFTRT
17427 || outer_code == ROTATE
17428 || outer_code == ROTATERT
17429 || outer_code == ZERO_EXTRACT
17430 || (outer_code == MULT
17431 && CONST_OK_FOR_LETTER_P (INTVAL (x), 'I'))
17432 || ((outer_code == DIV || outer_code == UDIV
17433 || outer_code == MOD || outer_code == UMOD)
17434 && exact_log2 (INTVAL (x)) >= 0)
17435 || (outer_code == COMPARE
17436 && (CONST_OK_FOR_LETTER_P (INTVAL (x), 'I')
17437 || CONST_OK_FOR_LETTER_P (INTVAL (x), 'K')))
17438 || (outer_code == EQ
17439 && (CONST_OK_FOR_LETTER_P (INTVAL (x), 'I')
17440 || CONST_OK_FOR_LETTER_P (INTVAL (x), 'K')
17441 || (CONST_OK_FOR_LETTER_P (INTVAL (x),
17442 mode == SImode ? 'L' : 'J'))))
17443 || (outer_code == GTU
17444 && CONST_OK_FOR_LETTER_P (INTVAL (x), 'I'))
17445 || (outer_code == LTU
17446 && CONST_OK_FOR_LETTER_P (INTVAL (x), 'P')))
17451 else if ((outer_code == PLUS
17452 && reg_or_add_cint64_operand (x, VOIDmode))
17453 || (outer_code == MINUS
17454 && reg_or_sub_cint64_operand (x, VOIDmode))
17455 || ((outer_code == SET
17456 || outer_code == IOR
17457 || outer_code == XOR)
17459 & ~ (unsigned HOST_WIDE_INT) 0xffffffff) == 0))
17461 *total = COSTS_N_INSNS (1);
17468 && ((outer_code == AND
17469 && (CONST_OK_FOR_LETTER_P (INTVAL (x), 'K')
17470 || CONST_OK_FOR_LETTER_P (INTVAL (x), 'L')
17471 || mask64_operand (x, DImode)))
17472 || ((outer_code == IOR || outer_code == XOR)
17473 && CONST_DOUBLE_HIGH (x) == 0
17474 && (CONST_DOUBLE_LOW (x)
17475 & ~ (unsigned HOST_WIDE_INT) 0xffff) == 0)))
17480 else if (mode == DImode
17481 && (outer_code == SET
17482 || outer_code == IOR
17483 || outer_code == XOR)
17484 && CONST_DOUBLE_HIGH (x) == 0)
17486 *total = COSTS_N_INSNS (1);
17495 /* When optimizing for size, MEM should be slightly more expensive
17496 than generating address, e.g., (plus (reg) (const)).
17497 L1 cache latency is about two instructions. */
17498 *total = optimize_size ? COSTS_N_INSNS (1) + 1 : COSTS_N_INSNS (2);
17506 if (mode == DFmode)
17508 if (GET_CODE (XEXP (x, 0)) == MULT)
17510 /* FNMA accounted in outer NEG. */
17511 if (outer_code == NEG)
17512 *total = rs6000_cost->dmul - rs6000_cost->fp;
17514 *total = rs6000_cost->dmul;
17517 *total = rs6000_cost->fp;
17519 else if (mode == SFmode)
17521 /* FNMA accounted in outer NEG. */
17522 if (outer_code == NEG && GET_CODE (XEXP (x, 0)) == MULT)
17525 *total = rs6000_cost->fp;
17527 else if (GET_CODE (XEXP (x, 0)) == MULT)
17529 /* The rs6000 doesn't have shift-and-add instructions. */
17530 rs6000_rtx_costs (XEXP (x, 0), MULT, PLUS, total);
17531 *total += COSTS_N_INSNS (1);
17534 *total = COSTS_N_INSNS (1);
17538 if (mode == DFmode)
17540 if (GET_CODE (XEXP (x, 0)) == MULT)
17542 /* FNMA accounted in outer NEG. */
17543 if (outer_code == NEG)
17546 *total = rs6000_cost->dmul;
17549 *total = rs6000_cost->fp;
17551 else if (mode == SFmode)
17553 /* FNMA accounted in outer NEG. */
17554 if (outer_code == NEG && GET_CODE (XEXP (x, 0)) == MULT)
17557 *total = rs6000_cost->fp;
17559 else if (GET_CODE (XEXP (x, 0)) == MULT)
17561 /* The rs6000 doesn't have shift-and-sub instructions. */
17562 rs6000_rtx_costs (XEXP (x, 0), MULT, MINUS, total);
17563 *total += COSTS_N_INSNS (1);
17566 *total = COSTS_N_INSNS (1);
17570 if (GET_CODE (XEXP (x, 1)) == CONST_INT
17571 && CONST_OK_FOR_LETTER_P (INTVAL (XEXP (x, 1)), 'I'))
17573 if (INTVAL (XEXP (x, 1)) >= -256
17574 && INTVAL (XEXP (x, 1)) <= 255)
17575 *total = rs6000_cost->mulsi_const9;
17577 *total = rs6000_cost->mulsi_const;
17579 /* FMA accounted in outer PLUS/MINUS. */
17580 else if ((mode == DFmode || mode == SFmode)
17581 && (outer_code == PLUS || outer_code == MINUS))
17583 else if (mode == DFmode)
17584 *total = rs6000_cost->dmul;
17585 else if (mode == SFmode)
17586 *total = rs6000_cost->fp;
17587 else if (mode == DImode)
17588 *total = rs6000_cost->muldi;
17590 *total = rs6000_cost->mulsi;
17595 if (FLOAT_MODE_P (mode))
17597 *total = mode == DFmode ? rs6000_cost->ddiv
17598 : rs6000_cost->sdiv;
17605 if (GET_CODE (XEXP (x, 1)) == CONST_INT
17606 && exact_log2 (INTVAL (XEXP (x, 1))) >= 0)
17608 if (code == DIV || code == MOD)
17610 *total = COSTS_N_INSNS (2);
17613 *total = COSTS_N_INSNS (1);
17617 if (GET_MODE (XEXP (x, 1)) == DImode)
17618 *total = rs6000_cost->divdi;
17620 *total = rs6000_cost->divsi;
17622 /* Add in shift and subtract for MOD. */
17623 if (code == MOD || code == UMOD)
17624 *total += COSTS_N_INSNS (2);
17628 *total = COSTS_N_INSNS (4);
17632 if (outer_code == AND || outer_code == IOR || outer_code == XOR)
17643 *total = COSTS_N_INSNS (1);
17651 /* Handle mul_highpart. */
17652 if (outer_code == TRUNCATE
17653 && GET_CODE (XEXP (x, 0)) == MULT)
17655 if (mode == DImode)
17656 *total = rs6000_cost->muldi;
17658 *total = rs6000_cost->mulsi;
17661 else if (outer_code == AND)
17664 *total = COSTS_N_INSNS (1);
17669 if (GET_CODE (XEXP (x, 0)) == MEM)
17672 *total = COSTS_N_INSNS (1);
17678 if (!FLOAT_MODE_P (mode))
17680 *total = COSTS_N_INSNS (1);
17686 case UNSIGNED_FLOAT:
17690 case FLOAT_TRUNCATE:
17691 *total = rs6000_cost->fp;
17695 switch (XINT (x, 1))
17698 *total = rs6000_cost->fp;
17710 *total = COSTS_N_INSNS (1);
17713 else if (FLOAT_MODE_P (mode)
17714 && TARGET_PPC_GFXOPT && TARGET_HARD_FLOAT && TARGET_FPRS)
17716 *total = rs6000_cost->fp;
17724 /* Carry bit requires mode == Pmode.
17725 NEG or PLUS already counted so only add one. */
17727 && (outer_code == NEG || outer_code == PLUS))
17729 *total = COSTS_N_INSNS (1);
17732 if (outer_code == SET)
17734 if (XEXP (x, 1) == const0_rtx)
17736 *total = COSTS_N_INSNS (2);
17739 else if (mode == Pmode)
17741 *total = COSTS_N_INSNS (3);
17750 if (outer_code == SET && (XEXP (x, 1) == const0_rtx))
17752 *total = COSTS_N_INSNS (2);
17756 if (outer_code == COMPARE)
17770 /* A C expression returning the cost of moving data from a register of class
17771 CLASS1 to one of CLASS2. */
17774 rs6000_register_move_cost (enum machine_mode mode,
17775 enum reg_class from, enum reg_class to)
17777 /* Moves from/to GENERAL_REGS. */
17778 if (reg_classes_intersect_p (to, GENERAL_REGS)
17779 || reg_classes_intersect_p (from, GENERAL_REGS))
17781 if (! reg_classes_intersect_p (to, GENERAL_REGS))
17784 if (from == FLOAT_REGS || from == ALTIVEC_REGS)
17785 return (rs6000_memory_move_cost (mode, from, 0)
17786 + rs6000_memory_move_cost (mode, GENERAL_REGS, 0));
17788 /* It's more expensive to move CR_REGS than CR0_REGS because of the
17790 else if (from == CR_REGS)
17794 /* A move will cost one instruction per GPR moved. */
17795 return 2 * HARD_REGNO_NREGS (0, mode);
17798 /* Moving between two similar registers is just one instruction. */
17799 else if (reg_classes_intersect_p (to, from))
17800 return mode == TFmode ? 4 : 2;
17802 /* Everything else has to go through GENERAL_REGS. */
17804 return (rs6000_register_move_cost (mode, GENERAL_REGS, to)
17805 + rs6000_register_move_cost (mode, from, GENERAL_REGS));
17808 /* A C expressions returning the cost of moving data of MODE from a register to
17812 rs6000_memory_move_cost (enum machine_mode mode, enum reg_class class,
17813 int in ATTRIBUTE_UNUSED)
17815 if (reg_classes_intersect_p (class, GENERAL_REGS))
17816 return 4 * HARD_REGNO_NREGS (0, mode);
17817 else if (reg_classes_intersect_p (class, FLOAT_REGS))
17818 return 4 * HARD_REGNO_NREGS (32, mode);
17819 else if (reg_classes_intersect_p (class, ALTIVEC_REGS))
17820 return 4 * HARD_REGNO_NREGS (FIRST_ALTIVEC_REGNO, mode);
17822 return 4 + rs6000_register_move_cost (mode, class, GENERAL_REGS);
17825 /* Return an RTX representing where to find the function value of a
17826 function returning MODE. */
17828 rs6000_complex_function_value (enum machine_mode mode)
17830 unsigned int regno;
17832 enum machine_mode inner = GET_MODE_INNER (mode);
17833 unsigned int inner_bytes = GET_MODE_SIZE (inner);
17835 if (FLOAT_MODE_P (mode) && TARGET_HARD_FLOAT && TARGET_FPRS)
17836 regno = FP_ARG_RETURN;
17839 regno = GP_ARG_RETURN;
17841 /* 32-bit is OK since it'll go in r3/r4. */
17842 if (TARGET_32BIT && inner_bytes >= 4)
17843 return gen_rtx_REG (mode, regno);
17846 if (inner_bytes >= 8)
17847 return gen_rtx_REG (mode, regno);
17849 r1 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno),
17851 r2 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno + 1),
17852 GEN_INT (inner_bytes));
17853 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
17856 /* Define how to find the value returned by a function.
17857 VALTYPE is the data type of the value (as a tree).
17858 If the precise function being called is known, FUNC is its FUNCTION_DECL;
17859 otherwise, FUNC is 0.
17861 On the SPE, both FPs and vectors are returned in r3.
17863 On RS/6000 an integer value is in r3 and a floating-point value is in
17864 fp1, unless -msoft-float. */
17867 rs6000_function_value (tree valtype, tree func ATTRIBUTE_UNUSED)
17869 enum machine_mode mode;
17870 unsigned int regno;
17872 /* Special handling for structs in darwin64. */
17873 if (rs6000_darwin64_abi
17874 && TYPE_MODE (valtype) == BLKmode
17875 && TREE_CODE (valtype) == RECORD_TYPE
17876 && int_size_in_bytes (valtype) > 0)
17878 CUMULATIVE_ARGS valcum;
17882 valcum.fregno = FP_ARG_MIN_REG;
17883 valcum.vregno = ALTIVEC_ARG_MIN_REG;
17884 /* Do a trial code generation as if this were going to be passed as
17885 an argument; if any part goes in memory, we return NULL. */
17886 valret = rs6000_darwin64_record_arg (&valcum, valtype, 1, true);
17889 /* Otherwise fall through to standard ABI rules. */
17892 if (TARGET_32BIT && TARGET_POWERPC64 && TYPE_MODE (valtype) == DImode)
17894 /* Long long return value need be split in -mpowerpc64, 32bit ABI. */
17895 return gen_rtx_PARALLEL (DImode,
17897 gen_rtx_EXPR_LIST (VOIDmode,
17898 gen_rtx_REG (SImode, GP_ARG_RETURN),
17900 gen_rtx_EXPR_LIST (VOIDmode,
17901 gen_rtx_REG (SImode,
17902 GP_ARG_RETURN + 1),
17906 if ((INTEGRAL_TYPE_P (valtype)
17907 && TYPE_PRECISION (valtype) < BITS_PER_WORD)
17908 || POINTER_TYPE_P (valtype))
17909 mode = TARGET_32BIT ? SImode : DImode;
17911 mode = TYPE_MODE (valtype);
17913 if (SCALAR_FLOAT_TYPE_P (valtype) && TARGET_HARD_FLOAT && TARGET_FPRS)
17914 regno = FP_ARG_RETURN;
17915 else if (TREE_CODE (valtype) == COMPLEX_TYPE
17916 && targetm.calls.split_complex_arg)
17917 return rs6000_complex_function_value (mode);
17918 else if (TREE_CODE (valtype) == VECTOR_TYPE
17919 && TARGET_ALTIVEC && TARGET_ALTIVEC_ABI
17920 && ALTIVEC_VECTOR_MODE (mode))
17921 regno = ALTIVEC_ARG_RETURN;
17922 else if (TARGET_E500_DOUBLE && TARGET_HARD_FLOAT
17923 && (mode == DFmode || mode == DCmode))
17924 return spe_build_register_parallel (mode, GP_ARG_RETURN);
17926 regno = GP_ARG_RETURN;
17928 return gen_rtx_REG (mode, regno);
17931 /* Define how to find the value returned by a library function
17932 assuming the value has mode MODE. */
17934 rs6000_libcall_value (enum machine_mode mode)
17936 unsigned int regno;
17938 if (TARGET_32BIT && TARGET_POWERPC64 && mode == DImode)
17940 /* Long long return value need be split in -mpowerpc64, 32bit ABI. */
17941 return gen_rtx_PARALLEL (DImode,
17943 gen_rtx_EXPR_LIST (VOIDmode,
17944 gen_rtx_REG (SImode, GP_ARG_RETURN),
17946 gen_rtx_EXPR_LIST (VOIDmode,
17947 gen_rtx_REG (SImode,
17948 GP_ARG_RETURN + 1),
17952 if (GET_MODE_CLASS (mode) == MODE_FLOAT
17953 && TARGET_HARD_FLOAT && TARGET_FPRS)
17954 regno = FP_ARG_RETURN;
17955 else if (ALTIVEC_VECTOR_MODE (mode)
17956 && TARGET_ALTIVEC && TARGET_ALTIVEC_ABI)
17957 regno = ALTIVEC_ARG_RETURN;
17958 else if (COMPLEX_MODE_P (mode) && targetm.calls.split_complex_arg)
17959 return rs6000_complex_function_value (mode);
17960 else if (TARGET_E500_DOUBLE && TARGET_HARD_FLOAT
17961 && (mode == DFmode || mode == DCmode))
17962 return spe_build_register_parallel (mode, GP_ARG_RETURN);
17964 regno = GP_ARG_RETURN;
17966 return gen_rtx_REG (mode, regno);
17969 /* Define the offset between two registers, FROM to be eliminated and its
17970 replacement TO, at the start of a routine. */
17972 rs6000_initial_elimination_offset (int from, int to)
17974 rs6000_stack_t *info = rs6000_stack_info ();
17975 HOST_WIDE_INT offset;
17977 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
17978 offset = info->push_p ? 0 : -info->total_size;
17979 else if (from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
17980 offset = info->total_size;
17981 else if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
17982 offset = info->push_p ? info->total_size : 0;
17983 else if (from == RS6000_PIC_OFFSET_TABLE_REGNUM)
17986 gcc_unreachable ();
17991 /* Return true if TYPE is a SPE or AltiVec opaque type. */
17994 rs6000_is_opaque_type (tree type)
17996 return (type == opaque_V2SI_type_node
17997 || type == opaque_V2SF_type_node
17998 || type == opaque_p_V2SI_type_node
17999 || type == opaque_V4SI_type_node);
18003 rs6000_dwarf_register_span (rtx reg)
18008 && (SPE_VECTOR_MODE (GET_MODE (reg))
18009 || (TARGET_E500_DOUBLE && GET_MODE (reg) == DFmode)))
18014 regno = REGNO (reg);
18016 /* The duality of the SPE register size wreaks all kinds of havoc.
18017 This is a way of distinguishing r0 in 32-bits from r0 in
18020 gen_rtx_PARALLEL (VOIDmode,
18023 gen_rtx_REG (SImode, regno + 1200),
18024 gen_rtx_REG (SImode, regno))
18026 gen_rtx_REG (SImode, regno),
18027 gen_rtx_REG (SImode, regno + 1200)));
18030 /* Map internal gcc register numbers to DWARF2 register numbers. */
18033 rs6000_dbx_register_number (unsigned int regno)
18035 if (regno <= 63 || write_symbols != DWARF2_DEBUG)
18037 if (regno == MQ_REGNO)
18039 if (regno == LINK_REGISTER_REGNUM)
18041 if (regno == COUNT_REGISTER_REGNUM)
18043 if (CR_REGNO_P (regno))
18044 return regno - CR0_REGNO + 86;
18045 if (regno == XER_REGNO)
18047 if (ALTIVEC_REGNO_P (regno))
18048 return regno - FIRST_ALTIVEC_REGNO + 1124;
18049 if (regno == VRSAVE_REGNO)
18051 if (regno == VSCR_REGNO)
18053 if (regno == SPE_ACC_REGNO)
18055 if (regno == SPEFSCR_REGNO)
18057 /* SPE high reg number. We get these values of regno from
18058 rs6000_dwarf_register_span. */
18059 gcc_assert (regno >= 1200 && regno < 1232);
18063 /* target hook eh_return_filter_mode */
18064 static enum machine_mode
18065 rs6000_eh_return_filter_mode (void)
18067 return TARGET_32BIT ? SImode : word_mode;
18070 /* Target hook for vector_mode_supported_p. */
18072 rs6000_vector_mode_supported_p (enum machine_mode mode)
18075 if (TARGET_SPE && SPE_VECTOR_MODE (mode))
18078 else if (TARGET_ALTIVEC && ALTIVEC_VECTOR_MODE (mode))
18085 /* Target hook for invalid_arg_for_unprototyped_fn. */
18086 static const char *
18087 invalid_arg_for_unprototyped_fn (tree typelist, tree funcdecl, tree val)
18089 return (!rs6000_darwin64_abi
18091 && TREE_CODE (TREE_TYPE (val)) == VECTOR_TYPE
18092 && (funcdecl == NULL_TREE
18093 || (TREE_CODE (funcdecl) == FUNCTION_DECL
18094 && DECL_BUILT_IN_CLASS (funcdecl) != BUILT_IN_MD)))
18095 ? N_("AltiVec argument passed to unprototyped function")
18099 #include "gt-rs6000.h"