1 /* Subroutines used for code generation on IBM RS/6000.
2 Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
4 Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it
9 under the terms of the GNU General Public License as published
10 by the Free Software Foundation; either version 2, or (at your
11 option) any later version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT
14 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
15 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
16 License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the
20 Free Software Foundation, 59 Temple Place - Suite 330, Boston,
21 MA 02111-1307, USA. */
25 #include "coretypes.h"
29 #include "hard-reg-set.h"
31 #include "insn-config.h"
32 #include "conditions.h"
33 #include "insn-attr.h"
43 #include "basic-block.h"
44 #include "integrate.h"
50 #include "target-def.h"
51 #include "langhooks.h"
53 #include "cfglayout.h"
54 #include "sched-int.h"
55 #include "tree-gimple.h"
57 #include "xcoffout.h" /* get declarations of xcoff_*_section_name */
60 #ifndef TARGET_NO_PROTOTYPE
61 #define TARGET_NO_PROTOTYPE 0
64 #define EASY_VECTOR_15(n) ((n) >= -16 && (n) <= 15)
65 #define EASY_VECTOR_15_ADD_SELF(n) ((n) >= 0x10 && (n) <= 0x1e \
68 #define min(A,B) ((A) < (B) ? (A) : (B))
69 #define max(A,B) ((A) > (B) ? (A) : (B))
71 /* Structure used to define the rs6000 stack */
72 typedef struct rs6000_stack {
73 int first_gp_reg_save; /* first callee saved GP register used */
74 int first_fp_reg_save; /* first callee saved FP register used */
75 int first_altivec_reg_save; /* first callee saved AltiVec register used */
76 int lr_save_p; /* true if the link reg needs to be saved */
77 int cr_save_p; /* true if the CR reg needs to be saved */
78 unsigned int vrsave_mask; /* mask of vec registers to save */
79 int toc_save_p; /* true if the TOC needs to be saved */
80 int push_p; /* true if we need to allocate stack space */
81 int calls_p; /* true if the function makes any calls */
82 enum rs6000_abi abi; /* which ABI to use */
83 int gp_save_offset; /* offset to save GP regs from initial SP */
84 int fp_save_offset; /* offset to save FP regs from initial SP */
85 int altivec_save_offset; /* offset to save AltiVec regs from initial SP */
86 int lr_save_offset; /* offset to save LR from initial SP */
87 int cr_save_offset; /* offset to save CR from initial SP */
88 int vrsave_save_offset; /* offset to save VRSAVE from initial SP */
89 int spe_gp_save_offset; /* offset to save spe 64-bit gprs */
90 int toc_save_offset; /* offset to save the TOC pointer */
91 int varargs_save_offset; /* offset to save the varargs registers */
92 int ehrd_offset; /* offset to EH return data */
93 int reg_size; /* register size (4 or 8) */
94 int varargs_size; /* size to hold V.4 args passed in regs */
95 HOST_WIDE_INT vars_size; /* variable save area size */
96 int parm_size; /* outgoing parameter size */
97 int save_size; /* save area size */
98 int fixed_size; /* fixed size of stack frame */
99 int gp_size; /* size of saved GP registers */
100 int fp_size; /* size of saved FP registers */
101 int altivec_size; /* size of saved AltiVec registers */
102 int cr_size; /* size to hold CR if not in save_size */
103 int lr_size; /* size to hold LR if not in save_size */
104 int vrsave_size; /* size to hold VRSAVE if not in save_size */
105 int altivec_padding_size; /* size of altivec alignment padding if
107 int spe_gp_size; /* size of 64-bit GPR save size for SPE */
108 int spe_padding_size;
109 int toc_size; /* size to hold TOC if not in save_size */
110 HOST_WIDE_INT total_size; /* total bytes allocated for stack */
111 int spe_64bit_regs_used;
114 /* Target cpu type */
116 enum processor_type rs6000_cpu;
117 struct rs6000_cpu_select rs6000_select[3] =
119 /* switch name, tune arch */
120 { (const char *)0, "--with-cpu=", 1, 1 },
121 { (const char *)0, "-mcpu=", 1, 1 },
122 { (const char *)0, "-mtune=", 1, 0 },
125 /* Always emit branch hint bits. */
126 static GTY(()) bool rs6000_always_hint;
128 /* Schedule instructions for group formation. */
129 static GTY(()) bool rs6000_sched_groups;
131 /* Support adjust_priority scheduler hook
132 and -mprioritize-restricted-insns= option. */
133 const char *rs6000_sched_restricted_insns_priority_str;
134 int rs6000_sched_restricted_insns_priority;
136 /* Support for -msched-costly-dep option. */
137 const char *rs6000_sched_costly_dep_str;
138 enum rs6000_dependence_cost rs6000_sched_costly_dep;
140 /* Support for -minsert-sched-nops option. */
141 const char *rs6000_sched_insert_nops_str;
142 enum rs6000_nop_insertion rs6000_sched_insert_nops;
144 /* Size of long double */
145 const char *rs6000_long_double_size_string;
146 int rs6000_long_double_type_size;
148 /* Whether -mabi=altivec has appeared */
149 int rs6000_altivec_abi;
151 /* Whether VRSAVE instructions should be generated. */
152 int rs6000_altivec_vrsave;
154 /* String from -mvrsave= option. */
155 const char *rs6000_altivec_vrsave_string;
157 /* Nonzero if we want SPE ABI extensions. */
160 /* Whether isel instructions should be generated. */
163 /* Whether SPE simd instructions should be generated. */
166 /* Nonzero if floating point operations are done in the GPRs. */
167 int rs6000_float_gprs = 0;
169 /* String from -mfloat-gprs=. */
170 const char *rs6000_float_gprs_string;
172 /* String from -misel=. */
173 const char *rs6000_isel_string;
175 /* String from -mspe=. */
176 const char *rs6000_spe_string;
178 /* Set to nonzero once AIX common-mode calls have been defined. */
179 static GTY(()) int common_mode_defined;
181 /* Save information from a "cmpxx" operation until the branch or scc is
183 rtx rs6000_compare_op0, rs6000_compare_op1;
184 int rs6000_compare_fp_p;
186 /* Label number of label created for -mrelocatable, to call to so we can
187 get the address of the GOT section */
188 int rs6000_pic_labelno;
191 /* Which abi to adhere to */
192 const char *rs6000_abi_name;
194 /* Semantics of the small data area */
195 enum rs6000_sdata_type rs6000_sdata = SDATA_DATA;
197 /* Which small data model to use */
198 const char *rs6000_sdata_name = (char *)0;
200 /* Counter for labels which are to be placed in .fixup. */
201 int fixuplabelno = 0;
204 /* Bit size of immediate TLS offsets and string from which it is decoded. */
205 int rs6000_tls_size = 32;
206 const char *rs6000_tls_size_string;
208 /* ABI enumeration available for subtarget to use. */
209 enum rs6000_abi rs6000_current_abi;
211 /* ABI string from -mabi= option. */
212 const char *rs6000_abi_string;
215 const char *rs6000_debug_name;
216 int rs6000_debug_stack; /* debug stack applications */
217 int rs6000_debug_arg; /* debug argument handling */
219 /* Value is TRUE if register/mode pair is accepatable. */
220 bool rs6000_hard_regno_mode_ok_p[NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
223 static GTY(()) tree opaque_V2SI_type_node;
224 static GTY(()) tree opaque_V2SF_type_node;
225 static GTY(()) tree opaque_p_V2SI_type_node;
226 static GTY(()) tree V16QI_type_node;
227 static GTY(()) tree V2SI_type_node;
228 static GTY(()) tree V2SF_type_node;
229 static GTY(()) tree V4HI_type_node;
230 static GTY(()) tree V4SI_type_node;
231 static GTY(()) tree V4SF_type_node;
232 static GTY(()) tree V8HI_type_node;
233 static GTY(()) tree unsigned_V16QI_type_node;
234 static GTY(()) tree unsigned_V8HI_type_node;
235 static GTY(()) tree unsigned_V4SI_type_node;
236 static GTY(()) tree bool_char_type_node; /* __bool char */
237 static GTY(()) tree bool_short_type_node; /* __bool short */
238 static GTY(()) tree bool_int_type_node; /* __bool int */
239 static GTY(()) tree pixel_type_node; /* __pixel */
240 static GTY(()) tree bool_V16QI_type_node; /* __vector __bool char */
241 static GTY(()) tree bool_V8HI_type_node; /* __vector __bool short */
242 static GTY(()) tree bool_V4SI_type_node; /* __vector __bool int */
243 static GTY(()) tree pixel_V8HI_type_node; /* __vector __pixel */
245 int rs6000_warn_altivec_long = 1; /* On by default. */
246 const char *rs6000_warn_altivec_long_switch;
248 const char *rs6000_traceback_name;
250 traceback_default = 0,
256 /* Flag to say the TOC is initialized */
258 char toc_label_name[10];
260 /* Alias set for saves and restores from the rs6000 stack. */
261 static GTY(()) int rs6000_sr_alias_set;
263 /* Call distance, overridden by -mlongcall and #pragma longcall(1).
264 The only place that looks at this is rs6000_set_default_type_attributes;
265 everywhere else should rely on the presence or absence of a longcall
266 attribute on the function declaration. */
267 int rs6000_default_long_calls;
268 const char *rs6000_longcall_switch;
270 /* Control alignment for fields within structures. */
271 /* String from -malign-XXXXX. */
272 const char *rs6000_alignment_string;
273 int rs6000_alignment_flags;
275 struct builtin_description
277 /* mask is not const because we're going to alter it below. This
278 nonsense will go away when we rewrite the -march infrastructure
279 to give us more target flag bits. */
281 const enum insn_code icode;
282 const char *const name;
283 const enum rs6000_builtins code;
286 /* Target cpu costs. */
288 struct processor_costs {
289 const int mulsi; /* cost of SImode multiplication. */
290 const int mulsi_const; /* cost of SImode multiplication by constant. */
291 const int mulsi_const9; /* cost of SImode mult by short constant. */
292 const int muldi; /* cost of DImode multiplication. */
293 const int divsi; /* cost of SImode division. */
294 const int divdi; /* cost of DImode division. */
295 const int fp; /* cost of simple SFmode and DFmode insns. */
296 const int dmul; /* cost of DFmode multiplication (and fmadd). */
297 const int sdiv; /* cost of SFmode division (fdivs). */
298 const int ddiv; /* cost of DFmode division (fdiv). */
301 const struct processor_costs *rs6000_cost;
303 /* Processor costs (relative to an add) */
305 /* Instruction size costs on 32bit processors. */
307 struct processor_costs size32_cost = {
308 COSTS_N_INSNS (1), /* mulsi */
309 COSTS_N_INSNS (1), /* mulsi_const */
310 COSTS_N_INSNS (1), /* mulsi_const9 */
311 COSTS_N_INSNS (1), /* muldi */
312 COSTS_N_INSNS (1), /* divsi */
313 COSTS_N_INSNS (1), /* divdi */
314 COSTS_N_INSNS (1), /* fp */
315 COSTS_N_INSNS (1), /* dmul */
316 COSTS_N_INSNS (1), /* sdiv */
317 COSTS_N_INSNS (1), /* ddiv */
320 /* Instruction size costs on 64bit processors. */
322 struct processor_costs size64_cost = {
323 COSTS_N_INSNS (1), /* mulsi */
324 COSTS_N_INSNS (1), /* mulsi_const */
325 COSTS_N_INSNS (1), /* mulsi_const9 */
326 COSTS_N_INSNS (1), /* muldi */
327 COSTS_N_INSNS (1), /* divsi */
328 COSTS_N_INSNS (1), /* divdi */
329 COSTS_N_INSNS (1), /* fp */
330 COSTS_N_INSNS (1), /* dmul */
331 COSTS_N_INSNS (1), /* sdiv */
332 COSTS_N_INSNS (1), /* ddiv */
335 /* Instruction costs on RIOS1 processors. */
337 struct processor_costs rios1_cost = {
338 COSTS_N_INSNS (5), /* mulsi */
339 COSTS_N_INSNS (4), /* mulsi_const */
340 COSTS_N_INSNS (3), /* mulsi_const9 */
341 COSTS_N_INSNS (5), /* muldi */
342 COSTS_N_INSNS (19), /* divsi */
343 COSTS_N_INSNS (19), /* divdi */
344 COSTS_N_INSNS (2), /* fp */
345 COSTS_N_INSNS (2), /* dmul */
346 COSTS_N_INSNS (19), /* sdiv */
347 COSTS_N_INSNS (19), /* ddiv */
350 /* Instruction costs on RIOS2 processors. */
352 struct processor_costs rios2_cost = {
353 COSTS_N_INSNS (2), /* mulsi */
354 COSTS_N_INSNS (2), /* mulsi_const */
355 COSTS_N_INSNS (2), /* mulsi_const9 */
356 COSTS_N_INSNS (2), /* muldi */
357 COSTS_N_INSNS (13), /* divsi */
358 COSTS_N_INSNS (13), /* divdi */
359 COSTS_N_INSNS (2), /* fp */
360 COSTS_N_INSNS (2), /* dmul */
361 COSTS_N_INSNS (17), /* sdiv */
362 COSTS_N_INSNS (17), /* ddiv */
365 /* Instruction costs on RS64A processors. */
367 struct processor_costs rs64a_cost = {
368 COSTS_N_INSNS (20), /* mulsi */
369 COSTS_N_INSNS (12), /* mulsi_const */
370 COSTS_N_INSNS (8), /* mulsi_const9 */
371 COSTS_N_INSNS (34), /* muldi */
372 COSTS_N_INSNS (65), /* divsi */
373 COSTS_N_INSNS (67), /* divdi */
374 COSTS_N_INSNS (4), /* fp */
375 COSTS_N_INSNS (4), /* dmul */
376 COSTS_N_INSNS (31), /* sdiv */
377 COSTS_N_INSNS (31), /* ddiv */
380 /* Instruction costs on MPCCORE processors. */
382 struct processor_costs mpccore_cost = {
383 COSTS_N_INSNS (2), /* mulsi */
384 COSTS_N_INSNS (2), /* mulsi_const */
385 COSTS_N_INSNS (2), /* mulsi_const9 */
386 COSTS_N_INSNS (2), /* muldi */
387 COSTS_N_INSNS (6), /* divsi */
388 COSTS_N_INSNS (6), /* divdi */
389 COSTS_N_INSNS (4), /* fp */
390 COSTS_N_INSNS (5), /* dmul */
391 COSTS_N_INSNS (10), /* sdiv */
392 COSTS_N_INSNS (17), /* ddiv */
395 /* Instruction costs on PPC403 processors. */
397 struct processor_costs ppc403_cost = {
398 COSTS_N_INSNS (4), /* mulsi */
399 COSTS_N_INSNS (4), /* mulsi_const */
400 COSTS_N_INSNS (4), /* mulsi_const9 */
401 COSTS_N_INSNS (4), /* muldi */
402 COSTS_N_INSNS (33), /* divsi */
403 COSTS_N_INSNS (33), /* divdi */
404 COSTS_N_INSNS (11), /* fp */
405 COSTS_N_INSNS (11), /* dmul */
406 COSTS_N_INSNS (11), /* sdiv */
407 COSTS_N_INSNS (11), /* ddiv */
410 /* Instruction costs on PPC405 processors. */
412 struct processor_costs ppc405_cost = {
413 COSTS_N_INSNS (5), /* mulsi */
414 COSTS_N_INSNS (4), /* mulsi_const */
415 COSTS_N_INSNS (3), /* mulsi_const9 */
416 COSTS_N_INSNS (5), /* muldi */
417 COSTS_N_INSNS (35), /* divsi */
418 COSTS_N_INSNS (35), /* divdi */
419 COSTS_N_INSNS (11), /* fp */
420 COSTS_N_INSNS (11), /* dmul */
421 COSTS_N_INSNS (11), /* sdiv */
422 COSTS_N_INSNS (11), /* ddiv */
425 /* Instruction costs on PPC440 processors. */
427 struct processor_costs ppc440_cost = {
428 COSTS_N_INSNS (3), /* mulsi */
429 COSTS_N_INSNS (2), /* mulsi_const */
430 COSTS_N_INSNS (2), /* mulsi_const9 */
431 COSTS_N_INSNS (3), /* muldi */
432 COSTS_N_INSNS (34), /* divsi */
433 COSTS_N_INSNS (34), /* divdi */
434 COSTS_N_INSNS (5), /* fp */
435 COSTS_N_INSNS (5), /* dmul */
436 COSTS_N_INSNS (19), /* sdiv */
437 COSTS_N_INSNS (33), /* ddiv */
440 /* Instruction costs on PPC601 processors. */
442 struct processor_costs ppc601_cost = {
443 COSTS_N_INSNS (5), /* mulsi */
444 COSTS_N_INSNS (5), /* mulsi_const */
445 COSTS_N_INSNS (5), /* mulsi_const9 */
446 COSTS_N_INSNS (5), /* muldi */
447 COSTS_N_INSNS (36), /* divsi */
448 COSTS_N_INSNS (36), /* divdi */
449 COSTS_N_INSNS (4), /* fp */
450 COSTS_N_INSNS (5), /* dmul */
451 COSTS_N_INSNS (17), /* sdiv */
452 COSTS_N_INSNS (31), /* ddiv */
455 /* Instruction costs on PPC603 processors. */
457 struct processor_costs ppc603_cost = {
458 COSTS_N_INSNS (5), /* mulsi */
459 COSTS_N_INSNS (3), /* mulsi_const */
460 COSTS_N_INSNS (2), /* mulsi_const9 */
461 COSTS_N_INSNS (5), /* muldi */
462 COSTS_N_INSNS (37), /* divsi */
463 COSTS_N_INSNS (37), /* divdi */
464 COSTS_N_INSNS (3), /* fp */
465 COSTS_N_INSNS (4), /* dmul */
466 COSTS_N_INSNS (18), /* sdiv */
467 COSTS_N_INSNS (33), /* ddiv */
470 /* Instruction costs on PPC604 processors. */
472 struct processor_costs ppc604_cost = {
473 COSTS_N_INSNS (4), /* mulsi */
474 COSTS_N_INSNS (4), /* mulsi_const */
475 COSTS_N_INSNS (4), /* mulsi_const9 */
476 COSTS_N_INSNS (4), /* muldi */
477 COSTS_N_INSNS (20), /* divsi */
478 COSTS_N_INSNS (20), /* divdi */
479 COSTS_N_INSNS (3), /* fp */
480 COSTS_N_INSNS (3), /* dmul */
481 COSTS_N_INSNS (18), /* sdiv */
482 COSTS_N_INSNS (32), /* ddiv */
485 /* Instruction costs on PPC604e processors. */
487 struct processor_costs ppc604e_cost = {
488 COSTS_N_INSNS (2), /* mulsi */
489 COSTS_N_INSNS (2), /* mulsi_const */
490 COSTS_N_INSNS (2), /* mulsi_const9 */
491 COSTS_N_INSNS (2), /* muldi */
492 COSTS_N_INSNS (20), /* divsi */
493 COSTS_N_INSNS (20), /* divdi */
494 COSTS_N_INSNS (3), /* fp */
495 COSTS_N_INSNS (3), /* dmul */
496 COSTS_N_INSNS (18), /* sdiv */
497 COSTS_N_INSNS (32), /* ddiv */
500 /* Instruction costs on PPC620 processors. */
502 struct processor_costs ppc620_cost = {
503 COSTS_N_INSNS (5), /* mulsi */
504 COSTS_N_INSNS (4), /* mulsi_const */
505 COSTS_N_INSNS (3), /* mulsi_const9 */
506 COSTS_N_INSNS (7), /* muldi */
507 COSTS_N_INSNS (21), /* divsi */
508 COSTS_N_INSNS (37), /* divdi */
509 COSTS_N_INSNS (3), /* fp */
510 COSTS_N_INSNS (3), /* dmul */
511 COSTS_N_INSNS (18), /* sdiv */
512 COSTS_N_INSNS (32), /* ddiv */
515 /* Instruction costs on PPC630 processors. */
517 struct processor_costs ppc630_cost = {
518 COSTS_N_INSNS (5), /* mulsi */
519 COSTS_N_INSNS (4), /* mulsi_const */
520 COSTS_N_INSNS (3), /* mulsi_const9 */
521 COSTS_N_INSNS (7), /* muldi */
522 COSTS_N_INSNS (21), /* divsi */
523 COSTS_N_INSNS (37), /* divdi */
524 COSTS_N_INSNS (3), /* fp */
525 COSTS_N_INSNS (3), /* dmul */
526 COSTS_N_INSNS (17), /* sdiv */
527 COSTS_N_INSNS (21), /* ddiv */
530 /* Instruction costs on PPC750 and PPC7400 processors. */
532 struct processor_costs ppc750_cost = {
533 COSTS_N_INSNS (5), /* mulsi */
534 COSTS_N_INSNS (3), /* mulsi_const */
535 COSTS_N_INSNS (2), /* mulsi_const9 */
536 COSTS_N_INSNS (5), /* muldi */
537 COSTS_N_INSNS (17), /* divsi */
538 COSTS_N_INSNS (17), /* divdi */
539 COSTS_N_INSNS (3), /* fp */
540 COSTS_N_INSNS (3), /* dmul */
541 COSTS_N_INSNS (17), /* sdiv */
542 COSTS_N_INSNS (31), /* ddiv */
545 /* Instruction costs on PPC7450 processors. */
547 struct processor_costs ppc7450_cost = {
548 COSTS_N_INSNS (4), /* mulsi */
549 COSTS_N_INSNS (3), /* mulsi_const */
550 COSTS_N_INSNS (3), /* mulsi_const9 */
551 COSTS_N_INSNS (4), /* muldi */
552 COSTS_N_INSNS (23), /* divsi */
553 COSTS_N_INSNS (23), /* divdi */
554 COSTS_N_INSNS (5), /* fp */
555 COSTS_N_INSNS (5), /* dmul */
556 COSTS_N_INSNS (21), /* sdiv */
557 COSTS_N_INSNS (35), /* ddiv */
560 /* Instruction costs on PPC8540 processors. */
562 struct processor_costs ppc8540_cost = {
563 COSTS_N_INSNS (4), /* mulsi */
564 COSTS_N_INSNS (4), /* mulsi_const */
565 COSTS_N_INSNS (4), /* mulsi_const9 */
566 COSTS_N_INSNS (4), /* muldi */
567 COSTS_N_INSNS (19), /* divsi */
568 COSTS_N_INSNS (19), /* divdi */
569 COSTS_N_INSNS (4), /* fp */
570 COSTS_N_INSNS (4), /* dmul */
571 COSTS_N_INSNS (29), /* sdiv */
572 COSTS_N_INSNS (29), /* ddiv */
575 /* Instruction costs on POWER4 and POWER5 processors. */
577 struct processor_costs power4_cost = {
578 COSTS_N_INSNS (3), /* mulsi */
579 COSTS_N_INSNS (2), /* mulsi_const */
580 COSTS_N_INSNS (2), /* mulsi_const9 */
581 COSTS_N_INSNS (4), /* muldi */
582 COSTS_N_INSNS (18), /* divsi */
583 COSTS_N_INSNS (34), /* divdi */
584 COSTS_N_INSNS (3), /* fp */
585 COSTS_N_INSNS (3), /* dmul */
586 COSTS_N_INSNS (17), /* sdiv */
587 COSTS_N_INSNS (17), /* ddiv */
591 static bool rs6000_function_ok_for_sibcall (tree, tree);
592 static int num_insns_constant_wide (HOST_WIDE_INT);
593 static void validate_condition_mode (enum rtx_code, enum machine_mode);
594 static rtx rs6000_generate_compare (enum rtx_code);
595 static void rs6000_maybe_dead (rtx);
596 static void rs6000_emit_stack_tie (void);
597 static void rs6000_frame_related (rtx, rtx, HOST_WIDE_INT, rtx, rtx);
598 static rtx spe_synthesize_frame_save (rtx);
599 static bool spe_func_has_64bit_regs_p (void);
600 static void emit_frame_save (rtx, rtx, enum machine_mode, unsigned int,
602 static rtx gen_frame_mem_offset (enum machine_mode, rtx, int);
603 static void rs6000_emit_allocate_stack (HOST_WIDE_INT, int);
604 static unsigned rs6000_hash_constant (rtx);
605 static unsigned toc_hash_function (const void *);
606 static int toc_hash_eq (const void *, const void *);
607 static int constant_pool_expr_1 (rtx, int *, int *);
608 static bool constant_pool_expr_p (rtx);
609 static bool toc_relative_expr_p (rtx);
610 static bool legitimate_small_data_p (enum machine_mode, rtx);
611 static bool legitimate_indexed_address_p (rtx, int);
612 static bool legitimate_indirect_address_p (rtx, int);
613 static bool macho_lo_sum_memory_operand (rtx x, enum machine_mode mode);
614 static bool legitimate_lo_sum_address_p (enum machine_mode, rtx, int);
615 static struct machine_function * rs6000_init_machine_status (void);
616 static bool rs6000_assemble_integer (rtx, unsigned int, int);
617 #ifdef HAVE_GAS_HIDDEN
618 static void rs6000_assemble_visibility (tree, int);
620 static int rs6000_ra_ever_killed (void);
621 static tree rs6000_handle_longcall_attribute (tree *, tree, tree, int, bool *);
622 static tree rs6000_handle_altivec_attribute (tree *, tree, tree, int, bool *);
623 static void rs6000_eliminate_indexed_memrefs (rtx operands[2]);
624 static const char *rs6000_mangle_fundamental_type (tree);
625 extern const struct attribute_spec rs6000_attribute_table[];
626 static void rs6000_set_default_type_attributes (tree);
627 static void rs6000_output_function_prologue (FILE *, HOST_WIDE_INT);
628 static void rs6000_output_function_epilogue (FILE *, HOST_WIDE_INT);
629 static void rs6000_output_mi_thunk (FILE *, tree, HOST_WIDE_INT, HOST_WIDE_INT,
631 static rtx rs6000_emit_set_long_const (rtx, HOST_WIDE_INT, HOST_WIDE_INT);
632 static bool rs6000_return_in_memory (tree, tree);
633 static void rs6000_file_start (void);
635 static unsigned int rs6000_elf_section_type_flags (tree, const char *, int);
636 static void rs6000_elf_asm_out_constructor (rtx, int);
637 static void rs6000_elf_asm_out_destructor (rtx, int);
638 static void rs6000_elf_select_section (tree, int, unsigned HOST_WIDE_INT);
639 static void rs6000_elf_unique_section (tree, int);
640 static void rs6000_elf_select_rtx_section (enum machine_mode, rtx,
641 unsigned HOST_WIDE_INT);
642 static void rs6000_elf_encode_section_info (tree, rtx, int)
644 static bool rs6000_elf_in_small_data_p (tree);
647 static void rs6000_xcoff_asm_globalize_label (FILE *, const char *);
648 static void rs6000_xcoff_asm_named_section (const char *, unsigned int);
649 static void rs6000_xcoff_select_section (tree, int, unsigned HOST_WIDE_INT);
650 static void rs6000_xcoff_unique_section (tree, int);
651 static void rs6000_xcoff_select_rtx_section (enum machine_mode, rtx,
652 unsigned HOST_WIDE_INT);
653 static const char * rs6000_xcoff_strip_name_encoding (const char *);
654 static unsigned int rs6000_xcoff_section_type_flags (tree, const char *, int);
655 static void rs6000_xcoff_file_start (void);
656 static void rs6000_xcoff_file_end (void);
659 static bool rs6000_binds_local_p (tree);
661 static int rs6000_variable_issue (FILE *, int, rtx, int);
662 static bool rs6000_rtx_costs (rtx, int, int, int *);
663 static int rs6000_adjust_cost (rtx, rtx, rtx, int);
664 static bool is_microcoded_insn (rtx);
665 static int is_dispatch_slot_restricted (rtx);
666 static bool is_cracked_insn (rtx);
667 static bool is_branch_slot_insn (rtx);
668 static int rs6000_adjust_priority (rtx, int);
669 static int rs6000_issue_rate (void);
670 static bool rs6000_is_costly_dependence (rtx, rtx, rtx, int, int);
671 static rtx get_next_active_insn (rtx, rtx);
672 static bool insn_terminates_group_p (rtx , enum group_termination);
673 static bool is_costly_group (rtx *, rtx);
674 static int force_new_group (int, FILE *, rtx *, rtx, bool *, int, int *);
675 static int redefine_groups (FILE *, int, rtx, rtx);
676 static int pad_groups (FILE *, int, rtx, rtx);
677 static void rs6000_sched_finish (FILE *, int);
678 static int rs6000_use_sched_lookahead (void);
680 static void rs6000_init_builtins (void);
681 static rtx rs6000_expand_unop_builtin (enum insn_code, tree, rtx);
682 static rtx rs6000_expand_binop_builtin (enum insn_code, tree, rtx);
683 static rtx rs6000_expand_ternop_builtin (enum insn_code, tree, rtx);
684 static rtx rs6000_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
685 static void altivec_init_builtins (void);
686 static void rs6000_common_init_builtins (void);
687 static void rs6000_init_libfuncs (void);
689 static void enable_mask_for_builtins (struct builtin_description *, int,
690 enum rs6000_builtins,
691 enum rs6000_builtins);
692 static tree build_opaque_vector_type (tree, int);
693 static void spe_init_builtins (void);
694 static rtx spe_expand_builtin (tree, rtx, bool *);
695 static rtx spe_expand_stv_builtin (enum insn_code, tree);
696 static rtx spe_expand_predicate_builtin (enum insn_code, tree, rtx);
697 static rtx spe_expand_evsel_builtin (enum insn_code, tree, rtx);
698 static int rs6000_emit_int_cmove (rtx, rtx, rtx, rtx);
699 static rs6000_stack_t *rs6000_stack_info (void);
700 static void debug_stack_info (rs6000_stack_t *);
702 static rtx altivec_expand_builtin (tree, rtx, bool *);
703 static rtx altivec_expand_ld_builtin (tree, rtx, bool *);
704 static rtx altivec_expand_st_builtin (tree, rtx, bool *);
705 static rtx altivec_expand_dst_builtin (tree, rtx, bool *);
706 static rtx altivec_expand_abs_builtin (enum insn_code, tree, rtx);
707 static rtx altivec_expand_predicate_builtin (enum insn_code,
708 const char *, tree, rtx);
709 static rtx altivec_expand_lv_builtin (enum insn_code, tree, rtx);
710 static rtx altivec_expand_stv_builtin (enum insn_code, tree);
711 static void rs6000_parse_abi_options (void);
712 static void rs6000_parse_alignment_option (void);
713 static void rs6000_parse_tls_size_option (void);
714 static void rs6000_parse_yes_no_option (const char *, const char *, int *);
715 static int first_altivec_reg_to_save (void);
716 static unsigned int compute_vrsave_mask (void);
717 static void is_altivec_return_reg (rtx, void *);
718 static rtx generate_set_vrsave (rtx, rs6000_stack_t *, int);
719 int easy_vector_constant (rtx, enum machine_mode);
720 static int easy_vector_same (rtx, enum machine_mode);
721 static int easy_vector_splat_const (int, enum machine_mode);
722 static bool is_ev64_opaque_type (tree);
723 static rtx rs6000_dwarf_register_span (rtx);
724 static rtx rs6000_legitimize_tls_address (rtx, enum tls_model);
725 static rtx rs6000_tls_get_addr (void);
726 static rtx rs6000_got_sym (void);
727 static inline int rs6000_tls_symbol_ref_1 (rtx *, void *);
728 static const char *rs6000_get_some_local_dynamic_name (void);
729 static int rs6000_get_some_local_dynamic_name_1 (rtx *, void *);
730 static rtx rs6000_complex_function_value (enum machine_mode);
731 static rtx rs6000_spe_function_arg (CUMULATIVE_ARGS *,
732 enum machine_mode, tree);
733 static rtx rs6000_mixed_function_arg (enum machine_mode, tree, int);
734 static void rs6000_move_block_from_reg (int regno, rtx x, int nregs);
735 static void setup_incoming_varargs (CUMULATIVE_ARGS *,
736 enum machine_mode, tree,
738 static bool rs6000_pass_by_reference (CUMULATIVE_ARGS *, enum machine_mode,
741 static void macho_branch_islands (void);
742 static void add_compiler_branch_island (tree, tree, int);
743 static int no_previous_def (tree function_name);
744 static tree get_prev_label (tree function_name);
747 static tree rs6000_build_builtin_va_list (void);
748 static tree rs6000_gimplify_va_arg (tree, tree, tree *, tree *);
749 static bool rs6000_must_pass_in_stack (enum machine_mode, tree);
751 /* Hash table stuff for keeping track of TOC entries. */
753 struct toc_hash_struct GTY(())
755 /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
756 ASM_OUTPUT_SPECIAL_POOL_ENTRY_P. */
758 enum machine_mode key_mode;
762 static GTY ((param_is (struct toc_hash_struct))) htab_t toc_hash_table;
764 /* Default register names. */
765 char rs6000_reg_names[][8] =
767 "0", "1", "2", "3", "4", "5", "6", "7",
768 "8", "9", "10", "11", "12", "13", "14", "15",
769 "16", "17", "18", "19", "20", "21", "22", "23",
770 "24", "25", "26", "27", "28", "29", "30", "31",
771 "0", "1", "2", "3", "4", "5", "6", "7",
772 "8", "9", "10", "11", "12", "13", "14", "15",
773 "16", "17", "18", "19", "20", "21", "22", "23",
774 "24", "25", "26", "27", "28", "29", "30", "31",
775 "mq", "lr", "ctr","ap",
776 "0", "1", "2", "3", "4", "5", "6", "7",
778 /* AltiVec registers. */
779 "0", "1", "2", "3", "4", "5", "6", "7",
780 "8", "9", "10", "11", "12", "13", "14", "15",
781 "16", "17", "18", "19", "20", "21", "22", "23",
782 "24", "25", "26", "27", "28", "29", "30", "31",
788 #ifdef TARGET_REGNAMES
789 static const char alt_reg_names[][8] =
791 "%r0", "%r1", "%r2", "%r3", "%r4", "%r5", "%r6", "%r7",
792 "%r8", "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
793 "%r16", "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
794 "%r24", "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
795 "%f0", "%f1", "%f2", "%f3", "%f4", "%f5", "%f6", "%f7",
796 "%f8", "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
797 "%f16", "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
798 "%f24", "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
799 "mq", "lr", "ctr", "ap",
800 "%cr0", "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
802 /* AltiVec registers. */
803 "%v0", "%v1", "%v2", "%v3", "%v4", "%v5", "%v6", "%v7",
804 "%v8", "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
805 "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
806 "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
813 #ifndef MASK_STRICT_ALIGN
814 #define MASK_STRICT_ALIGN 0
816 #ifndef TARGET_PROFILE_KERNEL
817 #define TARGET_PROFILE_KERNEL 0
820 /* The VRSAVE bitmask puts bit %v0 as the most significant bit. */
821 #define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))
823 /* Return 1 for a symbol ref for a thread-local storage symbol. */
824 #define RS6000_SYMBOL_REF_TLS_P(RTX) \
825 (GET_CODE (RTX) == SYMBOL_REF && SYMBOL_REF_TLS_MODEL (RTX) != 0)
827 /* Initialize the GCC target structure. */
828 #undef TARGET_ATTRIBUTE_TABLE
829 #define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
830 #undef TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
831 #define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES rs6000_set_default_type_attributes
833 #undef TARGET_ASM_ALIGNED_DI_OP
834 #define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP
836 /* Default unaligned ops are only provided for ELF. Find the ops needed
837 for non-ELF systems. */
838 #ifndef OBJECT_FORMAT_ELF
840 /* For XCOFF. rs6000_assemble_integer will handle unaligned DIs on
842 #undef TARGET_ASM_UNALIGNED_HI_OP
843 #define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
844 #undef TARGET_ASM_UNALIGNED_SI_OP
845 #define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
846 #undef TARGET_ASM_UNALIGNED_DI_OP
847 #define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
850 #undef TARGET_ASM_UNALIGNED_HI_OP
851 #define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
852 #undef TARGET_ASM_UNALIGNED_SI_OP
853 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
857 /* This hook deals with fixups for relocatable code and DI-mode objects
859 #undef TARGET_ASM_INTEGER
860 #define TARGET_ASM_INTEGER rs6000_assemble_integer
862 #ifdef HAVE_GAS_HIDDEN
863 #undef TARGET_ASM_ASSEMBLE_VISIBILITY
864 #define TARGET_ASM_ASSEMBLE_VISIBILITY rs6000_assemble_visibility
867 #undef TARGET_HAVE_TLS
868 #define TARGET_HAVE_TLS HAVE_AS_TLS
870 #undef TARGET_CANNOT_FORCE_CONST_MEM
871 #define TARGET_CANNOT_FORCE_CONST_MEM rs6000_tls_referenced_p
873 #undef TARGET_ASM_FUNCTION_PROLOGUE
874 #define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
875 #undef TARGET_ASM_FUNCTION_EPILOGUE
876 #define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue
878 #undef TARGET_SCHED_VARIABLE_ISSUE
879 #define TARGET_SCHED_VARIABLE_ISSUE rs6000_variable_issue
881 #undef TARGET_SCHED_ISSUE_RATE
882 #define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
883 #undef TARGET_SCHED_ADJUST_COST
884 #define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
885 #undef TARGET_SCHED_ADJUST_PRIORITY
886 #define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority
887 #undef TARGET_SCHED_IS_COSTLY_DEPENDENCE
888 #define TARGET_SCHED_IS_COSTLY_DEPENDENCE rs6000_is_costly_dependence
889 #undef TARGET_SCHED_FINISH
890 #define TARGET_SCHED_FINISH rs6000_sched_finish
892 #undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
893 #define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD rs6000_use_sched_lookahead
895 #undef TARGET_INIT_BUILTINS
896 #define TARGET_INIT_BUILTINS rs6000_init_builtins
898 #undef TARGET_EXPAND_BUILTIN
899 #define TARGET_EXPAND_BUILTIN rs6000_expand_builtin
901 #undef TARGET_MANGLE_FUNDAMENTAL_TYPE
902 #define TARGET_MANGLE_FUNDAMENTAL_TYPE rs6000_mangle_fundamental_type
904 #undef TARGET_INIT_LIBFUNCS
905 #define TARGET_INIT_LIBFUNCS rs6000_init_libfuncs
908 #undef TARGET_BINDS_LOCAL_P
909 #define TARGET_BINDS_LOCAL_P rs6000_binds_local_p
912 #undef TARGET_ASM_OUTPUT_MI_THUNK
913 #define TARGET_ASM_OUTPUT_MI_THUNK rs6000_output_mi_thunk
915 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
916 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_tree_hwi_hwi_tree_true
918 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
919 #define TARGET_FUNCTION_OK_FOR_SIBCALL rs6000_function_ok_for_sibcall
921 #undef TARGET_RTX_COSTS
922 #define TARGET_RTX_COSTS rs6000_rtx_costs
923 #undef TARGET_ADDRESS_COST
924 #define TARGET_ADDRESS_COST hook_int_rtx_0
926 #undef TARGET_VECTOR_OPAQUE_P
927 #define TARGET_VECTOR_OPAQUE_P is_ev64_opaque_type
929 #undef TARGET_DWARF_REGISTER_SPAN
930 #define TARGET_DWARF_REGISTER_SPAN rs6000_dwarf_register_span
932 /* On rs6000, function arguments are promoted, as are function return
934 #undef TARGET_PROMOTE_FUNCTION_ARGS
935 #define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_tree_true
936 #undef TARGET_PROMOTE_FUNCTION_RETURN
937 #define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_tree_true
939 #undef TARGET_RETURN_IN_MEMORY
940 #define TARGET_RETURN_IN_MEMORY rs6000_return_in_memory
942 #undef TARGET_SETUP_INCOMING_VARARGS
943 #define TARGET_SETUP_INCOMING_VARARGS setup_incoming_varargs
945 /* Always strict argument naming on rs6000. */
946 #undef TARGET_STRICT_ARGUMENT_NAMING
947 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
948 #undef TARGET_PRETEND_OUTGOING_VARARGS_NAMED
949 #define TARGET_PRETEND_OUTGOING_VARARGS_NAMED hook_bool_CUMULATIVE_ARGS_true
950 #undef TARGET_SPLIT_COMPLEX_ARG
951 #define TARGET_SPLIT_COMPLEX_ARG hook_bool_tree_true
952 #undef TARGET_MUST_PASS_IN_STACK
953 #define TARGET_MUST_PASS_IN_STACK rs6000_must_pass_in_stack
954 #undef TARGET_PASS_BY_REFERENCE
955 #define TARGET_PASS_BY_REFERENCE rs6000_pass_by_reference
957 #undef TARGET_BUILD_BUILTIN_VA_LIST
958 #define TARGET_BUILD_BUILTIN_VA_LIST rs6000_build_builtin_va_list
960 #undef TARGET_GIMPLIFY_VA_ARG_EXPR
961 #define TARGET_GIMPLIFY_VA_ARG_EXPR rs6000_gimplify_va_arg
963 struct gcc_target targetm = TARGET_INITIALIZER;
966 /* Value is 1 if hard register REGNO can hold a value of machine-mode
969 rs6000_hard_regno_mode_ok (int regno, enum machine_mode mode)
971 /* The GPRs can hold any mode, but values bigger than one register
972 cannot go past R31. */
973 if (INT_REGNO_P (regno))
974 return INT_REGNO_P (regno + HARD_REGNO_NREGS (regno, mode) - 1);
976 /* The float registers can only hold floating modes and DImode. */
977 if (FP_REGNO_P (regno))
979 (GET_MODE_CLASS (mode) == MODE_FLOAT
980 && FP_REGNO_P (regno + HARD_REGNO_NREGS (regno, mode) - 1))
981 || (GET_MODE_CLASS (mode) == MODE_INT
982 && GET_MODE_SIZE (mode) == UNITS_PER_FP_WORD);
984 /* The CR register can only hold CC modes. */
985 if (CR_REGNO_P (regno))
986 return GET_MODE_CLASS (mode) == MODE_CC;
988 if (XER_REGNO_P (regno))
989 return mode == PSImode;
991 /* AltiVec only in AldyVec registers. */
992 if (ALTIVEC_REGNO_P (regno))
993 return ALTIVEC_VECTOR_MODE (mode);
995 /* ...but GPRs can hold SIMD data on the SPE in one register. */
996 if (SPE_SIMD_REGNO_P (regno) && TARGET_SPE && SPE_VECTOR_MODE (mode))
999 /* We cannot put TImode anywhere except general register and it must be
1000 able to fit within the register set. */
1002 return GET_MODE_SIZE (mode) <= UNITS_PER_WORD;
1005 /* Initialize rs6000_hard_regno_mode_ok_p table. */
1007 rs6000_init_hard_regno_mode_ok (void)
1011 for (r = 0; r < FIRST_PSEUDO_REGISTER; ++r)
1012 for (m = 0; m < NUM_MACHINE_MODES; ++m)
1013 if (rs6000_hard_regno_mode_ok (r, m))
1014 rs6000_hard_regno_mode_ok_p[m][r] = true;
1017 /* If not otherwise specified by a target, make 'long double' equivalent to
1020 #ifndef RS6000_DEFAULT_LONG_DOUBLE_SIZE
1021 #define RS6000_DEFAULT_LONG_DOUBLE_SIZE 64
1024 /* Override command line options. Mostly we process the processor
1025 type and sometimes adjust other TARGET_ options. */
1028 rs6000_override_options (const char *default_cpu)
1031 struct rs6000_cpu_select *ptr;
1034 /* Simplifications for entries below. */
1037 POWERPC_BASE_MASK = MASK_POWERPC | MASK_NEW_MNEMONICS,
1038 POWERPC_7400_MASK = POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_ALTIVEC
1041 /* This table occasionally claims that a processor does not support
1042 a particular feature even though it does, but the feature is slower
1043 than the alternative. Thus, it shouldn't be relied on as a
1044 complete description of the processor's support.
1046 Please keep this list in order, and don't forget to update the
1047 documentation in invoke.texi when adding a new processor or
1051 const char *const name; /* Canonical processor name. */
1052 const enum processor_type processor; /* Processor type enum value. */
1053 const int target_enable; /* Target flags to enable. */
1054 } const processor_target_table[]
1055 = {{"401", PROCESSOR_PPC403, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1056 {"403", PROCESSOR_PPC403,
1057 POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_STRICT_ALIGN},
1058 {"405", PROCESSOR_PPC405, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1059 {"405fp", PROCESSOR_PPC405, POWERPC_BASE_MASK},
1060 {"440", PROCESSOR_PPC440, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1061 {"440fp", PROCESSOR_PPC440, POWERPC_BASE_MASK},
1062 {"505", PROCESSOR_MPCCORE, POWERPC_BASE_MASK},
1063 {"601", PROCESSOR_PPC601,
1064 MASK_POWER | POWERPC_BASE_MASK | MASK_MULTIPLE | MASK_STRING},
1065 {"602", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1066 {"603", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1067 {"603e", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1068 {"604", PROCESSOR_PPC604, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1069 {"604e", PROCESSOR_PPC604e, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1070 {"620", PROCESSOR_PPC620,
1071 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
1072 {"630", PROCESSOR_PPC630,
1073 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
1074 {"740", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1075 {"7400", PROCESSOR_PPC7400, POWERPC_7400_MASK},
1076 {"7450", PROCESSOR_PPC7450, POWERPC_7400_MASK},
1077 {"750", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1078 {"801", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1079 {"821", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1080 {"823", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1081 {"8540", PROCESSOR_PPC8540, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1082 {"860", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1083 {"970", PROCESSOR_POWER4,
1084 POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
1085 {"common", PROCESSOR_COMMON, MASK_NEW_MNEMONICS},
1086 {"ec603e", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1087 {"G3", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1088 {"G4", PROCESSOR_PPC7450, POWERPC_7400_MASK},
1089 {"G5", PROCESSOR_POWER4,
1090 POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
1091 {"power", PROCESSOR_POWER, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1092 {"power2", PROCESSOR_POWER,
1093 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING},
1094 {"power3", PROCESSOR_PPC630,
1095 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
1096 {"power4", PROCESSOR_POWER4,
1097 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_MFCRF | MASK_POWERPC64},
1098 {"power5", PROCESSOR_POWER5,
1099 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_MFCRF | MASK_POWERPC64},
1100 {"powerpc", PROCESSOR_POWERPC, POWERPC_BASE_MASK},
1101 {"powerpc64", PROCESSOR_POWERPC64,
1102 POWERPC_BASE_MASK | MASK_POWERPC64},
1103 {"rios", PROCESSOR_RIOS1, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1104 {"rios1", PROCESSOR_RIOS1, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1105 {"rios2", PROCESSOR_RIOS2,
1106 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING},
1107 {"rsc", PROCESSOR_PPC601, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1108 {"rsc1", PROCESSOR_PPC601, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1109 {"rs64a", PROCESSOR_RS64A, POWERPC_BASE_MASK | MASK_POWERPC64},
1112 const size_t ptt_size = ARRAY_SIZE (processor_target_table);
1114 /* Some OSs don't support saving the high part of 64-bit registers on
1115 context switch. Other OSs don't support saving Altivec registers.
1116 On those OSs, we don't touch the MASK_POWERPC64 or MASK_ALTIVEC
1117 settings; if the user wants either, the user must explicitly specify
1118 them and we won't interfere with the user's specification. */
1121 POWER_MASKS = MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING,
1122 POWERPC_MASKS = (POWERPC_BASE_MASK | MASK_PPC_GPOPT
1123 | MASK_PPC_GFXOPT | MASK_POWERPC64 | MASK_ALTIVEC
1127 rs6000_init_hard_regno_mode_ok ();
1129 set_masks = POWER_MASKS | POWERPC_MASKS | MASK_SOFT_FLOAT;
1130 #ifdef OS_MISSING_POWERPC64
1131 if (OS_MISSING_POWERPC64)
1132 set_masks &= ~MASK_POWERPC64;
1134 #ifdef OS_MISSING_ALTIVEC
1135 if (OS_MISSING_ALTIVEC)
1136 set_masks &= ~MASK_ALTIVEC;
1139 /* Don't override these by the processor default if given explicitly. */
1140 set_masks &= ~(target_flags_explicit
1141 & (MASK_MULTIPLE | MASK_STRING | MASK_SOFT_FLOAT));
1143 /* Identify the processor type. */
1144 rs6000_select[0].string = default_cpu;
1145 rs6000_cpu = TARGET_POWERPC64 ? PROCESSOR_DEFAULT64 : PROCESSOR_DEFAULT;
1147 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
1149 ptr = &rs6000_select[i];
1150 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
1152 for (j = 0; j < ptt_size; j++)
1153 if (! strcmp (ptr->string, processor_target_table[j].name))
1155 if (ptr->set_tune_p)
1156 rs6000_cpu = processor_target_table[j].processor;
1158 if (ptr->set_arch_p)
1160 target_flags &= ~set_masks;
1161 target_flags |= (processor_target_table[j].target_enable
1168 error ("bad value (%s) for %s switch", ptr->string, ptr->name);
1175 /* If we are optimizing big endian systems for space, use the load/store
1176 multiple and string instructions. */
1177 if (BYTES_BIG_ENDIAN && optimize_size)
1178 target_flags |= ~target_flags_explicit & (MASK_MULTIPLE | MASK_STRING);
1180 /* Don't allow -mmultiple or -mstring on little endian systems
1181 unless the cpu is a 750, because the hardware doesn't support the
1182 instructions used in little endian mode, and causes an alignment
1183 trap. The 750 does not cause an alignment trap (except when the
1184 target is unaligned). */
1186 if (!BYTES_BIG_ENDIAN && rs6000_cpu != PROCESSOR_PPC750)
1188 if (TARGET_MULTIPLE)
1190 target_flags &= ~MASK_MULTIPLE;
1191 if ((target_flags_explicit & MASK_MULTIPLE) != 0)
1192 warning ("-mmultiple is not supported on little endian systems");
1197 target_flags &= ~MASK_STRING;
1198 if ((target_flags_explicit & MASK_STRING) != 0)
1199 warning ("-mstring is not supported on little endian systems");
1203 /* Set debug flags */
1204 if (rs6000_debug_name)
1206 if (! strcmp (rs6000_debug_name, "all"))
1207 rs6000_debug_stack = rs6000_debug_arg = 1;
1208 else if (! strcmp (rs6000_debug_name, "stack"))
1209 rs6000_debug_stack = 1;
1210 else if (! strcmp (rs6000_debug_name, "arg"))
1211 rs6000_debug_arg = 1;
1213 error ("unknown -mdebug-%s switch", rs6000_debug_name);
1216 if (rs6000_traceback_name)
1218 if (! strncmp (rs6000_traceback_name, "full", 4))
1219 rs6000_traceback = traceback_full;
1220 else if (! strncmp (rs6000_traceback_name, "part", 4))
1221 rs6000_traceback = traceback_part;
1222 else if (! strncmp (rs6000_traceback_name, "no", 2))
1223 rs6000_traceback = traceback_none;
1225 error ("unknown -mtraceback arg `%s'; expecting `full', `partial' or `none'",
1226 rs6000_traceback_name);
1229 /* Set size of long double */
1230 rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
1231 if (rs6000_long_double_size_string)
1234 int size = strtol (rs6000_long_double_size_string, &tail, 10);
1235 if (*tail != '\0' || (size != 64 && size != 128))
1236 error ("Unknown switch -mlong-double-%s",
1237 rs6000_long_double_size_string);
1239 rs6000_long_double_type_size = size;
1242 /* Set Altivec ABI as default for powerpc64 linux. */
1243 if (TARGET_ELF && TARGET_64BIT)
1245 rs6000_altivec_abi = 1;
1246 rs6000_altivec_vrsave = 1;
1249 /* Handle -mabi= options. */
1250 rs6000_parse_abi_options ();
1252 /* Handle -malign-XXXXX option. */
1253 rs6000_parse_alignment_option ();
1255 /* Handle generic -mFOO=YES/NO options. */
1256 rs6000_parse_yes_no_option ("vrsave", rs6000_altivec_vrsave_string,
1257 &rs6000_altivec_vrsave);
1258 rs6000_parse_yes_no_option ("isel", rs6000_isel_string,
1260 rs6000_parse_yes_no_option ("spe", rs6000_spe_string, &rs6000_spe);
1261 rs6000_parse_yes_no_option ("float-gprs", rs6000_float_gprs_string,
1262 &rs6000_float_gprs);
1264 /* Handle -mtls-size option. */
1265 rs6000_parse_tls_size_option ();
1267 #ifdef SUBTARGET_OVERRIDE_OPTIONS
1268 SUBTARGET_OVERRIDE_OPTIONS;
1270 #ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
1271 SUBSUBTARGET_OVERRIDE_OPTIONS;
1277 error ("AltiVec and E500 instructions cannot coexist");
1279 /* The e500 does not have string instructions, and we set
1280 MASK_STRING above when optimizing for size. */
1281 if ((target_flags & MASK_STRING) != 0)
1282 target_flags = target_flags & ~MASK_STRING;
1284 /* No SPE means 64-bit long doubles, even if an E500. */
1285 if (rs6000_spe_string != 0
1286 && !strcmp (rs6000_spe_string, "no"))
1287 rs6000_long_double_type_size = 64;
1289 else if (rs6000_select[1].string != NULL)
1291 /* For the powerpc-eabispe configuration, we set all these by
1292 default, so let's unset them if we manually set another
1293 CPU that is not the E500. */
1294 if (rs6000_abi_string == 0)
1296 if (rs6000_spe_string == 0)
1298 if (rs6000_float_gprs_string == 0)
1299 rs6000_float_gprs = 0;
1300 if (rs6000_isel_string == 0)
1302 if (rs6000_long_double_size_string == 0)
1303 rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
1306 rs6000_always_hint = (rs6000_cpu != PROCESSOR_POWER4
1307 && rs6000_cpu != PROCESSOR_POWER5);
1308 rs6000_sched_groups = (rs6000_cpu == PROCESSOR_POWER4
1309 || rs6000_cpu == PROCESSOR_POWER5);
1311 /* Handle -m(no-)longcall option. This is a bit of a cheap hack,
1312 using TARGET_OPTIONS to handle a toggle switch, but we're out of
1313 bits in target_flags so TARGET_SWITCHES cannot be used.
1314 Assumption here is that rs6000_longcall_switch points into the
1315 text of the complete option, rather than being a copy, so we can
1316 scan back for the presence or absence of the no- modifier. */
1317 if (rs6000_longcall_switch)
1319 const char *base = rs6000_longcall_switch;
1320 while (base[-1] != 'm') base--;
1322 if (*rs6000_longcall_switch != '\0')
1323 error ("invalid option `%s'", base);
1324 rs6000_default_long_calls = (base[0] != 'n');
1327 /* Handle -m(no-)warn-altivec-long similarly. */
1328 if (rs6000_warn_altivec_long_switch)
1330 const char *base = rs6000_warn_altivec_long_switch;
1331 while (base[-1] != 'm') base--;
1333 if (*rs6000_warn_altivec_long_switch != '\0')
1334 error ("invalid option `%s'", base);
1335 rs6000_warn_altivec_long = (base[0] != 'n');
1338 /* Handle -mprioritize-restricted-insns option. */
1339 rs6000_sched_restricted_insns_priority
1340 = (rs6000_sched_groups ? 1 : 0);
1341 if (rs6000_sched_restricted_insns_priority_str)
1342 rs6000_sched_restricted_insns_priority =
1343 atoi (rs6000_sched_restricted_insns_priority_str);
1345 /* Handle -msched-costly-dep option. */
1346 rs6000_sched_costly_dep
1347 = (rs6000_sched_groups ? store_to_load_dep_costly : no_dep_costly);
1348 if (rs6000_sched_costly_dep_str)
1350 if (! strcmp (rs6000_sched_costly_dep_str, "no"))
1351 rs6000_sched_costly_dep = no_dep_costly;
1352 else if (! strcmp (rs6000_sched_costly_dep_str, "all"))
1353 rs6000_sched_costly_dep = all_deps_costly;
1354 else if (! strcmp (rs6000_sched_costly_dep_str, "true_store_to_load"))
1355 rs6000_sched_costly_dep = true_store_to_load_dep_costly;
1356 else if (! strcmp (rs6000_sched_costly_dep_str, "store_to_load"))
1357 rs6000_sched_costly_dep = store_to_load_dep_costly;
1359 rs6000_sched_costly_dep = atoi (rs6000_sched_costly_dep_str);
1362 /* Handle -minsert-sched-nops option. */
1363 rs6000_sched_insert_nops
1364 = (rs6000_sched_groups ? sched_finish_regroup_exact : sched_finish_none);
1365 if (rs6000_sched_insert_nops_str)
1367 if (! strcmp (rs6000_sched_insert_nops_str, "no"))
1368 rs6000_sched_insert_nops = sched_finish_none;
1369 else if (! strcmp (rs6000_sched_insert_nops_str, "pad"))
1370 rs6000_sched_insert_nops = sched_finish_pad_groups;
1371 else if (! strcmp (rs6000_sched_insert_nops_str, "regroup_exact"))
1372 rs6000_sched_insert_nops = sched_finish_regroup_exact;
1374 rs6000_sched_insert_nops = atoi (rs6000_sched_insert_nops_str);
1377 #ifdef TARGET_REGNAMES
1378 /* If the user desires alternate register names, copy in the
1379 alternate names now. */
1380 if (TARGET_REGNAMES)
1381 memcpy (rs6000_reg_names, alt_reg_names, sizeof (rs6000_reg_names));
1384 /* Set TARGET_AIX_STRUCT_RET last, after the ABI is determined.
1385 If -maix-struct-return or -msvr4-struct-return was explicitly
1386 used, don't override with the ABI default. */
1387 if ((target_flags_explicit & MASK_AIX_STRUCT_RET) == 0)
1389 if (DEFAULT_ABI == ABI_V4 && !DRAFT_V4_STRUCT_RET)
1390 target_flags = (target_flags & ~MASK_AIX_STRUCT_RET);
1392 target_flags |= MASK_AIX_STRUCT_RET;
1395 if (TARGET_LONG_DOUBLE_128
1396 && (DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_DARWIN))
1397 REAL_MODE_FORMAT (TFmode) = &ibm_extended_format;
1399 /* Allocate an alias set for register saves & restores from stack. */
1400 rs6000_sr_alias_set = new_alias_set ();
1403 ASM_GENERATE_INTERNAL_LABEL (toc_label_name, "LCTOC", 1);
1405 /* We can only guarantee the availability of DI pseudo-ops when
1406 assembling for 64-bit targets. */
1409 targetm.asm_out.aligned_op.di = NULL;
1410 targetm.asm_out.unaligned_op.di = NULL;
1413 /* Set maximum branch target alignment at two instructions, eight bytes. */
1414 align_jumps_max_skip = 8;
1415 align_loops_max_skip = 8;
1417 /* Arrange to save and restore machine status around nested functions. */
1418 init_machine_status = rs6000_init_machine_status;
1420 /* We should always be splitting complex arguments, but we can't break
1421 Linux and Darwin ABIs at the moment. For now, only AIX is fixed. */
1422 if (DEFAULT_ABI != ABI_AIX)
1423 targetm.calls.split_complex_arg = NULL;
1425 /* Initialize rs6000_cost with the appropriate target costs. */
1427 rs6000_cost = TARGET_POWERPC64 ? &size64_cost : &size32_cost;
1431 case PROCESSOR_RIOS1:
1432 rs6000_cost = &rios1_cost;
1435 case PROCESSOR_RIOS2:
1436 rs6000_cost = &rios2_cost;
1439 case PROCESSOR_RS64A:
1440 rs6000_cost = &rs64a_cost;
1443 case PROCESSOR_MPCCORE:
1444 rs6000_cost = &mpccore_cost;
1447 case PROCESSOR_PPC403:
1448 rs6000_cost = &ppc403_cost;
1451 case PROCESSOR_PPC405:
1452 rs6000_cost = &ppc405_cost;
1455 case PROCESSOR_PPC440:
1456 rs6000_cost = &ppc440_cost;
1459 case PROCESSOR_PPC601:
1460 rs6000_cost = &ppc601_cost;
1463 case PROCESSOR_PPC603:
1464 rs6000_cost = &ppc603_cost;
1467 case PROCESSOR_PPC604:
1468 rs6000_cost = &ppc604_cost;
1471 case PROCESSOR_PPC604e:
1472 rs6000_cost = &ppc604e_cost;
1475 case PROCESSOR_PPC620:
1476 rs6000_cost = &ppc620_cost;
1479 case PROCESSOR_PPC630:
1480 rs6000_cost = &ppc630_cost;
1483 case PROCESSOR_PPC750:
1484 case PROCESSOR_PPC7400:
1485 rs6000_cost = &ppc750_cost;
1488 case PROCESSOR_PPC7450:
1489 rs6000_cost = &ppc7450_cost;
1492 case PROCESSOR_PPC8540:
1493 rs6000_cost = &ppc8540_cost;
1496 case PROCESSOR_POWER4:
1497 case PROCESSOR_POWER5:
1498 rs6000_cost = &power4_cost;
1506 /* Handle generic options of the form -mfoo=yes/no.
1507 NAME is the option name.
1508 VALUE is the option value.
1509 FLAG is the pointer to the flag where to store a 1 or 0, depending on
1510 whether the option value is 'yes' or 'no' respectively. */
1512 rs6000_parse_yes_no_option (const char *name, const char *value, int *flag)
1516 else if (!strcmp (value, "yes"))
1518 else if (!strcmp (value, "no"))
1521 error ("unknown -m%s= option specified: '%s'", name, value);
1524 /* Handle -mabi= options. */
1526 rs6000_parse_abi_options (void)
1528 if (rs6000_abi_string == 0)
1530 else if (! strcmp (rs6000_abi_string, "altivec"))
1532 rs6000_altivec_abi = 1;
1535 else if (! strcmp (rs6000_abi_string, "no-altivec"))
1536 rs6000_altivec_abi = 0;
1537 else if (! strcmp (rs6000_abi_string, "spe"))
1540 rs6000_altivec_abi = 0;
1541 if (!TARGET_SPE_ABI)
1542 error ("not configured for ABI: '%s'", rs6000_abi_string);
1545 else if (! strcmp (rs6000_abi_string, "no-spe"))
1548 error ("unknown ABI specified: '%s'", rs6000_abi_string);
1551 /* Handle -malign-XXXXXX options. */
1553 rs6000_parse_alignment_option (void)
1555 if (rs6000_alignment_string == 0)
1557 else if (! strcmp (rs6000_alignment_string, "power"))
1558 rs6000_alignment_flags = MASK_ALIGN_POWER;
1559 else if (! strcmp (rs6000_alignment_string, "natural"))
1560 rs6000_alignment_flags = MASK_ALIGN_NATURAL;
1562 error ("unknown -malign-XXXXX option specified: '%s'",
1563 rs6000_alignment_string);
1566 /* Validate and record the size specified with the -mtls-size option. */
1569 rs6000_parse_tls_size_option (void)
1571 if (rs6000_tls_size_string == 0)
1573 else if (strcmp (rs6000_tls_size_string, "16") == 0)
1574 rs6000_tls_size = 16;
1575 else if (strcmp (rs6000_tls_size_string, "32") == 0)
1576 rs6000_tls_size = 32;
1577 else if (strcmp (rs6000_tls_size_string, "64") == 0)
1578 rs6000_tls_size = 64;
1580 error ("bad value `%s' for -mtls-size switch", rs6000_tls_size_string);
1584 optimization_options (int level ATTRIBUTE_UNUSED, int size ATTRIBUTE_UNUSED)
1588 /* Do anything needed at the start of the asm file. */
1591 rs6000_file_start (void)
1595 const char *start = buffer;
1596 struct rs6000_cpu_select *ptr;
1597 const char *default_cpu = TARGET_CPU_DEFAULT;
1598 FILE *file = asm_out_file;
1600 default_file_start ();
1602 #ifdef TARGET_BI_ARCH
1603 if ((TARGET_DEFAULT ^ target_flags) & MASK_64BIT)
1607 if (flag_verbose_asm)
1609 sprintf (buffer, "\n%s rs6000/powerpc options:", ASM_COMMENT_START);
1610 rs6000_select[0].string = default_cpu;
1612 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
1614 ptr = &rs6000_select[i];
1615 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
1617 fprintf (file, "%s %s%s", start, ptr->name, ptr->string);
1622 #ifdef USING_ELFOS_H
1623 switch (rs6000_sdata)
1625 case SDATA_NONE: fprintf (file, "%s -msdata=none", start); start = ""; break;
1626 case SDATA_DATA: fprintf (file, "%s -msdata=data", start); start = ""; break;
1627 case SDATA_SYSV: fprintf (file, "%s -msdata=sysv", start); start = ""; break;
1628 case SDATA_EABI: fprintf (file, "%s -msdata=eabi", start); start = ""; break;
1631 if (rs6000_sdata && g_switch_value)
1633 fprintf (file, "%s -G " HOST_WIDE_INT_PRINT_UNSIGNED, start,
1644 /* Return nonzero if this function is known to have a null epilogue. */
1647 direct_return (void)
1649 if (reload_completed)
1651 rs6000_stack_t *info = rs6000_stack_info ();
1653 if (info->first_gp_reg_save == 32
1654 && info->first_fp_reg_save == 64
1655 && info->first_altivec_reg_save == LAST_ALTIVEC_REGNO + 1
1656 && ! info->lr_save_p
1657 && ! info->cr_save_p
1658 && info->vrsave_mask == 0
1666 /* Returns 1 always. */
1669 any_operand (rtx op ATTRIBUTE_UNUSED,
1670 enum machine_mode mode ATTRIBUTE_UNUSED)
1675 /* Returns 1 if op is the count register. */
1677 count_register_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1679 if (GET_CODE (op) != REG)
1682 if (REGNO (op) == COUNT_REGISTER_REGNUM)
1685 if (REGNO (op) > FIRST_PSEUDO_REGISTER)
1691 /* Returns 1 if op is an altivec register. */
1693 altivec_register_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1696 return (register_operand (op, mode)
1697 && (GET_CODE (op) != REG
1698 || REGNO (op) > FIRST_PSEUDO_REGISTER
1699 || ALTIVEC_REGNO_P (REGNO (op))));
1703 xer_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1705 if (GET_CODE (op) != REG)
1708 if (XER_REGNO_P (REGNO (op)))
1714 /* Return 1 if OP is a signed 8-bit constant. Int multiplication
1715 by such constants completes more quickly. */
1718 s8bit_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1720 return ( GET_CODE (op) == CONST_INT
1721 && (INTVAL (op) >= -128 && INTVAL (op) <= 127));
1724 /* Return 1 if OP is a constant that can fit in a D field. */
1727 short_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1729 return (GET_CODE (op) == CONST_INT
1730 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'I'));
1733 /* Similar for an unsigned D field. */
1736 u_short_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1738 return (GET_CODE (op) == CONST_INT
1739 && CONST_OK_FOR_LETTER_P (INTVAL (op) & GET_MODE_MASK (mode), 'K'));
1742 /* Return 1 if OP is a CONST_INT that cannot fit in a signed D field. */
1745 non_short_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1747 return (GET_CODE (op) == CONST_INT
1748 && (unsigned HOST_WIDE_INT) (INTVAL (op) + 0x8000) >= 0x10000);
1751 /* Returns 1 if OP is a CONST_INT that is a positive value
1752 and an exact power of 2. */
1755 exact_log2_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1757 return (GET_CODE (op) == CONST_INT
1759 && exact_log2 (INTVAL (op)) >= 0);
1762 /* Returns 1 if OP is a register that is not special (i.e., not MQ,
1766 gpc_reg_operand (rtx op, enum machine_mode mode)
1768 return (register_operand (op, mode)
1769 && (GET_CODE (op) != REG
1770 || (REGNO (op) >= ARG_POINTER_REGNUM
1771 && !XER_REGNO_P (REGNO (op)))
1772 || REGNO (op) < MQ_REGNO));
1775 /* Returns 1 if OP is either a pseudo-register or a register denoting a
1779 cc_reg_operand (rtx op, enum machine_mode mode)
1781 return (register_operand (op, mode)
1782 && (GET_CODE (op) != REG
1783 || REGNO (op) >= FIRST_PSEUDO_REGISTER
1784 || CR_REGNO_P (REGNO (op))));
1787 /* Returns 1 if OP is either a pseudo-register or a register denoting a
1788 CR field that isn't CR0. */
1791 cc_reg_not_cr0_operand (rtx op, enum machine_mode mode)
1793 return (register_operand (op, mode)
1794 && (GET_CODE (op) != REG
1795 || REGNO (op) >= FIRST_PSEUDO_REGISTER
1796 || CR_REGNO_NOT_CR0_P (REGNO (op))));
1799 /* Returns 1 if OP is either a constant integer valid for a D-field or
1800 a non-special register. If a register, it must be in the proper
1801 mode unless MODE is VOIDmode. */
1804 reg_or_short_operand (rtx op, enum machine_mode mode)
1806 return short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
1809 /* Similar, except check if the negation of the constant would be
1810 valid for a D-field. Don't allow a constant zero, since all the
1811 patterns that call this predicate use "addic r1,r2,-constant" on
1812 a constant value to set a carry when r2 is greater or equal to
1813 "constant". That doesn't work for zero. */
1816 reg_or_neg_short_operand (rtx op, enum machine_mode mode)
1818 if (GET_CODE (op) == CONST_INT)
1819 return CONST_OK_FOR_LETTER_P (INTVAL (op), 'P') && INTVAL (op) != 0;
1821 return gpc_reg_operand (op, mode);
1824 /* Returns 1 if OP is either a constant integer valid for a DS-field or
1825 a non-special register. If a register, it must be in the proper
1826 mode unless MODE is VOIDmode. */
1829 reg_or_aligned_short_operand (rtx op, enum machine_mode mode)
1831 if (gpc_reg_operand (op, mode))
1833 else if (short_cint_operand (op, mode) && !(INTVAL (op) & 3))
1840 /* Return 1 if the operand is either a register or an integer whose
1841 high-order 16 bits are zero. */
1844 reg_or_u_short_operand (rtx op, enum machine_mode mode)
1846 return u_short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
1849 /* Return 1 is the operand is either a non-special register or ANY
1850 constant integer. */
1853 reg_or_cint_operand (rtx op, enum machine_mode mode)
1855 return (GET_CODE (op) == CONST_INT || gpc_reg_operand (op, mode));
1858 /* Return 1 is the operand is either a non-special register or ANY
1859 32-bit signed constant integer. */
1862 reg_or_arith_cint_operand (rtx op, enum machine_mode mode)
1864 return (gpc_reg_operand (op, mode)
1865 || (GET_CODE (op) == CONST_INT
1866 #if HOST_BITS_PER_WIDE_INT != 32
1867 && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80000000)
1868 < (unsigned HOST_WIDE_INT) 0x100000000ll)
1873 /* Return 1 is the operand is either a non-special register or a 32-bit
1874 signed constant integer valid for 64-bit addition. */
1877 reg_or_add_cint64_operand (rtx op, enum machine_mode mode)
1879 return (gpc_reg_operand (op, mode)
1880 || (GET_CODE (op) == CONST_INT
1881 #if HOST_BITS_PER_WIDE_INT == 32
1882 && INTVAL (op) < 0x7fff8000
1884 && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80008000)
1890 /* Return 1 is the operand is either a non-special register or a 32-bit
1891 signed constant integer valid for 64-bit subtraction. */
1894 reg_or_sub_cint64_operand (rtx op, enum machine_mode mode)
1896 return (gpc_reg_operand (op, mode)
1897 || (GET_CODE (op) == CONST_INT
1898 #if HOST_BITS_PER_WIDE_INT == 32
1899 && (- INTVAL (op)) < 0x7fff8000
1901 && ((unsigned HOST_WIDE_INT) ((- INTVAL (op)) + 0x80008000)
1907 /* Return 1 is the operand is either a non-special register or ANY
1908 32-bit unsigned constant integer. */
1911 reg_or_logical_cint_operand (rtx op, enum machine_mode mode)
1913 if (GET_CODE (op) == CONST_INT)
1915 if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
1917 if (GET_MODE_BITSIZE (mode) <= 32)
1920 if (INTVAL (op) < 0)
1924 return ((INTVAL (op) & GET_MODE_MASK (mode)
1925 & (~ (unsigned HOST_WIDE_INT) 0xffffffff)) == 0);
1927 else if (GET_CODE (op) == CONST_DOUBLE)
1929 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
1933 return CONST_DOUBLE_HIGH (op) == 0;
1936 return gpc_reg_operand (op, mode);
1939 /* Return 1 if the operand is an operand that can be loaded via the GOT. */
1942 got_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1944 return (GET_CODE (op) == SYMBOL_REF
1945 || GET_CODE (op) == CONST
1946 || GET_CODE (op) == LABEL_REF);
1949 /* Return 1 if the operand is a simple references that can be loaded via
1950 the GOT (labels involving addition aren't allowed). */
1953 got_no_const_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1955 return (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF);
1958 /* Return the number of instructions it takes to form a constant in an
1959 integer register. */
1962 num_insns_constant_wide (HOST_WIDE_INT value)
1964 /* signed constant loadable with {cal|addi} */
1965 if (CONST_OK_FOR_LETTER_P (value, 'I'))
1968 /* constant loadable with {cau|addis} */
1969 else if (CONST_OK_FOR_LETTER_P (value, 'L'))
1972 #if HOST_BITS_PER_WIDE_INT == 64
1973 else if (TARGET_POWERPC64)
1975 HOST_WIDE_INT low = ((value & 0xffffffff) ^ 0x80000000) - 0x80000000;
1976 HOST_WIDE_INT high = value >> 31;
1978 if (high == 0 || high == -1)
1984 return num_insns_constant_wide (high) + 1;
1986 return (num_insns_constant_wide (high)
1987 + num_insns_constant_wide (low) + 1);
1996 num_insns_constant (rtx op, enum machine_mode mode)
1998 if (GET_CODE (op) == CONST_INT)
2000 #if HOST_BITS_PER_WIDE_INT == 64
2001 if ((INTVAL (op) >> 31) != 0 && (INTVAL (op) >> 31) != -1
2002 && mask64_operand (op, mode))
2006 return num_insns_constant_wide (INTVAL (op));
2009 else if (GET_CODE (op) == CONST_DOUBLE && mode == SFmode)
2014 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
2015 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
2016 return num_insns_constant_wide ((HOST_WIDE_INT) l);
2019 else if (GET_CODE (op) == CONST_DOUBLE)
2025 int endian = (WORDS_BIG_ENDIAN == 0);
2027 if (mode == VOIDmode || mode == DImode)
2029 high = CONST_DOUBLE_HIGH (op);
2030 low = CONST_DOUBLE_LOW (op);
2034 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
2035 REAL_VALUE_TO_TARGET_DOUBLE (rv, l);
2037 low = l[1 - endian];
2041 return (num_insns_constant_wide (low)
2042 + num_insns_constant_wide (high));
2046 if (high == 0 && low >= 0)
2047 return num_insns_constant_wide (low);
2049 else if (high == -1 && low < 0)
2050 return num_insns_constant_wide (low);
2052 else if (mask64_operand (op, mode))
2056 return num_insns_constant_wide (high) + 1;
2059 return (num_insns_constant_wide (high)
2060 + num_insns_constant_wide (low) + 1);
2068 /* Return 1 if the operand is a CONST_DOUBLE and it can be put into a
2069 register with one instruction per word. We only do this if we can
2070 safely read CONST_DOUBLE_{LOW,HIGH}. */
2073 easy_fp_constant (rtx op, enum machine_mode mode)
2075 if (GET_CODE (op) != CONST_DOUBLE
2076 || GET_MODE (op) != mode
2077 || (GET_MODE_CLASS (mode) != MODE_FLOAT && mode != DImode))
2080 /* Consider all constants with -msoft-float to be easy. */
2081 if ((TARGET_SOFT_FLOAT || !TARGET_FPRS)
2085 /* If we are using V.4 style PIC, consider all constants to be hard. */
2086 if (flag_pic && DEFAULT_ABI == ABI_V4)
2089 #ifdef TARGET_RELOCATABLE
2090 /* Similarly if we are using -mrelocatable, consider all constants
2092 if (TARGET_RELOCATABLE)
2101 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
2102 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);
2104 return (num_insns_constant_wide ((HOST_WIDE_INT) k[0]) == 1
2105 && num_insns_constant_wide ((HOST_WIDE_INT) k[1]) == 1
2106 && num_insns_constant_wide ((HOST_WIDE_INT) k[2]) == 1
2107 && num_insns_constant_wide ((HOST_WIDE_INT) k[3]) == 1);
2110 else if (mode == DFmode)
2115 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
2116 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
2118 return (num_insns_constant_wide ((HOST_WIDE_INT) k[0]) == 1
2119 && num_insns_constant_wide ((HOST_WIDE_INT) k[1]) == 1);
2122 else if (mode == SFmode)
2127 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
2128 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
2130 return num_insns_constant_wide (l) == 1;
2133 else if (mode == DImode)
2134 return ((TARGET_POWERPC64
2135 && GET_CODE (op) == CONST_DOUBLE && CONST_DOUBLE_LOW (op) == 0)
2136 || (num_insns_constant (op, DImode) <= 2));
2138 else if (mode == SImode)
2144 /* Returns the constant for the splat instruction, if exists. */
2147 easy_vector_splat_const (int cst, enum machine_mode mode)
2152 if (EASY_VECTOR_15 (cst)
2153 || EASY_VECTOR_15_ADD_SELF (cst))
2155 if ((cst & 0xffff) != ((cst >> 16) & 0xffff))
2159 if (EASY_VECTOR_15 (cst)
2160 || EASY_VECTOR_15_ADD_SELF (cst))
2162 if ((cst & 0xff) != ((cst >> 8) & 0xff))
2166 if (EASY_VECTOR_15 (cst)
2167 || EASY_VECTOR_15_ADD_SELF (cst))
2176 /* Return nonzero if all elements of a vector have the same value. */
2179 easy_vector_same (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2183 units = CONST_VECTOR_NUNITS (op);
2185 cst = INTVAL (CONST_VECTOR_ELT (op, 0));
2186 for (i = 1; i < units; ++i)
2187 if (INTVAL (CONST_VECTOR_ELT (op, i)) != cst)
2189 if (i == units && easy_vector_splat_const (cst, mode))
2194 /* Return 1 if the operand is a CONST_INT and can be put into a
2195 register without using memory. */
2198 easy_vector_constant (rtx op, enum machine_mode mode)
2202 if (GET_CODE (op) != CONST_VECTOR
2207 if (zero_constant (op, mode)
2208 && ((TARGET_ALTIVEC && ALTIVEC_VECTOR_MODE (mode))
2209 || (TARGET_SPE && SPE_VECTOR_MODE (mode))))
2212 if (GET_MODE_CLASS (mode) != MODE_VECTOR_INT)
2215 if (TARGET_SPE && mode == V1DImode)
2218 cst = INTVAL (CONST_VECTOR_ELT (op, 0));
2219 cst2 = INTVAL (CONST_VECTOR_ELT (op, 1));
2221 /* Limit SPE vectors to 15 bits signed. These we can generate with:
2223 evmergelo r0, r0, r0
2226 I don't know how efficient it would be to allow bigger constants,
2227 considering we'll have an extra 'ori' for every 'li'. I doubt 5
2228 instructions is better than a 64-bit memory load, but I don't
2229 have the e500 timing specs. */
2230 if (TARGET_SPE && mode == V2SImode
2231 && cst >= -0x7fff && cst <= 0x7fff
2232 && cst2 >= -0x7fff && cst2 <= 0x7fff)
2236 && easy_vector_same (op, mode))
2238 cst = easy_vector_splat_const (cst, mode);
2239 if (EASY_VECTOR_15_ADD_SELF (cst)
2240 || EASY_VECTOR_15 (cst))
2246 /* Same as easy_vector_constant but only for EASY_VECTOR_15_ADD_SELF. */
2249 easy_vector_constant_add_self (rtx op, enum machine_mode mode)
2253 && GET_CODE (op) == CONST_VECTOR
2254 && easy_vector_same (op, mode))
2256 cst = easy_vector_splat_const (INTVAL (CONST_VECTOR_ELT (op, 0)), mode);
2257 if (EASY_VECTOR_15_ADD_SELF (cst))
2263 /* Generate easy_vector_constant out of a easy_vector_constant_add_self. */
2266 gen_easy_vector_constant_add_self (rtx op)
2270 units = GET_MODE_NUNITS (GET_MODE (op));
2271 v = rtvec_alloc (units);
2273 for (i = 0; i < units; i++)
2275 GEN_INT (INTVAL (CONST_VECTOR_ELT (op, i)) >> 1);
2276 return gen_rtx_raw_CONST_VECTOR (GET_MODE (op), v);
2280 output_vec_const_move (rtx *operands)
2283 enum machine_mode mode;
2289 cst = INTVAL (CONST_VECTOR_ELT (vec, 0));
2290 cst2 = INTVAL (CONST_VECTOR_ELT (vec, 1));
2291 mode = GET_MODE (dest);
2295 if (zero_constant (vec, mode))
2296 return "vxor %0,%0,%0";
2297 else if (easy_vector_constant (vec, mode))
2299 operands[1] = GEN_INT (cst);
2303 if (EASY_VECTOR_15 (cst))
2305 operands[1] = GEN_INT (cst);
2306 return "vspltisw %0,%1";
2308 else if (EASY_VECTOR_15_ADD_SELF (cst))
2312 if (EASY_VECTOR_15 (cst))
2314 operands[1] = GEN_INT (cst);
2315 return "vspltish %0,%1";
2317 else if (EASY_VECTOR_15_ADD_SELF (cst))
2321 if (EASY_VECTOR_15 (cst))
2323 operands[1] = GEN_INT (cst);
2324 return "vspltisb %0,%1";
2326 else if (EASY_VECTOR_15_ADD_SELF (cst))
2338 /* Vector constant 0 is handled as a splitter of V2SI, and in the
2339 pattern of V1DI, V4HI, and V2SF.
2341 FIXME: We should probably return # and add post reload
2342 splitters for these, but this way is so easy ;-).
2344 operands[1] = GEN_INT (cst);
2345 operands[2] = GEN_INT (cst2);
2347 return "li %0,%1\n\tevmergelo %0,%0,%0";
2349 return "li %0,%1\n\tevmergelo %0,%0,%0\n\tli %0,%2";
2355 /* Return 1 if the operand is the constant 0. This works for scalars
2356 as well as vectors. */
2358 zero_constant (rtx op, enum machine_mode mode)
2360 return op == CONST0_RTX (mode);
2363 /* Return 1 if the operand is 0.0. */
2365 zero_fp_constant (rtx op, enum machine_mode mode)
2367 return GET_MODE_CLASS (mode) == MODE_FLOAT && op == CONST0_RTX (mode);
2370 /* Return 1 if the operand is in volatile memory. Note that during
2371 the RTL generation phase, memory_operand does not return TRUE for
2372 volatile memory references. So this function allows us to
2373 recognize volatile references where its safe. */
2376 volatile_mem_operand (rtx op, enum machine_mode mode)
2378 if (GET_CODE (op) != MEM)
2381 if (!MEM_VOLATILE_P (op))
2384 if (mode != GET_MODE (op))
2387 if (reload_completed)
2388 return memory_operand (op, mode);
2390 if (reload_in_progress)
2391 return strict_memory_address_p (mode, XEXP (op, 0));
2393 return memory_address_p (mode, XEXP (op, 0));
2396 /* Return 1 if the operand is an offsettable memory operand. */
2399 offsettable_mem_operand (rtx op, enum machine_mode mode)
2401 return ((GET_CODE (op) == MEM)
2402 && offsettable_address_p (reload_completed || reload_in_progress,
2403 mode, XEXP (op, 0)));
2406 /* Return 1 if the operand is either an easy FP constant (see above) or
2410 mem_or_easy_const_operand (rtx op, enum machine_mode mode)
2412 return memory_operand (op, mode) || easy_fp_constant (op, mode);
2415 /* Return 1 if the operand is either a non-special register or an item
2416 that can be used as the operand of a `mode' add insn. */
2419 add_operand (rtx op, enum machine_mode mode)
2421 if (GET_CODE (op) == CONST_INT)
2422 return (CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
2423 || CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
2425 return gpc_reg_operand (op, mode);
2428 /* Return 1 if OP is a constant but not a valid add_operand. */
2431 non_add_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2433 return (GET_CODE (op) == CONST_INT
2434 && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
2435 && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
2438 /* Return 1 if the operand is a non-special register or a constant that
2439 can be used as the operand of an OR or XOR insn on the RS/6000. */
2442 logical_operand (rtx op, enum machine_mode mode)
2444 HOST_WIDE_INT opl, oph;
2446 if (gpc_reg_operand (op, mode))
2449 if (GET_CODE (op) == CONST_INT)
2451 opl = INTVAL (op) & GET_MODE_MASK (mode);
2453 #if HOST_BITS_PER_WIDE_INT <= 32
2454 if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT && opl < 0)
2458 else if (GET_CODE (op) == CONST_DOUBLE)
2460 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
2463 opl = CONST_DOUBLE_LOW (op);
2464 oph = CONST_DOUBLE_HIGH (op);
2471 return ((opl & ~ (unsigned HOST_WIDE_INT) 0xffff) == 0
2472 || (opl & ~ (unsigned HOST_WIDE_INT) 0xffff0000) == 0);
2475 /* Return 1 if C is a constant that is not a logical operand (as
2476 above), but could be split into one. */
2479 non_logical_cint_operand (rtx op, enum machine_mode mode)
2481 return ((GET_CODE (op) == CONST_INT || GET_CODE (op) == CONST_DOUBLE)
2482 && ! logical_operand (op, mode)
2483 && reg_or_logical_cint_operand (op, mode));
2486 /* Return 1 if C is a constant that can be encoded in a 32-bit mask on the
2487 RS/6000. It is if there are no more than two 1->0 or 0->1 transitions.
2488 Reject all ones and all zeros, since these should have been optimized
2489 away and confuse the making of MB and ME. */
2492 mask_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2494 HOST_WIDE_INT c, lsb;
2496 if (GET_CODE (op) != CONST_INT)
2501 /* Fail in 64-bit mode if the mask wraps around because the upper
2502 32-bits of the mask will all be 1s, contrary to GCC's internal view. */
2503 if (TARGET_POWERPC64 && (c & 0x80000001) == 0x80000001)
2506 /* We don't change the number of transitions by inverting,
2507 so make sure we start with the LS bit zero. */
2511 /* Reject all zeros or all ones. */
2515 /* Find the first transition. */
2518 /* Invert to look for a second transition. */
2521 /* Erase first transition. */
2524 /* Find the second transition (if any). */
2527 /* Match if all the bits above are 1's (or c is zero). */
2531 /* Return 1 for the PowerPC64 rlwinm corner case. */
2534 mask_operand_wrap (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2536 HOST_WIDE_INT c, lsb;
2538 if (GET_CODE (op) != CONST_INT)
2543 if ((c & 0x80000001) != 0x80000001)
2557 /* Return 1 if the operand is a constant that is a PowerPC64 mask.
2558 It is if there are no more than one 1->0 or 0->1 transitions.
2559 Reject all zeros, since zero should have been optimized away and
2560 confuses the making of MB and ME. */
2563 mask64_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2565 if (GET_CODE (op) == CONST_INT)
2567 HOST_WIDE_INT c, lsb;
2571 /* Reject all zeros. */
2575 /* We don't change the number of transitions by inverting,
2576 so make sure we start with the LS bit zero. */
2580 /* Find the transition, and check that all bits above are 1's. */
2583 /* Match if all the bits above are 1's (or c is zero). */
2589 /* Like mask64_operand, but allow up to three transitions. This
2590 predicate is used by insn patterns that generate two rldicl or
2591 rldicr machine insns. */
2594 mask64_2_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2596 if (GET_CODE (op) == CONST_INT)
2598 HOST_WIDE_INT c, lsb;
2602 /* Disallow all zeros. */
2606 /* We don't change the number of transitions by inverting,
2607 so make sure we start with the LS bit zero. */
2611 /* Find the first transition. */
2614 /* Invert to look for a second transition. */
2617 /* Erase first transition. */
2620 /* Find the second transition. */
2623 /* Invert to look for a third transition. */
2626 /* Erase second transition. */
2629 /* Find the third transition (if any). */
2632 /* Match if all the bits above are 1's (or c is zero). */
2638 /* Generates shifts and masks for a pair of rldicl or rldicr insns to
2639 implement ANDing by the mask IN. */
2641 build_mask64_2_operands (rtx in, rtx *out)
2643 #if HOST_BITS_PER_WIDE_INT >= 64
2644 unsigned HOST_WIDE_INT c, lsb, m1, m2;
2647 if (GET_CODE (in) != CONST_INT)
2653 /* Assume c initially something like 0x00fff000000fffff. The idea
2654 is to rotate the word so that the middle ^^^^^^ group of zeros
2655 is at the MS end and can be cleared with an rldicl mask. We then
2656 rotate back and clear off the MS ^^ group of zeros with a
2658 c = ~c; /* c == 0xff000ffffff00000 */
2659 lsb = c & -c; /* lsb == 0x0000000000100000 */
2660 m1 = -lsb; /* m1 == 0xfffffffffff00000 */
2661 c = ~c; /* c == 0x00fff000000fffff */
2662 c &= -lsb; /* c == 0x00fff00000000000 */
2663 lsb = c & -c; /* lsb == 0x0000100000000000 */
2664 c = ~c; /* c == 0xff000fffffffffff */
2665 c &= -lsb; /* c == 0xff00000000000000 */
2667 while ((lsb >>= 1) != 0)
2668 shift++; /* shift == 44 on exit from loop */
2669 m1 <<= 64 - shift; /* m1 == 0xffffff0000000000 */
2670 m1 = ~m1; /* m1 == 0x000000ffffffffff */
2671 m2 = ~c; /* m2 == 0x00ffffffffffffff */
2675 /* Assume c initially something like 0xff000f0000000000. The idea
2676 is to rotate the word so that the ^^^ middle group of zeros
2677 is at the LS end and can be cleared with an rldicr mask. We then
2678 rotate back and clear off the LS group of ^^^^^^^^^^ zeros with
2680 lsb = c & -c; /* lsb == 0x0000010000000000 */
2681 m2 = -lsb; /* m2 == 0xffffff0000000000 */
2682 c = ~c; /* c == 0x00fff0ffffffffff */
2683 c &= -lsb; /* c == 0x00fff00000000000 */
2684 lsb = c & -c; /* lsb == 0x0000100000000000 */
2685 c = ~c; /* c == 0xff000fffffffffff */
2686 c &= -lsb; /* c == 0xff00000000000000 */
2688 while ((lsb >>= 1) != 0)
2689 shift++; /* shift == 44 on exit from loop */
2690 m1 = ~c; /* m1 == 0x00ffffffffffffff */
2691 m1 >>= shift; /* m1 == 0x0000000000000fff */
2692 m1 = ~m1; /* m1 == 0xfffffffffffff000 */
2695 /* Note that when we only have two 0->1 and 1->0 transitions, one of the
2696 masks will be all 1's. We are guaranteed more than one transition. */
2697 out[0] = GEN_INT (64 - shift);
2698 out[1] = GEN_INT (m1);
2699 out[2] = GEN_INT (shift);
2700 out[3] = GEN_INT (m2);
2708 /* Return 1 if the operand is either a non-special register or a constant
2709 that can be used as the operand of a PowerPC64 logical AND insn. */
2712 and64_operand (rtx op, enum machine_mode mode)
2714 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
2715 return (gpc_reg_operand (op, mode) || mask64_operand (op, mode));
2717 return (logical_operand (op, mode) || mask64_operand (op, mode));
2720 /* Like the above, but also match constants that can be implemented
2721 with two rldicl or rldicr insns. */
2724 and64_2_operand (rtx op, enum machine_mode mode)
2726 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
2727 return gpc_reg_operand (op, mode) || mask64_2_operand (op, mode);
2729 return logical_operand (op, mode) || mask64_2_operand (op, mode);
2732 /* Return 1 if the operand is either a non-special register or a
2733 constant that can be used as the operand of an RS/6000 logical AND insn. */
2736 and_operand (rtx op, enum machine_mode mode)
2738 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
2739 return (gpc_reg_operand (op, mode) || mask_operand (op, mode));
2741 return (logical_operand (op, mode) || mask_operand (op, mode));
2744 /* Return 1 if the operand is a general register or memory operand. */
2747 reg_or_mem_operand (rtx op, enum machine_mode mode)
2749 return (gpc_reg_operand (op, mode)
2750 || memory_operand (op, mode)
2751 || macho_lo_sum_memory_operand (op, mode)
2752 || volatile_mem_operand (op, mode));
2755 /* Return 1 if the operand is a general register or memory operand without
2756 pre_inc or pre_dec which produces invalid form of PowerPC lwa
2760 lwa_operand (rtx op, enum machine_mode mode)
2764 if (reload_completed && GET_CODE (inner) == SUBREG)
2765 inner = SUBREG_REG (inner);
2767 return gpc_reg_operand (inner, mode)
2768 || (memory_operand (inner, mode)
2769 && GET_CODE (XEXP (inner, 0)) != PRE_INC
2770 && GET_CODE (XEXP (inner, 0)) != PRE_DEC
2771 && (GET_CODE (XEXP (inner, 0)) != PLUS
2772 || GET_CODE (XEXP (XEXP (inner, 0), 1)) != CONST_INT
2773 || INTVAL (XEXP (XEXP (inner, 0), 1)) % 4 == 0));
2776 /* Return 1 if the operand, used inside a MEM, is a SYMBOL_REF. */
2779 symbol_ref_operand (rtx op, enum machine_mode mode)
2781 if (mode != VOIDmode && GET_MODE (op) != mode)
2784 return (GET_CODE (op) == SYMBOL_REF
2785 && (DEFAULT_ABI != ABI_AIX || SYMBOL_REF_FUNCTION_P (op)));
2788 /* Return 1 if the operand, used inside a MEM, is a valid first argument
2789 to CALL. This is a SYMBOL_REF, a pseudo-register, LR or CTR. */
2792 call_operand (rtx op, enum machine_mode mode)
2794 if (mode != VOIDmode && GET_MODE (op) != mode)
2797 return (GET_CODE (op) == SYMBOL_REF
2798 || (GET_CODE (op) == REG
2799 && (REGNO (op) == LINK_REGISTER_REGNUM
2800 || REGNO (op) == COUNT_REGISTER_REGNUM
2801 || REGNO (op) >= FIRST_PSEUDO_REGISTER)));
2804 /* Return 1 if the operand is a SYMBOL_REF for a function known to be in
2808 current_file_function_operand (rtx op,
2809 enum machine_mode mode ATTRIBUTE_UNUSED)
2811 return (GET_CODE (op) == SYMBOL_REF
2812 && (DEFAULT_ABI != ABI_AIX || SYMBOL_REF_FUNCTION_P (op))
2813 && (SYMBOL_REF_LOCAL_P (op)
2814 || (op == XEXP (DECL_RTL (current_function_decl), 0))));
2817 /* Return 1 if this operand is a valid input for a move insn. */
2820 input_operand (rtx op, enum machine_mode mode)
2822 /* Memory is always valid. */
2823 if (memory_operand (op, mode))
2826 /* For floating-point, easy constants are valid. */
2827 if (GET_MODE_CLASS (mode) == MODE_FLOAT
2829 && easy_fp_constant (op, mode))
2832 /* Allow any integer constant. */
2833 if (GET_MODE_CLASS (mode) == MODE_INT
2834 && (GET_CODE (op) == CONST_INT
2835 || GET_CODE (op) == CONST_DOUBLE))
2838 /* Allow easy vector constants. */
2839 if (GET_CODE (op) == CONST_VECTOR
2840 && easy_vector_constant (op, mode))
2843 /* For floating-point or multi-word mode, the only remaining valid type
2845 if (GET_MODE_CLASS (mode) == MODE_FLOAT
2846 || GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2847 return register_operand (op, mode);
2849 /* The only cases left are integral modes one word or smaller (we
2850 do not get called for MODE_CC values). These can be in any
2852 if (register_operand (op, mode))
2855 /* A SYMBOL_REF referring to the TOC is valid. */
2856 if (legitimate_constant_pool_address_p (op))
2859 /* A constant pool expression (relative to the TOC) is valid */
2860 if (toc_relative_expr_p (op))
2863 /* V.4 allows SYMBOL_REFs and CONSTs that are in the small data region
2865 if (DEFAULT_ABI == ABI_V4
2866 && (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == CONST)
2867 && small_data_operand (op, Pmode))
2874 /* Darwin, AIX increases natural record alignment to doubleword if the first
2875 field is an FP double while the FP fields remain word aligned. */
2878 rs6000_special_round_type_align (tree type, int computed, int specified)
2880 tree field = TYPE_FIELDS (type);
2882 /* Skip all the static variables only if ABI is greater than
2884 while (field != NULL && TREE_CODE (field) == VAR_DECL)
2885 field = TREE_CHAIN (field);
2887 if (field == NULL || field == type || DECL_MODE (field) != DFmode)
2888 return MAX (computed, specified);
2890 return MAX (MAX (computed, specified), 64);
2893 /* Return 1 for an operand in small memory on V.4/eabi. */
2896 small_data_operand (rtx op ATTRIBUTE_UNUSED,
2897 enum machine_mode mode ATTRIBUTE_UNUSED)
2902 if (rs6000_sdata == SDATA_NONE || rs6000_sdata == SDATA_DATA)
2905 if (DEFAULT_ABI != ABI_V4)
2908 if (GET_CODE (op) == SYMBOL_REF)
2911 else if (GET_CODE (op) != CONST
2912 || GET_CODE (XEXP (op, 0)) != PLUS
2913 || GET_CODE (XEXP (XEXP (op, 0), 0)) != SYMBOL_REF
2914 || GET_CODE (XEXP (XEXP (op, 0), 1)) != CONST_INT)
2919 rtx sum = XEXP (op, 0);
2920 HOST_WIDE_INT summand;
2922 /* We have to be careful here, because it is the referenced address
2923 that must be 32k from _SDA_BASE_, not just the symbol. */
2924 summand = INTVAL (XEXP (sum, 1));
2925 if (summand < 0 || (unsigned HOST_WIDE_INT) summand > g_switch_value)
2928 sym_ref = XEXP (sum, 0);
2931 return SYMBOL_REF_SMALL_P (sym_ref);
2937 /* Return true, if operand is a memory operand and has a
2938 displacement divisible by 4. */
2941 word_offset_memref_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2946 if (!memory_operand (op, mode))
2949 addr = XEXP (op, 0);
2950 if (GET_CODE (addr) == PLUS
2951 && GET_CODE (XEXP (addr, 0)) == REG
2952 && GET_CODE (XEXP (addr, 1)) == CONST_INT)
2953 off = INTVAL (XEXP (addr, 1));
2955 return (off % 4) == 0;
2958 /* Return true if either operand is a general purpose register. */
2961 gpr_or_gpr_p (rtx op0, rtx op1)
2963 return ((REG_P (op0) && INT_REGNO_P (REGNO (op0)))
2964 || (REG_P (op1) && INT_REGNO_P (REGNO (op1))));
2968 /* Subroutines of rs6000_legitimize_address and rs6000_legitimate_address. */
2971 constant_pool_expr_1 (rtx op, int *have_sym, int *have_toc)
2973 switch (GET_CODE(op))
2976 if (RS6000_SYMBOL_REF_TLS_P (op))
2978 else if (CONSTANT_POOL_ADDRESS_P (op))
2980 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (op), Pmode))
2988 else if (! strcmp (XSTR (op, 0), toc_label_name))
2997 return (constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc)
2998 && constant_pool_expr_1 (XEXP (op, 1), have_sym, have_toc));
3000 return constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc);
3009 constant_pool_expr_p (rtx op)
3013 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_sym;
3017 toc_relative_expr_p (rtx op)
3021 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_toc;
3025 legitimate_constant_pool_address_p (rtx x)
3028 && GET_CODE (x) == PLUS
3029 && GET_CODE (XEXP (x, 0)) == REG
3030 && (TARGET_MINIMAL_TOC || REGNO (XEXP (x, 0)) == TOC_REGISTER)
3031 && constant_pool_expr_p (XEXP (x, 1)));
3035 legitimate_small_data_p (enum machine_mode mode, rtx x)
3037 return (DEFAULT_ABI == ABI_V4
3038 && !flag_pic && !TARGET_TOC
3039 && (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST)
3040 && small_data_operand (x, mode));
3043 /* SPE offset addressing is limited to 5-bits worth of double words. */
3044 #define SPE_CONST_OFFSET_OK(x) (((x) & ~0xf8) == 0)
3047 rs6000_legitimate_offset_address_p (enum machine_mode mode, rtx x, int strict)
3049 unsigned HOST_WIDE_INT offset, extra;
3051 if (GET_CODE (x) != PLUS)
3053 if (GET_CODE (XEXP (x, 0)) != REG)
3055 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
3057 if (legitimate_constant_pool_address_p (x))
3059 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
3062 offset = INTVAL (XEXP (x, 1));
3070 /* AltiVec vector modes. Only reg+reg addressing is valid here,
3071 which leaves the only valid constant offset of zero, which by
3072 canonicalization rules is also invalid. */
3079 /* SPE vector modes. */
3080 return SPE_CONST_OFFSET_OK (offset);
3084 if (mode == DFmode || !TARGET_POWERPC64)
3086 else if (offset & 3)
3092 if (mode == TFmode || !TARGET_POWERPC64)
3094 else if (offset & 3)
3105 return (offset < 0x10000) && (offset + extra < 0x10000);
3109 legitimate_indexed_address_p (rtx x, int strict)
3113 if (GET_CODE (x) != PLUS)
3118 if (!REG_P (op0) || !REG_P (op1))
3121 return ((INT_REG_OK_FOR_BASE_P (op0, strict)
3122 && INT_REG_OK_FOR_INDEX_P (op1, strict))
3123 || (INT_REG_OK_FOR_BASE_P (op1, strict)
3124 && INT_REG_OK_FOR_INDEX_P (op0, strict)));
3128 legitimate_indirect_address_p (rtx x, int strict)
3130 return GET_CODE (x) == REG && INT_REG_OK_FOR_BASE_P (x, strict);
3134 macho_lo_sum_memory_operand (rtx x, enum machine_mode mode)
3136 if (!TARGET_MACHO || !flag_pic
3137 || mode != SImode || GET_CODE(x) != MEM)
3141 if (GET_CODE (x) != LO_SUM)
3143 if (GET_CODE (XEXP (x, 0)) != REG)
3145 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), 0))
3149 return CONSTANT_P (x);
3153 legitimate_lo_sum_address_p (enum machine_mode mode, rtx x, int strict)
3155 if (GET_CODE (x) != LO_SUM)
3157 if (GET_CODE (XEXP (x, 0)) != REG)
3159 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
3163 if (TARGET_ELF || TARGET_MACHO)
3165 if (DEFAULT_ABI != ABI_AIX && DEFAULT_ABI != ABI_DARWIN && flag_pic)
3169 if (GET_MODE_NUNITS (mode) != 1)
3171 if (GET_MODE_BITSIZE (mode) > 64)
3174 return CONSTANT_P (x);
3181 /* Try machine-dependent ways of modifying an illegitimate address
3182 to be legitimate. If we find one, return the new, valid address.
3183 This is used from only one place: `memory_address' in explow.c.
3185 OLDX is the address as it was before break_out_memory_refs was
3186 called. In some cases it is useful to look at this to decide what
3189 MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.
3191 It is always safe for this function to do nothing. It exists to
3192 recognize opportunities to optimize the output.
3194 On RS/6000, first check for the sum of a register with a constant
3195 integer that is out of range. If so, generate code to add the
3196 constant with the low-order 16 bits masked to the register and force
3197 this result into another register (this can be done with `cau').
3198 Then generate an address of REG+(CONST&0xffff), allowing for the
3199 possibility of bit 16 being a one.
3201 Then check for the sum of a register and something not constant, try to
3202 load the other things into a register and return the sum. */
3205 rs6000_legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
3206 enum machine_mode mode)
3208 if (GET_CODE (x) == SYMBOL_REF)
3210 enum tls_model model = SYMBOL_REF_TLS_MODEL (x);
3212 return rs6000_legitimize_tls_address (x, model);
3215 if (GET_CODE (x) == PLUS
3216 && GET_CODE (XEXP (x, 0)) == REG
3217 && GET_CODE (XEXP (x, 1)) == CONST_INT
3218 && (unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1)) + 0x8000) >= 0x10000)
3220 HOST_WIDE_INT high_int, low_int;
3222 low_int = ((INTVAL (XEXP (x, 1)) & 0xffff) ^ 0x8000) - 0x8000;
3223 high_int = INTVAL (XEXP (x, 1)) - low_int;
3224 sum = force_operand (gen_rtx_PLUS (Pmode, XEXP (x, 0),
3225 GEN_INT (high_int)), 0);
3226 return gen_rtx_PLUS (Pmode, sum, GEN_INT (low_int));
3228 else if (GET_CODE (x) == PLUS
3229 && GET_CODE (XEXP (x, 0)) == REG
3230 && GET_CODE (XEXP (x, 1)) != CONST_INT
3231 && GET_MODE_NUNITS (mode) == 1
3232 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
3234 || (mode != DFmode && mode != TFmode))
3235 && (TARGET_POWERPC64 || mode != DImode)
3238 return gen_rtx_PLUS (Pmode, XEXP (x, 0),
3239 force_reg (Pmode, force_operand (XEXP (x, 1), 0)));
3241 else if (ALTIVEC_VECTOR_MODE (mode))
3245 /* Make sure both operands are registers. */
3246 if (GET_CODE (x) == PLUS)
3247 return gen_rtx_PLUS (Pmode, force_reg (Pmode, XEXP (x, 0)),
3248 force_reg (Pmode, XEXP (x, 1)));
3250 reg = force_reg (Pmode, x);
3253 else if (SPE_VECTOR_MODE (mode))
3255 /* We accept [reg + reg] and [reg + OFFSET]. */
3257 if (GET_CODE (x) == PLUS)
3259 rtx op1 = XEXP (x, 0);
3260 rtx op2 = XEXP (x, 1);
3262 op1 = force_reg (Pmode, op1);
3264 if (GET_CODE (op2) != REG
3265 && (GET_CODE (op2) != CONST_INT
3266 || !SPE_CONST_OFFSET_OK (INTVAL (op2))))
3267 op2 = force_reg (Pmode, op2);
3269 return gen_rtx_PLUS (Pmode, op1, op2);
3272 return force_reg (Pmode, x);
3278 && GET_CODE (x) != CONST_INT
3279 && GET_CODE (x) != CONST_DOUBLE
3281 && GET_MODE_NUNITS (mode) == 1
3282 && (GET_MODE_BITSIZE (mode) <= 32
3283 || ((TARGET_HARD_FLOAT && TARGET_FPRS) && mode == DFmode)))
3285 rtx reg = gen_reg_rtx (Pmode);
3286 emit_insn (gen_elf_high (reg, x));
3287 return gen_rtx_LO_SUM (Pmode, reg, x);
3289 else if (TARGET_MACHO && TARGET_32BIT && TARGET_NO_TOC
3292 && ! MACHO_DYNAMIC_NO_PIC_P
3294 && GET_CODE (x) != CONST_INT
3295 && GET_CODE (x) != CONST_DOUBLE
3297 && ((TARGET_HARD_FLOAT && TARGET_FPRS) || mode != DFmode)
3301 rtx reg = gen_reg_rtx (Pmode);
3302 emit_insn (gen_macho_high (reg, x));
3303 return gen_rtx_LO_SUM (Pmode, reg, x);
3306 && constant_pool_expr_p (x)
3307 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), Pmode))
3309 return create_TOC_reference (x);
3315 /* This is called from dwarf2out.c via ASM_OUTPUT_DWARF_DTPREL.
3316 We need to emit DTP-relative relocations. */
3319 rs6000_output_dwarf_dtprel (FILE *file, int size, rtx x)
3324 fputs ("\t.long\t", file);
3327 fputs (DOUBLE_INT_ASM_OP, file);
3332 output_addr_const (file, x);
3333 fputs ("@dtprel+0x8000", file);
3336 /* Construct the SYMBOL_REF for the tls_get_addr function. */
3338 static GTY(()) rtx rs6000_tls_symbol;
3340 rs6000_tls_get_addr (void)
3342 if (!rs6000_tls_symbol)
3343 rs6000_tls_symbol = init_one_libfunc ("__tls_get_addr");
3345 return rs6000_tls_symbol;
3348 /* Construct the SYMBOL_REF for TLS GOT references. */
3350 static GTY(()) rtx rs6000_got_symbol;
3352 rs6000_got_sym (void)
3354 if (!rs6000_got_symbol)
3356 rs6000_got_symbol = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
3357 SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_LOCAL;
3358 SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_EXTERNAL;
3361 return rs6000_got_symbol;
3364 /* ADDR contains a thread-local SYMBOL_REF. Generate code to compute
3365 this (thread-local) address. */
3368 rs6000_legitimize_tls_address (rtx addr, enum tls_model model)
3372 dest = gen_reg_rtx (Pmode);
3373 if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 16)
3379 tlsreg = gen_rtx_REG (Pmode, 13);
3380 insn = gen_tls_tprel_64 (dest, tlsreg, addr);
3384 tlsreg = gen_rtx_REG (Pmode, 2);
3385 insn = gen_tls_tprel_32 (dest, tlsreg, addr);
3389 else if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 32)
3393 tmp = gen_reg_rtx (Pmode);
3396 tlsreg = gen_rtx_REG (Pmode, 13);
3397 insn = gen_tls_tprel_ha_64 (tmp, tlsreg, addr);
3401 tlsreg = gen_rtx_REG (Pmode, 2);
3402 insn = gen_tls_tprel_ha_32 (tmp, tlsreg, addr);
3406 insn = gen_tls_tprel_lo_64 (dest, tmp, addr);
3408 insn = gen_tls_tprel_lo_32 (dest, tmp, addr);
3413 rtx r3, got, tga, tmp1, tmp2, eqv;
3416 got = gen_rtx_REG (Pmode, TOC_REGISTER);
3420 got = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
3423 rtx gsym = rs6000_got_sym ();
3424 got = gen_reg_rtx (Pmode);
3426 rs6000_emit_move (got, gsym, Pmode);
3430 static int tls_got_labelno = 0;
3431 rtx tempLR, lab, tmp3, mem;
3434 ASM_GENERATE_INTERNAL_LABEL (buf, "LTLS", tls_got_labelno++);
3435 lab = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
3436 tempLR = gen_reg_rtx (Pmode);
3437 tmp1 = gen_reg_rtx (Pmode);
3438 tmp2 = gen_reg_rtx (Pmode);
3439 tmp3 = gen_reg_rtx (Pmode);
3440 mem = gen_rtx_MEM (Pmode, tmp1);
3441 RTX_UNCHANGING_P (mem) = 1;
3443 first = emit_insn (gen_load_toc_v4_PIC_1b (tempLR, lab,
3445 emit_move_insn (tmp1, tempLR);
3446 emit_move_insn (tmp2, mem);
3447 emit_insn (gen_addsi3 (tmp3, tmp1, tmp2));
3448 last = emit_move_insn (got, tmp3);
3449 REG_NOTES (last) = gen_rtx_EXPR_LIST (REG_EQUAL, gsym,
3451 REG_NOTES (first) = gen_rtx_INSN_LIST (REG_LIBCALL, last,
3453 REG_NOTES (last) = gen_rtx_INSN_LIST (REG_RETVAL, first,
3459 if (model == TLS_MODEL_GLOBAL_DYNAMIC)
3461 r3 = gen_rtx_REG (Pmode, 3);
3463 insn = gen_tls_gd_64 (r3, got, addr);
3465 insn = gen_tls_gd_32 (r3, got, addr);
3468 tga = gen_rtx_MEM (Pmode, rs6000_tls_get_addr ());
3469 insn = gen_call_value (r3, tga, const0_rtx, const0_rtx);
3470 insn = emit_call_insn (insn);
3471 CONST_OR_PURE_CALL_P (insn) = 1;
3472 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
3473 insn = get_insns ();
3475 emit_libcall_block (insn, dest, r3, addr);
3477 else if (model == TLS_MODEL_LOCAL_DYNAMIC)
3479 r3 = gen_rtx_REG (Pmode, 3);
3481 insn = gen_tls_ld_64 (r3, got);
3483 insn = gen_tls_ld_32 (r3, got);
3486 tga = gen_rtx_MEM (Pmode, rs6000_tls_get_addr ());
3487 insn = gen_call_value (r3, tga, const0_rtx, const0_rtx);
3488 insn = emit_call_insn (insn);
3489 CONST_OR_PURE_CALL_P (insn) = 1;
3490 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
3491 insn = get_insns ();
3493 tmp1 = gen_reg_rtx (Pmode);
3494 eqv = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx),
3496 emit_libcall_block (insn, tmp1, r3, eqv);
3497 if (rs6000_tls_size == 16)
3500 insn = gen_tls_dtprel_64 (dest, tmp1, addr);
3502 insn = gen_tls_dtprel_32 (dest, tmp1, addr);
3504 else if (rs6000_tls_size == 32)
3506 tmp2 = gen_reg_rtx (Pmode);
3508 insn = gen_tls_dtprel_ha_64 (tmp2, tmp1, addr);
3510 insn = gen_tls_dtprel_ha_32 (tmp2, tmp1, addr);
3513 insn = gen_tls_dtprel_lo_64 (dest, tmp2, addr);
3515 insn = gen_tls_dtprel_lo_32 (dest, tmp2, addr);
3519 tmp2 = gen_reg_rtx (Pmode);
3521 insn = gen_tls_got_dtprel_64 (tmp2, got, addr);
3523 insn = gen_tls_got_dtprel_32 (tmp2, got, addr);
3525 insn = gen_rtx_SET (Pmode, dest,
3526 gen_rtx_PLUS (Pmode, tmp2, tmp1));
3532 /* IE, or 64 bit offset LE. */
3533 tmp2 = gen_reg_rtx (Pmode);
3535 insn = gen_tls_got_tprel_64 (tmp2, got, addr);
3537 insn = gen_tls_got_tprel_32 (tmp2, got, addr);
3540 insn = gen_tls_tls_64 (dest, tmp2, addr);
3542 insn = gen_tls_tls_32 (dest, tmp2, addr);
3550 /* Return 1 if X is a SYMBOL_REF for a TLS symbol. This is used in
3551 instruction definitions. */
3554 rs6000_tls_symbol_ref (rtx x, enum machine_mode mode ATTRIBUTE_UNUSED)
3556 return RS6000_SYMBOL_REF_TLS_P (x);
3559 /* Return 1 if X contains a thread-local symbol. */
3562 rs6000_tls_referenced_p (rtx x)
3564 if (! TARGET_HAVE_TLS)
3567 return for_each_rtx (&x, &rs6000_tls_symbol_ref_1, 0);
3570 /* Return 1 if *X is a thread-local symbol. This is the same as
3571 rs6000_tls_symbol_ref except for the type of the unused argument. */
3574 rs6000_tls_symbol_ref_1 (rtx *x, void *data ATTRIBUTE_UNUSED)
3576 return RS6000_SYMBOL_REF_TLS_P (*x);
3579 /* The convention appears to be to define this wherever it is used.
3580 With legitimize_reload_address now defined here, REG_MODE_OK_FOR_BASE_P
3581 is now used here. */
3582 #ifndef REG_MODE_OK_FOR_BASE_P
3583 #define REG_MODE_OK_FOR_BASE_P(REGNO, MODE) REG_OK_FOR_BASE_P (REGNO)
3586 /* Our implementation of LEGITIMIZE_RELOAD_ADDRESS. Returns a value to
3587 replace the input X, or the original X if no replacement is called for.
3588 The output parameter *WIN is 1 if the calling macro should goto WIN,
3591 For RS/6000, we wish to handle large displacements off a base
3592 register by splitting the addend across an addiu/addis and the mem insn.
3593 This cuts number of extra insns needed from 3 to 1.
3595 On Darwin, we use this to generate code for floating point constants.
3596 A movsf_low is generated so we wind up with 2 instructions rather than 3.
3597 The Darwin code is inside #if TARGET_MACHO because only then is
3598 machopic_function_base_name() defined. */
3600 rs6000_legitimize_reload_address (rtx x, enum machine_mode mode,
3601 int opnum, int type, int ind_levels ATTRIBUTE_UNUSED, int *win)
3603 /* We must recognize output that we have already generated ourselves. */
3604 if (GET_CODE (x) == PLUS
3605 && GET_CODE (XEXP (x, 0)) == PLUS
3606 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
3607 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3608 && GET_CODE (XEXP (x, 1)) == CONST_INT)
3610 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
3611 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
3612 opnum, (enum reload_type)type);
3618 if (DEFAULT_ABI == ABI_DARWIN && flag_pic
3619 && GET_CODE (x) == LO_SUM
3620 && GET_CODE (XEXP (x, 0)) == PLUS
3621 && XEXP (XEXP (x, 0), 0) == pic_offset_table_rtx
3622 && GET_CODE (XEXP (XEXP (x, 0), 1)) == HIGH
3623 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 1), 0)) == CONST
3624 && XEXP (XEXP (XEXP (x, 0), 1), 0) == XEXP (x, 1)
3625 && GET_CODE (XEXP (XEXP (x, 1), 0)) == MINUS
3626 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == SYMBOL_REF
3627 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == SYMBOL_REF)
3629 /* Result of previous invocation of this function on Darwin
3630 floating point constant. */
3631 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
3632 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
3633 opnum, (enum reload_type)type);
3638 if (GET_CODE (x) == PLUS
3639 && GET_CODE (XEXP (x, 0)) == REG
3640 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
3641 && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
3642 && GET_CODE (XEXP (x, 1)) == CONST_INT
3643 && !SPE_VECTOR_MODE (mode)
3644 && !ALTIVEC_VECTOR_MODE (mode))
3646 HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
3647 HOST_WIDE_INT low = ((val & 0xffff) ^ 0x8000) - 0x8000;
3649 = (((val - low) & 0xffffffff) ^ 0x80000000) - 0x80000000;
3651 /* Check for 32-bit overflow. */
3652 if (high + low != val)
3658 /* Reload the high part into a base reg; leave the low part
3659 in the mem directly. */
3661 x = gen_rtx_PLUS (GET_MODE (x),
3662 gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0),
3666 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
3667 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
3668 opnum, (enum reload_type)type);
3673 if (GET_CODE (x) == SYMBOL_REF
3674 && DEFAULT_ABI == ABI_DARWIN
3675 && !ALTIVEC_VECTOR_MODE (mode)
3676 && (flag_pic || MACHO_DYNAMIC_NO_PIC_P)
3677 /* Don't do this for TFmode, since the result isn't offsettable. */
3682 rtx offset = gen_rtx_CONST (Pmode,
3683 gen_rtx_MINUS (Pmode, x,
3684 machopic_function_base_sym ()));
3685 x = gen_rtx_LO_SUM (GET_MODE (x),
3686 gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
3687 gen_rtx_HIGH (Pmode, offset)), offset);
3690 x = gen_rtx_LO_SUM (GET_MODE (x),
3691 gen_rtx_HIGH (Pmode, x), x);
3693 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
3694 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
3695 opnum, (enum reload_type)type);
3701 && constant_pool_expr_p (x)
3702 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), mode))
3704 (x) = create_TOC_reference (x);
3712 /* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
3713 that is a valid memory address for an instruction.
3714 The MODE argument is the machine mode for the MEM expression
3715 that wants to use this address.
3717 On the RS/6000, there are four valid address: a SYMBOL_REF that
3718 refers to a constant pool entry of an address (or the sum of it
3719 plus a constant), a short (16-bit signed) constant plus a register,
3720 the sum of two registers, or a register indirect, possibly with an
3721 auto-increment. For DFmode and DImode with a constant plus register,
3722 we must ensure that both words are addressable or PowerPC64 with offset
3725 For modes spanning multiple registers (DFmode in 32-bit GPRs,
3726 32-bit DImode, TImode, TFmode), indexed addressing cannot be used because
3727 adjacent memory cells are accessed by adding word-sized offsets
3728 during assembly output. */
3730 rs6000_legitimate_address (enum machine_mode mode, rtx x, int reg_ok_strict)
3732 if (RS6000_SYMBOL_REF_TLS_P (x))
3734 if (legitimate_indirect_address_p (x, reg_ok_strict))
3736 if ((GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
3737 && !ALTIVEC_VECTOR_MODE (mode)
3738 && !SPE_VECTOR_MODE (mode)
3740 && legitimate_indirect_address_p (XEXP (x, 0), reg_ok_strict))
3742 if (legitimate_small_data_p (mode, x))
3744 if (legitimate_constant_pool_address_p (x))
3746 /* If not REG_OK_STRICT (before reload) let pass any stack offset. */
3748 && GET_CODE (x) == PLUS
3749 && GET_CODE (XEXP (x, 0)) == REG
3750 && (XEXP (x, 0) == virtual_stack_vars_rtx
3751 || XEXP (x, 0) == arg_pointer_rtx)
3752 && GET_CODE (XEXP (x, 1)) == CONST_INT)
3754 if (rs6000_legitimate_offset_address_p (mode, x, reg_ok_strict))
3758 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
3760 || (mode != DFmode && mode != TFmode))
3761 && (TARGET_POWERPC64 || mode != DImode)
3762 && legitimate_indexed_address_p (x, reg_ok_strict))
3764 if (legitimate_lo_sum_address_p (mode, x, reg_ok_strict))
3769 /* Go to LABEL if ADDR (a legitimate address expression)
3770 has an effect that depends on the machine mode it is used for.
3772 On the RS/6000 this is true of all integral offsets (since AltiVec
3773 modes don't allow them) or is a pre-increment or decrement.
3775 ??? Except that due to conceptual problems in offsettable_address_p
3776 we can't really report the problems of integral offsets. So leave
3777 this assuming that the adjustable offset must be valid for the
3778 sub-words of a TFmode operand, which is what we had before. */
3781 rs6000_mode_dependent_address (rtx addr)
3783 switch (GET_CODE (addr))
3786 if (GET_CODE (XEXP (addr, 1)) == CONST_INT)
3788 unsigned HOST_WIDE_INT val = INTVAL (XEXP (addr, 1));
3789 return val + 12 + 0x8000 >= 0x10000;
3798 return TARGET_UPDATE;
3807 /* Return number of consecutive hard regs needed starting at reg REGNO
3808 to hold something of mode MODE.
3809 This is ordinarily the length in words of a value of mode MODE
3810 but can be less for certain modes in special long registers.
3812 For the SPE, GPRs are 64 bits but only 32 bits are visible in
3813 scalar instructions. The upper 32 bits are only available to the
3816 POWER and PowerPC GPRs hold 32 bits worth;
3817 PowerPC64 GPRs and FPRs point register holds 64 bits worth. */
3820 rs6000_hard_regno_nregs (int regno, enum machine_mode mode)
3822 if (FP_REGNO_P (regno))
3823 return (GET_MODE_SIZE (mode) + UNITS_PER_FP_WORD - 1) / UNITS_PER_FP_WORD;
3825 if (SPE_SIMD_REGNO_P (regno) && TARGET_SPE && SPE_VECTOR_MODE (mode))
3826 return (GET_MODE_SIZE (mode) + UNITS_PER_SPE_WORD - 1) / UNITS_PER_SPE_WORD;
3828 if (ALTIVEC_REGNO_P (regno))
3830 (GET_MODE_SIZE (mode) + UNITS_PER_ALTIVEC_WORD - 1) / UNITS_PER_ALTIVEC_WORD;
3832 return (GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
3835 /* Change register usage conditional on target flags. */
3837 rs6000_conditional_register_usage (void)
3841 /* Set MQ register fixed (already call_used) if not POWER
3842 architecture (RIOS1, RIOS2, RSC, and PPC601) so that it will not
3847 /* 64-bit AIX reserves GPR13 for thread-private data. */
3849 fixed_regs[13] = call_used_regs[13]
3850 = call_really_used_regs[13] = 1;
3852 /* Conditionally disable FPRs. */
3853 if (TARGET_SOFT_FLOAT || !TARGET_FPRS)
3854 for (i = 32; i < 64; i++)
3855 fixed_regs[i] = call_used_regs[i]
3856 = call_really_used_regs[i] = 1;
3858 if (DEFAULT_ABI == ABI_V4
3859 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
3861 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
3863 if (DEFAULT_ABI == ABI_V4
3864 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
3866 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
3867 = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
3868 = call_really_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
3870 if (DEFAULT_ABI == ABI_DARWIN
3871 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
3872 global_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
3873 = fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
3874 = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
3875 = call_really_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
3877 if (TARGET_TOC && TARGET_MINIMAL_TOC)
3878 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
3879 = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
3882 global_regs[VSCR_REGNO] = 1;
3886 global_regs[SPEFSCR_REGNO] = 1;
3887 fixed_regs[FIXED_SCRATCH]
3888 = call_used_regs[FIXED_SCRATCH]
3889 = call_really_used_regs[FIXED_SCRATCH] = 1;
3892 if (! TARGET_ALTIVEC)
3894 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
3895 fixed_regs[i] = call_used_regs[i] = call_really_used_regs[i] = 1;
3896 call_really_used_regs[VRSAVE_REGNO] = 1;
3899 if (TARGET_ALTIVEC_ABI)
3900 for (i = FIRST_ALTIVEC_REGNO; i < FIRST_ALTIVEC_REGNO + 20; ++i)
3901 call_used_regs[i] = call_really_used_regs[i] = 1;
3904 /* Try to output insns to set TARGET equal to the constant C if it can
3905 be done in less than N insns. Do all computations in MODE.
3906 Returns the place where the output has been placed if it can be
3907 done and the insns have been emitted. If it would take more than N
3908 insns, zero is returned and no insns and emitted. */
3911 rs6000_emit_set_const (rtx dest, enum machine_mode mode,
3912 rtx source, int n ATTRIBUTE_UNUSED)
3914 rtx result, insn, set;
3915 HOST_WIDE_INT c0, c1;
3917 if (mode == QImode || mode == HImode)
3920 dest = gen_reg_rtx (mode);
3921 emit_insn (gen_rtx_SET (VOIDmode, dest, source));
3924 else if (mode == SImode)
3926 result = no_new_pseudos ? dest : gen_reg_rtx (SImode);
3928 emit_insn (gen_rtx_SET (VOIDmode, result,
3929 GEN_INT (INTVAL (source)
3930 & (~ (HOST_WIDE_INT) 0xffff))));
3931 emit_insn (gen_rtx_SET (VOIDmode, dest,
3932 gen_rtx_IOR (SImode, result,
3933 GEN_INT (INTVAL (source) & 0xffff))));
3936 else if (mode == DImode)
3938 if (GET_CODE (source) == CONST_INT)
3940 c0 = INTVAL (source);
3943 else if (GET_CODE (source) == CONST_DOUBLE)
3945 #if HOST_BITS_PER_WIDE_INT >= 64
3946 c0 = CONST_DOUBLE_LOW (source);
3949 c0 = CONST_DOUBLE_LOW (source);
3950 c1 = CONST_DOUBLE_HIGH (source);
3956 result = rs6000_emit_set_long_const (dest, c0, c1);
3961 insn = get_last_insn ();
3962 set = single_set (insn);
3963 if (! CONSTANT_P (SET_SRC (set)))
3964 set_unique_reg_note (insn, REG_EQUAL, source);
3969 /* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
3970 fall back to a straight forward decomposition. We do this to avoid
3971 exponential run times encountered when looking for longer sequences
3972 with rs6000_emit_set_const. */
3974 rs6000_emit_set_long_const (rtx dest, HOST_WIDE_INT c1, HOST_WIDE_INT c2)
3976 if (!TARGET_POWERPC64)
3978 rtx operand1, operand2;
3980 operand1 = operand_subword_force (dest, WORDS_BIG_ENDIAN == 0,
3982 operand2 = operand_subword_force (dest, WORDS_BIG_ENDIAN != 0,
3984 emit_move_insn (operand1, GEN_INT (c1));
3985 emit_move_insn (operand2, GEN_INT (c2));
3989 HOST_WIDE_INT ud1, ud2, ud3, ud4;
3992 ud2 = (c1 & 0xffff0000) >> 16;
3993 #if HOST_BITS_PER_WIDE_INT >= 64
3997 ud4 = (c2 & 0xffff0000) >> 16;
3999 if ((ud4 == 0xffff && ud3 == 0xffff && ud2 == 0xffff && (ud1 & 0x8000))
4000 || (ud4 == 0 && ud3 == 0 && ud2 == 0 && ! (ud1 & 0x8000)))
4003 emit_move_insn (dest, GEN_INT (((ud1 ^ 0x8000) - 0x8000)));
4005 emit_move_insn (dest, GEN_INT (ud1));
4008 else if ((ud4 == 0xffff && ud3 == 0xffff && (ud2 & 0x8000))
4009 || (ud4 == 0 && ud3 == 0 && ! (ud2 & 0x8000)))
4012 emit_move_insn (dest, GEN_INT (((ud2 << 16) ^ 0x80000000)
4015 emit_move_insn (dest, GEN_INT (ud2 << 16));
4017 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
4019 else if ((ud4 == 0xffff && (ud3 & 0x8000))
4020 || (ud4 == 0 && ! (ud3 & 0x8000)))
4023 emit_move_insn (dest, GEN_INT (((ud3 << 16) ^ 0x80000000)
4026 emit_move_insn (dest, GEN_INT (ud3 << 16));
4029 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud2)));
4030 emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (16)));
4032 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
4037 emit_move_insn (dest, GEN_INT (((ud4 << 16) ^ 0x80000000)
4040 emit_move_insn (dest, GEN_INT (ud4 << 16));
4043 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud3)));
4045 emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (32)));
4047 emit_move_insn (dest, gen_rtx_IOR (DImode, dest,
4048 GEN_INT (ud2 << 16)));
4050 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
4056 /* Helper for the following. Get rid of [r+r] memory refs
4057 in cases where it won't work (TImode, TFmode). */
4060 rs6000_eliminate_indexed_memrefs (rtx operands[2])
4062 if (GET_CODE (operands[0]) == MEM
4063 && GET_CODE (XEXP (operands[0], 0)) != REG
4064 && ! reload_in_progress)
4066 = replace_equiv_address (operands[0],
4067 copy_addr_to_reg (XEXP (operands[0], 0)));
4069 if (GET_CODE (operands[1]) == MEM
4070 && GET_CODE (XEXP (operands[1], 0)) != REG
4071 && ! reload_in_progress)
4073 = replace_equiv_address (operands[1],
4074 copy_addr_to_reg (XEXP (operands[1], 0)));
4077 /* Emit a move from SOURCE to DEST in mode MODE. */
4079 rs6000_emit_move (rtx dest, rtx source, enum machine_mode mode)
4083 operands[1] = source;
4085 /* Sanity checks. Check that we get CONST_DOUBLE only when we should. */
4086 if (GET_CODE (operands[1]) == CONST_DOUBLE
4087 && ! FLOAT_MODE_P (mode)
4088 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
4090 /* FIXME. This should never happen. */
4091 /* Since it seems that it does, do the safe thing and convert
4093 operands[1] = gen_int_mode (CONST_DOUBLE_LOW (operands[1]), mode);
4095 if (GET_CODE (operands[1]) == CONST_DOUBLE
4096 && ! FLOAT_MODE_P (mode)
4097 && ((CONST_DOUBLE_HIGH (operands[1]) == 0
4098 && CONST_DOUBLE_LOW (operands[1]) >= 0)
4099 || (CONST_DOUBLE_HIGH (operands[1]) == -1
4100 && CONST_DOUBLE_LOW (operands[1]) < 0)))
4103 /* Check if GCC is setting up a block move that will end up using FP
4104 registers as temporaries. We must make sure this is acceptable. */
4105 if (GET_CODE (operands[0]) == MEM
4106 && GET_CODE (operands[1]) == MEM
4108 && (SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[0]))
4109 || SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[1])))
4110 && ! (SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[0]) > 32
4111 ? 32 : MEM_ALIGN (operands[0])))
4112 || SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[1]) > 32
4114 : MEM_ALIGN (operands[1]))))
4115 && ! MEM_VOLATILE_P (operands [0])
4116 && ! MEM_VOLATILE_P (operands [1]))
4118 emit_move_insn (adjust_address (operands[0], SImode, 0),
4119 adjust_address (operands[1], SImode, 0));
4120 emit_move_insn (adjust_address (operands[0], SImode, 4),
4121 adjust_address (operands[1], SImode, 4));
4125 if (!no_new_pseudos)
4127 if (GET_CODE (operands[1]) == MEM && optimize > 0
4128 && (mode == QImode || mode == HImode || mode == SImode)
4129 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode))
4131 rtx reg = gen_reg_rtx (word_mode);
4133 emit_insn (gen_rtx_SET (word_mode, reg,
4134 gen_rtx_ZERO_EXTEND (word_mode,
4136 operands[1] = gen_lowpart (mode, reg);
4138 if (GET_CODE (operands[0]) != REG)
4139 operands[1] = force_reg (mode, operands[1]);
4142 if (mode == SFmode && ! TARGET_POWERPC
4143 && TARGET_HARD_FLOAT && TARGET_FPRS
4144 && GET_CODE (operands[0]) == MEM)
4148 if (reload_in_progress || reload_completed)
4149 regnum = true_regnum (operands[1]);
4150 else if (GET_CODE (operands[1]) == REG)
4151 regnum = REGNO (operands[1]);
4155 /* If operands[1] is a register, on POWER it may have
4156 double-precision data in it, so truncate it to single
4158 if (FP_REGNO_P (regnum) || regnum >= FIRST_PSEUDO_REGISTER)
4161 newreg = (no_new_pseudos ? operands[1] : gen_reg_rtx (mode));
4162 emit_insn (gen_aux_truncdfsf2 (newreg, operands[1]));
4163 operands[1] = newreg;
4167 /* Recognize the case where operand[1] is a reference to thread-local
4168 data and load its address to a register. */
4169 if (GET_CODE (operands[1]) == SYMBOL_REF)
4171 enum tls_model model = SYMBOL_REF_TLS_MODEL (operands[1]);
4173 operands[1] = rs6000_legitimize_tls_address (operands[1], model);
4176 /* Handle the case where reload calls us with an invalid address. */
4177 if (reload_in_progress && mode == Pmode
4178 && (! general_operand (operands[1], mode)
4179 || ! nonimmediate_operand (operands[0], mode)))
4182 /* 128-bit constant floating-point values on Darwin should really be
4183 loaded as two parts. */
4184 if ((DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_DARWIN)
4185 && TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_LONG_DOUBLE_128
4186 && mode == TFmode && GET_CODE (operands[1]) == CONST_DOUBLE)
4188 /* DImode is used, not DFmode, because simplify_gen_subreg doesn't
4189 know how to get a DFmode SUBREG of a TFmode. */
4190 rs6000_emit_move (simplify_gen_subreg (DImode, operands[0], mode, 0),
4191 simplify_gen_subreg (DImode, operands[1], mode, 0),
4193 rs6000_emit_move (simplify_gen_subreg (DImode, operands[0], mode,
4194 GET_MODE_SIZE (DImode)),
4195 simplify_gen_subreg (DImode, operands[1], mode,
4196 GET_MODE_SIZE (DImode)),
4201 /* FIXME: In the long term, this switch statement should go away
4202 and be replaced by a sequence of tests based on things like
4208 if (CONSTANT_P (operands[1])
4209 && GET_CODE (operands[1]) != CONST_INT)
4210 operands[1] = force_const_mem (mode, operands[1]);
4214 rs6000_eliminate_indexed_memrefs (operands);
4219 if (CONSTANT_P (operands[1])
4220 && ! easy_fp_constant (operands[1], mode))
4221 operands[1] = force_const_mem (mode, operands[1]);
4232 if (CONSTANT_P (operands[1])
4233 && !easy_vector_constant (operands[1], mode))
4234 operands[1] = force_const_mem (mode, operands[1]);
4239 /* Use default pattern for address of ELF small data */
4242 && DEFAULT_ABI == ABI_V4
4243 && (GET_CODE (operands[1]) == SYMBOL_REF
4244 || GET_CODE (operands[1]) == CONST)
4245 && small_data_operand (operands[1], mode))
4247 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
4251 if (DEFAULT_ABI == ABI_V4
4252 && mode == Pmode && mode == SImode
4253 && flag_pic == 1 && got_operand (operands[1], mode))
4255 emit_insn (gen_movsi_got (operands[0], operands[1]));
4259 if ((TARGET_ELF || DEFAULT_ABI == ABI_DARWIN)
4263 && CONSTANT_P (operands[1])
4264 && GET_CODE (operands[1]) != HIGH
4265 && GET_CODE (operands[1]) != CONST_INT)
4267 rtx target = (no_new_pseudos ? operands[0] : gen_reg_rtx (mode));
4269 /* If this is a function address on -mcall-aixdesc,
4270 convert it to the address of the descriptor. */
4271 if (DEFAULT_ABI == ABI_AIX
4272 && GET_CODE (operands[1]) == SYMBOL_REF
4273 && XSTR (operands[1], 0)[0] == '.')
4275 const char *name = XSTR (operands[1], 0);
4277 while (*name == '.')
4279 new_ref = gen_rtx_SYMBOL_REF (Pmode, name);
4280 CONSTANT_POOL_ADDRESS_P (new_ref)
4281 = CONSTANT_POOL_ADDRESS_P (operands[1]);
4282 SYMBOL_REF_FLAGS (new_ref) = SYMBOL_REF_FLAGS (operands[1]);
4283 SYMBOL_REF_USED (new_ref) = SYMBOL_REF_USED (operands[1]);
4284 SYMBOL_REF_DECL (new_ref) = SYMBOL_REF_DECL (operands[1]);
4285 operands[1] = new_ref;
4288 if (DEFAULT_ABI == ABI_DARWIN)
4291 if (MACHO_DYNAMIC_NO_PIC_P)
4293 /* Take care of any required data indirection. */
4294 operands[1] = rs6000_machopic_legitimize_pic_address (
4295 operands[1], mode, operands[0]);
4296 if (operands[0] != operands[1])
4297 emit_insn (gen_rtx_SET (VOIDmode,
4298 operands[0], operands[1]));
4302 emit_insn (gen_macho_high (target, operands[1]));
4303 emit_insn (gen_macho_low (operands[0], target, operands[1]));
4307 emit_insn (gen_elf_high (target, operands[1]));
4308 emit_insn (gen_elf_low (operands[0], target, operands[1]));
4312 /* If this is a SYMBOL_REF that refers to a constant pool entry,
4313 and we have put it in the TOC, we just need to make a TOC-relative
4316 && GET_CODE (operands[1]) == SYMBOL_REF
4317 && constant_pool_expr_p (operands[1])
4318 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands[1]),
4319 get_pool_mode (operands[1])))
4321 operands[1] = create_TOC_reference (operands[1]);
4323 else if (mode == Pmode
4324 && CONSTANT_P (operands[1])
4325 && ((GET_CODE (operands[1]) != CONST_INT
4326 && ! easy_fp_constant (operands[1], mode))
4327 || (GET_CODE (operands[1]) == CONST_INT
4328 && num_insns_constant (operands[1], mode) > 2)
4329 || (GET_CODE (operands[0]) == REG
4330 && FP_REGNO_P (REGNO (operands[0]))))
4331 && GET_CODE (operands[1]) != HIGH
4332 && ! legitimate_constant_pool_address_p (operands[1])
4333 && ! toc_relative_expr_p (operands[1]))
4335 /* Emit a USE operation so that the constant isn't deleted if
4336 expensive optimizations are turned on because nobody
4337 references it. This should only be done for operands that
4338 contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
4339 This should not be done for operands that contain LABEL_REFs.
4340 For now, we just handle the obvious case. */
4341 if (GET_CODE (operands[1]) != LABEL_REF)
4342 emit_insn (gen_rtx_USE (VOIDmode, operands[1]));
4345 /* Darwin uses a special PIC legitimizer. */
4346 if (DEFAULT_ABI == ABI_DARWIN && MACHOPIC_INDIRECT)
4349 rs6000_machopic_legitimize_pic_address (operands[1], mode,
4351 if (operands[0] != operands[1])
4352 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
4357 /* If we are to limit the number of things we put in the TOC and
4358 this is a symbol plus a constant we can add in one insn,
4359 just put the symbol in the TOC and add the constant. Don't do
4360 this if reload is in progress. */
4361 if (GET_CODE (operands[1]) == CONST
4362 && TARGET_NO_SUM_IN_TOC && ! reload_in_progress
4363 && GET_CODE (XEXP (operands[1], 0)) == PLUS
4364 && add_operand (XEXP (XEXP (operands[1], 0), 1), mode)
4365 && (GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == LABEL_REF
4366 || GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF)
4367 && ! side_effects_p (operands[0]))
4370 force_const_mem (mode, XEXP (XEXP (operands[1], 0), 0));
4371 rtx other = XEXP (XEXP (operands[1], 0), 1);
4373 sym = force_reg (mode, sym);
4375 emit_insn (gen_addsi3 (operands[0], sym, other));
4377 emit_insn (gen_adddi3 (operands[0], sym, other));
4381 operands[1] = force_const_mem (mode, operands[1]);
4384 && constant_pool_expr_p (XEXP (operands[1], 0))
4385 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
4386 get_pool_constant (XEXP (operands[1], 0)),
4387 get_pool_mode (XEXP (operands[1], 0))))
4390 = gen_rtx_MEM (mode,
4391 create_TOC_reference (XEXP (operands[1], 0)));
4392 set_mem_alias_set (operands[1], get_TOC_alias_set ());
4393 RTX_UNCHANGING_P (operands[1]) = 1;
4399 rs6000_eliminate_indexed_memrefs (operands);
4403 emit_insn (gen_rtx_PARALLEL (VOIDmode,
4405 gen_rtx_SET (VOIDmode,
4406 operands[0], operands[1]),
4407 gen_rtx_CLOBBER (VOIDmode,
4408 gen_rtx_SCRATCH (SImode)))));
4417 /* Above, we may have called force_const_mem which may have returned
4418 an invalid address. If we can, fix this up; otherwise, reload will
4419 have to deal with it. */
4420 if (GET_CODE (operands[1]) == MEM && ! reload_in_progress)
4421 operands[1] = validize_mem (operands[1]);
4424 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
4427 /* Nonzero if we can use a floating-point register to pass this arg. */
4428 #define USE_FP_FOR_ARG_P(CUM,MODE,TYPE) \
4429 (GET_MODE_CLASS (MODE) == MODE_FLOAT \
4430 && (CUM)->fregno <= FP_ARG_MAX_REG \
4431 && TARGET_HARD_FLOAT && TARGET_FPRS)
4433 /* Nonzero if we can use an AltiVec register to pass this arg. */
4434 #define USE_ALTIVEC_FOR_ARG_P(CUM,MODE,TYPE,NAMED) \
4435 (ALTIVEC_VECTOR_MODE (MODE) \
4436 && (CUM)->vregno <= ALTIVEC_ARG_MAX_REG \
4437 && TARGET_ALTIVEC_ABI \
4440 /* Return a nonzero value to say to return the function value in
4441 memory, just as large structures are always returned. TYPE will be
4442 the data type of the value, and FNTYPE will be the type of the
4443 function doing the returning, or @code{NULL} for libcalls.
4445 The AIX ABI for the RS/6000 specifies that all structures are
4446 returned in memory. The Darwin ABI does the same. The SVR4 ABI
4447 specifies that structures <= 8 bytes are returned in r3/r4, but a
4448 draft put them in memory, and GCC used to implement the draft
4449 instead of the final standard. Therefore, TARGET_AIX_STRUCT_RET
4450 controls this instead of DEFAULT_ABI; V.4 targets needing backward
4451 compatibility can change DRAFT_V4_STRUCT_RET to override the
4452 default, and -m switches get the final word. See
4453 rs6000_override_options for more details.
4455 The PPC32 SVR4 ABI uses IEEE double extended for long double, if 128-bit
4456 long double support is enabled. These values are returned in memory.
4458 int_size_in_bytes returns -1 for variable size objects, which go in
4459 memory always. The cast to unsigned makes -1 > 8. */
4462 rs6000_return_in_memory (tree type, tree fntype ATTRIBUTE_UNUSED)
4464 if (AGGREGATE_TYPE_P (type)
4465 && (TARGET_AIX_STRUCT_RET
4466 || (unsigned HOST_WIDE_INT) int_size_in_bytes (type) > 8))
4468 if (DEFAULT_ABI == ABI_V4 && TYPE_MODE (type) == TFmode)
4473 /* Initialize a variable CUM of type CUMULATIVE_ARGS
4474 for a call to a function whose data type is FNTYPE.
4475 For a library call, FNTYPE is 0.
4477 For incoming args we set the number of arguments in the prototype large
4478 so we never return a PARALLEL. */
4481 init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype,
4482 rtx libname ATTRIBUTE_UNUSED, int incoming,
4483 int libcall, int n_named_args)
4485 static CUMULATIVE_ARGS zero_cumulative;
4487 *cum = zero_cumulative;
4489 cum->fregno = FP_ARG_MIN_REG;
4490 cum->vregno = ALTIVEC_ARG_MIN_REG;
4491 cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
4492 cum->call_cookie = ((DEFAULT_ABI == ABI_V4 && libcall)
4493 ? CALL_LIBCALL : CALL_NORMAL);
4494 cum->sysv_gregno = GP_ARG_MIN_REG;
4495 cum->stdarg = fntype
4496 && (TYPE_ARG_TYPES (fntype) != 0
4497 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
4498 != void_type_node));
4500 cum->nargs_prototype = 0;
4501 if (incoming || cum->prototype)
4502 cum->nargs_prototype = n_named_args;
4504 /* Check for a longcall attribute. */
4506 && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype))
4507 && !lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype)))
4508 cum->call_cookie = CALL_LONG;
4510 if (TARGET_DEBUG_ARG)
4512 fprintf (stderr, "\ninit_cumulative_args:");
4515 tree ret_type = TREE_TYPE (fntype);
4516 fprintf (stderr, " ret code = %s,",
4517 tree_code_name[ (int)TREE_CODE (ret_type) ]);
4520 if (cum->call_cookie & CALL_LONG)
4521 fprintf (stderr, " longcall,");
4523 fprintf (stderr, " proto = %d, nargs = %d\n",
4524 cum->prototype, cum->nargs_prototype);
4529 && TARGET_ALTIVEC_ABI
4530 && ALTIVEC_VECTOR_MODE (TYPE_MODE (TREE_TYPE (fntype))))
4532 error ("Cannot return value in vector register because"
4533 " altivec instructions are disabled, use -maltivec"
4534 " to enable them.");
4538 /* Return true if TYPE must be passed on the stack and not in registers. */
4541 rs6000_must_pass_in_stack (enum machine_mode mode, tree type)
4543 if (DEFAULT_ABI == ABI_AIX || TARGET_64BIT)
4544 return must_pass_in_stack_var_size (mode, type);
4546 return must_pass_in_stack_var_size_or_pad (mode, type);
4549 /* If defined, a C expression which determines whether, and in which
4550 direction, to pad out an argument with extra space. The value
4551 should be of type `enum direction': either `upward' to pad above
4552 the argument, `downward' to pad below, or `none' to inhibit
4555 For the AIX ABI structs are always stored left shifted in their
4559 function_arg_padding (enum machine_mode mode, tree type)
4561 #ifndef AGGREGATE_PADDING_FIXED
4562 #define AGGREGATE_PADDING_FIXED 0
4564 #ifndef AGGREGATES_PAD_UPWARD_ALWAYS
4565 #define AGGREGATES_PAD_UPWARD_ALWAYS 0
4568 if (!AGGREGATE_PADDING_FIXED)
4570 /* GCC used to pass structures of the same size as integer types as
4571 if they were in fact integers, ignoring FUNCTION_ARG_PADDING.
4572 ie. Structures of size 1 or 2 (or 4 when TARGET_64BIT) were
4573 passed padded downward, except that -mstrict-align further
4574 muddied the water in that multi-component structures of 2 and 4
4575 bytes in size were passed padded upward.
4577 The following arranges for best compatibility with previous
4578 versions of gcc, but removes the -mstrict-align dependency. */
4579 if (BYTES_BIG_ENDIAN)
4581 HOST_WIDE_INT size = 0;
4583 if (mode == BLKmode)
4585 if (type && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
4586 size = int_size_in_bytes (type);
4589 size = GET_MODE_SIZE (mode);
4591 if (size == 1 || size == 2 || size == 4)
4597 if (AGGREGATES_PAD_UPWARD_ALWAYS)
4599 if (type != 0 && AGGREGATE_TYPE_P (type))
4603 /* SFmode parameters are padded upwards. */
4607 /* Fall back to the default. */
4608 return DEFAULT_FUNCTION_ARG_PADDING (mode, type);
4611 /* If defined, a C expression that gives the alignment boundary, in bits,
4612 of an argument with the specified mode and type. If it is not defined,
4613 PARM_BOUNDARY is used for all arguments.
4615 V.4 wants long longs to be double word aligned. */
4618 function_arg_boundary (enum machine_mode mode, tree type ATTRIBUTE_UNUSED)
4620 if (DEFAULT_ABI == ABI_V4 && GET_MODE_SIZE (mode) == 8)
4622 else if (SPE_VECTOR_MODE (mode))
4624 else if (ALTIVEC_VECTOR_MODE (mode))
4627 return PARM_BOUNDARY;
4630 /* Compute the size (in words) of a function argument. */
4632 static unsigned long
4633 rs6000_arg_size (enum machine_mode mode, tree type)
4637 if (mode != BLKmode)
4638 size = GET_MODE_SIZE (mode);
4640 size = int_size_in_bytes (type);
4643 return (size + 3) >> 2;
4645 return (size + 7) >> 3;
4648 /* Update the data in CUM to advance over an argument
4649 of mode MODE and data type TYPE.
4650 (TYPE is null for libcalls where that information may not be available.)
4652 Note that for args passed by reference, function_arg will be called
4653 with MODE and TYPE set to that of the pointer to the arg, not the arg
4657 function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
4658 tree type, int named)
4660 cum->nargs_prototype--;
4662 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
4666 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named))
4669 if (!TARGET_ALTIVEC)
4670 error ("Cannot pass argument in vector register because"
4671 " altivec instructions are disabled, use -maltivec"
4672 " to enable them.");
4674 /* PowerPC64 Linux and AIX allocate GPRs for a vector argument
4675 even if it is going to be passed in a vector register.
4676 Darwin does the same for variable-argument functions. */
4677 if ((DEFAULT_ABI == ABI_AIX && TARGET_64BIT)
4678 || (cum->stdarg && DEFAULT_ABI != ABI_V4))
4688 /* Vector parameters must be 16-byte aligned. This places
4689 them at 2 mod 4 in terms of words in 32-bit mode, since
4690 the parameter save area starts at offset 24 from the
4691 stack. In 64-bit mode, they just have to start on an
4692 even word, since the parameter save area is 16-byte
4693 aligned. Space for GPRs is reserved even if the argument
4694 will be passed in memory. */
4696 align = (2 - cum->words) & 3;
4698 align = cum->words & 1;
4699 cum->words += align + rs6000_arg_size (mode, type);
4701 if (TARGET_DEBUG_ARG)
4703 fprintf (stderr, "function_adv: words = %2d, align=%d, ",
4705 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s\n",
4706 cum->nargs_prototype, cum->prototype,
4707 GET_MODE_NAME (mode));
4711 else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode)
4713 && cum->sysv_gregno <= GP_ARG_MAX_REG)
4715 else if (DEFAULT_ABI == ABI_V4)
4717 if (TARGET_HARD_FLOAT && TARGET_FPRS
4718 && (mode == SFmode || mode == DFmode))
4720 if (cum->fregno <= FP_ARG_V4_MAX_REG)
4725 cum->words += cum->words & 1;
4726 cum->words += rs6000_arg_size (mode, type);
4731 int n_words = rs6000_arg_size (mode, type);
4732 int gregno = cum->sysv_gregno;
4734 /* Long long and SPE vectors are put in (r3,r4), (r5,r6),
4735 (r7,r8) or (r9,r10). As does any other 2 word item such
4736 as complex int due to a historical mistake. */
4738 gregno += (1 - gregno) & 1;
4740 /* Multi-reg args are not split between registers and stack. */
4741 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
4743 /* Long long and SPE vectors are aligned on the stack.
4744 So are other 2 word items such as complex int due to
4745 a historical mistake. */
4747 cum->words += cum->words & 1;
4748 cum->words += n_words;
4751 /* Note: continuing to accumulate gregno past when we've started
4752 spilling to the stack indicates the fact that we've started
4753 spilling to the stack to expand_builtin_saveregs. */
4754 cum->sysv_gregno = gregno + n_words;
4757 if (TARGET_DEBUG_ARG)
4759 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
4760 cum->words, cum->fregno);
4761 fprintf (stderr, "gregno = %2d, nargs = %4d, proto = %d, ",
4762 cum->sysv_gregno, cum->nargs_prototype, cum->prototype);
4763 fprintf (stderr, "mode = %4s, named = %d\n",
4764 GET_MODE_NAME (mode), named);
4769 int n_words = rs6000_arg_size (mode, type);
4770 int align = function_arg_boundary (mode, type) / PARM_BOUNDARY - 1;
4772 /* The simple alignment calculation here works because
4773 function_arg_boundary / PARM_BOUNDARY will only be 1 or 2.
4774 If we ever want to handle alignments larger than 8 bytes for
4775 32-bit or 16 bytes for 64-bit, then we'll need to take into
4776 account the offset to the start of the parm save area. */
4777 align &= cum->words;
4778 cum->words += align + n_words;
4780 if (GET_MODE_CLASS (mode) == MODE_FLOAT
4781 && TARGET_HARD_FLOAT && TARGET_FPRS)
4782 cum->fregno += (GET_MODE_SIZE (mode) + 7) >> 3;
4784 if (TARGET_DEBUG_ARG)
4786 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
4787 cum->words, cum->fregno);
4788 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s, ",
4789 cum->nargs_prototype, cum->prototype, GET_MODE_NAME (mode));
4790 fprintf (stderr, "named = %d, align = %d\n", named, align);
4795 /* Determine where to put a SIMD argument on the SPE. */
4798 rs6000_spe_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
4803 int gregno = cum->sysv_gregno;
4804 int n_words = rs6000_arg_size (mode, type);
4806 /* SPE vectors are put in odd registers. */
4807 if (n_words == 2 && (gregno & 1) == 0)
4810 if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
4813 enum machine_mode m = SImode;
4815 r1 = gen_rtx_REG (m, gregno);
4816 r1 = gen_rtx_EXPR_LIST (m, r1, const0_rtx);
4817 r2 = gen_rtx_REG (m, gregno + 1);
4818 r2 = gen_rtx_EXPR_LIST (m, r2, GEN_INT (4));
4819 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
4826 if (cum->sysv_gregno <= GP_ARG_MAX_REG)
4827 return gen_rtx_REG (mode, cum->sysv_gregno);
4833 /* Determine where to place an argument in 64-bit mode with 32-bit ABI. */
4836 rs6000_mixed_function_arg (enum machine_mode mode, tree type, int align_words)
4840 rtx rvec[GP_ARG_NUM_REG + 1];
4842 if (align_words >= GP_ARG_NUM_REG)
4845 n_units = rs6000_arg_size (mode, type);
4847 /* Optimize the simple case where the arg fits in one gpr, except in
4848 the case of BLKmode due to assign_parms assuming that registers are
4849 BITS_PER_WORD wide. */
4851 || (n_units == 1 && mode != BLKmode))
4852 return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
4855 if (align_words + n_units > GP_ARG_NUM_REG)
4856 /* Not all of the arg fits in gprs. Say that it goes in memory too,
4857 using a magic NULL_RTX component.
4858 FIXME: This is not strictly correct. Only some of the arg
4859 belongs in memory, not all of it. However, there isn't any way
4860 to do this currently, apart from building rtx descriptions for
4861 the pieces of memory we want stored. Due to bugs in the generic
4862 code we can't use the normal function_arg_partial_nregs scheme
4863 with the PARALLEL arg description we emit here.
4864 In any case, the code to store the whole arg to memory is often
4865 more efficient than code to store pieces, and we know that space
4866 is available in the right place for the whole arg. */
4867 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
4872 rtx r = gen_rtx_REG (SImode, GP_ARG_MIN_REG + align_words);
4873 rtx off = GEN_INT (i++ * 4);
4874 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, off);
4876 while (++align_words < GP_ARG_NUM_REG && --n_units != 0);
4878 return gen_rtx_PARALLEL (mode, gen_rtvec_v (k, rvec));
4881 /* Determine where to put an argument to a function.
4882 Value is zero to push the argument on the stack,
4883 or a hard register in which to store the argument.
4885 MODE is the argument's machine mode.
4886 TYPE is the data type of the argument (as a tree).
4887 This is null for libcalls where that information may
4889 CUM is a variable of type CUMULATIVE_ARGS which gives info about
4890 the preceding args and about the function being called.
4891 NAMED is nonzero if this argument is a named parameter
4892 (otherwise it is an extra parameter matching an ellipsis).
4894 On RS/6000 the first eight words of non-FP are normally in registers
4895 and the rest are pushed. Under AIX, the first 13 FP args are in registers.
4896 Under V.4, the first 8 FP args are in registers.
4898 If this is floating-point and no prototype is specified, we use
4899 both an FP and integer register (or possibly FP reg and stack). Library
4900 functions (when CALL_LIBCALL is set) always have the proper types for args,
4901 so we can pass the FP value just in one register. emit_library_function
4902 doesn't support PARALLEL anyway.
4904 Note that for args passed by reference, function_arg will be called
4905 with MODE and TYPE set to that of the pointer to the arg, not the arg
4909 function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
4910 tree type, int named)
4912 enum rs6000_abi abi = DEFAULT_ABI;
4914 /* Return a marker to indicate whether CR1 needs to set or clear the
4915 bit that V.4 uses to say fp args were passed in registers.
4916 Assume that we don't need the marker for software floating point,
4917 or compiler generated library calls. */
4918 if (mode == VOIDmode)
4921 && cum->nargs_prototype < 0
4922 && (cum->call_cookie & CALL_LIBCALL) == 0
4923 && (cum->prototype || TARGET_NO_PROTOTYPE))
4925 /* For the SPE, we need to crxor CR6 always. */
4927 return GEN_INT (cum->call_cookie | CALL_V4_SET_FP_ARGS);
4928 else if (TARGET_HARD_FLOAT && TARGET_FPRS)
4929 return GEN_INT (cum->call_cookie
4930 | ((cum->fregno == FP_ARG_MIN_REG)
4931 ? CALL_V4_SET_FP_ARGS
4932 : CALL_V4_CLEAR_FP_ARGS));
4935 return GEN_INT (cum->call_cookie);
4938 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named))
4939 if (TARGET_64BIT && ! cum->prototype)
4941 /* Vector parameters get passed in vector register
4942 and also in GPRs or memory, in absence of prototype. */
4945 align_words = (cum->words + 1) & ~1;
4947 if (align_words >= GP_ARG_NUM_REG)
4953 slot = gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
4955 return gen_rtx_PARALLEL (mode,
4957 gen_rtx_EXPR_LIST (VOIDmode,
4959 gen_rtx_EXPR_LIST (VOIDmode,
4960 gen_rtx_REG (mode, cum->vregno),
4964 return gen_rtx_REG (mode, cum->vregno);
4965 else if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
4967 if (named || abi == ABI_V4)
4971 /* Vector parameters to varargs functions under AIX or Darwin
4972 get passed in memory and possibly also in GPRs. */
4973 int align, align_words, n_words;
4974 enum machine_mode part_mode;
4976 /* Vector parameters must be 16-byte aligned. This places them at
4977 2 mod 4 in terms of words in 32-bit mode, since the parameter
4978 save area starts at offset 24 from the stack. In 64-bit mode,
4979 they just have to start on an even word, since the parameter
4980 save area is 16-byte aligned. */
4982 align = (2 - cum->words) & 3;
4984 align = cum->words & 1;
4985 align_words = cum->words + align;
4987 /* Out of registers? Memory, then. */
4988 if (align_words >= GP_ARG_NUM_REG)
4991 if (TARGET_32BIT && TARGET_POWERPC64)
4992 return rs6000_mixed_function_arg (mode, type, align_words);
4994 /* The vector value goes in GPRs. Only the part of the
4995 value in GPRs is reported here. */
4997 n_words = rs6000_arg_size (mode, type);
4998 if (align_words + n_words > GP_ARG_NUM_REG)
4999 /* Fortunately, there are only two possibilities, the value
5000 is either wholly in GPRs or half in GPRs and half not. */
5003 return gen_rtx_REG (part_mode, GP_ARG_MIN_REG + align_words);
5006 else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode))
5007 return rs6000_spe_function_arg (cum, mode, type);
5008 else if (abi == ABI_V4)
5010 if (TARGET_HARD_FLOAT && TARGET_FPRS
5011 && (mode == SFmode || mode == DFmode))
5013 if (cum->fregno <= FP_ARG_V4_MAX_REG)
5014 return gen_rtx_REG (mode, cum->fregno);
5020 int n_words = rs6000_arg_size (mode, type);
5021 int gregno = cum->sysv_gregno;
5023 /* Long long and SPE vectors are put in (r3,r4), (r5,r6),
5024 (r7,r8) or (r9,r10). As does any other 2 word item such
5025 as complex int due to a historical mistake. */
5027 gregno += (1 - gregno) & 1;
5029 /* Multi-reg args are not split between registers and stack. */
5030 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
5033 if (TARGET_32BIT && TARGET_POWERPC64)
5034 return rs6000_mixed_function_arg (mode, type,
5035 gregno - GP_ARG_MIN_REG);
5036 return gen_rtx_REG (mode, gregno);
5041 int align = function_arg_boundary (mode, type) / PARM_BOUNDARY - 1;
5042 int align_words = cum->words + (cum->words & align);
5044 if (USE_FP_FOR_ARG_P (cum, mode, type))
5046 rtx rvec[GP_ARG_NUM_REG + 1];
5050 enum machine_mode fmode = mode;
5051 unsigned long n_fpreg = (GET_MODE_SIZE (mode) + 7) >> 3;
5053 if (cum->fregno + n_fpreg > FP_ARG_MAX_REG + 1)
5055 /* Currently, we only ever need one reg here because complex
5056 doubles are split. */
5057 if (cum->fregno != FP_ARG_MAX_REG || fmode != TFmode)
5060 /* Long double split over regs and memory. */
5064 /* Do we also need to pass this arg in the parameter save
5067 && (cum->nargs_prototype <= 0
5068 || (DEFAULT_ABI == ABI_AIX
5070 && align_words >= GP_ARG_NUM_REG)));
5072 if (!needs_psave && mode == fmode)
5073 return gen_rtx_REG (fmode, cum->fregno);
5078 /* Describe the part that goes in gprs or the stack.
5079 This piece must come first, before the fprs. */
5080 if (align_words < GP_ARG_NUM_REG)
5082 unsigned long n_words = rs6000_arg_size (mode, type);
5084 if (align_words + n_words > GP_ARG_NUM_REG
5085 || (TARGET_32BIT && TARGET_POWERPC64))
5087 /* If this is partially on the stack, then we only
5088 include the portion actually in registers here. */
5089 enum machine_mode rmode = TARGET_32BIT ? SImode : DImode;
5093 r = gen_rtx_REG (rmode,
5094 GP_ARG_MIN_REG + align_words);
5095 off = GEN_INT (k * GET_MODE_SIZE (rmode));
5096 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, off);
5098 while (++align_words < GP_ARG_NUM_REG && --n_words != 0);
5102 /* The whole arg fits in gprs. */
5103 r = gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
5104 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, const0_rtx);
5108 /* It's entirely in memory. */
5109 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
5112 /* Describe where this piece goes in the fprs. */
5113 r = gen_rtx_REG (fmode, cum->fregno);
5114 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, const0_rtx);
5116 return gen_rtx_PARALLEL (mode, gen_rtvec_v (k, rvec));
5118 else if (align_words < GP_ARG_NUM_REG)
5120 if (TARGET_32BIT && TARGET_POWERPC64)
5121 return rs6000_mixed_function_arg (mode, type, align_words);
5123 return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
5130 /* For an arg passed partly in registers and partly in memory, this is
5131 the number of registers used. For args passed entirely in registers
5132 or entirely in memory, zero. When an arg is described by a PARALLEL,
5133 perhaps using more than one register type, this function returns the
5134 number of registers used by the first element of the PARALLEL. */
5137 function_arg_partial_nregs (CUMULATIVE_ARGS *cum, enum machine_mode mode,
5138 tree type, int named)
5145 if (DEFAULT_ABI == ABI_V4)
5148 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named)
5149 && cum->nargs_prototype >= 0)
5152 align = function_arg_boundary (mode, type) / PARM_BOUNDARY - 1;
5153 parm_offset = TARGET_32BIT ? 2 : 0;
5154 align_words = cum->words + ((parm_offset - cum->words) & align);
5156 if (USE_FP_FOR_ARG_P (cum, mode, type)
5157 /* If we are passing this arg in gprs as well, then this function
5158 should return the number of gprs (or memory) partially passed,
5159 *not* the number of fprs. */
5161 && (cum->nargs_prototype <= 0
5162 || (DEFAULT_ABI == ABI_AIX
5164 && align_words >= GP_ARG_NUM_REG))))
5166 if (cum->fregno + ((GET_MODE_SIZE (mode) + 7) >> 3) > FP_ARG_MAX_REG + 1)
5167 ret = FP_ARG_MAX_REG + 1 - cum->fregno;
5168 else if (cum->nargs_prototype >= 0)
5172 if (align_words < GP_ARG_NUM_REG
5173 && GP_ARG_NUM_REG < align_words + rs6000_arg_size (mode, type))
5174 ret = GP_ARG_NUM_REG - align_words;
5176 if (ret != 0 && TARGET_DEBUG_ARG)
5177 fprintf (stderr, "function_arg_partial_nregs: %d\n", ret);
5182 /* A C expression that indicates when an argument must be passed by
5183 reference. If nonzero for an argument, a copy of that argument is
5184 made in memory and a pointer to the argument is passed instead of
5185 the argument itself. The pointer is passed in whatever way is
5186 appropriate for passing a pointer to that type.
5188 Under V.4, aggregates and long double are passed by reference.
5190 As an extension to all 32-bit ABIs, AltiVec vectors are passed by
5191 reference unless the AltiVec vector extension ABI is in force.
5193 As an extension to all ABIs, variable sized types are passed by
5197 rs6000_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
5198 enum machine_mode mode ATTRIBUTE_UNUSED,
5199 tree type, bool named ATTRIBUTE_UNUSED)
5201 if ((DEFAULT_ABI == ABI_V4
5202 && ((type && AGGREGATE_TYPE_P (type))
5204 || (TARGET_32BIT && !TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
5205 || (type && int_size_in_bytes (type) < 0))
5207 if (TARGET_DEBUG_ARG)
5208 fprintf (stderr, "function_arg_pass_by_reference\n");
5216 rs6000_move_block_from_reg (int regno, rtx x, int nregs)
5219 enum machine_mode reg_mode = TARGET_32BIT ? SImode : DImode;
5224 for (i = 0; i < nregs; i++)
5226 rtx tem = adjust_address_nv (x, reg_mode, i*GET_MODE_SIZE(reg_mode));
5227 if (reload_completed)
5229 if (! strict_memory_address_p (reg_mode, XEXP (tem, 0)))
5232 tem = simplify_gen_subreg (reg_mode, x, BLKmode,
5233 i * GET_MODE_SIZE(reg_mode));
5236 tem = replace_equiv_address (tem, XEXP (tem, 0));
5238 if (tem == NULL_RTX)
5241 emit_move_insn (tem, gen_rtx_REG (reg_mode, regno + i));
5246 /* Perform any needed actions needed for a function that is receiving a
5247 variable number of arguments.
5251 MODE and TYPE are the mode and type of the current parameter.
5253 PRETEND_SIZE is a variable that should be set to the amount of stack
5254 that must be pushed by the prolog to pretend that our caller pushed
5257 Normally, this macro will push all remaining incoming registers on the
5258 stack and set PRETEND_SIZE to the length of the registers pushed. */
5261 setup_incoming_varargs (CUMULATIVE_ARGS *cum, enum machine_mode mode,
5262 tree type, int *pretend_size ATTRIBUTE_UNUSED, int no_rtl)
5264 CUMULATIVE_ARGS next_cum;
5265 int reg_size = TARGET_32BIT ? 4 : 8;
5266 rtx save_area = NULL_RTX, mem;
5267 int first_reg_offset, set;
5269 /* Skip the last named argument. */
5271 function_arg_advance (&next_cum, mode, type, 1);
5273 if (DEFAULT_ABI == ABI_V4)
5275 /* Indicate to allocate space on the stack for varargs save area. */
5276 cfun->machine->sysv_varargs_p = 1;
5278 save_area = plus_constant (virtual_stack_vars_rtx,
5279 - RS6000_VARARGS_SIZE);
5281 first_reg_offset = next_cum.sysv_gregno - GP_ARG_MIN_REG;
5285 first_reg_offset = next_cum.words;
5286 save_area = virtual_incoming_args_rtx;
5287 cfun->machine->sysv_varargs_p = 0;
5289 if (targetm.calls.must_pass_in_stack (mode, type))
5290 first_reg_offset += rs6000_arg_size (TYPE_MODE (type), type);
5293 set = get_varargs_alias_set ();
5294 if (! no_rtl && first_reg_offset < GP_ARG_NUM_REG)
5296 mem = gen_rtx_MEM (BLKmode,
5297 plus_constant (save_area,
5298 first_reg_offset * reg_size)),
5299 set_mem_alias_set (mem, set);
5300 set_mem_align (mem, BITS_PER_WORD);
5302 rs6000_move_block_from_reg (GP_ARG_MIN_REG + first_reg_offset, mem,
5303 GP_ARG_NUM_REG - first_reg_offset);
5306 /* Save FP registers if needed. */
5307 if (DEFAULT_ABI == ABI_V4
5308 && TARGET_HARD_FLOAT && TARGET_FPRS
5310 && next_cum.fregno <= FP_ARG_V4_MAX_REG)
5312 int fregno = next_cum.fregno;
5313 rtx cr1 = gen_rtx_REG (CCmode, CR1_REGNO);
5314 rtx lab = gen_label_rtx ();
5315 int off = (GP_ARG_NUM_REG * reg_size) + ((fregno - FP_ARG_MIN_REG) * 8);
5317 emit_jump_insn (gen_rtx_SET (VOIDmode,
5319 gen_rtx_IF_THEN_ELSE (VOIDmode,
5320 gen_rtx_NE (VOIDmode, cr1,
5322 gen_rtx_LABEL_REF (VOIDmode, lab),
5325 while (fregno <= FP_ARG_V4_MAX_REG)
5327 mem = gen_rtx_MEM (DFmode, plus_constant (save_area, off));
5328 set_mem_alias_set (mem, set);
5329 emit_move_insn (mem, gen_rtx_REG (DFmode, fregno));
5338 /* Create the va_list data type. */
5341 rs6000_build_builtin_va_list (void)
5343 tree f_gpr, f_fpr, f_res, f_ovf, f_sav, record, type_decl;
5345 /* For AIX, prefer 'char *' because that's what the system
5346 header files like. */
5347 if (DEFAULT_ABI != ABI_V4)
5348 return build_pointer_type (char_type_node);
5350 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
5351 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
5353 f_gpr = build_decl (FIELD_DECL, get_identifier ("gpr"),
5354 unsigned_char_type_node);
5355 f_fpr = build_decl (FIELD_DECL, get_identifier ("fpr"),
5356 unsigned_char_type_node);
5357 /* Give the two bytes of padding a name, so that -Wpadded won't warn on
5359 f_res = build_decl (FIELD_DECL, get_identifier ("reserved"),
5360 short_unsigned_type_node);
5361 f_ovf = build_decl (FIELD_DECL, get_identifier ("overflow_arg_area"),
5363 f_sav = build_decl (FIELD_DECL, get_identifier ("reg_save_area"),
5366 DECL_FIELD_CONTEXT (f_gpr) = record;
5367 DECL_FIELD_CONTEXT (f_fpr) = record;
5368 DECL_FIELD_CONTEXT (f_res) = record;
5369 DECL_FIELD_CONTEXT (f_ovf) = record;
5370 DECL_FIELD_CONTEXT (f_sav) = record;
5372 TREE_CHAIN (record) = type_decl;
5373 TYPE_NAME (record) = type_decl;
5374 TYPE_FIELDS (record) = f_gpr;
5375 TREE_CHAIN (f_gpr) = f_fpr;
5376 TREE_CHAIN (f_fpr) = f_res;
5377 TREE_CHAIN (f_res) = f_ovf;
5378 TREE_CHAIN (f_ovf) = f_sav;
5380 layout_type (record);
5382 /* The correct type is an array type of one element. */
5383 return build_array_type (record, build_index_type (size_zero_node));
5386 /* Implement va_start. */
5389 rs6000_va_start (tree valist, rtx nextarg)
5391 HOST_WIDE_INT words, n_gpr, n_fpr;
5392 tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
5393 tree gpr, fpr, ovf, sav, t;
5395 /* Only SVR4 needs something special. */
5396 if (DEFAULT_ABI != ABI_V4)
5398 std_expand_builtin_va_start (valist, nextarg);
5402 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
5403 f_fpr = TREE_CHAIN (f_gpr);
5404 f_res = TREE_CHAIN (f_fpr);
5405 f_ovf = TREE_CHAIN (f_res);
5406 f_sav = TREE_CHAIN (f_ovf);
5408 valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
5409 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
5410 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr, NULL_TREE);
5411 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf, NULL_TREE);
5412 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav, NULL_TREE);
5414 /* Count number of gp and fp argument registers used. */
5415 words = current_function_args_info.words;
5416 n_gpr = current_function_args_info.sysv_gregno - GP_ARG_MIN_REG;
5417 n_fpr = current_function_args_info.fregno - FP_ARG_MIN_REG;
5419 if (TARGET_DEBUG_ARG)
5420 fprintf (stderr, "va_start: words = "HOST_WIDE_INT_PRINT_DEC", n_gpr = "
5421 HOST_WIDE_INT_PRINT_DEC", n_fpr = "HOST_WIDE_INT_PRINT_DEC"\n",
5422 words, n_gpr, n_fpr);
5424 t = build (MODIFY_EXPR, TREE_TYPE (gpr), gpr, build_int_2 (n_gpr, 0));
5425 TREE_SIDE_EFFECTS (t) = 1;
5426 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5428 t = build (MODIFY_EXPR, TREE_TYPE (fpr), fpr, build_int_2 (n_fpr, 0));
5429 TREE_SIDE_EFFECTS (t) = 1;
5430 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5432 /* Find the overflow area. */
5433 t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
5435 t = build (PLUS_EXPR, TREE_TYPE (ovf), t,
5436 build_int_2 (words * UNITS_PER_WORD, 0));
5437 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
5438 TREE_SIDE_EFFECTS (t) = 1;
5439 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5441 /* Find the register save area. */
5442 t = make_tree (TREE_TYPE (sav), virtual_stack_vars_rtx);
5443 t = build (PLUS_EXPR, TREE_TYPE (sav), t,
5444 build_int_2 (-RS6000_VARARGS_SIZE, -1));
5445 t = build (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
5446 TREE_SIDE_EFFECTS (t) = 1;
5447 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5450 /* Implement va_arg. */
5453 rs6000_gimplify_va_arg (tree valist, tree type, tree *pre_p, tree *post_p)
5455 tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
5456 tree gpr, fpr, ovf, sav, reg, t, u;
5457 int size, rsize, n_reg, sav_ofs, sav_scale;
5458 tree lab_false, lab_over, addr;
5460 tree ptrtype = build_pointer_type (type);
5462 if (pass_by_reference (NULL, TYPE_MODE (type), type, false))
5464 t = rs6000_gimplify_va_arg (valist, ptrtype, pre_p, post_p);
5465 return build_fold_indirect_ref (t);
5468 if (DEFAULT_ABI != ABI_V4)
5470 if (targetm.calls.split_complex_arg && TREE_CODE (type) == COMPLEX_TYPE)
5472 tree elem_type = TREE_TYPE (type);
5473 enum machine_mode elem_mode = TYPE_MODE (elem_type);
5474 int elem_size = GET_MODE_SIZE (elem_mode);
5476 if (elem_size < UNITS_PER_WORD)
5478 tree real_part, imag_part;
5479 tree post = NULL_TREE;
5481 real_part = rs6000_gimplify_va_arg (valist, elem_type, pre_p,
5483 /* Copy the value into a temporary, lest the formal temporary
5484 be reused out from under us. */
5485 real_part = get_initialized_tmp_var (real_part, pre_p, &post);
5486 append_to_statement_list (post, pre_p);
5488 imag_part = rs6000_gimplify_va_arg (valist, elem_type, pre_p,
5491 return build (COMPLEX_EXPR, type, real_part, imag_part);
5495 return std_gimplify_va_arg_expr (valist, type, pre_p, post_p);
5498 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
5499 f_fpr = TREE_CHAIN (f_gpr);
5500 f_res = TREE_CHAIN (f_fpr);
5501 f_ovf = TREE_CHAIN (f_res);
5502 f_sav = TREE_CHAIN (f_ovf);
5504 valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
5505 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
5506 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr, NULL_TREE);
5507 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf, NULL_TREE);
5508 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav, NULL_TREE);
5510 size = int_size_in_bytes (type);
5511 rsize = (size + 3) / 4;
5514 if (TARGET_HARD_FLOAT && TARGET_FPRS
5515 && (TYPE_MODE (type) == SFmode || TYPE_MODE (type) == DFmode))
5517 /* FP args go in FP registers, if present. */
5522 if (TYPE_MODE (type) == DFmode)
5527 /* Otherwise into GP registers. */
5536 /* Pull the value out of the saved registers.... */
5539 addr = create_tmp_var (ptr_type_node, "addr");
5540 DECL_POINTER_ALIAS_SET (addr) = get_varargs_alias_set ();
5542 /* AltiVec vectors never go in registers when -mabi=altivec. */
5543 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (TYPE_MODE (type)))
5547 lab_false = create_artificial_label ();
5548 lab_over = create_artificial_label ();
5550 /* Long long and SPE vectors are aligned in the registers.
5551 As are any other 2 gpr item such as complex int due to a
5552 historical mistake. */
5556 u = build2 (BIT_AND_EXPR, TREE_TYPE (reg), reg,
5557 size_int (n_reg - 1));
5558 u = build2 (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg, u);
5561 t = fold_convert (TREE_TYPE (reg), size_int (8 - n_reg + 1));
5562 t = build2 (GE_EXPR, boolean_type_node, u, t);
5563 u = build1 (GOTO_EXPR, void_type_node, lab_false);
5564 t = build3 (COND_EXPR, void_type_node, t, u, NULL_TREE);
5565 gimplify_and_add (t, pre_p);
5569 t = build2 (PLUS_EXPR, ptr_type_node, sav, size_int (sav_ofs));
5571 u = build2 (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg, size_int (n_reg));
5572 u = build1 (CONVERT_EXPR, integer_type_node, u);
5573 u = build2 (MULT_EXPR, integer_type_node, u, size_int (sav_scale));
5574 t = build2 (PLUS_EXPR, ptr_type_node, t, u);
5576 t = build2 (MODIFY_EXPR, void_type_node, addr, t);
5577 gimplify_and_add (t, pre_p);
5579 t = build1 (GOTO_EXPR, void_type_node, lab_over);
5580 gimplify_and_add (t, pre_p);
5582 t = build1 (LABEL_EXPR, void_type_node, lab_false);
5583 append_to_statement_list (t, pre_p);
5587 /* Ensure that we don't find any more args in regs.
5588 Alignment has taken care of the n_reg == 2 case. */
5589 t = build (MODIFY_EXPR, TREE_TYPE (reg), reg, size_int (8));
5590 gimplify_and_add (t, pre_p);
5594 /* ... otherwise out of the overflow area. */
5596 /* Care for on-stack alignment if needed. */
5600 t = build2 (PLUS_EXPR, TREE_TYPE (t), t, size_int (align - 1));
5601 t = build2 (BIT_AND_EXPR, TREE_TYPE (t), t, build_int_2 (-align, -1));
5603 gimplify_expr (&t, pre_p, NULL, is_gimple_val, fb_rvalue);
5605 u = build2 (MODIFY_EXPR, void_type_node, addr, t);
5606 gimplify_and_add (u, pre_p);
5608 t = build2 (PLUS_EXPR, TREE_TYPE (t), t, size_int (size));
5609 t = build2 (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
5610 gimplify_and_add (t, pre_p);
5614 t = build1 (LABEL_EXPR, void_type_node, lab_over);
5615 append_to_statement_list (t, pre_p);
5618 addr = fold_convert (ptrtype, addr);
5619 return build_fold_indirect_ref (addr);
5624 #define def_builtin(MASK, NAME, TYPE, CODE) \
5626 if ((MASK) & target_flags) \
5627 lang_hooks.builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, \
5631 /* Simple ternary operations: VECd = foo (VECa, VECb, VECc). */
5633 static const struct builtin_description bdesc_3arg[] =
5635 { MASK_ALTIVEC, CODE_FOR_altivec_vmaddfp, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP },
5636 { MASK_ALTIVEC, CODE_FOR_altivec_vmhaddshs, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS },
5637 { MASK_ALTIVEC, CODE_FOR_altivec_vmhraddshs, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS },
5638 { MASK_ALTIVEC, CODE_FOR_altivec_vmladduhm, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM},
5639 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumubm, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM },
5640 { MASK_ALTIVEC, CODE_FOR_altivec_vmsummbm, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM },
5641 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhm, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM },
5642 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshm, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM },
5643 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhs, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS },
5644 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshs, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS },
5645 { MASK_ALTIVEC, CODE_FOR_altivec_vnmsubfp, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP },
5646 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4sf, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF },
5647 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4si, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI },
5648 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_8hi, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI },
5649 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_16qi, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI },
5650 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4sf, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF },
5651 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4si, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI },
5652 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_8hi, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI },
5653 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_16qi, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI },
5654 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_16qi, "__builtin_altivec_vsldoi_16qi", ALTIVEC_BUILTIN_VSLDOI_16QI },
5655 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_8hi, "__builtin_altivec_vsldoi_8hi", ALTIVEC_BUILTIN_VSLDOI_8HI },
5656 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4si, "__builtin_altivec_vsldoi_4si", ALTIVEC_BUILTIN_VSLDOI_4SI },
5657 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4sf, "__builtin_altivec_vsldoi_4sf", ALTIVEC_BUILTIN_VSLDOI_4SF },
5660 /* DST operations: void foo (void *, const int, const char). */
5662 static const struct builtin_description bdesc_dst[] =
5664 { MASK_ALTIVEC, CODE_FOR_altivec_dst, "__builtin_altivec_dst", ALTIVEC_BUILTIN_DST },
5665 { MASK_ALTIVEC, CODE_FOR_altivec_dstt, "__builtin_altivec_dstt", ALTIVEC_BUILTIN_DSTT },
5666 { MASK_ALTIVEC, CODE_FOR_altivec_dstst, "__builtin_altivec_dstst", ALTIVEC_BUILTIN_DSTST },
5667 { MASK_ALTIVEC, CODE_FOR_altivec_dststt, "__builtin_altivec_dststt", ALTIVEC_BUILTIN_DSTSTT }
5670 /* Simple binary operations: VECc = foo (VECa, VECb). */
5672 static struct builtin_description bdesc_2arg[] =
5674 { MASK_ALTIVEC, CODE_FOR_addv16qi3, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM },
5675 { MASK_ALTIVEC, CODE_FOR_addv8hi3, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM },
5676 { MASK_ALTIVEC, CODE_FOR_addv4si3, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM },
5677 { MASK_ALTIVEC, CODE_FOR_addv4sf3, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP },
5678 { MASK_ALTIVEC, CODE_FOR_altivec_vaddcuw, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW },
5679 { MASK_ALTIVEC, CODE_FOR_altivec_vaddubs, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS },
5680 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsbs, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS },
5681 { MASK_ALTIVEC, CODE_FOR_altivec_vadduhs, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS },
5682 { MASK_ALTIVEC, CODE_FOR_altivec_vaddshs, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS },
5683 { MASK_ALTIVEC, CODE_FOR_altivec_vadduws, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS },
5684 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsws, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS },
5685 { MASK_ALTIVEC, CODE_FOR_andv4si3, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND },
5686 { MASK_ALTIVEC, CODE_FOR_altivec_vandc, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC },
5687 { MASK_ALTIVEC, CODE_FOR_altivec_vavgub, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB },
5688 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsb, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB },
5689 { MASK_ALTIVEC, CODE_FOR_altivec_vavguh, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH },
5690 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsh, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH },
5691 { MASK_ALTIVEC, CODE_FOR_altivec_vavguw, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW },
5692 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsw, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW },
5693 { MASK_ALTIVEC, CODE_FOR_altivec_vcfux, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX },
5694 { MASK_ALTIVEC, CODE_FOR_altivec_vcfsx, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX },
5695 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpbfp, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP },
5696 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequb, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB },
5697 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequh, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH },
5698 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequw, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW },
5699 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpeqfp, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP },
5700 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgefp, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP },
5701 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtub, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB },
5702 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsb, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB },
5703 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuh, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH },
5704 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsh, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH },
5705 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuw, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW },
5706 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsw, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW },
5707 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtfp, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP },
5708 { MASK_ALTIVEC, CODE_FOR_altivec_vctsxs, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS },
5709 { MASK_ALTIVEC, CODE_FOR_altivec_vctuxs, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS },
5710 { MASK_ALTIVEC, CODE_FOR_umaxv16qi3, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB },
5711 { MASK_ALTIVEC, CODE_FOR_smaxv16qi3, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB },
5712 { MASK_ALTIVEC, CODE_FOR_umaxv8hi3, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH },
5713 { MASK_ALTIVEC, CODE_FOR_smaxv8hi3, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH },
5714 { MASK_ALTIVEC, CODE_FOR_umaxv4si3, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW },
5715 { MASK_ALTIVEC, CODE_FOR_smaxv4si3, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW },
5716 { MASK_ALTIVEC, CODE_FOR_smaxv4sf3, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP },
5717 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghb, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB },
5718 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghh, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH },
5719 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghw, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW },
5720 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglb, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB },
5721 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglh, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH },
5722 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglw, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW },
5723 { MASK_ALTIVEC, CODE_FOR_uminv16qi3, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB },
5724 { MASK_ALTIVEC, CODE_FOR_sminv16qi3, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB },
5725 { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH },
5726 { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH },
5727 { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW },
5728 { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW },
5729 { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP },
5730 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleub, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB },
5731 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesb, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB },
5732 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleuh, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH },
5733 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesh, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH },
5734 { MASK_ALTIVEC, CODE_FOR_altivec_vmuloub, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB },
5735 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosb, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB },
5736 { MASK_ALTIVEC, CODE_FOR_altivec_vmulouh, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH },
5737 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosh, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH },
5738 { MASK_ALTIVEC, CODE_FOR_altivec_vnor, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR },
5739 { MASK_ALTIVEC, CODE_FOR_iorv4si3, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR },
5740 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhum, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM },
5741 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwum, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM },
5742 { MASK_ALTIVEC, CODE_FOR_altivec_vpkpx, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX },
5743 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhss, "__builtin_altivec_vpkuhss", ALTIVEC_BUILTIN_VPKUHSS },
5744 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshss, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS },
5745 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwss, "__builtin_altivec_vpkuwss", ALTIVEC_BUILTIN_VPKUWSS },
5746 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswss, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS },
5747 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhus, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS },
5748 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshus, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS },
5749 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwus, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS },
5750 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswus, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS },
5751 { MASK_ALTIVEC, CODE_FOR_altivec_vrlb, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB },
5752 { MASK_ALTIVEC, CODE_FOR_altivec_vrlh, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH },
5753 { MASK_ALTIVEC, CODE_FOR_altivec_vrlw, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW },
5754 { MASK_ALTIVEC, CODE_FOR_altivec_vslb, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB },
5755 { MASK_ALTIVEC, CODE_FOR_altivec_vslh, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH },
5756 { MASK_ALTIVEC, CODE_FOR_altivec_vslw, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW },
5757 { MASK_ALTIVEC, CODE_FOR_altivec_vsl, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL },
5758 { MASK_ALTIVEC, CODE_FOR_altivec_vslo, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO },
5759 { MASK_ALTIVEC, CODE_FOR_altivec_vspltb, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB },
5760 { MASK_ALTIVEC, CODE_FOR_altivec_vsplth, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH },
5761 { MASK_ALTIVEC, CODE_FOR_altivec_vspltw, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW },
5762 { MASK_ALTIVEC, CODE_FOR_altivec_vsrb, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB },
5763 { MASK_ALTIVEC, CODE_FOR_altivec_vsrh, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH },
5764 { MASK_ALTIVEC, CODE_FOR_altivec_vsrw, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW },
5765 { MASK_ALTIVEC, CODE_FOR_altivec_vsrab, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB },
5766 { MASK_ALTIVEC, CODE_FOR_altivec_vsrah, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH },
5767 { MASK_ALTIVEC, CODE_FOR_altivec_vsraw, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW },
5768 { MASK_ALTIVEC, CODE_FOR_altivec_vsr, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR },
5769 { MASK_ALTIVEC, CODE_FOR_altivec_vsro, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO },
5770 { MASK_ALTIVEC, CODE_FOR_subv16qi3, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM },
5771 { MASK_ALTIVEC, CODE_FOR_subv8hi3, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM },
5772 { MASK_ALTIVEC, CODE_FOR_subv4si3, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM },
5773 { MASK_ALTIVEC, CODE_FOR_subv4sf3, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP },
5774 { MASK_ALTIVEC, CODE_FOR_altivec_vsubcuw, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW },
5775 { MASK_ALTIVEC, CODE_FOR_altivec_vsububs, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS },
5776 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsbs, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS },
5777 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuhs, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS },
5778 { MASK_ALTIVEC, CODE_FOR_altivec_vsubshs, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS },
5779 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuws, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS },
5780 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsws, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS },
5781 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4ubs, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS },
5782 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4sbs, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS },
5783 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4shs, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS },
5784 { MASK_ALTIVEC, CODE_FOR_altivec_vsum2sws, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS },
5785 { MASK_ALTIVEC, CODE_FOR_altivec_vsumsws, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS },
5786 { MASK_ALTIVEC, CODE_FOR_xorv4si3, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR },
5788 /* Place holder, leave as first spe builtin. */
5789 { 0, CODE_FOR_spe_evaddw, "__builtin_spe_evaddw", SPE_BUILTIN_EVADDW },
5790 { 0, CODE_FOR_spe_evand, "__builtin_spe_evand", SPE_BUILTIN_EVAND },
5791 { 0, CODE_FOR_spe_evandc, "__builtin_spe_evandc", SPE_BUILTIN_EVANDC },
5792 { 0, CODE_FOR_spe_evdivws, "__builtin_spe_evdivws", SPE_BUILTIN_EVDIVWS },
5793 { 0, CODE_FOR_spe_evdivwu, "__builtin_spe_evdivwu", SPE_BUILTIN_EVDIVWU },
5794 { 0, CODE_FOR_spe_eveqv, "__builtin_spe_eveqv", SPE_BUILTIN_EVEQV },
5795 { 0, CODE_FOR_spe_evfsadd, "__builtin_spe_evfsadd", SPE_BUILTIN_EVFSADD },
5796 { 0, CODE_FOR_spe_evfsdiv, "__builtin_spe_evfsdiv", SPE_BUILTIN_EVFSDIV },
5797 { 0, CODE_FOR_spe_evfsmul, "__builtin_spe_evfsmul", SPE_BUILTIN_EVFSMUL },
5798 { 0, CODE_FOR_spe_evfssub, "__builtin_spe_evfssub", SPE_BUILTIN_EVFSSUB },
5799 { 0, CODE_FOR_spe_evmergehi, "__builtin_spe_evmergehi", SPE_BUILTIN_EVMERGEHI },
5800 { 0, CODE_FOR_spe_evmergehilo, "__builtin_spe_evmergehilo", SPE_BUILTIN_EVMERGEHILO },
5801 { 0, CODE_FOR_spe_evmergelo, "__builtin_spe_evmergelo", SPE_BUILTIN_EVMERGELO },
5802 { 0, CODE_FOR_spe_evmergelohi, "__builtin_spe_evmergelohi", SPE_BUILTIN_EVMERGELOHI },
5803 { 0, CODE_FOR_spe_evmhegsmfaa, "__builtin_spe_evmhegsmfaa", SPE_BUILTIN_EVMHEGSMFAA },
5804 { 0, CODE_FOR_spe_evmhegsmfan, "__builtin_spe_evmhegsmfan", SPE_BUILTIN_EVMHEGSMFAN },
5805 { 0, CODE_FOR_spe_evmhegsmiaa, "__builtin_spe_evmhegsmiaa", SPE_BUILTIN_EVMHEGSMIAA },
5806 { 0, CODE_FOR_spe_evmhegsmian, "__builtin_spe_evmhegsmian", SPE_BUILTIN_EVMHEGSMIAN },
5807 { 0, CODE_FOR_spe_evmhegumiaa, "__builtin_spe_evmhegumiaa", SPE_BUILTIN_EVMHEGUMIAA },
5808 { 0, CODE_FOR_spe_evmhegumian, "__builtin_spe_evmhegumian", SPE_BUILTIN_EVMHEGUMIAN },
5809 { 0, CODE_FOR_spe_evmhesmf, "__builtin_spe_evmhesmf", SPE_BUILTIN_EVMHESMF },
5810 { 0, CODE_FOR_spe_evmhesmfa, "__builtin_spe_evmhesmfa", SPE_BUILTIN_EVMHESMFA },
5811 { 0, CODE_FOR_spe_evmhesmfaaw, "__builtin_spe_evmhesmfaaw", SPE_BUILTIN_EVMHESMFAAW },
5812 { 0, CODE_FOR_spe_evmhesmfanw, "__builtin_spe_evmhesmfanw", SPE_BUILTIN_EVMHESMFANW },
5813 { 0, CODE_FOR_spe_evmhesmi, "__builtin_spe_evmhesmi", SPE_BUILTIN_EVMHESMI },
5814 { 0, CODE_FOR_spe_evmhesmia, "__builtin_spe_evmhesmia", SPE_BUILTIN_EVMHESMIA },
5815 { 0, CODE_FOR_spe_evmhesmiaaw, "__builtin_spe_evmhesmiaaw", SPE_BUILTIN_EVMHESMIAAW },
5816 { 0, CODE_FOR_spe_evmhesmianw, "__builtin_spe_evmhesmianw", SPE_BUILTIN_EVMHESMIANW },
5817 { 0, CODE_FOR_spe_evmhessf, "__builtin_spe_evmhessf", SPE_BUILTIN_EVMHESSF },
5818 { 0, CODE_FOR_spe_evmhessfa, "__builtin_spe_evmhessfa", SPE_BUILTIN_EVMHESSFA },
5819 { 0, CODE_FOR_spe_evmhessfaaw, "__builtin_spe_evmhessfaaw", SPE_BUILTIN_EVMHESSFAAW },
5820 { 0, CODE_FOR_spe_evmhessfanw, "__builtin_spe_evmhessfanw", SPE_BUILTIN_EVMHESSFANW },
5821 { 0, CODE_FOR_spe_evmhessiaaw, "__builtin_spe_evmhessiaaw", SPE_BUILTIN_EVMHESSIAAW },
5822 { 0, CODE_FOR_spe_evmhessianw, "__builtin_spe_evmhessianw", SPE_BUILTIN_EVMHESSIANW },
5823 { 0, CODE_FOR_spe_evmheumi, "__builtin_spe_evmheumi", SPE_BUILTIN_EVMHEUMI },
5824 { 0, CODE_FOR_spe_evmheumia, "__builtin_spe_evmheumia", SPE_BUILTIN_EVMHEUMIA },
5825 { 0, CODE_FOR_spe_evmheumiaaw, "__builtin_spe_evmheumiaaw", SPE_BUILTIN_EVMHEUMIAAW },
5826 { 0, CODE_FOR_spe_evmheumianw, "__builtin_spe_evmheumianw", SPE_BUILTIN_EVMHEUMIANW },
5827 { 0, CODE_FOR_spe_evmheusiaaw, "__builtin_spe_evmheusiaaw", SPE_BUILTIN_EVMHEUSIAAW },
5828 { 0, CODE_FOR_spe_evmheusianw, "__builtin_spe_evmheusianw", SPE_BUILTIN_EVMHEUSIANW },
5829 { 0, CODE_FOR_spe_evmhogsmfaa, "__builtin_spe_evmhogsmfaa", SPE_BUILTIN_EVMHOGSMFAA },
5830 { 0, CODE_FOR_spe_evmhogsmfan, "__builtin_spe_evmhogsmfan", SPE_BUILTIN_EVMHOGSMFAN },
5831 { 0, CODE_FOR_spe_evmhogsmiaa, "__builtin_spe_evmhogsmiaa", SPE_BUILTIN_EVMHOGSMIAA },
5832 { 0, CODE_FOR_spe_evmhogsmian, "__builtin_spe_evmhogsmian", SPE_BUILTIN_EVMHOGSMIAN },
5833 { 0, CODE_FOR_spe_evmhogumiaa, "__builtin_spe_evmhogumiaa", SPE_BUILTIN_EVMHOGUMIAA },
5834 { 0, CODE_FOR_spe_evmhogumian, "__builtin_spe_evmhogumian", SPE_BUILTIN_EVMHOGUMIAN },
5835 { 0, CODE_FOR_spe_evmhosmf, "__builtin_spe_evmhosmf", SPE_BUILTIN_EVMHOSMF },
5836 { 0, CODE_FOR_spe_evmhosmfa, "__builtin_spe_evmhosmfa", SPE_BUILTIN_EVMHOSMFA },
5837 { 0, CODE_FOR_spe_evmhosmfaaw, "__builtin_spe_evmhosmfaaw", SPE_BUILTIN_EVMHOSMFAAW },
5838 { 0, CODE_FOR_spe_evmhosmfanw, "__builtin_spe_evmhosmfanw", SPE_BUILTIN_EVMHOSMFANW },
5839 { 0, CODE_FOR_spe_evmhosmi, "__builtin_spe_evmhosmi", SPE_BUILTIN_EVMHOSMI },
5840 { 0, CODE_FOR_spe_evmhosmia, "__builtin_spe_evmhosmia", SPE_BUILTIN_EVMHOSMIA },
5841 { 0, CODE_FOR_spe_evmhosmiaaw, "__builtin_spe_evmhosmiaaw", SPE_BUILTIN_EVMHOSMIAAW },
5842 { 0, CODE_FOR_spe_evmhosmianw, "__builtin_spe_evmhosmianw", SPE_BUILTIN_EVMHOSMIANW },
5843 { 0, CODE_FOR_spe_evmhossf, "__builtin_spe_evmhossf", SPE_BUILTIN_EVMHOSSF },
5844 { 0, CODE_FOR_spe_evmhossfa, "__builtin_spe_evmhossfa", SPE_BUILTIN_EVMHOSSFA },
5845 { 0, CODE_FOR_spe_evmhossfaaw, "__builtin_spe_evmhossfaaw", SPE_BUILTIN_EVMHOSSFAAW },
5846 { 0, CODE_FOR_spe_evmhossfanw, "__builtin_spe_evmhossfanw", SPE_BUILTIN_EVMHOSSFANW },
5847 { 0, CODE_FOR_spe_evmhossiaaw, "__builtin_spe_evmhossiaaw", SPE_BUILTIN_EVMHOSSIAAW },
5848 { 0, CODE_FOR_spe_evmhossianw, "__builtin_spe_evmhossianw", SPE_BUILTIN_EVMHOSSIANW },
5849 { 0, CODE_FOR_spe_evmhoumi, "__builtin_spe_evmhoumi", SPE_BUILTIN_EVMHOUMI },
5850 { 0, CODE_FOR_spe_evmhoumia, "__builtin_spe_evmhoumia", SPE_BUILTIN_EVMHOUMIA },
5851 { 0, CODE_FOR_spe_evmhoumiaaw, "__builtin_spe_evmhoumiaaw", SPE_BUILTIN_EVMHOUMIAAW },
5852 { 0, CODE_FOR_spe_evmhoumianw, "__builtin_spe_evmhoumianw", SPE_BUILTIN_EVMHOUMIANW },
5853 { 0, CODE_FOR_spe_evmhousiaaw, "__builtin_spe_evmhousiaaw", SPE_BUILTIN_EVMHOUSIAAW },
5854 { 0, CODE_FOR_spe_evmhousianw, "__builtin_spe_evmhousianw", SPE_BUILTIN_EVMHOUSIANW },
5855 { 0, CODE_FOR_spe_evmwhsmf, "__builtin_spe_evmwhsmf", SPE_BUILTIN_EVMWHSMF },
5856 { 0, CODE_FOR_spe_evmwhsmfa, "__builtin_spe_evmwhsmfa", SPE_BUILTIN_EVMWHSMFA },
5857 { 0, CODE_FOR_spe_evmwhsmi, "__builtin_spe_evmwhsmi", SPE_BUILTIN_EVMWHSMI },
5858 { 0, CODE_FOR_spe_evmwhsmia, "__builtin_spe_evmwhsmia", SPE_BUILTIN_EVMWHSMIA },
5859 { 0, CODE_FOR_spe_evmwhssf, "__builtin_spe_evmwhssf", SPE_BUILTIN_EVMWHSSF },
5860 { 0, CODE_FOR_spe_evmwhssfa, "__builtin_spe_evmwhssfa", SPE_BUILTIN_EVMWHSSFA },
5861 { 0, CODE_FOR_spe_evmwhumi, "__builtin_spe_evmwhumi", SPE_BUILTIN_EVMWHUMI },
5862 { 0, CODE_FOR_spe_evmwhumia, "__builtin_spe_evmwhumia", SPE_BUILTIN_EVMWHUMIA },
5863 { 0, CODE_FOR_spe_evmwlsmiaaw, "__builtin_spe_evmwlsmiaaw", SPE_BUILTIN_EVMWLSMIAAW },
5864 { 0, CODE_FOR_spe_evmwlsmianw, "__builtin_spe_evmwlsmianw", SPE_BUILTIN_EVMWLSMIANW },
5865 { 0, CODE_FOR_spe_evmwlssiaaw, "__builtin_spe_evmwlssiaaw", SPE_BUILTIN_EVMWLSSIAAW },
5866 { 0, CODE_FOR_spe_evmwlssianw, "__builtin_spe_evmwlssianw", SPE_BUILTIN_EVMWLSSIANW },
5867 { 0, CODE_FOR_spe_evmwlumi, "__builtin_spe_evmwlumi", SPE_BUILTIN_EVMWLUMI },
5868 { 0, CODE_FOR_spe_evmwlumia, "__builtin_spe_evmwlumia", SPE_BUILTIN_EVMWLUMIA },
5869 { 0, CODE_FOR_spe_evmwlumiaaw, "__builtin_spe_evmwlumiaaw", SPE_BUILTIN_EVMWLUMIAAW },
5870 { 0, CODE_FOR_spe_evmwlumianw, "__builtin_spe_evmwlumianw", SPE_BUILTIN_EVMWLUMIANW },
5871 { 0, CODE_FOR_spe_evmwlusiaaw, "__builtin_spe_evmwlusiaaw", SPE_BUILTIN_EVMWLUSIAAW },
5872 { 0, CODE_FOR_spe_evmwlusianw, "__builtin_spe_evmwlusianw", SPE_BUILTIN_EVMWLUSIANW },
5873 { 0, CODE_FOR_spe_evmwsmf, "__builtin_spe_evmwsmf", SPE_BUILTIN_EVMWSMF },
5874 { 0, CODE_FOR_spe_evmwsmfa, "__builtin_spe_evmwsmfa", SPE_BUILTIN_EVMWSMFA },
5875 { 0, CODE_FOR_spe_evmwsmfaa, "__builtin_spe_evmwsmfaa", SPE_BUILTIN_EVMWSMFAA },
5876 { 0, CODE_FOR_spe_evmwsmfan, "__builtin_spe_evmwsmfan", SPE_BUILTIN_EVMWSMFAN },
5877 { 0, CODE_FOR_spe_evmwsmi, "__builtin_spe_evmwsmi", SPE_BUILTIN_EVMWSMI },
5878 { 0, CODE_FOR_spe_evmwsmia, "__builtin_spe_evmwsmia", SPE_BUILTIN_EVMWSMIA },
5879 { 0, CODE_FOR_spe_evmwsmiaa, "__builtin_spe_evmwsmiaa", SPE_BUILTIN_EVMWSMIAA },
5880 { 0, CODE_FOR_spe_evmwsmian, "__builtin_spe_evmwsmian", SPE_BUILTIN_EVMWSMIAN },
5881 { 0, CODE_FOR_spe_evmwssf, "__builtin_spe_evmwssf", SPE_BUILTIN_EVMWSSF },
5882 { 0, CODE_FOR_spe_evmwssfa, "__builtin_spe_evmwssfa", SPE_BUILTIN_EVMWSSFA },
5883 { 0, CODE_FOR_spe_evmwssfaa, "__builtin_spe_evmwssfaa", SPE_BUILTIN_EVMWSSFAA },
5884 { 0, CODE_FOR_spe_evmwssfan, "__builtin_spe_evmwssfan", SPE_BUILTIN_EVMWSSFAN },
5885 { 0, CODE_FOR_spe_evmwumi, "__builtin_spe_evmwumi", SPE_BUILTIN_EVMWUMI },
5886 { 0, CODE_FOR_spe_evmwumia, "__builtin_spe_evmwumia", SPE_BUILTIN_EVMWUMIA },
5887 { 0, CODE_FOR_spe_evmwumiaa, "__builtin_spe_evmwumiaa", SPE_BUILTIN_EVMWUMIAA },
5888 { 0, CODE_FOR_spe_evmwumian, "__builtin_spe_evmwumian", SPE_BUILTIN_EVMWUMIAN },
5889 { 0, CODE_FOR_spe_evnand, "__builtin_spe_evnand", SPE_BUILTIN_EVNAND },
5890 { 0, CODE_FOR_spe_evnor, "__builtin_spe_evnor", SPE_BUILTIN_EVNOR },
5891 { 0, CODE_FOR_spe_evor, "__builtin_spe_evor", SPE_BUILTIN_EVOR },
5892 { 0, CODE_FOR_spe_evorc, "__builtin_spe_evorc", SPE_BUILTIN_EVORC },
5893 { 0, CODE_FOR_spe_evrlw, "__builtin_spe_evrlw", SPE_BUILTIN_EVRLW },
5894 { 0, CODE_FOR_spe_evslw, "__builtin_spe_evslw", SPE_BUILTIN_EVSLW },
5895 { 0, CODE_FOR_spe_evsrws, "__builtin_spe_evsrws", SPE_BUILTIN_EVSRWS },
5896 { 0, CODE_FOR_spe_evsrwu, "__builtin_spe_evsrwu", SPE_BUILTIN_EVSRWU },
5897 { 0, CODE_FOR_spe_evsubfw, "__builtin_spe_evsubfw", SPE_BUILTIN_EVSUBFW },
5899 /* SPE binary operations expecting a 5-bit unsigned literal. */
5900 { 0, CODE_FOR_spe_evaddiw, "__builtin_spe_evaddiw", SPE_BUILTIN_EVADDIW },
5902 { 0, CODE_FOR_spe_evrlwi, "__builtin_spe_evrlwi", SPE_BUILTIN_EVRLWI },
5903 { 0, CODE_FOR_spe_evslwi, "__builtin_spe_evslwi", SPE_BUILTIN_EVSLWI },
5904 { 0, CODE_FOR_spe_evsrwis, "__builtin_spe_evsrwis", SPE_BUILTIN_EVSRWIS },
5905 { 0, CODE_FOR_spe_evsrwiu, "__builtin_spe_evsrwiu", SPE_BUILTIN_EVSRWIU },
5906 { 0, CODE_FOR_spe_evsubifw, "__builtin_spe_evsubifw", SPE_BUILTIN_EVSUBIFW },
5907 { 0, CODE_FOR_spe_evmwhssfaa, "__builtin_spe_evmwhssfaa", SPE_BUILTIN_EVMWHSSFAA },
5908 { 0, CODE_FOR_spe_evmwhssmaa, "__builtin_spe_evmwhssmaa", SPE_BUILTIN_EVMWHSSMAA },
5909 { 0, CODE_FOR_spe_evmwhsmfaa, "__builtin_spe_evmwhsmfaa", SPE_BUILTIN_EVMWHSMFAA },
5910 { 0, CODE_FOR_spe_evmwhsmiaa, "__builtin_spe_evmwhsmiaa", SPE_BUILTIN_EVMWHSMIAA },
5911 { 0, CODE_FOR_spe_evmwhusiaa, "__builtin_spe_evmwhusiaa", SPE_BUILTIN_EVMWHUSIAA },
5912 { 0, CODE_FOR_spe_evmwhumiaa, "__builtin_spe_evmwhumiaa", SPE_BUILTIN_EVMWHUMIAA },
5913 { 0, CODE_FOR_spe_evmwhssfan, "__builtin_spe_evmwhssfan", SPE_BUILTIN_EVMWHSSFAN },
5914 { 0, CODE_FOR_spe_evmwhssian, "__builtin_spe_evmwhssian", SPE_BUILTIN_EVMWHSSIAN },
5915 { 0, CODE_FOR_spe_evmwhsmfan, "__builtin_spe_evmwhsmfan", SPE_BUILTIN_EVMWHSMFAN },
5916 { 0, CODE_FOR_spe_evmwhsmian, "__builtin_spe_evmwhsmian", SPE_BUILTIN_EVMWHSMIAN },
5917 { 0, CODE_FOR_spe_evmwhusian, "__builtin_spe_evmwhusian", SPE_BUILTIN_EVMWHUSIAN },
5918 { 0, CODE_FOR_spe_evmwhumian, "__builtin_spe_evmwhumian", SPE_BUILTIN_EVMWHUMIAN },
5919 { 0, CODE_FOR_spe_evmwhgssfaa, "__builtin_spe_evmwhgssfaa", SPE_BUILTIN_EVMWHGSSFAA },
5920 { 0, CODE_FOR_spe_evmwhgsmfaa, "__builtin_spe_evmwhgsmfaa", SPE_BUILTIN_EVMWHGSMFAA },
5921 { 0, CODE_FOR_spe_evmwhgsmiaa, "__builtin_spe_evmwhgsmiaa", SPE_BUILTIN_EVMWHGSMIAA },
5922 { 0, CODE_FOR_spe_evmwhgumiaa, "__builtin_spe_evmwhgumiaa", SPE_BUILTIN_EVMWHGUMIAA },
5923 { 0, CODE_FOR_spe_evmwhgssfan, "__builtin_spe_evmwhgssfan", SPE_BUILTIN_EVMWHGSSFAN },
5924 { 0, CODE_FOR_spe_evmwhgsmfan, "__builtin_spe_evmwhgsmfan", SPE_BUILTIN_EVMWHGSMFAN },
5925 { 0, CODE_FOR_spe_evmwhgsmian, "__builtin_spe_evmwhgsmian", SPE_BUILTIN_EVMWHGSMIAN },
5926 { 0, CODE_FOR_spe_evmwhgumian, "__builtin_spe_evmwhgumian", SPE_BUILTIN_EVMWHGUMIAN },
5927 { 0, CODE_FOR_spe_brinc, "__builtin_spe_brinc", SPE_BUILTIN_BRINC },
5929 /* Place-holder. Leave as last binary SPE builtin. */
5930 { 0, CODE_FOR_xorv2si3, "__builtin_spe_evxor", SPE_BUILTIN_EVXOR },
5933 /* AltiVec predicates. */
5935 struct builtin_description_predicates
5937 const unsigned int mask;
5938 const enum insn_code icode;
5940 const char *const name;
5941 const enum rs6000_builtins code;
5944 static const struct builtin_description_predicates bdesc_altivec_preds[] =
5946 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpbfp.", "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P },
5947 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpeqfp.", "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P },
5948 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgefp.", "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P },
5949 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgtfp.", "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P },
5950 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpequw.", "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P },
5951 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtsw.", "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P },
5952 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtuw.", "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P },
5953 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtuh.", "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P },
5954 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtsh.", "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P },
5955 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpequh.", "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P },
5956 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpequb.", "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P },
5957 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtsb.", "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P },
5958 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P }
5961 /* SPE predicates. */
5962 static struct builtin_description bdesc_spe_predicates[] =
5964 /* Place-holder. Leave as first. */
5965 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evcmpeq", SPE_BUILTIN_EVCMPEQ },
5966 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evcmpgts", SPE_BUILTIN_EVCMPGTS },
5967 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evcmpgtu", SPE_BUILTIN_EVCMPGTU },
5968 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evcmplts", SPE_BUILTIN_EVCMPLTS },
5969 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evcmpltu", SPE_BUILTIN_EVCMPLTU },
5970 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evfscmpeq", SPE_BUILTIN_EVFSCMPEQ },
5971 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evfscmpgt", SPE_BUILTIN_EVFSCMPGT },
5972 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evfscmplt", SPE_BUILTIN_EVFSCMPLT },
5973 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evfststeq", SPE_BUILTIN_EVFSTSTEQ },
5974 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evfststgt", SPE_BUILTIN_EVFSTSTGT },
5975 /* Place-holder. Leave as last. */
5976 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evfststlt", SPE_BUILTIN_EVFSTSTLT },
5979 /* SPE evsel predicates. */
5980 static struct builtin_description bdesc_spe_evsel[] =
5982 /* Place-holder. Leave as first. */
5983 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evsel_gts", SPE_BUILTIN_EVSEL_CMPGTS },
5984 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evsel_gtu", SPE_BUILTIN_EVSEL_CMPGTU },
5985 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evsel_lts", SPE_BUILTIN_EVSEL_CMPLTS },
5986 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evsel_ltu", SPE_BUILTIN_EVSEL_CMPLTU },
5987 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evsel_eq", SPE_BUILTIN_EVSEL_CMPEQ },
5988 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evsel_fsgt", SPE_BUILTIN_EVSEL_FSCMPGT },
5989 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evsel_fslt", SPE_BUILTIN_EVSEL_FSCMPLT },
5990 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evsel_fseq", SPE_BUILTIN_EVSEL_FSCMPEQ },
5991 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evsel_fststgt", SPE_BUILTIN_EVSEL_FSTSTGT },
5992 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evsel_fststlt", SPE_BUILTIN_EVSEL_FSTSTLT },
5993 /* Place-holder. Leave as last. */
5994 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evsel_fststeq", SPE_BUILTIN_EVSEL_FSTSTEQ },
5997 /* ABS* operations. */
5999 static const struct builtin_description bdesc_abs[] =
6001 { MASK_ALTIVEC, CODE_FOR_absv4si2, "__builtin_altivec_abs_v4si", ALTIVEC_BUILTIN_ABS_V4SI },
6002 { MASK_ALTIVEC, CODE_FOR_absv8hi2, "__builtin_altivec_abs_v8hi", ALTIVEC_BUILTIN_ABS_V8HI },
6003 { MASK_ALTIVEC, CODE_FOR_absv4sf2, "__builtin_altivec_abs_v4sf", ALTIVEC_BUILTIN_ABS_V4SF },
6004 { MASK_ALTIVEC, CODE_FOR_absv16qi2, "__builtin_altivec_abs_v16qi", ALTIVEC_BUILTIN_ABS_V16QI },
6005 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v4si, "__builtin_altivec_abss_v4si", ALTIVEC_BUILTIN_ABSS_V4SI },
6006 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v8hi, "__builtin_altivec_abss_v8hi", ALTIVEC_BUILTIN_ABSS_V8HI },
6007 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v16qi, "__builtin_altivec_abss_v16qi", ALTIVEC_BUILTIN_ABSS_V16QI }
6010 /* Simple unary operations: VECb = foo (unsigned literal) or VECb =
6013 static struct builtin_description bdesc_1arg[] =
6015 { MASK_ALTIVEC, CODE_FOR_altivec_vexptefp, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP },
6016 { MASK_ALTIVEC, CODE_FOR_altivec_vlogefp, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP },
6017 { MASK_ALTIVEC, CODE_FOR_altivec_vrefp, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP },
6018 { MASK_ALTIVEC, CODE_FOR_altivec_vrfim, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM },
6019 { MASK_ALTIVEC, CODE_FOR_altivec_vrfin, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN },
6020 { MASK_ALTIVEC, CODE_FOR_altivec_vrfip, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP },
6021 { MASK_ALTIVEC, CODE_FOR_ftruncv4sf2, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ },
6022 { MASK_ALTIVEC, CODE_FOR_altivec_vrsqrtefp, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP },
6023 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisb, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB },
6024 { MASK_ALTIVEC, CODE_FOR_altivec_vspltish, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH },
6025 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisw, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW },
6026 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsb, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB },
6027 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhpx, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX },
6028 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsh, "__builtin_altivec_vupkhsh", ALTIVEC_BUILTIN_VUPKHSH },
6029 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsb, "__builtin_altivec_vupklsb", ALTIVEC_BUILTIN_VUPKLSB },
6030 { MASK_ALTIVEC, CODE_FOR_altivec_vupklpx, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX },
6031 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsh, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH },
6033 /* The SPE unary builtins must start with SPE_BUILTIN_EVABS and
6034 end with SPE_BUILTIN_EVSUBFUSIAAW. */
6035 { 0, CODE_FOR_spe_evabs, "__builtin_spe_evabs", SPE_BUILTIN_EVABS },
6036 { 0, CODE_FOR_spe_evaddsmiaaw, "__builtin_spe_evaddsmiaaw", SPE_BUILTIN_EVADDSMIAAW },
6037 { 0, CODE_FOR_spe_evaddssiaaw, "__builtin_spe_evaddssiaaw", SPE_BUILTIN_EVADDSSIAAW },
6038 { 0, CODE_FOR_spe_evaddumiaaw, "__builtin_spe_evaddumiaaw", SPE_BUILTIN_EVADDUMIAAW },
6039 { 0, CODE_FOR_spe_evaddusiaaw, "__builtin_spe_evaddusiaaw", SPE_BUILTIN_EVADDUSIAAW },
6040 { 0, CODE_FOR_spe_evcntlsw, "__builtin_spe_evcntlsw", SPE_BUILTIN_EVCNTLSW },
6041 { 0, CODE_FOR_spe_evcntlzw, "__builtin_spe_evcntlzw", SPE_BUILTIN_EVCNTLZW },
6042 { 0, CODE_FOR_spe_evextsb, "__builtin_spe_evextsb", SPE_BUILTIN_EVEXTSB },
6043 { 0, CODE_FOR_spe_evextsh, "__builtin_spe_evextsh", SPE_BUILTIN_EVEXTSH },
6044 { 0, CODE_FOR_spe_evfsabs, "__builtin_spe_evfsabs", SPE_BUILTIN_EVFSABS },
6045 { 0, CODE_FOR_spe_evfscfsf, "__builtin_spe_evfscfsf", SPE_BUILTIN_EVFSCFSF },
6046 { 0, CODE_FOR_spe_evfscfsi, "__builtin_spe_evfscfsi", SPE_BUILTIN_EVFSCFSI },
6047 { 0, CODE_FOR_spe_evfscfuf, "__builtin_spe_evfscfuf", SPE_BUILTIN_EVFSCFUF },
6048 { 0, CODE_FOR_spe_evfscfui, "__builtin_spe_evfscfui", SPE_BUILTIN_EVFSCFUI },
6049 { 0, CODE_FOR_spe_evfsctsf, "__builtin_spe_evfsctsf", SPE_BUILTIN_EVFSCTSF },
6050 { 0, CODE_FOR_spe_evfsctsi, "__builtin_spe_evfsctsi", SPE_BUILTIN_EVFSCTSI },
6051 { 0, CODE_FOR_spe_evfsctsiz, "__builtin_spe_evfsctsiz", SPE_BUILTIN_EVFSCTSIZ },
6052 { 0, CODE_FOR_spe_evfsctuf, "__builtin_spe_evfsctuf", SPE_BUILTIN_EVFSCTUF },
6053 { 0, CODE_FOR_spe_evfsctui, "__builtin_spe_evfsctui", SPE_BUILTIN_EVFSCTUI },
6054 { 0, CODE_FOR_spe_evfsctuiz, "__builtin_spe_evfsctuiz", SPE_BUILTIN_EVFSCTUIZ },
6055 { 0, CODE_FOR_spe_evfsnabs, "__builtin_spe_evfsnabs", SPE_BUILTIN_EVFSNABS },
6056 { 0, CODE_FOR_spe_evfsneg, "__builtin_spe_evfsneg", SPE_BUILTIN_EVFSNEG },
6057 { 0, CODE_FOR_spe_evmra, "__builtin_spe_evmra", SPE_BUILTIN_EVMRA },
6058 { 0, CODE_FOR_negv2si2, "__builtin_spe_evneg", SPE_BUILTIN_EVNEG },
6059 { 0, CODE_FOR_spe_evrndw, "__builtin_spe_evrndw", SPE_BUILTIN_EVRNDW },
6060 { 0, CODE_FOR_spe_evsubfsmiaaw, "__builtin_spe_evsubfsmiaaw", SPE_BUILTIN_EVSUBFSMIAAW },
6061 { 0, CODE_FOR_spe_evsubfssiaaw, "__builtin_spe_evsubfssiaaw", SPE_BUILTIN_EVSUBFSSIAAW },
6062 { 0, CODE_FOR_spe_evsubfumiaaw, "__builtin_spe_evsubfumiaaw", SPE_BUILTIN_EVSUBFUMIAAW },
6064 /* Place-holder. Leave as last unary SPE builtin. */
6065 { 0, CODE_FOR_spe_evsubfusiaaw, "__builtin_spe_evsubfusiaaw", SPE_BUILTIN_EVSUBFUSIAAW },
6069 rs6000_expand_unop_builtin (enum insn_code icode, tree arglist, rtx target)
6072 tree arg0 = TREE_VALUE (arglist);
6073 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6074 enum machine_mode tmode = insn_data[icode].operand[0].mode;
6075 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
6077 if (icode == CODE_FOR_nothing)
6078 /* Builtin not supported on this processor. */
6081 /* If we got invalid arguments bail out before generating bad rtl. */
6082 if (arg0 == error_mark_node)
6085 if (icode == CODE_FOR_altivec_vspltisb
6086 || icode == CODE_FOR_altivec_vspltish
6087 || icode == CODE_FOR_altivec_vspltisw
6088 || icode == CODE_FOR_spe_evsplatfi
6089 || icode == CODE_FOR_spe_evsplati)
6091 /* Only allow 5-bit *signed* literals. */
6092 if (GET_CODE (op0) != CONST_INT
6093 || INTVAL (op0) > 0x1f
6094 || INTVAL (op0) < -0x1f)
6096 error ("argument 1 must be a 5-bit signed literal");
6102 || GET_MODE (target) != tmode
6103 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6104 target = gen_reg_rtx (tmode);
6106 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6107 op0 = copy_to_mode_reg (mode0, op0);
6109 pat = GEN_FCN (icode) (target, op0);
6118 altivec_expand_abs_builtin (enum insn_code icode, tree arglist, rtx target)
6120 rtx pat, scratch1, scratch2;
6121 tree arg0 = TREE_VALUE (arglist);
6122 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6123 enum machine_mode tmode = insn_data[icode].operand[0].mode;
6124 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
6126 /* If we have invalid arguments, bail out before generating bad rtl. */
6127 if (arg0 == error_mark_node)
6131 || GET_MODE (target) != tmode
6132 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6133 target = gen_reg_rtx (tmode);
6135 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6136 op0 = copy_to_mode_reg (mode0, op0);
6138 scratch1 = gen_reg_rtx (mode0);
6139 scratch2 = gen_reg_rtx (mode0);
6141 pat = GEN_FCN (icode) (target, op0, scratch1, scratch2);
6150 rs6000_expand_binop_builtin (enum insn_code icode, tree arglist, rtx target)
6153 tree arg0 = TREE_VALUE (arglist);
6154 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
6155 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6156 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6157 enum machine_mode tmode = insn_data[icode].operand[0].mode;
6158 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
6159 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
6161 if (icode == CODE_FOR_nothing)
6162 /* Builtin not supported on this processor. */
6165 /* If we got invalid arguments bail out before generating bad rtl. */
6166 if (arg0 == error_mark_node || arg1 == error_mark_node)
6169 if (icode == CODE_FOR_altivec_vcfux
6170 || icode == CODE_FOR_altivec_vcfsx
6171 || icode == CODE_FOR_altivec_vctsxs
6172 || icode == CODE_FOR_altivec_vctuxs
6173 || icode == CODE_FOR_altivec_vspltb
6174 || icode == CODE_FOR_altivec_vsplth
6175 || icode == CODE_FOR_altivec_vspltw
6176 || icode == CODE_FOR_spe_evaddiw
6177 || icode == CODE_FOR_spe_evldd
6178 || icode == CODE_FOR_spe_evldh
6179 || icode == CODE_FOR_spe_evldw
6180 || icode == CODE_FOR_spe_evlhhesplat
6181 || icode == CODE_FOR_spe_evlhhossplat
6182 || icode == CODE_FOR_spe_evlhhousplat
6183 || icode == CODE_FOR_spe_evlwhe
6184 || icode == CODE_FOR_spe_evlwhos
6185 || icode == CODE_FOR_spe_evlwhou
6186 || icode == CODE_FOR_spe_evlwhsplat
6187 || icode == CODE_FOR_spe_evlwwsplat
6188 || icode == CODE_FOR_spe_evrlwi
6189 || icode == CODE_FOR_spe_evslwi
6190 || icode == CODE_FOR_spe_evsrwis
6191 || icode == CODE_FOR_spe_evsubifw
6192 || icode == CODE_FOR_spe_evsrwiu)
6194 /* Only allow 5-bit unsigned literals. */
6196 if (TREE_CODE (arg1) != INTEGER_CST
6197 || TREE_INT_CST_LOW (arg1) & ~0x1f)
6199 error ("argument 2 must be a 5-bit unsigned literal");
6205 || GET_MODE (target) != tmode
6206 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6207 target = gen_reg_rtx (tmode);
6209 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6210 op0 = copy_to_mode_reg (mode0, op0);
6211 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
6212 op1 = copy_to_mode_reg (mode1, op1);
6214 pat = GEN_FCN (icode) (target, op0, op1);
6223 altivec_expand_predicate_builtin (enum insn_code icode, const char *opcode,
6224 tree arglist, rtx target)
6227 tree cr6_form = TREE_VALUE (arglist);
6228 tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
6229 tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6230 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6231 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6232 enum machine_mode tmode = SImode;
6233 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
6234 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
6237 if (TREE_CODE (cr6_form) != INTEGER_CST)
6239 error ("argument 1 of __builtin_altivec_predicate must be a constant");
6243 cr6_form_int = TREE_INT_CST_LOW (cr6_form);
6248 /* If we have invalid arguments, bail out before generating bad rtl. */
6249 if (arg0 == error_mark_node || arg1 == error_mark_node)
6253 || GET_MODE (target) != tmode
6254 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6255 target = gen_reg_rtx (tmode);
6257 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6258 op0 = copy_to_mode_reg (mode0, op0);
6259 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
6260 op1 = copy_to_mode_reg (mode1, op1);
6262 scratch = gen_reg_rtx (mode0);
6264 pat = GEN_FCN (icode) (scratch, op0, op1,
6265 gen_rtx_SYMBOL_REF (Pmode, opcode));
6270 /* The vec_any* and vec_all* predicates use the same opcodes for two
6271 different operations, but the bits in CR6 will be different
6272 depending on what information we want. So we have to play tricks
6273 with CR6 to get the right bits out.
6275 If you think this is disgusting, look at the specs for the
6276 AltiVec predicates. */
6278 switch (cr6_form_int)
6281 emit_insn (gen_cr6_test_for_zero (target));
6284 emit_insn (gen_cr6_test_for_zero_reverse (target));
6287 emit_insn (gen_cr6_test_for_lt (target));
6290 emit_insn (gen_cr6_test_for_lt_reverse (target));
6293 error ("argument 1 of __builtin_altivec_predicate is out of range");
6301 altivec_expand_lv_builtin (enum insn_code icode, tree arglist, rtx target)
6304 tree arg0 = TREE_VALUE (arglist);
6305 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
6306 enum machine_mode tmode = insn_data[icode].operand[0].mode;
6307 enum machine_mode mode0 = Pmode;
6308 enum machine_mode mode1 = Pmode;
6309 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6310 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6312 if (icode == CODE_FOR_nothing)
6313 /* Builtin not supported on this processor. */
6316 /* If we got invalid arguments bail out before generating bad rtl. */
6317 if (arg0 == error_mark_node || arg1 == error_mark_node)
6321 || GET_MODE (target) != tmode
6322 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6323 target = gen_reg_rtx (tmode);
6325 op1 = copy_to_mode_reg (mode1, op1);
6327 if (op0 == const0_rtx)
6329 addr = gen_rtx_MEM (tmode, op1);
6333 op0 = copy_to_mode_reg (mode0, op0);
6334 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op0, op1));
6337 pat = GEN_FCN (icode) (target, addr);
6347 spe_expand_stv_builtin (enum insn_code icode, tree arglist)
6349 tree arg0 = TREE_VALUE (arglist);
6350 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
6351 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6352 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6353 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6354 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
6356 enum machine_mode mode0 = insn_data[icode].operand[0].mode;
6357 enum machine_mode mode1 = insn_data[icode].operand[1].mode;
6358 enum machine_mode mode2 = insn_data[icode].operand[2].mode;
6360 /* Invalid arguments. Bail before doing anything stoopid! */
6361 if (arg0 == error_mark_node
6362 || arg1 == error_mark_node
6363 || arg2 == error_mark_node)
6366 if (! (*insn_data[icode].operand[2].predicate) (op0, mode2))
6367 op0 = copy_to_mode_reg (mode2, op0);
6368 if (! (*insn_data[icode].operand[0].predicate) (op1, mode0))
6369 op1 = copy_to_mode_reg (mode0, op1);
6370 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
6371 op2 = copy_to_mode_reg (mode1, op2);
6373 pat = GEN_FCN (icode) (op1, op2, op0);
6380 altivec_expand_stv_builtin (enum insn_code icode, tree arglist)
6382 tree arg0 = TREE_VALUE (arglist);
6383 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
6384 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6385 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6386 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6387 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
6389 enum machine_mode tmode = insn_data[icode].operand[0].mode;
6390 enum machine_mode mode1 = Pmode;
6391 enum machine_mode mode2 = Pmode;
6393 /* Invalid arguments. Bail before doing anything stoopid! */
6394 if (arg0 == error_mark_node
6395 || arg1 == error_mark_node
6396 || arg2 == error_mark_node)
6399 if (! (*insn_data[icode].operand[1].predicate) (op0, tmode))
6400 op0 = copy_to_mode_reg (tmode, op0);
6402 op2 = copy_to_mode_reg (mode2, op2);
6404 if (op1 == const0_rtx)
6406 addr = gen_rtx_MEM (tmode, op2);
6410 op1 = copy_to_mode_reg (mode1, op1);
6411 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op1, op2));
6414 pat = GEN_FCN (icode) (addr, op0);
6421 rs6000_expand_ternop_builtin (enum insn_code icode, tree arglist, rtx target)
6424 tree arg0 = TREE_VALUE (arglist);
6425 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
6426 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6427 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6428 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6429 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
6430 enum machine_mode tmode = insn_data[icode].operand[0].mode;
6431 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
6432 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
6433 enum machine_mode mode2 = insn_data[icode].operand[3].mode;
6435 if (icode == CODE_FOR_nothing)
6436 /* Builtin not supported on this processor. */
6439 /* If we got invalid arguments bail out before generating bad rtl. */
6440 if (arg0 == error_mark_node
6441 || arg1 == error_mark_node
6442 || arg2 == error_mark_node)
6445 if (icode == CODE_FOR_altivec_vsldoi_4sf
6446 || icode == CODE_FOR_altivec_vsldoi_4si
6447 || icode == CODE_FOR_altivec_vsldoi_8hi
6448 || icode == CODE_FOR_altivec_vsldoi_16qi)
6450 /* Only allow 4-bit unsigned literals. */
6452 if (TREE_CODE (arg2) != INTEGER_CST
6453 || TREE_INT_CST_LOW (arg2) & ~0xf)
6455 error ("argument 3 must be a 4-bit unsigned literal");
6461 || GET_MODE (target) != tmode
6462 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6463 target = gen_reg_rtx (tmode);
6465 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6466 op0 = copy_to_mode_reg (mode0, op0);
6467 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
6468 op1 = copy_to_mode_reg (mode1, op1);
6469 if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
6470 op2 = copy_to_mode_reg (mode2, op2);
6472 pat = GEN_FCN (icode) (target, op0, op1, op2);
6480 /* Expand the lvx builtins. */
6482 altivec_expand_ld_builtin (tree exp, rtx target, bool *expandedp)
6484 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6485 tree arglist = TREE_OPERAND (exp, 1);
6486 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
6488 enum machine_mode tmode, mode0;
6490 enum insn_code icode;
6494 case ALTIVEC_BUILTIN_LD_INTERNAL_16qi:
6495 icode = CODE_FOR_altivec_lvx_16qi;
6497 case ALTIVEC_BUILTIN_LD_INTERNAL_8hi:
6498 icode = CODE_FOR_altivec_lvx_8hi;
6500 case ALTIVEC_BUILTIN_LD_INTERNAL_4si:
6501 icode = CODE_FOR_altivec_lvx_4si;
6503 case ALTIVEC_BUILTIN_LD_INTERNAL_4sf:
6504 icode = CODE_FOR_altivec_lvx_4sf;
6513 arg0 = TREE_VALUE (arglist);
6514 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6515 tmode = insn_data[icode].operand[0].mode;
6516 mode0 = insn_data[icode].operand[1].mode;
6519 || GET_MODE (target) != tmode
6520 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6521 target = gen_reg_rtx (tmode);
6523 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6524 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
6526 pat = GEN_FCN (icode) (target, op0);
6533 /* Expand the stvx builtins. */
6535 altivec_expand_st_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
6538 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6539 tree arglist = TREE_OPERAND (exp, 1);
6540 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
6542 enum machine_mode mode0, mode1;
6544 enum insn_code icode;
6548 case ALTIVEC_BUILTIN_ST_INTERNAL_16qi:
6549 icode = CODE_FOR_altivec_stvx_16qi;
6551 case ALTIVEC_BUILTIN_ST_INTERNAL_8hi:
6552 icode = CODE_FOR_altivec_stvx_8hi;
6554 case ALTIVEC_BUILTIN_ST_INTERNAL_4si:
6555 icode = CODE_FOR_altivec_stvx_4si;
6557 case ALTIVEC_BUILTIN_ST_INTERNAL_4sf:
6558 icode = CODE_FOR_altivec_stvx_4sf;
6565 arg0 = TREE_VALUE (arglist);
6566 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
6567 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6568 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6569 mode0 = insn_data[icode].operand[0].mode;
6570 mode1 = insn_data[icode].operand[1].mode;
6572 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
6573 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
6574 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
6575 op1 = copy_to_mode_reg (mode1, op1);
6577 pat = GEN_FCN (icode) (op0, op1);
6585 /* Expand the dst builtins. */
6587 altivec_expand_dst_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
6590 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6591 tree arglist = TREE_OPERAND (exp, 1);
6592 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
6593 tree arg0, arg1, arg2;
6594 enum machine_mode mode0, mode1, mode2;
6595 rtx pat, op0, op1, op2;
6596 struct builtin_description *d;
6601 /* Handle DST variants. */
6602 d = (struct builtin_description *) bdesc_dst;
6603 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
6604 if (d->code == fcode)
6606 arg0 = TREE_VALUE (arglist);
6607 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
6608 arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6609 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6610 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6611 op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
6612 mode0 = insn_data[d->icode].operand[0].mode;
6613 mode1 = insn_data[d->icode].operand[1].mode;
6614 mode2 = insn_data[d->icode].operand[2].mode;
6616 /* Invalid arguments, bail out before generating bad rtl. */
6617 if (arg0 == error_mark_node
6618 || arg1 == error_mark_node
6619 || arg2 == error_mark_node)
6624 if (TREE_CODE (arg2) != INTEGER_CST
6625 || TREE_INT_CST_LOW (arg2) & ~0x3)
6627 error ("argument to `%s' must be a 2-bit unsigned literal", d->name);
6631 if (! (*insn_data[d->icode].operand[0].predicate) (op0, mode0))
6632 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
6633 if (! (*insn_data[d->icode].operand[1].predicate) (op1, mode1))
6634 op1 = copy_to_mode_reg (mode1, op1);
6636 pat = GEN_FCN (d->icode) (op0, op1, op2);
6646 /* Expand the builtin in EXP and store the result in TARGET. Store
6647 true in *EXPANDEDP if we found a builtin to expand. */
6649 altivec_expand_builtin (tree exp, rtx target, bool *expandedp)
6651 struct builtin_description *d;
6652 struct builtin_description_predicates *dp;
6654 enum insn_code icode;
6655 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6656 tree arglist = TREE_OPERAND (exp, 1);
6659 enum machine_mode tmode, mode0;
6660 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
6662 target = altivec_expand_ld_builtin (exp, target, expandedp);
6666 target = altivec_expand_st_builtin (exp, target, expandedp);
6670 target = altivec_expand_dst_builtin (exp, target, expandedp);
6678 case ALTIVEC_BUILTIN_STVX:
6679 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx, arglist);
6680 case ALTIVEC_BUILTIN_STVEBX:
6681 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx, arglist);
6682 case ALTIVEC_BUILTIN_STVEHX:
6683 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx, arglist);
6684 case ALTIVEC_BUILTIN_STVEWX:
6685 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvewx, arglist);
6686 case ALTIVEC_BUILTIN_STVXL:
6687 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvxl, arglist);
6689 case ALTIVEC_BUILTIN_MFVSCR:
6690 icode = CODE_FOR_altivec_mfvscr;
6691 tmode = insn_data[icode].operand[0].mode;
6694 || GET_MODE (target) != tmode
6695 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6696 target = gen_reg_rtx (tmode);
6698 pat = GEN_FCN (icode) (target);
6704 case ALTIVEC_BUILTIN_MTVSCR:
6705 icode = CODE_FOR_altivec_mtvscr;
6706 arg0 = TREE_VALUE (arglist);
6707 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6708 mode0 = insn_data[icode].operand[0].mode;
6710 /* If we got invalid arguments bail out before generating bad rtl. */
6711 if (arg0 == error_mark_node)
6714 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
6715 op0 = copy_to_mode_reg (mode0, op0);
6717 pat = GEN_FCN (icode) (op0);
6722 case ALTIVEC_BUILTIN_DSSALL:
6723 emit_insn (gen_altivec_dssall ());
6726 case ALTIVEC_BUILTIN_DSS:
6727 icode = CODE_FOR_altivec_dss;
6728 arg0 = TREE_VALUE (arglist);
6730 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6731 mode0 = insn_data[icode].operand[0].mode;
6733 /* If we got invalid arguments bail out before generating bad rtl. */
6734 if (arg0 == error_mark_node)
6737 if (TREE_CODE (arg0) != INTEGER_CST
6738 || TREE_INT_CST_LOW (arg0) & ~0x3)
6740 error ("argument to dss must be a 2-bit unsigned literal");
6744 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
6745 op0 = copy_to_mode_reg (mode0, op0);
6747 emit_insn (gen_altivec_dss (op0));
6750 case ALTIVEC_BUILTIN_COMPILETIME_ERROR:
6751 arg0 = TREE_VALUE (arglist);
6752 while (TREE_CODE (arg0) == NOP_EXPR || TREE_CODE (arg0) == ADDR_EXPR)
6753 arg0 = TREE_OPERAND (arg0, 0);
6754 error ("invalid parameter combination for `%s' AltiVec intrinsic",
6755 TREE_STRING_POINTER (arg0));
6760 /* Expand abs* operations. */
6761 d = (struct builtin_description *) bdesc_abs;
6762 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
6763 if (d->code == fcode)
6764 return altivec_expand_abs_builtin (d->icode, arglist, target);
6766 /* Expand the AltiVec predicates. */
6767 dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
6768 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
6769 if (dp->code == fcode)
6770 return altivec_expand_predicate_builtin (dp->icode, dp->opcode, arglist, target);
6772 /* LV* are funky. We initialized them differently. */
6775 case ALTIVEC_BUILTIN_LVSL:
6776 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvsl,
6778 case ALTIVEC_BUILTIN_LVSR:
6779 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvsr,
6781 case ALTIVEC_BUILTIN_LVEBX:
6782 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvebx,
6784 case ALTIVEC_BUILTIN_LVEHX:
6785 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvehx,
6787 case ALTIVEC_BUILTIN_LVEWX:
6788 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvewx,
6790 case ALTIVEC_BUILTIN_LVXL:
6791 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvxl,
6793 case ALTIVEC_BUILTIN_LVX:
6794 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvx,
6805 /* Binops that need to be initialized manually, but can be expanded
6806 automagically by rs6000_expand_binop_builtin. */
6807 static struct builtin_description bdesc_2arg_spe[] =
6809 { 0, CODE_FOR_spe_evlddx, "__builtin_spe_evlddx", SPE_BUILTIN_EVLDDX },
6810 { 0, CODE_FOR_spe_evldwx, "__builtin_spe_evldwx", SPE_BUILTIN_EVLDWX },
6811 { 0, CODE_FOR_spe_evldhx, "__builtin_spe_evldhx", SPE_BUILTIN_EVLDHX },
6812 { 0, CODE_FOR_spe_evlwhex, "__builtin_spe_evlwhex", SPE_BUILTIN_EVLWHEX },
6813 { 0, CODE_FOR_spe_evlwhoux, "__builtin_spe_evlwhoux", SPE_BUILTIN_EVLWHOUX },
6814 { 0, CODE_FOR_spe_evlwhosx, "__builtin_spe_evlwhosx", SPE_BUILTIN_EVLWHOSX },
6815 { 0, CODE_FOR_spe_evlwwsplatx, "__builtin_spe_evlwwsplatx", SPE_BUILTIN_EVLWWSPLATX },
6816 { 0, CODE_FOR_spe_evlwhsplatx, "__builtin_spe_evlwhsplatx", SPE_BUILTIN_EVLWHSPLATX },
6817 { 0, CODE_FOR_spe_evlhhesplatx, "__builtin_spe_evlhhesplatx", SPE_BUILTIN_EVLHHESPLATX },
6818 { 0, CODE_FOR_spe_evlhhousplatx, "__builtin_spe_evlhhousplatx", SPE_BUILTIN_EVLHHOUSPLATX },
6819 { 0, CODE_FOR_spe_evlhhossplatx, "__builtin_spe_evlhhossplatx", SPE_BUILTIN_EVLHHOSSPLATX },
6820 { 0, CODE_FOR_spe_evldd, "__builtin_spe_evldd", SPE_BUILTIN_EVLDD },
6821 { 0, CODE_FOR_spe_evldw, "__builtin_spe_evldw", SPE_BUILTIN_EVLDW },
6822 { 0, CODE_FOR_spe_evldh, "__builtin_spe_evldh", SPE_BUILTIN_EVLDH },
6823 { 0, CODE_FOR_spe_evlwhe, "__builtin_spe_evlwhe", SPE_BUILTIN_EVLWHE },
6824 { 0, CODE_FOR_spe_evlwhou, "__builtin_spe_evlwhou", SPE_BUILTIN_EVLWHOU },
6825 { 0, CODE_FOR_spe_evlwhos, "__builtin_spe_evlwhos", SPE_BUILTIN_EVLWHOS },
6826 { 0, CODE_FOR_spe_evlwwsplat, "__builtin_spe_evlwwsplat", SPE_BUILTIN_EVLWWSPLAT },
6827 { 0, CODE_FOR_spe_evlwhsplat, "__builtin_spe_evlwhsplat", SPE_BUILTIN_EVLWHSPLAT },
6828 { 0, CODE_FOR_spe_evlhhesplat, "__builtin_spe_evlhhesplat", SPE_BUILTIN_EVLHHESPLAT },
6829 { 0, CODE_FOR_spe_evlhhousplat, "__builtin_spe_evlhhousplat", SPE_BUILTIN_EVLHHOUSPLAT },
6830 { 0, CODE_FOR_spe_evlhhossplat, "__builtin_spe_evlhhossplat", SPE_BUILTIN_EVLHHOSSPLAT }
6833 /* Expand the builtin in EXP and store the result in TARGET. Store
6834 true in *EXPANDEDP if we found a builtin to expand.
6836 This expands the SPE builtins that are not simple unary and binary
6839 spe_expand_builtin (tree exp, rtx target, bool *expandedp)
6841 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6842 tree arglist = TREE_OPERAND (exp, 1);
6844 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
6845 enum insn_code icode;
6846 enum machine_mode tmode, mode0;
6848 struct builtin_description *d;
6853 /* Syntax check for a 5-bit unsigned immediate. */
6856 case SPE_BUILTIN_EVSTDD:
6857 case SPE_BUILTIN_EVSTDH:
6858 case SPE_BUILTIN_EVSTDW:
6859 case SPE_BUILTIN_EVSTWHE:
6860 case SPE_BUILTIN_EVSTWHO:
6861 case SPE_BUILTIN_EVSTWWE:
6862 case SPE_BUILTIN_EVSTWWO:
6863 arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6864 if (TREE_CODE (arg1) != INTEGER_CST
6865 || TREE_INT_CST_LOW (arg1) & ~0x1f)
6867 error ("argument 2 must be a 5-bit unsigned literal");
6875 /* The evsplat*i instructions are not quite generic. */
6878 case SPE_BUILTIN_EVSPLATFI:
6879 return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplatfi,
6881 case SPE_BUILTIN_EVSPLATI:
6882 return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplati,
6888 d = (struct builtin_description *) bdesc_2arg_spe;
6889 for (i = 0; i < ARRAY_SIZE (bdesc_2arg_spe); ++i, ++d)
6890 if (d->code == fcode)
6891 return rs6000_expand_binop_builtin (d->icode, arglist, target);
6893 d = (struct builtin_description *) bdesc_spe_predicates;
6894 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, ++d)
6895 if (d->code == fcode)
6896 return spe_expand_predicate_builtin (d->icode, arglist, target);
6898 d = (struct builtin_description *) bdesc_spe_evsel;
6899 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, ++d)
6900 if (d->code == fcode)
6901 return spe_expand_evsel_builtin (d->icode, arglist, target);
6905 case SPE_BUILTIN_EVSTDDX:
6906 return spe_expand_stv_builtin (CODE_FOR_spe_evstddx, arglist);
6907 case SPE_BUILTIN_EVSTDHX:
6908 return spe_expand_stv_builtin (CODE_FOR_spe_evstdhx, arglist);
6909 case SPE_BUILTIN_EVSTDWX:
6910 return spe_expand_stv_builtin (CODE_FOR_spe_evstdwx, arglist);
6911 case SPE_BUILTIN_EVSTWHEX:
6912 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhex, arglist);
6913 case SPE_BUILTIN_EVSTWHOX:
6914 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhox, arglist);
6915 case SPE_BUILTIN_EVSTWWEX:
6916 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwex, arglist);
6917 case SPE_BUILTIN_EVSTWWOX:
6918 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwox, arglist);
6919 case SPE_BUILTIN_EVSTDD:
6920 return spe_expand_stv_builtin (CODE_FOR_spe_evstdd, arglist);
6921 case SPE_BUILTIN_EVSTDH:
6922 return spe_expand_stv_builtin (CODE_FOR_spe_evstdh, arglist);
6923 case SPE_BUILTIN_EVSTDW:
6924 return spe_expand_stv_builtin (CODE_FOR_spe_evstdw, arglist);
6925 case SPE_BUILTIN_EVSTWHE:
6926 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhe, arglist);
6927 case SPE_BUILTIN_EVSTWHO:
6928 return spe_expand_stv_builtin (CODE_FOR_spe_evstwho, arglist);
6929 case SPE_BUILTIN_EVSTWWE:
6930 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwe, arglist);
6931 case SPE_BUILTIN_EVSTWWO:
6932 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwo, arglist);
6933 case SPE_BUILTIN_MFSPEFSCR:
6934 icode = CODE_FOR_spe_mfspefscr;
6935 tmode = insn_data[icode].operand[0].mode;
6938 || GET_MODE (target) != tmode
6939 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6940 target = gen_reg_rtx (tmode);
6942 pat = GEN_FCN (icode) (target);
6947 case SPE_BUILTIN_MTSPEFSCR:
6948 icode = CODE_FOR_spe_mtspefscr;
6949 arg0 = TREE_VALUE (arglist);
6950 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6951 mode0 = insn_data[icode].operand[0].mode;
6953 if (arg0 == error_mark_node)
6956 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
6957 op0 = copy_to_mode_reg (mode0, op0);
6959 pat = GEN_FCN (icode) (op0);
6972 spe_expand_predicate_builtin (enum insn_code icode, tree arglist, rtx target)
6974 rtx pat, scratch, tmp;
6975 tree form = TREE_VALUE (arglist);
6976 tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
6977 tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6978 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6979 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6980 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
6981 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
6985 if (TREE_CODE (form) != INTEGER_CST)
6987 error ("argument 1 of __builtin_spe_predicate must be a constant");
6991 form_int = TREE_INT_CST_LOW (form);
6996 if (arg0 == error_mark_node || arg1 == error_mark_node)
7000 || GET_MODE (target) != SImode
7001 || ! (*insn_data[icode].operand[0].predicate) (target, SImode))
7002 target = gen_reg_rtx (SImode);
7004 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7005 op0 = copy_to_mode_reg (mode0, op0);
7006 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
7007 op1 = copy_to_mode_reg (mode1, op1);
7009 scratch = gen_reg_rtx (CCmode);
7011 pat = GEN_FCN (icode) (scratch, op0, op1);
7016 /* There are 4 variants for each predicate: _any_, _all_, _upper_,
7017 _lower_. We use one compare, but look in different bits of the
7018 CR for each variant.
7020 There are 2 elements in each SPE simd type (upper/lower). The CR
7021 bits are set as follows:
7023 BIT0 | BIT 1 | BIT 2 | BIT 3
7024 U | L | (U | L) | (U & L)
7026 So, for an "all" relationship, BIT 3 would be set.
7027 For an "any" relationship, BIT 2 would be set. Etc.
7029 Following traditional nomenclature, these bits map to:
7031 BIT0 | BIT 1 | BIT 2 | BIT 3
7034 Later, we will generate rtl to look in the LT/EQ/EQ/OV bits.
7039 /* All variant. OV bit. */
7041 /* We need to get to the OV bit, which is the ORDERED bit. We
7042 could generate (ordered:SI (reg:CC xx) (const_int 0)), but
7043 that's ugly and will trigger a validate_condition_mode abort.
7044 So let's just use another pattern. */
7045 emit_insn (gen_move_from_CR_ov_bit (target, scratch));
7047 /* Any variant. EQ bit. */
7051 /* Upper variant. LT bit. */
7055 /* Lower variant. GT bit. */
7060 error ("argument 1 of __builtin_spe_predicate is out of range");
7064 tmp = gen_rtx_fmt_ee (code, SImode, scratch, const0_rtx);
7065 emit_move_insn (target, tmp);
7070 /* The evsel builtins look like this:
7072 e = __builtin_spe_evsel_OP (a, b, c, d);
7076 e[upper] = a[upper] *OP* b[upper] ? c[upper] : d[upper];
7077 e[lower] = a[lower] *OP* b[lower] ? c[lower] : d[lower];
7081 spe_expand_evsel_builtin (enum insn_code icode, tree arglist, rtx target)
7084 tree arg0 = TREE_VALUE (arglist);
7085 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
7086 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
7087 tree arg3 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arglist))));
7088 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
7089 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
7090 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
7091 rtx op3 = expand_expr (arg3, NULL_RTX, VOIDmode, 0);
7092 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7093 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
7098 if (arg0 == error_mark_node || arg1 == error_mark_node
7099 || arg2 == error_mark_node || arg3 == error_mark_node)
7103 || GET_MODE (target) != mode0
7104 || ! (*insn_data[icode].operand[0].predicate) (target, mode0))
7105 target = gen_reg_rtx (mode0);
7107 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7108 op0 = copy_to_mode_reg (mode0, op0);
7109 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
7110 op1 = copy_to_mode_reg (mode0, op1);
7111 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
7112 op2 = copy_to_mode_reg (mode0, op2);
7113 if (! (*insn_data[icode].operand[1].predicate) (op3, mode1))
7114 op3 = copy_to_mode_reg (mode0, op3);
7116 /* Generate the compare. */
7117 scratch = gen_reg_rtx (CCmode);
7118 pat = GEN_FCN (icode) (scratch, op0, op1);
7123 if (mode0 == V2SImode)
7124 emit_insn (gen_spe_evsel (target, op2, op3, scratch));
7126 emit_insn (gen_spe_evsel_fs (target, op2, op3, scratch));
7131 /* Expand an expression EXP that calls a built-in function,
7132 with result going to TARGET if that's convenient
7133 (and in mode MODE if that's convenient).
7134 SUBTARGET may be used as the target for computing one of EXP's operands.
7135 IGNORE is nonzero if the value is to be ignored. */
7138 rs6000_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
7139 enum machine_mode mode ATTRIBUTE_UNUSED,
7140 int ignore ATTRIBUTE_UNUSED)
7142 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7143 tree arglist = TREE_OPERAND (exp, 1);
7144 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
7145 struct builtin_description *d;
7152 ret = altivec_expand_builtin (exp, target, &success);
7159 ret = spe_expand_builtin (exp, target, &success);
7165 if (TARGET_ALTIVEC || TARGET_SPE)
7167 /* Handle simple unary operations. */
7168 d = (struct builtin_description *) bdesc_1arg;
7169 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
7170 if (d->code == fcode)
7171 return rs6000_expand_unop_builtin (d->icode, arglist, target);
7173 /* Handle simple binary operations. */
7174 d = (struct builtin_description *) bdesc_2arg;
7175 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
7176 if (d->code == fcode)
7177 return rs6000_expand_binop_builtin (d->icode, arglist, target);
7179 /* Handle simple ternary operations. */
7180 d = (struct builtin_description *) bdesc_3arg;
7181 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
7182 if (d->code == fcode)
7183 return rs6000_expand_ternop_builtin (d->icode, arglist, target);
7191 build_opaque_vector_type (tree node, int nunits)
7193 node = copy_node (node);
7194 TYPE_MAIN_VARIANT (node) = node;
7195 return build_vector_type (node, nunits);
7199 rs6000_init_builtins (void)
7201 V2SI_type_node = build_vector_type (intSI_type_node, 2);
7202 V2SF_type_node = build_vector_type (float_type_node, 2);
7203 V4HI_type_node = build_vector_type (intHI_type_node, 4);
7204 V4SI_type_node = build_vector_type (intSI_type_node, 4);
7205 V4SF_type_node = build_vector_type (float_type_node, 4);
7206 V8HI_type_node = build_vector_type (intHI_type_node, 8);
7207 V16QI_type_node = build_vector_type (intQI_type_node, 16);
7209 unsigned_V16QI_type_node = build_vector_type (unsigned_intQI_type_node, 16);
7210 unsigned_V8HI_type_node = build_vector_type (unsigned_intHI_type_node, 8);
7211 unsigned_V4SI_type_node = build_vector_type (unsigned_intSI_type_node, 4);
7213 opaque_V2SF_type_node = build_opaque_vector_type (float_type_node, 2);
7214 opaque_V2SI_type_node = build_opaque_vector_type (intSI_type_node, 2);
7215 opaque_p_V2SI_type_node = build_pointer_type (opaque_V2SI_type_node);
7217 /* The 'vector bool ...' types must be kept distinct from 'vector unsigned ...'
7218 types, especially in C++ land. Similarly, 'vector pixel' is distinct from
7219 'vector unsigned short'. */
7221 bool_char_type_node = copy_node (unsigned_intQI_type_node);
7222 TYPE_MAIN_VARIANT (bool_char_type_node) = bool_char_type_node;
7223 bool_short_type_node = copy_node (unsigned_intHI_type_node);
7224 TYPE_MAIN_VARIANT (bool_short_type_node) = bool_short_type_node;
7225 bool_int_type_node = copy_node (unsigned_intSI_type_node);
7226 TYPE_MAIN_VARIANT (bool_int_type_node) = bool_int_type_node;
7227 pixel_type_node = copy_node (unsigned_intHI_type_node);
7228 TYPE_MAIN_VARIANT (pixel_type_node) = pixel_type_node;
7230 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7231 get_identifier ("__bool char"),
7232 bool_char_type_node));
7233 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7234 get_identifier ("__bool short"),
7235 bool_short_type_node));
7236 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7237 get_identifier ("__bool int"),
7238 bool_int_type_node));
7239 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7240 get_identifier ("__pixel"),
7243 bool_V16QI_type_node = build_vector_type (bool_char_type_node, 16);
7244 bool_V8HI_type_node = build_vector_type (bool_short_type_node, 8);
7245 bool_V4SI_type_node = build_vector_type (bool_int_type_node, 4);
7246 pixel_V8HI_type_node = build_vector_type (pixel_type_node, 8);
7248 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7249 get_identifier ("__vector unsigned char"),
7250 unsigned_V16QI_type_node));
7251 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7252 get_identifier ("__vector signed char"),
7254 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7255 get_identifier ("__vector __bool char"),
7256 bool_V16QI_type_node));
7258 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7259 get_identifier ("__vector unsigned short"),
7260 unsigned_V8HI_type_node));
7261 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7262 get_identifier ("__vector signed short"),
7264 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7265 get_identifier ("__vector __bool short"),
7266 bool_V8HI_type_node));
7268 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7269 get_identifier ("__vector unsigned int"),
7270 unsigned_V4SI_type_node));
7271 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7272 get_identifier ("__vector signed int"),
7274 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7275 get_identifier ("__vector __bool int"),
7276 bool_V4SI_type_node));
7278 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7279 get_identifier ("__vector float"),
7281 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7282 get_identifier ("__vector __pixel"),
7283 pixel_V8HI_type_node));
7286 spe_init_builtins ();
7288 altivec_init_builtins ();
7289 if (TARGET_ALTIVEC || TARGET_SPE)
7290 rs6000_common_init_builtins ();
7293 /* Search through a set of builtins and enable the mask bits.
7294 DESC is an array of builtins.
7295 SIZE is the total number of builtins.
7296 START is the builtin enum at which to start.
7297 END is the builtin enum at which to end. */
7299 enable_mask_for_builtins (struct builtin_description *desc, int size,
7300 enum rs6000_builtins start,
7301 enum rs6000_builtins end)
7305 for (i = 0; i < size; ++i)
7306 if (desc[i].code == start)
7312 for (; i < size; ++i)
7314 /* Flip all the bits on. */
7315 desc[i].mask = target_flags;
7316 if (desc[i].code == end)
7322 spe_init_builtins (void)
7324 tree endlink = void_list_node;
7325 tree puint_type_node = build_pointer_type (unsigned_type_node);
7326 tree pushort_type_node = build_pointer_type (short_unsigned_type_node);
7327 struct builtin_description *d;
7330 tree v2si_ftype_4_v2si
7331 = build_function_type
7332 (opaque_V2SI_type_node,
7333 tree_cons (NULL_TREE, opaque_V2SI_type_node,
7334 tree_cons (NULL_TREE, opaque_V2SI_type_node,
7335 tree_cons (NULL_TREE, opaque_V2SI_type_node,
7336 tree_cons (NULL_TREE, opaque_V2SI_type_node,
7339 tree v2sf_ftype_4_v2sf
7340 = build_function_type
7341 (opaque_V2SF_type_node,
7342 tree_cons (NULL_TREE, opaque_V2SF_type_node,
7343 tree_cons (NULL_TREE, opaque_V2SF_type_node,
7344 tree_cons (NULL_TREE, opaque_V2SF_type_node,
7345 tree_cons (NULL_TREE, opaque_V2SF_type_node,
7348 tree int_ftype_int_v2si_v2si
7349 = build_function_type
7351 tree_cons (NULL_TREE, integer_type_node,
7352 tree_cons (NULL_TREE, opaque_V2SI_type_node,
7353 tree_cons (NULL_TREE, opaque_V2SI_type_node,
7356 tree int_ftype_int_v2sf_v2sf
7357 = build_function_type
7359 tree_cons (NULL_TREE, integer_type_node,
7360 tree_cons (NULL_TREE, opaque_V2SF_type_node,
7361 tree_cons (NULL_TREE, opaque_V2SF_type_node,
7364 tree void_ftype_v2si_puint_int
7365 = build_function_type (void_type_node,
7366 tree_cons (NULL_TREE, opaque_V2SI_type_node,
7367 tree_cons (NULL_TREE, puint_type_node,
7368 tree_cons (NULL_TREE,
7372 tree void_ftype_v2si_puint_char
7373 = build_function_type (void_type_node,
7374 tree_cons (NULL_TREE, opaque_V2SI_type_node,
7375 tree_cons (NULL_TREE, puint_type_node,
7376 tree_cons (NULL_TREE,
7380 tree void_ftype_v2si_pv2si_int
7381 = build_function_type (void_type_node,
7382 tree_cons (NULL_TREE, opaque_V2SI_type_node,
7383 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
7384 tree_cons (NULL_TREE,
7388 tree void_ftype_v2si_pv2si_char
7389 = build_function_type (void_type_node,
7390 tree_cons (NULL_TREE, opaque_V2SI_type_node,
7391 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
7392 tree_cons (NULL_TREE,
7397 = build_function_type (void_type_node,
7398 tree_cons (NULL_TREE, integer_type_node, endlink));
7401 = build_function_type (integer_type_node, endlink);
7403 tree v2si_ftype_pv2si_int
7404 = build_function_type (opaque_V2SI_type_node,
7405 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
7406 tree_cons (NULL_TREE, integer_type_node,
7409 tree v2si_ftype_puint_int
7410 = build_function_type (opaque_V2SI_type_node,
7411 tree_cons (NULL_TREE, puint_type_node,
7412 tree_cons (NULL_TREE, integer_type_node,
7415 tree v2si_ftype_pushort_int
7416 = build_function_type (opaque_V2SI_type_node,
7417 tree_cons (NULL_TREE, pushort_type_node,
7418 tree_cons (NULL_TREE, integer_type_node,
7421 tree v2si_ftype_signed_char
7422 = build_function_type (opaque_V2SI_type_node,
7423 tree_cons (NULL_TREE, signed_char_type_node,
7426 /* The initialization of the simple binary and unary builtins is
7427 done in rs6000_common_init_builtins, but we have to enable the
7428 mask bits here manually because we have run out of `target_flags'
7429 bits. We really need to redesign this mask business. */
7431 enable_mask_for_builtins ((struct builtin_description *) bdesc_2arg,
7432 ARRAY_SIZE (bdesc_2arg),
7435 enable_mask_for_builtins ((struct builtin_description *) bdesc_1arg,
7436 ARRAY_SIZE (bdesc_1arg),
7438 SPE_BUILTIN_EVSUBFUSIAAW);
7439 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_predicates,
7440 ARRAY_SIZE (bdesc_spe_predicates),
7441 SPE_BUILTIN_EVCMPEQ,
7442 SPE_BUILTIN_EVFSTSTLT);
7443 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_evsel,
7444 ARRAY_SIZE (bdesc_spe_evsel),
7445 SPE_BUILTIN_EVSEL_CMPGTS,
7446 SPE_BUILTIN_EVSEL_FSTSTEQ);
7448 (*lang_hooks.decls.pushdecl)
7449 (build_decl (TYPE_DECL, get_identifier ("__ev64_opaque__"),
7450 opaque_V2SI_type_node));
7452 /* Initialize irregular SPE builtins. */
7454 def_builtin (target_flags, "__builtin_spe_mtspefscr", void_ftype_int, SPE_BUILTIN_MTSPEFSCR);
7455 def_builtin (target_flags, "__builtin_spe_mfspefscr", int_ftype_void, SPE_BUILTIN_MFSPEFSCR);
7456 def_builtin (target_flags, "__builtin_spe_evstddx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDDX);
7457 def_builtin (target_flags, "__builtin_spe_evstdhx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDHX);
7458 def_builtin (target_flags, "__builtin_spe_evstdwx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDWX);
7459 def_builtin (target_flags, "__builtin_spe_evstwhex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHEX);
7460 def_builtin (target_flags, "__builtin_spe_evstwhox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHOX);
7461 def_builtin (target_flags, "__builtin_spe_evstwwex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWEX);
7462 def_builtin (target_flags, "__builtin_spe_evstwwox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWOX);
7463 def_builtin (target_flags, "__builtin_spe_evstdd", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDD);
7464 def_builtin (target_flags, "__builtin_spe_evstdh", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDH);
7465 def_builtin (target_flags, "__builtin_spe_evstdw", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDW);
7466 def_builtin (target_flags, "__builtin_spe_evstwhe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHE);
7467 def_builtin (target_flags, "__builtin_spe_evstwho", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHO);
7468 def_builtin (target_flags, "__builtin_spe_evstwwe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWE);
7469 def_builtin (target_flags, "__builtin_spe_evstwwo", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWO);
7470 def_builtin (target_flags, "__builtin_spe_evsplatfi", v2si_ftype_signed_char, SPE_BUILTIN_EVSPLATFI);
7471 def_builtin (target_flags, "__builtin_spe_evsplati", v2si_ftype_signed_char, SPE_BUILTIN_EVSPLATI);
7474 def_builtin (target_flags, "__builtin_spe_evlddx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDDX);
7475 def_builtin (target_flags, "__builtin_spe_evldwx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDWX);
7476 def_builtin (target_flags, "__builtin_spe_evldhx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDHX);
7477 def_builtin (target_flags, "__builtin_spe_evlwhex", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHEX);
7478 def_builtin (target_flags, "__builtin_spe_evlwhoux", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOUX);
7479 def_builtin (target_flags, "__builtin_spe_evlwhosx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOSX);
7480 def_builtin (target_flags, "__builtin_spe_evlwwsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLATX);
7481 def_builtin (target_flags, "__builtin_spe_evlwhsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLATX);
7482 def_builtin (target_flags, "__builtin_spe_evlhhesplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLATX);
7483 def_builtin (target_flags, "__builtin_spe_evlhhousplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLATX);
7484 def_builtin (target_flags, "__builtin_spe_evlhhossplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLATX);
7485 def_builtin (target_flags, "__builtin_spe_evldd", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDD);
7486 def_builtin (target_flags, "__builtin_spe_evldw", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDW);
7487 def_builtin (target_flags, "__builtin_spe_evldh", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDH);
7488 def_builtin (target_flags, "__builtin_spe_evlhhesplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLAT);
7489 def_builtin (target_flags, "__builtin_spe_evlhhossplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLAT);
7490 def_builtin (target_flags, "__builtin_spe_evlhhousplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLAT);
7491 def_builtin (target_flags, "__builtin_spe_evlwhe", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHE);
7492 def_builtin (target_flags, "__builtin_spe_evlwhos", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOS);
7493 def_builtin (target_flags, "__builtin_spe_evlwhou", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOU);
7494 def_builtin (target_flags, "__builtin_spe_evlwhsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLAT);
7495 def_builtin (target_flags, "__builtin_spe_evlwwsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLAT);
7498 d = (struct builtin_description *) bdesc_spe_predicates;
7499 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, d++)
7503 switch (insn_data[d->icode].operand[1].mode)
7506 type = int_ftype_int_v2si_v2si;
7509 type = int_ftype_int_v2sf_v2sf;
7515 def_builtin (d->mask, d->name, type, d->code);
7518 /* Evsel predicates. */
7519 d = (struct builtin_description *) bdesc_spe_evsel;
7520 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, d++)
7524 switch (insn_data[d->icode].operand[1].mode)
7527 type = v2si_ftype_4_v2si;
7530 type = v2sf_ftype_4_v2sf;
7536 def_builtin (d->mask, d->name, type, d->code);
7541 altivec_init_builtins (void)
7543 struct builtin_description *d;
7544 struct builtin_description_predicates *dp;
7546 tree pfloat_type_node = build_pointer_type (float_type_node);
7547 tree pint_type_node = build_pointer_type (integer_type_node);
7548 tree pshort_type_node = build_pointer_type (short_integer_type_node);
7549 tree pchar_type_node = build_pointer_type (char_type_node);
7551 tree pvoid_type_node = build_pointer_type (void_type_node);
7553 tree pcfloat_type_node = build_pointer_type (build_qualified_type (float_type_node, TYPE_QUAL_CONST));
7554 tree pcint_type_node = build_pointer_type (build_qualified_type (integer_type_node, TYPE_QUAL_CONST));
7555 tree pcshort_type_node = build_pointer_type (build_qualified_type (short_integer_type_node, TYPE_QUAL_CONST));
7556 tree pcchar_type_node = build_pointer_type (build_qualified_type (char_type_node, TYPE_QUAL_CONST));
7558 tree pcvoid_type_node = build_pointer_type (build_qualified_type (void_type_node, TYPE_QUAL_CONST));
7560 tree int_ftype_int_v4si_v4si
7561 = build_function_type_list (integer_type_node,
7562 integer_type_node, V4SI_type_node,
7563 V4SI_type_node, NULL_TREE);
7564 tree v4sf_ftype_pcfloat
7565 = build_function_type_list (V4SF_type_node, pcfloat_type_node, NULL_TREE);
7566 tree void_ftype_pfloat_v4sf
7567 = build_function_type_list (void_type_node,
7568 pfloat_type_node, V4SF_type_node, NULL_TREE);
7569 tree v4si_ftype_pcint
7570 = build_function_type_list (V4SI_type_node, pcint_type_node, NULL_TREE);
7571 tree void_ftype_pint_v4si
7572 = build_function_type_list (void_type_node,
7573 pint_type_node, V4SI_type_node, NULL_TREE);
7574 tree v8hi_ftype_pcshort
7575 = build_function_type_list (V8HI_type_node, pcshort_type_node, NULL_TREE);
7576 tree void_ftype_pshort_v8hi
7577 = build_function_type_list (void_type_node,
7578 pshort_type_node, V8HI_type_node, NULL_TREE);
7579 tree v16qi_ftype_pcchar
7580 = build_function_type_list (V16QI_type_node, pcchar_type_node, NULL_TREE);
7581 tree void_ftype_pchar_v16qi
7582 = build_function_type_list (void_type_node,
7583 pchar_type_node, V16QI_type_node, NULL_TREE);
7584 tree void_ftype_v4si
7585 = build_function_type_list (void_type_node, V4SI_type_node, NULL_TREE);
7586 tree v8hi_ftype_void
7587 = build_function_type (V8HI_type_node, void_list_node);
7588 tree void_ftype_void
7589 = build_function_type (void_type_node, void_list_node);
7591 = build_function_type_list (void_type_node, char_type_node, NULL_TREE);
7593 tree v16qi_ftype_long_pcvoid
7594 = build_function_type_list (V16QI_type_node,
7595 long_integer_type_node, pcvoid_type_node, NULL_TREE);
7596 tree v8hi_ftype_long_pcvoid
7597 = build_function_type_list (V8HI_type_node,
7598 long_integer_type_node, pcvoid_type_node, NULL_TREE);
7599 tree v4si_ftype_long_pcvoid
7600 = build_function_type_list (V4SI_type_node,
7601 long_integer_type_node, pcvoid_type_node, NULL_TREE);
7603 tree void_ftype_v4si_long_pvoid
7604 = build_function_type_list (void_type_node,
7605 V4SI_type_node, long_integer_type_node,
7606 pvoid_type_node, NULL_TREE);
7607 tree void_ftype_v16qi_long_pvoid
7608 = build_function_type_list (void_type_node,
7609 V16QI_type_node, long_integer_type_node,
7610 pvoid_type_node, NULL_TREE);
7611 tree void_ftype_v8hi_long_pvoid
7612 = build_function_type_list (void_type_node,
7613 V8HI_type_node, long_integer_type_node,
7614 pvoid_type_node, NULL_TREE);
7615 tree int_ftype_int_v8hi_v8hi
7616 = build_function_type_list (integer_type_node,
7617 integer_type_node, V8HI_type_node,
7618 V8HI_type_node, NULL_TREE);
7619 tree int_ftype_int_v16qi_v16qi
7620 = build_function_type_list (integer_type_node,
7621 integer_type_node, V16QI_type_node,
7622 V16QI_type_node, NULL_TREE);
7623 tree int_ftype_int_v4sf_v4sf
7624 = build_function_type_list (integer_type_node,
7625 integer_type_node, V4SF_type_node,
7626 V4SF_type_node, NULL_TREE);
7627 tree v4si_ftype_v4si
7628 = build_function_type_list (V4SI_type_node, V4SI_type_node, NULL_TREE);
7629 tree v8hi_ftype_v8hi
7630 = build_function_type_list (V8HI_type_node, V8HI_type_node, NULL_TREE);
7631 tree v16qi_ftype_v16qi
7632 = build_function_type_list (V16QI_type_node, V16QI_type_node, NULL_TREE);
7633 tree v4sf_ftype_v4sf
7634 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
7635 tree void_ftype_pcvoid_int_int
7636 = build_function_type_list (void_type_node,
7637 pcvoid_type_node, integer_type_node,
7638 integer_type_node, NULL_TREE);
7639 tree int_ftype_pcchar
7640 = build_function_type_list (integer_type_node,
7641 pcchar_type_node, NULL_TREE);
7643 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pcfloat,
7644 ALTIVEC_BUILTIN_LD_INTERNAL_4sf);
7645 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf,
7646 ALTIVEC_BUILTIN_ST_INTERNAL_4sf);
7647 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4si", v4si_ftype_pcint,
7648 ALTIVEC_BUILTIN_LD_INTERNAL_4si);
7649 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si,
7650 ALTIVEC_BUILTIN_ST_INTERNAL_4si);
7651 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pcshort,
7652 ALTIVEC_BUILTIN_LD_INTERNAL_8hi);
7653 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi,
7654 ALTIVEC_BUILTIN_ST_INTERNAL_8hi);
7655 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pcchar,
7656 ALTIVEC_BUILTIN_LD_INTERNAL_16qi);
7657 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi,
7658 ALTIVEC_BUILTIN_ST_INTERNAL_16qi);
7659 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mtvscr", void_ftype_v4si, ALTIVEC_BUILTIN_MTVSCR);
7660 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mfvscr", v8hi_ftype_void, ALTIVEC_BUILTIN_MFVSCR);
7661 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dssall", void_ftype_void, ALTIVEC_BUILTIN_DSSALL);
7662 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dss", void_ftype_qi, ALTIVEC_BUILTIN_DSS);
7663 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVSL);
7664 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsr", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVSR);
7665 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvebx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEBX);
7666 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvehx", v8hi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEHX);
7667 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvewx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEWX);
7668 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvxl", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVXL);
7669 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVX);
7670 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvx", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVX);
7671 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvewx", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVEWX);
7672 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvxl", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVXL);
7673 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvebx", void_ftype_v16qi_long_pvoid, ALTIVEC_BUILTIN_STVEBX);
7674 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvehx", void_ftype_v8hi_long_pvoid, ALTIVEC_BUILTIN_STVEHX);
7676 /* See altivec.h for usage of "__builtin_altivec_compiletime_error". */
7677 def_builtin (MASK_ALTIVEC, "__builtin_altivec_compiletime_error", int_ftype_pcchar,
7678 ALTIVEC_BUILTIN_COMPILETIME_ERROR);
7680 /* Add the DST variants. */
7681 d = (struct builtin_description *) bdesc_dst;
7682 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
7683 def_builtin (d->mask, d->name, void_ftype_pcvoid_int_int, d->code);
7685 /* Initialize the predicates. */
7686 dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
7687 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
7689 enum machine_mode mode1;
7692 mode1 = insn_data[dp->icode].operand[1].mode;
7697 type = int_ftype_int_v4si_v4si;
7700 type = int_ftype_int_v8hi_v8hi;
7703 type = int_ftype_int_v16qi_v16qi;
7706 type = int_ftype_int_v4sf_v4sf;
7712 def_builtin (dp->mask, dp->name, type, dp->code);
7715 /* Initialize the abs* operators. */
7716 d = (struct builtin_description *) bdesc_abs;
7717 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
7719 enum machine_mode mode0;
7722 mode0 = insn_data[d->icode].operand[0].mode;
7727 type = v4si_ftype_v4si;
7730 type = v8hi_ftype_v8hi;
7733 type = v16qi_ftype_v16qi;
7736 type = v4sf_ftype_v4sf;
7742 def_builtin (d->mask, d->name, type, d->code);
7747 rs6000_common_init_builtins (void)
7749 struct builtin_description *d;
7752 tree v4sf_ftype_v4sf_v4sf_v16qi
7753 = build_function_type_list (V4SF_type_node,
7754 V4SF_type_node, V4SF_type_node,
7755 V16QI_type_node, NULL_TREE);
7756 tree v4si_ftype_v4si_v4si_v16qi
7757 = build_function_type_list (V4SI_type_node,
7758 V4SI_type_node, V4SI_type_node,
7759 V16QI_type_node, NULL_TREE);
7760 tree v8hi_ftype_v8hi_v8hi_v16qi
7761 = build_function_type_list (V8HI_type_node,
7762 V8HI_type_node, V8HI_type_node,
7763 V16QI_type_node, NULL_TREE);
7764 tree v16qi_ftype_v16qi_v16qi_v16qi
7765 = build_function_type_list (V16QI_type_node,
7766 V16QI_type_node, V16QI_type_node,
7767 V16QI_type_node, NULL_TREE);
7769 = build_function_type_list (V4SI_type_node, integer_type_node, NULL_TREE);
7771 = build_function_type_list (V8HI_type_node, integer_type_node, NULL_TREE);
7772 tree v16qi_ftype_int
7773 = build_function_type_list (V16QI_type_node, integer_type_node, NULL_TREE);
7774 tree v8hi_ftype_v16qi
7775 = build_function_type_list (V8HI_type_node, V16QI_type_node, NULL_TREE);
7776 tree v4sf_ftype_v4sf
7777 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
7779 tree v2si_ftype_v2si_v2si
7780 = build_function_type_list (opaque_V2SI_type_node,
7781 opaque_V2SI_type_node,
7782 opaque_V2SI_type_node, NULL_TREE);
7784 tree v2sf_ftype_v2sf_v2sf
7785 = build_function_type_list (opaque_V2SF_type_node,
7786 opaque_V2SF_type_node,
7787 opaque_V2SF_type_node, NULL_TREE);
7789 tree v2si_ftype_int_int
7790 = build_function_type_list (opaque_V2SI_type_node,
7791 integer_type_node, integer_type_node,
7794 tree v2si_ftype_v2si
7795 = build_function_type_list (opaque_V2SI_type_node,
7796 opaque_V2SI_type_node, NULL_TREE);
7798 tree v2sf_ftype_v2sf
7799 = build_function_type_list (opaque_V2SF_type_node,
7800 opaque_V2SF_type_node, NULL_TREE);
7802 tree v2sf_ftype_v2si
7803 = build_function_type_list (opaque_V2SF_type_node,
7804 opaque_V2SI_type_node, NULL_TREE);
7806 tree v2si_ftype_v2sf
7807 = build_function_type_list (opaque_V2SI_type_node,
7808 opaque_V2SF_type_node, NULL_TREE);
7810 tree v2si_ftype_v2si_char
7811 = build_function_type_list (opaque_V2SI_type_node,
7812 opaque_V2SI_type_node,
7813 char_type_node, NULL_TREE);
7815 tree v2si_ftype_int_char
7816 = build_function_type_list (opaque_V2SI_type_node,
7817 integer_type_node, char_type_node, NULL_TREE);
7819 tree v2si_ftype_char
7820 = build_function_type_list (opaque_V2SI_type_node,
7821 char_type_node, NULL_TREE);
7823 tree int_ftype_int_int
7824 = build_function_type_list (integer_type_node,
7825 integer_type_node, integer_type_node,
7828 tree v4si_ftype_v4si_v4si
7829 = build_function_type_list (V4SI_type_node,
7830 V4SI_type_node, V4SI_type_node, NULL_TREE);
7831 tree v4sf_ftype_v4si_int
7832 = build_function_type_list (V4SF_type_node,
7833 V4SI_type_node, integer_type_node, NULL_TREE);
7834 tree v4si_ftype_v4sf_int
7835 = build_function_type_list (V4SI_type_node,
7836 V4SF_type_node, integer_type_node, NULL_TREE);
7837 tree v4si_ftype_v4si_int
7838 = build_function_type_list (V4SI_type_node,
7839 V4SI_type_node, integer_type_node, NULL_TREE);
7840 tree v8hi_ftype_v8hi_int
7841 = build_function_type_list (V8HI_type_node,
7842 V8HI_type_node, integer_type_node, NULL_TREE);
7843 tree v16qi_ftype_v16qi_int
7844 = build_function_type_list (V16QI_type_node,
7845 V16QI_type_node, integer_type_node, NULL_TREE);
7846 tree v16qi_ftype_v16qi_v16qi_int
7847 = build_function_type_list (V16QI_type_node,
7848 V16QI_type_node, V16QI_type_node,
7849 integer_type_node, NULL_TREE);
7850 tree v8hi_ftype_v8hi_v8hi_int
7851 = build_function_type_list (V8HI_type_node,
7852 V8HI_type_node, V8HI_type_node,
7853 integer_type_node, NULL_TREE);
7854 tree v4si_ftype_v4si_v4si_int
7855 = build_function_type_list (V4SI_type_node,
7856 V4SI_type_node, V4SI_type_node,
7857 integer_type_node, NULL_TREE);
7858 tree v4sf_ftype_v4sf_v4sf_int
7859 = build_function_type_list (V4SF_type_node,
7860 V4SF_type_node, V4SF_type_node,
7861 integer_type_node, NULL_TREE);
7862 tree v4sf_ftype_v4sf_v4sf
7863 = build_function_type_list (V4SF_type_node,
7864 V4SF_type_node, V4SF_type_node, NULL_TREE);
7865 tree v4sf_ftype_v4sf_v4sf_v4si
7866 = build_function_type_list (V4SF_type_node,
7867 V4SF_type_node, V4SF_type_node,
7868 V4SI_type_node, NULL_TREE);
7869 tree v4sf_ftype_v4sf_v4sf_v4sf
7870 = build_function_type_list (V4SF_type_node,
7871 V4SF_type_node, V4SF_type_node,
7872 V4SF_type_node, NULL_TREE);
7873 tree v4si_ftype_v4si_v4si_v4si
7874 = build_function_type_list (V4SI_type_node,
7875 V4SI_type_node, V4SI_type_node,
7876 V4SI_type_node, NULL_TREE);
7877 tree v8hi_ftype_v8hi_v8hi
7878 = build_function_type_list (V8HI_type_node,
7879 V8HI_type_node, V8HI_type_node, NULL_TREE);
7880 tree v8hi_ftype_v8hi_v8hi_v8hi
7881 = build_function_type_list (V8HI_type_node,
7882 V8HI_type_node, V8HI_type_node,
7883 V8HI_type_node, NULL_TREE);
7884 tree v4si_ftype_v8hi_v8hi_v4si
7885 = build_function_type_list (V4SI_type_node,
7886 V8HI_type_node, V8HI_type_node,
7887 V4SI_type_node, NULL_TREE);
7888 tree v4si_ftype_v16qi_v16qi_v4si
7889 = build_function_type_list (V4SI_type_node,
7890 V16QI_type_node, V16QI_type_node,
7891 V4SI_type_node, NULL_TREE);
7892 tree v16qi_ftype_v16qi_v16qi
7893 = build_function_type_list (V16QI_type_node,
7894 V16QI_type_node, V16QI_type_node, NULL_TREE);
7895 tree v4si_ftype_v4sf_v4sf
7896 = build_function_type_list (V4SI_type_node,
7897 V4SF_type_node, V4SF_type_node, NULL_TREE);
7898 tree v8hi_ftype_v16qi_v16qi
7899 = build_function_type_list (V8HI_type_node,
7900 V16QI_type_node, V16QI_type_node, NULL_TREE);
7901 tree v4si_ftype_v8hi_v8hi
7902 = build_function_type_list (V4SI_type_node,
7903 V8HI_type_node, V8HI_type_node, NULL_TREE);
7904 tree v8hi_ftype_v4si_v4si
7905 = build_function_type_list (V8HI_type_node,
7906 V4SI_type_node, V4SI_type_node, NULL_TREE);
7907 tree v16qi_ftype_v8hi_v8hi
7908 = build_function_type_list (V16QI_type_node,
7909 V8HI_type_node, V8HI_type_node, NULL_TREE);
7910 tree v4si_ftype_v16qi_v4si
7911 = build_function_type_list (V4SI_type_node,
7912 V16QI_type_node, V4SI_type_node, NULL_TREE);
7913 tree v4si_ftype_v16qi_v16qi
7914 = build_function_type_list (V4SI_type_node,
7915 V16QI_type_node, V16QI_type_node, NULL_TREE);
7916 tree v4si_ftype_v8hi_v4si
7917 = build_function_type_list (V4SI_type_node,
7918 V8HI_type_node, V4SI_type_node, NULL_TREE);
7919 tree v4si_ftype_v8hi
7920 = build_function_type_list (V4SI_type_node, V8HI_type_node, NULL_TREE);
7921 tree int_ftype_v4si_v4si
7922 = build_function_type_list (integer_type_node,
7923 V4SI_type_node, V4SI_type_node, NULL_TREE);
7924 tree int_ftype_v4sf_v4sf
7925 = build_function_type_list (integer_type_node,
7926 V4SF_type_node, V4SF_type_node, NULL_TREE);
7927 tree int_ftype_v16qi_v16qi
7928 = build_function_type_list (integer_type_node,
7929 V16QI_type_node, V16QI_type_node, NULL_TREE);
7930 tree int_ftype_v8hi_v8hi
7931 = build_function_type_list (integer_type_node,
7932 V8HI_type_node, V8HI_type_node, NULL_TREE);
7934 /* Add the simple ternary operators. */
7935 d = (struct builtin_description *) bdesc_3arg;
7936 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
7939 enum machine_mode mode0, mode1, mode2, mode3;
7942 if (d->name == 0 || d->icode == CODE_FOR_nothing)
7945 mode0 = insn_data[d->icode].operand[0].mode;
7946 mode1 = insn_data[d->icode].operand[1].mode;
7947 mode2 = insn_data[d->icode].operand[2].mode;
7948 mode3 = insn_data[d->icode].operand[3].mode;
7950 /* When all four are of the same mode. */
7951 if (mode0 == mode1 && mode1 == mode2 && mode2 == mode3)
7956 type = v4si_ftype_v4si_v4si_v4si;
7959 type = v4sf_ftype_v4sf_v4sf_v4sf;
7962 type = v8hi_ftype_v8hi_v8hi_v8hi;
7965 type = v16qi_ftype_v16qi_v16qi_v16qi;
7971 else if (mode0 == mode1 && mode1 == mode2 && mode3 == V16QImode)
7976 type = v4si_ftype_v4si_v4si_v16qi;
7979 type = v4sf_ftype_v4sf_v4sf_v16qi;
7982 type = v8hi_ftype_v8hi_v8hi_v16qi;
7985 type = v16qi_ftype_v16qi_v16qi_v16qi;
7991 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode
7992 && mode3 == V4SImode)
7993 type = v4si_ftype_v16qi_v16qi_v4si;
7994 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode
7995 && mode3 == V4SImode)
7996 type = v4si_ftype_v8hi_v8hi_v4si;
7997 else if (mode0 == V4SFmode && mode1 == V4SFmode && mode2 == V4SFmode
7998 && mode3 == V4SImode)
7999 type = v4sf_ftype_v4sf_v4sf_v4si;
8001 /* vchar, vchar, vchar, 4 bit literal. */
8002 else if (mode0 == V16QImode && mode1 == mode0 && mode2 == mode0
8004 type = v16qi_ftype_v16qi_v16qi_int;
8006 /* vshort, vshort, vshort, 4 bit literal. */
8007 else if (mode0 == V8HImode && mode1 == mode0 && mode2 == mode0
8009 type = v8hi_ftype_v8hi_v8hi_int;
8011 /* vint, vint, vint, 4 bit literal. */
8012 else if (mode0 == V4SImode && mode1 == mode0 && mode2 == mode0
8014 type = v4si_ftype_v4si_v4si_int;
8016 /* vfloat, vfloat, vfloat, 4 bit literal. */
8017 else if (mode0 == V4SFmode && mode1 == mode0 && mode2 == mode0
8019 type = v4sf_ftype_v4sf_v4sf_int;
8024 def_builtin (d->mask, d->name, type, d->code);
8027 /* Add the simple binary operators. */
8028 d = (struct builtin_description *) bdesc_2arg;
8029 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
8031 enum machine_mode mode0, mode1, mode2;
8034 if (d->name == 0 || d->icode == CODE_FOR_nothing)
8037 mode0 = insn_data[d->icode].operand[0].mode;
8038 mode1 = insn_data[d->icode].operand[1].mode;
8039 mode2 = insn_data[d->icode].operand[2].mode;
8041 /* When all three operands are of the same mode. */
8042 if (mode0 == mode1 && mode1 == mode2)
8047 type = v4sf_ftype_v4sf_v4sf;
8050 type = v4si_ftype_v4si_v4si;
8053 type = v16qi_ftype_v16qi_v16qi;
8056 type = v8hi_ftype_v8hi_v8hi;
8059 type = v2si_ftype_v2si_v2si;
8062 type = v2sf_ftype_v2sf_v2sf;
8065 type = int_ftype_int_int;
8072 /* A few other combos we really don't want to do manually. */
8074 /* vint, vfloat, vfloat. */
8075 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == V4SFmode)
8076 type = v4si_ftype_v4sf_v4sf;
8078 /* vshort, vchar, vchar. */
8079 else if (mode0 == V8HImode && mode1 == V16QImode && mode2 == V16QImode)
8080 type = v8hi_ftype_v16qi_v16qi;
8082 /* vint, vshort, vshort. */
8083 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode)
8084 type = v4si_ftype_v8hi_v8hi;
8086 /* vshort, vint, vint. */
8087 else if (mode0 == V8HImode && mode1 == V4SImode && mode2 == V4SImode)
8088 type = v8hi_ftype_v4si_v4si;
8090 /* vchar, vshort, vshort. */
8091 else if (mode0 == V16QImode && mode1 == V8HImode && mode2 == V8HImode)
8092 type = v16qi_ftype_v8hi_v8hi;
8094 /* vint, vchar, vint. */
8095 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V4SImode)
8096 type = v4si_ftype_v16qi_v4si;
8098 /* vint, vchar, vchar. */
8099 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode)
8100 type = v4si_ftype_v16qi_v16qi;
8102 /* vint, vshort, vint. */
8103 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V4SImode)
8104 type = v4si_ftype_v8hi_v4si;
8106 /* vint, vint, 5 bit literal. */
8107 else if (mode0 == V4SImode && mode1 == V4SImode && mode2 == QImode)
8108 type = v4si_ftype_v4si_int;
8110 /* vshort, vshort, 5 bit literal. */
8111 else if (mode0 == V8HImode && mode1 == V8HImode && mode2 == QImode)
8112 type = v8hi_ftype_v8hi_int;
8114 /* vchar, vchar, 5 bit literal. */
8115 else if (mode0 == V16QImode && mode1 == V16QImode && mode2 == QImode)
8116 type = v16qi_ftype_v16qi_int;
8118 /* vfloat, vint, 5 bit literal. */
8119 else if (mode0 == V4SFmode && mode1 == V4SImode && mode2 == QImode)
8120 type = v4sf_ftype_v4si_int;
8122 /* vint, vfloat, 5 bit literal. */
8123 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == QImode)
8124 type = v4si_ftype_v4sf_int;
8126 else if (mode0 == V2SImode && mode1 == SImode && mode2 == SImode)
8127 type = v2si_ftype_int_int;
8129 else if (mode0 == V2SImode && mode1 == V2SImode && mode2 == QImode)
8130 type = v2si_ftype_v2si_char;
8132 else if (mode0 == V2SImode && mode1 == SImode && mode2 == QImode)
8133 type = v2si_ftype_int_char;
8136 else if (mode0 == SImode)
8141 type = int_ftype_v4si_v4si;
8144 type = int_ftype_v4sf_v4sf;
8147 type = int_ftype_v16qi_v16qi;
8150 type = int_ftype_v8hi_v8hi;
8160 def_builtin (d->mask, d->name, type, d->code);
8163 /* Add the simple unary operators. */
8164 d = (struct builtin_description *) bdesc_1arg;
8165 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
8167 enum machine_mode mode0, mode1;
8170 if (d->name == 0 || d->icode == CODE_FOR_nothing)
8173 mode0 = insn_data[d->icode].operand[0].mode;
8174 mode1 = insn_data[d->icode].operand[1].mode;
8176 if (mode0 == V4SImode && mode1 == QImode)
8177 type = v4si_ftype_int;
8178 else if (mode0 == V8HImode && mode1 == QImode)
8179 type = v8hi_ftype_int;
8180 else if (mode0 == V16QImode && mode1 == QImode)
8181 type = v16qi_ftype_int;
8182 else if (mode0 == V4SFmode && mode1 == V4SFmode)
8183 type = v4sf_ftype_v4sf;
8184 else if (mode0 == V8HImode && mode1 == V16QImode)
8185 type = v8hi_ftype_v16qi;
8186 else if (mode0 == V4SImode && mode1 == V8HImode)
8187 type = v4si_ftype_v8hi;
8188 else if (mode0 == V2SImode && mode1 == V2SImode)
8189 type = v2si_ftype_v2si;
8190 else if (mode0 == V2SFmode && mode1 == V2SFmode)
8191 type = v2sf_ftype_v2sf;
8192 else if (mode0 == V2SFmode && mode1 == V2SImode)
8193 type = v2sf_ftype_v2si;
8194 else if (mode0 == V2SImode && mode1 == V2SFmode)
8195 type = v2si_ftype_v2sf;
8196 else if (mode0 == V2SImode && mode1 == QImode)
8197 type = v2si_ftype_char;
8201 def_builtin (d->mask, d->name, type, d->code);
8206 rs6000_init_libfuncs (void)
8208 if (!TARGET_HARD_FLOAT)
8211 if (DEFAULT_ABI != ABI_V4)
8213 if (TARGET_XCOFF && ! TARGET_POWER2 && ! TARGET_POWERPC)
8215 /* AIX library routines for float->int conversion. */
8216 set_conv_libfunc (sfix_optab, SImode, DFmode, "__itrunc");
8217 set_conv_libfunc (ufix_optab, SImode, DFmode, "__uitrunc");
8218 set_conv_libfunc (sfix_optab, SImode, TFmode, "_qitrunc");
8219 set_conv_libfunc (ufix_optab, SImode, TFmode, "_quitrunc");
8222 /* Standard AIX/Darwin/64-bit SVR4 quad floating point routines. */
8223 set_optab_libfunc (add_optab, TFmode, "_xlqadd");
8224 set_optab_libfunc (sub_optab, TFmode, "_xlqsub");
8225 set_optab_libfunc (smul_optab, TFmode, "_xlqmul");
8226 set_optab_libfunc (sdiv_optab, TFmode, "_xlqdiv");
8230 /* 32-bit SVR4 quad floating point routines. */
8232 set_optab_libfunc (add_optab, TFmode, "_q_add");
8233 set_optab_libfunc (sub_optab, TFmode, "_q_sub");
8234 set_optab_libfunc (neg_optab, TFmode, "_q_neg");
8235 set_optab_libfunc (smul_optab, TFmode, "_q_mul");
8236 set_optab_libfunc (sdiv_optab, TFmode, "_q_div");
8237 if (TARGET_PPC_GPOPT || TARGET_POWER2)
8238 set_optab_libfunc (sqrt_optab, TFmode, "_q_sqrt");
8240 set_optab_libfunc (eq_optab, TFmode, "_q_feq");
8241 set_optab_libfunc (ne_optab, TFmode, "_q_fne");
8242 set_optab_libfunc (gt_optab, TFmode, "_q_fgt");
8243 set_optab_libfunc (ge_optab, TFmode, "_q_fge");
8244 set_optab_libfunc (lt_optab, TFmode, "_q_flt");
8245 set_optab_libfunc (le_optab, TFmode, "_q_fle");
8247 set_conv_libfunc (sext_optab, TFmode, SFmode, "_q_stoq");
8248 set_conv_libfunc (sext_optab, TFmode, DFmode, "_q_dtoq");
8249 set_conv_libfunc (trunc_optab, SFmode, TFmode, "_q_qtos");
8250 set_conv_libfunc (trunc_optab, DFmode, TFmode, "_q_qtod");
8251 set_conv_libfunc (sfix_optab, SImode, TFmode, "_q_qtoi");
8252 set_conv_libfunc (ufix_optab, SImode, TFmode, "_q_qtou");
8253 set_conv_libfunc (sfloat_optab, TFmode, SImode, "_q_itoq");
8257 /* Expand a block move operation, and return 1 if successful. Return 0
8258 if we should let the compiler generate normal code.
8260 operands[0] is the destination
8261 operands[1] is the source
8262 operands[2] is the length
8263 operands[3] is the alignment */
8265 #define MAX_MOVE_REG 4
8268 expand_block_move (rtx operands[])
8270 rtx orig_dest = operands[0];
8271 rtx orig_src = operands[1];
8272 rtx bytes_rtx = operands[2];
8273 rtx align_rtx = operands[3];
8274 int constp = (GET_CODE (bytes_rtx) == CONST_INT);
8279 rtx stores[MAX_MOVE_REG];
8282 /* If this is not a fixed size move, just call memcpy */
8286 /* If this is not a fixed size alignment, abort */
8287 if (GET_CODE (align_rtx) != CONST_INT)
8289 align = INTVAL (align_rtx);
8291 /* Anything to move? */
8292 bytes = INTVAL (bytes_rtx);
8296 /* store_one_arg depends on expand_block_move to handle at least the size of
8297 reg_parm_stack_space. */
8298 if (bytes > (TARGET_POWERPC64 ? 64 : 32))
8301 for (offset = 0; bytes > 0; offset += move_bytes, bytes -= move_bytes)
8304 rtx (*movmemsi) (rtx, rtx, rtx, rtx);
8305 rtx (*mov) (rtx, rtx);
8307 enum machine_mode mode = BLKmode;
8311 && bytes > 24 /* move up to 32 bytes at a time */
8319 && ! fixed_regs[12])
8321 move_bytes = (bytes > 32) ? 32 : bytes;
8322 gen_func.movmemsi = gen_movmemsi_8reg;
8324 else if (TARGET_STRING
8325 && bytes > 16 /* move up to 24 bytes at a time */
8331 && ! fixed_regs[10])
8333 move_bytes = (bytes > 24) ? 24 : bytes;
8334 gen_func.movmemsi = gen_movmemsi_6reg;
8336 else if (TARGET_STRING
8337 && bytes > 8 /* move up to 16 bytes at a time */
8343 move_bytes = (bytes > 16) ? 16 : bytes;
8344 gen_func.movmemsi = gen_movmemsi_4reg;
8346 else if (bytes >= 8 && TARGET_POWERPC64
8347 /* 64-bit loads and stores require word-aligned
8349 && (align >= 8 || (! STRICT_ALIGNMENT && align >= 4)))
8353 gen_func.mov = gen_movdi;
8355 else if (TARGET_STRING && bytes > 4 && !TARGET_POWERPC64)
8356 { /* move up to 8 bytes at a time */
8357 move_bytes = (bytes > 8) ? 8 : bytes;
8358 gen_func.movmemsi = gen_movmemsi_2reg;
8360 else if (bytes >= 4 && (align >= 4 || ! STRICT_ALIGNMENT))
8361 { /* move 4 bytes */
8364 gen_func.mov = gen_movsi;
8366 else if (bytes == 2 && (align >= 2 || ! STRICT_ALIGNMENT))
8367 { /* move 2 bytes */
8370 gen_func.mov = gen_movhi;
8372 else if (TARGET_STRING && bytes > 1)
8373 { /* move up to 4 bytes at a time */
8374 move_bytes = (bytes > 4) ? 4 : bytes;
8375 gen_func.movmemsi = gen_movmemsi_1reg;
8377 else /* move 1 byte at a time */
8381 gen_func.mov = gen_movqi;
8384 src = adjust_address (orig_src, mode, offset);
8385 dest = adjust_address (orig_dest, mode, offset);
8387 if (mode != BLKmode)
8389 rtx tmp_reg = gen_reg_rtx (mode);
8391 emit_insn ((*gen_func.mov) (tmp_reg, src));
8392 stores[num_reg++] = (*gen_func.mov) (dest, tmp_reg);
8395 if (mode == BLKmode || num_reg >= MAX_MOVE_REG || bytes == move_bytes)
8398 for (i = 0; i < num_reg; i++)
8399 emit_insn (stores[i]);
8403 if (mode == BLKmode)
8405 /* Move the address into scratch registers. The movmemsi
8406 patterns require zero offset. */
8407 if (!REG_P (XEXP (src, 0)))
8409 rtx src_reg = copy_addr_to_reg (XEXP (src, 0));
8410 src = replace_equiv_address (src, src_reg);
8412 set_mem_size (src, GEN_INT (move_bytes));
8414 if (!REG_P (XEXP (dest, 0)))
8416 rtx dest_reg = copy_addr_to_reg (XEXP (dest, 0));
8417 dest = replace_equiv_address (dest, dest_reg);
8419 set_mem_size (dest, GEN_INT (move_bytes));
8421 emit_insn ((*gen_func.movmemsi) (dest, src,
8422 GEN_INT (move_bytes & 31),
8431 /* Return 1 if OP is a load multiple operation. It is known to be a
8432 PARALLEL and the first section will be tested. */
8435 load_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8437 int count = XVECLEN (op, 0);
8438 unsigned int dest_regno;
8442 /* Perform a quick check so we don't blow up below. */
8444 || GET_CODE (XVECEXP (op, 0, 0)) != SET
8445 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
8446 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
8449 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
8450 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
8452 for (i = 1; i < count; i++)
8454 rtx elt = XVECEXP (op, 0, i);
8456 if (GET_CODE (elt) != SET
8457 || GET_CODE (SET_DEST (elt)) != REG
8458 || GET_MODE (SET_DEST (elt)) != SImode
8459 || REGNO (SET_DEST (elt)) != dest_regno + i
8460 || GET_CODE (SET_SRC (elt)) != MEM
8461 || GET_MODE (SET_SRC (elt)) != SImode
8462 || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
8463 || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
8464 || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
8465 || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != i * 4)
8472 /* Similar, but tests for store multiple. Here, the second vector element
8473 is a CLOBBER. It will be tested later. */
8476 store_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8478 int count = XVECLEN (op, 0) - 1;
8479 unsigned int src_regno;
8483 /* Perform a quick check so we don't blow up below. */
8485 || GET_CODE (XVECEXP (op, 0, 0)) != SET
8486 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
8487 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
8490 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
8491 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
8493 for (i = 1; i < count; i++)
8495 rtx elt = XVECEXP (op, 0, i + 1);
8497 if (GET_CODE (elt) != SET
8498 || GET_CODE (SET_SRC (elt)) != REG
8499 || GET_MODE (SET_SRC (elt)) != SImode
8500 || REGNO (SET_SRC (elt)) != src_regno + i
8501 || GET_CODE (SET_DEST (elt)) != MEM
8502 || GET_MODE (SET_DEST (elt)) != SImode
8503 || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
8504 || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
8505 || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
8506 || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != i * 4)
8513 /* Return a string to perform a load_multiple operation.
8514 operands[0] is the vector.
8515 operands[1] is the source address.
8516 operands[2] is the first destination register. */
8519 rs6000_output_load_multiple (rtx operands[3])
8521 /* We have to handle the case where the pseudo used to contain the address
8522 is assigned to one of the output registers. */
8524 int words = XVECLEN (operands[0], 0);
8527 if (XVECLEN (operands[0], 0) == 1)
8528 return "{l|lwz} %2,0(%1)";
8530 for (i = 0; i < words; i++)
8531 if (refers_to_regno_p (REGNO (operands[2]) + i,
8532 REGNO (operands[2]) + i + 1, operands[1], 0))
8536 xop[0] = GEN_INT (4 * (words-1));
8537 xop[1] = operands[1];
8538 xop[2] = operands[2];
8539 output_asm_insn ("{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,%0(%1)", xop);
8544 xop[0] = GEN_INT (4 * (words-1));
8545 xop[1] = operands[1];
8546 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
8547 output_asm_insn ("{cal %1,4(%1)|addi %1,%1,4}\n\t{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,-4(%1)", xop);
8552 for (j = 0; j < words; j++)
8555 xop[0] = GEN_INT (j * 4);
8556 xop[1] = operands[1];
8557 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + j);
8558 output_asm_insn ("{l|lwz} %2,%0(%1)", xop);
8560 xop[0] = GEN_INT (i * 4);
8561 xop[1] = operands[1];
8562 output_asm_insn ("{l|lwz} %1,%0(%1)", xop);
8567 return "{lsi|lswi} %2,%1,%N0";
8570 /* Return 1 for a parallel vrsave operation. */
8573 vrsave_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8575 int count = XVECLEN (op, 0);
8576 unsigned int dest_regno, src_regno;
8580 || GET_CODE (XVECEXP (op, 0, 0)) != SET
8581 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
8582 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC_VOLATILE)
8585 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
8586 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
8588 if (dest_regno != VRSAVE_REGNO
8589 && src_regno != VRSAVE_REGNO)
8592 for (i = 1; i < count; i++)
8594 rtx elt = XVECEXP (op, 0, i);
8596 if (GET_CODE (elt) != CLOBBER
8597 && GET_CODE (elt) != SET)
8604 /* Return 1 for an PARALLEL suitable for mfcr. */
8607 mfcr_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8609 int count = XVECLEN (op, 0);
8612 /* Perform a quick check so we don't blow up below. */
8614 || GET_CODE (XVECEXP (op, 0, 0)) != SET
8615 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC
8616 || XVECLEN (SET_SRC (XVECEXP (op, 0, 0)), 0) != 2)
8619 for (i = 0; i < count; i++)
8621 rtx exp = XVECEXP (op, 0, i);
8626 src_reg = XVECEXP (SET_SRC (exp), 0, 0);
8628 if (GET_CODE (src_reg) != REG
8629 || GET_MODE (src_reg) != CCmode
8630 || ! CR_REGNO_P (REGNO (src_reg)))
8633 if (GET_CODE (exp) != SET
8634 || GET_CODE (SET_DEST (exp)) != REG
8635 || GET_MODE (SET_DEST (exp)) != SImode
8636 || ! INT_REGNO_P (REGNO (SET_DEST (exp))))
8638 unspec = SET_SRC (exp);
8639 maskval = 1 << (MAX_CR_REGNO - REGNO (src_reg));
8641 if (GET_CODE (unspec) != UNSPEC
8642 || XINT (unspec, 1) != UNSPEC_MOVESI_FROM_CR
8643 || XVECLEN (unspec, 0) != 2
8644 || XVECEXP (unspec, 0, 0) != src_reg
8645 || GET_CODE (XVECEXP (unspec, 0, 1)) != CONST_INT
8646 || INTVAL (XVECEXP (unspec, 0, 1)) != maskval)
8652 /* Return 1 for an PARALLEL suitable for mtcrf. */
8655 mtcrf_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8657 int count = XVECLEN (op, 0);
8661 /* Perform a quick check so we don't blow up below. */
8663 || GET_CODE (XVECEXP (op, 0, 0)) != SET
8664 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC
8665 || XVECLEN (SET_SRC (XVECEXP (op, 0, 0)), 0) != 2)
8667 src_reg = XVECEXP (SET_SRC (XVECEXP (op, 0, 0)), 0, 0);
8669 if (GET_CODE (src_reg) != REG
8670 || GET_MODE (src_reg) != SImode
8671 || ! INT_REGNO_P (REGNO (src_reg)))
8674 for (i = 0; i < count; i++)
8676 rtx exp = XVECEXP (op, 0, i);
8680 if (GET_CODE (exp) != SET
8681 || GET_CODE (SET_DEST (exp)) != REG
8682 || GET_MODE (SET_DEST (exp)) != CCmode
8683 || ! CR_REGNO_P (REGNO (SET_DEST (exp))))
8685 unspec = SET_SRC (exp);
8686 maskval = 1 << (MAX_CR_REGNO - REGNO (SET_DEST (exp)));
8688 if (GET_CODE (unspec) != UNSPEC
8689 || XINT (unspec, 1) != UNSPEC_MOVESI_TO_CR
8690 || XVECLEN (unspec, 0) != 2
8691 || XVECEXP (unspec, 0, 0) != src_reg
8692 || GET_CODE (XVECEXP (unspec, 0, 1)) != CONST_INT
8693 || INTVAL (XVECEXP (unspec, 0, 1)) != maskval)
8699 /* Return 1 for an PARALLEL suitable for lmw. */
8702 lmw_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8704 int count = XVECLEN (op, 0);
8705 unsigned int dest_regno;
8707 unsigned int base_regno;
8708 HOST_WIDE_INT offset;
8711 /* Perform a quick check so we don't blow up below. */
8713 || GET_CODE (XVECEXP (op, 0, 0)) != SET
8714 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
8715 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
8718 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
8719 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
8722 || count != 32 - (int) dest_regno)
8725 if (legitimate_indirect_address_p (src_addr, 0))
8728 base_regno = REGNO (src_addr);
8729 if (base_regno == 0)
8732 else if (rs6000_legitimate_offset_address_p (SImode, src_addr, 0))
8734 offset = INTVAL (XEXP (src_addr, 1));
8735 base_regno = REGNO (XEXP (src_addr, 0));
8740 for (i = 0; i < count; i++)
8742 rtx elt = XVECEXP (op, 0, i);
8745 HOST_WIDE_INT newoffset;
8747 if (GET_CODE (elt) != SET
8748 || GET_CODE (SET_DEST (elt)) != REG
8749 || GET_MODE (SET_DEST (elt)) != SImode
8750 || REGNO (SET_DEST (elt)) != dest_regno + i
8751 || GET_CODE (SET_SRC (elt)) != MEM
8752 || GET_MODE (SET_SRC (elt)) != SImode)
8754 newaddr = XEXP (SET_SRC (elt), 0);
8755 if (legitimate_indirect_address_p (newaddr, 0))
8760 else if (rs6000_legitimate_offset_address_p (SImode, newaddr, 0))
8762 addr_reg = XEXP (newaddr, 0);
8763 newoffset = INTVAL (XEXP (newaddr, 1));
8767 if (REGNO (addr_reg) != base_regno
8768 || newoffset != offset + 4 * i)
8775 /* Return 1 for an PARALLEL suitable for stmw. */
8778 stmw_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8780 int count = XVECLEN (op, 0);
8781 unsigned int src_regno;
8783 unsigned int base_regno;
8784 HOST_WIDE_INT offset;
8787 /* Perform a quick check so we don't blow up below. */
8789 || GET_CODE (XVECEXP (op, 0, 0)) != SET
8790 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
8791 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
8794 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
8795 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
8798 || count != 32 - (int) src_regno)
8801 if (legitimate_indirect_address_p (dest_addr, 0))
8804 base_regno = REGNO (dest_addr);
8805 if (base_regno == 0)
8808 else if (rs6000_legitimate_offset_address_p (SImode, dest_addr, 0))
8810 offset = INTVAL (XEXP (dest_addr, 1));
8811 base_regno = REGNO (XEXP (dest_addr, 0));
8816 for (i = 0; i < count; i++)
8818 rtx elt = XVECEXP (op, 0, i);
8821 HOST_WIDE_INT newoffset;
8823 if (GET_CODE (elt) != SET
8824 || GET_CODE (SET_SRC (elt)) != REG
8825 || GET_MODE (SET_SRC (elt)) != SImode
8826 || REGNO (SET_SRC (elt)) != src_regno + i
8827 || GET_CODE (SET_DEST (elt)) != MEM
8828 || GET_MODE (SET_DEST (elt)) != SImode)
8830 newaddr = XEXP (SET_DEST (elt), 0);
8831 if (legitimate_indirect_address_p (newaddr, 0))
8836 else if (rs6000_legitimate_offset_address_p (SImode, newaddr, 0))
8838 addr_reg = XEXP (newaddr, 0);
8839 newoffset = INTVAL (XEXP (newaddr, 1));
8843 if (REGNO (addr_reg) != base_regno
8844 || newoffset != offset + 4 * i)
8851 /* A validation routine: say whether CODE, a condition code, and MODE
8852 match. The other alternatives either don't make sense or should
8853 never be generated. */
8856 validate_condition_mode (enum rtx_code code, enum machine_mode mode)
8858 if ((GET_RTX_CLASS (code) != RTX_COMPARE
8859 && GET_RTX_CLASS (code) != RTX_COMM_COMPARE)
8860 || GET_MODE_CLASS (mode) != MODE_CC)
8863 /* These don't make sense. */
8864 if ((code == GT || code == LT || code == GE || code == LE)
8865 && mode == CCUNSmode)
8868 if ((code == GTU || code == LTU || code == GEU || code == LEU)
8869 && mode != CCUNSmode)
8872 if (mode != CCFPmode
8873 && (code == ORDERED || code == UNORDERED
8874 || code == UNEQ || code == LTGT
8875 || code == UNGT || code == UNLT
8876 || code == UNGE || code == UNLE))
8879 /* These should never be generated except for
8880 flag_finite_math_only. */
8881 if (mode == CCFPmode
8882 && ! flag_finite_math_only
8883 && (code == LE || code == GE
8884 || code == UNEQ || code == LTGT
8885 || code == UNGT || code == UNLT))
8888 /* These are invalid; the information is not there. */
8889 if (mode == CCEQmode
8890 && code != EQ && code != NE)
8894 /* Return 1 if OP is a comparison operation that is valid for a branch insn.
8895 We only check the opcode against the mode of the CC value here. */
8898 branch_comparison_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8900 enum rtx_code code = GET_CODE (op);
8901 enum machine_mode cc_mode;
8903 if (!COMPARISON_P (op))
8906 cc_mode = GET_MODE (XEXP (op, 0));
8907 if (GET_MODE_CLASS (cc_mode) != MODE_CC)
8910 validate_condition_mode (code, cc_mode);
8915 /* Return 1 if OP is a comparison operation that is valid for a branch
8916 insn and which is true if the corresponding bit in the CC register
8920 branch_positive_comparison_operator (rtx op, enum machine_mode mode)
8924 if (! branch_comparison_operator (op, mode))
8927 code = GET_CODE (op);
8928 return (code == EQ || code == LT || code == GT
8929 || code == LTU || code == GTU
8930 || code == UNORDERED);
8933 /* Return 1 if OP is a comparison operation that is valid for an scc
8934 insn: it must be a positive comparison. */
8937 scc_comparison_operator (rtx op, enum machine_mode mode)
8939 return branch_positive_comparison_operator (op, mode);
8943 trap_comparison_operator (rtx op, enum machine_mode mode)
8945 if (mode != VOIDmode && mode != GET_MODE (op))
8947 return COMPARISON_P (op);
8951 boolean_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8953 enum rtx_code code = GET_CODE (op);
8954 return (code == AND || code == IOR || code == XOR);
8958 boolean_or_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8960 enum rtx_code code = GET_CODE (op);
8961 return (code == IOR || code == XOR);
8965 min_max_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8967 enum rtx_code code = GET_CODE (op);
8968 return (code == SMIN || code == SMAX || code == UMIN || code == UMAX);
8971 /* Return 1 if ANDOP is a mask that has no bits on that are not in the
8972 mask required to convert the result of a rotate insn into a shift
8973 left insn of SHIFTOP bits. Both are known to be SImode CONST_INT. */
8976 includes_lshift_p (rtx shiftop, rtx andop)
8978 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
8980 shift_mask <<= INTVAL (shiftop);
8982 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
8985 /* Similar, but for right shift. */
8988 includes_rshift_p (rtx shiftop, rtx andop)
8990 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
8992 shift_mask >>= INTVAL (shiftop);
8994 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
8997 /* Return 1 if ANDOP is a mask suitable for use with an rldic insn
8998 to perform a left shift. It must have exactly SHIFTOP least
8999 significant 0's, then one or more 1's, then zero or more 0's. */
9002 includes_rldic_lshift_p (rtx shiftop, rtx andop)
9004 if (GET_CODE (andop) == CONST_INT)
9006 HOST_WIDE_INT c, lsb, shift_mask;
9009 if (c == 0 || c == ~0)
9013 shift_mask <<= INTVAL (shiftop);
9015 /* Find the least significant one bit. */
9018 /* It must coincide with the LSB of the shift mask. */
9019 if (-lsb != shift_mask)
9022 /* Invert to look for the next transition (if any). */
9025 /* Remove the low group of ones (originally low group of zeros). */
9028 /* Again find the lsb, and check we have all 1's above. */
9032 else if (GET_CODE (andop) == CONST_DOUBLE
9033 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
9035 HOST_WIDE_INT low, high, lsb;
9036 HOST_WIDE_INT shift_mask_low, shift_mask_high;
9038 low = CONST_DOUBLE_LOW (andop);
9039 if (HOST_BITS_PER_WIDE_INT < 64)
9040 high = CONST_DOUBLE_HIGH (andop);
9042 if ((low == 0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == 0))
9043 || (low == ~0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0)))
9046 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
9048 shift_mask_high = ~0;
9049 if (INTVAL (shiftop) > 32)
9050 shift_mask_high <<= INTVAL (shiftop) - 32;
9054 if (-lsb != shift_mask_high || INTVAL (shiftop) < 32)
9061 return high == -lsb;
9064 shift_mask_low = ~0;
9065 shift_mask_low <<= INTVAL (shiftop);
9069 if (-lsb != shift_mask_low)
9072 if (HOST_BITS_PER_WIDE_INT < 64)
9077 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
9080 return high == -lsb;
9084 return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
9090 /* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
9091 to perform a left shift. It must have SHIFTOP or more least
9092 significant 0's, with the remainder of the word 1's. */
9095 includes_rldicr_lshift_p (rtx shiftop, rtx andop)
9097 if (GET_CODE (andop) == CONST_INT)
9099 HOST_WIDE_INT c, lsb, shift_mask;
9102 shift_mask <<= INTVAL (shiftop);
9105 /* Find the least significant one bit. */
9108 /* It must be covered by the shift mask.
9109 This test also rejects c == 0. */
9110 if ((lsb & shift_mask) == 0)
9113 /* Check we have all 1's above the transition, and reject all 1's. */
9114 return c == -lsb && lsb != 1;
9116 else if (GET_CODE (andop) == CONST_DOUBLE
9117 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
9119 HOST_WIDE_INT low, lsb, shift_mask_low;
9121 low = CONST_DOUBLE_LOW (andop);
9123 if (HOST_BITS_PER_WIDE_INT < 64)
9125 HOST_WIDE_INT high, shift_mask_high;
9127 high = CONST_DOUBLE_HIGH (andop);
9131 shift_mask_high = ~0;
9132 if (INTVAL (shiftop) > 32)
9133 shift_mask_high <<= INTVAL (shiftop) - 32;
9137 if ((lsb & shift_mask_high) == 0)
9140 return high == -lsb;
9146 shift_mask_low = ~0;
9147 shift_mask_low <<= INTVAL (shiftop);
9151 if ((lsb & shift_mask_low) == 0)
9154 return low == -lsb && lsb != 1;
9160 /* Return 1 if operands will generate a valid arguments to rlwimi
9161 instruction for insert with right shift in 64-bit mode. The mask may
9162 not start on the first bit or stop on the last bit because wrap-around
9163 effects of instruction do not correspond to semantics of RTL insn. */
9166 insvdi_rshift_rlwimi_p (rtx sizeop, rtx startop, rtx shiftop)
9168 if (INTVAL (startop) < 64
9169 && INTVAL (startop) > 32
9170 && (INTVAL (sizeop) + INTVAL (startop) < 64)
9171 && (INTVAL (sizeop) + INTVAL (startop) > 33)
9172 && (INTVAL (sizeop) + INTVAL (startop) + INTVAL (shiftop) < 96)
9173 && (INTVAL (sizeop) + INTVAL (startop) + INTVAL (shiftop) >= 64)
9174 && (64 - (INTVAL (shiftop) & 63)) >= INTVAL (sizeop))
9180 /* Return 1 if REGNO (reg1) == REGNO (reg2) - 1 making them candidates
9181 for lfq and stfq insns iff the registers are hard registers. */
9184 registers_ok_for_quad_peep (rtx reg1, rtx reg2)
9186 /* We might have been passed a SUBREG. */
9187 if (GET_CODE (reg1) != REG || GET_CODE (reg2) != REG)
9190 /* We might have been passed non floating point registers. */
9191 if (!FP_REGNO_P (REGNO (reg1))
9192 || !FP_REGNO_P (REGNO (reg2)))
9195 return (REGNO (reg1) == REGNO (reg2) - 1);
9198 /* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
9199 addr1 and addr2 must be in consecutive memory locations
9200 (addr2 == addr1 + 8). */
9203 mems_ok_for_quad_peep (rtx mem1, rtx mem2)
9209 /* The mems cannot be volatile. */
9210 if (MEM_VOLATILE_P (mem1) || MEM_VOLATILE_P (mem2))
9213 addr1 = XEXP (mem1, 0);
9214 addr2 = XEXP (mem2, 0);
9216 /* Extract an offset (if used) from the first addr. */
9217 if (GET_CODE (addr1) == PLUS)
9219 /* If not a REG, return zero. */
9220 if (GET_CODE (XEXP (addr1, 0)) != REG)
9224 reg1 = REGNO (XEXP (addr1, 0));
9225 /* The offset must be constant! */
9226 if (GET_CODE (XEXP (addr1, 1)) != CONST_INT)
9228 offset1 = INTVAL (XEXP (addr1, 1));
9231 else if (GET_CODE (addr1) != REG)
9235 reg1 = REGNO (addr1);
9236 /* This was a simple (mem (reg)) expression. Offset is 0. */
9240 /* Make sure the second address is a (mem (plus (reg) (const_int)))
9241 or if it is (mem (reg)) then make sure that offset1 is -8 and the same
9242 register as addr1. */
9243 if (offset1 == -8 && GET_CODE (addr2) == REG && reg1 == REGNO (addr2))
9245 if (GET_CODE (addr2) != PLUS)
9248 if (GET_CODE (XEXP (addr2, 0)) != REG
9249 || GET_CODE (XEXP (addr2, 1)) != CONST_INT)
9252 if (reg1 != REGNO (XEXP (addr2, 0)))
9255 /* The offset for the second addr must be 8 more than the first addr. */
9256 if (INTVAL (XEXP (addr2, 1)) != offset1 + 8)
9259 /* All the tests passed. addr1 and addr2 are valid for lfq or stfq
9264 /* Return the register class of a scratch register needed to copy IN into
9265 or out of a register in CLASS in MODE. If it can be done directly,
9266 NO_REGS is returned. */
9269 secondary_reload_class (enum reg_class class,
9270 enum machine_mode mode ATTRIBUTE_UNUSED,
9275 if (TARGET_ELF || (DEFAULT_ABI == ABI_DARWIN
9277 && MACHOPIC_INDIRECT
9281 /* We cannot copy a symbolic operand directly into anything
9282 other than BASE_REGS for TARGET_ELF. So indicate that a
9283 register from BASE_REGS is needed as an intermediate
9286 On Darwin, pic addresses require a load from memory, which
9287 needs a base register. */
9288 if (class != BASE_REGS
9289 && (GET_CODE (in) == SYMBOL_REF
9290 || GET_CODE (in) == HIGH
9291 || GET_CODE (in) == LABEL_REF
9292 || GET_CODE (in) == CONST))
9296 if (GET_CODE (in) == REG)
9299 if (regno >= FIRST_PSEUDO_REGISTER)
9301 regno = true_regnum (in);
9302 if (regno >= FIRST_PSEUDO_REGISTER)
9306 else if (GET_CODE (in) == SUBREG)
9308 regno = true_regnum (in);
9309 if (regno >= FIRST_PSEUDO_REGISTER)
9315 /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
9317 if (class == GENERAL_REGS || class == BASE_REGS
9318 || (regno >= 0 && INT_REGNO_P (regno)))
9321 /* Constants, memory, and FP registers can go into FP registers. */
9322 if ((regno == -1 || FP_REGNO_P (regno))
9323 && (class == FLOAT_REGS || class == NON_SPECIAL_REGS))
9326 /* Memory, and AltiVec registers can go into AltiVec registers. */
9327 if ((regno == -1 || ALTIVEC_REGNO_P (regno))
9328 && class == ALTIVEC_REGS)
9331 /* We can copy among the CR registers. */
9332 if ((class == CR_REGS || class == CR0_REGS)
9333 && regno >= 0 && CR_REGNO_P (regno))
9336 /* Otherwise, we need GENERAL_REGS. */
9337 return GENERAL_REGS;
9340 /* Given a comparison operation, return the bit number in CCR to test. We
9341 know this is a valid comparison.
9343 SCC_P is 1 if this is for an scc. That means that %D will have been
9344 used instead of %C, so the bits will be in different places.
9346 Return -1 if OP isn't a valid comparison for some reason. */
9349 ccr_bit (rtx op, int scc_p)
9351 enum rtx_code code = GET_CODE (op);
9352 enum machine_mode cc_mode;
9357 if (!COMPARISON_P (op))
9362 if (GET_CODE (reg) != REG
9363 || ! CR_REGNO_P (REGNO (reg)))
9366 cc_mode = GET_MODE (reg);
9367 cc_regnum = REGNO (reg);
9368 base_bit = 4 * (cc_regnum - CR0_REGNO);
9370 validate_condition_mode (code, cc_mode);
9372 /* When generating a sCOND operation, only positive conditions are
9374 if (scc_p && code != EQ && code != GT && code != LT && code != UNORDERED
9375 && code != GTU && code != LTU)
9381 return scc_p ? base_bit + 3 : base_bit + 2;
9383 return base_bit + 2;
9384 case GT: case GTU: case UNLE:
9385 return base_bit + 1;
9386 case LT: case LTU: case UNGE:
9388 case ORDERED: case UNORDERED:
9389 return base_bit + 3;
9392 /* If scc, we will have done a cror to put the bit in the
9393 unordered position. So test that bit. For integer, this is ! LT
9394 unless this is an scc insn. */
9395 return scc_p ? base_bit + 3 : base_bit;
9398 return scc_p ? base_bit + 3 : base_bit + 1;
9405 /* Return the GOT register. */
9408 rs6000_got_register (rtx value ATTRIBUTE_UNUSED)
9410 /* The second flow pass currently (June 1999) can't update
9411 regs_ever_live without disturbing other parts of the compiler, so
9412 update it here to make the prolog/epilogue code happy. */
9413 if (no_new_pseudos && ! regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM])
9414 regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
9416 current_function_uses_pic_offset_table = 1;
9418 return pic_offset_table_rtx;
9421 /* Function to init struct machine_function.
9422 This will be called, via a pointer variable,
9423 from push_function_context. */
9425 static struct machine_function *
9426 rs6000_init_machine_status (void)
9428 return ggc_alloc_cleared (sizeof (machine_function));
9431 /* These macros test for integers and extract the low-order bits. */
9433 ((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE) \
9434 && GET_MODE (X) == VOIDmode)
9436 #define INT_LOWPART(X) \
9437 (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
9443 unsigned long val = INT_LOWPART (op);
9445 /* If the high bit is zero, the value is the first 1 bit we find
9447 if ((val & 0x80000000) == 0)
9449 if ((val & 0xffffffff) == 0)
9453 while (((val <<= 1) & 0x80000000) == 0)
9458 /* If the high bit is set and the low bit is not, or the mask is all
9459 1's, the value is zero. */
9460 if ((val & 1) == 0 || (val & 0xffffffff) == 0xffffffff)
9463 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
9466 while (((val >>= 1) & 1) != 0)
9476 unsigned long val = INT_LOWPART (op);
9478 /* If the low bit is zero, the value is the first 1 bit we find from
9482 if ((val & 0xffffffff) == 0)
9486 while (((val >>= 1) & 1) == 0)
9492 /* If the low bit is set and the high bit is not, or the mask is all
9493 1's, the value is 31. */
9494 if ((val & 0x80000000) == 0 || (val & 0xffffffff) == 0xffffffff)
9497 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
9500 while (((val <<= 1) & 0x80000000) != 0)
9506 /* Locate some local-dynamic symbol still in use by this function
9507 so that we can print its name in some tls_ld pattern. */
9510 rs6000_get_some_local_dynamic_name (void)
9514 if (cfun->machine->some_ld_name)
9515 return cfun->machine->some_ld_name;
9517 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
9519 && for_each_rtx (&PATTERN (insn),
9520 rs6000_get_some_local_dynamic_name_1, 0))
9521 return cfun->machine->some_ld_name;
9526 /* Helper function for rs6000_get_some_local_dynamic_name. */
9529 rs6000_get_some_local_dynamic_name_1 (rtx *px, void *data ATTRIBUTE_UNUSED)
9533 if (GET_CODE (x) == SYMBOL_REF)
9535 const char *str = XSTR (x, 0);
9536 if (SYMBOL_REF_TLS_MODEL (x) == TLS_MODEL_LOCAL_DYNAMIC)
9538 cfun->machine->some_ld_name = str;
9546 /* Print an operand. Recognize special options, documented below. */
9549 #define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
9550 #define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
9552 #define SMALL_DATA_RELOC "sda21"
9553 #define SMALL_DATA_REG 0
9557 print_operand (FILE *file, rtx x, int code)
9561 unsigned HOST_WIDE_INT uval;
9566 /* Write out an instruction after the call which may be replaced
9567 with glue code by the loader. This depends on the AIX version. */
9568 asm_fprintf (file, RS6000_CALL_GLUE);
9571 /* %a is output_address. */
9574 /* If X is a constant integer whose low-order 5 bits are zero,
9575 write 'l'. Otherwise, write 'r'. This is a kludge to fix a bug
9576 in the AIX assembler where "sri" with a zero shift count
9577 writes a trash instruction. */
9578 if (GET_CODE (x) == CONST_INT && (INTVAL (x) & 31) == 0)
9585 /* If constant, low-order 16 bits of constant, unsigned.
9586 Otherwise, write normally. */
9588 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 0xffff);
9590 print_operand (file, x, 0);
9594 /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
9595 for 64-bit mask direction. */
9596 putc (((INT_LOWPART(x) & 1) == 0 ? 'r' : 'l'), file);
9599 /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
9603 /* X is a CR register. Print the number of the GT bit of the CR. */
9604 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
9605 output_operand_lossage ("invalid %%E value");
9607 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 1);
9611 /* Like 'J' but get to the EQ bit. */
9612 if (GET_CODE (x) != REG)
9615 /* Bit 1 is EQ bit. */
9616 i = 4 * (REGNO (x) - CR0_REGNO) + 2;
9618 /* If we want bit 31, write a shift count of zero, not 32. */
9619 fprintf (file, "%d", i == 31 ? 0 : i + 1);
9623 /* X is a CR register. Print the number of the EQ bit of the CR */
9624 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
9625 output_operand_lossage ("invalid %%E value");
9627 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 2);
9631 /* X is a CR register. Print the shift count needed to move it
9632 to the high-order four bits. */
9633 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
9634 output_operand_lossage ("invalid %%f value");
9636 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO));
9640 /* Similar, but print the count for the rotate in the opposite
9642 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
9643 output_operand_lossage ("invalid %%F value");
9645 fprintf (file, "%d", 32 - 4 * (REGNO (x) - CR0_REGNO));
9649 /* X is a constant integer. If it is negative, print "m",
9650 otherwise print "z". This is to make an aze or ame insn. */
9651 if (GET_CODE (x) != CONST_INT)
9652 output_operand_lossage ("invalid %%G value");
9653 else if (INTVAL (x) >= 0)
9660 /* If constant, output low-order five bits. Otherwise, write
9663 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 31);
9665 print_operand (file, x, 0);
9669 /* If constant, output low-order six bits. Otherwise, write
9672 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 63);
9674 print_operand (file, x, 0);
9678 /* Print `i' if this is a constant, else nothing. */
9684 /* Write the bit number in CCR for jump. */
9687 output_operand_lossage ("invalid %%j code");
9689 fprintf (file, "%d", i);
9693 /* Similar, but add one for shift count in rlinm for scc and pass
9694 scc flag to `ccr_bit'. */
9697 output_operand_lossage ("invalid %%J code");
9699 /* If we want bit 31, write a shift count of zero, not 32. */
9700 fprintf (file, "%d", i == 31 ? 0 : i + 1);
9704 /* X must be a constant. Write the 1's complement of the
9707 output_operand_lossage ("invalid %%k value");
9709 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~ INT_LOWPART (x));
9713 /* X must be a symbolic constant on ELF. Write an
9714 expression suitable for an 'addi' that adds in the low 16
9716 if (GET_CODE (x) != CONST)
9718 print_operand_address (file, x);
9723 if (GET_CODE (XEXP (x, 0)) != PLUS
9724 || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
9725 && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
9726 || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
9727 output_operand_lossage ("invalid %%K value");
9728 print_operand_address (file, XEXP (XEXP (x, 0), 0));
9730 /* For GNU as, there must be a non-alphanumeric character
9731 between 'l' and the number. The '-' is added by
9732 print_operand() already. */
9733 if (INTVAL (XEXP (XEXP (x, 0), 1)) >= 0)
9735 print_operand (file, XEXP (XEXP (x, 0), 1), 0);
9739 /* %l is output_asm_label. */
9742 /* Write second word of DImode or DFmode reference. Works on register
9743 or non-indexed memory only. */
9744 if (GET_CODE (x) == REG)
9745 fprintf (file, "%s", reg_names[REGNO (x) + 1]);
9746 else if (GET_CODE (x) == MEM)
9748 /* Handle possible auto-increment. Since it is pre-increment and
9749 we have already done it, we can just use an offset of word. */
9750 if (GET_CODE (XEXP (x, 0)) == PRE_INC
9751 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
9752 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
9755 output_address (XEXP (adjust_address_nv (x, SImode,
9759 if (small_data_operand (x, GET_MODE (x)))
9760 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
9761 reg_names[SMALL_DATA_REG]);
9766 /* MB value for a mask operand. */
9767 if (! mask_operand (x, SImode))
9768 output_operand_lossage ("invalid %%m value");
9770 fprintf (file, "%d", extract_MB (x));
9774 /* ME value for a mask operand. */
9775 if (! mask_operand (x, SImode))
9776 output_operand_lossage ("invalid %%M value");
9778 fprintf (file, "%d", extract_ME (x));
9781 /* %n outputs the negative of its operand. */
9784 /* Write the number of elements in the vector times 4. */
9785 if (GET_CODE (x) != PARALLEL)
9786 output_operand_lossage ("invalid %%N value");
9788 fprintf (file, "%d", XVECLEN (x, 0) * 4);
9792 /* Similar, but subtract 1 first. */
9793 if (GET_CODE (x) != PARALLEL)
9794 output_operand_lossage ("invalid %%O value");
9796 fprintf (file, "%d", (XVECLEN (x, 0) - 1) * 4);
9800 /* X is a CONST_INT that is a power of two. Output the logarithm. */
9802 || INT_LOWPART (x) < 0
9803 || (i = exact_log2 (INT_LOWPART (x))) < 0)
9804 output_operand_lossage ("invalid %%p value");
9806 fprintf (file, "%d", i);
9810 /* The operand must be an indirect memory reference. The result
9811 is the register name. */
9812 if (GET_CODE (x) != MEM || GET_CODE (XEXP (x, 0)) != REG
9813 || REGNO (XEXP (x, 0)) >= 32)
9814 output_operand_lossage ("invalid %%P value");
9816 fprintf (file, "%s", reg_names[REGNO (XEXP (x, 0))]);
9820 /* This outputs the logical code corresponding to a boolean
9821 expression. The expression may have one or both operands
9822 negated (if one, only the first one). For condition register
9823 logical operations, it will also treat the negated
9824 CR codes as NOTs, but not handle NOTs of them. */
9826 const char *const *t = 0;
9828 enum rtx_code code = GET_CODE (x);
9829 static const char * const tbl[3][3] = {
9830 { "and", "andc", "nor" },
9831 { "or", "orc", "nand" },
9832 { "xor", "eqv", "xor" } };
9836 else if (code == IOR)
9838 else if (code == XOR)
9841 output_operand_lossage ("invalid %%q value");
9843 if (GET_CODE (XEXP (x, 0)) != NOT)
9847 if (GET_CODE (XEXP (x, 1)) == NOT)
9865 /* X is a CR register. Print the mask for `mtcrf'. */
9866 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
9867 output_operand_lossage ("invalid %%R value");
9869 fprintf (file, "%d", 128 >> (REGNO (x) - CR0_REGNO));
9873 /* Low 5 bits of 32 - value */
9875 output_operand_lossage ("invalid %%s value");
9877 fprintf (file, HOST_WIDE_INT_PRINT_DEC, (32 - INT_LOWPART (x)) & 31);
9881 /* PowerPC64 mask position. All 0's is excluded.
9882 CONST_INT 32-bit mask is considered sign-extended so any
9883 transition must occur within the CONST_INT, not on the boundary. */
9884 if (! mask64_operand (x, DImode))
9885 output_operand_lossage ("invalid %%S value");
9887 uval = INT_LOWPART (x);
9889 if (uval & 1) /* Clear Left */
9891 #if HOST_BITS_PER_WIDE_INT > 64
9892 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
9896 else /* Clear Right */
9899 #if HOST_BITS_PER_WIDE_INT > 64
9900 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
9908 fprintf (file, "%d", i);
9912 /* Like 'J' but get to the OVERFLOW/UNORDERED bit. */
9913 if (GET_CODE (x) != REG || GET_MODE (x) != CCmode)
9916 /* Bit 3 is OV bit. */
9917 i = 4 * (REGNO (x) - CR0_REGNO) + 3;
9919 /* If we want bit 31, write a shift count of zero, not 32. */
9920 fprintf (file, "%d", i == 31 ? 0 : i + 1);
9924 /* Print the symbolic name of a branch target register. */
9925 if (GET_CODE (x) != REG || (REGNO (x) != LINK_REGISTER_REGNUM
9926 && REGNO (x) != COUNT_REGISTER_REGNUM))
9927 output_operand_lossage ("invalid %%T value");
9928 else if (REGNO (x) == LINK_REGISTER_REGNUM)
9929 fputs (TARGET_NEW_MNEMONICS ? "lr" : "r", file);
9931 fputs ("ctr", file);
9935 /* High-order 16 bits of constant for use in unsigned operand. */
9937 output_operand_lossage ("invalid %%u value");
9939 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
9940 (INT_LOWPART (x) >> 16) & 0xffff);
9944 /* High-order 16 bits of constant for use in signed operand. */
9946 output_operand_lossage ("invalid %%v value");
9948 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
9949 (INT_LOWPART (x) >> 16) & 0xffff);
9953 /* Print `u' if this has an auto-increment or auto-decrement. */
9954 if (GET_CODE (x) == MEM
9955 && (GET_CODE (XEXP (x, 0)) == PRE_INC
9956 || GET_CODE (XEXP (x, 0)) == PRE_DEC))
9961 /* Print the trap code for this operand. */
9962 switch (GET_CODE (x))
9965 fputs ("eq", file); /* 4 */
9968 fputs ("ne", file); /* 24 */
9971 fputs ("lt", file); /* 16 */
9974 fputs ("le", file); /* 20 */
9977 fputs ("gt", file); /* 8 */
9980 fputs ("ge", file); /* 12 */
9983 fputs ("llt", file); /* 2 */
9986 fputs ("lle", file); /* 6 */
9989 fputs ("lgt", file); /* 1 */
9992 fputs ("lge", file); /* 5 */
10000 /* If constant, low-order 16 bits of constant, signed. Otherwise, write
10003 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
10004 ((INT_LOWPART (x) & 0xffff) ^ 0x8000) - 0x8000);
10006 print_operand (file, x, 0);
10010 /* MB value for a PowerPC64 rldic operand. */
10011 val = (GET_CODE (x) == CONST_INT
10012 ? INTVAL (x) : CONST_DOUBLE_HIGH (x));
10017 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
10018 if ((val <<= 1) < 0)
10021 #if HOST_BITS_PER_WIDE_INT == 32
10022 if (GET_CODE (x) == CONST_INT && i >= 0)
10023 i += 32; /* zero-extend high-part was all 0's */
10024 else if (GET_CODE (x) == CONST_DOUBLE && i == 32)
10026 val = CONST_DOUBLE_LOW (x);
10033 for ( ; i < 64; i++)
10034 if ((val <<= 1) < 0)
10039 fprintf (file, "%d", i + 1);
10043 if (GET_CODE (x) == MEM
10044 && legitimate_indexed_address_p (XEXP (x, 0), 0))
10049 /* Like 'L', for third word of TImode */
10050 if (GET_CODE (x) == REG)
10051 fprintf (file, "%s", reg_names[REGNO (x) + 2]);
10052 else if (GET_CODE (x) == MEM)
10054 if (GET_CODE (XEXP (x, 0)) == PRE_INC
10055 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
10056 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
10058 output_address (XEXP (adjust_address_nv (x, SImode, 8), 0));
10059 if (small_data_operand (x, GET_MODE (x)))
10060 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
10061 reg_names[SMALL_DATA_REG]);
10066 /* X is a SYMBOL_REF. Write out the name preceded by a
10067 period and without any trailing data in brackets. Used for function
10068 names. If we are configured for System V (or the embedded ABI) on
10069 the PowerPC, do not emit the period, since those systems do not use
10070 TOCs and the like. */
10071 if (GET_CODE (x) != SYMBOL_REF)
10074 /* Mark the decl as referenced so that cgraph will output the function. */
10075 if (SYMBOL_REF_DECL (x))
10076 mark_decl_referenced (SYMBOL_REF_DECL (x));
10078 if (XSTR (x, 0)[0] != '.')
10080 switch (DEFAULT_ABI)
10094 /* For macho, we need to check it see if we need a stub. */
10097 const char *name = XSTR (x, 0);
10099 if (MACHOPIC_INDIRECT
10100 && machopic_classify_symbol (x) == MACHOPIC_UNDEFINED_FUNCTION)
10101 name = machopic_indirection_name (x, /*stub_p=*/true);
10103 assemble_name (file, name);
10105 else if (TARGET_AIX)
10106 RS6000_OUTPUT_BASENAME (file, XSTR (x, 0));
10108 assemble_name (file, XSTR (x, 0));
10112 /* Like 'L', for last word of TImode. */
10113 if (GET_CODE (x) == REG)
10114 fprintf (file, "%s", reg_names[REGNO (x) + 3]);
10115 else if (GET_CODE (x) == MEM)
10117 if (GET_CODE (XEXP (x, 0)) == PRE_INC
10118 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
10119 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
10121 output_address (XEXP (adjust_address_nv (x, SImode, 12), 0));
10122 if (small_data_operand (x, GET_MODE (x)))
10123 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
10124 reg_names[SMALL_DATA_REG]);
10128 /* Print AltiVec or SPE memory operand. */
10133 if (GET_CODE (x) != MEM)
10140 /* Handle [reg]. */
10141 if (GET_CODE (tmp) == REG)
10143 fprintf (file, "0(%s)", reg_names[REGNO (tmp)]);
10146 /* Handle [reg+UIMM]. */
10147 else if (GET_CODE (tmp) == PLUS &&
10148 GET_CODE (XEXP (tmp, 1)) == CONST_INT)
10152 if (GET_CODE (XEXP (tmp, 0)) != REG)
10155 x = INTVAL (XEXP (tmp, 1));
10156 fprintf (file, "%d(%s)", x, reg_names[REGNO (XEXP (tmp, 0))]);
10160 /* Fall through. Must be [reg+reg]. */
10162 if (GET_CODE (tmp) == REG)
10163 fprintf (file, "0,%s", reg_names[REGNO (tmp)]);
10164 else if (GET_CODE (tmp) == PLUS && GET_CODE (XEXP (tmp, 1)) == REG)
10166 if (REGNO (XEXP (tmp, 0)) == 0)
10167 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 1)) ],
10168 reg_names[ REGNO (XEXP (tmp, 0)) ]);
10170 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 0)) ],
10171 reg_names[ REGNO (XEXP (tmp, 1)) ]);
10179 if (GET_CODE (x) == REG)
10180 fprintf (file, "%s", reg_names[REGNO (x)]);
10181 else if (GET_CODE (x) == MEM)
10183 /* We need to handle PRE_INC and PRE_DEC here, since we need to
10184 know the width from the mode. */
10185 if (GET_CODE (XEXP (x, 0)) == PRE_INC)
10186 fprintf (file, "%d(%s)", GET_MODE_SIZE (GET_MODE (x)),
10187 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
10188 else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
10189 fprintf (file, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x)),
10190 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
10192 output_address (XEXP (x, 0));
10195 output_addr_const (file, x);
10199 assemble_name (file, rs6000_get_some_local_dynamic_name ());
10203 output_operand_lossage ("invalid %%xn code");
10207 /* Print the address of an operand. */
10210 print_operand_address (FILE *file, rtx x)
10212 if (GET_CODE (x) == REG)
10213 fprintf (file, "0(%s)", reg_names[ REGNO (x) ]);
10214 else if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST
10215 || GET_CODE (x) == LABEL_REF)
10217 output_addr_const (file, x);
10218 if (small_data_operand (x, GET_MODE (x)))
10219 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
10220 reg_names[SMALL_DATA_REG]);
10221 else if (TARGET_TOC)
10224 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG)
10226 if (REGNO (XEXP (x, 0)) == 0)
10227 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 1)) ],
10228 reg_names[ REGNO (XEXP (x, 0)) ]);
10230 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 0)) ],
10231 reg_names[ REGNO (XEXP (x, 1)) ]);
10233 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
10234 fprintf (file, HOST_WIDE_INT_PRINT_DEC "(%s)",
10235 INTVAL (XEXP (x, 1)), reg_names[ REGNO (XEXP (x, 0)) ]);
10237 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
10238 && CONSTANT_P (XEXP (x, 1)))
10240 output_addr_const (file, XEXP (x, 1));
10241 fprintf (file, "@l(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
10245 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
10246 && CONSTANT_P (XEXP (x, 1)))
10248 fprintf (file, "lo16(");
10249 output_addr_const (file, XEXP (x, 1));
10250 fprintf (file, ")(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
10253 else if (legitimate_constant_pool_address_p (x))
10255 if (TARGET_AIX && (!TARGET_ELF || !TARGET_MINIMAL_TOC))
10257 rtx contains_minus = XEXP (x, 1);
10261 /* Find the (minus (sym) (toc)) buried in X, and temporarily
10262 turn it into (sym) for output_addr_const. */
10263 while (GET_CODE (XEXP (contains_minus, 0)) != MINUS)
10264 contains_minus = XEXP (contains_minus, 0);
10266 minus = XEXP (contains_minus, 0);
10267 symref = XEXP (minus, 0);
10268 XEXP (contains_minus, 0) = symref;
10273 name = XSTR (symref, 0);
10274 newname = alloca (strlen (name) + sizeof ("@toc"));
10275 strcpy (newname, name);
10276 strcat (newname, "@toc");
10277 XSTR (symref, 0) = newname;
10279 output_addr_const (file, XEXP (x, 1));
10281 XSTR (symref, 0) = name;
10282 XEXP (contains_minus, 0) = minus;
10285 output_addr_const (file, XEXP (x, 1));
10287 fprintf (file, "(%s)", reg_names[REGNO (XEXP (x, 0))]);
10293 /* Target hook for assembling integer objects. The PowerPC version has
10294 to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
10295 is defined. It also needs to handle DI-mode objects on 64-bit
10299 rs6000_assemble_integer (rtx x, unsigned int size, int aligned_p)
10301 #ifdef RELOCATABLE_NEEDS_FIXUP
10302 /* Special handling for SI values. */
10303 if (size == 4 && aligned_p)
10305 extern int in_toc_section (void);
10306 static int recurse = 0;
10308 /* For -mrelocatable, we mark all addresses that need to be fixed up
10309 in the .fixup section. */
10310 if (TARGET_RELOCATABLE
10311 && !in_toc_section ()
10312 && !in_text_section ()
10313 && !in_unlikely_text_section ()
10315 && GET_CODE (x) != CONST_INT
10316 && GET_CODE (x) != CONST_DOUBLE
10322 ASM_GENERATE_INTERNAL_LABEL (buf, "LCP", fixuplabelno);
10324 ASM_OUTPUT_LABEL (asm_out_file, buf);
10325 fprintf (asm_out_file, "\t.long\t(");
10326 output_addr_const (asm_out_file, x);
10327 fprintf (asm_out_file, ")@fixup\n");
10328 fprintf (asm_out_file, "\t.section\t\".fixup\",\"aw\"\n");
10329 ASM_OUTPUT_ALIGN (asm_out_file, 2);
10330 fprintf (asm_out_file, "\t.long\t");
10331 assemble_name (asm_out_file, buf);
10332 fprintf (asm_out_file, "\n\t.previous\n");
10336 /* Remove initial .'s to turn a -mcall-aixdesc function
10337 address into the address of the descriptor, not the function
10339 else if (GET_CODE (x) == SYMBOL_REF
10340 && XSTR (x, 0)[0] == '.'
10341 && DEFAULT_ABI == ABI_AIX)
10343 const char *name = XSTR (x, 0);
10344 while (*name == '.')
10347 fprintf (asm_out_file, "\t.long\t%s\n", name);
10351 #endif /* RELOCATABLE_NEEDS_FIXUP */
10352 return default_assemble_integer (x, size, aligned_p);
10355 #ifdef HAVE_GAS_HIDDEN
10356 /* Emit an assembler directive to set symbol visibility for DECL to
10357 VISIBILITY_TYPE. */
10360 rs6000_assemble_visibility (tree decl, int vis)
10362 /* Functions need to have their entry point symbol visibility set as
10363 well as their descriptor symbol visibility. */
10364 if (DEFAULT_ABI == ABI_AIX && TREE_CODE (decl) == FUNCTION_DECL)
10366 static const char * const visibility_types[] = {
10367 NULL, "internal", "hidden", "protected"
10370 const char *name, *type;
10372 name = ((* targetm.strip_name_encoding)
10373 (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl))));
10374 type = visibility_types[vis];
10376 fprintf (asm_out_file, "\t.%s\t%s\n", type, name);
10377 fprintf (asm_out_file, "\t.%s\t.%s\n", type, name);
10380 default_assemble_visibility (decl, vis);
10385 rs6000_reverse_condition (enum machine_mode mode, enum rtx_code code)
10387 /* Reversal of FP compares takes care -- an ordered compare
10388 becomes an unordered compare and vice versa. */
10389 if (mode == CCFPmode
10390 && (!flag_finite_math_only
10391 || code == UNLT || code == UNLE || code == UNGT || code == UNGE
10392 || code == UNEQ || code == LTGT))
10393 return reverse_condition_maybe_unordered (code);
10395 return reverse_condition (code);
10398 /* Generate a compare for CODE. Return a brand-new rtx that
10399 represents the result of the compare. */
10402 rs6000_generate_compare (enum rtx_code code)
10404 enum machine_mode comp_mode;
10405 rtx compare_result;
10407 if (rs6000_compare_fp_p)
10408 comp_mode = CCFPmode;
10409 else if (code == GTU || code == LTU
10410 || code == GEU || code == LEU)
10411 comp_mode = CCUNSmode;
10413 comp_mode = CCmode;
10415 /* First, the compare. */
10416 compare_result = gen_reg_rtx (comp_mode);
10418 /* SPE FP compare instructions on the GPRs. Yuck! */
10419 if ((TARGET_E500 && !TARGET_FPRS && TARGET_HARD_FLOAT)
10420 && rs6000_compare_fp_p)
10422 rtx cmp, or1, or2, or_result, compare_result2;
10424 /* Note: The E500 comparison instructions set the GT bit (x +
10425 1), on success. This explains the mess. */
10429 case EQ: case UNEQ: case NE: case LTGT:
10430 cmp = flag_finite_math_only
10431 ? gen_tstsfeq_gpr (compare_result, rs6000_compare_op0,
10432 rs6000_compare_op1)
10433 : gen_cmpsfeq_gpr (compare_result, rs6000_compare_op0,
10434 rs6000_compare_op1);
10436 case GT: case GTU: case UNGT: case UNGE: case GE: case GEU:
10437 cmp = flag_finite_math_only
10438 ? gen_tstsfgt_gpr (compare_result, rs6000_compare_op0,
10439 rs6000_compare_op1)
10440 : gen_cmpsfgt_gpr (compare_result, rs6000_compare_op0,
10441 rs6000_compare_op1);
10443 case LT: case LTU: case UNLT: case UNLE: case LE: case LEU:
10444 cmp = flag_finite_math_only
10445 ? gen_tstsflt_gpr (compare_result, rs6000_compare_op0,
10446 rs6000_compare_op1)
10447 : gen_cmpsflt_gpr (compare_result, rs6000_compare_op0,
10448 rs6000_compare_op1);
10454 /* Synthesize LE and GE from LT/GT || EQ. */
10455 if (code == LE || code == GE || code == LEU || code == GEU)
10461 case LE: code = LT; break;
10462 case GE: code = GT; break;
10463 case LEU: code = LT; break;
10464 case GEU: code = GT; break;
10468 or1 = gen_reg_rtx (SImode);
10469 or2 = gen_reg_rtx (SImode);
10470 or_result = gen_reg_rtx (CCEQmode);
10471 compare_result2 = gen_reg_rtx (CCFPmode);
10474 cmp = flag_finite_math_only
10475 ? gen_tstsfeq_gpr (compare_result2, rs6000_compare_op0,
10476 rs6000_compare_op1)
10477 : gen_cmpsfeq_gpr (compare_result2, rs6000_compare_op0,
10478 rs6000_compare_op1);
10481 or1 = gen_rtx_GT (SImode, compare_result, const0_rtx);
10482 or2 = gen_rtx_GT (SImode, compare_result2, const0_rtx);
10484 /* OR them together. */
10485 cmp = gen_rtx_SET (VOIDmode, or_result,
10486 gen_rtx_COMPARE (CCEQmode,
10487 gen_rtx_IOR (SImode, or1, or2),
10489 compare_result = or_result;
10494 if (code == NE || code == LTGT)
10503 emit_insn (gen_rtx_SET (VOIDmode, compare_result,
10504 gen_rtx_COMPARE (comp_mode,
10505 rs6000_compare_op0,
10506 rs6000_compare_op1)));
10508 /* Some kinds of FP comparisons need an OR operation;
10509 under flag_finite_math_only we don't bother. */
10510 if (rs6000_compare_fp_p
10511 && ! flag_finite_math_only
10512 && ! (TARGET_HARD_FLOAT && TARGET_E500 && !TARGET_FPRS)
10513 && (code == LE || code == GE
10514 || code == UNEQ || code == LTGT
10515 || code == UNGT || code == UNLT))
10517 enum rtx_code or1, or2;
10518 rtx or1_rtx, or2_rtx, compare2_rtx;
10519 rtx or_result = gen_reg_rtx (CCEQmode);
10523 case LE: or1 = LT; or2 = EQ; break;
10524 case GE: or1 = GT; or2 = EQ; break;
10525 case UNEQ: or1 = UNORDERED; or2 = EQ; break;
10526 case LTGT: or1 = LT; or2 = GT; break;
10527 case UNGT: or1 = UNORDERED; or2 = GT; break;
10528 case UNLT: or1 = UNORDERED; or2 = LT; break;
10531 validate_condition_mode (or1, comp_mode);
10532 validate_condition_mode (or2, comp_mode);
10533 or1_rtx = gen_rtx_fmt_ee (or1, SImode, compare_result, const0_rtx);
10534 or2_rtx = gen_rtx_fmt_ee (or2, SImode, compare_result, const0_rtx);
10535 compare2_rtx = gen_rtx_COMPARE (CCEQmode,
10536 gen_rtx_IOR (SImode, or1_rtx, or2_rtx),
10538 emit_insn (gen_rtx_SET (VOIDmode, or_result, compare2_rtx));
10540 compare_result = or_result;
10544 validate_condition_mode (code, GET_MODE (compare_result));
10546 return gen_rtx_fmt_ee (code, VOIDmode, compare_result, const0_rtx);
10550 /* Emit the RTL for an sCOND pattern. */
10553 rs6000_emit_sCOND (enum rtx_code code, rtx result)
10556 enum machine_mode op_mode;
10557 enum rtx_code cond_code;
10559 condition_rtx = rs6000_generate_compare (code);
10560 cond_code = GET_CODE (condition_rtx);
10562 if (TARGET_E500 && rs6000_compare_fp_p
10563 && !TARGET_FPRS && TARGET_HARD_FLOAT)
10567 PUT_MODE (condition_rtx, SImode);
10568 t = XEXP (condition_rtx, 0);
10570 if (cond_code != NE && cond_code != EQ)
10573 if (cond_code == NE)
10574 emit_insn (gen_e500_flip_eq_bit (t, t));
10576 emit_insn (gen_move_from_CR_eq_bit (result, t));
10580 if (cond_code == NE
10581 || cond_code == GE || cond_code == LE
10582 || cond_code == GEU || cond_code == LEU
10583 || cond_code == ORDERED || cond_code == UNGE || cond_code == UNLE)
10585 rtx not_result = gen_reg_rtx (CCEQmode);
10586 rtx not_op, rev_cond_rtx;
10587 enum machine_mode cc_mode;
10589 cc_mode = GET_MODE (XEXP (condition_rtx, 0));
10591 rev_cond_rtx = gen_rtx_fmt_ee (rs6000_reverse_condition (cc_mode, cond_code),
10592 SImode, XEXP (condition_rtx, 0), const0_rtx);
10593 not_op = gen_rtx_COMPARE (CCEQmode, rev_cond_rtx, const0_rtx);
10594 emit_insn (gen_rtx_SET (VOIDmode, not_result, not_op));
10595 condition_rtx = gen_rtx_EQ (VOIDmode, not_result, const0_rtx);
10598 op_mode = GET_MODE (rs6000_compare_op0);
10599 if (op_mode == VOIDmode)
10600 op_mode = GET_MODE (rs6000_compare_op1);
10602 if (TARGET_POWERPC64 && (op_mode == DImode || rs6000_compare_fp_p))
10604 PUT_MODE (condition_rtx, DImode);
10605 convert_move (result, condition_rtx, 0);
10609 PUT_MODE (condition_rtx, SImode);
10610 emit_insn (gen_rtx_SET (VOIDmode, result, condition_rtx));
10614 /* Emit a branch of kind CODE to location LOC. */
10617 rs6000_emit_cbranch (enum rtx_code code, rtx loc)
10619 rtx condition_rtx, loc_ref;
10621 condition_rtx = rs6000_generate_compare (code);
10622 loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
10623 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
10624 gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
10625 loc_ref, pc_rtx)));
10628 /* Return the string to output a conditional branch to LABEL, which is
10629 the operand number of the label, or -1 if the branch is really a
10630 conditional return.
10632 OP is the conditional expression. XEXP (OP, 0) is assumed to be a
10633 condition code register and its mode specifies what kind of
10634 comparison we made.
10636 REVERSED is nonzero if we should reverse the sense of the comparison.
10638 INSN is the insn. */
10641 output_cbranch (rtx op, const char *label, int reversed, rtx insn)
10643 static char string[64];
10644 enum rtx_code code = GET_CODE (op);
10645 rtx cc_reg = XEXP (op, 0);
10646 enum machine_mode mode = GET_MODE (cc_reg);
10647 int cc_regno = REGNO (cc_reg) - CR0_REGNO;
10648 int need_longbranch = label != NULL && get_attr_length (insn) == 8;
10649 int really_reversed = reversed ^ need_longbranch;
10655 validate_condition_mode (code, mode);
10657 /* Work out which way this really branches. We could use
10658 reverse_condition_maybe_unordered here always but this
10659 makes the resulting assembler clearer. */
10660 if (really_reversed)
10662 /* Reversal of FP compares takes care -- an ordered compare
10663 becomes an unordered compare and vice versa. */
10664 if (mode == CCFPmode)
10665 code = reverse_condition_maybe_unordered (code);
10667 code = reverse_condition (code);
10670 if ((TARGET_E500 && !TARGET_FPRS && TARGET_HARD_FLOAT) && mode == CCFPmode)
10672 /* The efscmp/tst* instructions twiddle bit 2, which maps nicely
10675 /* Opposite of GT. */
10677 else if (code == NE)
10685 /* Not all of these are actually distinct opcodes, but
10686 we distinguish them for clarity of the resulting assembler. */
10687 case NE: case LTGT:
10688 ccode = "ne"; break;
10689 case EQ: case UNEQ:
10690 ccode = "eq"; break;
10692 ccode = "ge"; break;
10693 case GT: case GTU: case UNGT:
10694 ccode = "gt"; break;
10696 ccode = "le"; break;
10697 case LT: case LTU: case UNLT:
10698 ccode = "lt"; break;
10699 case UNORDERED: ccode = "un"; break;
10700 case ORDERED: ccode = "nu"; break;
10701 case UNGE: ccode = "nl"; break;
10702 case UNLE: ccode = "ng"; break;
10707 /* Maybe we have a guess as to how likely the branch is.
10708 The old mnemonics don't have a way to specify this information. */
10710 note = find_reg_note (insn, REG_BR_PROB, NULL_RTX);
10711 if (note != NULL_RTX)
10713 /* PROB is the difference from 50%. */
10714 int prob = INTVAL (XEXP (note, 0)) - REG_BR_PROB_BASE / 2;
10716 /* Only hint for highly probable/improbable branches on newer
10717 cpus as static prediction overrides processor dynamic
10718 prediction. For older cpus we may as well always hint, but
10719 assume not taken for branches that are very close to 50% as a
10720 mispredicted taken branch is more expensive than a
10721 mispredicted not-taken branch. */
10722 if (rs6000_always_hint
10723 || abs (prob) > REG_BR_PROB_BASE / 100 * 48)
10725 if (abs (prob) > REG_BR_PROB_BASE / 20
10726 && ((prob > 0) ^ need_longbranch))
10734 s += sprintf (s, "{b%sr|b%slr%s} ", ccode, ccode, pred);
10736 s += sprintf (s, "{b%s|b%s%s} ", ccode, ccode, pred);
10738 /* We need to escape any '%' characters in the reg_names string.
10739 Assume they'd only be the first character.... */
10740 if (reg_names[cc_regno + CR0_REGNO][0] == '%')
10742 s += sprintf (s, "%s", reg_names[cc_regno + CR0_REGNO]);
10746 /* If the branch distance was too far, we may have to use an
10747 unconditional branch to go the distance. */
10748 if (need_longbranch)
10749 s += sprintf (s, ",$+8\n\tb %s", label);
10751 s += sprintf (s, ",%s", label);
10757 /* Return the string to flip the EQ bit on a CR. */
10759 output_e500_flip_eq_bit (rtx dst, rtx src)
10761 static char string[64];
10764 if (GET_CODE (dst) != REG || ! CR_REGNO_P (REGNO (dst))
10765 || GET_CODE (src) != REG || ! CR_REGNO_P (REGNO (src)))
10769 a = 4 * (REGNO (dst) - CR0_REGNO) + 2;
10770 b = 4 * (REGNO (src) - CR0_REGNO) + 2;
10772 sprintf (string, "crnot %d,%d", a, b);
10776 /* Emit a conditional move: move TRUE_COND to DEST if OP of the
10777 operands of the last comparison is nonzero/true, FALSE_COND if it
10778 is zero/false. Return 0 if the hardware has no such operation. */
10781 rs6000_emit_cmove (rtx dest, rtx op, rtx true_cond, rtx false_cond)
10783 enum rtx_code code = GET_CODE (op);
10784 rtx op0 = rs6000_compare_op0;
10785 rtx op1 = rs6000_compare_op1;
10786 REAL_VALUE_TYPE c1;
10787 enum machine_mode compare_mode = GET_MODE (op0);
10788 enum machine_mode result_mode = GET_MODE (dest);
10791 /* These modes should always match. */
10792 if (GET_MODE (op1) != compare_mode
10793 /* In the isel case however, we can use a compare immediate, so
10794 op1 may be a small constant. */
10795 && (!TARGET_ISEL || !short_cint_operand (op1, VOIDmode)))
10797 if (GET_MODE (true_cond) != result_mode)
10799 if (GET_MODE (false_cond) != result_mode)
10802 /* First, work out if the hardware can do this at all, or
10803 if it's too slow.... */
10804 if (! rs6000_compare_fp_p)
10807 return rs6000_emit_int_cmove (dest, op, true_cond, false_cond);
10810 else if (TARGET_E500 && TARGET_HARD_FLOAT && !TARGET_FPRS
10811 && GET_MODE_CLASS (compare_mode) == MODE_FLOAT)
10814 /* Eliminate half of the comparisons by switching operands, this
10815 makes the remaining code simpler. */
10816 if (code == UNLT || code == UNGT || code == UNORDERED || code == NE
10817 || code == LTGT || code == LT || code == UNLE)
10819 code = reverse_condition_maybe_unordered (code);
10821 true_cond = false_cond;
10825 /* UNEQ and LTGT take four instructions for a comparison with zero,
10826 it'll probably be faster to use a branch here too. */
10827 if (code == UNEQ && HONOR_NANS (compare_mode))
10830 if (GET_CODE (op1) == CONST_DOUBLE)
10831 REAL_VALUE_FROM_CONST_DOUBLE (c1, op1);
10833 /* We're going to try to implement comparisons by performing
10834 a subtract, then comparing against zero. Unfortunately,
10835 Inf - Inf is NaN which is not zero, and so if we don't
10836 know that the operand is finite and the comparison
10837 would treat EQ different to UNORDERED, we can't do it. */
10838 if (HONOR_INFINITIES (compare_mode)
10839 && code != GT && code != UNGE
10840 && (GET_CODE (op1) != CONST_DOUBLE || real_isinf (&c1))
10841 /* Constructs of the form (a OP b ? a : b) are safe. */
10842 && ((! rtx_equal_p (op0, false_cond) && ! rtx_equal_p (op1, false_cond))
10843 || (! rtx_equal_p (op0, true_cond)
10844 && ! rtx_equal_p (op1, true_cond))))
10846 /* At this point we know we can use fsel. */
10848 /* Reduce the comparison to a comparison against zero. */
10849 temp = gen_reg_rtx (compare_mode);
10850 emit_insn (gen_rtx_SET (VOIDmode, temp,
10851 gen_rtx_MINUS (compare_mode, op0, op1)));
10853 op1 = CONST0_RTX (compare_mode);
10855 /* If we don't care about NaNs we can reduce some of the comparisons
10856 down to faster ones. */
10857 if (! HONOR_NANS (compare_mode))
10863 true_cond = false_cond;
10876 /* Now, reduce everything down to a GE. */
10883 temp = gen_reg_rtx (compare_mode);
10884 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
10889 temp = gen_reg_rtx (compare_mode);
10890 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_ABS (compare_mode, op0)));
10895 temp = gen_reg_rtx (compare_mode);
10896 emit_insn (gen_rtx_SET (VOIDmode, temp,
10897 gen_rtx_NEG (compare_mode,
10898 gen_rtx_ABS (compare_mode, op0))));
10903 /* a UNGE 0 <-> (a GE 0 || -a UNLT 0) */
10904 temp = gen_reg_rtx (result_mode);
10905 emit_insn (gen_rtx_SET (VOIDmode, temp,
10906 gen_rtx_IF_THEN_ELSE (result_mode,
10907 gen_rtx_GE (VOIDmode,
10909 true_cond, false_cond)));
10910 false_cond = true_cond;
10913 temp = gen_reg_rtx (compare_mode);
10914 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
10919 /* a GT 0 <-> (a GE 0 && -a UNLT 0) */
10920 temp = gen_reg_rtx (result_mode);
10921 emit_insn (gen_rtx_SET (VOIDmode, temp,
10922 gen_rtx_IF_THEN_ELSE (result_mode,
10923 gen_rtx_GE (VOIDmode,
10925 true_cond, false_cond)));
10926 true_cond = false_cond;
10929 temp = gen_reg_rtx (compare_mode);
10930 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
10938 emit_insn (gen_rtx_SET (VOIDmode, dest,
10939 gen_rtx_IF_THEN_ELSE (result_mode,
10940 gen_rtx_GE (VOIDmode,
10942 true_cond, false_cond)));
10946 /* Same as above, but for ints (isel). */
10949 rs6000_emit_int_cmove (rtx dest, rtx op, rtx true_cond, rtx false_cond)
10951 rtx condition_rtx, cr;
10953 /* All isel implementations thus far are 32-bits. */
10954 if (GET_MODE (rs6000_compare_op0) != SImode)
10957 /* We still have to do the compare, because isel doesn't do a
10958 compare, it just looks at the CRx bits set by a previous compare
10960 condition_rtx = rs6000_generate_compare (GET_CODE (op));
10961 cr = XEXP (condition_rtx, 0);
10963 if (GET_MODE (cr) == CCmode)
10964 emit_insn (gen_isel_signed (dest, condition_rtx,
10965 true_cond, false_cond, cr));
10967 emit_insn (gen_isel_unsigned (dest, condition_rtx,
10968 true_cond, false_cond, cr));
10974 output_isel (rtx *operands)
10976 enum rtx_code code;
10978 code = GET_CODE (operands[1]);
10979 if (code == GE || code == GEU || code == LE || code == LEU || code == NE)
10981 PUT_CODE (operands[1], reverse_condition (code));
10982 return "isel %0,%3,%2,%j1";
10985 return "isel %0,%2,%3,%j1";
10989 rs6000_emit_minmax (rtx dest, enum rtx_code code, rtx op0, rtx op1)
10991 enum machine_mode mode = GET_MODE (op0);
10995 if (code == SMAX || code == SMIN)
11000 if (code == SMAX || code == UMAX)
11001 target = emit_conditional_move (dest, c, op0, op1, mode,
11002 op0, op1, mode, 0);
11004 target = emit_conditional_move (dest, c, op0, op1, mode,
11005 op1, op0, mode, 0);
11006 if (target == NULL_RTX)
11008 if (target != dest)
11009 emit_move_insn (dest, target);
11012 /* Emit instructions to move SRC to DST. Called by splitters for
11013 multi-register moves. It will emit at most one instruction for
11014 each register that is accessed; that is, it won't emit li/lis pairs
11015 (or equivalent for 64-bit code). One of SRC or DST must be a hard
11019 rs6000_split_multireg_move (rtx dst, rtx src)
11021 /* The register number of the first register being moved. */
11023 /* The mode that is to be moved. */
11024 enum machine_mode mode;
11025 /* The mode that the move is being done in, and its size. */
11026 enum machine_mode reg_mode;
11028 /* The number of registers that will be moved. */
11031 reg = REG_P (dst) ? REGNO (dst) : REGNO (src);
11032 mode = GET_MODE (dst);
11033 nregs = HARD_REGNO_NREGS (reg, mode);
11034 if (FP_REGNO_P (reg))
11036 else if (ALTIVEC_REGNO_P (reg))
11037 reg_mode = V16QImode;
11039 reg_mode = word_mode;
11040 reg_mode_size = GET_MODE_SIZE (reg_mode);
11042 if (reg_mode_size * nregs != GET_MODE_SIZE (mode))
11045 if (REG_P (src) && REG_P (dst) && (REGNO (src) < REGNO (dst)))
11047 /* Move register range backwards, if we might have destructive
11050 for (i = nregs - 1; i >= 0; i--)
11051 emit_insn (gen_rtx_SET (VOIDmode,
11052 simplify_gen_subreg (reg_mode, dst, mode,
11053 i * reg_mode_size),
11054 simplify_gen_subreg (reg_mode, src, mode,
11055 i * reg_mode_size)));
11061 bool used_update = false;
11063 if (MEM_P (src) && INT_REGNO_P (reg))
11067 if (GET_CODE (XEXP (src, 0)) == PRE_INC
11068 || GET_CODE (XEXP (src, 0)) == PRE_DEC)
11071 breg = XEXP (XEXP (src, 0), 0);
11072 delta_rtx = GET_CODE (XEXP (src, 0)) == PRE_INC
11073 ? GEN_INT (GET_MODE_SIZE (GET_MODE (src)))
11074 : GEN_INT (-GET_MODE_SIZE (GET_MODE (src)));
11075 emit_insn (TARGET_32BIT
11076 ? gen_addsi3 (breg, breg, delta_rtx)
11077 : gen_adddi3 (breg, breg, delta_rtx));
11078 src = gen_rtx_MEM (mode, breg);
11080 else if (! offsettable_memref_p (src))
11082 rtx newsrc, basereg;
11083 basereg = gen_rtx_REG (Pmode, reg);
11084 emit_insn (gen_rtx_SET (VOIDmode, basereg, XEXP (src, 0)));
11085 newsrc = gen_rtx_MEM (GET_MODE (src), basereg);
11086 MEM_COPY_ATTRIBUTES (newsrc, src);
11090 /* We have now address involving an base register only.
11091 If we use one of the registers to address memory,
11092 we have change that register last. */
11094 breg = (GET_CODE (XEXP (src, 0)) == PLUS
11095 ? XEXP (XEXP (src, 0), 0)
11101 if (REGNO (breg) >= REGNO (dst)
11102 && REGNO (breg) < REGNO (dst) + nregs)
11103 j = REGNO (breg) - REGNO (dst);
11106 if (GET_CODE (dst) == MEM && INT_REGNO_P (reg))
11110 if (GET_CODE (XEXP (dst, 0)) == PRE_INC
11111 || GET_CODE (XEXP (dst, 0)) == PRE_DEC)
11114 breg = XEXP (XEXP (dst, 0), 0);
11115 delta_rtx = GET_CODE (XEXP (dst, 0)) == PRE_INC
11116 ? GEN_INT (GET_MODE_SIZE (GET_MODE (dst)))
11117 : GEN_INT (-GET_MODE_SIZE (GET_MODE (dst)));
11119 /* We have to update the breg before doing the store.
11120 Use store with update, if available. */
11124 rtx nsrc = simplify_gen_subreg (reg_mode, src, mode, 0);
11125 emit_insn (TARGET_32BIT
11126 ? gen_movsi_update (breg, breg, delta_rtx, nsrc)
11127 : gen_movdi_update (breg, breg, delta_rtx, nsrc));
11128 used_update = true;
11131 emit_insn (TARGET_32BIT
11132 ? gen_addsi3 (breg, breg, delta_rtx)
11133 : gen_adddi3 (breg, breg, delta_rtx));
11134 dst = gen_rtx_MEM (mode, breg);
11136 else if (! offsettable_memref_p (dst))
11138 rtx newdst, basereg;
11139 basereg = gen_rtx_REG (Pmode, reg);
11140 emit_insn (gen_rtx_SET (VOIDmode, basereg, XEXP (dst, 0)));
11141 newdst = gen_rtx_MEM (GET_MODE (dst), basereg);
11142 MEM_COPY_ATTRIBUTES (newdst, dst);
11147 for (i = 0; i < nregs; i++)
11149 /* Calculate index to next subword. */
11154 /* If compiler already emited move of first word by
11155 store with update, no need to do anything. */
11156 if (j == 0 && used_update)
11159 emit_insn (gen_rtx_SET (VOIDmode,
11160 simplify_gen_subreg (reg_mode, dst, mode,
11161 j * reg_mode_size),
11162 simplify_gen_subreg (reg_mode, src, mode,
11163 j * reg_mode_size)));
11169 /* This page contains routines that are used to determine what the
11170 function prologue and epilogue code will do and write them out. */
11172 /* Return the first fixed-point register that is required to be
11173 saved. 32 if none. */
11176 first_reg_to_save (void)
11180 /* Find lowest numbered live register. */
11181 for (first_reg = 13; first_reg <= 31; first_reg++)
11182 if (regs_ever_live[first_reg]
11183 && (! call_used_regs[first_reg]
11184 || (first_reg == RS6000_PIC_OFFSET_TABLE_REGNUM
11185 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
11186 || (DEFAULT_ABI == ABI_DARWIN && flag_pic)
11187 || (TARGET_TOC && TARGET_MINIMAL_TOC)))))
11192 && current_function_uses_pic_offset_table
11193 && first_reg > RS6000_PIC_OFFSET_TABLE_REGNUM)
11194 return RS6000_PIC_OFFSET_TABLE_REGNUM;
11200 /* Similar, for FP regs. */
11203 first_fp_reg_to_save (void)
11207 /* Find lowest numbered live register. */
11208 for (first_reg = 14 + 32; first_reg <= 63; first_reg++)
11209 if (regs_ever_live[first_reg])
11215 /* Similar, for AltiVec regs. */
11218 first_altivec_reg_to_save (void)
11222 /* Stack frame remains as is unless we are in AltiVec ABI. */
11223 if (! TARGET_ALTIVEC_ABI)
11224 return LAST_ALTIVEC_REGNO + 1;
11226 /* Find lowest numbered live register. */
11227 for (i = FIRST_ALTIVEC_REGNO + 20; i <= LAST_ALTIVEC_REGNO; ++i)
11228 if (regs_ever_live[i])
11234 /* Return a 32-bit mask of the AltiVec registers we need to set in
11235 VRSAVE. Bit n of the return value is 1 if Vn is live. The MSB in
11236 the 32-bit word is 0. */
11238 static unsigned int
11239 compute_vrsave_mask (void)
11241 unsigned int i, mask = 0;
11243 /* First, find out if we use _any_ altivec registers. */
11244 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
11245 if (regs_ever_live[i])
11246 mask |= ALTIVEC_REG_BIT (i);
11251 /* Next, remove the argument registers from the set. These must
11252 be in the VRSAVE mask set by the caller, so we don't need to add
11253 them in again. More importantly, the mask we compute here is
11254 used to generate CLOBBERs in the set_vrsave insn, and we do not
11255 wish the argument registers to die. */
11256 for (i = cfun->args_info.vregno - 1; i >= ALTIVEC_ARG_MIN_REG; --i)
11257 mask &= ~ALTIVEC_REG_BIT (i);
11259 /* Similarly, remove the return value from the set. */
11262 diddle_return_value (is_altivec_return_reg, &yes);
11264 mask &= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN);
11271 is_altivec_return_reg (rtx reg, void *xyes)
11273 bool *yes = (bool *) xyes;
11274 if (REGNO (reg) == ALTIVEC_ARG_RETURN)
11279 /* Calculate the stack information for the current function. This is
11280 complicated by having two separate calling sequences, the AIX calling
11281 sequence and the V.4 calling sequence.
11283 AIX (and Darwin/Mac OS X) stack frames look like:
11285 SP----> +---------------------------------------+
11286 | back chain to caller | 0 0
11287 +---------------------------------------+
11288 | saved CR | 4 8 (8-11)
11289 +---------------------------------------+
11291 +---------------------------------------+
11292 | reserved for compilers | 12 24
11293 +---------------------------------------+
11294 | reserved for binders | 16 32
11295 +---------------------------------------+
11296 | saved TOC pointer | 20 40
11297 +---------------------------------------+
11298 | Parameter save area (P) | 24 48
11299 +---------------------------------------+
11300 | Alloca space (A) | 24+P etc.
11301 +---------------------------------------+
11302 | Local variable space (L) | 24+P+A
11303 +---------------------------------------+
11304 | Float/int conversion temporary (X) | 24+P+A+L
11305 +---------------------------------------+
11306 | Save area for AltiVec registers (W) | 24+P+A+L+X
11307 +---------------------------------------+
11308 | AltiVec alignment padding (Y) | 24+P+A+L+X+W
11309 +---------------------------------------+
11310 | Save area for VRSAVE register (Z) | 24+P+A+L+X+W+Y
11311 +---------------------------------------+
11312 | Save area for GP registers (G) | 24+P+A+X+L+X+W+Y+Z
11313 +---------------------------------------+
11314 | Save area for FP registers (F) | 24+P+A+X+L+X+W+Y+Z+G
11315 +---------------------------------------+
11316 old SP->| back chain to caller's caller |
11317 +---------------------------------------+
11319 The required alignment for AIX configurations is two words (i.e., 8
11323 V.4 stack frames look like:
11325 SP----> +---------------------------------------+
11326 | back chain to caller | 0
11327 +---------------------------------------+
11328 | caller's saved LR | 4
11329 +---------------------------------------+
11330 | Parameter save area (P) | 8
11331 +---------------------------------------+
11332 | Alloca space (A) | 8+P
11333 +---------------------------------------+
11334 | Varargs save area (V) | 8+P+A
11335 +---------------------------------------+
11336 | Local variable space (L) | 8+P+A+V
11337 +---------------------------------------+
11338 | Float/int conversion temporary (X) | 8+P+A+V+L
11339 +---------------------------------------+
11340 | Save area for AltiVec registers (W) | 8+P+A+V+L+X
11341 +---------------------------------------+
11342 | AltiVec alignment padding (Y) | 8+P+A+V+L+X+W
11343 +---------------------------------------+
11344 | Save area for VRSAVE register (Z) | 8+P+A+V+L+X+W+Y
11345 +---------------------------------------+
11346 | SPE: area for 64-bit GP registers |
11347 +---------------------------------------+
11348 | SPE alignment padding |
11349 +---------------------------------------+
11350 | saved CR (C) | 8+P+A+V+L+X+W+Y+Z
11351 +---------------------------------------+
11352 | Save area for GP registers (G) | 8+P+A+V+L+X+W+Y+Z+C
11353 +---------------------------------------+
11354 | Save area for FP registers (F) | 8+P+A+V+L+X+W+Y+Z+C+G
11355 +---------------------------------------+
11356 old SP->| back chain to caller's caller |
11357 +---------------------------------------+
11359 The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
11360 given. (But note below and in sysv4.h that we require only 8 and
11361 may round up the size of our stack frame anyways. The historical
11362 reason is early versions of powerpc-linux which didn't properly
11363 align the stack at program startup. A happy side-effect is that
11364 -mno-eabi libraries can be used with -meabi programs.)
11366 The EABI configuration defaults to the V.4 layout. However,
11367 the stack alignment requirements may differ. If -mno-eabi is not
11368 given, the required stack alignment is 8 bytes; if -mno-eabi is
11369 given, the required alignment is 16 bytes. (But see V.4 comment
11372 #ifndef ABI_STACK_BOUNDARY
11373 #define ABI_STACK_BOUNDARY STACK_BOUNDARY
11376 static rs6000_stack_t *
11377 rs6000_stack_info (void)
11379 static rs6000_stack_t info, zero_info;
11380 rs6000_stack_t *info_ptr = &info;
11381 int reg_size = TARGET_32BIT ? 4 : 8;
11383 HOST_WIDE_INT non_fixed_size;
11385 /* Zero all fields portably. */
11390 /* Cache value so we don't rescan instruction chain over and over. */
11391 if (cfun->machine->insn_chain_scanned_p == 0)
11393 cfun->machine->insn_chain_scanned_p = 1;
11394 info_ptr->spe_64bit_regs_used = (int) spe_func_has_64bit_regs_p ();
11398 /* Select which calling sequence. */
11399 info_ptr->abi = DEFAULT_ABI;
11401 /* Calculate which registers need to be saved & save area size. */
11402 info_ptr->first_gp_reg_save = first_reg_to_save ();
11403 /* Assume that we will have to save RS6000_PIC_OFFSET_TABLE_REGNUM,
11404 even if it currently looks like we won't. */
11405 if (((TARGET_TOC && TARGET_MINIMAL_TOC)
11406 || (flag_pic == 1 && DEFAULT_ABI == ABI_V4)
11407 || (flag_pic && DEFAULT_ABI == ABI_DARWIN))
11408 && info_ptr->first_gp_reg_save > RS6000_PIC_OFFSET_TABLE_REGNUM)
11409 info_ptr->gp_size = reg_size * (32 - RS6000_PIC_OFFSET_TABLE_REGNUM);
11411 info_ptr->gp_size = reg_size * (32 - info_ptr->first_gp_reg_save);
11413 /* For the SPE, we have an additional upper 32-bits on each GPR.
11414 Ideally we should save the entire 64-bits only when the upper
11415 half is used in SIMD instructions. Since we only record
11416 registers live (not the size they are used in), this proves
11417 difficult because we'd have to traverse the instruction chain at
11418 the right time, taking reload into account. This is a real pain,
11419 so we opt to save the GPRs in 64-bits always if but one register
11420 gets used in 64-bits. Otherwise, all the registers in the frame
11421 get saved in 32-bits.
11423 So... since when we save all GPRs (except the SP) in 64-bits, the
11424 traditional GP save area will be empty. */
11425 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
11426 info_ptr->gp_size = 0;
11428 info_ptr->first_fp_reg_save = first_fp_reg_to_save ();
11429 info_ptr->fp_size = 8 * (64 - info_ptr->first_fp_reg_save);
11431 info_ptr->first_altivec_reg_save = first_altivec_reg_to_save ();
11432 info_ptr->altivec_size = 16 * (LAST_ALTIVEC_REGNO + 1
11433 - info_ptr->first_altivec_reg_save);
11435 /* Does this function call anything? */
11436 info_ptr->calls_p = (! current_function_is_leaf
11437 || cfun->machine->ra_needs_full_frame);
11439 /* Determine if we need to save the link register. */
11440 if (rs6000_ra_ever_killed ()
11441 || (DEFAULT_ABI == ABI_AIX
11442 && current_function_profile
11443 && !TARGET_PROFILE_KERNEL)
11444 #ifdef TARGET_RELOCATABLE
11445 || (TARGET_RELOCATABLE && (get_pool_size () != 0))
11447 || (info_ptr->first_fp_reg_save != 64
11448 && !FP_SAVE_INLINE (info_ptr->first_fp_reg_save))
11449 || info_ptr->first_altivec_reg_save <= LAST_ALTIVEC_REGNO
11450 || (DEFAULT_ABI == ABI_V4 && current_function_calls_alloca)
11451 || (DEFAULT_ABI == ABI_DARWIN
11453 && current_function_uses_pic_offset_table)
11454 || info_ptr->calls_p)
11456 info_ptr->lr_save_p = 1;
11457 regs_ever_live[LINK_REGISTER_REGNUM] = 1;
11460 /* Determine if we need to save the condition code registers. */
11461 if (regs_ever_live[CR2_REGNO]
11462 || regs_ever_live[CR3_REGNO]
11463 || regs_ever_live[CR4_REGNO])
11465 info_ptr->cr_save_p = 1;
11466 if (DEFAULT_ABI == ABI_V4)
11467 info_ptr->cr_size = reg_size;
11470 /* If the current function calls __builtin_eh_return, then we need
11471 to allocate stack space for registers that will hold data for
11472 the exception handler. */
11473 if (current_function_calls_eh_return)
11476 for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM; ++i)
11479 /* SPE saves EH registers in 64-bits. */
11480 ehrd_size = i * (TARGET_SPE_ABI
11481 && info_ptr->spe_64bit_regs_used != 0
11482 ? UNITS_PER_SPE_WORD : UNITS_PER_WORD);
11487 /* Determine various sizes. */
11488 info_ptr->reg_size = reg_size;
11489 info_ptr->fixed_size = RS6000_SAVE_AREA;
11490 info_ptr->varargs_size = RS6000_VARARGS_AREA;
11491 info_ptr->vars_size = RS6000_ALIGN (get_frame_size (), 8);
11492 info_ptr->parm_size = RS6000_ALIGN (current_function_outgoing_args_size,
11493 TARGET_ALTIVEC ? 16 : 8);
11495 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
11496 info_ptr->spe_gp_size = 8 * (32 - info_ptr->first_gp_reg_save);
11498 info_ptr->spe_gp_size = 0;
11500 if (TARGET_ALTIVEC_ABI)
11501 info_ptr->vrsave_mask = compute_vrsave_mask ();
11503 info_ptr->vrsave_mask = 0;
11505 if (TARGET_ALTIVEC_VRSAVE && info_ptr->vrsave_mask)
11506 info_ptr->vrsave_size = 4;
11508 info_ptr->vrsave_size = 0;
11510 /* Calculate the offsets. */
11511 switch (DEFAULT_ABI)
11519 info_ptr->fp_save_offset = - info_ptr->fp_size;
11520 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
11522 if (TARGET_ALTIVEC_ABI)
11524 info_ptr->vrsave_save_offset
11525 = info_ptr->gp_save_offset - info_ptr->vrsave_size;
11527 /* Align stack so vector save area is on a quadword boundary. */
11528 if (info_ptr->altivec_size != 0)
11529 info_ptr->altivec_padding_size
11530 = 16 - (-info_ptr->vrsave_save_offset % 16);
11532 info_ptr->altivec_padding_size = 0;
11534 info_ptr->altivec_save_offset
11535 = info_ptr->vrsave_save_offset
11536 - info_ptr->altivec_padding_size
11537 - info_ptr->altivec_size;
11539 /* Adjust for AltiVec case. */
11540 info_ptr->ehrd_offset = info_ptr->altivec_save_offset - ehrd_size;
11543 info_ptr->ehrd_offset = info_ptr->gp_save_offset - ehrd_size;
11544 info_ptr->cr_save_offset = reg_size; /* first word when 64-bit. */
11545 info_ptr->lr_save_offset = 2*reg_size;
11549 info_ptr->fp_save_offset = - info_ptr->fp_size;
11550 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
11551 info_ptr->cr_save_offset = info_ptr->gp_save_offset - info_ptr->cr_size;
11553 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
11555 /* Align stack so SPE GPR save area is aligned on a
11556 double-word boundary. */
11557 if (info_ptr->spe_gp_size != 0)
11558 info_ptr->spe_padding_size
11559 = 8 - (-info_ptr->cr_save_offset % 8);
11561 info_ptr->spe_padding_size = 0;
11563 info_ptr->spe_gp_save_offset
11564 = info_ptr->cr_save_offset
11565 - info_ptr->spe_padding_size
11566 - info_ptr->spe_gp_size;
11568 /* Adjust for SPE case. */
11569 info_ptr->toc_save_offset
11570 = info_ptr->spe_gp_save_offset - info_ptr->toc_size;
11572 else if (TARGET_ALTIVEC_ABI)
11574 info_ptr->vrsave_save_offset
11575 = info_ptr->cr_save_offset - info_ptr->vrsave_size;
11577 /* Align stack so vector save area is on a quadword boundary. */
11578 if (info_ptr->altivec_size != 0)
11579 info_ptr->altivec_padding_size
11580 = 16 - (-info_ptr->vrsave_save_offset % 16);
11582 info_ptr->altivec_padding_size = 0;
11584 info_ptr->altivec_save_offset
11585 = info_ptr->vrsave_save_offset
11586 - info_ptr->altivec_padding_size
11587 - info_ptr->altivec_size;
11589 /* Adjust for AltiVec case. */
11590 info_ptr->toc_save_offset
11591 = info_ptr->altivec_save_offset - info_ptr->toc_size;
11594 info_ptr->toc_save_offset = info_ptr->cr_save_offset - info_ptr->toc_size;
11595 info_ptr->ehrd_offset = info_ptr->toc_save_offset - ehrd_size;
11596 info_ptr->lr_save_offset = reg_size;
11600 info_ptr->save_size = RS6000_ALIGN (info_ptr->fp_size
11601 + info_ptr->gp_size
11602 + info_ptr->altivec_size
11603 + info_ptr->altivec_padding_size
11604 + info_ptr->spe_gp_size
11605 + info_ptr->spe_padding_size
11607 + info_ptr->cr_size
11608 + info_ptr->lr_size
11609 + info_ptr->vrsave_size
11610 + info_ptr->toc_size,
11611 (TARGET_ALTIVEC_ABI || ABI_DARWIN)
11614 non_fixed_size = (info_ptr->vars_size
11615 + info_ptr->parm_size
11616 + info_ptr->save_size
11617 + info_ptr->varargs_size);
11619 info_ptr->total_size = RS6000_ALIGN (non_fixed_size + info_ptr->fixed_size,
11620 ABI_STACK_BOUNDARY / BITS_PER_UNIT);
11622 /* Determine if we need to allocate any stack frame:
11624 For AIX we need to push the stack if a frame pointer is needed
11625 (because the stack might be dynamically adjusted), if we are
11626 debugging, if we make calls, or if the sum of fp_save, gp_save,
11627 and local variables are more than the space needed to save all
11628 non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
11629 + 18*8 = 288 (GPR13 reserved).
11631 For V.4 we don't have the stack cushion that AIX uses, but assume
11632 that the debugger can handle stackless frames. */
11634 if (info_ptr->calls_p)
11635 info_ptr->push_p = 1;
11637 else if (DEFAULT_ABI == ABI_V4)
11638 info_ptr->push_p = non_fixed_size != 0;
11640 else if (frame_pointer_needed)
11641 info_ptr->push_p = 1;
11643 else if (TARGET_XCOFF && write_symbols != NO_DEBUG)
11644 info_ptr->push_p = 1;
11647 info_ptr->push_p = non_fixed_size > (TARGET_32BIT ? 220 : 288);
11649 /* Zero offsets if we're not saving those registers. */
11650 if (info_ptr->fp_size == 0)
11651 info_ptr->fp_save_offset = 0;
11653 if (info_ptr->gp_size == 0)
11654 info_ptr->gp_save_offset = 0;
11656 if (! TARGET_ALTIVEC_ABI || info_ptr->altivec_size == 0)
11657 info_ptr->altivec_save_offset = 0;
11659 if (! TARGET_ALTIVEC_ABI || info_ptr->vrsave_mask == 0)
11660 info_ptr->vrsave_save_offset = 0;
11662 if (! TARGET_SPE_ABI
11663 || info_ptr->spe_64bit_regs_used == 0
11664 || info_ptr->spe_gp_size == 0)
11665 info_ptr->spe_gp_save_offset = 0;
11667 if (! info_ptr->lr_save_p)
11668 info_ptr->lr_save_offset = 0;
11670 if (! info_ptr->cr_save_p)
11671 info_ptr->cr_save_offset = 0;
11673 if (! info_ptr->toc_save_p)
11674 info_ptr->toc_save_offset = 0;
11679 /* Return true if the current function uses any GPRs in 64-bit SIMD
11683 spe_func_has_64bit_regs_p (void)
11687 /* Functions that save and restore all the call-saved registers will
11688 need to save/restore the registers in 64-bits. */
11689 if (current_function_calls_eh_return
11690 || current_function_calls_setjmp
11691 || current_function_has_nonlocal_goto)
11694 insns = get_insns ();
11696 for (insn = NEXT_INSN (insns); insn != NULL_RTX; insn = NEXT_INSN (insn))
11702 i = PATTERN (insn);
11703 if (GET_CODE (i) == SET
11704 && SPE_VECTOR_MODE (GET_MODE (SET_SRC (i))))
11713 debug_stack_info (rs6000_stack_t *info)
11715 const char *abi_string;
11718 info = rs6000_stack_info ();
11720 fprintf (stderr, "\nStack information for function %s:\n",
11721 ((current_function_decl && DECL_NAME (current_function_decl))
11722 ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
11727 default: abi_string = "Unknown"; break;
11728 case ABI_NONE: abi_string = "NONE"; break;
11729 case ABI_AIX: abi_string = "AIX"; break;
11730 case ABI_DARWIN: abi_string = "Darwin"; break;
11731 case ABI_V4: abi_string = "V.4"; break;
11734 fprintf (stderr, "\tABI = %5s\n", abi_string);
11736 if (TARGET_ALTIVEC_ABI)
11737 fprintf (stderr, "\tALTIVEC ABI extensions enabled.\n");
11739 if (TARGET_SPE_ABI)
11740 fprintf (stderr, "\tSPE ABI extensions enabled.\n");
11742 if (info->first_gp_reg_save != 32)
11743 fprintf (stderr, "\tfirst_gp_reg_save = %5d\n", info->first_gp_reg_save);
11745 if (info->first_fp_reg_save != 64)
11746 fprintf (stderr, "\tfirst_fp_reg_save = %5d\n", info->first_fp_reg_save);
11748 if (info->first_altivec_reg_save <= LAST_ALTIVEC_REGNO)
11749 fprintf (stderr, "\tfirst_altivec_reg_save = %5d\n",
11750 info->first_altivec_reg_save);
11752 if (info->lr_save_p)
11753 fprintf (stderr, "\tlr_save_p = %5d\n", info->lr_save_p);
11755 if (info->cr_save_p)
11756 fprintf (stderr, "\tcr_save_p = %5d\n", info->cr_save_p);
11758 if (info->toc_save_p)
11759 fprintf (stderr, "\ttoc_save_p = %5d\n", info->toc_save_p);
11761 if (info->vrsave_mask)
11762 fprintf (stderr, "\tvrsave_mask = 0x%x\n", info->vrsave_mask);
11765 fprintf (stderr, "\tpush_p = %5d\n", info->push_p);
11768 fprintf (stderr, "\tcalls_p = %5d\n", info->calls_p);
11770 if (info->gp_save_offset)
11771 fprintf (stderr, "\tgp_save_offset = %5d\n", info->gp_save_offset);
11773 if (info->fp_save_offset)
11774 fprintf (stderr, "\tfp_save_offset = %5d\n", info->fp_save_offset);
11776 if (info->altivec_save_offset)
11777 fprintf (stderr, "\taltivec_save_offset = %5d\n",
11778 info->altivec_save_offset);
11780 if (info->spe_gp_save_offset)
11781 fprintf (stderr, "\tspe_gp_save_offset = %5d\n",
11782 info->spe_gp_save_offset);
11784 if (info->vrsave_save_offset)
11785 fprintf (stderr, "\tvrsave_save_offset = %5d\n",
11786 info->vrsave_save_offset);
11788 if (info->lr_save_offset)
11789 fprintf (stderr, "\tlr_save_offset = %5d\n", info->lr_save_offset);
11791 if (info->cr_save_offset)
11792 fprintf (stderr, "\tcr_save_offset = %5d\n", info->cr_save_offset);
11794 if (info->toc_save_offset)
11795 fprintf (stderr, "\ttoc_save_offset = %5d\n", info->toc_save_offset);
11797 if (info->varargs_save_offset)
11798 fprintf (stderr, "\tvarargs_save_offset = %5d\n", info->varargs_save_offset);
11800 if (info->total_size)
11801 fprintf (stderr, "\ttotal_size = "HOST_WIDE_INT_PRINT_DEC"\n",
11804 if (info->varargs_size)
11805 fprintf (stderr, "\tvarargs_size = %5d\n", info->varargs_size);
11807 if (info->vars_size)
11808 fprintf (stderr, "\tvars_size = "HOST_WIDE_INT_PRINT_DEC"\n",
11811 if (info->parm_size)
11812 fprintf (stderr, "\tparm_size = %5d\n", info->parm_size);
11814 if (info->fixed_size)
11815 fprintf (stderr, "\tfixed_size = %5d\n", info->fixed_size);
11818 fprintf (stderr, "\tgp_size = %5d\n", info->gp_size);
11820 if (info->spe_gp_size)
11821 fprintf (stderr, "\tspe_gp_size = %5d\n", info->spe_gp_size);
11824 fprintf (stderr, "\tfp_size = %5d\n", info->fp_size);
11826 if (info->altivec_size)
11827 fprintf (stderr, "\taltivec_size = %5d\n", info->altivec_size);
11829 if (info->vrsave_size)
11830 fprintf (stderr, "\tvrsave_size = %5d\n", info->vrsave_size);
11832 if (info->altivec_padding_size)
11833 fprintf (stderr, "\taltivec_padding_size= %5d\n",
11834 info->altivec_padding_size);
11836 if (info->spe_padding_size)
11837 fprintf (stderr, "\tspe_padding_size = %5d\n",
11838 info->spe_padding_size);
11841 fprintf (stderr, "\tlr_size = %5d\n", info->lr_size);
11844 fprintf (stderr, "\tcr_size = %5d\n", info->cr_size);
11846 if (info->toc_size)
11847 fprintf (stderr, "\ttoc_size = %5d\n", info->toc_size);
11849 if (info->save_size)
11850 fprintf (stderr, "\tsave_size = %5d\n", info->save_size);
11852 if (info->reg_size != 4)
11853 fprintf (stderr, "\treg_size = %5d\n", info->reg_size);
11855 fprintf (stderr, "\n");
11859 rs6000_return_addr (int count, rtx frame)
11861 /* Currently we don't optimize very well between prolog and body
11862 code and for PIC code the code can be actually quite bad, so
11863 don't try to be too clever here. */
11864 if (count != 0 || (DEFAULT_ABI != ABI_AIX && flag_pic))
11866 cfun->machine->ra_needs_full_frame = 1;
11873 plus_constant (copy_to_reg
11874 (gen_rtx_MEM (Pmode,
11875 memory_address (Pmode, frame))),
11876 RETURN_ADDRESS_OFFSET)));
11879 cfun->machine->ra_need_lr = 1;
11880 return get_hard_reg_initial_val (Pmode, LINK_REGISTER_REGNUM);
11883 /* Say whether a function is a candidate for sibcall handling or not.
11884 We do not allow indirect calls to be optimized into sibling calls.
11885 Also, we can't do it if there are any vector parameters; there's
11886 nowhere to put the VRsave code so it works; note that functions with
11887 vector parameters are required to have a prototype, so the argument
11888 type info must be available here. (The tail recursion case can work
11889 with vector parameters, but there's no way to distinguish here.) */
11891 rs6000_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
11896 if (TARGET_ALTIVEC_VRSAVE)
11898 for (type = TYPE_ARG_TYPES (TREE_TYPE (decl));
11899 type; type = TREE_CHAIN (type))
11901 if (TREE_CODE (TREE_VALUE (type)) == VECTOR_TYPE)
11905 if (DEFAULT_ABI == ABI_DARWIN
11906 || (*targetm.binds_local_p) (decl))
11908 tree attr_list = TYPE_ATTRIBUTES (TREE_TYPE (decl));
11910 if (!lookup_attribute ("longcall", attr_list)
11911 || lookup_attribute ("shortcall", attr_list))
11919 rs6000_ra_ever_killed (void)
11925 if (current_function_is_thunk)
11928 /* regs_ever_live has LR marked as used if any sibcalls are present,
11929 but this should not force saving and restoring in the
11930 pro/epilogue. Likewise, reg_set_between_p thinks a sibcall
11931 clobbers LR, so that is inappropriate. */
11933 /* Also, the prologue can generate a store into LR that
11934 doesn't really count, like this:
11937 bcl to set PIC register
11941 When we're called from the epilogue, we need to avoid counting
11942 this as a store. */
11944 push_topmost_sequence ();
11945 top = get_insns ();
11946 pop_topmost_sequence ();
11947 reg = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
11949 for (insn = NEXT_INSN (top); insn != NULL_RTX; insn = NEXT_INSN (insn))
11953 if (FIND_REG_INC_NOTE (insn, reg))
11955 else if (GET_CODE (insn) == CALL_INSN
11956 && !SIBLING_CALL_P (insn))
11958 else if (set_of (reg, insn) != NULL_RTX
11959 && !prologue_epilogue_contains (insn))
11966 /* Add a REG_MAYBE_DEAD note to the insn. */
11968 rs6000_maybe_dead (rtx insn)
11970 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD,
11975 /* Emit instructions needed to load the TOC register.
11976 This is only needed when TARGET_TOC, TARGET_MINIMAL_TOC, and there is
11977 a constant pool; or for SVR4 -fpic. */
11980 rs6000_emit_load_toc_table (int fromprolog)
11983 dest = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
11985 if (TARGET_ELF && DEFAULT_ABI == ABI_V4 && flag_pic == 1)
11987 rtx temp = (fromprolog
11988 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
11989 : gen_reg_rtx (Pmode));
11990 insn = emit_insn (gen_load_toc_v4_pic_si (temp));
11992 rs6000_maybe_dead (insn);
11993 insn = emit_move_insn (dest, temp);
11995 rs6000_maybe_dead (insn);
11997 else if (TARGET_ELF && DEFAULT_ABI != ABI_AIX && flag_pic == 2)
12000 rtx tempLR = (fromprolog
12001 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
12002 : gen_reg_rtx (Pmode));
12003 rtx temp0 = (fromprolog
12004 ? gen_rtx_REG (Pmode, 0)
12005 : gen_reg_rtx (Pmode));
12008 /* possibly create the toc section */
12009 if (! toc_initialized)
12012 function_section (current_function_decl);
12019 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
12020 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
12022 ASM_GENERATE_INTERNAL_LABEL (buf, "LCL", rs6000_pic_labelno);
12023 symL = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
12025 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1 (tempLR,
12027 rs6000_maybe_dead (emit_move_insn (dest, tempLR));
12028 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_2 (temp0, dest,
12035 static int reload_toc_labelno = 0;
12037 tocsym = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
12039 ASM_GENERATE_INTERNAL_LABEL (buf, "LCG", reload_toc_labelno++);
12040 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
12042 emit_insn (gen_load_toc_v4_PIC_1b (tempLR, symF, tocsym));
12043 emit_move_insn (dest, tempLR);
12044 emit_move_insn (temp0, gen_rtx_MEM (Pmode, dest));
12046 insn = emit_insn (gen_addsi3 (dest, temp0, dest));
12048 rs6000_maybe_dead (insn);
12050 else if (TARGET_ELF && !TARGET_AIX && flag_pic == 0 && TARGET_MINIMAL_TOC)
12052 /* This is for AIX code running in non-PIC ELF32. */
12055 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
12056 realsym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
12058 insn = emit_insn (gen_elf_high (dest, realsym));
12060 rs6000_maybe_dead (insn);
12061 insn = emit_insn (gen_elf_low (dest, dest, realsym));
12063 rs6000_maybe_dead (insn);
12065 else if (DEFAULT_ABI == ABI_AIX)
12068 insn = emit_insn (gen_load_toc_aix_si (dest));
12070 insn = emit_insn (gen_load_toc_aix_di (dest));
12072 rs6000_maybe_dead (insn);
12078 /* Emit instructions to restore the link register after determining where
12079 its value has been stored. */
12082 rs6000_emit_eh_reg_restore (rtx source, rtx scratch)
12084 rs6000_stack_t *info = rs6000_stack_info ();
12087 operands[0] = source;
12088 operands[1] = scratch;
12090 if (info->lr_save_p)
12092 rtx frame_rtx = stack_pointer_rtx;
12093 HOST_WIDE_INT sp_offset = 0;
12096 if (frame_pointer_needed
12097 || current_function_calls_alloca
12098 || info->total_size > 32767)
12100 emit_move_insn (operands[1], gen_rtx_MEM (Pmode, frame_rtx));
12101 frame_rtx = operands[1];
12103 else if (info->push_p)
12104 sp_offset = info->total_size;
12106 tmp = plus_constant (frame_rtx, info->lr_save_offset + sp_offset);
12107 tmp = gen_rtx_MEM (Pmode, tmp);
12108 emit_move_insn (tmp, operands[0]);
12111 emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM), operands[0]);
12114 static GTY(()) int set = -1;
12117 get_TOC_alias_set (void)
12120 set = new_alias_set ();
12124 /* This returns nonzero if the current function uses the TOC. This is
12125 determined by the presence of (use (unspec ... UNSPEC_TOC)), which
12126 is generated by the ABI_V4 load_toc_* patterns. */
12133 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
12136 rtx pat = PATTERN (insn);
12139 if (GET_CODE (pat) == PARALLEL)
12140 for (i = 0; i < XVECLEN (pat, 0); i++)
12142 rtx sub = XVECEXP (pat, 0, i);
12143 if (GET_CODE (sub) == USE)
12145 sub = XEXP (sub, 0);
12146 if (GET_CODE (sub) == UNSPEC
12147 && XINT (sub, 1) == UNSPEC_TOC)
12157 create_TOC_reference (rtx symbol)
12159 return gen_rtx_PLUS (Pmode,
12160 gen_rtx_REG (Pmode, TOC_REGISTER),
12161 gen_rtx_CONST (Pmode,
12162 gen_rtx_MINUS (Pmode, symbol,
12163 gen_rtx_SYMBOL_REF (Pmode, toc_label_name))));
12166 /* If _Unwind_* has been called from within the same module,
12167 toc register is not guaranteed to be saved to 40(1) on function
12168 entry. Save it there in that case. */
12171 rs6000_aix_emit_builtin_unwind_init (void)
12174 rtx stack_top = gen_reg_rtx (Pmode);
12175 rtx opcode_addr = gen_reg_rtx (Pmode);
12176 rtx opcode = gen_reg_rtx (SImode);
12177 rtx tocompare = gen_reg_rtx (SImode);
12178 rtx no_toc_save_needed = gen_label_rtx ();
12180 mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
12181 emit_move_insn (stack_top, mem);
12183 mem = gen_rtx_MEM (Pmode,
12184 gen_rtx_PLUS (Pmode, stack_top,
12185 GEN_INT (2 * GET_MODE_SIZE (Pmode))));
12186 emit_move_insn (opcode_addr, mem);
12187 emit_move_insn (opcode, gen_rtx_MEM (SImode, opcode_addr));
12188 emit_move_insn (tocompare, gen_int_mode (TARGET_32BIT ? 0x80410014
12189 : 0xE8410028, SImode));
12191 do_compare_rtx_and_jump (opcode, tocompare, EQ, 1,
12192 SImode, NULL_RTX, NULL_RTX,
12193 no_toc_save_needed);
12195 mem = gen_rtx_MEM (Pmode,
12196 gen_rtx_PLUS (Pmode, stack_top,
12197 GEN_INT (5 * GET_MODE_SIZE (Pmode))));
12198 emit_move_insn (mem, gen_rtx_REG (Pmode, 2));
12199 emit_label (no_toc_save_needed);
12202 /* This ties together stack memory (MEM with an alias set of
12203 rs6000_sr_alias_set) and the change to the stack pointer. */
12206 rs6000_emit_stack_tie (void)
12208 rtx mem = gen_rtx_MEM (BLKmode, gen_rtx_REG (Pmode, STACK_POINTER_REGNUM));
12210 set_mem_alias_set (mem, rs6000_sr_alias_set);
12211 emit_insn (gen_stack_tie (mem));
12214 /* Emit the correct code for allocating stack space, as insns.
12215 If COPY_R12, make sure a copy of the old frame is left in r12.
12216 The generated code may use hard register 0 as a temporary. */
12219 rs6000_emit_allocate_stack (HOST_WIDE_INT size, int copy_r12)
12222 rtx stack_reg = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
12223 rtx tmp_reg = gen_rtx_REG (Pmode, 0);
12224 rtx todec = GEN_INT (-size);
12226 if (current_function_limit_stack)
12228 if (REG_P (stack_limit_rtx)
12229 && REGNO (stack_limit_rtx) > 1
12230 && REGNO (stack_limit_rtx) <= 31)
12232 emit_insn (TARGET_32BIT
12233 ? gen_addsi3 (tmp_reg,
12236 : gen_adddi3 (tmp_reg,
12240 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
12243 else if (GET_CODE (stack_limit_rtx) == SYMBOL_REF
12245 && DEFAULT_ABI == ABI_V4)
12247 rtx toload = gen_rtx_CONST (VOIDmode,
12248 gen_rtx_PLUS (Pmode,
12252 emit_insn (gen_elf_high (tmp_reg, toload));
12253 emit_insn (gen_elf_low (tmp_reg, tmp_reg, toload));
12254 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
12258 warning ("stack limit expression is not supported");
12261 if (copy_r12 || ! TARGET_UPDATE)
12262 emit_move_insn (gen_rtx_REG (Pmode, 12), stack_reg);
12268 /* Need a note here so that try_split doesn't get confused. */
12269 if (get_last_insn() == NULL_RTX)
12270 emit_note (NOTE_INSN_DELETED);
12271 insn = emit_move_insn (tmp_reg, todec);
12272 try_split (PATTERN (insn), insn, 0);
12276 insn = emit_insn (TARGET_32BIT
12277 ? gen_movsi_update (stack_reg, stack_reg,
12279 : gen_movdi_update (stack_reg, stack_reg,
12280 todec, stack_reg));
12284 insn = emit_insn (TARGET_32BIT
12285 ? gen_addsi3 (stack_reg, stack_reg, todec)
12286 : gen_adddi3 (stack_reg, stack_reg, todec));
12287 emit_move_insn (gen_rtx_MEM (Pmode, stack_reg),
12288 gen_rtx_REG (Pmode, 12));
12291 RTX_FRAME_RELATED_P (insn) = 1;
12293 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
12294 gen_rtx_SET (VOIDmode, stack_reg,
12295 gen_rtx_PLUS (Pmode, stack_reg,
12300 /* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
12301 with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
12302 is not NULL. It would be nice if dwarf2out_frame_debug_expr could
12303 deduce these equivalences by itself so it wasn't necessary to hold
12304 its hand so much. */
12307 rs6000_frame_related (rtx insn, rtx reg, HOST_WIDE_INT val,
12308 rtx reg2, rtx rreg)
12312 /* copy_rtx will not make unique copies of registers, so we need to
12313 ensure we don't have unwanted sharing here. */
12315 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
12318 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
12320 real = copy_rtx (PATTERN (insn));
12322 if (reg2 != NULL_RTX)
12323 real = replace_rtx (real, reg2, rreg);
12325 real = replace_rtx (real, reg,
12326 gen_rtx_PLUS (Pmode, gen_rtx_REG (Pmode,
12327 STACK_POINTER_REGNUM),
12330 /* We expect that 'real' is either a SET or a PARALLEL containing
12331 SETs (and possibly other stuff). In a PARALLEL, all the SETs
12332 are important so they all have to be marked RTX_FRAME_RELATED_P. */
12334 if (GET_CODE (real) == SET)
12338 temp = simplify_rtx (SET_SRC (set));
12340 SET_SRC (set) = temp;
12341 temp = simplify_rtx (SET_DEST (set));
12343 SET_DEST (set) = temp;
12344 if (GET_CODE (SET_DEST (set)) == MEM)
12346 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
12348 XEXP (SET_DEST (set), 0) = temp;
12351 else if (GET_CODE (real) == PARALLEL)
12354 for (i = 0; i < XVECLEN (real, 0); i++)
12355 if (GET_CODE (XVECEXP (real, 0, i)) == SET)
12357 rtx set = XVECEXP (real, 0, i);
12359 temp = simplify_rtx (SET_SRC (set));
12361 SET_SRC (set) = temp;
12362 temp = simplify_rtx (SET_DEST (set));
12364 SET_DEST (set) = temp;
12365 if (GET_CODE (SET_DEST (set)) == MEM)
12367 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
12369 XEXP (SET_DEST (set), 0) = temp;
12371 RTX_FRAME_RELATED_P (set) = 1;
12378 real = spe_synthesize_frame_save (real);
12380 RTX_FRAME_RELATED_P (insn) = 1;
12381 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
12386 /* Given an SPE frame note, return a PARALLEL of SETs with the
12387 original note, plus a synthetic register save. */
12390 spe_synthesize_frame_save (rtx real)
12392 rtx synth, offset, reg, real2;
12394 if (GET_CODE (real) != SET
12395 || GET_MODE (SET_SRC (real)) != V2SImode)
12398 /* For the SPE, registers saved in 64-bits, get a PARALLEL for their
12399 frame related note. The parallel contains a set of the register
12400 being saved, and another set to a synthetic register (n+1200).
12401 This is so we can differentiate between 64-bit and 32-bit saves.
12402 Words cannot describe this nastiness. */
12404 if (GET_CODE (SET_DEST (real)) != MEM
12405 || GET_CODE (XEXP (SET_DEST (real), 0)) != PLUS
12406 || GET_CODE (SET_SRC (real)) != REG)
12410 (set (mem (plus (reg x) (const y)))
12413 (set (mem (plus (reg x) (const y+4)))
12417 real2 = copy_rtx (real);
12418 PUT_MODE (SET_DEST (real2), SImode);
12419 reg = SET_SRC (real2);
12420 real2 = replace_rtx (real2, reg, gen_rtx_REG (SImode, REGNO (reg)));
12421 synth = copy_rtx (real2);
12423 if (BYTES_BIG_ENDIAN)
12425 offset = XEXP (XEXP (SET_DEST (real2), 0), 1);
12426 real2 = replace_rtx (real2, offset, GEN_INT (INTVAL (offset) + 4));
12429 reg = SET_SRC (synth);
12431 synth = replace_rtx (synth, reg,
12432 gen_rtx_REG (SImode, REGNO (reg) + 1200));
12434 offset = XEXP (XEXP (SET_DEST (synth), 0), 1);
12435 synth = replace_rtx (synth, offset,
12436 GEN_INT (INTVAL (offset)
12437 + (BYTES_BIG_ENDIAN ? 0 : 4)));
12439 RTX_FRAME_RELATED_P (synth) = 1;
12440 RTX_FRAME_RELATED_P (real2) = 1;
12441 if (BYTES_BIG_ENDIAN)
12442 real = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, synth, real2));
12444 real = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, real2, synth));
12449 /* Returns an insn that has a vrsave set operation with the
12450 appropriate CLOBBERs. */
12453 generate_set_vrsave (rtx reg, rs6000_stack_t *info, int epiloguep)
12456 rtx insn, clobs[TOTAL_ALTIVEC_REGS + 1];
12457 rtx vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
12460 = gen_rtx_SET (VOIDmode,
12462 gen_rtx_UNSPEC_VOLATILE (SImode,
12463 gen_rtvec (2, reg, vrsave),
12468 /* We need to clobber the registers in the mask so the scheduler
12469 does not move sets to VRSAVE before sets of AltiVec registers.
12471 However, if the function receives nonlocal gotos, reload will set
12472 all call saved registers live. We will end up with:
12474 (set (reg 999) (mem))
12475 (parallel [ (set (reg vrsave) (unspec blah))
12476 (clobber (reg 999))])
12478 The clobber will cause the store into reg 999 to be dead, and
12479 flow will attempt to delete an epilogue insn. In this case, we
12480 need an unspec use/set of the register. */
12482 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
12483 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
12485 if (!epiloguep || call_used_regs [i])
12486 clobs[nclobs++] = gen_rtx_CLOBBER (VOIDmode,
12487 gen_rtx_REG (V4SImode, i));
12490 rtx reg = gen_rtx_REG (V4SImode, i);
12493 = gen_rtx_SET (VOIDmode,
12495 gen_rtx_UNSPEC (V4SImode,
12496 gen_rtvec (1, reg), 27));
12500 insn = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nclobs));
12502 for (i = 0; i < nclobs; ++i)
12503 XVECEXP (insn, 0, i) = clobs[i];
12508 /* Save a register into the frame, and emit RTX_FRAME_RELATED_P notes.
12509 Save REGNO into [FRAME_REG + OFFSET] in mode MODE. */
12512 emit_frame_save (rtx frame_reg, rtx frame_ptr, enum machine_mode mode,
12513 unsigned int regno, int offset, HOST_WIDE_INT total_size)
12515 rtx reg, offset_rtx, insn, mem, addr, int_rtx;
12516 rtx replacea, replaceb;
12518 int_rtx = GEN_INT (offset);
12520 /* Some cases that need register indexed addressing. */
12521 if ((TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
12523 && SPE_VECTOR_MODE (mode)
12524 && !SPE_CONST_OFFSET_OK (offset)))
12526 /* Whomever calls us must make sure r11 is available in the
12527 flow path of instructions in the prologue. */
12528 offset_rtx = gen_rtx_REG (Pmode, 11);
12529 emit_move_insn (offset_rtx, int_rtx);
12531 replacea = offset_rtx;
12532 replaceb = int_rtx;
12536 offset_rtx = int_rtx;
12537 replacea = NULL_RTX;
12538 replaceb = NULL_RTX;
12541 reg = gen_rtx_REG (mode, regno);
12542 addr = gen_rtx_PLUS (Pmode, frame_reg, offset_rtx);
12543 mem = gen_rtx_MEM (mode, addr);
12544 set_mem_alias_set (mem, rs6000_sr_alias_set);
12546 insn = emit_move_insn (mem, reg);
12548 rs6000_frame_related (insn, frame_ptr, total_size, replacea, replaceb);
12551 /* Emit an offset memory reference suitable for a frame store, while
12552 converting to a valid addressing mode. */
12555 gen_frame_mem_offset (enum machine_mode mode, rtx reg, int offset)
12557 rtx int_rtx, offset_rtx;
12559 int_rtx = GEN_INT (offset);
12561 if (TARGET_SPE_ABI && SPE_VECTOR_MODE (mode))
12563 offset_rtx = gen_rtx_REG (Pmode, FIXED_SCRATCH);
12564 emit_move_insn (offset_rtx, int_rtx);
12567 offset_rtx = int_rtx;
12569 return gen_rtx_MEM (mode, gen_rtx_PLUS (Pmode, reg, offset_rtx));
12572 /* Emit function prologue as insns. */
12575 rs6000_emit_prologue (void)
12577 rs6000_stack_t *info = rs6000_stack_info ();
12578 enum machine_mode reg_mode = Pmode;
12579 int reg_size = TARGET_32BIT ? 4 : 8;
12580 rtx sp_reg_rtx = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
12581 rtx frame_ptr_rtx = gen_rtx_REG (Pmode, 12);
12582 rtx frame_reg_rtx = sp_reg_rtx;
12583 rtx cr_save_rtx = NULL_RTX;
12585 int saving_FPRs_inline;
12586 int using_store_multiple;
12587 HOST_WIDE_INT sp_offset = 0;
12589 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
12591 reg_mode = V2SImode;
12595 using_store_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
12596 && (!TARGET_SPE_ABI
12597 || info->spe_64bit_regs_used == 0)
12598 && info->first_gp_reg_save < 31);
12599 saving_FPRs_inline = (info->first_fp_reg_save == 64
12600 || FP_SAVE_INLINE (info->first_fp_reg_save)
12601 || current_function_calls_eh_return
12602 || cfun->machine->ra_need_lr);
12604 /* For V.4, update stack before we do any saving and set back pointer. */
12606 && (DEFAULT_ABI == ABI_V4
12607 || current_function_calls_eh_return))
12609 if (info->total_size < 32767)
12610 sp_offset = info->total_size;
12612 frame_reg_rtx = frame_ptr_rtx;
12613 rs6000_emit_allocate_stack (info->total_size,
12614 (frame_reg_rtx != sp_reg_rtx
12615 && (info->cr_save_p
12617 || info->first_fp_reg_save < 64
12618 || info->first_gp_reg_save < 32
12620 if (frame_reg_rtx != sp_reg_rtx)
12621 rs6000_emit_stack_tie ();
12624 /* Save AltiVec registers if needed. */
12625 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
12629 /* There should be a non inline version of this, for when we
12630 are saving lots of vector registers. */
12631 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
12632 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
12634 rtx areg, savereg, mem;
12637 offset = info->altivec_save_offset + sp_offset
12638 + 16 * (i - info->first_altivec_reg_save);
12640 savereg = gen_rtx_REG (V4SImode, i);
12642 areg = gen_rtx_REG (Pmode, 0);
12643 emit_move_insn (areg, GEN_INT (offset));
12645 /* AltiVec addressing mode is [reg+reg]. */
12646 mem = gen_rtx_MEM (V4SImode,
12647 gen_rtx_PLUS (Pmode, frame_reg_rtx, areg));
12649 set_mem_alias_set (mem, rs6000_sr_alias_set);
12651 insn = emit_move_insn (mem, savereg);
12653 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
12654 areg, GEN_INT (offset));
12658 /* VRSAVE is a bit vector representing which AltiVec registers
12659 are used. The OS uses this to determine which vector
12660 registers to save on a context switch. We need to save
12661 VRSAVE on the stack frame, add whatever AltiVec registers we
12662 used in this function, and do the corresponding magic in the
12665 if (TARGET_ALTIVEC && TARGET_ALTIVEC_VRSAVE
12666 && info->vrsave_mask != 0)
12668 rtx reg, mem, vrsave;
12671 /* Get VRSAVE onto a GPR. */
12672 reg = gen_rtx_REG (SImode, 12);
12673 vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
12675 emit_insn (gen_get_vrsave_internal (reg));
12677 emit_insn (gen_rtx_SET (VOIDmode, reg, vrsave));
12680 offset = info->vrsave_save_offset + sp_offset;
12682 = gen_rtx_MEM (SImode,
12683 gen_rtx_PLUS (Pmode, frame_reg_rtx, GEN_INT (offset)));
12684 set_mem_alias_set (mem, rs6000_sr_alias_set);
12685 insn = emit_move_insn (mem, reg);
12687 /* Include the registers in the mask. */
12688 emit_insn (gen_iorsi3 (reg, reg, GEN_INT ((int) info->vrsave_mask)));
12690 insn = emit_insn (generate_set_vrsave (reg, info, 0));
12693 /* If we use the link register, get it into r0. */
12694 if (info->lr_save_p)
12696 insn = emit_move_insn (gen_rtx_REG (Pmode, 0),
12697 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
12698 RTX_FRAME_RELATED_P (insn) = 1;
12701 /* If we need to save CR, put it into r12. */
12702 if (info->cr_save_p && frame_reg_rtx != frame_ptr_rtx)
12706 cr_save_rtx = gen_rtx_REG (SImode, 12);
12707 insn = emit_insn (gen_movesi_from_cr (cr_save_rtx));
12708 RTX_FRAME_RELATED_P (insn) = 1;
12709 /* Now, there's no way that dwarf2out_frame_debug_expr is going
12710 to understand '(unspec:SI [(reg:CC 68) ...] UNSPEC_MOVESI_FROM_CR)'.
12711 But that's OK. All we have to do is specify that _one_ condition
12712 code register is saved in this stack slot. The thrower's epilogue
12713 will then restore all the call-saved registers.
12714 We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux. */
12715 set = gen_rtx_SET (VOIDmode, cr_save_rtx,
12716 gen_rtx_REG (SImode, CR2_REGNO));
12717 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
12722 /* Do any required saving of fpr's. If only one or two to save, do
12723 it ourselves. Otherwise, call function. */
12724 if (saving_FPRs_inline)
12727 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
12728 if ((regs_ever_live[info->first_fp_reg_save+i]
12729 && ! call_used_regs[info->first_fp_reg_save+i]))
12730 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, DFmode,
12731 info->first_fp_reg_save + i,
12732 info->fp_save_offset + sp_offset + 8 * i,
12735 else if (info->first_fp_reg_save != 64)
12739 const char *alloc_rname;
12741 p = rtvec_alloc (2 + 64 - info->first_fp_reg_save);
12743 RTVEC_ELT (p, 0) = gen_rtx_CLOBBER (VOIDmode,
12744 gen_rtx_REG (Pmode,
12745 LINK_REGISTER_REGNUM));
12746 sprintf (rname, "%s%d%s", SAVE_FP_PREFIX,
12747 info->first_fp_reg_save - 32, SAVE_FP_SUFFIX);
12748 alloc_rname = ggc_strdup (rname);
12749 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
12750 gen_rtx_SYMBOL_REF (Pmode,
12752 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
12754 rtx addr, reg, mem;
12755 reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
12756 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12757 GEN_INT (info->fp_save_offset
12758 + sp_offset + 8*i));
12759 mem = gen_rtx_MEM (DFmode, addr);
12760 set_mem_alias_set (mem, rs6000_sr_alias_set);
12762 RTVEC_ELT (p, i + 2) = gen_rtx_SET (VOIDmode, mem, reg);
12764 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
12765 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
12766 NULL_RTX, NULL_RTX);
12769 /* Save GPRs. This is done as a PARALLEL if we are using
12770 the store-multiple instructions. */
12771 if (using_store_multiple)
12775 p = rtvec_alloc (32 - info->first_gp_reg_save);
12776 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
12778 rtx addr, reg, mem;
12779 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
12780 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12781 GEN_INT (info->gp_save_offset
12784 mem = gen_rtx_MEM (reg_mode, addr);
12785 set_mem_alias_set (mem, rs6000_sr_alias_set);
12787 RTVEC_ELT (p, i) = gen_rtx_SET (VOIDmode, mem, reg);
12789 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
12790 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
12791 NULL_RTX, NULL_RTX);
12796 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
12797 if ((regs_ever_live[info->first_gp_reg_save+i]
12798 && (! call_used_regs[info->first_gp_reg_save+i]
12799 || (i+info->first_gp_reg_save
12800 == RS6000_PIC_OFFSET_TABLE_REGNUM
12801 && TARGET_TOC && TARGET_MINIMAL_TOC)))
12802 || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
12803 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
12804 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
12806 rtx addr, reg, mem;
12807 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
12809 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
12811 int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
12814 if (!SPE_CONST_OFFSET_OK (offset))
12816 b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
12817 emit_move_insn (b, GEN_INT (offset));
12820 b = GEN_INT (offset);
12822 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
12823 mem = gen_rtx_MEM (V2SImode, addr);
12824 set_mem_alias_set (mem, rs6000_sr_alias_set);
12825 insn = emit_move_insn (mem, reg);
12827 if (GET_CODE (b) == CONST_INT)
12828 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
12829 NULL_RTX, NULL_RTX);
12831 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
12832 b, GEN_INT (offset));
12836 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12837 GEN_INT (info->gp_save_offset
12840 mem = gen_rtx_MEM (reg_mode, addr);
12841 set_mem_alias_set (mem, rs6000_sr_alias_set);
12843 insn = emit_move_insn (mem, reg);
12844 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
12845 NULL_RTX, NULL_RTX);
12850 /* ??? There's no need to emit actual instructions here, but it's the
12851 easiest way to get the frame unwind information emitted. */
12852 if (current_function_calls_eh_return)
12854 unsigned int i, regno;
12856 /* In AIX ABI we need to pretend we save r2 here. */
12859 rtx addr, reg, mem;
12861 reg = gen_rtx_REG (reg_mode, 2);
12862 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12863 GEN_INT (sp_offset + 5 * reg_size));
12864 mem = gen_rtx_MEM (reg_mode, addr);
12865 set_mem_alias_set (mem, rs6000_sr_alias_set);
12867 insn = emit_move_insn (mem, reg);
12868 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
12869 NULL_RTX, NULL_RTX);
12870 PATTERN (insn) = gen_blockage ();
12875 regno = EH_RETURN_DATA_REGNO (i);
12876 if (regno == INVALID_REGNUM)
12879 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, reg_mode, regno,
12880 info->ehrd_offset + sp_offset
12881 + reg_size * (int) i,
12886 /* Save lr if we used it. */
12887 if (info->lr_save_p)
12889 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12890 GEN_INT (info->lr_save_offset + sp_offset));
12891 rtx reg = gen_rtx_REG (Pmode, 0);
12892 rtx mem = gen_rtx_MEM (Pmode, addr);
12893 /* This should not be of rs6000_sr_alias_set, because of
12894 __builtin_return_address. */
12896 insn = emit_move_insn (mem, reg);
12897 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
12898 NULL_RTX, NULL_RTX);
12901 /* Save CR if we use any that must be preserved. */
12902 if (info->cr_save_p)
12904 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12905 GEN_INT (info->cr_save_offset + sp_offset));
12906 rtx mem = gen_rtx_MEM (SImode, addr);
12907 /* See the large comment above about why CR2_REGNO is used. */
12908 rtx magic_eh_cr_reg = gen_rtx_REG (SImode, CR2_REGNO);
12910 set_mem_alias_set (mem, rs6000_sr_alias_set);
12912 /* If r12 was used to hold the original sp, copy cr into r0 now
12914 if (REGNO (frame_reg_rtx) == 12)
12918 cr_save_rtx = gen_rtx_REG (SImode, 0);
12919 insn = emit_insn (gen_movesi_from_cr (cr_save_rtx));
12920 RTX_FRAME_RELATED_P (insn) = 1;
12921 set = gen_rtx_SET (VOIDmode, cr_save_rtx, magic_eh_cr_reg);
12922 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
12927 insn = emit_move_insn (mem, cr_save_rtx);
12929 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
12930 NULL_RTX, NULL_RTX);
12933 /* Update stack and set back pointer unless this is V.4,
12934 for which it was done previously. */
12936 && !(DEFAULT_ABI == ABI_V4 || current_function_calls_eh_return))
12937 rs6000_emit_allocate_stack (info->total_size, FALSE);
12939 /* Set frame pointer, if needed. */
12940 if (frame_pointer_needed)
12942 insn = emit_move_insn (gen_rtx_REG (Pmode, FRAME_POINTER_REGNUM),
12944 RTX_FRAME_RELATED_P (insn) = 1;
12947 /* If we are using RS6000_PIC_OFFSET_TABLE_REGNUM, we need to set it up. */
12948 if ((TARGET_TOC && TARGET_MINIMAL_TOC && get_pool_size () != 0)
12949 || (DEFAULT_ABI == ABI_V4 && flag_pic == 1
12950 && regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM]))
12952 /* If emit_load_toc_table will use the link register, we need to save
12953 it. We use R12 for this purpose because emit_load_toc_table
12954 can use register 0. This allows us to use a plain 'blr' to return
12955 from the procedure more often. */
12956 int save_LR_around_toc_setup = (TARGET_ELF
12957 && DEFAULT_ABI != ABI_AIX
12959 && ! info->lr_save_p
12960 && EXIT_BLOCK_PTR->pred != NULL);
12961 if (save_LR_around_toc_setup)
12963 rtx lr = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
12965 insn = emit_move_insn (frame_ptr_rtx, lr);
12966 rs6000_maybe_dead (insn);
12967 RTX_FRAME_RELATED_P (insn) = 1;
12969 rs6000_emit_load_toc_table (TRUE);
12971 insn = emit_move_insn (lr, frame_ptr_rtx);
12972 rs6000_maybe_dead (insn);
12973 RTX_FRAME_RELATED_P (insn) = 1;
12976 rs6000_emit_load_toc_table (TRUE);
12980 if (DEFAULT_ABI == ABI_DARWIN
12981 && flag_pic && current_function_uses_pic_offset_table)
12983 rtx lr = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
12984 rtx src = machopic_function_base_sym ();
12986 rs6000_maybe_dead (emit_insn (gen_load_macho_picbase (lr, src)));
12988 insn = emit_move_insn (gen_rtx_REG (Pmode,
12989 RS6000_PIC_OFFSET_TABLE_REGNUM),
12991 rs6000_maybe_dead (insn);
12996 /* Write function prologue. */
12999 rs6000_output_function_prologue (FILE *file,
13000 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
13002 rs6000_stack_t *info = rs6000_stack_info ();
13004 if (TARGET_DEBUG_STACK)
13005 debug_stack_info (info);
13007 /* Write .extern for any function we will call to save and restore
13009 if (info->first_fp_reg_save < 64
13010 && !FP_SAVE_INLINE (info->first_fp_reg_save))
13011 fprintf (file, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
13012 SAVE_FP_PREFIX, info->first_fp_reg_save - 32, SAVE_FP_SUFFIX,
13013 RESTORE_FP_PREFIX, info->first_fp_reg_save - 32,
13014 RESTORE_FP_SUFFIX);
13016 /* Write .extern for AIX common mode routines, if needed. */
13017 if (! TARGET_POWER && ! TARGET_POWERPC && ! common_mode_defined)
13019 fputs ("\t.extern __mulh\n", file);
13020 fputs ("\t.extern __mull\n", file);
13021 fputs ("\t.extern __divss\n", file);
13022 fputs ("\t.extern __divus\n", file);
13023 fputs ("\t.extern __quoss\n", file);
13024 fputs ("\t.extern __quous\n", file);
13025 common_mode_defined = 1;
13028 if (! HAVE_prologue)
13032 /* A NOTE_INSN_DELETED is supposed to be at the start and end of
13033 the "toplevel" insn chain. */
13034 emit_note (NOTE_INSN_DELETED);
13035 rs6000_emit_prologue ();
13036 emit_note (NOTE_INSN_DELETED);
13038 /* Expand INSN_ADDRESSES so final() doesn't crash. */
13042 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
13044 INSN_ADDRESSES_NEW (insn, addr);
13049 if (TARGET_DEBUG_STACK)
13050 debug_rtx_list (get_insns (), 100);
13051 final (get_insns (), file, FALSE, FALSE);
13055 rs6000_pic_labelno++;
13058 /* Emit function epilogue as insns.
13060 At present, dwarf2out_frame_debug_expr doesn't understand
13061 register restores, so we don't bother setting RTX_FRAME_RELATED_P
13062 anywhere in the epilogue. Most of the insns below would in any case
13063 need special notes to explain where r11 is in relation to the stack. */
13066 rs6000_emit_epilogue (int sibcall)
13068 rs6000_stack_t *info;
13069 int restoring_FPRs_inline;
13070 int using_load_multiple;
13071 int using_mfcr_multiple;
13072 int use_backchain_to_restore_sp;
13074 rtx sp_reg_rtx = gen_rtx_REG (Pmode, 1);
13075 rtx frame_reg_rtx = sp_reg_rtx;
13076 enum machine_mode reg_mode = Pmode;
13077 int reg_size = TARGET_32BIT ? 4 : 8;
13080 info = rs6000_stack_info ();
13082 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
13084 reg_mode = V2SImode;
13088 using_load_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
13089 && (!TARGET_SPE_ABI
13090 || info->spe_64bit_regs_used == 0)
13091 && info->first_gp_reg_save < 31);
13092 restoring_FPRs_inline = (sibcall
13093 || current_function_calls_eh_return
13094 || info->first_fp_reg_save == 64
13095 || FP_SAVE_INLINE (info->first_fp_reg_save));
13096 use_backchain_to_restore_sp = (frame_pointer_needed
13097 || current_function_calls_alloca
13098 || info->total_size > 32767);
13099 using_mfcr_multiple = (rs6000_cpu == PROCESSOR_PPC601
13100 || rs6000_cpu == PROCESSOR_PPC603
13101 || rs6000_cpu == PROCESSOR_PPC750
13104 /* If we have a frame pointer, a call to alloca, or a large stack
13105 frame, restore the old stack pointer using the backchain. Otherwise,
13106 we know what size to update it with. */
13107 if (use_backchain_to_restore_sp)
13109 /* Under V.4, don't reset the stack pointer until after we're done
13110 loading the saved registers. */
13111 if (DEFAULT_ABI == ABI_V4)
13112 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
13114 emit_move_insn (frame_reg_rtx,
13115 gen_rtx_MEM (Pmode, sp_reg_rtx));
13118 else if (info->push_p)
13120 if (DEFAULT_ABI == ABI_V4
13121 || current_function_calls_eh_return)
13122 sp_offset = info->total_size;
13125 emit_insn (TARGET_32BIT
13126 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
13127 GEN_INT (info->total_size))
13128 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
13129 GEN_INT (info->total_size)));
13133 /* Restore AltiVec registers if needed. */
13134 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
13138 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
13139 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
13141 rtx addr, areg, mem;
13143 areg = gen_rtx_REG (Pmode, 0);
13145 (areg, GEN_INT (info->altivec_save_offset
13147 + 16 * (i - info->first_altivec_reg_save)));
13149 /* AltiVec addressing mode is [reg+reg]. */
13150 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
13151 mem = gen_rtx_MEM (V4SImode, addr);
13152 set_mem_alias_set (mem, rs6000_sr_alias_set);
13154 emit_move_insn (gen_rtx_REG (V4SImode, i), mem);
13158 /* Restore VRSAVE if needed. */
13159 if (TARGET_ALTIVEC && TARGET_ALTIVEC_VRSAVE
13160 && info->vrsave_mask != 0)
13162 rtx addr, mem, reg;
13164 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
13165 GEN_INT (info->vrsave_save_offset + sp_offset));
13166 mem = gen_rtx_MEM (SImode, addr);
13167 set_mem_alias_set (mem, rs6000_sr_alias_set);
13168 reg = gen_rtx_REG (SImode, 12);
13169 emit_move_insn (reg, mem);
13171 emit_insn (generate_set_vrsave (reg, info, 1));
13174 /* Get the old lr if we saved it. */
13175 if (info->lr_save_p)
13177 rtx mem = gen_frame_mem_offset (Pmode, frame_reg_rtx,
13178 info->lr_save_offset + sp_offset);
13180 set_mem_alias_set (mem, rs6000_sr_alias_set);
13182 emit_move_insn (gen_rtx_REG (Pmode, 0), mem);
13185 /* Get the old cr if we saved it. */
13186 if (info->cr_save_p)
13188 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
13189 GEN_INT (info->cr_save_offset + sp_offset));
13190 rtx mem = gen_rtx_MEM (SImode, addr);
13192 set_mem_alias_set (mem, rs6000_sr_alias_set);
13194 emit_move_insn (gen_rtx_REG (SImode, 12), mem);
13197 /* Set LR here to try to overlap restores below. */
13198 if (info->lr_save_p)
13199 emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
13200 gen_rtx_REG (Pmode, 0));
13202 /* Load exception handler data registers, if needed. */
13203 if (current_function_calls_eh_return)
13205 unsigned int i, regno;
13209 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
13210 GEN_INT (sp_offset + 5 * reg_size));
13211 rtx mem = gen_rtx_MEM (reg_mode, addr);
13213 set_mem_alias_set (mem, rs6000_sr_alias_set);
13215 emit_move_insn (gen_rtx_REG (reg_mode, 2), mem);
13222 regno = EH_RETURN_DATA_REGNO (i);
13223 if (regno == INVALID_REGNUM)
13226 mem = gen_frame_mem_offset (reg_mode, frame_reg_rtx,
13227 info->ehrd_offset + sp_offset
13228 + reg_size * (int) i);
13229 set_mem_alias_set (mem, rs6000_sr_alias_set);
13231 emit_move_insn (gen_rtx_REG (reg_mode, regno), mem);
13235 /* Restore GPRs. This is done as a PARALLEL if we are using
13236 the load-multiple instructions. */
13237 if (using_load_multiple)
13240 p = rtvec_alloc (32 - info->first_gp_reg_save);
13241 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
13243 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
13244 GEN_INT (info->gp_save_offset
13247 rtx mem = gen_rtx_MEM (reg_mode, addr);
13249 set_mem_alias_set (mem, rs6000_sr_alias_set);
13252 gen_rtx_SET (VOIDmode,
13253 gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
13256 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
13259 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
13260 if ((regs_ever_live[info->first_gp_reg_save+i]
13261 && (! call_used_regs[info->first_gp_reg_save+i]
13262 || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
13263 && TARGET_TOC && TARGET_MINIMAL_TOC)))
13264 || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
13265 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
13266 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
13268 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
13269 GEN_INT (info->gp_save_offset
13272 rtx mem = gen_rtx_MEM (reg_mode, addr);
13274 /* Restore 64-bit quantities for SPE. */
13275 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
13277 int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
13280 if (!SPE_CONST_OFFSET_OK (offset))
13282 b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
13283 emit_move_insn (b, GEN_INT (offset));
13286 b = GEN_INT (offset);
13288 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
13289 mem = gen_rtx_MEM (V2SImode, addr);
13292 set_mem_alias_set (mem, rs6000_sr_alias_set);
13294 emit_move_insn (gen_rtx_REG (reg_mode,
13295 info->first_gp_reg_save + i), mem);
13298 /* Restore fpr's if we need to do it without calling a function. */
13299 if (restoring_FPRs_inline)
13300 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
13301 if ((regs_ever_live[info->first_fp_reg_save+i]
13302 && ! call_used_regs[info->first_fp_reg_save+i]))
13305 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
13306 GEN_INT (info->fp_save_offset
13309 mem = gen_rtx_MEM (DFmode, addr);
13310 set_mem_alias_set (mem, rs6000_sr_alias_set);
13312 emit_move_insn (gen_rtx_REG (DFmode,
13313 info->first_fp_reg_save + i),
13317 /* If we saved cr, restore it here. Just those that were used. */
13318 if (info->cr_save_p)
13320 rtx r12_rtx = gen_rtx_REG (SImode, 12);
13323 if (using_mfcr_multiple)
13325 for (i = 0; i < 8; i++)
13326 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
13332 if (using_mfcr_multiple && count > 1)
13337 p = rtvec_alloc (count);
13340 for (i = 0; i < 8; i++)
13341 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
13343 rtvec r = rtvec_alloc (2);
13344 RTVEC_ELT (r, 0) = r12_rtx;
13345 RTVEC_ELT (r, 1) = GEN_INT (1 << (7-i));
13346 RTVEC_ELT (p, ndx) =
13347 gen_rtx_SET (VOIDmode, gen_rtx_REG (CCmode, CR0_REGNO+i),
13348 gen_rtx_UNSPEC (CCmode, r, UNSPEC_MOVESI_TO_CR));
13351 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
13356 for (i = 0; i < 8; i++)
13357 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
13359 emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode,
13365 /* If this is V.4, unwind the stack pointer after all of the loads
13366 have been done. We need to emit a block here so that sched
13367 doesn't decide to move the sp change before the register restores
13368 (which may not have any obvious dependency on the stack). This
13369 doesn't hurt performance, because there is no scheduling that can
13370 be done after this point. */
13371 if (DEFAULT_ABI == ABI_V4
13372 || current_function_calls_eh_return)
13374 if (frame_reg_rtx != sp_reg_rtx)
13375 rs6000_emit_stack_tie ();
13377 if (use_backchain_to_restore_sp)
13379 emit_move_insn (sp_reg_rtx, frame_reg_rtx);
13381 else if (sp_offset != 0)
13383 emit_insn (TARGET_32BIT
13384 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
13385 GEN_INT (sp_offset))
13386 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
13387 GEN_INT (sp_offset)));
13391 if (current_function_calls_eh_return)
13393 rtx sa = EH_RETURN_STACKADJ_RTX;
13394 emit_insn (TARGET_32BIT
13395 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx, sa)
13396 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx, sa));
13402 if (! restoring_FPRs_inline)
13403 p = rtvec_alloc (3 + 64 - info->first_fp_reg_save);
13405 p = rtvec_alloc (2);
13407 RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
13408 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
13409 gen_rtx_REG (Pmode,
13410 LINK_REGISTER_REGNUM));
13412 /* If we have to restore more than two FP registers, branch to the
13413 restore function. It will return to our caller. */
13414 if (! restoring_FPRs_inline)
13418 const char *alloc_rname;
13420 sprintf (rname, "%s%d%s", RESTORE_FP_PREFIX,
13421 info->first_fp_reg_save - 32, RESTORE_FP_SUFFIX);
13422 alloc_rname = ggc_strdup (rname);
13423 RTVEC_ELT (p, 2) = gen_rtx_USE (VOIDmode,
13424 gen_rtx_SYMBOL_REF (Pmode,
13427 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
13430 addr = gen_rtx_PLUS (Pmode, sp_reg_rtx,
13431 GEN_INT (info->fp_save_offset + 8*i));
13432 mem = gen_rtx_MEM (DFmode, addr);
13433 set_mem_alias_set (mem, rs6000_sr_alias_set);
13435 RTVEC_ELT (p, i+3) =
13436 gen_rtx_SET (VOIDmode,
13437 gen_rtx_REG (DFmode, info->first_fp_reg_save + i),
13442 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
13446 /* Write function epilogue. */
13449 rs6000_output_function_epilogue (FILE *file,
13450 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
13452 rs6000_stack_t *info = rs6000_stack_info ();
13454 if (! HAVE_epilogue)
13456 rtx insn = get_last_insn ();
13457 /* If the last insn was a BARRIER, we don't have to write anything except
13458 the trace table. */
13459 if (GET_CODE (insn) == NOTE)
13460 insn = prev_nonnote_insn (insn);
13461 if (insn == 0 || GET_CODE (insn) != BARRIER)
13463 /* This is slightly ugly, but at least we don't have two
13464 copies of the epilogue-emitting code. */
13467 /* A NOTE_INSN_DELETED is supposed to be at the start
13468 and end of the "toplevel" insn chain. */
13469 emit_note (NOTE_INSN_DELETED);
13470 rs6000_emit_epilogue (FALSE);
13471 emit_note (NOTE_INSN_DELETED);
13473 /* Expand INSN_ADDRESSES so final() doesn't crash. */
13477 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
13479 INSN_ADDRESSES_NEW (insn, addr);
13484 if (TARGET_DEBUG_STACK)
13485 debug_rtx_list (get_insns (), 100);
13486 final (get_insns (), file, FALSE, FALSE);
13492 macho_branch_islands ();
13493 /* Mach-O doesn't support labels at the end of objects, so if
13494 it looks like we might want one, insert a NOP. */
13496 rtx insn = get_last_insn ();
13499 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_DELETED_LABEL)
13500 insn = PREV_INSN (insn);
13504 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_DELETED_LABEL)))
13505 fputs ("\tnop\n", file);
13509 /* Output a traceback table here. See /usr/include/sys/debug.h for info
13512 We don't output a traceback table if -finhibit-size-directive was
13513 used. The documentation for -finhibit-size-directive reads
13514 ``don't output a @code{.size} assembler directive, or anything
13515 else that would cause trouble if the function is split in the
13516 middle, and the two halves are placed at locations far apart in
13517 memory.'' The traceback table has this property, since it
13518 includes the offset from the start of the function to the
13519 traceback table itself.
13521 System V.4 Powerpc's (and the embedded ABI derived from it) use a
13522 different traceback table. */
13523 if (DEFAULT_ABI == ABI_AIX && ! flag_inhibit_size_directive
13524 && rs6000_traceback != traceback_none)
13526 const char *fname = NULL;
13527 const char *language_string = lang_hooks.name;
13528 int fixed_parms = 0, float_parms = 0, parm_info = 0;
13530 int optional_tbtab;
13532 if (rs6000_traceback == traceback_full)
13533 optional_tbtab = 1;
13534 else if (rs6000_traceback == traceback_part)
13535 optional_tbtab = 0;
13537 optional_tbtab = !optimize_size && !TARGET_ELF;
13539 if (optional_tbtab)
13541 fname = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
13542 while (*fname == '.') /* V.4 encodes . in the name */
13545 /* Need label immediately before tbtab, so we can compute
13546 its offset from the function start. */
13547 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
13548 ASM_OUTPUT_LABEL (file, fname);
13551 /* The .tbtab pseudo-op can only be used for the first eight
13552 expressions, since it can't handle the possibly variable
13553 length fields that follow. However, if you omit the optional
13554 fields, the assembler outputs zeros for all optional fields
13555 anyways, giving each variable length field is minimum length
13556 (as defined in sys/debug.h). Thus we can not use the .tbtab
13557 pseudo-op at all. */
13559 /* An all-zero word flags the start of the tbtab, for debuggers
13560 that have to find it by searching forward from the entry
13561 point or from the current pc. */
13562 fputs ("\t.long 0\n", file);
13564 /* Tbtab format type. Use format type 0. */
13565 fputs ("\t.byte 0,", file);
13567 /* Language type. Unfortunately, there does not seem to be any
13568 official way to discover the language being compiled, so we
13569 use language_string.
13570 C is 0. Fortran is 1. Pascal is 2. Ada is 3. C++ is 9.
13571 Java is 13. Objective-C is 14. */
13572 if (! strcmp (language_string, "GNU C"))
13574 else if (! strcmp (language_string, "GNU F77")
13575 || ! strcmp (language_string, "GNU F95"))
13577 else if (! strcmp (language_string, "GNU Pascal"))
13579 else if (! strcmp (language_string, "GNU Ada"))
13581 else if (! strcmp (language_string, "GNU C++"))
13583 else if (! strcmp (language_string, "GNU Java"))
13585 else if (! strcmp (language_string, "GNU Objective-C"))
13589 fprintf (file, "%d,", i);
13591 /* 8 single bit fields: global linkage (not set for C extern linkage,
13592 apparently a PL/I convention?), out-of-line epilogue/prologue, offset
13593 from start of procedure stored in tbtab, internal function, function
13594 has controlled storage, function has no toc, function uses fp,
13595 function logs/aborts fp operations. */
13596 /* Assume that fp operations are used if any fp reg must be saved. */
13597 fprintf (file, "%d,",
13598 (optional_tbtab << 5) | ((info->first_fp_reg_save != 64) << 1));
13600 /* 6 bitfields: function is interrupt handler, name present in
13601 proc table, function calls alloca, on condition directives
13602 (controls stack walks, 3 bits), saves condition reg, saves
13604 /* The `function calls alloca' bit seems to be set whenever reg 31 is
13605 set up as a frame pointer, even when there is no alloca call. */
13606 fprintf (file, "%d,",
13607 ((optional_tbtab << 6)
13608 | ((optional_tbtab & frame_pointer_needed) << 5)
13609 | (info->cr_save_p << 1)
13610 | (info->lr_save_p)));
13612 /* 3 bitfields: saves backchain, fixup code, number of fpr saved
13614 fprintf (file, "%d,",
13615 (info->push_p << 7) | (64 - info->first_fp_reg_save));
13617 /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits). */
13618 fprintf (file, "%d,", (32 - first_reg_to_save ()));
13620 if (optional_tbtab)
13622 /* Compute the parameter info from the function decl argument
13625 int next_parm_info_bit = 31;
13627 for (decl = DECL_ARGUMENTS (current_function_decl);
13628 decl; decl = TREE_CHAIN (decl))
13630 rtx parameter = DECL_INCOMING_RTL (decl);
13631 enum machine_mode mode = GET_MODE (parameter);
13633 if (GET_CODE (parameter) == REG)
13635 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
13641 if (mode == SFmode)
13643 else if (mode == DFmode || mode == TFmode)
13648 /* If only one bit will fit, don't or in this entry. */
13649 if (next_parm_info_bit > 0)
13650 parm_info |= (bits << (next_parm_info_bit - 1));
13651 next_parm_info_bit -= 2;
13655 fixed_parms += ((GET_MODE_SIZE (mode)
13656 + (UNITS_PER_WORD - 1))
13658 next_parm_info_bit -= 1;
13664 /* Number of fixed point parameters. */
13665 /* This is actually the number of words of fixed point parameters; thus
13666 an 8 byte struct counts as 2; and thus the maximum value is 8. */
13667 fprintf (file, "%d,", fixed_parms);
13669 /* 2 bitfields: number of floating point parameters (7 bits), parameters
13671 /* This is actually the number of fp registers that hold parameters;
13672 and thus the maximum value is 13. */
13673 /* Set parameters on stack bit if parameters are not in their original
13674 registers, regardless of whether they are on the stack? Xlc
13675 seems to set the bit when not optimizing. */
13676 fprintf (file, "%d\n", ((float_parms << 1) | (! optimize)));
13678 if (! optional_tbtab)
13681 /* Optional fields follow. Some are variable length. */
13683 /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
13684 11 double float. */
13685 /* There is an entry for each parameter in a register, in the order that
13686 they occur in the parameter list. Any intervening arguments on the
13687 stack are ignored. If the list overflows a long (max possible length
13688 34 bits) then completely leave off all elements that don't fit. */
13689 /* Only emit this long if there was at least one parameter. */
13690 if (fixed_parms || float_parms)
13691 fprintf (file, "\t.long %d\n", parm_info);
13693 /* Offset from start of code to tb table. */
13694 fputs ("\t.long ", file);
13695 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
13697 RS6000_OUTPUT_BASENAME (file, fname);
13699 assemble_name (file, fname);
13701 fputs ("-.", file);
13703 RS6000_OUTPUT_BASENAME (file, fname);
13705 assemble_name (file, fname);
13709 /* Interrupt handler mask. */
13710 /* Omit this long, since we never set the interrupt handler bit
13713 /* Number of CTL (controlled storage) anchors. */
13714 /* Omit this long, since the has_ctl bit is never set above. */
13716 /* Displacement into stack of each CTL anchor. */
13717 /* Omit this list of longs, because there are no CTL anchors. */
13719 /* Length of function name. */
13722 fprintf (file, "\t.short %d\n", (int) strlen (fname));
13724 /* Function name. */
13725 assemble_string (fname, strlen (fname));
13727 /* Register for alloca automatic storage; this is always reg 31.
13728 Only emit this if the alloca bit was set above. */
13729 if (frame_pointer_needed)
13730 fputs ("\t.byte 31\n", file);
13732 fputs ("\t.align 2\n", file);
13736 /* A C compound statement that outputs the assembler code for a thunk
13737 function, used to implement C++ virtual function calls with
13738 multiple inheritance. The thunk acts as a wrapper around a virtual
13739 function, adjusting the implicit object parameter before handing
13740 control off to the real function.
13742 First, emit code to add the integer DELTA to the location that
13743 contains the incoming first argument. Assume that this argument
13744 contains a pointer, and is the one used to pass the `this' pointer
13745 in C++. This is the incoming argument *before* the function
13746 prologue, e.g. `%o0' on a sparc. The addition must preserve the
13747 values of all other incoming arguments.
13749 After the addition, emit code to jump to FUNCTION, which is a
13750 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does
13751 not touch the return address. Hence returning from FUNCTION will
13752 return to whoever called the current `thunk'.
13754 The effect must be as if FUNCTION had been called directly with the
13755 adjusted first argument. This macro is responsible for emitting
13756 all of the code for a thunk function; output_function_prologue()
13757 and output_function_epilogue() are not invoked.
13759 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already
13760 been extracted from it.) It might possibly be useful on some
13761 targets, but probably not.
13763 If you do not define this macro, the target-independent code in the
13764 C++ frontend will generate a less efficient heavyweight thunk that
13765 calls FUNCTION instead of jumping to it. The generic approach does
13766 not support varargs. */
13769 rs6000_output_mi_thunk (FILE *file, tree thunk_fndecl ATTRIBUTE_UNUSED,
13770 HOST_WIDE_INT delta, HOST_WIDE_INT vcall_offset,
13773 rtx this, insn, funexp;
13775 reload_completed = 1;
13776 epilogue_completed = 1;
13777 no_new_pseudos = 1;
13778 reset_block_changes ();
13780 /* Mark the end of the (empty) prologue. */
13781 emit_note (NOTE_INSN_PROLOGUE_END);
13783 /* Find the "this" pointer. If the function returns a structure,
13784 the structure return pointer is in r3. */
13785 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
13786 this = gen_rtx_REG (Pmode, 4);
13788 this = gen_rtx_REG (Pmode, 3);
13790 /* Apply the constant offset, if required. */
13793 rtx delta_rtx = GEN_INT (delta);
13794 emit_insn (TARGET_32BIT
13795 ? gen_addsi3 (this, this, delta_rtx)
13796 : gen_adddi3 (this, this, delta_rtx));
13799 /* Apply the offset from the vtable, if required. */
13802 rtx vcall_offset_rtx = GEN_INT (vcall_offset);
13803 rtx tmp = gen_rtx_REG (Pmode, 12);
13805 emit_move_insn (tmp, gen_rtx_MEM (Pmode, this));
13806 if (((unsigned HOST_WIDE_INT) vcall_offset) + 0x8000 >= 0x10000)
13808 emit_insn (TARGET_32BIT
13809 ? gen_addsi3 (tmp, tmp, vcall_offset_rtx)
13810 : gen_adddi3 (tmp, tmp, vcall_offset_rtx));
13811 emit_move_insn (tmp, gen_rtx_MEM (Pmode, tmp));
13815 rtx loc = gen_rtx_PLUS (Pmode, tmp, vcall_offset_rtx);
13817 emit_move_insn (tmp, gen_rtx_MEM (Pmode, loc));
13819 emit_insn (TARGET_32BIT
13820 ? gen_addsi3 (this, this, tmp)
13821 : gen_adddi3 (this, this, tmp));
13824 /* Generate a tail call to the target function. */
13825 if (!TREE_USED (function))
13827 assemble_external (function);
13828 TREE_USED (function) = 1;
13830 funexp = XEXP (DECL_RTL (function), 0);
13831 funexp = gen_rtx_MEM (FUNCTION_MODE, funexp);
13834 if (MACHOPIC_INDIRECT)
13835 funexp = machopic_indirect_call_target (funexp);
13838 /* gen_sibcall expects reload to convert scratch pseudo to LR so we must
13839 generate sibcall RTL explicitly to avoid constraint abort. */
13840 insn = emit_call_insn (
13841 gen_rtx_PARALLEL (VOIDmode,
13843 gen_rtx_CALL (VOIDmode,
13844 funexp, const0_rtx),
13845 gen_rtx_USE (VOIDmode, const0_rtx),
13846 gen_rtx_USE (VOIDmode,
13847 gen_rtx_REG (SImode,
13848 LINK_REGISTER_REGNUM)),
13849 gen_rtx_RETURN (VOIDmode))));
13850 SIBLING_CALL_P (insn) = 1;
13853 /* Run just enough of rest_of_compilation to get the insns emitted.
13854 There's not really enough bulk here to make other passes such as
13855 instruction scheduling worth while. Note that use_thunk calls
13856 assemble_start_function and assemble_end_function. */
13857 insn = get_insns ();
13858 insn_locators_initialize ();
13859 shorten_branches (insn);
13860 final_start_function (insn, file, 1);
13861 final (insn, file, 1, 0);
13862 final_end_function ();
13864 reload_completed = 0;
13865 epilogue_completed = 0;
13866 no_new_pseudos = 0;
13869 /* A quick summary of the various types of 'constant-pool tables'
13872 Target Flags Name One table per
13873 AIX (none) AIX TOC object file
13874 AIX -mfull-toc AIX TOC object file
13875 AIX -mminimal-toc AIX minimal TOC translation unit
13876 SVR4/EABI (none) SVR4 SDATA object file
13877 SVR4/EABI -fpic SVR4 pic object file
13878 SVR4/EABI -fPIC SVR4 PIC translation unit
13879 SVR4/EABI -mrelocatable EABI TOC function
13880 SVR4/EABI -maix AIX TOC object file
13881 SVR4/EABI -maix -mminimal-toc
13882 AIX minimal TOC translation unit
13884 Name Reg. Set by entries contains:
13885 made by addrs? fp? sum?
13887 AIX TOC 2 crt0 as Y option option
13888 AIX minimal TOC 30 prolog gcc Y Y option
13889 SVR4 SDATA 13 crt0 gcc N Y N
13890 SVR4 pic 30 prolog ld Y not yet N
13891 SVR4 PIC 30 prolog gcc Y option option
13892 EABI TOC 30 prolog gcc Y option option
13896 /* Hash functions for the hash table. */
13899 rs6000_hash_constant (rtx k)
13901 enum rtx_code code = GET_CODE (k);
13902 enum machine_mode mode = GET_MODE (k);
13903 unsigned result = (code << 3) ^ mode;
13904 const char *format;
13907 format = GET_RTX_FORMAT (code);
13908 flen = strlen (format);
13914 return result * 1231 + (unsigned) INSN_UID (XEXP (k, 0));
13917 if (mode != VOIDmode)
13918 return real_hash (CONST_DOUBLE_REAL_VALUE (k)) * result;
13930 for (; fidx < flen; fidx++)
13931 switch (format[fidx])
13936 const char *str = XSTR (k, fidx);
13937 len = strlen (str);
13938 result = result * 613 + len;
13939 for (i = 0; i < len; i++)
13940 result = result * 613 + (unsigned) str[i];
13945 result = result * 1231 + rs6000_hash_constant (XEXP (k, fidx));
13949 result = result * 613 + (unsigned) XINT (k, fidx);
13952 if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT))
13953 result = result * 613 + (unsigned) XWINT (k, fidx);
13957 for (i = 0; i < sizeof(HOST_WIDE_INT)/sizeof(unsigned); i++)
13958 result = result * 613 + (unsigned) (XWINT (k, fidx)
13972 toc_hash_function (const void *hash_entry)
13974 const struct toc_hash_struct *thc =
13975 (const struct toc_hash_struct *) hash_entry;
13976 return rs6000_hash_constant (thc->key) ^ thc->key_mode;
13979 /* Compare H1 and H2 for equivalence. */
13982 toc_hash_eq (const void *h1, const void *h2)
13984 rtx r1 = ((const struct toc_hash_struct *) h1)->key;
13985 rtx r2 = ((const struct toc_hash_struct *) h2)->key;
13987 if (((const struct toc_hash_struct *) h1)->key_mode
13988 != ((const struct toc_hash_struct *) h2)->key_mode)
13991 return rtx_equal_p (r1, r2);
13994 /* These are the names given by the C++ front-end to vtables, and
13995 vtable-like objects. Ideally, this logic should not be here;
13996 instead, there should be some programmatic way of inquiring as
13997 to whether or not an object is a vtable. */
13999 #define VTABLE_NAME_P(NAME) \
14000 (strncmp ("_vt.", name, strlen("_vt.")) == 0 \
14001 || strncmp ("_ZTV", name, strlen ("_ZTV")) == 0 \
14002 || strncmp ("_ZTT", name, strlen ("_ZTT")) == 0 \
14003 || strncmp ("_ZTI", name, strlen ("_ZTI")) == 0 \
14004 || strncmp ("_ZTC", name, strlen ("_ZTC")) == 0)
14007 rs6000_output_symbol_ref (FILE *file, rtx x)
14009 /* Currently C++ toc references to vtables can be emitted before it
14010 is decided whether the vtable is public or private. If this is
14011 the case, then the linker will eventually complain that there is
14012 a reference to an unknown section. Thus, for vtables only,
14013 we emit the TOC reference to reference the symbol and not the
14015 const char *name = XSTR (x, 0);
14017 if (VTABLE_NAME_P (name))
14019 RS6000_OUTPUT_BASENAME (file, name);
14022 assemble_name (file, name);
14025 /* Output a TOC entry. We derive the entry name from what is being
14029 output_toc (FILE *file, rtx x, int labelno, enum machine_mode mode)
14032 const char *name = buf;
14033 const char *real_name;
14040 /* When the linker won't eliminate them, don't output duplicate
14041 TOC entries (this happens on AIX if there is any kind of TOC,
14042 and on SVR4 under -fPIC or -mrelocatable). Don't do this for
14044 if (TARGET_TOC && GET_CODE (x) != LABEL_REF)
14046 struct toc_hash_struct *h;
14049 /* Create toc_hash_table. This can't be done at OVERRIDE_OPTIONS
14050 time because GGC is not initialized at that point. */
14051 if (toc_hash_table == NULL)
14052 toc_hash_table = htab_create_ggc (1021, toc_hash_function,
14053 toc_hash_eq, NULL);
14055 h = ggc_alloc (sizeof (*h));
14057 h->key_mode = mode;
14058 h->labelno = labelno;
14060 found = htab_find_slot (toc_hash_table, h, 1);
14061 if (*found == NULL)
14063 else /* This is indeed a duplicate.
14064 Set this label equal to that label. */
14066 fputs ("\t.set ", file);
14067 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
14068 fprintf (file, "%d,", labelno);
14069 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
14070 fprintf (file, "%d\n", ((*(const struct toc_hash_struct **)
14076 /* If we're going to put a double constant in the TOC, make sure it's
14077 aligned properly when strict alignment is on. */
14078 if (GET_CODE (x) == CONST_DOUBLE
14079 && STRICT_ALIGNMENT
14080 && GET_MODE_BITSIZE (mode) >= 64
14081 && ! (TARGET_NO_FP_IN_TOC && ! TARGET_MINIMAL_TOC)) {
14082 ASM_OUTPUT_ALIGN (file, 3);
14085 (*targetm.asm_out.internal_label) (file, "LC", labelno);
14087 /* Handle FP constants specially. Note that if we have a minimal
14088 TOC, things we put here aren't actually in the TOC, so we can allow
14090 if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == TFmode)
14092 REAL_VALUE_TYPE rv;
14095 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
14096 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);
14100 if (TARGET_MINIMAL_TOC)
14101 fputs (DOUBLE_INT_ASM_OP, file);
14103 fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
14104 k[0] & 0xffffffff, k[1] & 0xffffffff,
14105 k[2] & 0xffffffff, k[3] & 0xffffffff);
14106 fprintf (file, "0x%lx%08lx,0x%lx%08lx\n",
14107 k[0] & 0xffffffff, k[1] & 0xffffffff,
14108 k[2] & 0xffffffff, k[3] & 0xffffffff);
14113 if (TARGET_MINIMAL_TOC)
14114 fputs ("\t.long ", file);
14116 fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
14117 k[0] & 0xffffffff, k[1] & 0xffffffff,
14118 k[2] & 0xffffffff, k[3] & 0xffffffff);
14119 fprintf (file, "0x%lx,0x%lx,0x%lx,0x%lx\n",
14120 k[0] & 0xffffffff, k[1] & 0xffffffff,
14121 k[2] & 0xffffffff, k[3] & 0xffffffff);
14125 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
14127 REAL_VALUE_TYPE rv;
14130 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
14131 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
14135 if (TARGET_MINIMAL_TOC)
14136 fputs (DOUBLE_INT_ASM_OP, file);
14138 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
14139 k[0] & 0xffffffff, k[1] & 0xffffffff);
14140 fprintf (file, "0x%lx%08lx\n",
14141 k[0] & 0xffffffff, k[1] & 0xffffffff);
14146 if (TARGET_MINIMAL_TOC)
14147 fputs ("\t.long ", file);
14149 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
14150 k[0] & 0xffffffff, k[1] & 0xffffffff);
14151 fprintf (file, "0x%lx,0x%lx\n",
14152 k[0] & 0xffffffff, k[1] & 0xffffffff);
14156 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
14158 REAL_VALUE_TYPE rv;
14161 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
14162 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
14166 if (TARGET_MINIMAL_TOC)
14167 fputs (DOUBLE_INT_ASM_OP, file);
14169 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
14170 fprintf (file, "0x%lx00000000\n", l & 0xffffffff);
14175 if (TARGET_MINIMAL_TOC)
14176 fputs ("\t.long ", file);
14178 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
14179 fprintf (file, "0x%lx\n", l & 0xffffffff);
14183 else if (GET_MODE (x) == VOIDmode
14184 && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
14186 unsigned HOST_WIDE_INT low;
14187 HOST_WIDE_INT high;
14189 if (GET_CODE (x) == CONST_DOUBLE)
14191 low = CONST_DOUBLE_LOW (x);
14192 high = CONST_DOUBLE_HIGH (x);
14195 #if HOST_BITS_PER_WIDE_INT == 32
14198 high = (low & 0x80000000) ? ~0 : 0;
14202 low = INTVAL (x) & 0xffffffff;
14203 high = (HOST_WIDE_INT) INTVAL (x) >> 32;
14207 /* TOC entries are always Pmode-sized, but since this
14208 is a bigendian machine then if we're putting smaller
14209 integer constants in the TOC we have to pad them.
14210 (This is still a win over putting the constants in
14211 a separate constant pool, because then we'd have
14212 to have both a TOC entry _and_ the actual constant.)
14214 For a 32-bit target, CONST_INT values are loaded and shifted
14215 entirely within `low' and can be stored in one TOC entry. */
14217 if (TARGET_64BIT && POINTER_SIZE < GET_MODE_BITSIZE (mode))
14218 abort ();/* It would be easy to make this work, but it doesn't now. */
14220 if (POINTER_SIZE > GET_MODE_BITSIZE (mode))
14222 #if HOST_BITS_PER_WIDE_INT == 32
14223 lshift_double (low, high, POINTER_SIZE - GET_MODE_BITSIZE (mode),
14224 POINTER_SIZE, &low, &high, 0);
14227 low <<= POINTER_SIZE - GET_MODE_BITSIZE (mode);
14228 high = (HOST_WIDE_INT) low >> 32;
14235 if (TARGET_MINIMAL_TOC)
14236 fputs (DOUBLE_INT_ASM_OP, file);
14238 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
14239 (long) high & 0xffffffff, (long) low & 0xffffffff);
14240 fprintf (file, "0x%lx%08lx\n",
14241 (long) high & 0xffffffff, (long) low & 0xffffffff);
14246 if (POINTER_SIZE < GET_MODE_BITSIZE (mode))
14248 if (TARGET_MINIMAL_TOC)
14249 fputs ("\t.long ", file);
14251 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
14252 (long) high & 0xffffffff, (long) low & 0xffffffff);
14253 fprintf (file, "0x%lx,0x%lx\n",
14254 (long) high & 0xffffffff, (long) low & 0xffffffff);
14258 if (TARGET_MINIMAL_TOC)
14259 fputs ("\t.long ", file);
14261 fprintf (file, "\t.tc IS_%lx[TC],", (long) low & 0xffffffff);
14262 fprintf (file, "0x%lx\n", (long) low & 0xffffffff);
14268 if (GET_CODE (x) == CONST)
14270 if (GET_CODE (XEXP (x, 0)) != PLUS)
14273 base = XEXP (XEXP (x, 0), 0);
14274 offset = INTVAL (XEXP (XEXP (x, 0), 1));
14277 if (GET_CODE (base) == SYMBOL_REF)
14278 name = XSTR (base, 0);
14279 else if (GET_CODE (base) == LABEL_REF)
14280 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (XEXP (base, 0)));
14281 else if (GET_CODE (base) == CODE_LABEL)
14282 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (base));
14286 real_name = (*targetm.strip_name_encoding) (name);
14287 if (TARGET_MINIMAL_TOC)
14288 fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
14291 fprintf (file, "\t.tc %s", real_name);
14294 fprintf (file, ".N%d", - offset);
14296 fprintf (file, ".P%d", offset);
14298 fputs ("[TC],", file);
14301 /* Currently C++ toc references to vtables can be emitted before it
14302 is decided whether the vtable is public or private. If this is
14303 the case, then the linker will eventually complain that there is
14304 a TOC reference to an unknown section. Thus, for vtables only,
14305 we emit the TOC reference to reference the symbol and not the
14307 if (VTABLE_NAME_P (name))
14309 RS6000_OUTPUT_BASENAME (file, name);
14311 fprintf (file, "%d", offset);
14312 else if (offset > 0)
14313 fprintf (file, "+%d", offset);
14316 output_addr_const (file, x);
14320 /* Output an assembler pseudo-op to write an ASCII string of N characters
14321 starting at P to FILE.
14323 On the RS/6000, we have to do this using the .byte operation and
14324 write out special characters outside the quoted string.
14325 Also, the assembler is broken; very long strings are truncated,
14326 so we must artificially break them up early. */
14329 output_ascii (FILE *file, const char *p, int n)
14332 int i, count_string;
14333 const char *for_string = "\t.byte \"";
14334 const char *for_decimal = "\t.byte ";
14335 const char *to_close = NULL;
14338 for (i = 0; i < n; i++)
14341 if (c >= ' ' && c < 0177)
14344 fputs (for_string, file);
14347 /* Write two quotes to get one. */
14355 for_decimal = "\"\n\t.byte ";
14359 if (count_string >= 512)
14361 fputs (to_close, file);
14363 for_string = "\t.byte \"";
14364 for_decimal = "\t.byte ";
14372 fputs (for_decimal, file);
14373 fprintf (file, "%d", c);
14375 for_string = "\n\t.byte \"";
14376 for_decimal = ", ";
14382 /* Now close the string if we have written one. Then end the line. */
14384 fputs (to_close, file);
14387 /* Generate a unique section name for FILENAME for a section type
14388 represented by SECTION_DESC. Output goes into BUF.
14390 SECTION_DESC can be any string, as long as it is different for each
14391 possible section type.
14393 We name the section in the same manner as xlc. The name begins with an
14394 underscore followed by the filename (after stripping any leading directory
14395 names) with the last period replaced by the string SECTION_DESC. If
14396 FILENAME does not contain a period, SECTION_DESC is appended to the end of
14400 rs6000_gen_section_name (char **buf, const char *filename,
14401 const char *section_desc)
14403 const char *q, *after_last_slash, *last_period = 0;
14407 after_last_slash = filename;
14408 for (q = filename; *q; q++)
14411 after_last_slash = q + 1;
14412 else if (*q == '.')
14416 len = strlen (after_last_slash) + strlen (section_desc) + 2;
14417 *buf = (char *) xmalloc (len);
14422 for (q = after_last_slash; *q; q++)
14424 if (q == last_period)
14426 strcpy (p, section_desc);
14427 p += strlen (section_desc);
14431 else if (ISALNUM (*q))
14435 if (last_period == 0)
14436 strcpy (p, section_desc);
14441 /* Emit profile function. */
14444 output_profile_hook (int labelno ATTRIBUTE_UNUSED)
14446 if (TARGET_PROFILE_KERNEL)
14449 if (DEFAULT_ABI == ABI_AIX)
14451 #ifndef NO_PROFILE_COUNTERS
14452 # define NO_PROFILE_COUNTERS 0
14454 if (NO_PROFILE_COUNTERS)
14455 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 0);
14459 const char *label_name;
14462 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
14463 label_name = (*targetm.strip_name_encoding) (ggc_strdup (buf));
14464 fun = gen_rtx_SYMBOL_REF (Pmode, label_name);
14466 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 1,
14470 else if (DEFAULT_ABI == ABI_DARWIN)
14472 const char *mcount_name = RS6000_MCOUNT;
14473 int caller_addr_regno = LINK_REGISTER_REGNUM;
14475 /* Be conservative and always set this, at least for now. */
14476 current_function_uses_pic_offset_table = 1;
14479 /* For PIC code, set up a stub and collect the caller's address
14480 from r0, which is where the prologue puts it. */
14481 if (MACHOPIC_INDIRECT
14482 && current_function_uses_pic_offset_table)
14483 caller_addr_regno = 0;
14485 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, mcount_name),
14487 gen_rtx_REG (Pmode, caller_addr_regno), Pmode);
14491 /* Write function profiler code. */
14494 output_function_profiler (FILE *file, int labelno)
14499 switch (DEFAULT_ABI)
14508 warning ("no profiling of 64-bit code for this ABI");
14511 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
14512 fprintf (file, "\tmflr %s\n", reg_names[0]);
14515 fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file);
14516 asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
14517 reg_names[0], save_lr, reg_names[1]);
14518 asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
14519 asm_fprintf (file, "\t{l|lwz} %s,", reg_names[0]);
14520 assemble_name (file, buf);
14521 asm_fprintf (file, "@got(%s)\n", reg_names[12]);
14523 else if (flag_pic > 1)
14525 asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
14526 reg_names[0], save_lr, reg_names[1]);
14527 /* Now, we need to get the address of the label. */
14528 fputs ("\tbl 1f\n\t.long ", file);
14529 assemble_name (file, buf);
14530 fputs ("-.\n1:", file);
14531 asm_fprintf (file, "\tmflr %s\n", reg_names[11]);
14532 asm_fprintf (file, "\t{l|lwz} %s,0(%s)\n",
14533 reg_names[0], reg_names[11]);
14534 asm_fprintf (file, "\t{cax|add} %s,%s,%s\n",
14535 reg_names[0], reg_names[0], reg_names[11]);
14539 asm_fprintf (file, "\t{liu|lis} %s,", reg_names[12]);
14540 assemble_name (file, buf);
14541 fputs ("@ha\n", file);
14542 asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
14543 reg_names[0], save_lr, reg_names[1]);
14544 asm_fprintf (file, "\t{cal|la} %s,", reg_names[0]);
14545 assemble_name (file, buf);
14546 asm_fprintf (file, "@l(%s)\n", reg_names[12]);
14549 /* ABI_V4 saves the static chain reg with ASM_OUTPUT_REG_PUSH. */
14550 fprintf (file, "\tbl %s%s\n",
14551 RS6000_MCOUNT, flag_pic ? "@plt" : "");
14556 if (!TARGET_PROFILE_KERNEL)
14558 /* Don't do anything, done in output_profile_hook (). */
14565 asm_fprintf (file, "\tmflr %s\n", reg_names[0]);
14566 asm_fprintf (file, "\tstd %s,16(%s)\n", reg_names[0], reg_names[1]);
14568 if (cfun->static_chain_decl != NULL)
14570 asm_fprintf (file, "\tstd %s,24(%s)\n",
14571 reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
14572 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
14573 asm_fprintf (file, "\tld %s,24(%s)\n",
14574 reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
14577 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
14584 /* Power4 load update and store update instructions are cracked into a
14585 load or store and an integer insn which are executed in the same cycle.
14586 Branches have their own dispatch slot which does not count against the
14587 GCC issue rate, but it changes the program flow so there are no other
14588 instructions to issue in this cycle. */
14591 rs6000_variable_issue (FILE *stream ATTRIBUTE_UNUSED,
14592 int verbose ATTRIBUTE_UNUSED,
14593 rtx insn, int more)
14595 if (GET_CODE (PATTERN (insn)) == USE
14596 || GET_CODE (PATTERN (insn)) == CLOBBER)
14599 if (rs6000_sched_groups)
14601 if (is_microcoded_insn (insn))
14603 else if (is_cracked_insn (insn))
14604 return more > 2 ? more - 2 : 0;
14610 /* Adjust the cost of a scheduling dependency. Return the new cost of
14611 a dependency LINK or INSN on DEP_INSN. COST is the current cost. */
14614 rs6000_adjust_cost (rtx insn, rtx link, rtx dep_insn ATTRIBUTE_UNUSED,
14617 if (! recog_memoized (insn))
14620 if (REG_NOTE_KIND (link) != 0)
14623 if (REG_NOTE_KIND (link) == 0)
14625 /* Data dependency; DEP_INSN writes a register that INSN reads
14626 some cycles later. */
14627 switch (get_attr_type (insn))
14630 /* Tell the first scheduling pass about the latency between
14631 a mtctr and bctr (and mtlr and br/blr). The first
14632 scheduling pass will not know about this latency since
14633 the mtctr instruction, which has the latency associated
14634 to it, will be generated by reload. */
14635 return TARGET_POWER ? 5 : 4;
14637 /* Leave some extra cycles between a compare and its
14638 dependent branch, to inhibit expensive mispredicts. */
14639 if ((rs6000_cpu_attr == CPU_PPC603
14640 || rs6000_cpu_attr == CPU_PPC604
14641 || rs6000_cpu_attr == CPU_PPC604E
14642 || rs6000_cpu_attr == CPU_PPC620
14643 || rs6000_cpu_attr == CPU_PPC630
14644 || rs6000_cpu_attr == CPU_PPC750
14645 || rs6000_cpu_attr == CPU_PPC7400
14646 || rs6000_cpu_attr == CPU_PPC7450
14647 || rs6000_cpu_attr == CPU_POWER4
14648 || rs6000_cpu_attr == CPU_POWER5)
14649 && recog_memoized (dep_insn)
14650 && (INSN_CODE (dep_insn) >= 0)
14651 && (get_attr_type (dep_insn) == TYPE_CMP
14652 || get_attr_type (dep_insn) == TYPE_COMPARE
14653 || get_attr_type (dep_insn) == TYPE_DELAYED_COMPARE
14654 || get_attr_type (dep_insn) == TYPE_IMUL_COMPARE
14655 || get_attr_type (dep_insn) == TYPE_LMUL_COMPARE
14656 || get_attr_type (dep_insn) == TYPE_FPCOMPARE
14657 || get_attr_type (dep_insn) == TYPE_CR_LOGICAL
14658 || get_attr_type (dep_insn) == TYPE_DELAYED_CR))
14663 /* Fall out to return default cost. */
14669 /* The function returns a true if INSN is microcoded.
14670 Return false otherwise. */
14673 is_microcoded_insn (rtx insn)
14675 if (!insn || !INSN_P (insn)
14676 || GET_CODE (PATTERN (insn)) == USE
14677 || GET_CODE (PATTERN (insn)) == CLOBBER)
14680 if (rs6000_sched_groups)
14682 enum attr_type type = get_attr_type (insn);
14683 if (type == TYPE_LOAD_EXT_U
14684 || type == TYPE_LOAD_EXT_UX
14685 || type == TYPE_LOAD_UX
14686 || type == TYPE_STORE_UX
14687 || type == TYPE_MFCR)
14694 /* The function returns a nonzero value if INSN can be scheduled only
14695 as the first insn in a dispatch group ("dispatch-slot restricted").
14696 In this case, the returned value indicates how many dispatch slots
14697 the insn occupies (at the beginning of the group).
14698 Return 0 otherwise. */
14701 is_dispatch_slot_restricted (rtx insn)
14703 enum attr_type type;
14705 if (!rs6000_sched_groups)
14709 || insn == NULL_RTX
14710 || GET_CODE (insn) == NOTE
14711 || GET_CODE (PATTERN (insn)) == USE
14712 || GET_CODE (PATTERN (insn)) == CLOBBER)
14715 type = get_attr_type (insn);
14722 case TYPE_DELAYED_CR:
14723 case TYPE_CR_LOGICAL:
14731 if (rs6000_cpu == PROCESSOR_POWER5
14732 && is_cracked_insn (insn))
14738 /* The function returns true if INSN is cracked into 2 instructions
14739 by the processor (and therefore occupies 2 issue slots). */
14742 is_cracked_insn (rtx insn)
14744 if (!insn || !INSN_P (insn)
14745 || GET_CODE (PATTERN (insn)) == USE
14746 || GET_CODE (PATTERN (insn)) == CLOBBER)
14749 if (rs6000_sched_groups)
14751 enum attr_type type = get_attr_type (insn);
14752 if (type == TYPE_LOAD_U || type == TYPE_STORE_U
14753 || type == TYPE_FPLOAD_U || type == TYPE_FPSTORE_U
14754 || type == TYPE_FPLOAD_UX || type == TYPE_FPSTORE_UX
14755 || type == TYPE_LOAD_EXT || type == TYPE_DELAYED_CR
14756 || type == TYPE_COMPARE || type == TYPE_DELAYED_COMPARE
14757 || type == TYPE_IMUL_COMPARE || type == TYPE_LMUL_COMPARE
14758 || type == TYPE_IDIV || type == TYPE_LDIV
14759 || type == TYPE_INSERT_WORD)
14766 /* The function returns true if INSN can be issued only from
14767 the branch slot. */
14770 is_branch_slot_insn (rtx insn)
14772 if (!insn || !INSN_P (insn)
14773 || GET_CODE (PATTERN (insn)) == USE
14774 || GET_CODE (PATTERN (insn)) == CLOBBER)
14777 if (rs6000_sched_groups)
14779 enum attr_type type = get_attr_type (insn);
14780 if (type == TYPE_BRANCH || type == TYPE_JMPREG)
14788 /* A C statement (sans semicolon) to update the integer scheduling
14789 priority INSN_PRIORITY (INSN). Increase the priority to execute the
14790 INSN earlier, reduce the priority to execute INSN later. Do not
14791 define this macro if you do not need to adjust the scheduling
14792 priorities of insns. */
14795 rs6000_adjust_priority (rtx insn ATTRIBUTE_UNUSED, int priority)
14797 /* On machines (like the 750) which have asymmetric integer units,
14798 where one integer unit can do multiply and divides and the other
14799 can't, reduce the priority of multiply/divide so it is scheduled
14800 before other integer operations. */
14803 if (! INSN_P (insn))
14806 if (GET_CODE (PATTERN (insn)) == USE)
14809 switch (rs6000_cpu_attr) {
14811 switch (get_attr_type (insn))
14818 fprintf (stderr, "priority was %#x (%d) before adjustment\n",
14819 priority, priority);
14820 if (priority >= 0 && priority < 0x01000000)
14827 if (is_dispatch_slot_restricted (insn)
14828 && reload_completed
14829 && current_sched_info->sched_max_insns_priority
14830 && rs6000_sched_restricted_insns_priority)
14833 /* Prioritize insns that can be dispatched only in the first dispatch slot. */
14834 if (rs6000_sched_restricted_insns_priority == 1)
14835 /* Attach highest priority to insn. This means that in
14836 haifa-sched.c:ready_sort(), dispatch-slot restriction considerations
14837 precede 'priority' (critical path) considerations. */
14838 return current_sched_info->sched_max_insns_priority;
14839 else if (rs6000_sched_restricted_insns_priority == 2)
14840 /* Increase priority of insn by a minimal amount. This means that in
14841 haifa-sched.c:ready_sort(), only 'priority' (critical path) considerations
14842 precede dispatch-slot restriction considerations. */
14843 return (priority + 1);
14849 /* Return how many instructions the machine can issue per cycle. */
14852 rs6000_issue_rate (void)
14854 /* Use issue rate of 1 for first scheduling pass to decrease degradation. */
14855 if (!reload_completed)
14858 switch (rs6000_cpu_attr) {
14859 case CPU_RIOS1: /* ? */
14861 case CPU_PPC601: /* ? */
14884 /* Return how many instructions to look ahead for better insn
14888 rs6000_use_sched_lookahead (void)
14890 if (rs6000_cpu_attr == CPU_PPC8540)
14895 /* Determine is PAT refers to memory. */
14898 is_mem_ref (rtx pat)
14904 if (GET_CODE (pat) == MEM)
14907 /* Recursively process the pattern. */
14908 fmt = GET_RTX_FORMAT (GET_CODE (pat));
14910 for (i = GET_RTX_LENGTH (GET_CODE (pat)) - 1; i >= 0 && !ret; i--)
14913 ret |= is_mem_ref (XEXP (pat, i));
14914 else if (fmt[i] == 'E')
14915 for (j = XVECLEN (pat, i) - 1; j >= 0; j--)
14916 ret |= is_mem_ref (XVECEXP (pat, i, j));
14922 /* Determine if PAT is a PATTERN of a load insn. */
14925 is_load_insn1 (rtx pat)
14927 if (!pat || pat == NULL_RTX)
14930 if (GET_CODE (pat) == SET)
14931 return is_mem_ref (SET_SRC (pat));
14933 if (GET_CODE (pat) == PARALLEL)
14937 for (i = 0; i < XVECLEN (pat, 0); i++)
14938 if (is_load_insn1 (XVECEXP (pat, 0, i)))
14945 /* Determine if INSN loads from memory. */
14948 is_load_insn (rtx insn)
14950 if (!insn || !INSN_P (insn))
14953 if (GET_CODE (insn) == CALL_INSN)
14956 return is_load_insn1 (PATTERN (insn));
14959 /* Determine if PAT is a PATTERN of a store insn. */
14962 is_store_insn1 (rtx pat)
14964 if (!pat || pat == NULL_RTX)
14967 if (GET_CODE (pat) == SET)
14968 return is_mem_ref (SET_DEST (pat));
14970 if (GET_CODE (pat) == PARALLEL)
14974 for (i = 0; i < XVECLEN (pat, 0); i++)
14975 if (is_store_insn1 (XVECEXP (pat, 0, i)))
14982 /* Determine if INSN stores to memory. */
14985 is_store_insn (rtx insn)
14987 if (!insn || !INSN_P (insn))
14990 return is_store_insn1 (PATTERN (insn));
14993 /* Returns whether the dependence between INSN and NEXT is considered
14994 costly by the given target. */
14997 rs6000_is_costly_dependence (rtx insn, rtx next, rtx link, int cost, int distance)
14999 /* If the flag is not enbled - no dependence is considered costly;
15000 allow all dependent insns in the same group.
15001 This is the most aggressive option. */
15002 if (rs6000_sched_costly_dep == no_dep_costly)
15005 /* If the flag is set to 1 - a dependence is always considered costly;
15006 do not allow dependent instructions in the same group.
15007 This is the most conservative option. */
15008 if (rs6000_sched_costly_dep == all_deps_costly)
15011 if (rs6000_sched_costly_dep == store_to_load_dep_costly
15012 && is_load_insn (next)
15013 && is_store_insn (insn))
15014 /* Prevent load after store in the same group. */
15017 if (rs6000_sched_costly_dep == true_store_to_load_dep_costly
15018 && is_load_insn (next)
15019 && is_store_insn (insn)
15020 && (!link || (int) REG_NOTE_KIND (link) == 0))
15021 /* Prevent load after store in the same group if it is a true dependence. */
15024 /* The flag is set to X; dependences with latency >= X are considered costly,
15025 and will not be scheduled in the same group. */
15026 if (rs6000_sched_costly_dep <= max_dep_latency
15027 && ((cost - distance) >= (int)rs6000_sched_costly_dep))
15033 /* Return the next insn after INSN that is found before TAIL is reached,
15034 skipping any "non-active" insns - insns that will not actually occupy
15035 an issue slot. Return NULL_RTX if such an insn is not found. */
15038 get_next_active_insn (rtx insn, rtx tail)
15042 if (!insn || insn == tail)
15045 next_insn = NEXT_INSN (insn);
15048 && next_insn != tail
15049 && (GET_CODE(next_insn) == NOTE
15050 || GET_CODE (PATTERN (next_insn)) == USE
15051 || GET_CODE (PATTERN (next_insn)) == CLOBBER))
15053 next_insn = NEXT_INSN (next_insn);
15056 if (!next_insn || next_insn == tail)
15062 /* Return whether the presence of INSN causes a dispatch group termination
15063 of group WHICH_GROUP.
15065 If WHICH_GROUP == current_group, this function will return true if INSN
15066 causes the termination of the current group (i.e, the dispatch group to
15067 which INSN belongs). This means that INSN will be the last insn in the
15068 group it belongs to.
15070 If WHICH_GROUP == previous_group, this function will return true if INSN
15071 causes the termination of the previous group (i.e, the dispatch group that
15072 precedes the group to which INSN belongs). This means that INSN will be
15073 the first insn in the group it belongs to). */
15076 insn_terminates_group_p (rtx insn, enum group_termination which_group)
15078 enum attr_type type;
15083 type = get_attr_type (insn);
15085 if (is_microcoded_insn (insn))
15088 if (which_group == current_group)
15090 if (is_branch_slot_insn (insn))
15094 else if (which_group == previous_group)
15096 if (is_dispatch_slot_restricted (insn))
15104 /* Return true if it is recommended to keep NEXT_INSN "far" (in a separate
15105 dispatch group) from the insns in GROUP_INSNS. Return false otherwise. */
15108 is_costly_group (rtx *group_insns, rtx next_insn)
15113 int issue_rate = rs6000_issue_rate ();
15115 for (i = 0; i < issue_rate; i++)
15117 rtx insn = group_insns[i];
15120 for (link = INSN_DEPEND (insn); link != 0; link = XEXP (link, 1))
15122 rtx next = XEXP (link, 0);
15123 if (next == next_insn)
15125 cost = insn_cost (insn, link, next_insn);
15126 if (rs6000_is_costly_dependence (insn, next_insn, link, cost, 0))
15135 /* Utility of the function redefine_groups.
15136 Check if it is too costly to schedule NEXT_INSN together with GROUP_INSNS
15137 in the same dispatch group. If so, insert nops before NEXT_INSN, in order
15138 to keep it "far" (in a separate group) from GROUP_INSNS, following
15139 one of the following schemes, depending on the value of the flag
15140 -minsert_sched_nops = X:
15141 (1) X == sched_finish_regroup_exact: insert exactly as many nops as needed
15142 in order to force NEXT_INSN into a separate group.
15143 (2) X < sched_finish_regroup_exact: insert exactly X nops.
15144 GROUP_END, CAN_ISSUE_MORE and GROUP_COUNT record the state after nop
15145 insertion (has a group just ended, how many vacant issue slots remain in the
15146 last group, and how many dispatch groups were encountered so far). */
15149 force_new_group (int sched_verbose, FILE *dump, rtx *group_insns, rtx next_insn,
15150 bool *group_end, int can_issue_more, int *group_count)
15154 int issue_rate = rs6000_issue_rate ();
15155 bool end = *group_end;
15158 if (next_insn == NULL_RTX)
15159 return can_issue_more;
15161 if (rs6000_sched_insert_nops > sched_finish_regroup_exact)
15162 return can_issue_more;
15164 force = is_costly_group (group_insns, next_insn);
15166 return can_issue_more;
15168 if (sched_verbose > 6)
15169 fprintf (dump,"force: group count = %d, can_issue_more = %d\n",
15170 *group_count ,can_issue_more);
15172 if (rs6000_sched_insert_nops == sched_finish_regroup_exact)
15175 can_issue_more = 0;
15177 /* Since only a branch can be issued in the last issue_slot, it is
15178 sufficient to insert 'can_issue_more - 1' nops if next_insn is not
15179 a branch. If next_insn is a branch, we insert 'can_issue_more' nops;
15180 in this case the last nop will start a new group and the branch will be
15181 forced to the new group. */
15182 if (can_issue_more && !is_branch_slot_insn (next_insn))
15185 while (can_issue_more > 0)
15188 emit_insn_before (nop, next_insn);
15196 if (rs6000_sched_insert_nops < sched_finish_regroup_exact)
15198 int n_nops = rs6000_sched_insert_nops;
15200 /* Nops can't be issued from the branch slot, so the effective
15201 issue_rate for nops is 'issue_rate - 1'. */
15202 if (can_issue_more == 0)
15203 can_issue_more = issue_rate;
15205 if (can_issue_more == 0)
15207 can_issue_more = issue_rate - 1;
15210 for (i = 0; i < issue_rate; i++)
15212 group_insns[i] = 0;
15219 emit_insn_before (nop, next_insn);
15220 if (can_issue_more == issue_rate - 1) /* new group begins */
15223 if (can_issue_more == 0)
15225 can_issue_more = issue_rate - 1;
15228 for (i = 0; i < issue_rate; i++)
15230 group_insns[i] = 0;
15236 /* Scale back relative to 'issue_rate' (instead of 'issue_rate - 1'). */
15239 *group_end = /* Is next_insn going to start a new group? */
15241 || (can_issue_more == 1 && !is_branch_slot_insn (next_insn))
15242 || (can_issue_more <= 2 && is_cracked_insn (next_insn))
15243 || (can_issue_more < issue_rate &&
15244 insn_terminates_group_p (next_insn, previous_group)));
15245 if (*group_end && end)
15248 if (sched_verbose > 6)
15249 fprintf (dump, "done force: group count = %d, can_issue_more = %d\n",
15250 *group_count, can_issue_more);
15251 return can_issue_more;
15254 return can_issue_more;
15257 /* This function tries to synch the dispatch groups that the compiler "sees"
15258 with the dispatch groups that the processor dispatcher is expected to
15259 form in practice. It tries to achieve this synchronization by forcing the
15260 estimated processor grouping on the compiler (as opposed to the function
15261 'pad_goups' which tries to force the scheduler's grouping on the processor).
15263 The function scans the insn sequence between PREV_HEAD_INSN and TAIL and
15264 examines the (estimated) dispatch groups that will be formed by the processor
15265 dispatcher. It marks these group boundaries to reflect the estimated
15266 processor grouping, overriding the grouping that the scheduler had marked.
15267 Depending on the value of the flag '-minsert-sched-nops' this function can
15268 force certain insns into separate groups or force a certain distance between
15269 them by inserting nops, for example, if there exists a "costly dependence"
15272 The function estimates the group boundaries that the processor will form as
15273 folllows: It keeps track of how many vacant issue slots are available after
15274 each insn. A subsequent insn will start a new group if one of the following
15276 - no more vacant issue slots remain in the current dispatch group.
15277 - only the last issue slot, which is the branch slot, is vacant, but the next
15278 insn is not a branch.
15279 - only the last 2 or less issue slots, including the branch slot, are vacant,
15280 which means that a cracked insn (which occupies two issue slots) can't be
15281 issued in this group.
15282 - less than 'issue_rate' slots are vacant, and the next insn always needs to
15283 start a new group. */
15286 redefine_groups (FILE *dump, int sched_verbose, rtx prev_head_insn, rtx tail)
15288 rtx insn, next_insn;
15290 int can_issue_more;
15293 int group_count = 0;
15297 issue_rate = rs6000_issue_rate ();
15298 group_insns = alloca (issue_rate * sizeof (rtx));
15299 for (i = 0; i < issue_rate; i++)
15301 group_insns[i] = 0;
15303 can_issue_more = issue_rate;
15305 insn = get_next_active_insn (prev_head_insn, tail);
15308 while (insn != NULL_RTX)
15310 slot = (issue_rate - can_issue_more);
15311 group_insns[slot] = insn;
15313 rs6000_variable_issue (dump, sched_verbose, insn, can_issue_more);
15314 if (insn_terminates_group_p (insn, current_group))
15315 can_issue_more = 0;
15317 next_insn = get_next_active_insn (insn, tail);
15318 if (next_insn == NULL_RTX)
15319 return group_count + 1;
15321 group_end = /* Is next_insn going to start a new group? */
15322 (can_issue_more == 0
15323 || (can_issue_more == 1 && !is_branch_slot_insn (next_insn))
15324 || (can_issue_more <= 2 && is_cracked_insn (next_insn))
15325 || (can_issue_more < issue_rate &&
15326 insn_terminates_group_p (next_insn, previous_group)));
15328 can_issue_more = force_new_group (sched_verbose, dump, group_insns,
15329 next_insn, &group_end, can_issue_more, &group_count);
15334 can_issue_more = 0;
15335 for (i = 0; i < issue_rate; i++)
15337 group_insns[i] = 0;
15341 if (GET_MODE (next_insn) == TImode && can_issue_more)
15342 PUT_MODE(next_insn, VOIDmode);
15343 else if (!can_issue_more && GET_MODE (next_insn) != TImode)
15344 PUT_MODE (next_insn, TImode);
15347 if (can_issue_more == 0)
15348 can_issue_more = issue_rate;
15351 return group_count;
15354 /* Scan the insn sequence between PREV_HEAD_INSN and TAIL and examine the
15355 dispatch group boundaries that the scheduler had marked. Pad with nops
15356 any dispatch groups which have vacant issue slots, in order to force the
15357 scheduler's grouping on the processor dispatcher. The function
15358 returns the number of dispatch groups found. */
15361 pad_groups (FILE *dump, int sched_verbose, rtx prev_head_insn, rtx tail)
15363 rtx insn, next_insn;
15366 int can_issue_more;
15368 int group_count = 0;
15370 /* Initialize issue_rate. */
15371 issue_rate = rs6000_issue_rate ();
15372 can_issue_more = issue_rate;
15374 insn = get_next_active_insn (prev_head_insn, tail);
15375 next_insn = get_next_active_insn (insn, tail);
15377 while (insn != NULL_RTX)
15380 rs6000_variable_issue (dump, sched_verbose, insn, can_issue_more);
15382 group_end = (next_insn == NULL_RTX || GET_MODE (next_insn) == TImode);
15384 if (next_insn == NULL_RTX)
15389 /* If the scheduler had marked group termination at this location
15390 (between insn and next_indn), and neither insn nor next_insn will
15391 force group termination, pad the group with nops to force group
15394 && (rs6000_sched_insert_nops == sched_finish_pad_groups)
15395 && !insn_terminates_group_p (insn, current_group)
15396 && !insn_terminates_group_p (next_insn, previous_group))
15398 if (!is_branch_slot_insn(next_insn))
15401 while (can_issue_more)
15404 emit_insn_before (nop, next_insn);
15409 can_issue_more = issue_rate;
15414 next_insn = get_next_active_insn (insn, tail);
15417 return group_count;
15420 /* The following function is called at the end of scheduling BB.
15421 After reload, it inserts nops at insn group bundling. */
15424 rs6000_sched_finish (FILE *dump, int sched_verbose)
15429 fprintf (dump, "=== Finishing schedule.\n");
15431 if (reload_completed && rs6000_sched_groups)
15433 if (rs6000_sched_insert_nops == sched_finish_none)
15436 if (rs6000_sched_insert_nops == sched_finish_pad_groups)
15437 n_groups = pad_groups (dump, sched_verbose,
15438 current_sched_info->prev_head,
15439 current_sched_info->next_tail);
15441 n_groups = redefine_groups (dump, sched_verbose,
15442 current_sched_info->prev_head,
15443 current_sched_info->next_tail);
15445 if (sched_verbose >= 6)
15447 fprintf (dump, "ngroups = %d\n", n_groups);
15448 print_rtl (dump, current_sched_info->prev_head);
15449 fprintf (dump, "Done finish_sched\n");
15454 /* Length in units of the trampoline for entering a nested function. */
15457 rs6000_trampoline_size (void)
15461 switch (DEFAULT_ABI)
15467 ret = (TARGET_32BIT) ? 12 : 24;
15472 ret = (TARGET_32BIT) ? 40 : 48;
15479 /* Emit RTL insns to initialize the variable parts of a trampoline.
15480 FNADDR is an RTX for the address of the function's pure code.
15481 CXT is an RTX for the static chain value for the function. */
15484 rs6000_initialize_trampoline (rtx addr, rtx fnaddr, rtx cxt)
15486 enum machine_mode pmode = Pmode;
15487 int regsize = (TARGET_32BIT) ? 4 : 8;
15488 rtx ctx_reg = force_reg (pmode, cxt);
15490 switch (DEFAULT_ABI)
15495 /* Macros to shorten the code expansions below. */
15496 #define MEM_DEREF(addr) gen_rtx_MEM (pmode, memory_address (pmode, addr))
15497 #define MEM_PLUS(addr,offset) \
15498 gen_rtx_MEM (pmode, memory_address (pmode, plus_constant (addr, offset)))
15500 /* Under AIX, just build the 3 word function descriptor */
15503 rtx fn_reg = gen_reg_rtx (pmode);
15504 rtx toc_reg = gen_reg_rtx (pmode);
15505 emit_move_insn (fn_reg, MEM_DEREF (fnaddr));
15506 emit_move_insn (toc_reg, MEM_PLUS (fnaddr, regsize));
15507 emit_move_insn (MEM_DEREF (addr), fn_reg);
15508 emit_move_insn (MEM_PLUS (addr, regsize), toc_reg);
15509 emit_move_insn (MEM_PLUS (addr, 2*regsize), ctx_reg);
15513 /* Under V.4/eabi/darwin, __trampoline_setup does the real work. */
15516 emit_library_call (gen_rtx_SYMBOL_REF (SImode, "__trampoline_setup"),
15517 FALSE, VOIDmode, 4,
15519 GEN_INT (rs6000_trampoline_size ()), SImode,
15529 /* Table of valid machine attributes. */
15531 const struct attribute_spec rs6000_attribute_table[] =
15533 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
15534 { "altivec", 1, 1, false, true, false, rs6000_handle_altivec_attribute },
15535 { "longcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
15536 { "shortcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
15537 { NULL, 0, 0, false, false, false, NULL }
15540 /* Handle the "altivec" attribute. The attribute may have
15541 arguments as follows:
15543 __attribute__((altivec(vector__)))
15544 __attribute__((altivec(pixel__))) (always followed by 'unsigned short')
15545 __attribute__((altivec(bool__))) (always followed by 'unsigned')
15547 and may appear more than once (e.g., 'vector bool char') in a
15548 given declaration. */
15551 rs6000_handle_altivec_attribute (tree *node, tree name, tree args,
15552 int flags ATTRIBUTE_UNUSED,
15553 bool *no_add_attrs)
15555 tree type = *node, result = NULL_TREE;
15556 enum machine_mode mode;
15559 = ((args && TREE_CODE (args) == TREE_LIST && TREE_VALUE (args)
15560 && TREE_CODE (TREE_VALUE (args)) == IDENTIFIER_NODE)
15561 ? *IDENTIFIER_POINTER (TREE_VALUE (args))
15564 while (POINTER_TYPE_P (type)
15565 || TREE_CODE (type) == FUNCTION_TYPE
15566 || TREE_CODE (type) == METHOD_TYPE
15567 || TREE_CODE (type) == ARRAY_TYPE)
15568 type = TREE_TYPE (type);
15570 mode = TYPE_MODE (type);
15572 if (rs6000_warn_altivec_long
15573 && (type == long_unsigned_type_node || type == long_integer_type_node))
15574 warning ("use of 'long' in AltiVec types is deprecated; use 'int'");
15576 switch (altivec_type)
15579 unsigned_p = TYPE_UNSIGNED (type);
15583 result = (unsigned_p ? unsigned_V4SI_type_node : V4SI_type_node);
15586 result = (unsigned_p ? unsigned_V8HI_type_node : V8HI_type_node);
15589 result = (unsigned_p ? unsigned_V16QI_type_node : V16QI_type_node);
15591 case SFmode: result = V4SF_type_node; break;
15592 /* If the user says 'vector int bool', we may be handed the 'bool'
15593 attribute _before_ the 'vector' attribute, and so select the proper
15594 type in the 'b' case below. */
15595 case V4SImode: case V8HImode: case V16QImode: result = type;
15602 case SImode: case V4SImode: result = bool_V4SI_type_node; break;
15603 case HImode: case V8HImode: result = bool_V8HI_type_node; break;
15604 case QImode: case V16QImode: result = bool_V16QI_type_node;
15611 case V8HImode: result = pixel_V8HI_type_node;
15617 if (result && result != type && TYPE_READONLY (type))
15618 result = build_qualified_type (result, TYPE_QUAL_CONST);
15620 *no_add_attrs = true; /* No need to hang on to the attribute. */
15623 warning ("`%s' attribute ignored", IDENTIFIER_POINTER (name));
15625 *node = reconstruct_complex_type (*node, result);
15630 /* AltiVec defines four built-in scalar types that serve as vector
15631 elements; we must teach the compiler how to mangle them. */
15633 static const char *
15634 rs6000_mangle_fundamental_type (tree type)
15636 if (type == bool_char_type_node) return "U6__boolc";
15637 if (type == bool_short_type_node) return "U6__bools";
15638 if (type == pixel_type_node) return "u7__pixel";
15639 if (type == bool_int_type_node) return "U6__booli";
15641 /* For all other types, use normal C++ mangling. */
15645 /* Handle a "longcall" or "shortcall" attribute; arguments as in
15646 struct attribute_spec.handler. */
15649 rs6000_handle_longcall_attribute (tree *node, tree name,
15650 tree args ATTRIBUTE_UNUSED,
15651 int flags ATTRIBUTE_UNUSED,
15652 bool *no_add_attrs)
15654 if (TREE_CODE (*node) != FUNCTION_TYPE
15655 && TREE_CODE (*node) != FIELD_DECL
15656 && TREE_CODE (*node) != TYPE_DECL)
15658 warning ("`%s' attribute only applies to functions",
15659 IDENTIFIER_POINTER (name));
15660 *no_add_attrs = true;
15666 /* Set longcall attributes on all functions declared when
15667 rs6000_default_long_calls is true. */
15669 rs6000_set_default_type_attributes (tree type)
15671 if (rs6000_default_long_calls
15672 && (TREE_CODE (type) == FUNCTION_TYPE
15673 || TREE_CODE (type) == METHOD_TYPE))
15674 TYPE_ATTRIBUTES (type) = tree_cons (get_identifier ("longcall"),
15676 TYPE_ATTRIBUTES (type));
15679 /* Return a reference suitable for calling a function with the
15680 longcall attribute. */
15683 rs6000_longcall_ref (rtx call_ref)
15685 const char *call_name;
15688 if (GET_CODE (call_ref) != SYMBOL_REF)
15691 /* System V adds '.' to the internal name, so skip them. */
15692 call_name = XSTR (call_ref, 0);
15693 if (*call_name == '.')
15695 while (*call_name == '.')
15698 node = get_identifier (call_name);
15699 call_ref = gen_rtx_SYMBOL_REF (VOIDmode, IDENTIFIER_POINTER (node));
15702 return force_reg (Pmode, call_ref);
15705 #ifdef USING_ELFOS_H
15707 /* A C statement or statements to switch to the appropriate section
15708 for output of RTX in mode MODE. You can assume that RTX is some
15709 kind of constant in RTL. The argument MODE is redundant except in
15710 the case of a `const_int' rtx. Select the section by calling
15711 `text_section' or one of the alternatives for other sections.
15713 Do not define this macro if you put all constants in the read-only
15717 rs6000_elf_select_rtx_section (enum machine_mode mode, rtx x,
15718 unsigned HOST_WIDE_INT align)
15720 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
15723 default_elf_select_rtx_section (mode, x, align);
15726 /* A C statement or statements to switch to the appropriate
15727 section for output of DECL. DECL is either a `VAR_DECL' node
15728 or a constant of some sort. RELOC indicates whether forming
15729 the initial value of DECL requires link-time relocations. */
15732 rs6000_elf_select_section (tree decl, int reloc,
15733 unsigned HOST_WIDE_INT align)
15735 /* Pretend that we're always building for a shared library when
15736 ABI_AIX, because otherwise we end up with dynamic relocations
15737 in read-only sections. This happens for function pointers,
15738 references to vtables in typeinfo, and probably other cases. */
15739 default_elf_select_section_1 (decl, reloc, align,
15740 flag_pic || DEFAULT_ABI == ABI_AIX);
15743 /* A C statement to build up a unique section name, expressed as a
15744 STRING_CST node, and assign it to DECL_SECTION_NAME (decl).
15745 RELOC indicates whether the initial value of EXP requires
15746 link-time relocations. If you do not define this macro, GCC will use
15747 the symbol name prefixed by `.' as the section name. Note - this
15748 macro can now be called for uninitialized data items as well as
15749 initialized data and functions. */
15752 rs6000_elf_unique_section (tree decl, int reloc)
15754 /* As above, pretend that we're always building for a shared library
15755 when ABI_AIX, to avoid dynamic relocations in read-only sections. */
15756 default_unique_section_1 (decl, reloc,
15757 flag_pic || DEFAULT_ABI == ABI_AIX);
15760 /* For a SYMBOL_REF, set generic flags and then perform some
15761 target-specific processing.
15763 When the AIX ABI is requested on a non-AIX system, replace the
15764 function name with the real name (with a leading .) rather than the
15765 function descriptor name. This saves a lot of overriding code to
15766 read the prefixes. */
15769 rs6000_elf_encode_section_info (tree decl, rtx rtl, int first)
15771 default_encode_section_info (decl, rtl, first);
15774 && TREE_CODE (decl) == FUNCTION_DECL
15776 && DEFAULT_ABI == ABI_AIX)
15778 rtx sym_ref = XEXP (rtl, 0);
15779 size_t len = strlen (XSTR (sym_ref, 0));
15780 char *str = alloca (len + 2);
15782 memcpy (str + 1, XSTR (sym_ref, 0), len + 1);
15783 XSTR (sym_ref, 0) = ggc_alloc_string (str, len + 1);
15788 rs6000_elf_in_small_data_p (tree decl)
15790 if (rs6000_sdata == SDATA_NONE)
15793 if (TREE_CODE (decl) == VAR_DECL && DECL_SECTION_NAME (decl))
15795 const char *section = TREE_STRING_POINTER (DECL_SECTION_NAME (decl));
15796 if (strcmp (section, ".sdata") == 0
15797 || strcmp (section, ".sdata2") == 0
15798 || strcmp (section, ".sbss") == 0
15799 || strcmp (section, ".sbss2") == 0
15800 || strcmp (section, ".PPC.EMB.sdata0") == 0
15801 || strcmp (section, ".PPC.EMB.sbss0") == 0)
15806 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
15809 && (unsigned HOST_WIDE_INT) size <= g_switch_value
15810 /* If it's not public, and we're not going to reference it there,
15811 there's no need to put it in the small data section. */
15812 && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)))
15819 #endif /* USING_ELFOS_H */
15822 /* Return a REG that occurs in ADDR with coefficient 1.
15823 ADDR can be effectively incremented by incrementing REG.
15825 r0 is special and we must not select it as an address
15826 register by this routine since our caller will try to
15827 increment the returned register via an "la" instruction. */
15830 find_addr_reg (rtx addr)
15832 while (GET_CODE (addr) == PLUS)
15834 if (GET_CODE (XEXP (addr, 0)) == REG
15835 && REGNO (XEXP (addr, 0)) != 0)
15836 addr = XEXP (addr, 0);
15837 else if (GET_CODE (XEXP (addr, 1)) == REG
15838 && REGNO (XEXP (addr, 1)) != 0)
15839 addr = XEXP (addr, 1);
15840 else if (CONSTANT_P (XEXP (addr, 0)))
15841 addr = XEXP (addr, 1);
15842 else if (CONSTANT_P (XEXP (addr, 1)))
15843 addr = XEXP (addr, 0);
15847 if (GET_CODE (addr) == REG && REGNO (addr) != 0)
15853 rs6000_fatal_bad_address (rtx op)
15855 fatal_insn ("bad address", op);
15861 /* Returns 1 if OP is either a symbol reference or a sum of a symbol
15862 reference and a constant. */
15865 symbolic_operand (rtx op)
15867 switch (GET_CODE (op))
15874 return (GET_CODE (op) == SYMBOL_REF ||
15875 (GET_CODE (XEXP (op, 0)) == SYMBOL_REF
15876 || GET_CODE (XEXP (op, 0)) == LABEL_REF)
15877 && GET_CODE (XEXP (op, 1)) == CONST_INT);
15886 static tree branch_island_list = 0;
15888 /* Remember to generate a branch island for far calls to the given
15892 add_compiler_branch_island (tree label_name, tree function_name, int line_number)
15894 tree branch_island = build_tree_list (function_name, label_name);
15895 TREE_TYPE (branch_island) = build_int_2 (line_number, 0);
15896 TREE_CHAIN (branch_island) = branch_island_list;
15897 branch_island_list = branch_island;
15900 #define BRANCH_ISLAND_LABEL_NAME(BRANCH_ISLAND) TREE_VALUE (BRANCH_ISLAND)
15901 #define BRANCH_ISLAND_FUNCTION_NAME(BRANCH_ISLAND) TREE_PURPOSE (BRANCH_ISLAND)
15902 #define BRANCH_ISLAND_LINE_NUMBER(BRANCH_ISLAND) \
15903 TREE_INT_CST_LOW (TREE_TYPE (BRANCH_ISLAND))
15905 /* Generate far-jump branch islands for everything on the
15906 branch_island_list. Invoked immediately after the last instruction
15907 of the epilogue has been emitted; the branch-islands must be
15908 appended to, and contiguous with, the function body. Mach-O stubs
15909 are generated in machopic_output_stub(). */
15912 macho_branch_islands (void)
15915 tree branch_island;
15917 for (branch_island = branch_island_list;
15919 branch_island = TREE_CHAIN (branch_island))
15921 const char *label =
15922 IDENTIFIER_POINTER (BRANCH_ISLAND_LABEL_NAME (branch_island));
15924 IDENTIFIER_POINTER (BRANCH_ISLAND_FUNCTION_NAME (branch_island));
15925 char name_buf[512];
15926 /* Cheap copy of the details from the Darwin ASM_OUTPUT_LABELREF(). */
15927 if (name[0] == '*' || name[0] == '&')
15928 strcpy (name_buf, name+1);
15932 strcpy (name_buf+1, name);
15934 strcpy (tmp_buf, "\n");
15935 strcat (tmp_buf, label);
15936 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
15937 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
15938 fprintf (asm_out_file, "\t.stabd 68,0," HOST_WIDE_INT_PRINT_UNSIGNED "\n",
15939 BRANCH_ISLAND_LINE_NUMBER(branch_island));
15940 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
15943 strcat (tmp_buf, ":\n\tmflr r0\n\tbcl 20,31,");
15944 strcat (tmp_buf, label);
15945 strcat (tmp_buf, "_pic\n");
15946 strcat (tmp_buf, label);
15947 strcat (tmp_buf, "_pic:\n\tmflr r11\n");
15949 strcat (tmp_buf, "\taddis r11,r11,ha16(");
15950 strcat (tmp_buf, name_buf);
15951 strcat (tmp_buf, " - ");
15952 strcat (tmp_buf, label);
15953 strcat (tmp_buf, "_pic)\n");
15955 strcat (tmp_buf, "\tmtlr r0\n");
15957 strcat (tmp_buf, "\taddi r12,r11,lo16(");
15958 strcat (tmp_buf, name_buf);
15959 strcat (tmp_buf, " - ");
15960 strcat (tmp_buf, label);
15961 strcat (tmp_buf, "_pic)\n");
15963 strcat (tmp_buf, "\tmtctr r12\n\tbctr\n");
15967 strcat (tmp_buf, ":\nlis r12,hi16(");
15968 strcat (tmp_buf, name_buf);
15969 strcat (tmp_buf, ")\n\tori r12,r12,lo16(");
15970 strcat (tmp_buf, name_buf);
15971 strcat (tmp_buf, ")\n\tmtctr r12\n\tbctr");
15973 output_asm_insn (tmp_buf, 0);
15974 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
15975 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
15976 fprintf(asm_out_file, "\t.stabd 68,0," HOST_WIDE_INT_PRINT_UNSIGNED "\n",
15977 BRANCH_ISLAND_LINE_NUMBER (branch_island));
15978 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
15981 branch_island_list = 0;
15984 /* NO_PREVIOUS_DEF checks in the link list whether the function name is
15985 already there or not. */
15988 no_previous_def (tree function_name)
15990 tree branch_island;
15991 for (branch_island = branch_island_list;
15993 branch_island = TREE_CHAIN (branch_island))
15994 if (function_name == BRANCH_ISLAND_FUNCTION_NAME (branch_island))
15999 /* GET_PREV_LABEL gets the label name from the previous definition of
16003 get_prev_label (tree function_name)
16005 tree branch_island;
16006 for (branch_island = branch_island_list;
16008 branch_island = TREE_CHAIN (branch_island))
16009 if (function_name == BRANCH_ISLAND_FUNCTION_NAME (branch_island))
16010 return BRANCH_ISLAND_LABEL_NAME (branch_island);
16014 /* INSN is either a function call or a millicode call. It may have an
16015 unconditional jump in its delay slot.
16017 CALL_DEST is the routine we are calling. */
16020 output_call (rtx insn, rtx *operands, int dest_operand_number, int cookie_operand_number)
16022 static char buf[256];
16023 if (GET_CODE (operands[dest_operand_number]) == SYMBOL_REF
16024 && (INTVAL (operands[cookie_operand_number]) & CALL_LONG))
16027 tree funname = get_identifier (XSTR (operands[dest_operand_number], 0));
16029 if (no_previous_def (funname))
16031 int line_number = 0;
16032 rtx label_rtx = gen_label_rtx ();
16033 char *label_buf, temp_buf[256];
16034 ASM_GENERATE_INTERNAL_LABEL (temp_buf, "L",
16035 CODE_LABEL_NUMBER (label_rtx));
16036 label_buf = temp_buf[0] == '*' ? temp_buf + 1 : temp_buf;
16037 labelname = get_identifier (label_buf);
16038 for (; insn && GET_CODE (insn) != NOTE; insn = PREV_INSN (insn));
16040 line_number = NOTE_LINE_NUMBER (insn);
16041 add_compiler_branch_island (labelname, funname, line_number);
16044 labelname = get_prev_label (funname);
16046 /* "jbsr foo, L42" is Mach-O for "Link as 'bl foo' if a 'bl'
16047 instruction will reach 'foo', otherwise link as 'bl L42'".
16048 "L42" should be a 'branch island', that will do a far jump to
16049 'foo'. Branch islands are generated in
16050 macho_branch_islands(). */
16051 sprintf (buf, "jbsr %%z%d,%.246s",
16052 dest_operand_number, IDENTIFIER_POINTER (labelname));
16055 sprintf (buf, "bl %%z%d", dest_operand_number);
16059 #endif /* TARGET_MACHO */
16061 /* Generate PIC and indirect symbol stubs. */
16064 machopic_output_stub (FILE *file, const char *symb, const char *stub)
16066 unsigned int length;
16067 char *symbol_name, *lazy_ptr_name;
16068 char *local_label_0;
16069 static int label = 0;
16071 /* Lose our funky encoding stuff so it doesn't contaminate the stub. */
16072 symb = (*targetm.strip_name_encoding) (symb);
16075 length = strlen (symb);
16076 symbol_name = alloca (length + 32);
16077 GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name, symb, length);
16079 lazy_ptr_name = alloca (length + 32);
16080 GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name, symb, length);
16083 machopic_picsymbol_stub1_section ();
16085 machopic_symbol_stub1_section ();
16086 fprintf (file, "\t.align 2\n");
16088 fprintf (file, "%s:\n", stub);
16089 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
16094 local_label_0 = alloca (sizeof("\"L0000000000$spb\""));
16095 sprintf (local_label_0, "\"L%011d$spb\"", label);
16097 fprintf (file, "\tmflr r0\n");
16098 fprintf (file, "\tbcl 20,31,%s\n", local_label_0);
16099 fprintf (file, "%s:\n\tmflr r11\n", local_label_0);
16100 fprintf (file, "\taddis r11,r11,ha16(%s-%s)\n",
16101 lazy_ptr_name, local_label_0);
16102 fprintf (file, "\tmtlr r0\n");
16103 fprintf (file, "\tlwzu r12,lo16(%s-%s)(r11)\n",
16104 lazy_ptr_name, local_label_0);
16105 fprintf (file, "\tmtctr r12\n");
16106 fprintf (file, "\tbctr\n");
16110 fprintf (file, "\tlis r11,ha16(%s)\n", lazy_ptr_name);
16111 fprintf (file, "\tlwzu r12,lo16(%s)(r11)\n", lazy_ptr_name);
16112 fprintf (file, "\tmtctr r12\n");
16113 fprintf (file, "\tbctr\n");
16116 machopic_lazy_symbol_ptr_section ();
16117 fprintf (file, "%s:\n", lazy_ptr_name);
16118 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
16119 fprintf (file, "\t.long dyld_stub_binding_helper\n");
16122 /* Legitimize PIC addresses. If the address is already
16123 position-independent, we return ORIG. Newly generated
16124 position-independent addresses go into a reg. This is REG if non
16125 zero, otherwise we allocate register(s) as necessary. */
16127 #define SMALL_INT(X) ((unsigned) (INTVAL(X) + 0x8000) < 0x10000)
16130 rs6000_machopic_legitimize_pic_address (rtx orig, enum machine_mode mode,
16135 if (reg == NULL && ! reload_in_progress && ! reload_completed)
16136 reg = gen_reg_rtx (Pmode);
16138 if (GET_CODE (orig) == CONST)
16140 if (GET_CODE (XEXP (orig, 0)) == PLUS
16141 && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
16144 if (GET_CODE (XEXP (orig, 0)) == PLUS)
16146 /* Use a different reg for the intermediate value, as
16147 it will be marked UNCHANGING. */
16148 rtx reg_temp = no_new_pseudos ? reg : gen_reg_rtx (Pmode);
16151 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 0),
16154 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 1),
16160 if (GET_CODE (offset) == CONST_INT)
16162 if (SMALL_INT (offset))
16163 return plus_constant (base, INTVAL (offset));
16164 else if (! reload_in_progress && ! reload_completed)
16165 offset = force_reg (Pmode, offset);
16168 rtx mem = force_const_mem (Pmode, orig);
16169 return machopic_legitimize_pic_address (mem, Pmode, reg);
16172 return gen_rtx_PLUS (Pmode, base, offset);
16175 /* Fall back on generic machopic code. */
16176 return machopic_legitimize_pic_address (orig, mode, reg);
16179 /* This is just a placeholder to make linking work without having to
16180 add this to the generic Darwin EXTRA_SECTIONS. If -mcall-aix is
16181 ever needed for Darwin (not too likely!) this would have to get a
16182 real definition. */
16189 #endif /* TARGET_MACHO */
16192 static unsigned int
16193 rs6000_elf_section_type_flags (tree decl, const char *name, int reloc)
16195 return default_section_type_flags_1 (decl, name, reloc,
16196 flag_pic || DEFAULT_ABI == ABI_AIX);
16199 /* Record an element in the table of global constructors. SYMBOL is
16200 a SYMBOL_REF of the function to be called; PRIORITY is a number
16201 between 0 and MAX_INIT_PRIORITY.
16203 This differs from default_named_section_asm_out_constructor in
16204 that we have special handling for -mrelocatable. */
16207 rs6000_elf_asm_out_constructor (rtx symbol, int priority)
16209 const char *section = ".ctors";
16212 if (priority != DEFAULT_INIT_PRIORITY)
16214 sprintf (buf, ".ctors.%.5u",
16215 /* Invert the numbering so the linker puts us in the proper
16216 order; constructors are run from right to left, and the
16217 linker sorts in increasing order. */
16218 MAX_INIT_PRIORITY - priority);
16222 named_section_flags (section, SECTION_WRITE);
16223 assemble_align (POINTER_SIZE);
16225 if (TARGET_RELOCATABLE)
16227 fputs ("\t.long (", asm_out_file);
16228 output_addr_const (asm_out_file, symbol);
16229 fputs (")@fixup\n", asm_out_file);
16232 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
16236 rs6000_elf_asm_out_destructor (rtx symbol, int priority)
16238 const char *section = ".dtors";
16241 if (priority != DEFAULT_INIT_PRIORITY)
16243 sprintf (buf, ".dtors.%.5u",
16244 /* Invert the numbering so the linker puts us in the proper
16245 order; constructors are run from right to left, and the
16246 linker sorts in increasing order. */
16247 MAX_INIT_PRIORITY - priority);
16251 named_section_flags (section, SECTION_WRITE);
16252 assemble_align (POINTER_SIZE);
16254 if (TARGET_RELOCATABLE)
16256 fputs ("\t.long (", asm_out_file);
16257 output_addr_const (asm_out_file, symbol);
16258 fputs (")@fixup\n", asm_out_file);
16261 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
16265 rs6000_elf_declare_function_name (FILE *file, const char *name, tree decl)
16269 fputs ("\t.section\t\".opd\",\"aw\"\n\t.align 3\n", file);
16270 ASM_OUTPUT_LABEL (file, name);
16271 fputs (DOUBLE_INT_ASM_OP, file);
16273 assemble_name (file, name);
16274 fputs (",.TOC.@tocbase,0\n\t.previous\n\t.size\t", file);
16275 assemble_name (file, name);
16276 fputs (",24\n\t.type\t.", file);
16277 assemble_name (file, name);
16278 fputs (",@function\n", file);
16279 if (TREE_PUBLIC (decl) && ! DECL_WEAK (decl))
16281 fputs ("\t.globl\t.", file);
16282 assemble_name (file, name);
16285 ASM_DECLARE_RESULT (file, DECL_RESULT (decl));
16287 ASM_OUTPUT_LABEL (file, name);
16291 if (TARGET_RELOCATABLE
16292 && (get_pool_size () != 0 || current_function_profile)
16297 (*targetm.asm_out.internal_label) (file, "LCL", rs6000_pic_labelno);
16299 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
16300 fprintf (file, "\t.long ");
16301 assemble_name (file, buf);
16303 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
16304 assemble_name (file, buf);
16308 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
16309 ASM_DECLARE_RESULT (file, DECL_RESULT (decl));
16311 if (DEFAULT_ABI == ABI_AIX)
16313 const char *desc_name, *orig_name;
16315 orig_name = (*targetm.strip_name_encoding) (name);
16316 desc_name = orig_name;
16317 while (*desc_name == '.')
16320 if (TREE_PUBLIC (decl))
16321 fprintf (file, "\t.globl %s\n", desc_name);
16323 fprintf (file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
16324 fprintf (file, "%s:\n", desc_name);
16325 fprintf (file, "\t.long %s\n", orig_name);
16326 fputs ("\t.long _GLOBAL_OFFSET_TABLE_\n", file);
16327 if (DEFAULT_ABI == ABI_AIX)
16328 fputs ("\t.long 0\n", file);
16329 fprintf (file, "\t.previous\n");
16331 ASM_OUTPUT_LABEL (file, name);
16337 rs6000_xcoff_asm_globalize_label (FILE *stream, const char *name)
16339 fputs (GLOBAL_ASM_OP, stream);
16340 RS6000_OUTPUT_BASENAME (stream, name);
16341 putc ('\n', stream);
16345 rs6000_xcoff_asm_named_section (const char *name, unsigned int flags)
16348 static const char * const suffix[3] = { "PR", "RO", "RW" };
16350 if (flags & SECTION_CODE)
16352 else if (flags & SECTION_WRITE)
16357 fprintf (asm_out_file, "\t.csect %s%s[%s],%u\n",
16358 (flags & SECTION_CODE) ? "." : "",
16359 name, suffix[smclass], flags & SECTION_ENTSIZE);
16363 rs6000_xcoff_select_section (tree decl, int reloc,
16364 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
16366 if (decl_readonly_section_1 (decl, reloc, 1))
16368 if (TREE_PUBLIC (decl))
16369 read_only_data_section ();
16371 read_only_private_data_section ();
16375 if (TREE_PUBLIC (decl))
16378 private_data_section ();
16383 rs6000_xcoff_unique_section (tree decl, int reloc ATTRIBUTE_UNUSED)
16387 /* Use select_section for private and uninitialized data. */
16388 if (!TREE_PUBLIC (decl)
16389 || DECL_COMMON (decl)
16390 || DECL_INITIAL (decl) == NULL_TREE
16391 || DECL_INITIAL (decl) == error_mark_node
16392 || (flag_zero_initialized_in_bss
16393 && initializer_zerop (DECL_INITIAL (decl))))
16396 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
16397 name = (*targetm.strip_name_encoding) (name);
16398 DECL_SECTION_NAME (decl) = build_string (strlen (name), name);
16401 /* Select section for constant in constant pool.
16403 On RS/6000, all constants are in the private read-only data area.
16404 However, if this is being placed in the TOC it must be output as a
16408 rs6000_xcoff_select_rtx_section (enum machine_mode mode, rtx x,
16409 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
16411 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
16414 read_only_private_data_section ();
16417 /* Remove any trailing [DS] or the like from the symbol name. */
16419 static const char *
16420 rs6000_xcoff_strip_name_encoding (const char *name)
16425 len = strlen (name);
16426 if (name[len - 1] == ']')
16427 return ggc_alloc_string (name, len - 4);
16432 /* Section attributes. AIX is always PIC. */
16434 static unsigned int
16435 rs6000_xcoff_section_type_flags (tree decl, const char *name, int reloc)
16437 unsigned int align;
16438 unsigned int flags = default_section_type_flags_1 (decl, name, reloc, 1);
16440 /* Align to at least UNIT size. */
16441 if (flags & SECTION_CODE)
16442 align = MIN_UNITS_PER_WORD;
16444 /* Increase alignment of large objects if not already stricter. */
16445 align = MAX ((DECL_ALIGN (decl) / BITS_PER_UNIT),
16446 int_size_in_bytes (TREE_TYPE (decl)) > MIN_UNITS_PER_WORD
16447 ? UNITS_PER_FP_WORD : MIN_UNITS_PER_WORD);
16449 return flags | (exact_log2 (align) & SECTION_ENTSIZE);
16452 /* Output at beginning of assembler file.
16454 Initialize the section names for the RS/6000 at this point.
16456 Specify filename, including full path, to assembler.
16458 We want to go into the TOC section so at least one .toc will be emitted.
16459 Also, in order to output proper .bs/.es pairs, we need at least one static
16460 [RW] section emitted.
16462 Finally, declare mcount when profiling to make the assembler happy. */
16465 rs6000_xcoff_file_start (void)
16467 rs6000_gen_section_name (&xcoff_bss_section_name,
16468 main_input_filename, ".bss_");
16469 rs6000_gen_section_name (&xcoff_private_data_section_name,
16470 main_input_filename, ".rw_");
16471 rs6000_gen_section_name (&xcoff_read_only_section_name,
16472 main_input_filename, ".ro_");
16474 fputs ("\t.file\t", asm_out_file);
16475 output_quoted_string (asm_out_file, main_input_filename);
16476 fputc ('\n', asm_out_file);
16478 if (write_symbols != NO_DEBUG)
16479 private_data_section ();
16482 fprintf (asm_out_file, "\t.extern %s\n", RS6000_MCOUNT);
16483 rs6000_file_start ();
16486 /* Output at end of assembler file.
16487 On the RS/6000, referencing data should automatically pull in text. */
16490 rs6000_xcoff_file_end (void)
16493 fputs ("_section_.text:\n", asm_out_file);
16495 fputs (TARGET_32BIT
16496 ? "\t.long _section_.text\n" : "\t.llong _section_.text\n",
16499 #endif /* TARGET_XCOFF */
16502 /* Cross-module name binding. Darwin does not support overriding
16503 functions at dynamic-link time. */
16506 rs6000_binds_local_p (tree decl)
16508 return default_binds_local_p_1 (decl, 0);
16512 /* Compute a (partial) cost for rtx X. Return true if the complete
16513 cost has been computed, and false if subexpressions should be
16514 scanned. In either case, *TOTAL contains the cost result. */
16517 rs6000_rtx_costs (rtx x, int code, int outer_code ATTRIBUTE_UNUSED,
16520 enum machine_mode mode = GET_MODE (x);
16524 /* On the RS/6000, if it is valid in the insn, it is free.
16525 So this always returns 0. */
16536 if (mode == DFmode)
16537 *total = GET_CODE (XEXP (x, 0)) == MULT
16538 ? rs6000_cost->dmul
16540 else if (mode == SFmode)
16541 *total = rs6000_cost->fp;
16542 else if (GET_CODE (XEXP (x, 0)) == MULT)
16544 /* The rs6000 doesn't have shift-and-add instructions. */
16545 rs6000_rtx_costs (XEXP (x, 0), MULT, PLUS, total);
16546 *total += COSTS_N_INSNS (1);
16549 *total = ((GET_CODE (XEXP (x, 1)) == CONST_INT
16550 && ((unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1))
16551 + 0x8000) >= 0x10000)
16552 && ((INTVAL (XEXP (x, 1)) & 0xffff) != 0))
16553 ? COSTS_N_INSNS (2)
16554 : COSTS_N_INSNS (1));
16558 if (mode == DFmode)
16559 *total = GET_CODE (XEXP (x, 0)) == MULT
16560 ? rs6000_cost->dmul
16562 else if (mode == SFmode)
16563 *total = rs6000_cost->fp;
16564 else if (GET_CODE (XEXP (x, 0)) == MULT)
16566 /* The rs6000 doesn't have shift-and-sub instructions. */
16567 rs6000_rtx_costs (XEXP (x, 0), MULT, MINUS, total);
16568 *total += COSTS_N_INSNS (1);
16571 *total = COSTS_N_INSNS (1);
16577 *total = ((GET_CODE (XEXP (x, 1)) == CONST_INT
16578 && (INTVAL (XEXP (x, 1)) & (~ (HOST_WIDE_INT) 0xffff)) != 0
16579 && ((INTVAL (XEXP (x, 1)) & 0xffff) != 0))
16580 ? COSTS_N_INSNS (2)
16581 : COSTS_N_INSNS (1));
16585 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
16587 if (INTVAL (XEXP (x, 1)) >= -256
16588 && INTVAL (XEXP (x, 1)) <= 255)
16589 *total = rs6000_cost->mulsi_const9;
16591 *total = rs6000_cost->mulsi_const;
16593 else if (mode == DFmode)
16594 *total = rs6000_cost->dmul;
16595 else if (mode == SFmode)
16596 *total = rs6000_cost->fp;
16597 else if (mode == DImode)
16598 *total = rs6000_cost->muldi;
16600 *total = rs6000_cost->mulsi;
16605 if (FLOAT_MODE_P (mode))
16607 *total = mode == DFmode ? rs6000_cost->ddiv
16608 : rs6000_cost->sdiv;
16611 if (GET_CODE (XEXP (x, 1)) == CONST_INT
16612 && exact_log2 (INTVAL (XEXP (x, 1))) >= 0)
16614 *total = COSTS_N_INSNS (2);
16621 if (GET_MODE (XEXP (x, 1)) == DImode)
16622 *total = rs6000_cost->divdi;
16624 *total = rs6000_cost->divsi;
16628 *total = COSTS_N_INSNS (4);
16633 if (FLOAT_MODE_P (mode))
16634 *total = rs6000_cost->fp;
16636 *total = COSTS_N_INSNS (1);
16640 /* MEM should be slightly more expensive than (plus (reg) (const)). */
16641 *total = COSTS_N_INSNS (1) + 1;
16648 *total = COSTS_N_INSNS (1);
16651 case FLOAT_TRUNCATE:
16652 *total = rs6000_cost->fp;
16656 switch (XINT (x, 1))
16659 *total = rs6000_cost->fp;
16671 *total = COSTS_N_INSNS (1);
16683 /* A C expression returning the cost of moving data from a register of class
16684 CLASS1 to one of CLASS2. */
16687 rs6000_register_move_cost (enum machine_mode mode,
16688 enum reg_class from, enum reg_class to)
16690 /* Moves from/to GENERAL_REGS. */
16691 if (reg_classes_intersect_p (to, GENERAL_REGS)
16692 || reg_classes_intersect_p (from, GENERAL_REGS))
16694 if (! reg_classes_intersect_p (to, GENERAL_REGS))
16697 if (from == FLOAT_REGS || from == ALTIVEC_REGS)
16698 return (rs6000_memory_move_cost (mode, from, 0)
16699 + rs6000_memory_move_cost (mode, GENERAL_REGS, 0));
16701 /* It's more expensive to move CR_REGS than CR0_REGS because of the shift.... */
16702 else if (from == CR_REGS)
16706 /* A move will cost one instruction per GPR moved. */
16707 return 2 * HARD_REGNO_NREGS (0, mode);
16710 /* Moving between two similar registers is just one instruction. */
16711 else if (reg_classes_intersect_p (to, from))
16712 return mode == TFmode ? 4 : 2;
16714 /* Everything else has to go through GENERAL_REGS. */
16716 return (rs6000_register_move_cost (mode, GENERAL_REGS, to)
16717 + rs6000_register_move_cost (mode, from, GENERAL_REGS));
16720 /* A C expressions returning the cost of moving data of MODE from a register to
16724 rs6000_memory_move_cost (enum machine_mode mode, enum reg_class class,
16725 int in ATTRIBUTE_UNUSED)
16727 if (reg_classes_intersect_p (class, GENERAL_REGS))
16728 return 4 * HARD_REGNO_NREGS (0, mode);
16729 else if (reg_classes_intersect_p (class, FLOAT_REGS))
16730 return 4 * HARD_REGNO_NREGS (32, mode);
16731 else if (reg_classes_intersect_p (class, ALTIVEC_REGS))
16732 return 4 * HARD_REGNO_NREGS (FIRST_ALTIVEC_REGNO, mode);
16734 return 4 + rs6000_register_move_cost (mode, class, GENERAL_REGS);
16737 /* Return an RTX representing where to find the function value of a
16738 function returning MODE. */
16740 rs6000_complex_function_value (enum machine_mode mode)
16742 unsigned int regno;
16744 enum machine_mode inner = GET_MODE_INNER (mode);
16745 unsigned int inner_bytes = GET_MODE_SIZE (inner);
16747 if (FLOAT_MODE_P (mode) && TARGET_HARD_FLOAT && TARGET_FPRS)
16748 regno = FP_ARG_RETURN;
16751 regno = GP_ARG_RETURN;
16753 /* 32-bit is OK since it'll go in r3/r4. */
16754 if (TARGET_32BIT && inner_bytes >= 4)
16755 return gen_rtx_REG (mode, regno);
16758 if (inner_bytes >= 8)
16759 return gen_rtx_REG (mode, regno);
16761 r1 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno),
16763 r2 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno + 1),
16764 GEN_INT (inner_bytes));
16765 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
16768 /* Define how to find the value returned by a function.
16769 VALTYPE is the data type of the value (as a tree).
16770 If the precise function being called is known, FUNC is its FUNCTION_DECL;
16771 otherwise, FUNC is 0.
16773 On the SPE, both FPs and vectors are returned in r3.
16775 On RS/6000 an integer value is in r3 and a floating-point value is in
16776 fp1, unless -msoft-float. */
16779 rs6000_function_value (tree valtype, tree func ATTRIBUTE_UNUSED)
16781 enum machine_mode mode;
16782 unsigned int regno;
16784 if (TARGET_32BIT && TARGET_POWERPC64 && TYPE_MODE (valtype) == DImode)
16786 /* Long long return value need be split in -mpowerpc64, 32bit ABI. */
16787 return gen_rtx_PARALLEL (DImode,
16789 gen_rtx_EXPR_LIST (VOIDmode,
16790 gen_rtx_REG (SImode, GP_ARG_RETURN),
16792 gen_rtx_EXPR_LIST (VOIDmode,
16793 gen_rtx_REG (SImode,
16794 GP_ARG_RETURN + 1),
16798 if ((INTEGRAL_TYPE_P (valtype)
16799 && TYPE_PRECISION (valtype) < BITS_PER_WORD)
16800 || POINTER_TYPE_P (valtype))
16801 mode = TARGET_32BIT ? SImode : DImode;
16803 mode = TYPE_MODE (valtype);
16805 if (SCALAR_FLOAT_TYPE_P (valtype) && TARGET_HARD_FLOAT && TARGET_FPRS)
16806 regno = FP_ARG_RETURN;
16807 else if (TREE_CODE (valtype) == COMPLEX_TYPE
16808 && targetm.calls.split_complex_arg)
16809 return rs6000_complex_function_value (mode);
16810 else if (TREE_CODE (valtype) == VECTOR_TYPE
16811 && TARGET_ALTIVEC && TARGET_ALTIVEC_ABI)
16812 regno = ALTIVEC_ARG_RETURN;
16814 regno = GP_ARG_RETURN;
16816 return gen_rtx_REG (mode, regno);
16819 /* Define how to find the value returned by a library function
16820 assuming the value has mode MODE. */
16822 rs6000_libcall_value (enum machine_mode mode)
16824 unsigned int regno;
16826 if (GET_MODE_CLASS (mode) == MODE_FLOAT
16827 && TARGET_HARD_FLOAT && TARGET_FPRS)
16828 regno = FP_ARG_RETURN;
16829 else if (ALTIVEC_VECTOR_MODE (mode)
16830 && TARGET_ALTIVEC && TARGET_ALTIVEC_ABI)
16831 regno = ALTIVEC_ARG_RETURN;
16832 else if (COMPLEX_MODE_P (mode) && targetm.calls.split_complex_arg)
16833 return rs6000_complex_function_value (mode);
16835 regno = GP_ARG_RETURN;
16837 return gen_rtx_REG (mode, regno);
16840 /* Define the offset between two registers, FROM to be eliminated and its
16841 replacement TO, at the start of a routine. */
16843 rs6000_initial_elimination_offset (int from, int to)
16845 rs6000_stack_t *info = rs6000_stack_info ();
16846 HOST_WIDE_INT offset;
16848 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
16849 offset = info->push_p ? 0 : -info->total_size;
16850 else if (from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
16851 offset = info->total_size;
16852 else if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
16853 offset = info->push_p ? info->total_size : 0;
16854 else if (from == RS6000_PIC_OFFSET_TABLE_REGNUM)
16862 /* Return true if TYPE is of type __ev64_opaque__. */
16865 is_ev64_opaque_type (tree type)
16868 && (type == opaque_V2SI_type_node
16869 || type == opaque_V2SF_type_node
16870 || type == opaque_p_V2SI_type_node));
16874 rs6000_dwarf_register_span (rtx reg)
16878 if (!TARGET_SPE || !SPE_VECTOR_MODE (GET_MODE (reg)))
16881 regno = REGNO (reg);
16883 /* The duality of the SPE register size wreaks all kinds of havoc.
16884 This is a way of distinguishing r0 in 32-bits from r0 in
16887 gen_rtx_PARALLEL (VOIDmode,
16890 gen_rtx_REG (SImode, regno + 1200),
16891 gen_rtx_REG (SImode, regno))
16893 gen_rtx_REG (SImode, regno),
16894 gen_rtx_REG (SImode, regno + 1200)));
16897 /* Map internal gcc register numbers to DWARF2 register numbers. */
16900 rs6000_dbx_register_number (unsigned int regno)
16902 if (regno <= 63 || write_symbols != DWARF2_DEBUG)
16904 if (regno == MQ_REGNO)
16906 if (regno == LINK_REGISTER_REGNUM)
16908 if (regno == COUNT_REGISTER_REGNUM)
16910 if (CR_REGNO_P (regno))
16911 return regno - CR0_REGNO + 86;
16912 if (regno == XER_REGNO)
16914 if (ALTIVEC_REGNO_P (regno))
16915 return regno - FIRST_ALTIVEC_REGNO + 1124;
16916 if (regno == VRSAVE_REGNO)
16918 if (regno == VSCR_REGNO)
16920 if (regno == SPE_ACC_REGNO)
16922 if (regno == SPEFSCR_REGNO)
16924 /* SPE high reg number. We get these values of regno from
16925 rs6000_dwarf_register_span. */
16926 if (regno >= 1200 && regno < 1232)
16932 #include "gt-rs6000.h"