1 /* Subroutines used for code generation on IBM RS/6000.
2 Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
4 Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it
9 under the terms of the GNU General Public License as published
10 by the Free Software Foundation; either version 2, or (at your
11 option) any later version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT
14 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
15 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
16 License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the
20 Free Software Foundation, 59 Temple Place - Suite 330, Boston,
21 MA 02111-1307, USA. */
25 #include "coretypes.h"
29 #include "hard-reg-set.h"
31 #include "insn-config.h"
32 #include "conditions.h"
33 #include "insn-attr.h"
43 #include "basic-block.h"
44 #include "integrate.h"
50 #include "target-def.h"
51 #include "langhooks.h"
53 #include "cfglayout.h"
54 #include "sched-int.h"
55 #include "tree-gimple.h"
57 #include "xcoffout.h" /* get declarations of xcoff_*_section_name */
60 #include "gstab.h" /* for N_SLINE */
63 #ifndef TARGET_NO_PROTOTYPE
64 #define TARGET_NO_PROTOTYPE 0
67 #define EASY_VECTOR_15(n) ((n) >= -16 && (n) <= 15)
68 #define EASY_VECTOR_15_ADD_SELF(n) ((n) >= 0x10 && (n) <= 0x1e && !((n) & 1))
70 #define min(A,B) ((A) < (B) ? (A) : (B))
71 #define max(A,B) ((A) > (B) ? (A) : (B))
73 /* Structure used to define the rs6000 stack */
74 typedef struct rs6000_stack {
75 int first_gp_reg_save; /* first callee saved GP register used */
76 int first_fp_reg_save; /* first callee saved FP register used */
77 int first_altivec_reg_save; /* first callee saved AltiVec register used */
78 int lr_save_p; /* true if the link reg needs to be saved */
79 int cr_save_p; /* true if the CR reg needs to be saved */
80 unsigned int vrsave_mask; /* mask of vec registers to save */
81 int toc_save_p; /* true if the TOC needs to be saved */
82 int push_p; /* true if we need to allocate stack space */
83 int calls_p; /* true if the function makes any calls */
84 int world_save_p; /* true if we're saving *everything*:
85 r13-r31, cr, f14-f31, vrsave, v20-v31 */
86 enum rs6000_abi abi; /* which ABI to use */
87 int gp_save_offset; /* offset to save GP regs from initial SP */
88 int fp_save_offset; /* offset to save FP regs from initial SP */
89 int altivec_save_offset; /* offset to save AltiVec regs from initial SP */
90 int lr_save_offset; /* offset to save LR from initial SP */
91 int cr_save_offset; /* offset to save CR from initial SP */
92 int vrsave_save_offset; /* offset to save VRSAVE from initial SP */
93 int spe_gp_save_offset; /* offset to save spe 64-bit gprs */
94 int toc_save_offset; /* offset to save the TOC pointer */
95 int varargs_save_offset; /* offset to save the varargs registers */
96 int ehrd_offset; /* offset to EH return data */
97 int reg_size; /* register size (4 or 8) */
98 int varargs_size; /* size to hold V.4 args passed in regs */
99 HOST_WIDE_INT vars_size; /* variable save area size */
100 int parm_size; /* outgoing parameter size */
101 int save_size; /* save area size */
102 int fixed_size; /* fixed size of stack frame */
103 int gp_size; /* size of saved GP registers */
104 int fp_size; /* size of saved FP registers */
105 int altivec_size; /* size of saved AltiVec registers */
106 int cr_size; /* size to hold CR if not in save_size */
107 int lr_size; /* size to hold LR if not in save_size */
108 int vrsave_size; /* size to hold VRSAVE if not in save_size */
109 int altivec_padding_size; /* size of altivec alignment padding if
111 int spe_gp_size; /* size of 64-bit GPR save size for SPE */
112 int spe_padding_size;
113 int toc_size; /* size to hold TOC if not in save_size */
114 HOST_WIDE_INT total_size; /* total bytes allocated for stack */
115 int spe_64bit_regs_used;
118 /* Target cpu type */
120 enum processor_type rs6000_cpu;
121 struct rs6000_cpu_select rs6000_select[3] =
123 /* switch name, tune arch */
124 { (const char *)0, "--with-cpu=", 1, 1 },
125 { (const char *)0, "-mcpu=", 1, 1 },
126 { (const char *)0, "-mtune=", 1, 0 },
129 /* Always emit branch hint bits. */
130 static GTY(()) bool rs6000_always_hint;
132 /* Schedule instructions for group formation. */
133 static GTY(()) bool rs6000_sched_groups;
135 /* Support adjust_priority scheduler hook
136 and -mprioritize-restricted-insns= option. */
137 const char *rs6000_sched_restricted_insns_priority_str;
138 int rs6000_sched_restricted_insns_priority;
140 /* Support for -msched-costly-dep option. */
141 const char *rs6000_sched_costly_dep_str;
142 enum rs6000_dependence_cost rs6000_sched_costly_dep;
144 /* Support for -minsert-sched-nops option. */
145 const char *rs6000_sched_insert_nops_str;
146 enum rs6000_nop_insertion rs6000_sched_insert_nops;
148 /* Support targetm.vectorize.builtin_mask_for_load. */
149 static GTY(()) tree altivec_builtin_mask_for_load;
150 /* Support targetm.vectorize.builtin_mask_for_store. */
151 static GTY(()) tree altivec_builtin_mask_for_store;
153 /* Size of long double */
154 const char *rs6000_long_double_size_string;
155 int rs6000_long_double_type_size;
157 /* Whether -mabi=altivec has appeared */
158 int rs6000_altivec_abi;
160 /* Whether VRSAVE instructions should be generated. */
161 int rs6000_altivec_vrsave;
163 /* String from -mvrsave= option. */
164 const char *rs6000_altivec_vrsave_string;
166 /* Nonzero if we want SPE ABI extensions. */
169 /* Whether isel instructions should be generated. */
172 /* Whether SPE simd instructions should be generated. */
175 /* Nonzero if floating point operations are done in the GPRs. */
176 int rs6000_float_gprs = 0;
178 /* String from -mfloat-gprs=. */
179 const char *rs6000_float_gprs_string;
181 /* String from -misel=. */
182 const char *rs6000_isel_string;
184 /* String from -mspe=. */
185 const char *rs6000_spe_string;
187 /* Set to nonzero once AIX common-mode calls have been defined. */
188 static GTY(()) int common_mode_defined;
190 /* Save information from a "cmpxx" operation until the branch or scc is
192 rtx rs6000_compare_op0, rs6000_compare_op1;
193 int rs6000_compare_fp_p;
195 /* Label number of label created for -mrelocatable, to call to so we can
196 get the address of the GOT section */
197 int rs6000_pic_labelno;
200 /* Which abi to adhere to */
201 const char *rs6000_abi_name;
203 /* Semantics of the small data area */
204 enum rs6000_sdata_type rs6000_sdata = SDATA_DATA;
206 /* Which small data model to use */
207 const char *rs6000_sdata_name = (char *)0;
209 /* Counter for labels which are to be placed in .fixup. */
210 int fixuplabelno = 0;
213 /* Bit size of immediate TLS offsets and string from which it is decoded. */
214 int rs6000_tls_size = 32;
215 const char *rs6000_tls_size_string;
217 /* ABI enumeration available for subtarget to use. */
218 enum rs6000_abi rs6000_current_abi;
220 /* ABI string from -mabi= option. */
221 const char *rs6000_abi_string;
223 /* Whether to use variant of AIX ABI for PowerPC64 Linux. */
227 const char *rs6000_debug_name;
228 int rs6000_debug_stack; /* debug stack applications */
229 int rs6000_debug_arg; /* debug argument handling */
231 /* Value is TRUE if register/mode pair is accepatable. */
232 bool rs6000_hard_regno_mode_ok_p[NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
235 static GTY(()) tree opaque_V2SI_type_node;
236 static GTY(()) tree opaque_V2SF_type_node;
237 static GTY(()) tree opaque_p_V2SI_type_node;
238 static GTY(()) tree V16QI_type_node;
239 static GTY(()) tree V2SI_type_node;
240 static GTY(()) tree V2SF_type_node;
241 static GTY(()) tree V4HI_type_node;
242 static GTY(()) tree V4SI_type_node;
243 static GTY(()) tree V4SF_type_node;
244 static GTY(()) tree V8HI_type_node;
245 static GTY(()) tree unsigned_V16QI_type_node;
246 static GTY(()) tree unsigned_V8HI_type_node;
247 static GTY(()) tree unsigned_V4SI_type_node;
248 static GTY(()) tree bool_char_type_node; /* __bool char */
249 static GTY(()) tree bool_short_type_node; /* __bool short */
250 static GTY(()) tree bool_int_type_node; /* __bool int */
251 static GTY(()) tree pixel_type_node; /* __pixel */
252 static GTY(()) tree bool_V16QI_type_node; /* __vector __bool char */
253 static GTY(()) tree bool_V8HI_type_node; /* __vector __bool short */
254 static GTY(()) tree bool_V4SI_type_node; /* __vector __bool int */
255 static GTY(()) tree pixel_V8HI_type_node; /* __vector __pixel */
257 int rs6000_warn_altivec_long = 1; /* On by default. */
258 const char *rs6000_warn_altivec_long_switch;
260 const char *rs6000_traceback_name;
262 traceback_default = 0,
268 /* Flag to say the TOC is initialized */
270 char toc_label_name[10];
272 /* Alias set for saves and restores from the rs6000 stack. */
273 static GTY(()) int rs6000_sr_alias_set;
275 /* Call distance, overridden by -mlongcall and #pragma longcall(1).
276 The only place that looks at this is rs6000_set_default_type_attributes;
277 everywhere else should rely on the presence or absence of a longcall
278 attribute on the function declaration. */
279 int rs6000_default_long_calls;
280 const char *rs6000_longcall_switch;
282 /* Control alignment for fields within structures. */
283 /* String from -malign-XXXXX. */
284 const char *rs6000_alignment_string;
285 int rs6000_alignment_flags;
287 struct builtin_description
289 /* mask is not const because we're going to alter it below. This
290 nonsense will go away when we rewrite the -march infrastructure
291 to give us more target flag bits. */
293 const enum insn_code icode;
294 const char *const name;
295 const enum rs6000_builtins code;
298 /* Target cpu costs. */
300 struct processor_costs {
301 const int mulsi; /* cost of SImode multiplication. */
302 const int mulsi_const; /* cost of SImode multiplication by constant. */
303 const int mulsi_const9; /* cost of SImode mult by short constant. */
304 const int muldi; /* cost of DImode multiplication. */
305 const int divsi; /* cost of SImode division. */
306 const int divdi; /* cost of DImode division. */
307 const int fp; /* cost of simple SFmode and DFmode insns. */
308 const int dmul; /* cost of DFmode multiplication (and fmadd). */
309 const int sdiv; /* cost of SFmode division (fdivs). */
310 const int ddiv; /* cost of DFmode division (fdiv). */
313 const struct processor_costs *rs6000_cost;
315 /* Processor costs (relative to an add) */
317 /* Instruction size costs on 32bit processors. */
319 struct processor_costs size32_cost = {
320 COSTS_N_INSNS (1), /* mulsi */
321 COSTS_N_INSNS (1), /* mulsi_const */
322 COSTS_N_INSNS (1), /* mulsi_const9 */
323 COSTS_N_INSNS (1), /* muldi */
324 COSTS_N_INSNS (1), /* divsi */
325 COSTS_N_INSNS (1), /* divdi */
326 COSTS_N_INSNS (1), /* fp */
327 COSTS_N_INSNS (1), /* dmul */
328 COSTS_N_INSNS (1), /* sdiv */
329 COSTS_N_INSNS (1), /* ddiv */
332 /* Instruction size costs on 64bit processors. */
334 struct processor_costs size64_cost = {
335 COSTS_N_INSNS (1), /* mulsi */
336 COSTS_N_INSNS (1), /* mulsi_const */
337 COSTS_N_INSNS (1), /* mulsi_const9 */
338 COSTS_N_INSNS (1), /* muldi */
339 COSTS_N_INSNS (1), /* divsi */
340 COSTS_N_INSNS (1), /* divdi */
341 COSTS_N_INSNS (1), /* fp */
342 COSTS_N_INSNS (1), /* dmul */
343 COSTS_N_INSNS (1), /* sdiv */
344 COSTS_N_INSNS (1), /* ddiv */
347 /* Instruction costs on RIOS1 processors. */
349 struct processor_costs rios1_cost = {
350 COSTS_N_INSNS (5), /* mulsi */
351 COSTS_N_INSNS (4), /* mulsi_const */
352 COSTS_N_INSNS (3), /* mulsi_const9 */
353 COSTS_N_INSNS (5), /* muldi */
354 COSTS_N_INSNS (19), /* divsi */
355 COSTS_N_INSNS (19), /* divdi */
356 COSTS_N_INSNS (2), /* fp */
357 COSTS_N_INSNS (2), /* dmul */
358 COSTS_N_INSNS (19), /* sdiv */
359 COSTS_N_INSNS (19), /* ddiv */
362 /* Instruction costs on RIOS2 processors. */
364 struct processor_costs rios2_cost = {
365 COSTS_N_INSNS (2), /* mulsi */
366 COSTS_N_INSNS (2), /* mulsi_const */
367 COSTS_N_INSNS (2), /* mulsi_const9 */
368 COSTS_N_INSNS (2), /* muldi */
369 COSTS_N_INSNS (13), /* divsi */
370 COSTS_N_INSNS (13), /* divdi */
371 COSTS_N_INSNS (2), /* fp */
372 COSTS_N_INSNS (2), /* dmul */
373 COSTS_N_INSNS (17), /* sdiv */
374 COSTS_N_INSNS (17), /* ddiv */
377 /* Instruction costs on RS64A processors. */
379 struct processor_costs rs64a_cost = {
380 COSTS_N_INSNS (20), /* mulsi */
381 COSTS_N_INSNS (12), /* mulsi_const */
382 COSTS_N_INSNS (8), /* mulsi_const9 */
383 COSTS_N_INSNS (34), /* muldi */
384 COSTS_N_INSNS (65), /* divsi */
385 COSTS_N_INSNS (67), /* divdi */
386 COSTS_N_INSNS (4), /* fp */
387 COSTS_N_INSNS (4), /* dmul */
388 COSTS_N_INSNS (31), /* sdiv */
389 COSTS_N_INSNS (31), /* ddiv */
392 /* Instruction costs on MPCCORE processors. */
394 struct processor_costs mpccore_cost = {
395 COSTS_N_INSNS (2), /* mulsi */
396 COSTS_N_INSNS (2), /* mulsi_const */
397 COSTS_N_INSNS (2), /* mulsi_const9 */
398 COSTS_N_INSNS (2), /* muldi */
399 COSTS_N_INSNS (6), /* divsi */
400 COSTS_N_INSNS (6), /* divdi */
401 COSTS_N_INSNS (4), /* fp */
402 COSTS_N_INSNS (5), /* dmul */
403 COSTS_N_INSNS (10), /* sdiv */
404 COSTS_N_INSNS (17), /* ddiv */
407 /* Instruction costs on PPC403 processors. */
409 struct processor_costs ppc403_cost = {
410 COSTS_N_INSNS (4), /* mulsi */
411 COSTS_N_INSNS (4), /* mulsi_const */
412 COSTS_N_INSNS (4), /* mulsi_const9 */
413 COSTS_N_INSNS (4), /* muldi */
414 COSTS_N_INSNS (33), /* divsi */
415 COSTS_N_INSNS (33), /* divdi */
416 COSTS_N_INSNS (11), /* fp */
417 COSTS_N_INSNS (11), /* dmul */
418 COSTS_N_INSNS (11), /* sdiv */
419 COSTS_N_INSNS (11), /* ddiv */
422 /* Instruction costs on PPC405 processors. */
424 struct processor_costs ppc405_cost = {
425 COSTS_N_INSNS (5), /* mulsi */
426 COSTS_N_INSNS (4), /* mulsi_const */
427 COSTS_N_INSNS (3), /* mulsi_const9 */
428 COSTS_N_INSNS (5), /* muldi */
429 COSTS_N_INSNS (35), /* divsi */
430 COSTS_N_INSNS (35), /* divdi */
431 COSTS_N_INSNS (11), /* fp */
432 COSTS_N_INSNS (11), /* dmul */
433 COSTS_N_INSNS (11), /* sdiv */
434 COSTS_N_INSNS (11), /* ddiv */
437 /* Instruction costs on PPC440 processors. */
439 struct processor_costs ppc440_cost = {
440 COSTS_N_INSNS (3), /* mulsi */
441 COSTS_N_INSNS (2), /* mulsi_const */
442 COSTS_N_INSNS (2), /* mulsi_const9 */
443 COSTS_N_INSNS (3), /* muldi */
444 COSTS_N_INSNS (34), /* divsi */
445 COSTS_N_INSNS (34), /* divdi */
446 COSTS_N_INSNS (5), /* fp */
447 COSTS_N_INSNS (5), /* dmul */
448 COSTS_N_INSNS (19), /* sdiv */
449 COSTS_N_INSNS (33), /* ddiv */
452 /* Instruction costs on PPC601 processors. */
454 struct processor_costs ppc601_cost = {
455 COSTS_N_INSNS (5), /* mulsi */
456 COSTS_N_INSNS (5), /* mulsi_const */
457 COSTS_N_INSNS (5), /* mulsi_const9 */
458 COSTS_N_INSNS (5), /* muldi */
459 COSTS_N_INSNS (36), /* divsi */
460 COSTS_N_INSNS (36), /* divdi */
461 COSTS_N_INSNS (4), /* fp */
462 COSTS_N_INSNS (5), /* dmul */
463 COSTS_N_INSNS (17), /* sdiv */
464 COSTS_N_INSNS (31), /* ddiv */
467 /* Instruction costs on PPC603 processors. */
469 struct processor_costs ppc603_cost = {
470 COSTS_N_INSNS (5), /* mulsi */
471 COSTS_N_INSNS (3), /* mulsi_const */
472 COSTS_N_INSNS (2), /* mulsi_const9 */
473 COSTS_N_INSNS (5), /* muldi */
474 COSTS_N_INSNS (37), /* divsi */
475 COSTS_N_INSNS (37), /* divdi */
476 COSTS_N_INSNS (3), /* fp */
477 COSTS_N_INSNS (4), /* dmul */
478 COSTS_N_INSNS (18), /* sdiv */
479 COSTS_N_INSNS (33), /* ddiv */
482 /* Instruction costs on PPC604 processors. */
484 struct processor_costs ppc604_cost = {
485 COSTS_N_INSNS (4), /* mulsi */
486 COSTS_N_INSNS (4), /* mulsi_const */
487 COSTS_N_INSNS (4), /* mulsi_const9 */
488 COSTS_N_INSNS (4), /* muldi */
489 COSTS_N_INSNS (20), /* divsi */
490 COSTS_N_INSNS (20), /* divdi */
491 COSTS_N_INSNS (3), /* fp */
492 COSTS_N_INSNS (3), /* dmul */
493 COSTS_N_INSNS (18), /* sdiv */
494 COSTS_N_INSNS (32), /* ddiv */
497 /* Instruction costs on PPC604e processors. */
499 struct processor_costs ppc604e_cost = {
500 COSTS_N_INSNS (2), /* mulsi */
501 COSTS_N_INSNS (2), /* mulsi_const */
502 COSTS_N_INSNS (2), /* mulsi_const9 */
503 COSTS_N_INSNS (2), /* muldi */
504 COSTS_N_INSNS (20), /* divsi */
505 COSTS_N_INSNS (20), /* divdi */
506 COSTS_N_INSNS (3), /* fp */
507 COSTS_N_INSNS (3), /* dmul */
508 COSTS_N_INSNS (18), /* sdiv */
509 COSTS_N_INSNS (32), /* ddiv */
512 /* Instruction costs on PPC620 processors. */
514 struct processor_costs ppc620_cost = {
515 COSTS_N_INSNS (5), /* mulsi */
516 COSTS_N_INSNS (4), /* mulsi_const */
517 COSTS_N_INSNS (3), /* mulsi_const9 */
518 COSTS_N_INSNS (7), /* muldi */
519 COSTS_N_INSNS (21), /* divsi */
520 COSTS_N_INSNS (37), /* divdi */
521 COSTS_N_INSNS (3), /* fp */
522 COSTS_N_INSNS (3), /* dmul */
523 COSTS_N_INSNS (18), /* sdiv */
524 COSTS_N_INSNS (32), /* ddiv */
527 /* Instruction costs on PPC630 processors. */
529 struct processor_costs ppc630_cost = {
530 COSTS_N_INSNS (5), /* mulsi */
531 COSTS_N_INSNS (4), /* mulsi_const */
532 COSTS_N_INSNS (3), /* mulsi_const9 */
533 COSTS_N_INSNS (7), /* muldi */
534 COSTS_N_INSNS (21), /* divsi */
535 COSTS_N_INSNS (37), /* divdi */
536 COSTS_N_INSNS (3), /* fp */
537 COSTS_N_INSNS (3), /* dmul */
538 COSTS_N_INSNS (17), /* sdiv */
539 COSTS_N_INSNS (21), /* ddiv */
542 /* Instruction costs on PPC750 and PPC7400 processors. */
544 struct processor_costs ppc750_cost = {
545 COSTS_N_INSNS (5), /* mulsi */
546 COSTS_N_INSNS (3), /* mulsi_const */
547 COSTS_N_INSNS (2), /* mulsi_const9 */
548 COSTS_N_INSNS (5), /* muldi */
549 COSTS_N_INSNS (17), /* divsi */
550 COSTS_N_INSNS (17), /* divdi */
551 COSTS_N_INSNS (3), /* fp */
552 COSTS_N_INSNS (3), /* dmul */
553 COSTS_N_INSNS (17), /* sdiv */
554 COSTS_N_INSNS (31), /* ddiv */
557 /* Instruction costs on PPC7450 processors. */
559 struct processor_costs ppc7450_cost = {
560 COSTS_N_INSNS (4), /* mulsi */
561 COSTS_N_INSNS (3), /* mulsi_const */
562 COSTS_N_INSNS (3), /* mulsi_const9 */
563 COSTS_N_INSNS (4), /* muldi */
564 COSTS_N_INSNS (23), /* divsi */
565 COSTS_N_INSNS (23), /* divdi */
566 COSTS_N_INSNS (5), /* fp */
567 COSTS_N_INSNS (5), /* dmul */
568 COSTS_N_INSNS (21), /* sdiv */
569 COSTS_N_INSNS (35), /* ddiv */
572 /* Instruction costs on PPC8540 processors. */
574 struct processor_costs ppc8540_cost = {
575 COSTS_N_INSNS (4), /* mulsi */
576 COSTS_N_INSNS (4), /* mulsi_const */
577 COSTS_N_INSNS (4), /* mulsi_const9 */
578 COSTS_N_INSNS (4), /* muldi */
579 COSTS_N_INSNS (19), /* divsi */
580 COSTS_N_INSNS (19), /* divdi */
581 COSTS_N_INSNS (4), /* fp */
582 COSTS_N_INSNS (4), /* dmul */
583 COSTS_N_INSNS (29), /* sdiv */
584 COSTS_N_INSNS (29), /* ddiv */
587 /* Instruction costs on POWER4 and POWER5 processors. */
589 struct processor_costs power4_cost = {
590 COSTS_N_INSNS (3), /* mulsi */
591 COSTS_N_INSNS (2), /* mulsi_const */
592 COSTS_N_INSNS (2), /* mulsi_const9 */
593 COSTS_N_INSNS (4), /* muldi */
594 COSTS_N_INSNS (18), /* divsi */
595 COSTS_N_INSNS (34), /* divdi */
596 COSTS_N_INSNS (3), /* fp */
597 COSTS_N_INSNS (3), /* dmul */
598 COSTS_N_INSNS (17), /* sdiv */
599 COSTS_N_INSNS (17), /* ddiv */
603 static bool rs6000_function_ok_for_sibcall (tree, tree);
604 static int num_insns_constant_wide (HOST_WIDE_INT);
605 static void validate_condition_mode (enum rtx_code, enum machine_mode);
606 static rtx rs6000_generate_compare (enum rtx_code);
607 static void rs6000_maybe_dead (rtx);
608 static void rs6000_emit_stack_tie (void);
609 static void rs6000_frame_related (rtx, rtx, HOST_WIDE_INT, rtx, rtx);
610 static rtx spe_synthesize_frame_save (rtx);
611 static bool spe_func_has_64bit_regs_p (void);
612 static void emit_frame_save (rtx, rtx, enum machine_mode, unsigned int,
614 static rtx gen_frame_mem_offset (enum machine_mode, rtx, int);
615 static void rs6000_emit_allocate_stack (HOST_WIDE_INT, int);
616 static unsigned rs6000_hash_constant (rtx);
617 static unsigned toc_hash_function (const void *);
618 static int toc_hash_eq (const void *, const void *);
619 static int constant_pool_expr_1 (rtx, int *, int *);
620 static bool constant_pool_expr_p (rtx);
621 static bool toc_relative_expr_p (rtx);
622 static bool legitimate_small_data_p (enum machine_mode, rtx);
623 static bool legitimate_indexed_address_p (rtx, int);
624 static bool legitimate_indirect_address_p (rtx, int);
625 static bool macho_lo_sum_memory_operand (rtx x, enum machine_mode mode);
626 static bool legitimate_lo_sum_address_p (enum machine_mode, rtx, int);
627 static struct machine_function * rs6000_init_machine_status (void);
628 static bool rs6000_assemble_integer (rtx, unsigned int, int);
629 #ifdef HAVE_GAS_HIDDEN
630 static void rs6000_assemble_visibility (tree, int);
632 static int rs6000_ra_ever_killed (void);
633 static tree rs6000_handle_longcall_attribute (tree *, tree, tree, int, bool *);
634 static tree rs6000_handle_altivec_attribute (tree *, tree, tree, int, bool *);
635 static void rs6000_eliminate_indexed_memrefs (rtx operands[2]);
636 static const char *rs6000_mangle_fundamental_type (tree);
637 extern const struct attribute_spec rs6000_attribute_table[];
638 static void rs6000_set_default_type_attributes (tree);
639 static void rs6000_output_function_prologue (FILE *, HOST_WIDE_INT);
640 static void rs6000_output_function_epilogue (FILE *, HOST_WIDE_INT);
641 static void rs6000_output_mi_thunk (FILE *, tree, HOST_WIDE_INT, HOST_WIDE_INT,
643 static rtx rs6000_emit_set_long_const (rtx, HOST_WIDE_INT, HOST_WIDE_INT);
644 static bool rs6000_return_in_memory (tree, tree);
645 static void rs6000_file_start (void);
647 static unsigned int rs6000_elf_section_type_flags (tree, const char *, int);
648 static void rs6000_elf_asm_out_constructor (rtx, int);
649 static void rs6000_elf_asm_out_destructor (rtx, int);
650 static void rs6000_elf_select_section (tree, int, unsigned HOST_WIDE_INT);
651 static void rs6000_elf_unique_section (tree, int);
652 static void rs6000_elf_select_rtx_section (enum machine_mode, rtx,
653 unsigned HOST_WIDE_INT);
654 static void rs6000_elf_encode_section_info (tree, rtx, int)
656 static bool rs6000_elf_in_small_data_p (tree);
659 static void rs6000_xcoff_asm_globalize_label (FILE *, const char *);
660 static void rs6000_xcoff_asm_named_section (const char *, unsigned int, tree);
661 static void rs6000_xcoff_select_section (tree, int, unsigned HOST_WIDE_INT);
662 static void rs6000_xcoff_unique_section (tree, int);
663 static void rs6000_xcoff_select_rtx_section (enum machine_mode, rtx,
664 unsigned HOST_WIDE_INT);
665 static const char * rs6000_xcoff_strip_name_encoding (const char *);
666 static unsigned int rs6000_xcoff_section_type_flags (tree, const char *, int);
667 static void rs6000_xcoff_file_start (void);
668 static void rs6000_xcoff_file_end (void);
671 static bool rs6000_binds_local_p (tree);
673 static int rs6000_variable_issue (FILE *, int, rtx, int);
674 static bool rs6000_rtx_costs (rtx, int, int, int *);
675 static int rs6000_adjust_cost (rtx, rtx, rtx, int);
676 static bool is_microcoded_insn (rtx);
677 static int is_dispatch_slot_restricted (rtx);
678 static bool is_cracked_insn (rtx);
679 static bool is_branch_slot_insn (rtx);
680 static int rs6000_adjust_priority (rtx, int);
681 static int rs6000_issue_rate (void);
682 static bool rs6000_is_costly_dependence (rtx, rtx, rtx, int, int);
683 static rtx get_next_active_insn (rtx, rtx);
684 static bool insn_terminates_group_p (rtx , enum group_termination);
685 static bool is_costly_group (rtx *, rtx);
686 static int force_new_group (int, FILE *, rtx *, rtx, bool *, int, int *);
687 static int redefine_groups (FILE *, int, rtx, rtx);
688 static int pad_groups (FILE *, int, rtx, rtx);
689 static void rs6000_sched_finish (FILE *, int);
690 static int rs6000_use_sched_lookahead (void);
691 static tree rs6000_builtin_mask_for_load (void);
692 static tree rs6000_builtin_mask_for_store (void);
694 static void rs6000_init_builtins (void);
695 static rtx rs6000_expand_unop_builtin (enum insn_code, tree, rtx);
696 static rtx rs6000_expand_binop_builtin (enum insn_code, tree, rtx);
697 static rtx rs6000_expand_ternop_builtin (enum insn_code, tree, rtx);
698 static rtx rs6000_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
699 static void altivec_init_builtins (void);
700 static void rs6000_common_init_builtins (void);
701 static void rs6000_init_libfuncs (void);
703 static void enable_mask_for_builtins (struct builtin_description *, int,
704 enum rs6000_builtins,
705 enum rs6000_builtins);
706 static tree build_opaque_vector_type (tree, int);
707 static void spe_init_builtins (void);
708 static rtx spe_expand_builtin (tree, rtx, bool *);
709 static rtx spe_expand_stv_builtin (enum insn_code, tree);
710 static rtx spe_expand_predicate_builtin (enum insn_code, tree, rtx);
711 static rtx spe_expand_evsel_builtin (enum insn_code, tree, rtx);
712 static int rs6000_emit_int_cmove (rtx, rtx, rtx, rtx);
713 static rs6000_stack_t *rs6000_stack_info (void);
714 static void debug_stack_info (rs6000_stack_t *);
716 static rtx altivec_expand_builtin (tree, rtx, bool *);
717 static rtx altivec_expand_ld_builtin (tree, rtx, bool *);
718 static rtx altivec_expand_st_builtin (tree, rtx, bool *);
719 static rtx altivec_expand_dst_builtin (tree, rtx, bool *);
720 static rtx altivec_expand_abs_builtin (enum insn_code, tree, rtx);
721 static rtx altivec_expand_predicate_builtin (enum insn_code,
722 const char *, tree, rtx);
723 static rtx altivec_expand_lv_builtin (enum insn_code, tree, rtx);
724 static rtx altivec_expand_stv_builtin (enum insn_code, tree);
725 static void rs6000_parse_abi_options (void);
726 static void rs6000_parse_alignment_option (void);
727 static void rs6000_parse_tls_size_option (void);
728 static void rs6000_parse_yes_no_option (const char *, const char *, int *);
729 static void rs6000_parse_float_gprs_option (void);
730 static int first_altivec_reg_to_save (void);
731 static unsigned int compute_vrsave_mask (void);
732 static void compute_save_world_info(rs6000_stack_t *info_ptr);
733 static void is_altivec_return_reg (rtx, void *);
734 static rtx generate_set_vrsave (rtx, rs6000_stack_t *, int);
735 int easy_vector_constant (rtx, enum machine_mode);
736 static int easy_vector_same (rtx, enum machine_mode);
737 static int easy_vector_splat_const (int, enum machine_mode);
738 static bool is_ev64_opaque_type (tree);
739 static rtx rs6000_dwarf_register_span (rtx);
740 static rtx rs6000_legitimize_tls_address (rtx, enum tls_model);
741 static rtx rs6000_tls_get_addr (void);
742 static rtx rs6000_got_sym (void);
743 static inline int rs6000_tls_symbol_ref_1 (rtx *, void *);
744 static const char *rs6000_get_some_local_dynamic_name (void);
745 static int rs6000_get_some_local_dynamic_name_1 (rtx *, void *);
746 static rtx rs6000_complex_function_value (enum machine_mode);
747 static rtx rs6000_spe_function_arg (CUMULATIVE_ARGS *,
748 enum machine_mode, tree);
749 static rtx rs6000_mixed_function_arg (enum machine_mode, tree, int);
750 static void rs6000_move_block_from_reg (int regno, rtx x, int nregs);
751 static void setup_incoming_varargs (CUMULATIVE_ARGS *,
752 enum machine_mode, tree,
754 static bool rs6000_pass_by_reference (CUMULATIVE_ARGS *, enum machine_mode,
757 static void macho_branch_islands (void);
758 static void add_compiler_branch_island (tree, tree, int);
759 static int no_previous_def (tree function_name);
760 static tree get_prev_label (tree function_name);
761 static void rs6000_darwin_file_start (void);
764 static tree rs6000_build_builtin_va_list (void);
765 static tree rs6000_gimplify_va_arg (tree, tree, tree *, tree *);
766 static bool rs6000_must_pass_in_stack (enum machine_mode, tree);
767 static bool rs6000_vector_mode_supported_p (enum machine_mode);
768 static int get_vec_cmp_insn (enum rtx_code, enum machine_mode,
770 static rtx rs6000_emit_vector_compare (enum rtx_code, rtx, rtx,
772 static int get_vsel_insn (enum machine_mode);
773 static void rs6000_emit_vector_select (rtx, rtx, rtx, rtx);
776 const int INSN_NOT_AVAILABLE = -1;
777 static enum machine_mode rs6000_eh_return_filter_mode (void);
779 /* Hash table stuff for keeping track of TOC entries. */
781 struct toc_hash_struct GTY(())
783 /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
784 ASM_OUTPUT_SPECIAL_POOL_ENTRY_P. */
786 enum machine_mode key_mode;
790 static GTY ((param_is (struct toc_hash_struct))) htab_t toc_hash_table;
792 /* Default register names. */
793 char rs6000_reg_names[][8] =
795 "0", "1", "2", "3", "4", "5", "6", "7",
796 "8", "9", "10", "11", "12", "13", "14", "15",
797 "16", "17", "18", "19", "20", "21", "22", "23",
798 "24", "25", "26", "27", "28", "29", "30", "31",
799 "0", "1", "2", "3", "4", "5", "6", "7",
800 "8", "9", "10", "11", "12", "13", "14", "15",
801 "16", "17", "18", "19", "20", "21", "22", "23",
802 "24", "25", "26", "27", "28", "29", "30", "31",
803 "mq", "lr", "ctr","ap",
804 "0", "1", "2", "3", "4", "5", "6", "7",
806 /* AltiVec registers. */
807 "0", "1", "2", "3", "4", "5", "6", "7",
808 "8", "9", "10", "11", "12", "13", "14", "15",
809 "16", "17", "18", "19", "20", "21", "22", "23",
810 "24", "25", "26", "27", "28", "29", "30", "31",
816 #ifdef TARGET_REGNAMES
817 static const char alt_reg_names[][8] =
819 "%r0", "%r1", "%r2", "%r3", "%r4", "%r5", "%r6", "%r7",
820 "%r8", "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
821 "%r16", "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
822 "%r24", "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
823 "%f0", "%f1", "%f2", "%f3", "%f4", "%f5", "%f6", "%f7",
824 "%f8", "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
825 "%f16", "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
826 "%f24", "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
827 "mq", "lr", "ctr", "ap",
828 "%cr0", "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
830 /* AltiVec registers. */
831 "%v0", "%v1", "%v2", "%v3", "%v4", "%v5", "%v6", "%v7",
832 "%v8", "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
833 "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
834 "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
841 #ifndef MASK_STRICT_ALIGN
842 #define MASK_STRICT_ALIGN 0
844 #ifndef TARGET_PROFILE_KERNEL
845 #define TARGET_PROFILE_KERNEL 0
848 /* The VRSAVE bitmask puts bit %v0 as the most significant bit. */
849 #define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))
851 /* Return 1 for a symbol ref for a thread-local storage symbol. */
852 #define RS6000_SYMBOL_REF_TLS_P(RTX) \
853 (GET_CODE (RTX) == SYMBOL_REF && SYMBOL_REF_TLS_MODEL (RTX) != 0)
855 /* Initialize the GCC target structure. */
856 #undef TARGET_ATTRIBUTE_TABLE
857 #define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
858 #undef TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
859 #define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES rs6000_set_default_type_attributes
861 #undef TARGET_ASM_ALIGNED_DI_OP
862 #define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP
864 /* Default unaligned ops are only provided for ELF. Find the ops needed
865 for non-ELF systems. */
866 #ifndef OBJECT_FORMAT_ELF
868 /* For XCOFF. rs6000_assemble_integer will handle unaligned DIs on
870 #undef TARGET_ASM_UNALIGNED_HI_OP
871 #define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
872 #undef TARGET_ASM_UNALIGNED_SI_OP
873 #define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
874 #undef TARGET_ASM_UNALIGNED_DI_OP
875 #define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
878 #undef TARGET_ASM_UNALIGNED_HI_OP
879 #define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
880 #undef TARGET_ASM_UNALIGNED_SI_OP
881 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
882 #undef TARGET_ASM_UNALIGNED_DI_OP
883 #define TARGET_ASM_UNALIGNED_DI_OP "\t.quad\t"
884 #undef TARGET_ASM_ALIGNED_DI_OP
885 #define TARGET_ASM_ALIGNED_DI_OP "\t.quad\t"
889 /* This hook deals with fixups for relocatable code and DI-mode objects
891 #undef TARGET_ASM_INTEGER
892 #define TARGET_ASM_INTEGER rs6000_assemble_integer
894 #ifdef HAVE_GAS_HIDDEN
895 #undef TARGET_ASM_ASSEMBLE_VISIBILITY
896 #define TARGET_ASM_ASSEMBLE_VISIBILITY rs6000_assemble_visibility
899 #undef TARGET_HAVE_TLS
900 #define TARGET_HAVE_TLS HAVE_AS_TLS
902 #undef TARGET_CANNOT_FORCE_CONST_MEM
903 #define TARGET_CANNOT_FORCE_CONST_MEM rs6000_tls_referenced_p
905 #undef TARGET_ASM_FUNCTION_PROLOGUE
906 #define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
907 #undef TARGET_ASM_FUNCTION_EPILOGUE
908 #define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue
910 #undef TARGET_SCHED_VARIABLE_ISSUE
911 #define TARGET_SCHED_VARIABLE_ISSUE rs6000_variable_issue
913 #undef TARGET_SCHED_ISSUE_RATE
914 #define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
915 #undef TARGET_SCHED_ADJUST_COST
916 #define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
917 #undef TARGET_SCHED_ADJUST_PRIORITY
918 #define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority
919 #undef TARGET_SCHED_IS_COSTLY_DEPENDENCE
920 #define TARGET_SCHED_IS_COSTLY_DEPENDENCE rs6000_is_costly_dependence
921 #undef TARGET_SCHED_FINISH
922 #define TARGET_SCHED_FINISH rs6000_sched_finish
924 #undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
925 #define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD rs6000_use_sched_lookahead
927 #undef TARGET_VECTORIZE_BUILTIN_MASK_FOR_LOAD
928 #define TARGET_VECTORIZE_BUILTIN_MASK_FOR_LOAD rs6000_builtin_mask_for_load
930 #undef TARGET_VECTORIZE_BUILTIN_MASK_FOR_STORE
931 #define TARGET_VECTORIZE_BUILTIN_MASK_FOR_STORE rs6000_builtin_mask_for_store
933 #undef TARGET_INIT_BUILTINS
934 #define TARGET_INIT_BUILTINS rs6000_init_builtins
936 #undef TARGET_EXPAND_BUILTIN
937 #define TARGET_EXPAND_BUILTIN rs6000_expand_builtin
939 #undef TARGET_MANGLE_FUNDAMENTAL_TYPE
940 #define TARGET_MANGLE_FUNDAMENTAL_TYPE rs6000_mangle_fundamental_type
942 #undef TARGET_INIT_LIBFUNCS
943 #define TARGET_INIT_LIBFUNCS rs6000_init_libfuncs
946 #undef TARGET_BINDS_LOCAL_P
947 #define TARGET_BINDS_LOCAL_P rs6000_binds_local_p
950 #undef TARGET_ASM_OUTPUT_MI_THUNK
951 #define TARGET_ASM_OUTPUT_MI_THUNK rs6000_output_mi_thunk
953 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
954 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_tree_hwi_hwi_tree_true
956 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
957 #define TARGET_FUNCTION_OK_FOR_SIBCALL rs6000_function_ok_for_sibcall
959 #undef TARGET_RTX_COSTS
960 #define TARGET_RTX_COSTS rs6000_rtx_costs
961 #undef TARGET_ADDRESS_COST
962 #define TARGET_ADDRESS_COST hook_int_rtx_0
964 #undef TARGET_VECTOR_OPAQUE_P
965 #define TARGET_VECTOR_OPAQUE_P is_ev64_opaque_type
967 #undef TARGET_DWARF_REGISTER_SPAN
968 #define TARGET_DWARF_REGISTER_SPAN rs6000_dwarf_register_span
970 /* On rs6000, function arguments are promoted, as are function return
972 #undef TARGET_PROMOTE_FUNCTION_ARGS
973 #define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_tree_true
974 #undef TARGET_PROMOTE_FUNCTION_RETURN
975 #define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_tree_true
977 #undef TARGET_RETURN_IN_MEMORY
978 #define TARGET_RETURN_IN_MEMORY rs6000_return_in_memory
980 #undef TARGET_SETUP_INCOMING_VARARGS
981 #define TARGET_SETUP_INCOMING_VARARGS setup_incoming_varargs
983 /* Always strict argument naming on rs6000. */
984 #undef TARGET_STRICT_ARGUMENT_NAMING
985 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
986 #undef TARGET_PRETEND_OUTGOING_VARARGS_NAMED
987 #define TARGET_PRETEND_OUTGOING_VARARGS_NAMED hook_bool_CUMULATIVE_ARGS_true
988 #undef TARGET_SPLIT_COMPLEX_ARG
989 #define TARGET_SPLIT_COMPLEX_ARG hook_bool_tree_true
990 #undef TARGET_MUST_PASS_IN_STACK
991 #define TARGET_MUST_PASS_IN_STACK rs6000_must_pass_in_stack
992 #undef TARGET_PASS_BY_REFERENCE
993 #define TARGET_PASS_BY_REFERENCE rs6000_pass_by_reference
995 #undef TARGET_BUILD_BUILTIN_VA_LIST
996 #define TARGET_BUILD_BUILTIN_VA_LIST rs6000_build_builtin_va_list
998 #undef TARGET_GIMPLIFY_VA_ARG_EXPR
999 #define TARGET_GIMPLIFY_VA_ARG_EXPR rs6000_gimplify_va_arg
1001 #undef TARGET_EH_RETURN_FILTER_MODE
1002 #define TARGET_EH_RETURN_FILTER_MODE rs6000_eh_return_filter_mode
1004 #undef TARGET_VECTOR_MODE_SUPPORTED_P
1005 #define TARGET_VECTOR_MODE_SUPPORTED_P rs6000_vector_mode_supported_p
1007 struct gcc_target targetm = TARGET_INITIALIZER;
1010 /* Value is 1 if hard register REGNO can hold a value of machine-mode
1013 rs6000_hard_regno_mode_ok (int regno, enum machine_mode mode)
1015 /* The GPRs can hold any mode, but values bigger than one register
1016 cannot go past R31. */
1017 if (INT_REGNO_P (regno))
1018 return INT_REGNO_P (regno + HARD_REGNO_NREGS (regno, mode) - 1);
1020 /* The float registers can only hold floating modes and DImode. */
1021 if (FP_REGNO_P (regno))
1023 (GET_MODE_CLASS (mode) == MODE_FLOAT
1024 && FP_REGNO_P (regno + HARD_REGNO_NREGS (regno, mode) - 1))
1025 || (GET_MODE_CLASS (mode) == MODE_INT
1026 && GET_MODE_SIZE (mode) == UNITS_PER_FP_WORD);
1028 /* The CR register can only hold CC modes. */
1029 if (CR_REGNO_P (regno))
1030 return GET_MODE_CLASS (mode) == MODE_CC;
1032 if (XER_REGNO_P (regno))
1033 return mode == PSImode;
1035 /* AltiVec only in AldyVec registers. */
1036 if (ALTIVEC_REGNO_P (regno))
1037 return ALTIVEC_VECTOR_MODE (mode);
1039 /* ...but GPRs can hold SIMD data on the SPE in one register. */
1040 if (SPE_SIMD_REGNO_P (regno) && TARGET_SPE && SPE_VECTOR_MODE (mode))
1043 /* We cannot put TImode anywhere except general register and it must be
1044 able to fit within the register set. */
1046 return GET_MODE_SIZE (mode) <= UNITS_PER_WORD;
1049 /* Initialize rs6000_hard_regno_mode_ok_p table. */
1051 rs6000_init_hard_regno_mode_ok (void)
1055 for (r = 0; r < FIRST_PSEUDO_REGISTER; ++r)
1056 for (m = 0; m < NUM_MACHINE_MODES; ++m)
1057 if (rs6000_hard_regno_mode_ok (r, m))
1058 rs6000_hard_regno_mode_ok_p[m][r] = true;
1061 /* If not otherwise specified by a target, make 'long double' equivalent to
1064 #ifndef RS6000_DEFAULT_LONG_DOUBLE_SIZE
1065 #define RS6000_DEFAULT_LONG_DOUBLE_SIZE 64
1068 /* Override command line options. Mostly we process the processor
1069 type and sometimes adjust other TARGET_ options. */
1072 rs6000_override_options (const char *default_cpu)
1075 struct rs6000_cpu_select *ptr;
1078 /* Simplifications for entries below. */
1081 POWERPC_BASE_MASK = MASK_POWERPC | MASK_NEW_MNEMONICS,
1082 POWERPC_7400_MASK = POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_ALTIVEC
1085 /* This table occasionally claims that a processor does not support
1086 a particular feature even though it does, but the feature is slower
1087 than the alternative. Thus, it shouldn't be relied on as a
1088 complete description of the processor's support.
1090 Please keep this list in order, and don't forget to update the
1091 documentation in invoke.texi when adding a new processor or
1095 const char *const name; /* Canonical processor name. */
1096 const enum processor_type processor; /* Processor type enum value. */
1097 const int target_enable; /* Target flags to enable. */
1098 } const processor_target_table[]
1099 = {{"401", PROCESSOR_PPC403, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1100 {"403", PROCESSOR_PPC403,
1101 POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_STRICT_ALIGN},
1102 {"405", PROCESSOR_PPC405, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1103 {"405fp", PROCESSOR_PPC405, POWERPC_BASE_MASK},
1104 {"440", PROCESSOR_PPC440, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1105 {"440fp", PROCESSOR_PPC440, POWERPC_BASE_MASK},
1106 {"505", PROCESSOR_MPCCORE, POWERPC_BASE_MASK},
1107 {"601", PROCESSOR_PPC601,
1108 MASK_POWER | POWERPC_BASE_MASK | MASK_MULTIPLE | MASK_STRING},
1109 {"602", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1110 {"603", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1111 {"603e", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1112 {"604", PROCESSOR_PPC604, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1113 {"604e", PROCESSOR_PPC604e, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1114 {"620", PROCESSOR_PPC620,
1115 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
1116 {"630", PROCESSOR_PPC630,
1117 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
1118 {"740", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1119 {"7400", PROCESSOR_PPC7400, POWERPC_7400_MASK},
1120 {"7450", PROCESSOR_PPC7450, POWERPC_7400_MASK},
1121 {"750", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1122 {"801", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1123 {"821", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1124 {"823", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1125 {"8540", PROCESSOR_PPC8540, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1126 /* 8548 has a dummy entry for now. */
1127 {"8548", PROCESSOR_PPC8540, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1128 {"860", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1129 {"970", PROCESSOR_POWER4,
1130 POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
1131 {"common", PROCESSOR_COMMON, MASK_NEW_MNEMONICS},
1132 {"ec603e", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1133 {"G3", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1134 {"G4", PROCESSOR_PPC7450, POWERPC_7400_MASK},
1135 {"G5", PROCESSOR_POWER4,
1136 POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
1137 {"power", PROCESSOR_POWER, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1138 {"power2", PROCESSOR_POWER,
1139 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING},
1140 {"power3", PROCESSOR_PPC630,
1141 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
1142 {"power4", PROCESSOR_POWER4,
1143 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_MFCRF | MASK_POWERPC64},
1144 {"power5", PROCESSOR_POWER5,
1145 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_MFCRF | MASK_POWERPC64},
1146 {"powerpc", PROCESSOR_POWERPC, POWERPC_BASE_MASK},
1147 {"powerpc64", PROCESSOR_POWERPC64,
1148 POWERPC_BASE_MASK | MASK_POWERPC64},
1149 {"rios", PROCESSOR_RIOS1, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1150 {"rios1", PROCESSOR_RIOS1, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1151 {"rios2", PROCESSOR_RIOS2,
1152 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING},
1153 {"rsc", PROCESSOR_PPC601, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1154 {"rsc1", PROCESSOR_PPC601, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1155 {"rs64a", PROCESSOR_RS64A, POWERPC_BASE_MASK | MASK_POWERPC64},
1158 const size_t ptt_size = ARRAY_SIZE (processor_target_table);
1160 /* Some OSs don't support saving the high part of 64-bit registers on
1161 context switch. Other OSs don't support saving Altivec registers.
1162 On those OSs, we don't touch the MASK_POWERPC64 or MASK_ALTIVEC
1163 settings; if the user wants either, the user must explicitly specify
1164 them and we won't interfere with the user's specification. */
1167 POWER_MASKS = MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING,
1168 POWERPC_MASKS = (POWERPC_BASE_MASK | MASK_PPC_GPOPT
1169 | MASK_PPC_GFXOPT | MASK_POWERPC64 | MASK_ALTIVEC
1173 rs6000_init_hard_regno_mode_ok ();
1175 set_masks = POWER_MASKS | POWERPC_MASKS | MASK_SOFT_FLOAT;
1176 #ifdef OS_MISSING_POWERPC64
1177 if (OS_MISSING_POWERPC64)
1178 set_masks &= ~MASK_POWERPC64;
1180 #ifdef OS_MISSING_ALTIVEC
1181 if (OS_MISSING_ALTIVEC)
1182 set_masks &= ~MASK_ALTIVEC;
1185 /* Don't override these by the processor default if given explicitly. */
1186 set_masks &= ~(target_flags_explicit
1187 & (MASK_MULTIPLE | MASK_STRING | MASK_SOFT_FLOAT));
1189 /* Identify the processor type. */
1190 rs6000_select[0].string = default_cpu;
1191 rs6000_cpu = TARGET_POWERPC64 ? PROCESSOR_DEFAULT64 : PROCESSOR_DEFAULT;
1193 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
1195 ptr = &rs6000_select[i];
1196 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
1198 for (j = 0; j < ptt_size; j++)
1199 if (! strcmp (ptr->string, processor_target_table[j].name))
1201 if (ptr->set_tune_p)
1202 rs6000_cpu = processor_target_table[j].processor;
1204 if (ptr->set_arch_p)
1206 target_flags &= ~set_masks;
1207 target_flags |= (processor_target_table[j].target_enable
1214 error ("bad value (%s) for %s switch", ptr->string, ptr->name);
1221 /* If we are optimizing big endian systems for space, use the load/store
1222 multiple and string instructions. */
1223 if (BYTES_BIG_ENDIAN && optimize_size)
1224 target_flags |= ~target_flags_explicit & (MASK_MULTIPLE | MASK_STRING);
1226 /* Don't allow -mmultiple or -mstring on little endian systems
1227 unless the cpu is a 750, because the hardware doesn't support the
1228 instructions used in little endian mode, and causes an alignment
1229 trap. The 750 does not cause an alignment trap (except when the
1230 target is unaligned). */
1232 if (!BYTES_BIG_ENDIAN && rs6000_cpu != PROCESSOR_PPC750)
1234 if (TARGET_MULTIPLE)
1236 target_flags &= ~MASK_MULTIPLE;
1237 if ((target_flags_explicit & MASK_MULTIPLE) != 0)
1238 warning ("-mmultiple is not supported on little endian systems");
1243 target_flags &= ~MASK_STRING;
1244 if ((target_flags_explicit & MASK_STRING) != 0)
1245 warning ("-mstring is not supported on little endian systems");
1249 /* Set debug flags */
1250 if (rs6000_debug_name)
1252 if (! strcmp (rs6000_debug_name, "all"))
1253 rs6000_debug_stack = rs6000_debug_arg = 1;
1254 else if (! strcmp (rs6000_debug_name, "stack"))
1255 rs6000_debug_stack = 1;
1256 else if (! strcmp (rs6000_debug_name, "arg"))
1257 rs6000_debug_arg = 1;
1259 error ("unknown -mdebug-%s switch", rs6000_debug_name);
1262 if (rs6000_traceback_name)
1264 if (! strncmp (rs6000_traceback_name, "full", 4))
1265 rs6000_traceback = traceback_full;
1266 else if (! strncmp (rs6000_traceback_name, "part", 4))
1267 rs6000_traceback = traceback_part;
1268 else if (! strncmp (rs6000_traceback_name, "no", 2))
1269 rs6000_traceback = traceback_none;
1271 error ("unknown -mtraceback arg `%s'; expecting `full', `partial' or `none'",
1272 rs6000_traceback_name);
1275 /* Set size of long double */
1276 rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
1277 if (rs6000_long_double_size_string)
1280 int size = strtol (rs6000_long_double_size_string, &tail, 10);
1281 if (*tail != '\0' || (size != 64 && size != 128))
1282 error ("Unknown switch -mlong-double-%s",
1283 rs6000_long_double_size_string);
1285 rs6000_long_double_type_size = size;
1288 /* Set Altivec ABI as default for powerpc64 linux. */
1289 if (TARGET_ELF && TARGET_64BIT)
1291 rs6000_altivec_abi = 1;
1292 rs6000_altivec_vrsave = 1;
1295 /* Handle -mabi= options. */
1296 rs6000_parse_abi_options ();
1298 /* Handle -malign-XXXXX option. */
1299 rs6000_parse_alignment_option ();
1301 rs6000_parse_float_gprs_option ();
1303 /* Handle generic -mFOO=YES/NO options. */
1304 rs6000_parse_yes_no_option ("vrsave", rs6000_altivec_vrsave_string,
1305 &rs6000_altivec_vrsave);
1306 rs6000_parse_yes_no_option ("isel", rs6000_isel_string,
1308 rs6000_parse_yes_no_option ("spe", rs6000_spe_string, &rs6000_spe);
1310 /* Handle -mtls-size option. */
1311 rs6000_parse_tls_size_option ();
1313 #ifdef SUBTARGET_OVERRIDE_OPTIONS
1314 SUBTARGET_OVERRIDE_OPTIONS;
1316 #ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
1317 SUBSUBTARGET_OVERRIDE_OPTIONS;
1319 #ifdef SUB3TARGET_OVERRIDE_OPTIONS
1320 SUB3TARGET_OVERRIDE_OPTIONS;
1326 error ("AltiVec and E500 instructions cannot coexist");
1328 /* The e500 does not have string instructions, and we set
1329 MASK_STRING above when optimizing for size. */
1330 if ((target_flags & MASK_STRING) != 0)
1331 target_flags = target_flags & ~MASK_STRING;
1333 /* No SPE means 64-bit long doubles, even if an E500. */
1334 if (rs6000_spe_string != 0
1335 && !strcmp (rs6000_spe_string, "no"))
1336 rs6000_long_double_type_size = 64;
1338 else if (rs6000_select[1].string != NULL)
1340 /* For the powerpc-eabispe configuration, we set all these by
1341 default, so let's unset them if we manually set another
1342 CPU that is not the E500. */
1343 if (rs6000_abi_string == 0)
1345 if (rs6000_spe_string == 0)
1347 if (rs6000_float_gprs_string == 0)
1348 rs6000_float_gprs = 0;
1349 if (rs6000_isel_string == 0)
1351 if (rs6000_long_double_size_string == 0)
1352 rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
1355 rs6000_always_hint = (rs6000_cpu != PROCESSOR_POWER4
1356 && rs6000_cpu != PROCESSOR_POWER5);
1357 rs6000_sched_groups = (rs6000_cpu == PROCESSOR_POWER4
1358 || rs6000_cpu == PROCESSOR_POWER5);
1360 /* Handle -m(no-)longcall option. This is a bit of a cheap hack,
1361 using TARGET_OPTIONS to handle a toggle switch, but we're out of
1362 bits in target_flags so TARGET_SWITCHES cannot be used.
1363 Assumption here is that rs6000_longcall_switch points into the
1364 text of the complete option, rather than being a copy, so we can
1365 scan back for the presence or absence of the no- modifier. */
1366 if (rs6000_longcall_switch)
1368 const char *base = rs6000_longcall_switch;
1369 while (base[-1] != 'm') base--;
1371 if (*rs6000_longcall_switch != '\0')
1372 error ("invalid option `%s'", base);
1373 rs6000_default_long_calls = (base[0] != 'n');
1376 /* Handle -m(no-)warn-altivec-long similarly. */
1377 if (rs6000_warn_altivec_long_switch)
1379 const char *base = rs6000_warn_altivec_long_switch;
1380 while (base[-1] != 'm') base--;
1382 if (*rs6000_warn_altivec_long_switch != '\0')
1383 error ("invalid option `%s'", base);
1384 rs6000_warn_altivec_long = (base[0] != 'n');
1387 /* Handle -mprioritize-restricted-insns option. */
1388 rs6000_sched_restricted_insns_priority
1389 = (rs6000_sched_groups ? 1 : 0);
1390 if (rs6000_sched_restricted_insns_priority_str)
1391 rs6000_sched_restricted_insns_priority =
1392 atoi (rs6000_sched_restricted_insns_priority_str);
1394 /* Handle -msched-costly-dep option. */
1395 rs6000_sched_costly_dep
1396 = (rs6000_sched_groups ? store_to_load_dep_costly : no_dep_costly);
1397 if (rs6000_sched_costly_dep_str)
1399 if (! strcmp (rs6000_sched_costly_dep_str, "no"))
1400 rs6000_sched_costly_dep = no_dep_costly;
1401 else if (! strcmp (rs6000_sched_costly_dep_str, "all"))
1402 rs6000_sched_costly_dep = all_deps_costly;
1403 else if (! strcmp (rs6000_sched_costly_dep_str, "true_store_to_load"))
1404 rs6000_sched_costly_dep = true_store_to_load_dep_costly;
1405 else if (! strcmp (rs6000_sched_costly_dep_str, "store_to_load"))
1406 rs6000_sched_costly_dep = store_to_load_dep_costly;
1408 rs6000_sched_costly_dep = atoi (rs6000_sched_costly_dep_str);
1411 /* Handle -minsert-sched-nops option. */
1412 rs6000_sched_insert_nops
1413 = (rs6000_sched_groups ? sched_finish_regroup_exact : sched_finish_none);
1414 if (rs6000_sched_insert_nops_str)
1416 if (! strcmp (rs6000_sched_insert_nops_str, "no"))
1417 rs6000_sched_insert_nops = sched_finish_none;
1418 else if (! strcmp (rs6000_sched_insert_nops_str, "pad"))
1419 rs6000_sched_insert_nops = sched_finish_pad_groups;
1420 else if (! strcmp (rs6000_sched_insert_nops_str, "regroup_exact"))
1421 rs6000_sched_insert_nops = sched_finish_regroup_exact;
1423 rs6000_sched_insert_nops = atoi (rs6000_sched_insert_nops_str);
1426 #ifdef TARGET_REGNAMES
1427 /* If the user desires alternate register names, copy in the
1428 alternate names now. */
1429 if (TARGET_REGNAMES)
1430 memcpy (rs6000_reg_names, alt_reg_names, sizeof (rs6000_reg_names));
1433 /* Set TARGET_AIX_STRUCT_RET last, after the ABI is determined.
1434 If -maix-struct-return or -msvr4-struct-return was explicitly
1435 used, don't override with the ABI default. */
1436 if ((target_flags_explicit & MASK_AIX_STRUCT_RET) == 0)
1438 if (DEFAULT_ABI == ABI_V4 && !DRAFT_V4_STRUCT_RET)
1439 target_flags = (target_flags & ~MASK_AIX_STRUCT_RET);
1441 target_flags |= MASK_AIX_STRUCT_RET;
1444 if (TARGET_LONG_DOUBLE_128
1445 && (DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_DARWIN))
1446 REAL_MODE_FORMAT (TFmode) = &ibm_extended_format;
1448 /* Allocate an alias set for register saves & restores from stack. */
1449 rs6000_sr_alias_set = new_alias_set ();
1452 ASM_GENERATE_INTERNAL_LABEL (toc_label_name, "LCTOC", 1);
1454 /* We can only guarantee the availability of DI pseudo-ops when
1455 assembling for 64-bit targets. */
1458 targetm.asm_out.aligned_op.di = NULL;
1459 targetm.asm_out.unaligned_op.di = NULL;
1462 /* Set branch target alignment, if not optimizing for size. */
1465 if (rs6000_sched_groups)
1467 if (align_functions <= 0)
1468 align_functions = 16;
1469 if (align_jumps <= 0)
1471 if (align_loops <= 0)
1474 if (align_jumps_max_skip <= 0)
1475 align_jumps_max_skip = 15;
1476 if (align_loops_max_skip <= 0)
1477 align_loops_max_skip = 15;
1480 /* Arrange to save and restore machine status around nested functions. */
1481 init_machine_status = rs6000_init_machine_status;
1483 /* We should always be splitting complex arguments, but we can't break
1484 Linux and Darwin ABIs at the moment. For now, only AIX is fixed. */
1485 if (DEFAULT_ABI != ABI_AIX)
1486 targetm.calls.split_complex_arg = NULL;
1488 /* Initialize rs6000_cost with the appropriate target costs. */
1490 rs6000_cost = TARGET_POWERPC64 ? &size64_cost : &size32_cost;
1494 case PROCESSOR_RIOS1:
1495 rs6000_cost = &rios1_cost;
1498 case PROCESSOR_RIOS2:
1499 rs6000_cost = &rios2_cost;
1502 case PROCESSOR_RS64A:
1503 rs6000_cost = &rs64a_cost;
1506 case PROCESSOR_MPCCORE:
1507 rs6000_cost = &mpccore_cost;
1510 case PROCESSOR_PPC403:
1511 rs6000_cost = &ppc403_cost;
1514 case PROCESSOR_PPC405:
1515 rs6000_cost = &ppc405_cost;
1518 case PROCESSOR_PPC440:
1519 rs6000_cost = &ppc440_cost;
1522 case PROCESSOR_PPC601:
1523 rs6000_cost = &ppc601_cost;
1526 case PROCESSOR_PPC603:
1527 rs6000_cost = &ppc603_cost;
1530 case PROCESSOR_PPC604:
1531 rs6000_cost = &ppc604_cost;
1534 case PROCESSOR_PPC604e:
1535 rs6000_cost = &ppc604e_cost;
1538 case PROCESSOR_PPC620:
1539 rs6000_cost = &ppc620_cost;
1542 case PROCESSOR_PPC630:
1543 rs6000_cost = &ppc630_cost;
1546 case PROCESSOR_PPC750:
1547 case PROCESSOR_PPC7400:
1548 rs6000_cost = &ppc750_cost;
1551 case PROCESSOR_PPC7450:
1552 rs6000_cost = &ppc7450_cost;
1555 case PROCESSOR_PPC8540:
1556 rs6000_cost = &ppc8540_cost;
1559 case PROCESSOR_POWER4:
1560 case PROCESSOR_POWER5:
1561 rs6000_cost = &power4_cost;
1569 /* Implement targetm.vectorize.builtin_mask_for_load. */
1571 rs6000_builtin_mask_for_load (void)
1574 return altivec_builtin_mask_for_load;
1579 /* Implement targetm.vectorize.builtin_mask_for_store. */
1581 rs6000_builtin_mask_for_store (void)
1584 return altivec_builtin_mask_for_store;
1589 /* Handle generic options of the form -mfoo=yes/no.
1590 NAME is the option name.
1591 VALUE is the option value.
1592 FLAG is the pointer to the flag where to store a 1 or 0, depending on
1593 whether the option value is 'yes' or 'no' respectively. */
1595 rs6000_parse_yes_no_option (const char *name, const char *value, int *flag)
1599 else if (!strcmp (value, "yes"))
1601 else if (!strcmp (value, "no"))
1604 error ("unknown -m%s= option specified: '%s'", name, value);
1607 /* Handle -mabi= options. */
1609 rs6000_parse_abi_options (void)
1611 if (rs6000_abi_string == 0)
1613 else if (! strcmp (rs6000_abi_string, "altivec"))
1615 rs6000_altivec_abi = 1;
1618 else if (! strcmp (rs6000_abi_string, "no-altivec"))
1619 rs6000_altivec_abi = 0;
1620 else if (! strcmp (rs6000_abi_string, "spe"))
1623 rs6000_altivec_abi = 0;
1624 if (!TARGET_SPE_ABI)
1625 error ("not configured for ABI: '%s'", rs6000_abi_string);
1628 else if (! strcmp (rs6000_abi_string, "no-spe"))
1631 error ("unknown ABI specified: '%s'", rs6000_abi_string);
1634 /* Handle -mfloat-gprs= options. */
1636 rs6000_parse_float_gprs_option (void)
1638 if (rs6000_float_gprs_string == 0)
1640 else if (! strcmp (rs6000_float_gprs_string, "yes")
1641 || ! strcmp (rs6000_float_gprs_string, "single"))
1642 rs6000_float_gprs = 1;
1643 else if (! strcmp (rs6000_float_gprs_string, "double"))
1644 rs6000_float_gprs = 2;
1645 else if (! strcmp (rs6000_float_gprs_string, "no"))
1646 rs6000_float_gprs = 0;
1648 error ("invalid option for -mfloat-gprs");
1651 /* Handle -malign-XXXXXX options. */
1653 rs6000_parse_alignment_option (void)
1655 if (rs6000_alignment_string == 0)
1657 else if (! strcmp (rs6000_alignment_string, "power"))
1658 rs6000_alignment_flags = MASK_ALIGN_POWER;
1659 else if (! strcmp (rs6000_alignment_string, "natural"))
1660 rs6000_alignment_flags = MASK_ALIGN_NATURAL;
1662 error ("unknown -malign-XXXXX option specified: '%s'",
1663 rs6000_alignment_string);
1666 /* Validate and record the size specified with the -mtls-size option. */
1669 rs6000_parse_tls_size_option (void)
1671 if (rs6000_tls_size_string == 0)
1673 else if (strcmp (rs6000_tls_size_string, "16") == 0)
1674 rs6000_tls_size = 16;
1675 else if (strcmp (rs6000_tls_size_string, "32") == 0)
1676 rs6000_tls_size = 32;
1677 else if (strcmp (rs6000_tls_size_string, "64") == 0)
1678 rs6000_tls_size = 64;
1680 error ("bad value `%s' for -mtls-size switch", rs6000_tls_size_string);
1684 optimization_options (int level ATTRIBUTE_UNUSED, int size ATTRIBUTE_UNUSED)
1688 /* Do anything needed at the start of the asm file. */
1691 rs6000_file_start (void)
1695 const char *start = buffer;
1696 struct rs6000_cpu_select *ptr;
1697 const char *default_cpu = TARGET_CPU_DEFAULT;
1698 FILE *file = asm_out_file;
1700 default_file_start ();
1702 #ifdef TARGET_BI_ARCH
1703 if ((TARGET_DEFAULT ^ target_flags) & MASK_64BIT)
1707 if (flag_verbose_asm)
1709 sprintf (buffer, "\n%s rs6000/powerpc options:", ASM_COMMENT_START);
1710 rs6000_select[0].string = default_cpu;
1712 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
1714 ptr = &rs6000_select[i];
1715 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
1717 fprintf (file, "%s %s%s", start, ptr->name, ptr->string);
1722 #ifdef USING_ELFOS_H
1723 switch (rs6000_sdata)
1725 case SDATA_NONE: fprintf (file, "%s -msdata=none", start); start = ""; break;
1726 case SDATA_DATA: fprintf (file, "%s -msdata=data", start); start = ""; break;
1727 case SDATA_SYSV: fprintf (file, "%s -msdata=sysv", start); start = ""; break;
1728 case SDATA_EABI: fprintf (file, "%s -msdata=eabi", start); start = ""; break;
1731 if (rs6000_sdata && g_switch_value)
1733 fprintf (file, "%s -G " HOST_WIDE_INT_PRINT_UNSIGNED, start,
1745 /* Return nonzero if this function is known to have a null epilogue. */
1748 direct_return (void)
1750 if (reload_completed)
1752 rs6000_stack_t *info = rs6000_stack_info ();
1754 if (info->first_gp_reg_save == 32
1755 && info->first_fp_reg_save == 64
1756 && info->first_altivec_reg_save == LAST_ALTIVEC_REGNO + 1
1757 && ! info->lr_save_p
1758 && ! info->cr_save_p
1759 && info->vrsave_mask == 0
1767 /* Returns 1 always. */
1770 any_operand (rtx op ATTRIBUTE_UNUSED,
1771 enum machine_mode mode ATTRIBUTE_UNUSED)
1776 /* Returns 1 always. */
1779 any_parallel_operand (rtx op ATTRIBUTE_UNUSED,
1780 enum machine_mode mode ATTRIBUTE_UNUSED)
1785 /* Returns 1 if op is the count register. */
1788 count_register_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1790 if (GET_CODE (op) != REG)
1793 if (REGNO (op) == COUNT_REGISTER_REGNUM)
1796 if (REGNO (op) > FIRST_PSEUDO_REGISTER)
1802 /* Returns 1 if op is an altivec register. */
1805 altivec_register_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1807 return (register_operand (op, mode)
1808 && (GET_CODE (op) != REG
1809 || REGNO (op) > FIRST_PSEUDO_REGISTER
1810 || ALTIVEC_REGNO_P (REGNO (op))));
1814 xer_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1816 if (GET_CODE (op) != REG)
1819 if (XER_REGNO_P (REGNO (op)))
1825 /* Return 1 if OP is a signed 8-bit constant. Int multiplication
1826 by such constants completes more quickly. */
1829 s8bit_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1831 return (GET_CODE (op) == CONST_INT
1832 && (INTVAL (op) >= -128 && INTVAL (op) <= 127));
1835 /* Return 1 if OP is a constant that can fit in a D field. */
1838 short_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1840 return (GET_CODE (op) == CONST_INT
1841 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'I'));
1844 /* Similar for an unsigned D field. */
1847 u_short_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1849 return (GET_CODE (op) == CONST_INT
1850 && CONST_OK_FOR_LETTER_P (INTVAL (op) & GET_MODE_MASK (mode), 'K'));
1853 /* Return 1 if OP is a CONST_INT that cannot fit in a signed D field. */
1856 non_short_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1858 return (GET_CODE (op) == CONST_INT
1859 && (unsigned HOST_WIDE_INT) (INTVAL (op) + 0x8000) >= 0x10000);
1862 /* Returns 1 if OP is a CONST_INT that is a positive value
1863 and an exact power of 2. */
1866 exact_log2_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1868 return (GET_CODE (op) == CONST_INT
1870 && exact_log2 (INTVAL (op)) >= 0);
1873 /* Returns 1 if OP is a register that is not special (i.e., not MQ,
1877 gpc_reg_operand (rtx op, enum machine_mode mode)
1879 return (register_operand (op, mode)
1880 && (GET_CODE (op) != REG
1881 || (REGNO (op) >= ARG_POINTER_REGNUM
1882 && !XER_REGNO_P (REGNO (op)))
1883 || REGNO (op) < MQ_REGNO));
1886 /* Returns 1 if OP is either a pseudo-register or a register denoting a
1890 cc_reg_operand (rtx op, enum machine_mode mode)
1892 return (register_operand (op, mode)
1893 && (GET_CODE (op) != REG
1894 || REGNO (op) >= FIRST_PSEUDO_REGISTER
1895 || CR_REGNO_P (REGNO (op))));
1898 /* Returns 1 if OP is either a pseudo-register or a register denoting a
1899 CR field that isn't CR0. */
1902 cc_reg_not_cr0_operand (rtx op, enum machine_mode mode)
1904 return (register_operand (op, mode)
1905 && (GET_CODE (op) != REG
1906 || REGNO (op) >= FIRST_PSEUDO_REGISTER
1907 || CR_REGNO_NOT_CR0_P (REGNO (op))));
1910 /* Returns 1 if OP is either a constant integer valid for a D-field or
1911 a non-special register. If a register, it must be in the proper
1912 mode unless MODE is VOIDmode. */
1915 reg_or_short_operand (rtx op, enum machine_mode mode)
1917 return short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
1920 /* Similar, except check if the negation of the constant would be
1921 valid for a D-field. Don't allow a constant zero, since all the
1922 patterns that call this predicate use "addic r1,r2,-constant" on
1923 a constant value to set a carry when r2 is greater or equal to
1924 "constant". That doesn't work for zero. */
1927 reg_or_neg_short_operand (rtx op, enum machine_mode mode)
1929 if (GET_CODE (op) == CONST_INT)
1930 return CONST_OK_FOR_LETTER_P (INTVAL (op), 'P') && INTVAL (op) != 0;
1932 return gpc_reg_operand (op, mode);
1935 /* Returns 1 if OP is either a constant integer valid for a DS-field or
1936 a non-special register. If a register, it must be in the proper
1937 mode unless MODE is VOIDmode. */
1940 reg_or_aligned_short_operand (rtx op, enum machine_mode mode)
1942 if (gpc_reg_operand (op, mode))
1944 else if (short_cint_operand (op, mode) && !(INTVAL (op) & 3))
1951 /* Return 1 if the operand is either a register or an integer whose
1952 high-order 16 bits are zero. */
1955 reg_or_u_short_operand (rtx op, enum machine_mode mode)
1957 return u_short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
1960 /* Return 1 is the operand is either a non-special register or ANY
1961 constant integer. */
1964 reg_or_cint_operand (rtx op, enum machine_mode mode)
1966 return (GET_CODE (op) == CONST_INT || gpc_reg_operand (op, mode));
1969 /* Return 1 is the operand is either a non-special register or ANY
1970 32-bit signed constant integer. */
1973 reg_or_arith_cint_operand (rtx op, enum machine_mode mode)
1975 return (gpc_reg_operand (op, mode)
1976 || (GET_CODE (op) == CONST_INT
1977 #if HOST_BITS_PER_WIDE_INT != 32
1978 && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80000000)
1979 < (unsigned HOST_WIDE_INT) 0x100000000ll)
1984 /* Return 1 is the operand is either a non-special register or a 32-bit
1985 signed constant integer valid for 64-bit addition. */
1988 reg_or_add_cint64_operand (rtx op, enum machine_mode mode)
1990 return (gpc_reg_operand (op, mode)
1991 || (GET_CODE (op) == CONST_INT
1992 #if HOST_BITS_PER_WIDE_INT == 32
1993 && INTVAL (op) < 0x7fff8000
1995 && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80008000)
2001 /* Return 1 is the operand is either a non-special register or a 32-bit
2002 signed constant integer valid for 64-bit subtraction. */
2005 reg_or_sub_cint64_operand (rtx op, enum machine_mode mode)
2007 return (gpc_reg_operand (op, mode)
2008 || (GET_CODE (op) == CONST_INT
2009 #if HOST_BITS_PER_WIDE_INT == 32
2010 && (- INTVAL (op)) < 0x7fff8000
2012 && ((unsigned HOST_WIDE_INT) ((- INTVAL (op)) + 0x80008000)
2018 /* Return 1 is the operand is either a non-special register or ANY
2019 32-bit unsigned constant integer. */
2022 reg_or_logical_cint_operand (rtx op, enum machine_mode mode)
2024 if (GET_CODE (op) == CONST_INT)
2026 if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
2028 if (GET_MODE_BITSIZE (mode) <= 32)
2031 if (INTVAL (op) < 0)
2035 return ((INTVAL (op) & GET_MODE_MASK (mode)
2036 & (~ (unsigned HOST_WIDE_INT) 0xffffffff)) == 0);
2038 else if (GET_CODE (op) == CONST_DOUBLE)
2040 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
2044 return CONST_DOUBLE_HIGH (op) == 0;
2047 return gpc_reg_operand (op, mode);
2050 /* Return 1 if the operand is an operand that can be loaded via the GOT. */
2053 got_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2055 return (GET_CODE (op) == SYMBOL_REF
2056 || GET_CODE (op) == CONST
2057 || GET_CODE (op) == LABEL_REF);
2060 /* Return 1 if the operand is a simple references that can be loaded via
2061 the GOT (labels involving addition aren't allowed). */
2064 got_no_const_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2066 return (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF);
2069 /* Return the number of instructions it takes to form a constant in an
2070 integer register. */
2073 num_insns_constant_wide (HOST_WIDE_INT value)
2075 /* signed constant loadable with {cal|addi} */
2076 if (CONST_OK_FOR_LETTER_P (value, 'I'))
2079 /* constant loadable with {cau|addis} */
2080 else if (CONST_OK_FOR_LETTER_P (value, 'L'))
2083 #if HOST_BITS_PER_WIDE_INT == 64
2084 else if (TARGET_POWERPC64)
2086 HOST_WIDE_INT low = ((value & 0xffffffff) ^ 0x80000000) - 0x80000000;
2087 HOST_WIDE_INT high = value >> 31;
2089 if (high == 0 || high == -1)
2095 return num_insns_constant_wide (high) + 1;
2097 return (num_insns_constant_wide (high)
2098 + num_insns_constant_wide (low) + 1);
2107 num_insns_constant (rtx op, enum machine_mode mode)
2109 if (GET_CODE (op) == CONST_INT)
2111 #if HOST_BITS_PER_WIDE_INT == 64
2112 if ((INTVAL (op) >> 31) != 0 && (INTVAL (op) >> 31) != -1
2113 && mask64_operand (op, mode))
2117 return num_insns_constant_wide (INTVAL (op));
2120 else if (GET_CODE (op) == CONST_DOUBLE && mode == SFmode)
2125 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
2126 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
2127 return num_insns_constant_wide ((HOST_WIDE_INT) l);
2130 else if (GET_CODE (op) == CONST_DOUBLE)
2136 int endian = (WORDS_BIG_ENDIAN == 0);
2138 if (mode == VOIDmode || mode == DImode)
2140 high = CONST_DOUBLE_HIGH (op);
2141 low = CONST_DOUBLE_LOW (op);
2145 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
2146 REAL_VALUE_TO_TARGET_DOUBLE (rv, l);
2148 low = l[1 - endian];
2152 return (num_insns_constant_wide (low)
2153 + num_insns_constant_wide (high));
2157 if (high == 0 && low >= 0)
2158 return num_insns_constant_wide (low);
2160 else if (high == -1 && low < 0)
2161 return num_insns_constant_wide (low);
2163 else if (mask64_operand (op, mode))
2167 return num_insns_constant_wide (high) + 1;
2170 return (num_insns_constant_wide (high)
2171 + num_insns_constant_wide (low) + 1);
2179 /* Return 1 if the operand is a CONST_DOUBLE and it can be put into a
2180 register with one instruction per word. We only do this if we can
2181 safely read CONST_DOUBLE_{LOW,HIGH}. */
2184 easy_fp_constant (rtx op, enum machine_mode mode)
2186 if (GET_CODE (op) != CONST_DOUBLE
2187 || GET_MODE (op) != mode
2188 || (GET_MODE_CLASS (mode) != MODE_FLOAT && mode != DImode))
2191 /* Consider all constants with -msoft-float to be easy. */
2192 if ((TARGET_SOFT_FLOAT || TARGET_E500_SINGLE)
2196 /* If we are using V.4 style PIC, consider all constants to be hard. */
2197 if (flag_pic && DEFAULT_ABI == ABI_V4)
2200 #ifdef TARGET_RELOCATABLE
2201 /* Similarly if we are using -mrelocatable, consider all constants
2203 if (TARGET_RELOCATABLE)
2212 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
2213 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);
2215 return (num_insns_constant_wide ((HOST_WIDE_INT) k[0]) == 1
2216 && num_insns_constant_wide ((HOST_WIDE_INT) k[1]) == 1
2217 && num_insns_constant_wide ((HOST_WIDE_INT) k[2]) == 1
2218 && num_insns_constant_wide ((HOST_WIDE_INT) k[3]) == 1);
2221 else if (mode == DFmode)
2226 if (TARGET_E500_DOUBLE)
2229 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
2230 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
2232 return (num_insns_constant_wide ((HOST_WIDE_INT) k[0]) == 1
2233 && num_insns_constant_wide ((HOST_WIDE_INT) k[1]) == 1);
2236 else if (mode == SFmode)
2241 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
2242 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
2244 return num_insns_constant_wide (l) == 1;
2247 else if (mode == DImode)
2248 return ((TARGET_POWERPC64
2249 && GET_CODE (op) == CONST_DOUBLE && CONST_DOUBLE_LOW (op) == 0)
2250 || (num_insns_constant (op, DImode) <= 2));
2252 else if (mode == SImode)
2258 /* Returns the constant for the splat instruction, if exists. */
2261 easy_vector_splat_const (int cst, enum machine_mode mode)
2266 if (EASY_VECTOR_15 (cst)
2267 || EASY_VECTOR_15_ADD_SELF (cst))
2269 if ((cst & 0xffff) != ((cst >> 16) & 0xffff))
2275 if (EASY_VECTOR_15 (cst)
2276 || EASY_VECTOR_15_ADD_SELF (cst))
2278 if ((cst & 0xff) != ((cst >> 8) & 0xff))
2284 if (EASY_VECTOR_15 (cst)
2285 || EASY_VECTOR_15_ADD_SELF (cst))
2294 /* Return nonzero if all elements of a vector have the same value. */
2297 easy_vector_same (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2301 units = CONST_VECTOR_NUNITS (op);
2303 cst = INTVAL (CONST_VECTOR_ELT (op, 0));
2304 for (i = 1; i < units; ++i)
2305 if (INTVAL (CONST_VECTOR_ELT (op, i)) != cst)
2307 if (i == units && easy_vector_splat_const (cst, mode))
2312 /* Return 1 if the operand is a CONST_INT and can be put into a
2313 register without using memory. */
2316 easy_vector_constant (rtx op, enum machine_mode mode)
2320 if (GET_CODE (op) != CONST_VECTOR
2325 if (zero_constant (op, mode)
2326 && ((TARGET_ALTIVEC && ALTIVEC_VECTOR_MODE (mode))
2327 || (TARGET_SPE && SPE_VECTOR_MODE (mode))))
2330 if (GET_MODE_CLASS (mode) != MODE_VECTOR_INT)
2333 if (TARGET_SPE && mode == V1DImode)
2336 cst = INTVAL (CONST_VECTOR_ELT (op, 0));
2337 cst2 = INTVAL (CONST_VECTOR_ELT (op, 1));
2339 /* Limit SPE vectors to 15 bits signed. These we can generate with:
2341 evmergelo r0, r0, r0
2344 I don't know how efficient it would be to allow bigger constants,
2345 considering we'll have an extra 'ori' for every 'li'. I doubt 5
2346 instructions is better than a 64-bit memory load, but I don't
2347 have the e500 timing specs. */
2348 if (TARGET_SPE && mode == V2SImode
2349 && cst >= -0x7fff && cst <= 0x7fff
2350 && cst2 >= -0x7fff && cst2 <= 0x7fff)
2354 && easy_vector_same (op, mode))
2356 cst = easy_vector_splat_const (cst, mode);
2357 if (EASY_VECTOR_15_ADD_SELF (cst)
2358 || EASY_VECTOR_15 (cst))
2364 /* Same as easy_vector_constant but only for EASY_VECTOR_15_ADD_SELF. */
2367 easy_vector_constant_add_self (rtx op, enum machine_mode mode)
2371 && GET_CODE (op) == CONST_VECTOR
2372 && easy_vector_same (op, mode))
2374 cst = easy_vector_splat_const (INTVAL (CONST_VECTOR_ELT (op, 0)), mode);
2375 if (EASY_VECTOR_15_ADD_SELF (cst))
2381 /* Generate easy_vector_constant out of a easy_vector_constant_add_self. */
2384 gen_easy_vector_constant_add_self (rtx op)
2388 units = GET_MODE_NUNITS (GET_MODE (op));
2389 v = rtvec_alloc (units);
2391 for (i = 0; i < units; i++)
2393 GEN_INT (INTVAL (CONST_VECTOR_ELT (op, i)) >> 1);
2394 return gen_rtx_raw_CONST_VECTOR (GET_MODE (op), v);
2398 output_vec_const_move (rtx *operands)
2401 enum machine_mode mode;
2407 cst = INTVAL (CONST_VECTOR_ELT (vec, 0));
2408 cst2 = INTVAL (CONST_VECTOR_ELT (vec, 1));
2409 mode = GET_MODE (dest);
2413 if (zero_constant (vec, mode))
2414 return "vxor %0,%0,%0";
2415 else if (easy_vector_constant (vec, mode))
2417 operands[1] = GEN_INT (cst);
2421 if (EASY_VECTOR_15 (cst))
2423 operands[1] = GEN_INT (cst);
2424 return "vspltisw %0,%1";
2426 else if (EASY_VECTOR_15_ADD_SELF (cst))
2432 if (EASY_VECTOR_15 (cst))
2434 operands[1] = GEN_INT (cst);
2435 return "vspltish %0,%1";
2437 else if (EASY_VECTOR_15_ADD_SELF (cst))
2443 if (EASY_VECTOR_15 (cst))
2445 operands[1] = GEN_INT (cst);
2446 return "vspltisb %0,%1";
2448 else if (EASY_VECTOR_15_ADD_SELF (cst))
2461 /* Vector constant 0 is handled as a splitter of V2SI, and in the
2462 pattern of V1DI, V4HI, and V2SF.
2464 FIXME: We should probably return # and add post reload
2465 splitters for these, but this way is so easy ;-). */
2466 operands[1] = GEN_INT (cst);
2467 operands[2] = GEN_INT (cst2);
2469 return "li %0,%1\n\tevmergelo %0,%0,%0";
2471 return "li %0,%1\n\tevmergelo %0,%0,%0\n\tli %0,%2";
2477 /* Return 1 if the operand is the constant 0. This works for scalars
2478 as well as vectors. */
2480 zero_constant (rtx op, enum machine_mode mode)
2482 return op == CONST0_RTX (mode);
2485 /* Return 1 if the operand is 0.0. */
2487 zero_fp_constant (rtx op, enum machine_mode mode)
2489 return GET_MODE_CLASS (mode) == MODE_FLOAT && op == CONST0_RTX (mode);
2492 /* Return 1 if the operand is in volatile memory. Note that during
2493 the RTL generation phase, memory_operand does not return TRUE for
2494 volatile memory references. So this function allows us to
2495 recognize volatile references where its safe. */
2498 volatile_mem_operand (rtx op, enum machine_mode mode)
2500 if (GET_CODE (op) != MEM)
2503 if (!MEM_VOLATILE_P (op))
2506 if (mode != GET_MODE (op))
2509 if (reload_completed)
2510 return memory_operand (op, mode);
2512 if (reload_in_progress)
2513 return strict_memory_address_p (mode, XEXP (op, 0));
2515 return memory_address_p (mode, XEXP (op, 0));
2518 /* Return 1 if the operand is an offsettable memory operand. */
2521 offsettable_mem_operand (rtx op, enum machine_mode mode)
2523 return ((GET_CODE (op) == MEM)
2524 && offsettable_address_p (reload_completed || reload_in_progress,
2525 mode, XEXP (op, 0)));
2528 /* Return 1 if the operand is either an easy FP constant (see above) or
2532 mem_or_easy_const_operand (rtx op, enum machine_mode mode)
2534 return memory_operand (op, mode) || easy_fp_constant (op, mode);
2537 /* Return 1 if the operand is either a non-special register or an item
2538 that can be used as the operand of a `mode' add insn. */
2541 add_operand (rtx op, enum machine_mode mode)
2543 if (GET_CODE (op) == CONST_INT)
2544 return (CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
2545 || CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
2547 return gpc_reg_operand (op, mode);
2550 /* Return 1 if OP is a constant but not a valid add_operand. */
2553 non_add_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2555 return (GET_CODE (op) == CONST_INT
2556 && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
2557 && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
2560 /* Return 1 if the operand is a non-special register or a constant that
2561 can be used as the operand of an OR or XOR insn on the RS/6000. */
2564 logical_operand (rtx op, enum machine_mode mode)
2566 HOST_WIDE_INT opl, oph;
2568 if (gpc_reg_operand (op, mode))
2571 if (GET_CODE (op) == CONST_INT)
2573 opl = INTVAL (op) & GET_MODE_MASK (mode);
2575 #if HOST_BITS_PER_WIDE_INT <= 32
2576 if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT && opl < 0)
2580 else if (GET_CODE (op) == CONST_DOUBLE)
2582 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
2585 opl = CONST_DOUBLE_LOW (op);
2586 oph = CONST_DOUBLE_HIGH (op);
2593 return ((opl & ~ (unsigned HOST_WIDE_INT) 0xffff) == 0
2594 || (opl & ~ (unsigned HOST_WIDE_INT) 0xffff0000) == 0);
2597 /* Return 1 if C is a constant that is not a logical operand (as
2598 above), but could be split into one. */
2601 non_logical_cint_operand (rtx op, enum machine_mode mode)
2603 return ((GET_CODE (op) == CONST_INT || GET_CODE (op) == CONST_DOUBLE)
2604 && ! logical_operand (op, mode)
2605 && reg_or_logical_cint_operand (op, mode));
2608 /* Return 1 if C is a constant that can be encoded in a 32-bit mask on the
2609 RS/6000. It is if there are no more than two 1->0 or 0->1 transitions.
2610 Reject all ones and all zeros, since these should have been optimized
2611 away and confuse the making of MB and ME. */
2614 mask_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2616 HOST_WIDE_INT c, lsb;
2618 if (GET_CODE (op) != CONST_INT)
2623 /* Fail in 64-bit mode if the mask wraps around because the upper
2624 32-bits of the mask will all be 1s, contrary to GCC's internal view. */
2625 if (TARGET_POWERPC64 && (c & 0x80000001) == 0x80000001)
2628 /* We don't change the number of transitions by inverting,
2629 so make sure we start with the LS bit zero. */
2633 /* Reject all zeros or all ones. */
2637 /* Find the first transition. */
2640 /* Invert to look for a second transition. */
2643 /* Erase first transition. */
2646 /* Find the second transition (if any). */
2649 /* Match if all the bits above are 1's (or c is zero). */
2653 /* Return 1 for the PowerPC64 rlwinm corner case. */
2656 mask_operand_wrap (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2658 HOST_WIDE_INT c, lsb;
2660 if (GET_CODE (op) != CONST_INT)
2665 if ((c & 0x80000001) != 0x80000001)
2679 /* Return 1 if the operand is a constant that is a PowerPC64 mask.
2680 It is if there are no more than one 1->0 or 0->1 transitions.
2681 Reject all zeros, since zero should have been optimized away and
2682 confuses the making of MB and ME. */
2685 mask64_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2687 if (GET_CODE (op) == CONST_INT)
2689 HOST_WIDE_INT c, lsb;
2693 /* Reject all zeros. */
2697 /* We don't change the number of transitions by inverting,
2698 so make sure we start with the LS bit zero. */
2702 /* Find the transition, and check that all bits above are 1's. */
2705 /* Match if all the bits above are 1's (or c is zero). */
2711 /* Like mask64_operand, but allow up to three transitions. This
2712 predicate is used by insn patterns that generate two rldicl or
2713 rldicr machine insns. */
2716 mask64_2_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2718 if (GET_CODE (op) == CONST_INT)
2720 HOST_WIDE_INT c, lsb;
2724 /* Disallow all zeros. */
2728 /* We don't change the number of transitions by inverting,
2729 so make sure we start with the LS bit zero. */
2733 /* Find the first transition. */
2736 /* Invert to look for a second transition. */
2739 /* Erase first transition. */
2742 /* Find the second transition. */
2745 /* Invert to look for a third transition. */
2748 /* Erase second transition. */
2751 /* Find the third transition (if any). */
2754 /* Match if all the bits above are 1's (or c is zero). */
2760 /* Generates shifts and masks for a pair of rldicl or rldicr insns to
2761 implement ANDing by the mask IN. */
2763 build_mask64_2_operands (rtx in, rtx *out)
2765 #if HOST_BITS_PER_WIDE_INT >= 64
2766 unsigned HOST_WIDE_INT c, lsb, m1, m2;
2769 if (GET_CODE (in) != CONST_INT)
2775 /* Assume c initially something like 0x00fff000000fffff. The idea
2776 is to rotate the word so that the middle ^^^^^^ group of zeros
2777 is at the MS end and can be cleared with an rldicl mask. We then
2778 rotate back and clear off the MS ^^ group of zeros with a
2780 c = ~c; /* c == 0xff000ffffff00000 */
2781 lsb = c & -c; /* lsb == 0x0000000000100000 */
2782 m1 = -lsb; /* m1 == 0xfffffffffff00000 */
2783 c = ~c; /* c == 0x00fff000000fffff */
2784 c &= -lsb; /* c == 0x00fff00000000000 */
2785 lsb = c & -c; /* lsb == 0x0000100000000000 */
2786 c = ~c; /* c == 0xff000fffffffffff */
2787 c &= -lsb; /* c == 0xff00000000000000 */
2789 while ((lsb >>= 1) != 0)
2790 shift++; /* shift == 44 on exit from loop */
2791 m1 <<= 64 - shift; /* m1 == 0xffffff0000000000 */
2792 m1 = ~m1; /* m1 == 0x000000ffffffffff */
2793 m2 = ~c; /* m2 == 0x00ffffffffffffff */
2797 /* Assume c initially something like 0xff000f0000000000. The idea
2798 is to rotate the word so that the ^^^ middle group of zeros
2799 is at the LS end and can be cleared with an rldicr mask. We then
2800 rotate back and clear off the LS group of ^^^^^^^^^^ zeros with
2802 lsb = c & -c; /* lsb == 0x0000010000000000 */
2803 m2 = -lsb; /* m2 == 0xffffff0000000000 */
2804 c = ~c; /* c == 0x00fff0ffffffffff */
2805 c &= -lsb; /* c == 0x00fff00000000000 */
2806 lsb = c & -c; /* lsb == 0x0000100000000000 */
2807 c = ~c; /* c == 0xff000fffffffffff */
2808 c &= -lsb; /* c == 0xff00000000000000 */
2810 while ((lsb >>= 1) != 0)
2811 shift++; /* shift == 44 on exit from loop */
2812 m1 = ~c; /* m1 == 0x00ffffffffffffff */
2813 m1 >>= shift; /* m1 == 0x0000000000000fff */
2814 m1 = ~m1; /* m1 == 0xfffffffffffff000 */
2817 /* Note that when we only have two 0->1 and 1->0 transitions, one of the
2818 masks will be all 1's. We are guaranteed more than one transition. */
2819 out[0] = GEN_INT (64 - shift);
2820 out[1] = GEN_INT (m1);
2821 out[2] = GEN_INT (shift);
2822 out[3] = GEN_INT (m2);
2830 /* Return 1 if the operand is either a non-special register or a constant
2831 that can be used as the operand of a PowerPC64 logical AND insn. */
2834 and64_operand (rtx op, enum machine_mode mode)
2836 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
2837 return (gpc_reg_operand (op, mode) || mask64_operand (op, mode));
2839 return (logical_operand (op, mode) || mask64_operand (op, mode));
2842 /* Like the above, but also match constants that can be implemented
2843 with two rldicl or rldicr insns. */
2846 and64_2_operand (rtx op, enum machine_mode mode)
2848 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
2849 return gpc_reg_operand (op, mode) || mask64_2_operand (op, mode);
2851 return logical_operand (op, mode) || mask64_2_operand (op, mode);
2854 /* Return 1 if the operand is either a non-special register or a
2855 constant that can be used as the operand of an RS/6000 logical AND insn. */
2858 and_operand (rtx op, enum machine_mode mode)
2860 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
2861 return (gpc_reg_operand (op, mode) || mask_operand (op, mode));
2863 return (logical_operand (op, mode) || mask_operand (op, mode));
2866 /* Return 1 if the operand is a general register or memory operand. */
2869 reg_or_mem_operand (rtx op, enum machine_mode mode)
2871 return (gpc_reg_operand (op, mode)
2872 || memory_operand (op, mode)
2873 || macho_lo_sum_memory_operand (op, mode)
2874 || volatile_mem_operand (op, mode));
2877 /* Return 1 if the operand is a general register or memory operand without
2878 pre_inc or pre_dec which produces invalid form of PowerPC lwa
2882 lwa_operand (rtx op, enum machine_mode mode)
2886 if (reload_completed && GET_CODE (inner) == SUBREG)
2887 inner = SUBREG_REG (inner);
2889 return gpc_reg_operand (inner, mode)
2890 || (memory_operand (inner, mode)
2891 && GET_CODE (XEXP (inner, 0)) != PRE_INC
2892 && GET_CODE (XEXP (inner, 0)) != PRE_DEC
2893 && (GET_CODE (XEXP (inner, 0)) != PLUS
2894 || GET_CODE (XEXP (XEXP (inner, 0), 1)) != CONST_INT
2895 || INTVAL (XEXP (XEXP (inner, 0), 1)) % 4 == 0));
2898 /* Return 1 if the operand, used inside a MEM, is a SYMBOL_REF. */
2901 symbol_ref_operand (rtx op, enum machine_mode mode)
2903 if (mode != VOIDmode && GET_MODE (op) != mode)
2906 return (GET_CODE (op) == SYMBOL_REF
2907 && (DEFAULT_ABI != ABI_AIX || SYMBOL_REF_FUNCTION_P (op)));
2910 /* Return 1 if the operand, used inside a MEM, is a valid first argument
2911 to CALL. This is a SYMBOL_REF, a pseudo-register, LR or CTR. */
2914 call_operand (rtx op, enum machine_mode mode)
2916 if (mode != VOIDmode && GET_MODE (op) != mode)
2919 return (GET_CODE (op) == SYMBOL_REF
2920 || (GET_CODE (op) == REG
2921 && (REGNO (op) == LINK_REGISTER_REGNUM
2922 || REGNO (op) == COUNT_REGISTER_REGNUM
2923 || REGNO (op) >= FIRST_PSEUDO_REGISTER)));
2926 /* Return 1 if the operand is a SYMBOL_REF for a function known to be in
2930 current_file_function_operand (rtx op,
2931 enum machine_mode mode ATTRIBUTE_UNUSED)
2933 return (GET_CODE (op) == SYMBOL_REF
2934 && (DEFAULT_ABI != ABI_AIX || SYMBOL_REF_FUNCTION_P (op))
2935 && (SYMBOL_REF_LOCAL_P (op)
2936 || (op == XEXP (DECL_RTL (current_function_decl), 0))));
2939 /* Return 1 if this operand is a valid input for a move insn. */
2942 input_operand (rtx op, enum machine_mode mode)
2944 /* Memory is always valid. */
2945 if (memory_operand (op, mode))
2948 /* For floating-point, easy constants are valid. */
2949 if (GET_MODE_CLASS (mode) == MODE_FLOAT
2951 && easy_fp_constant (op, mode))
2954 /* Allow any integer constant. */
2955 if (GET_MODE_CLASS (mode) == MODE_INT
2956 && (GET_CODE (op) == CONST_INT
2957 || GET_CODE (op) == CONST_DOUBLE))
2960 /* Allow easy vector constants. */
2961 if (GET_CODE (op) == CONST_VECTOR
2962 && easy_vector_constant (op, mode))
2965 /* For floating-point or multi-word mode, the only remaining valid type
2967 if (GET_MODE_CLASS (mode) == MODE_FLOAT
2968 || GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2969 return register_operand (op, mode);
2971 /* The only cases left are integral modes one word or smaller (we
2972 do not get called for MODE_CC values). These can be in any
2974 if (register_operand (op, mode))
2977 /* A SYMBOL_REF referring to the TOC is valid. */
2978 if (legitimate_constant_pool_address_p (op))
2981 /* A constant pool expression (relative to the TOC) is valid */
2982 if (toc_relative_expr_p (op))
2985 /* V.4 allows SYMBOL_REFs and CONSTs that are in the small data region
2987 if (DEFAULT_ABI == ABI_V4
2988 && (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == CONST)
2989 && small_data_operand (op, Pmode))
2996 /* Darwin, AIX increases natural record alignment to doubleword if the first
2997 field is an FP double while the FP fields remain word aligned. */
3000 rs6000_special_round_type_align (tree type, int computed, int specified)
3002 tree field = TYPE_FIELDS (type);
3004 /* Skip all the static variables only if ABI is greater than
3006 while (field != NULL && TREE_CODE (field) == VAR_DECL)
3007 field = TREE_CHAIN (field);
3009 if (field == NULL || field == type || DECL_MODE (field) != DFmode)
3010 return MAX (computed, specified);
3012 return MAX (MAX (computed, specified), 64);
3015 /* Return 1 for an operand in small memory on V.4/eabi. */
3018 small_data_operand (rtx op ATTRIBUTE_UNUSED,
3019 enum machine_mode mode ATTRIBUTE_UNUSED)
3024 if (rs6000_sdata == SDATA_NONE || rs6000_sdata == SDATA_DATA)
3027 if (DEFAULT_ABI != ABI_V4)
3030 if (GET_CODE (op) == SYMBOL_REF)
3033 else if (GET_CODE (op) != CONST
3034 || GET_CODE (XEXP (op, 0)) != PLUS
3035 || GET_CODE (XEXP (XEXP (op, 0), 0)) != SYMBOL_REF
3036 || GET_CODE (XEXP (XEXP (op, 0), 1)) != CONST_INT)
3041 rtx sum = XEXP (op, 0);
3042 HOST_WIDE_INT summand;
3044 /* We have to be careful here, because it is the referenced address
3045 that must be 32k from _SDA_BASE_, not just the symbol. */
3046 summand = INTVAL (XEXP (sum, 1));
3047 if (summand < 0 || (unsigned HOST_WIDE_INT) summand > g_switch_value)
3050 sym_ref = XEXP (sum, 0);
3053 return SYMBOL_REF_SMALL_P (sym_ref);
3059 /* Return true, if operand is a memory operand and has a
3060 displacement divisible by 4. */
3063 word_offset_memref_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
3068 if (!memory_operand (op, mode))
3071 addr = XEXP (op, 0);
3072 if (GET_CODE (addr) == PLUS
3073 && GET_CODE (XEXP (addr, 0)) == REG
3074 && GET_CODE (XEXP (addr, 1)) == CONST_INT)
3075 off = INTVAL (XEXP (addr, 1));
3077 return (off % 4) == 0;
3080 /* Return true if either operand is a general purpose register. */
3083 gpr_or_gpr_p (rtx op0, rtx op1)
3085 return ((REG_P (op0) && INT_REGNO_P (REGNO (op0)))
3086 || (REG_P (op1) && INT_REGNO_P (REGNO (op1))));
3090 /* Subroutines of rs6000_legitimize_address and rs6000_legitimate_address. */
3093 constant_pool_expr_1 (rtx op, int *have_sym, int *have_toc)
3095 switch (GET_CODE(op))
3098 if (RS6000_SYMBOL_REF_TLS_P (op))
3100 else if (CONSTANT_POOL_ADDRESS_P (op))
3102 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (op), Pmode))
3110 else if (! strcmp (XSTR (op, 0), toc_label_name))
3119 return (constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc)
3120 && constant_pool_expr_1 (XEXP (op, 1), have_sym, have_toc));
3122 return constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc);
3131 constant_pool_expr_p (rtx op)
3135 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_sym;
3139 toc_relative_expr_p (rtx op)
3143 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_toc;
3147 legitimate_constant_pool_address_p (rtx x)
3150 && GET_CODE (x) == PLUS
3151 && GET_CODE (XEXP (x, 0)) == REG
3152 && (TARGET_MINIMAL_TOC || REGNO (XEXP (x, 0)) == TOC_REGISTER)
3153 && constant_pool_expr_p (XEXP (x, 1)));
3157 legitimate_small_data_p (enum machine_mode mode, rtx x)
3159 return (DEFAULT_ABI == ABI_V4
3160 && !flag_pic && !TARGET_TOC
3161 && (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST)
3162 && small_data_operand (x, mode));
3165 /* SPE offset addressing is limited to 5-bits worth of double words. */
3166 #define SPE_CONST_OFFSET_OK(x) (((x) & ~0xf8) == 0)
3169 rs6000_legitimate_offset_address_p (enum machine_mode mode, rtx x, int strict)
3171 unsigned HOST_WIDE_INT offset, extra;
3173 if (GET_CODE (x) != PLUS)
3175 if (GET_CODE (XEXP (x, 0)) != REG)
3177 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
3179 if (legitimate_constant_pool_address_p (x))
3181 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
3184 offset = INTVAL (XEXP (x, 1));
3192 /* AltiVec vector modes. Only reg+reg addressing is valid here,
3193 which leaves the only valid constant offset of zero, which by
3194 canonicalization rules is also invalid. */
3201 /* SPE vector modes. */
3202 return SPE_CONST_OFFSET_OK (offset);
3205 if (TARGET_E500_DOUBLE)
3206 return SPE_CONST_OFFSET_OK (offset);
3209 if (mode == DFmode || !TARGET_POWERPC64)
3211 else if (offset & 3)
3217 if (mode == TFmode || !TARGET_POWERPC64)
3219 else if (offset & 3)
3230 return (offset < 0x10000) && (offset + extra < 0x10000);
3234 legitimate_indexed_address_p (rtx x, int strict)
3238 if (GET_CODE (x) != PLUS)
3244 if (!REG_P (op0) || !REG_P (op1))
3247 return ((INT_REG_OK_FOR_BASE_P (op0, strict)
3248 && INT_REG_OK_FOR_INDEX_P (op1, strict))
3249 || (INT_REG_OK_FOR_BASE_P (op1, strict)
3250 && INT_REG_OK_FOR_INDEX_P (op0, strict)));
3254 legitimate_indirect_address_p (rtx x, int strict)
3256 return GET_CODE (x) == REG && INT_REG_OK_FOR_BASE_P (x, strict);
3260 macho_lo_sum_memory_operand (rtx x, enum machine_mode mode)
3262 if (!TARGET_MACHO || !flag_pic
3263 || mode != SImode || GET_CODE(x) != MEM)
3267 if (GET_CODE (x) != LO_SUM)
3269 if (GET_CODE (XEXP (x, 0)) != REG)
3271 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), 0))
3275 return CONSTANT_P (x);
3279 legitimate_lo_sum_address_p (enum machine_mode mode, rtx x, int strict)
3281 if (GET_CODE (x) != LO_SUM)
3283 if (GET_CODE (XEXP (x, 0)) != REG)
3285 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
3287 if (TARGET_E500_DOUBLE && mode == DFmode)
3291 if (TARGET_ELF || TARGET_MACHO)
3293 if (DEFAULT_ABI != ABI_AIX && DEFAULT_ABI != ABI_DARWIN && flag_pic)
3297 if (GET_MODE_NUNITS (mode) != 1)
3299 if (GET_MODE_BITSIZE (mode) > 64)
3302 return CONSTANT_P (x);
3309 /* Try machine-dependent ways of modifying an illegitimate address
3310 to be legitimate. If we find one, return the new, valid address.
3311 This is used from only one place: `memory_address' in explow.c.
3313 OLDX is the address as it was before break_out_memory_refs was
3314 called. In some cases it is useful to look at this to decide what
3317 MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.
3319 It is always safe for this function to do nothing. It exists to
3320 recognize opportunities to optimize the output.
3322 On RS/6000, first check for the sum of a register with a constant
3323 integer that is out of range. If so, generate code to add the
3324 constant with the low-order 16 bits masked to the register and force
3325 this result into another register (this can be done with `cau').
3326 Then generate an address of REG+(CONST&0xffff), allowing for the
3327 possibility of bit 16 being a one.
3329 Then check for the sum of a register and something not constant, try to
3330 load the other things into a register and return the sum. */
3333 rs6000_legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
3334 enum machine_mode mode)
3336 if (GET_CODE (x) == SYMBOL_REF)
3338 enum tls_model model = SYMBOL_REF_TLS_MODEL (x);
3340 return rs6000_legitimize_tls_address (x, model);
3343 if (GET_CODE (x) == PLUS
3344 && GET_CODE (XEXP (x, 0)) == REG
3345 && GET_CODE (XEXP (x, 1)) == CONST_INT
3346 && (unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1)) + 0x8000) >= 0x10000)
3348 HOST_WIDE_INT high_int, low_int;
3350 low_int = ((INTVAL (XEXP (x, 1)) & 0xffff) ^ 0x8000) - 0x8000;
3351 high_int = INTVAL (XEXP (x, 1)) - low_int;
3352 sum = force_operand (gen_rtx_PLUS (Pmode, XEXP (x, 0),
3353 GEN_INT (high_int)), 0);
3354 return gen_rtx_PLUS (Pmode, sum, GEN_INT (low_int));
3356 else if (GET_CODE (x) == PLUS
3357 && GET_CODE (XEXP (x, 0)) == REG
3358 && GET_CODE (XEXP (x, 1)) != CONST_INT
3359 && GET_MODE_NUNITS (mode) == 1
3360 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
3362 || ((mode != DFmode || TARGET_E500_DOUBLE) && mode != TFmode))
3363 && (TARGET_POWERPC64 || mode != DImode)
3366 return gen_rtx_PLUS (Pmode, XEXP (x, 0),
3367 force_reg (Pmode, force_operand (XEXP (x, 1), 0)));
3369 else if (ALTIVEC_VECTOR_MODE (mode))
3373 /* Make sure both operands are registers. */
3374 if (GET_CODE (x) == PLUS)
3375 return gen_rtx_PLUS (Pmode, force_reg (Pmode, XEXP (x, 0)),
3376 force_reg (Pmode, XEXP (x, 1)));
3378 reg = force_reg (Pmode, x);
3381 else if (SPE_VECTOR_MODE (mode)
3382 || (TARGET_E500_DOUBLE && mode == DFmode))
3384 /* We accept [reg + reg] and [reg + OFFSET]. */
3386 if (GET_CODE (x) == PLUS)
3388 rtx op1 = XEXP (x, 0);
3389 rtx op2 = XEXP (x, 1);
3391 op1 = force_reg (Pmode, op1);
3393 if (GET_CODE (op2) != REG
3394 && (GET_CODE (op2) != CONST_INT
3395 || !SPE_CONST_OFFSET_OK (INTVAL (op2))))
3396 op2 = force_reg (Pmode, op2);
3398 return gen_rtx_PLUS (Pmode, op1, op2);
3401 return force_reg (Pmode, x);
3407 && GET_CODE (x) != CONST_INT
3408 && GET_CODE (x) != CONST_DOUBLE
3410 && GET_MODE_NUNITS (mode) == 1
3411 && (GET_MODE_BITSIZE (mode) <= 32
3412 || ((TARGET_HARD_FLOAT && TARGET_FPRS) && mode == DFmode)))
3414 rtx reg = gen_reg_rtx (Pmode);
3415 emit_insn (gen_elf_high (reg, x));
3416 return gen_rtx_LO_SUM (Pmode, reg, x);
3418 else if (TARGET_MACHO && TARGET_32BIT && TARGET_NO_TOC
3421 && ! MACHO_DYNAMIC_NO_PIC_P
3423 && GET_CODE (x) != CONST_INT
3424 && GET_CODE (x) != CONST_DOUBLE
3426 && ((TARGET_HARD_FLOAT && TARGET_FPRS) || mode != DFmode)
3430 rtx reg = gen_reg_rtx (Pmode);
3431 emit_insn (gen_macho_high (reg, x));
3432 return gen_rtx_LO_SUM (Pmode, reg, x);
3435 && constant_pool_expr_p (x)
3436 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), Pmode))
3438 return create_TOC_reference (x);
3444 /* This is called from dwarf2out.c via ASM_OUTPUT_DWARF_DTPREL.
3445 We need to emit DTP-relative relocations. */
3448 rs6000_output_dwarf_dtprel (FILE *file, int size, rtx x)
3453 fputs ("\t.long\t", file);
3456 fputs (DOUBLE_INT_ASM_OP, file);
3461 output_addr_const (file, x);
3462 fputs ("@dtprel+0x8000", file);
3465 /* Construct the SYMBOL_REF for the tls_get_addr function. */
3467 static GTY(()) rtx rs6000_tls_symbol;
3469 rs6000_tls_get_addr (void)
3471 if (!rs6000_tls_symbol)
3472 rs6000_tls_symbol = init_one_libfunc ("__tls_get_addr");
3474 return rs6000_tls_symbol;
3477 /* Construct the SYMBOL_REF for TLS GOT references. */
3479 static GTY(()) rtx rs6000_got_symbol;
3481 rs6000_got_sym (void)
3483 if (!rs6000_got_symbol)
3485 rs6000_got_symbol = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
3486 SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_LOCAL;
3487 SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_EXTERNAL;
3490 return rs6000_got_symbol;
3493 /* ADDR contains a thread-local SYMBOL_REF. Generate code to compute
3494 this (thread-local) address. */
3497 rs6000_legitimize_tls_address (rtx addr, enum tls_model model)
3501 dest = gen_reg_rtx (Pmode);
3502 if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 16)
3508 tlsreg = gen_rtx_REG (Pmode, 13);
3509 insn = gen_tls_tprel_64 (dest, tlsreg, addr);
3513 tlsreg = gen_rtx_REG (Pmode, 2);
3514 insn = gen_tls_tprel_32 (dest, tlsreg, addr);
3518 else if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 32)
3522 tmp = gen_reg_rtx (Pmode);
3525 tlsreg = gen_rtx_REG (Pmode, 13);
3526 insn = gen_tls_tprel_ha_64 (tmp, tlsreg, addr);
3530 tlsreg = gen_rtx_REG (Pmode, 2);
3531 insn = gen_tls_tprel_ha_32 (tmp, tlsreg, addr);
3535 insn = gen_tls_tprel_lo_64 (dest, tmp, addr);
3537 insn = gen_tls_tprel_lo_32 (dest, tmp, addr);
3542 rtx r3, got, tga, tmp1, tmp2, eqv;
3545 got = gen_rtx_REG (Pmode, TOC_REGISTER);
3549 got = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
3552 rtx gsym = rs6000_got_sym ();
3553 got = gen_reg_rtx (Pmode);
3555 rs6000_emit_move (got, gsym, Pmode);
3559 static int tls_got_labelno = 0;
3560 rtx tempLR, lab, tmp3, mem;
3563 ASM_GENERATE_INTERNAL_LABEL (buf, "LTLS", tls_got_labelno++);
3564 lab = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
3565 tempLR = gen_reg_rtx (Pmode);
3566 tmp1 = gen_reg_rtx (Pmode);
3567 tmp2 = gen_reg_rtx (Pmode);
3568 tmp3 = gen_reg_rtx (Pmode);
3569 mem = gen_const_mem (Pmode, tmp1);
3571 first = emit_insn (gen_load_toc_v4_PIC_1b (tempLR, lab,
3573 emit_move_insn (tmp1, tempLR);
3574 emit_move_insn (tmp2, mem);
3575 emit_insn (gen_addsi3 (tmp3, tmp1, tmp2));
3576 last = emit_move_insn (got, tmp3);
3577 REG_NOTES (last) = gen_rtx_EXPR_LIST (REG_EQUAL, gsym,
3579 REG_NOTES (first) = gen_rtx_INSN_LIST (REG_LIBCALL, last,
3581 REG_NOTES (last) = gen_rtx_INSN_LIST (REG_RETVAL, first,
3587 if (model == TLS_MODEL_GLOBAL_DYNAMIC)
3589 r3 = gen_rtx_REG (Pmode, 3);
3591 insn = gen_tls_gd_64 (r3, got, addr);
3593 insn = gen_tls_gd_32 (r3, got, addr);
3596 tga = gen_rtx_MEM (Pmode, rs6000_tls_get_addr ());
3597 insn = gen_call_value (r3, tga, const0_rtx, const0_rtx);
3598 insn = emit_call_insn (insn);
3599 CONST_OR_PURE_CALL_P (insn) = 1;
3600 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
3601 insn = get_insns ();
3603 emit_libcall_block (insn, dest, r3, addr);
3605 else if (model == TLS_MODEL_LOCAL_DYNAMIC)
3607 r3 = gen_rtx_REG (Pmode, 3);
3609 insn = gen_tls_ld_64 (r3, got);
3611 insn = gen_tls_ld_32 (r3, got);
3614 tga = gen_rtx_MEM (Pmode, rs6000_tls_get_addr ());
3615 insn = gen_call_value (r3, tga, const0_rtx, const0_rtx);
3616 insn = emit_call_insn (insn);
3617 CONST_OR_PURE_CALL_P (insn) = 1;
3618 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
3619 insn = get_insns ();
3621 tmp1 = gen_reg_rtx (Pmode);
3622 eqv = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx),
3624 emit_libcall_block (insn, tmp1, r3, eqv);
3625 if (rs6000_tls_size == 16)
3628 insn = gen_tls_dtprel_64 (dest, tmp1, addr);
3630 insn = gen_tls_dtprel_32 (dest, tmp1, addr);
3632 else if (rs6000_tls_size == 32)
3634 tmp2 = gen_reg_rtx (Pmode);
3636 insn = gen_tls_dtprel_ha_64 (tmp2, tmp1, addr);
3638 insn = gen_tls_dtprel_ha_32 (tmp2, tmp1, addr);
3641 insn = gen_tls_dtprel_lo_64 (dest, tmp2, addr);
3643 insn = gen_tls_dtprel_lo_32 (dest, tmp2, addr);
3647 tmp2 = gen_reg_rtx (Pmode);
3649 insn = gen_tls_got_dtprel_64 (tmp2, got, addr);
3651 insn = gen_tls_got_dtprel_32 (tmp2, got, addr);
3653 insn = gen_rtx_SET (Pmode, dest,
3654 gen_rtx_PLUS (Pmode, tmp2, tmp1));
3660 /* IE, or 64 bit offset LE. */
3661 tmp2 = gen_reg_rtx (Pmode);
3663 insn = gen_tls_got_tprel_64 (tmp2, got, addr);
3665 insn = gen_tls_got_tprel_32 (tmp2, got, addr);
3668 insn = gen_tls_tls_64 (dest, tmp2, addr);
3670 insn = gen_tls_tls_32 (dest, tmp2, addr);
3678 /* Return 1 if X is a SYMBOL_REF for a TLS symbol. This is used in
3679 instruction definitions. */
3682 rs6000_tls_symbol_ref (rtx x, enum machine_mode mode ATTRIBUTE_UNUSED)
3684 return RS6000_SYMBOL_REF_TLS_P (x);
3687 /* Return 1 if X contains a thread-local symbol. */
3690 rs6000_tls_referenced_p (rtx x)
3692 if (! TARGET_HAVE_TLS)
3695 return for_each_rtx (&x, &rs6000_tls_symbol_ref_1, 0);
3698 /* Return 1 if *X is a thread-local symbol. This is the same as
3699 rs6000_tls_symbol_ref except for the type of the unused argument. */
3702 rs6000_tls_symbol_ref_1 (rtx *x, void *data ATTRIBUTE_UNUSED)
3704 return RS6000_SYMBOL_REF_TLS_P (*x);
3707 /* The convention appears to be to define this wherever it is used.
3708 With legitimize_reload_address now defined here, REG_MODE_OK_FOR_BASE_P
3709 is now used here. */
3710 #ifndef REG_MODE_OK_FOR_BASE_P
3711 #define REG_MODE_OK_FOR_BASE_P(REGNO, MODE) REG_OK_FOR_BASE_P (REGNO)
3714 /* Our implementation of LEGITIMIZE_RELOAD_ADDRESS. Returns a value to
3715 replace the input X, or the original X if no replacement is called for.
3716 The output parameter *WIN is 1 if the calling macro should goto WIN,
3719 For RS/6000, we wish to handle large displacements off a base
3720 register by splitting the addend across an addiu/addis and the mem insn.
3721 This cuts number of extra insns needed from 3 to 1.
3723 On Darwin, we use this to generate code for floating point constants.
3724 A movsf_low is generated so we wind up with 2 instructions rather than 3.
3725 The Darwin code is inside #if TARGET_MACHO because only then is
3726 machopic_function_base_name() defined. */
3728 rs6000_legitimize_reload_address (rtx x, enum machine_mode mode,
3729 int opnum, int type,
3730 int ind_levels ATTRIBUTE_UNUSED, int *win)
3732 /* We must recognize output that we have already generated ourselves. */
3733 if (GET_CODE (x) == PLUS
3734 && GET_CODE (XEXP (x, 0)) == PLUS
3735 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
3736 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3737 && GET_CODE (XEXP (x, 1)) == CONST_INT)
3739 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
3740 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
3741 opnum, (enum reload_type)type);
3747 if (DEFAULT_ABI == ABI_DARWIN && flag_pic
3748 && GET_CODE (x) == LO_SUM
3749 && GET_CODE (XEXP (x, 0)) == PLUS
3750 && XEXP (XEXP (x, 0), 0) == pic_offset_table_rtx
3751 && GET_CODE (XEXP (XEXP (x, 0), 1)) == HIGH
3752 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 1), 0)) == CONST
3753 && XEXP (XEXP (XEXP (x, 0), 1), 0) == XEXP (x, 1)
3754 && GET_CODE (XEXP (XEXP (x, 1), 0)) == MINUS
3755 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == SYMBOL_REF
3756 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == SYMBOL_REF)
3758 /* Result of previous invocation of this function on Darwin
3759 floating point constant. */
3760 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
3761 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
3762 opnum, (enum reload_type)type);
3767 if (GET_CODE (x) == PLUS
3768 && GET_CODE (XEXP (x, 0)) == REG
3769 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
3770 && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
3771 && GET_CODE (XEXP (x, 1)) == CONST_INT
3772 && !SPE_VECTOR_MODE (mode)
3773 && !(TARGET_E500_DOUBLE && mode == DFmode)
3774 && !ALTIVEC_VECTOR_MODE (mode))
3776 HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
3777 HOST_WIDE_INT low = ((val & 0xffff) ^ 0x8000) - 0x8000;
3779 = (((val - low) & 0xffffffff) ^ 0x80000000) - 0x80000000;
3781 /* Check for 32-bit overflow. */
3782 if (high + low != val)
3788 /* Reload the high part into a base reg; leave the low part
3789 in the mem directly. */
3791 x = gen_rtx_PLUS (GET_MODE (x),
3792 gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0),
3796 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
3797 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
3798 opnum, (enum reload_type)type);
3803 if (GET_CODE (x) == SYMBOL_REF
3804 && DEFAULT_ABI == ABI_DARWIN
3805 && !ALTIVEC_VECTOR_MODE (mode)
3806 && (flag_pic || MACHO_DYNAMIC_NO_PIC_P)
3807 /* Don't do this for TFmode, since the result isn't offsettable. */
3812 rtx offset = gen_rtx_CONST (Pmode,
3813 gen_rtx_MINUS (Pmode, x,
3814 machopic_function_base_sym ()));
3815 x = gen_rtx_LO_SUM (GET_MODE (x),
3816 gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
3817 gen_rtx_HIGH (Pmode, offset)), offset);
3820 x = gen_rtx_LO_SUM (GET_MODE (x),
3821 gen_rtx_HIGH (Pmode, x), x);
3823 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
3824 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
3825 opnum, (enum reload_type)type);
3831 && constant_pool_expr_p (x)
3832 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), mode))
3834 (x) = create_TOC_reference (x);
3842 /* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
3843 that is a valid memory address for an instruction.
3844 The MODE argument is the machine mode for the MEM expression
3845 that wants to use this address.
3847 On the RS/6000, there are four valid address: a SYMBOL_REF that
3848 refers to a constant pool entry of an address (or the sum of it
3849 plus a constant), a short (16-bit signed) constant plus a register,
3850 the sum of two registers, or a register indirect, possibly with an
3851 auto-increment. For DFmode and DImode with a constant plus register,
3852 we must ensure that both words are addressable or PowerPC64 with offset
3855 For modes spanning multiple registers (DFmode in 32-bit GPRs,
3856 32-bit DImode, TImode, TFmode), indexed addressing cannot be used because
3857 adjacent memory cells are accessed by adding word-sized offsets
3858 during assembly output. */
3860 rs6000_legitimate_address (enum machine_mode mode, rtx x, int reg_ok_strict)
3862 /* If this is an unaligned stvx/ldvx type address, discard the outer AND. */
3864 && ALTIVEC_VECTOR_MODE (mode)
3865 && GET_CODE (x) == AND
3866 && GET_CODE (XEXP (x, 1)) == CONST_INT
3867 && INTVAL (XEXP (x, 1)) == -16)
3870 if (RS6000_SYMBOL_REF_TLS_P (x))
3872 if (legitimate_indirect_address_p (x, reg_ok_strict))
3874 if ((GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
3875 && !ALTIVEC_VECTOR_MODE (mode)
3876 && !SPE_VECTOR_MODE (mode)
3877 && !(TARGET_E500_DOUBLE && mode == DFmode)
3879 && legitimate_indirect_address_p (XEXP (x, 0), reg_ok_strict))
3881 if (legitimate_small_data_p (mode, x))
3883 if (legitimate_constant_pool_address_p (x))
3885 /* If not REG_OK_STRICT (before reload) let pass any stack offset. */
3887 && GET_CODE (x) == PLUS
3888 && GET_CODE (XEXP (x, 0)) == REG
3889 && (XEXP (x, 0) == virtual_stack_vars_rtx
3890 || XEXP (x, 0) == arg_pointer_rtx)
3891 && GET_CODE (XEXP (x, 1)) == CONST_INT)
3893 if (rs6000_legitimate_offset_address_p (mode, x, reg_ok_strict))
3897 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
3899 || ((mode != DFmode || TARGET_E500_DOUBLE) && mode != TFmode))
3900 && (TARGET_POWERPC64 || mode != DImode)
3901 && legitimate_indexed_address_p (x, reg_ok_strict))
3903 if (legitimate_lo_sum_address_p (mode, x, reg_ok_strict))
3908 /* Go to LABEL if ADDR (a legitimate address expression)
3909 has an effect that depends on the machine mode it is used for.
3911 On the RS/6000 this is true of all integral offsets (since AltiVec
3912 modes don't allow them) or is a pre-increment or decrement.
3914 ??? Except that due to conceptual problems in offsettable_address_p
3915 we can't really report the problems of integral offsets. So leave
3916 this assuming that the adjustable offset must be valid for the
3917 sub-words of a TFmode operand, which is what we had before. */
3920 rs6000_mode_dependent_address (rtx addr)
3922 switch (GET_CODE (addr))
3925 if (GET_CODE (XEXP (addr, 1)) == CONST_INT)
3927 unsigned HOST_WIDE_INT val = INTVAL (XEXP (addr, 1));
3928 return val + 12 + 0x8000 >= 0x10000;
3937 return TARGET_UPDATE;
3946 /* Return number of consecutive hard regs needed starting at reg REGNO
3947 to hold something of mode MODE.
3948 This is ordinarily the length in words of a value of mode MODE
3949 but can be less for certain modes in special long registers.
3951 For the SPE, GPRs are 64 bits but only 32 bits are visible in
3952 scalar instructions. The upper 32 bits are only available to the
3955 POWER and PowerPC GPRs hold 32 bits worth;
3956 PowerPC64 GPRs and FPRs point register holds 64 bits worth. */
3959 rs6000_hard_regno_nregs (int regno, enum machine_mode mode)
3961 if (FP_REGNO_P (regno))
3962 return (GET_MODE_SIZE (mode) + UNITS_PER_FP_WORD - 1) / UNITS_PER_FP_WORD;
3964 if (TARGET_E500_DOUBLE && mode == DFmode)
3967 if (SPE_SIMD_REGNO_P (regno) && TARGET_SPE && SPE_VECTOR_MODE (mode))
3968 return (GET_MODE_SIZE (mode) + UNITS_PER_SPE_WORD - 1) / UNITS_PER_SPE_WORD;
3970 if (ALTIVEC_REGNO_P (regno))
3972 (GET_MODE_SIZE (mode) + UNITS_PER_ALTIVEC_WORD - 1) / UNITS_PER_ALTIVEC_WORD;
3974 return (GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
3977 /* Change register usage conditional on target flags. */
3979 rs6000_conditional_register_usage (void)
3983 /* Set MQ register fixed (already call_used) if not POWER
3984 architecture (RIOS1, RIOS2, RSC, and PPC601) so that it will not
3989 /* 64-bit AIX reserves GPR13 for thread-private data. */
3991 fixed_regs[13] = call_used_regs[13]
3992 = call_really_used_regs[13] = 1;
3994 /* Conditionally disable FPRs. */
3995 if (TARGET_SOFT_FLOAT || !TARGET_FPRS)
3996 for (i = 32; i < 64; i++)
3997 fixed_regs[i] = call_used_regs[i]
3998 = call_really_used_regs[i] = 1;
4000 if (DEFAULT_ABI == ABI_V4
4001 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
4003 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
4005 if (DEFAULT_ABI == ABI_V4
4006 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
4008 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
4009 = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
4010 = call_really_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
4012 if (DEFAULT_ABI == ABI_DARWIN
4013 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
4014 global_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
4015 = fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
4016 = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
4017 = call_really_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
4019 if (TARGET_TOC && TARGET_MINIMAL_TOC)
4020 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
4021 = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
4024 global_regs[VSCR_REGNO] = 1;
4028 global_regs[SPEFSCR_REGNO] = 1;
4029 fixed_regs[FIXED_SCRATCH]
4030 = call_used_regs[FIXED_SCRATCH]
4031 = call_really_used_regs[FIXED_SCRATCH] = 1;
4034 if (! TARGET_ALTIVEC)
4036 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
4037 fixed_regs[i] = call_used_regs[i] = call_really_used_regs[i] = 1;
4038 call_really_used_regs[VRSAVE_REGNO] = 1;
4041 if (TARGET_ALTIVEC_ABI)
4042 for (i = FIRST_ALTIVEC_REGNO; i < FIRST_ALTIVEC_REGNO + 20; ++i)
4043 call_used_regs[i] = call_really_used_regs[i] = 1;
4046 /* Try to output insns to set TARGET equal to the constant C if it can
4047 be done in less than N insns. Do all computations in MODE.
4048 Returns the place where the output has been placed if it can be
4049 done and the insns have been emitted. If it would take more than N
4050 insns, zero is returned and no insns and emitted. */
4053 rs6000_emit_set_const (rtx dest, enum machine_mode mode,
4054 rtx source, int n ATTRIBUTE_UNUSED)
4056 rtx result, insn, set;
4057 HOST_WIDE_INT c0, c1;
4059 if (mode == QImode || mode == HImode)
4062 dest = gen_reg_rtx (mode);
4063 emit_insn (gen_rtx_SET (VOIDmode, dest, source));
4066 else if (mode == SImode)
4068 result = no_new_pseudos ? dest : gen_reg_rtx (SImode);
4070 emit_insn (gen_rtx_SET (VOIDmode, result,
4071 GEN_INT (INTVAL (source)
4072 & (~ (HOST_WIDE_INT) 0xffff))));
4073 emit_insn (gen_rtx_SET (VOIDmode, dest,
4074 gen_rtx_IOR (SImode, result,
4075 GEN_INT (INTVAL (source) & 0xffff))));
4078 else if (mode == DImode)
4080 if (GET_CODE (source) == CONST_INT)
4082 c0 = INTVAL (source);
4085 else if (GET_CODE (source) == CONST_DOUBLE)
4087 #if HOST_BITS_PER_WIDE_INT >= 64
4088 c0 = CONST_DOUBLE_LOW (source);
4091 c0 = CONST_DOUBLE_LOW (source);
4092 c1 = CONST_DOUBLE_HIGH (source);
4098 result = rs6000_emit_set_long_const (dest, c0, c1);
4103 insn = get_last_insn ();
4104 set = single_set (insn);
4105 if (! CONSTANT_P (SET_SRC (set)))
4106 set_unique_reg_note (insn, REG_EQUAL, source);
4111 /* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
4112 fall back to a straight forward decomposition. We do this to avoid
4113 exponential run times encountered when looking for longer sequences
4114 with rs6000_emit_set_const. */
4116 rs6000_emit_set_long_const (rtx dest, HOST_WIDE_INT c1, HOST_WIDE_INT c2)
4118 if (!TARGET_POWERPC64)
4120 rtx operand1, operand2;
4122 operand1 = operand_subword_force (dest, WORDS_BIG_ENDIAN == 0,
4124 operand2 = operand_subword_force (dest, WORDS_BIG_ENDIAN != 0,
4126 emit_move_insn (operand1, GEN_INT (c1));
4127 emit_move_insn (operand2, GEN_INT (c2));
4131 HOST_WIDE_INT ud1, ud2, ud3, ud4;
4134 ud2 = (c1 & 0xffff0000) >> 16;
4135 #if HOST_BITS_PER_WIDE_INT >= 64
4139 ud4 = (c2 & 0xffff0000) >> 16;
4141 if ((ud4 == 0xffff && ud3 == 0xffff && ud2 == 0xffff && (ud1 & 0x8000))
4142 || (ud4 == 0 && ud3 == 0 && ud2 == 0 && ! (ud1 & 0x8000)))
4145 emit_move_insn (dest, GEN_INT (((ud1 ^ 0x8000) - 0x8000)));
4147 emit_move_insn (dest, GEN_INT (ud1));
4150 else if ((ud4 == 0xffff && ud3 == 0xffff && (ud2 & 0x8000))
4151 || (ud4 == 0 && ud3 == 0 && ! (ud2 & 0x8000)))
4154 emit_move_insn (dest, GEN_INT (((ud2 << 16) ^ 0x80000000)
4157 emit_move_insn (dest, GEN_INT (ud2 << 16));
4159 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
4161 else if ((ud4 == 0xffff && (ud3 & 0x8000))
4162 || (ud4 == 0 && ! (ud3 & 0x8000)))
4165 emit_move_insn (dest, GEN_INT (((ud3 << 16) ^ 0x80000000)
4168 emit_move_insn (dest, GEN_INT (ud3 << 16));
4171 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud2)));
4172 emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (16)));
4174 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
4179 emit_move_insn (dest, GEN_INT (((ud4 << 16) ^ 0x80000000)
4182 emit_move_insn (dest, GEN_INT (ud4 << 16));
4185 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud3)));
4187 emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (32)));
4189 emit_move_insn (dest, gen_rtx_IOR (DImode, dest,
4190 GEN_INT (ud2 << 16)));
4192 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
4198 /* Helper for the following. Get rid of [r+r] memory refs
4199 in cases where it won't work (TImode, TFmode). */
4202 rs6000_eliminate_indexed_memrefs (rtx operands[2])
4204 if (GET_CODE (operands[0]) == MEM
4205 && GET_CODE (XEXP (operands[0], 0)) != REG
4206 && ! reload_in_progress)
4208 = replace_equiv_address (operands[0],
4209 copy_addr_to_reg (XEXP (operands[0], 0)));
4211 if (GET_CODE (operands[1]) == MEM
4212 && GET_CODE (XEXP (operands[1], 0)) != REG
4213 && ! reload_in_progress)
4215 = replace_equiv_address (operands[1],
4216 copy_addr_to_reg (XEXP (operands[1], 0)));
4219 /* Emit a move from SOURCE to DEST in mode MODE. */
4221 rs6000_emit_move (rtx dest, rtx source, enum machine_mode mode)
4225 operands[1] = source;
4227 /* Sanity checks. Check that we get CONST_DOUBLE only when we should. */
4228 if (GET_CODE (operands[1]) == CONST_DOUBLE
4229 && ! FLOAT_MODE_P (mode)
4230 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
4232 /* FIXME. This should never happen. */
4233 /* Since it seems that it does, do the safe thing and convert
4235 operands[1] = gen_int_mode (CONST_DOUBLE_LOW (operands[1]), mode);
4237 if (GET_CODE (operands[1]) == CONST_DOUBLE
4238 && ! FLOAT_MODE_P (mode)
4239 && ((CONST_DOUBLE_HIGH (operands[1]) == 0
4240 && CONST_DOUBLE_LOW (operands[1]) >= 0)
4241 || (CONST_DOUBLE_HIGH (operands[1]) == -1
4242 && CONST_DOUBLE_LOW (operands[1]) < 0)))
4245 /* Check if GCC is setting up a block move that will end up using FP
4246 registers as temporaries. We must make sure this is acceptable. */
4247 if (GET_CODE (operands[0]) == MEM
4248 && GET_CODE (operands[1]) == MEM
4250 && (SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[0]))
4251 || SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[1])))
4252 && ! (SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[0]) > 32
4253 ? 32 : MEM_ALIGN (operands[0])))
4254 || SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[1]) > 32
4256 : MEM_ALIGN (operands[1]))))
4257 && ! MEM_VOLATILE_P (operands [0])
4258 && ! MEM_VOLATILE_P (operands [1]))
4260 emit_move_insn (adjust_address (operands[0], SImode, 0),
4261 adjust_address (operands[1], SImode, 0));
4262 emit_move_insn (adjust_address (operands[0], SImode, 4),
4263 adjust_address (operands[1], SImode, 4));
4267 if (!no_new_pseudos && GET_CODE (operands[0]) != REG
4268 && !gpc_reg_operand (operands[1], mode))
4269 operands[1] = force_reg (mode, operands[1]);
4271 if (mode == SFmode && ! TARGET_POWERPC
4272 && TARGET_HARD_FLOAT && TARGET_FPRS
4273 && GET_CODE (operands[0]) == MEM)
4277 if (reload_in_progress || reload_completed)
4278 regnum = true_regnum (operands[1]);
4279 else if (GET_CODE (operands[1]) == REG)
4280 regnum = REGNO (operands[1]);
4284 /* If operands[1] is a register, on POWER it may have
4285 double-precision data in it, so truncate it to single
4287 if (FP_REGNO_P (regnum) || regnum >= FIRST_PSEUDO_REGISTER)
4290 newreg = (no_new_pseudos ? operands[1] : gen_reg_rtx (mode));
4291 emit_insn (gen_aux_truncdfsf2 (newreg, operands[1]));
4292 operands[1] = newreg;
4296 /* Recognize the case where operand[1] is a reference to thread-local
4297 data and load its address to a register. */
4298 if (GET_CODE (operands[1]) == SYMBOL_REF)
4300 enum tls_model model = SYMBOL_REF_TLS_MODEL (operands[1]);
4302 operands[1] = rs6000_legitimize_tls_address (operands[1], model);
4305 /* Handle the case where reload calls us with an invalid address. */
4306 if (reload_in_progress && mode == Pmode
4307 && (! general_operand (operands[1], mode)
4308 || ! nonimmediate_operand (operands[0], mode)))
4311 /* 128-bit constant floating-point values on Darwin should really be
4312 loaded as two parts. */
4313 if ((DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_DARWIN)
4314 && TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_LONG_DOUBLE_128
4315 && mode == TFmode && GET_CODE (operands[1]) == CONST_DOUBLE)
4317 /* DImode is used, not DFmode, because simplify_gen_subreg doesn't
4318 know how to get a DFmode SUBREG of a TFmode. */
4319 rs6000_emit_move (simplify_gen_subreg (DImode, operands[0], mode, 0),
4320 simplify_gen_subreg (DImode, operands[1], mode, 0),
4322 rs6000_emit_move (simplify_gen_subreg (DImode, operands[0], mode,
4323 GET_MODE_SIZE (DImode)),
4324 simplify_gen_subreg (DImode, operands[1], mode,
4325 GET_MODE_SIZE (DImode)),
4330 /* FIXME: In the long term, this switch statement should go away
4331 and be replaced by a sequence of tests based on things like
4337 if (CONSTANT_P (operands[1])
4338 && GET_CODE (operands[1]) != CONST_INT)
4339 operands[1] = force_const_mem (mode, operands[1]);
4343 rs6000_eliminate_indexed_memrefs (operands);
4348 if (CONSTANT_P (operands[1])
4349 && ! easy_fp_constant (operands[1], mode))
4350 operands[1] = force_const_mem (mode, operands[1]);
4361 if (CONSTANT_P (operands[1])
4362 && !easy_vector_constant (operands[1], mode))
4363 operands[1] = force_const_mem (mode, operands[1]);
4368 /* Use default pattern for address of ELF small data */
4371 && DEFAULT_ABI == ABI_V4
4372 && (GET_CODE (operands[1]) == SYMBOL_REF
4373 || GET_CODE (operands[1]) == CONST)
4374 && small_data_operand (operands[1], mode))
4376 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
4380 if (DEFAULT_ABI == ABI_V4
4381 && mode == Pmode && mode == SImode
4382 && flag_pic == 1 && got_operand (operands[1], mode))
4384 emit_insn (gen_movsi_got (operands[0], operands[1]));
4388 if ((TARGET_ELF || DEFAULT_ABI == ABI_DARWIN)
4392 && CONSTANT_P (operands[1])
4393 && GET_CODE (operands[1]) != HIGH
4394 && GET_CODE (operands[1]) != CONST_INT)
4396 rtx target = (no_new_pseudos ? operands[0] : gen_reg_rtx (mode));
4398 /* If this is a function address on -mcall-aixdesc,
4399 convert it to the address of the descriptor. */
4400 if (DEFAULT_ABI == ABI_AIX
4401 && GET_CODE (operands[1]) == SYMBOL_REF
4402 && XSTR (operands[1], 0)[0] == '.')
4404 const char *name = XSTR (operands[1], 0);
4406 while (*name == '.')
4408 new_ref = gen_rtx_SYMBOL_REF (Pmode, name);
4409 CONSTANT_POOL_ADDRESS_P (new_ref)
4410 = CONSTANT_POOL_ADDRESS_P (operands[1]);
4411 SYMBOL_REF_FLAGS (new_ref) = SYMBOL_REF_FLAGS (operands[1]);
4412 SYMBOL_REF_USED (new_ref) = SYMBOL_REF_USED (operands[1]);
4413 SYMBOL_REF_DECL (new_ref) = SYMBOL_REF_DECL (operands[1]);
4414 operands[1] = new_ref;
4417 if (DEFAULT_ABI == ABI_DARWIN)
4420 if (MACHO_DYNAMIC_NO_PIC_P)
4422 /* Take care of any required data indirection. */
4423 operands[1] = rs6000_machopic_legitimize_pic_address (
4424 operands[1], mode, operands[0]);
4425 if (operands[0] != operands[1])
4426 emit_insn (gen_rtx_SET (VOIDmode,
4427 operands[0], operands[1]));
4431 emit_insn (gen_macho_high (target, operands[1]));
4432 emit_insn (gen_macho_low (operands[0], target, operands[1]));
4436 emit_insn (gen_elf_high (target, operands[1]));
4437 emit_insn (gen_elf_low (operands[0], target, operands[1]));
4441 /* If this is a SYMBOL_REF that refers to a constant pool entry,
4442 and we have put it in the TOC, we just need to make a TOC-relative
4445 && GET_CODE (operands[1]) == SYMBOL_REF
4446 && constant_pool_expr_p (operands[1])
4447 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands[1]),
4448 get_pool_mode (operands[1])))
4450 operands[1] = create_TOC_reference (operands[1]);
4452 else if (mode == Pmode
4453 && CONSTANT_P (operands[1])
4454 && ((GET_CODE (operands[1]) != CONST_INT
4455 && ! easy_fp_constant (operands[1], mode))
4456 || (GET_CODE (operands[1]) == CONST_INT
4457 && num_insns_constant (operands[1], mode) > 2)
4458 || (GET_CODE (operands[0]) == REG
4459 && FP_REGNO_P (REGNO (operands[0]))))
4460 && GET_CODE (operands[1]) != HIGH
4461 && ! legitimate_constant_pool_address_p (operands[1])
4462 && ! toc_relative_expr_p (operands[1]))
4464 /* Emit a USE operation so that the constant isn't deleted if
4465 expensive optimizations are turned on because nobody
4466 references it. This should only be done for operands that
4467 contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
4468 This should not be done for operands that contain LABEL_REFs.
4469 For now, we just handle the obvious case. */
4470 if (GET_CODE (operands[1]) != LABEL_REF)
4471 emit_insn (gen_rtx_USE (VOIDmode, operands[1]));
4474 /* Darwin uses a special PIC legitimizer. */
4475 if (DEFAULT_ABI == ABI_DARWIN && MACHOPIC_INDIRECT)
4478 rs6000_machopic_legitimize_pic_address (operands[1], mode,
4480 if (operands[0] != operands[1])
4481 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
4486 /* If we are to limit the number of things we put in the TOC and
4487 this is a symbol plus a constant we can add in one insn,
4488 just put the symbol in the TOC and add the constant. Don't do
4489 this if reload is in progress. */
4490 if (GET_CODE (operands[1]) == CONST
4491 && TARGET_NO_SUM_IN_TOC && ! reload_in_progress
4492 && GET_CODE (XEXP (operands[1], 0)) == PLUS
4493 && add_operand (XEXP (XEXP (operands[1], 0), 1), mode)
4494 && (GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == LABEL_REF
4495 || GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF)
4496 && ! side_effects_p (operands[0]))
4499 force_const_mem (mode, XEXP (XEXP (operands[1], 0), 0));
4500 rtx other = XEXP (XEXP (operands[1], 0), 1);
4502 sym = force_reg (mode, sym);
4504 emit_insn (gen_addsi3 (operands[0], sym, other));
4506 emit_insn (gen_adddi3 (operands[0], sym, other));
4510 operands[1] = force_const_mem (mode, operands[1]);
4513 && constant_pool_expr_p (XEXP (operands[1], 0))
4514 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
4515 get_pool_constant (XEXP (operands[1], 0)),
4516 get_pool_mode (XEXP (operands[1], 0))))
4519 = gen_const_mem (mode,
4520 create_TOC_reference (XEXP (operands[1], 0)));
4521 set_mem_alias_set (operands[1], get_TOC_alias_set ());
4527 rs6000_eliminate_indexed_memrefs (operands);
4531 emit_insn (gen_rtx_PARALLEL (VOIDmode,
4533 gen_rtx_SET (VOIDmode,
4534 operands[0], operands[1]),
4535 gen_rtx_CLOBBER (VOIDmode,
4536 gen_rtx_SCRATCH (SImode)))));
4545 /* Above, we may have called force_const_mem which may have returned
4546 an invalid address. If we can, fix this up; otherwise, reload will
4547 have to deal with it. */
4548 if (GET_CODE (operands[1]) == MEM && ! reload_in_progress)
4549 operands[1] = validize_mem (operands[1]);
4552 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
4555 /* Nonzero if we can use a floating-point register to pass this arg. */
4556 #define USE_FP_FOR_ARG_P(CUM,MODE,TYPE) \
4557 (GET_MODE_CLASS (MODE) == MODE_FLOAT \
4558 && (CUM)->fregno <= FP_ARG_MAX_REG \
4559 && TARGET_HARD_FLOAT && TARGET_FPRS)
4561 /* Nonzero if we can use an AltiVec register to pass this arg. */
4562 #define USE_ALTIVEC_FOR_ARG_P(CUM,MODE,TYPE,NAMED) \
4563 (ALTIVEC_VECTOR_MODE (MODE) \
4564 && (CUM)->vregno <= ALTIVEC_ARG_MAX_REG \
4565 && TARGET_ALTIVEC_ABI \
4568 /* Return a nonzero value to say to return the function value in
4569 memory, just as large structures are always returned. TYPE will be
4570 the data type of the value, and FNTYPE will be the type of the
4571 function doing the returning, or @code{NULL} for libcalls.
4573 The AIX ABI for the RS/6000 specifies that all structures are
4574 returned in memory. The Darwin ABI does the same. The SVR4 ABI
4575 specifies that structures <= 8 bytes are returned in r3/r4, but a
4576 draft put them in memory, and GCC used to implement the draft
4577 instead of the final standard. Therefore, TARGET_AIX_STRUCT_RET
4578 controls this instead of DEFAULT_ABI; V.4 targets needing backward
4579 compatibility can change DRAFT_V4_STRUCT_RET to override the
4580 default, and -m switches get the final word. See
4581 rs6000_override_options for more details.
4583 The PPC32 SVR4 ABI uses IEEE double extended for long double, if 128-bit
4584 long double support is enabled. These values are returned in memory.
4586 int_size_in_bytes returns -1 for variable size objects, which go in
4587 memory always. The cast to unsigned makes -1 > 8. */
4590 rs6000_return_in_memory (tree type, tree fntype ATTRIBUTE_UNUSED)
4592 if (AGGREGATE_TYPE_P (type)
4593 && (TARGET_AIX_STRUCT_RET
4594 || (unsigned HOST_WIDE_INT) int_size_in_bytes (type) > 8))
4596 if (DEFAULT_ABI == ABI_V4 && TYPE_MODE (type) == TFmode)
4601 /* Initialize a variable CUM of type CUMULATIVE_ARGS
4602 for a call to a function whose data type is FNTYPE.
4603 For a library call, FNTYPE is 0.
4605 For incoming args we set the number of arguments in the prototype large
4606 so we never return a PARALLEL. */
4609 init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype,
4610 rtx libname ATTRIBUTE_UNUSED, int incoming,
4611 int libcall, int n_named_args)
4613 static CUMULATIVE_ARGS zero_cumulative;
4615 *cum = zero_cumulative;
4617 cum->fregno = FP_ARG_MIN_REG;
4618 cum->vregno = ALTIVEC_ARG_MIN_REG;
4619 cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
4620 cum->call_cookie = ((DEFAULT_ABI == ABI_V4 && libcall)
4621 ? CALL_LIBCALL : CALL_NORMAL);
4622 cum->sysv_gregno = GP_ARG_MIN_REG;
4623 cum->stdarg = fntype
4624 && (TYPE_ARG_TYPES (fntype) != 0
4625 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
4626 != void_type_node));
4628 cum->nargs_prototype = 0;
4629 if (incoming || cum->prototype)
4630 cum->nargs_prototype = n_named_args;
4632 /* Check for a longcall attribute. */
4634 && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype))
4635 && !lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype)))
4636 cum->call_cookie = CALL_LONG;
4638 if (TARGET_DEBUG_ARG)
4640 fprintf (stderr, "\ninit_cumulative_args:");
4643 tree ret_type = TREE_TYPE (fntype);
4644 fprintf (stderr, " ret code = %s,",
4645 tree_code_name[ (int)TREE_CODE (ret_type) ]);
4648 if (cum->call_cookie & CALL_LONG)
4649 fprintf (stderr, " longcall,");
4651 fprintf (stderr, " proto = %d, nargs = %d\n",
4652 cum->prototype, cum->nargs_prototype);
4657 && TARGET_ALTIVEC_ABI
4658 && ALTIVEC_VECTOR_MODE (TYPE_MODE (TREE_TYPE (fntype))))
4660 error ("Cannot return value in vector register because"
4661 " altivec instructions are disabled, use -maltivec"
4662 " to enable them.");
4666 /* Return true if TYPE must be passed on the stack and not in registers. */
4669 rs6000_must_pass_in_stack (enum machine_mode mode, tree type)
4671 if (DEFAULT_ABI == ABI_AIX || TARGET_64BIT)
4672 return must_pass_in_stack_var_size (mode, type);
4674 return must_pass_in_stack_var_size_or_pad (mode, type);
4677 /* If defined, a C expression which determines whether, and in which
4678 direction, to pad out an argument with extra space. The value
4679 should be of type `enum direction': either `upward' to pad above
4680 the argument, `downward' to pad below, or `none' to inhibit
4683 For the AIX ABI structs are always stored left shifted in their
4687 function_arg_padding (enum machine_mode mode, tree type)
4689 #ifndef AGGREGATE_PADDING_FIXED
4690 #define AGGREGATE_PADDING_FIXED 0
4692 #ifndef AGGREGATES_PAD_UPWARD_ALWAYS
4693 #define AGGREGATES_PAD_UPWARD_ALWAYS 0
4696 if (!AGGREGATE_PADDING_FIXED)
4698 /* GCC used to pass structures of the same size as integer types as
4699 if they were in fact integers, ignoring FUNCTION_ARG_PADDING.
4700 i.e. Structures of size 1 or 2 (or 4 when TARGET_64BIT) were
4701 passed padded downward, except that -mstrict-align further
4702 muddied the water in that multi-component structures of 2 and 4
4703 bytes in size were passed padded upward.
4705 The following arranges for best compatibility with previous
4706 versions of gcc, but removes the -mstrict-align dependency. */
4707 if (BYTES_BIG_ENDIAN)
4709 HOST_WIDE_INT size = 0;
4711 if (mode == BLKmode)
4713 if (type && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
4714 size = int_size_in_bytes (type);
4717 size = GET_MODE_SIZE (mode);
4719 if (size == 1 || size == 2 || size == 4)
4725 if (AGGREGATES_PAD_UPWARD_ALWAYS)
4727 if (type != 0 && AGGREGATE_TYPE_P (type))
4731 /* Fall back to the default. */
4732 return DEFAULT_FUNCTION_ARG_PADDING (mode, type);
4735 /* If defined, a C expression that gives the alignment boundary, in bits,
4736 of an argument with the specified mode and type. If it is not defined,
4737 PARM_BOUNDARY is used for all arguments.
4739 V.4 wants long longs to be double word aligned. */
4742 function_arg_boundary (enum machine_mode mode, tree type ATTRIBUTE_UNUSED)
4744 if (DEFAULT_ABI == ABI_V4 && GET_MODE_SIZE (mode) == 8)
4746 else if (SPE_VECTOR_MODE (mode))
4748 else if (ALTIVEC_VECTOR_MODE (mode))
4751 return PARM_BOUNDARY;
4754 /* Compute the size (in words) of a function argument. */
4756 static unsigned long
4757 rs6000_arg_size (enum machine_mode mode, tree type)
4761 if (mode != BLKmode)
4762 size = GET_MODE_SIZE (mode);
4764 size = int_size_in_bytes (type);
4767 return (size + 3) >> 2;
4769 return (size + 7) >> 3;
4772 /* Update the data in CUM to advance over an argument
4773 of mode MODE and data type TYPE.
4774 (TYPE is null for libcalls where that information may not be available.)
4776 Note that for args passed by reference, function_arg will be called
4777 with MODE and TYPE set to that of the pointer to the arg, not the arg
4781 function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
4782 tree type, int named)
4784 cum->nargs_prototype--;
4786 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
4790 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named))
4793 if (!TARGET_ALTIVEC)
4794 error ("Cannot pass argument in vector register because"
4795 " altivec instructions are disabled, use -maltivec"
4796 " to enable them.");
4798 /* PowerPC64 Linux and AIX allocate GPRs for a vector argument
4799 even if it is going to be passed in a vector register.
4800 Darwin does the same for variable-argument functions. */
4801 if ((DEFAULT_ABI == ABI_AIX && TARGET_64BIT)
4802 || (cum->stdarg && DEFAULT_ABI != ABI_V4))
4812 /* Vector parameters must be 16-byte aligned. This places
4813 them at 2 mod 4 in terms of words in 32-bit mode, since
4814 the parameter save area starts at offset 24 from the
4815 stack. In 64-bit mode, they just have to start on an
4816 even word, since the parameter save area is 16-byte
4817 aligned. Space for GPRs is reserved even if the argument
4818 will be passed in memory. */
4820 align = (2 - cum->words) & 3;
4822 align = cum->words & 1;
4823 cum->words += align + rs6000_arg_size (mode, type);
4825 if (TARGET_DEBUG_ARG)
4827 fprintf (stderr, "function_adv: words = %2d, align=%d, ",
4829 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s\n",
4830 cum->nargs_prototype, cum->prototype,
4831 GET_MODE_NAME (mode));
4835 else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode)
4837 && cum->sysv_gregno <= GP_ARG_MAX_REG)
4839 else if (DEFAULT_ABI == ABI_V4)
4841 if (TARGET_HARD_FLOAT && TARGET_FPRS
4842 && (mode == SFmode || mode == DFmode))
4844 if (cum->fregno <= FP_ARG_V4_MAX_REG)
4849 cum->words += cum->words & 1;
4850 cum->words += rs6000_arg_size (mode, type);
4855 int n_words = rs6000_arg_size (mode, type);
4856 int gregno = cum->sysv_gregno;
4858 /* Long long and SPE vectors are put in (r3,r4), (r5,r6),
4859 (r7,r8) or (r9,r10). As does any other 2 word item such
4860 as complex int due to a historical mistake. */
4862 gregno += (1 - gregno) & 1;
4864 /* Multi-reg args are not split between registers and stack. */
4865 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
4867 /* Long long and SPE vectors are aligned on the stack.
4868 So are other 2 word items such as complex int due to
4869 a historical mistake. */
4871 cum->words += cum->words & 1;
4872 cum->words += n_words;
4875 /* Note: continuing to accumulate gregno past when we've started
4876 spilling to the stack indicates the fact that we've started
4877 spilling to the stack to expand_builtin_saveregs. */
4878 cum->sysv_gregno = gregno + n_words;
4881 if (TARGET_DEBUG_ARG)
4883 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
4884 cum->words, cum->fregno);
4885 fprintf (stderr, "gregno = %2d, nargs = %4d, proto = %d, ",
4886 cum->sysv_gregno, cum->nargs_prototype, cum->prototype);
4887 fprintf (stderr, "mode = %4s, named = %d\n",
4888 GET_MODE_NAME (mode), named);
4893 int n_words = rs6000_arg_size (mode, type);
4894 int align = function_arg_boundary (mode, type) / PARM_BOUNDARY - 1;
4896 /* The simple alignment calculation here works because
4897 function_arg_boundary / PARM_BOUNDARY will only be 1 or 2.
4898 If we ever want to handle alignments larger than 8 bytes for
4899 32-bit or 16 bytes for 64-bit, then we'll need to take into
4900 account the offset to the start of the parm save area. */
4901 align &= cum->words;
4902 cum->words += align + n_words;
4904 if (GET_MODE_CLASS (mode) == MODE_FLOAT
4905 && TARGET_HARD_FLOAT && TARGET_FPRS)
4906 cum->fregno += (GET_MODE_SIZE (mode) + 7) >> 3;
4908 if (TARGET_DEBUG_ARG)
4910 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
4911 cum->words, cum->fregno);
4912 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s, ",
4913 cum->nargs_prototype, cum->prototype, GET_MODE_NAME (mode));
4914 fprintf (stderr, "named = %d, align = %d\n", named, align);
4920 spe_build_register_parallel (enum machine_mode mode, int gregno)
4923 enum machine_mode inner;
4924 unsigned int inner_bytes;
4934 r1 = gen_rtx_REG (inner, gregno);
4935 r1 = gen_rtx_EXPR_LIST (SImode, r1, const0_rtx);
4936 r2 = gen_rtx_REG (inner, gregno + 1);
4937 r2 = gen_rtx_EXPR_LIST (SImode, r2, GEN_INT (inner_bytes));
4938 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
4941 /* Determine where to put a SIMD argument on the SPE. */
4943 rs6000_spe_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
4946 int gregno = cum->sysv_gregno;
4948 /* On E500 v2, double arithmetic is done on the full 64-bit GPR, but
4949 are passed and returned in a pair of GPRs for ABI compatibility. */
4950 if (TARGET_E500_DOUBLE && mode == DFmode)
4952 /* Doubles go in an odd/even register pair (r5/r6, etc). */
4953 gregno += (1 - gregno) & 1;
4955 /* We do not split between registers and stack. */
4956 if (gregno + 1 > GP_ARG_MAX_REG)
4959 return spe_build_register_parallel (mode, gregno);
4963 int n_words = rs6000_arg_size (mode, type);
4965 /* SPE vectors are put in odd registers. */
4966 if (n_words == 2 && (gregno & 1) == 0)
4969 if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
4972 enum machine_mode m = SImode;
4974 r1 = gen_rtx_REG (m, gregno);
4975 r1 = gen_rtx_EXPR_LIST (m, r1, const0_rtx);
4976 r2 = gen_rtx_REG (m, gregno + 1);
4977 r2 = gen_rtx_EXPR_LIST (m, r2, GEN_INT (4));
4978 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
4985 if (gregno <= GP_ARG_MAX_REG)
4986 return gen_rtx_REG (mode, gregno);
4992 /* Determine where to place an argument in 64-bit mode with 32-bit ABI. */
4995 rs6000_mixed_function_arg (enum machine_mode mode, tree type, int align_words)
4999 rtx rvec[GP_ARG_NUM_REG + 1];
5001 if (align_words >= GP_ARG_NUM_REG)
5004 n_units = rs6000_arg_size (mode, type);
5006 /* Optimize the simple case where the arg fits in one gpr, except in
5007 the case of BLKmode due to assign_parms assuming that registers are
5008 BITS_PER_WORD wide. */
5010 || (n_units == 1 && mode != BLKmode))
5011 return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
5014 if (align_words + n_units > GP_ARG_NUM_REG)
5015 /* Not all of the arg fits in gprs. Say that it goes in memory too,
5016 using a magic NULL_RTX component.
5017 FIXME: This is not strictly correct. Only some of the arg
5018 belongs in memory, not all of it. However, there isn't any way
5019 to do this currently, apart from building rtx descriptions for
5020 the pieces of memory we want stored. Due to bugs in the generic
5021 code we can't use the normal function_arg_partial_nregs scheme
5022 with the PARALLEL arg description we emit here.
5023 In any case, the code to store the whole arg to memory is often
5024 more efficient than code to store pieces, and we know that space
5025 is available in the right place for the whole arg. */
5026 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
5031 rtx r = gen_rtx_REG (SImode, GP_ARG_MIN_REG + align_words);
5032 rtx off = GEN_INT (i++ * 4);
5033 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, off);
5035 while (++align_words < GP_ARG_NUM_REG && --n_units != 0);
5037 return gen_rtx_PARALLEL (mode, gen_rtvec_v (k, rvec));
5040 /* Determine where to put an argument to a function.
5041 Value is zero to push the argument on the stack,
5042 or a hard register in which to store the argument.
5044 MODE is the argument's machine mode.
5045 TYPE is the data type of the argument (as a tree).
5046 This is null for libcalls where that information may
5048 CUM is a variable of type CUMULATIVE_ARGS which gives info about
5049 the preceding args and about the function being called.
5050 NAMED is nonzero if this argument is a named parameter
5051 (otherwise it is an extra parameter matching an ellipsis).
5053 On RS/6000 the first eight words of non-FP are normally in registers
5054 and the rest are pushed. Under AIX, the first 13 FP args are in registers.
5055 Under V.4, the first 8 FP args are in registers.
5057 If this is floating-point and no prototype is specified, we use
5058 both an FP and integer register (or possibly FP reg and stack). Library
5059 functions (when CALL_LIBCALL is set) always have the proper types for args,
5060 so we can pass the FP value just in one register. emit_library_function
5061 doesn't support PARALLEL anyway.
5063 Note that for args passed by reference, function_arg will be called
5064 with MODE and TYPE set to that of the pointer to the arg, not the arg
5068 function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
5069 tree type, int named)
5071 enum rs6000_abi abi = DEFAULT_ABI;
5073 /* Return a marker to indicate whether CR1 needs to set or clear the
5074 bit that V.4 uses to say fp args were passed in registers.
5075 Assume that we don't need the marker for software floating point,
5076 or compiler generated library calls. */
5077 if (mode == VOIDmode)
5080 && cum->nargs_prototype < 0
5081 && (cum->call_cookie & CALL_LIBCALL) == 0
5082 && (cum->prototype || TARGET_NO_PROTOTYPE))
5084 /* For the SPE, we need to crxor CR6 always. */
5086 return GEN_INT (cum->call_cookie | CALL_V4_SET_FP_ARGS);
5087 else if (TARGET_HARD_FLOAT && TARGET_FPRS)
5088 return GEN_INT (cum->call_cookie
5089 | ((cum->fregno == FP_ARG_MIN_REG)
5090 ? CALL_V4_SET_FP_ARGS
5091 : CALL_V4_CLEAR_FP_ARGS));
5094 return GEN_INT (cum->call_cookie);
5097 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named))
5098 if (TARGET_64BIT && ! cum->prototype)
5100 /* Vector parameters get passed in vector register
5101 and also in GPRs or memory, in absence of prototype. */
5104 align_words = (cum->words + 1) & ~1;
5106 if (align_words >= GP_ARG_NUM_REG)
5112 slot = gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
5114 return gen_rtx_PARALLEL (mode,
5116 gen_rtx_EXPR_LIST (VOIDmode,
5118 gen_rtx_EXPR_LIST (VOIDmode,
5119 gen_rtx_REG (mode, cum->vregno),
5123 return gen_rtx_REG (mode, cum->vregno);
5124 else if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
5126 if (named || abi == ABI_V4)
5130 /* Vector parameters to varargs functions under AIX or Darwin
5131 get passed in memory and possibly also in GPRs. */
5132 int align, align_words, n_words;
5133 enum machine_mode part_mode;
5135 /* Vector parameters must be 16-byte aligned. This places them at
5136 2 mod 4 in terms of words in 32-bit mode, since the parameter
5137 save area starts at offset 24 from the stack. In 64-bit mode,
5138 they just have to start on an even word, since the parameter
5139 save area is 16-byte aligned. */
5141 align = (2 - cum->words) & 3;
5143 align = cum->words & 1;
5144 align_words = cum->words + align;
5146 /* Out of registers? Memory, then. */
5147 if (align_words >= GP_ARG_NUM_REG)
5150 if (TARGET_32BIT && TARGET_POWERPC64)
5151 return rs6000_mixed_function_arg (mode, type, align_words);
5153 /* The vector value goes in GPRs. Only the part of the
5154 value in GPRs is reported here. */
5156 n_words = rs6000_arg_size (mode, type);
5157 if (align_words + n_words > GP_ARG_NUM_REG)
5158 /* Fortunately, there are only two possibilities, the value
5159 is either wholly in GPRs or half in GPRs and half not. */
5162 return gen_rtx_REG (part_mode, GP_ARG_MIN_REG + align_words);
5165 else if (TARGET_SPE_ABI && TARGET_SPE
5166 && (SPE_VECTOR_MODE (mode)
5167 || (TARGET_E500_DOUBLE && mode == DFmode)))
5168 return rs6000_spe_function_arg (cum, mode, type);
5169 else if (abi == ABI_V4)
5171 if (TARGET_HARD_FLOAT && TARGET_FPRS
5172 && (mode == SFmode || mode == DFmode))
5174 if (cum->fregno <= FP_ARG_V4_MAX_REG)
5175 return gen_rtx_REG (mode, cum->fregno);
5181 int n_words = rs6000_arg_size (mode, type);
5182 int gregno = cum->sysv_gregno;
5184 /* Long long and SPE vectors are put in (r3,r4), (r5,r6),
5185 (r7,r8) or (r9,r10). As does any other 2 word item such
5186 as complex int due to a historical mistake. */
5188 gregno += (1 - gregno) & 1;
5190 /* Multi-reg args are not split between registers and stack. */
5191 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
5194 if (TARGET_32BIT && TARGET_POWERPC64)
5195 return rs6000_mixed_function_arg (mode, type,
5196 gregno - GP_ARG_MIN_REG);
5197 return gen_rtx_REG (mode, gregno);
5202 int align = function_arg_boundary (mode, type) / PARM_BOUNDARY - 1;
5203 int align_words = cum->words + (cum->words & align);
5205 if (USE_FP_FOR_ARG_P (cum, mode, type))
5207 rtx rvec[GP_ARG_NUM_REG + 1];
5211 enum machine_mode fmode = mode;
5212 unsigned long n_fpreg = (GET_MODE_SIZE (mode) + 7) >> 3;
5214 if (cum->fregno + n_fpreg > FP_ARG_MAX_REG + 1)
5216 /* Currently, we only ever need one reg here because complex
5217 doubles are split. */
5218 if (cum->fregno != FP_ARG_MAX_REG || fmode != TFmode)
5221 /* Long double split over regs and memory. */
5225 /* Do we also need to pass this arg in the parameter save
5228 && (cum->nargs_prototype <= 0
5229 || (DEFAULT_ABI == ABI_AIX
5231 && align_words >= GP_ARG_NUM_REG)));
5233 if (!needs_psave && mode == fmode)
5234 return gen_rtx_REG (fmode, cum->fregno);
5239 /* Describe the part that goes in gprs or the stack.
5240 This piece must come first, before the fprs. */
5241 if (align_words < GP_ARG_NUM_REG)
5243 unsigned long n_words = rs6000_arg_size (mode, type);
5245 if (align_words + n_words > GP_ARG_NUM_REG
5246 || (TARGET_32BIT && TARGET_POWERPC64))
5248 /* If this is partially on the stack, then we only
5249 include the portion actually in registers here. */
5250 enum machine_mode rmode = TARGET_32BIT ? SImode : DImode;
5253 if (align_words + n_words > GP_ARG_NUM_REG
5254 && (TARGET_32BIT && TARGET_POWERPC64))
5255 /* Not all of the arg fits in gprs. Say that it
5256 goes in memory too, using a magic NULL_RTX
5257 component. Also see comment in
5258 rs6000_mixed_function_arg for why the normal
5259 function_arg_partial_nregs scheme doesn't work
5261 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX,
5265 r = gen_rtx_REG (rmode,
5266 GP_ARG_MIN_REG + align_words);
5267 off = GEN_INT (i++ * GET_MODE_SIZE (rmode));
5268 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, off);
5270 while (++align_words < GP_ARG_NUM_REG && --n_words != 0);
5274 /* The whole arg fits in gprs. */
5275 r = gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
5276 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, const0_rtx);
5280 /* It's entirely in memory. */
5281 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
5284 /* Describe where this piece goes in the fprs. */
5285 r = gen_rtx_REG (fmode, cum->fregno);
5286 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, const0_rtx);
5288 return gen_rtx_PARALLEL (mode, gen_rtvec_v (k, rvec));
5290 else if (align_words < GP_ARG_NUM_REG)
5292 if (TARGET_32BIT && TARGET_POWERPC64)
5293 return rs6000_mixed_function_arg (mode, type, align_words);
5295 return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
5302 /* For an arg passed partly in registers and partly in memory, this is
5303 the number of registers used. For args passed entirely in registers
5304 or entirely in memory, zero. When an arg is described by a PARALLEL,
5305 perhaps using more than one register type, this function returns the
5306 number of registers used by the first element of the PARALLEL. */
5309 function_arg_partial_nregs (CUMULATIVE_ARGS *cum, enum machine_mode mode,
5310 tree type, int named)
5317 if (DEFAULT_ABI == ABI_V4)
5320 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named)
5321 && cum->nargs_prototype >= 0)
5324 align = function_arg_boundary (mode, type) / PARM_BOUNDARY - 1;
5325 parm_offset = TARGET_32BIT ? 2 : 0;
5326 align_words = cum->words + ((parm_offset - cum->words) & align);
5328 if (USE_FP_FOR_ARG_P (cum, mode, type)
5329 /* If we are passing this arg in gprs as well, then this function
5330 should return the number of gprs (or memory) partially passed,
5331 *not* the number of fprs. */
5333 && (cum->nargs_prototype <= 0
5334 || (DEFAULT_ABI == ABI_AIX
5336 && align_words >= GP_ARG_NUM_REG))))
5338 if (cum->fregno + ((GET_MODE_SIZE (mode) + 7) >> 3) > FP_ARG_MAX_REG + 1)
5339 ret = FP_ARG_MAX_REG + 1 - cum->fregno;
5340 else if (cum->nargs_prototype >= 0)
5344 if (align_words < GP_ARG_NUM_REG
5345 && GP_ARG_NUM_REG < align_words + rs6000_arg_size (mode, type))
5346 ret = GP_ARG_NUM_REG - align_words;
5348 if (ret != 0 && TARGET_DEBUG_ARG)
5349 fprintf (stderr, "function_arg_partial_nregs: %d\n", ret);
5354 /* A C expression that indicates when an argument must be passed by
5355 reference. If nonzero for an argument, a copy of that argument is
5356 made in memory and a pointer to the argument is passed instead of
5357 the argument itself. The pointer is passed in whatever way is
5358 appropriate for passing a pointer to that type.
5360 Under V.4, aggregates and long double are passed by reference.
5362 As an extension to all 32-bit ABIs, AltiVec vectors are passed by
5363 reference unless the AltiVec vector extension ABI is in force.
5365 As an extension to all ABIs, variable sized types are passed by
5369 rs6000_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
5370 enum machine_mode mode ATTRIBUTE_UNUSED,
5371 tree type, bool named ATTRIBUTE_UNUSED)
5373 if ((DEFAULT_ABI == ABI_V4
5374 && ((type && AGGREGATE_TYPE_P (type))
5376 || (TARGET_32BIT && !TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
5377 || (type && int_size_in_bytes (type) < 0))
5379 if (TARGET_DEBUG_ARG)
5380 fprintf (stderr, "function_arg_pass_by_reference\n");
5388 rs6000_move_block_from_reg (int regno, rtx x, int nregs)
5391 enum machine_mode reg_mode = TARGET_32BIT ? SImode : DImode;
5396 for (i = 0; i < nregs; i++)
5398 rtx tem = adjust_address_nv (x, reg_mode, i*GET_MODE_SIZE(reg_mode));
5399 if (reload_completed)
5401 if (! strict_memory_address_p (reg_mode, XEXP (tem, 0)))
5404 tem = simplify_gen_subreg (reg_mode, x, BLKmode,
5405 i * GET_MODE_SIZE(reg_mode));
5408 tem = replace_equiv_address (tem, XEXP (tem, 0));
5410 if (tem == NULL_RTX)
5413 emit_move_insn (tem, gen_rtx_REG (reg_mode, regno + i));
5417 /* Perform any needed actions needed for a function that is receiving a
5418 variable number of arguments.
5422 MODE and TYPE are the mode and type of the current parameter.
5424 PRETEND_SIZE is a variable that should be set to the amount of stack
5425 that must be pushed by the prolog to pretend that our caller pushed
5428 Normally, this macro will push all remaining incoming registers on the
5429 stack and set PRETEND_SIZE to the length of the registers pushed. */
5432 setup_incoming_varargs (CUMULATIVE_ARGS *cum, enum machine_mode mode,
5433 tree type, int *pretend_size ATTRIBUTE_UNUSED,
5436 CUMULATIVE_ARGS next_cum;
5437 int reg_size = TARGET_32BIT ? 4 : 8;
5438 rtx save_area = NULL_RTX, mem;
5439 int first_reg_offset, set;
5441 /* Skip the last named argument. */
5443 function_arg_advance (&next_cum, mode, type, 1);
5445 if (DEFAULT_ABI == ABI_V4)
5448 save_area = plus_constant (virtual_stack_vars_rtx,
5449 - RS6000_VARARGS_SIZE);
5451 first_reg_offset = next_cum.sysv_gregno - GP_ARG_MIN_REG;
5455 first_reg_offset = next_cum.words;
5456 save_area = virtual_incoming_args_rtx;
5458 if (targetm.calls.must_pass_in_stack (mode, type))
5459 first_reg_offset += rs6000_arg_size (TYPE_MODE (type), type);
5462 set = get_varargs_alias_set ();
5463 if (! no_rtl && first_reg_offset < GP_ARG_NUM_REG)
5465 mem = gen_rtx_MEM (BLKmode,
5466 plus_constant (save_area,
5467 first_reg_offset * reg_size)),
5468 set_mem_alias_set (mem, set);
5469 set_mem_align (mem, BITS_PER_WORD);
5471 rs6000_move_block_from_reg (GP_ARG_MIN_REG + first_reg_offset, mem,
5472 GP_ARG_NUM_REG - first_reg_offset);
5475 /* Save FP registers if needed. */
5476 if (DEFAULT_ABI == ABI_V4
5477 && TARGET_HARD_FLOAT && TARGET_FPRS
5479 && next_cum.fregno <= FP_ARG_V4_MAX_REG)
5481 int fregno = next_cum.fregno;
5482 rtx cr1 = gen_rtx_REG (CCmode, CR1_REGNO);
5483 rtx lab = gen_label_rtx ();
5484 int off = (GP_ARG_NUM_REG * reg_size) + ((fregno - FP_ARG_MIN_REG) * 8);
5487 (gen_rtx_SET (VOIDmode,
5489 gen_rtx_IF_THEN_ELSE (VOIDmode,
5490 gen_rtx_NE (VOIDmode, cr1,
5492 gen_rtx_LABEL_REF (VOIDmode, lab),
5495 while (fregno <= FP_ARG_V4_MAX_REG)
5497 mem = gen_rtx_MEM (DFmode, plus_constant (save_area, off));
5498 set_mem_alias_set (mem, set);
5499 set_mem_align (mem, GET_MODE_ALIGNMENT (DFmode));
5500 emit_move_insn (mem, gen_rtx_REG (DFmode, fregno));
5509 /* Create the va_list data type. */
5512 rs6000_build_builtin_va_list (void)
5514 tree f_gpr, f_fpr, f_res, f_ovf, f_sav, record, type_decl;
5516 /* For AIX, prefer 'char *' because that's what the system
5517 header files like. */
5518 if (DEFAULT_ABI != ABI_V4)
5519 return build_pointer_type (char_type_node);
5521 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
5522 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
5524 f_gpr = build_decl (FIELD_DECL, get_identifier ("gpr"),
5525 unsigned_char_type_node);
5526 f_fpr = build_decl (FIELD_DECL, get_identifier ("fpr"),
5527 unsigned_char_type_node);
5528 /* Give the two bytes of padding a name, so that -Wpadded won't warn on
5530 f_res = build_decl (FIELD_DECL, get_identifier ("reserved"),
5531 short_unsigned_type_node);
5532 f_ovf = build_decl (FIELD_DECL, get_identifier ("overflow_arg_area"),
5534 f_sav = build_decl (FIELD_DECL, get_identifier ("reg_save_area"),
5537 DECL_FIELD_CONTEXT (f_gpr) = record;
5538 DECL_FIELD_CONTEXT (f_fpr) = record;
5539 DECL_FIELD_CONTEXT (f_res) = record;
5540 DECL_FIELD_CONTEXT (f_ovf) = record;
5541 DECL_FIELD_CONTEXT (f_sav) = record;
5543 TREE_CHAIN (record) = type_decl;
5544 TYPE_NAME (record) = type_decl;
5545 TYPE_FIELDS (record) = f_gpr;
5546 TREE_CHAIN (f_gpr) = f_fpr;
5547 TREE_CHAIN (f_fpr) = f_res;
5548 TREE_CHAIN (f_res) = f_ovf;
5549 TREE_CHAIN (f_ovf) = f_sav;
5551 layout_type (record);
5553 /* The correct type is an array type of one element. */
5554 return build_array_type (record, build_index_type (size_zero_node));
5557 /* Implement va_start. */
5560 rs6000_va_start (tree valist, rtx nextarg)
5562 HOST_WIDE_INT words, n_gpr, n_fpr;
5563 tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
5564 tree gpr, fpr, ovf, sav, t;
5566 /* Only SVR4 needs something special. */
5567 if (DEFAULT_ABI != ABI_V4)
5569 std_expand_builtin_va_start (valist, nextarg);
5573 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
5574 f_fpr = TREE_CHAIN (f_gpr);
5575 f_res = TREE_CHAIN (f_fpr);
5576 f_ovf = TREE_CHAIN (f_res);
5577 f_sav = TREE_CHAIN (f_ovf);
5579 valist = build_va_arg_indirect_ref (valist);
5580 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
5581 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr, NULL_TREE);
5582 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf, NULL_TREE);
5583 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav, NULL_TREE);
5585 /* Count number of gp and fp argument registers used. */
5586 words = current_function_args_info.words;
5587 n_gpr = current_function_args_info.sysv_gregno - GP_ARG_MIN_REG;
5588 n_fpr = current_function_args_info.fregno - FP_ARG_MIN_REG;
5590 if (TARGET_DEBUG_ARG)
5591 fprintf (stderr, "va_start: words = "HOST_WIDE_INT_PRINT_DEC", n_gpr = "
5592 HOST_WIDE_INT_PRINT_DEC", n_fpr = "HOST_WIDE_INT_PRINT_DEC"\n",
5593 words, n_gpr, n_fpr);
5595 t = build (MODIFY_EXPR, TREE_TYPE (gpr), gpr,
5596 build_int_cst (NULL_TREE, n_gpr));
5597 TREE_SIDE_EFFECTS (t) = 1;
5598 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5600 t = build (MODIFY_EXPR, TREE_TYPE (fpr), fpr,
5601 build_int_cst (NULL_TREE, n_fpr));
5602 TREE_SIDE_EFFECTS (t) = 1;
5603 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5605 /* Find the overflow area. */
5606 t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
5608 t = build (PLUS_EXPR, TREE_TYPE (ovf), t,
5609 build_int_cst (NULL_TREE, words * UNITS_PER_WORD));
5610 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
5611 TREE_SIDE_EFFECTS (t) = 1;
5612 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5614 /* Find the register save area. */
5615 t = make_tree (TREE_TYPE (sav), virtual_stack_vars_rtx);
5616 t = build (PLUS_EXPR, TREE_TYPE (sav), t,
5617 build_int_cst (NULL_TREE, -RS6000_VARARGS_SIZE));
5618 t = build (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
5619 TREE_SIDE_EFFECTS (t) = 1;
5620 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5623 /* Implement va_arg. */
5626 rs6000_gimplify_va_arg (tree valist, tree type, tree *pre_p, tree *post_p)
5628 tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
5629 tree gpr, fpr, ovf, sav, reg, t, u;
5630 int size, rsize, n_reg, sav_ofs, sav_scale;
5631 tree lab_false, lab_over, addr;
5633 tree ptrtype = build_pointer_type (type);
5635 if (pass_by_reference (NULL, TYPE_MODE (type), type, false))
5637 t = rs6000_gimplify_va_arg (valist, ptrtype, pre_p, post_p);
5638 return build_va_arg_indirect_ref (t);
5641 if (DEFAULT_ABI != ABI_V4)
5643 if (targetm.calls.split_complex_arg && TREE_CODE (type) == COMPLEX_TYPE)
5645 tree elem_type = TREE_TYPE (type);
5646 enum machine_mode elem_mode = TYPE_MODE (elem_type);
5647 int elem_size = GET_MODE_SIZE (elem_mode);
5649 if (elem_size < UNITS_PER_WORD)
5651 tree real_part, imag_part;
5652 tree post = NULL_TREE;
5654 real_part = rs6000_gimplify_va_arg (valist, elem_type, pre_p,
5656 /* Copy the value into a temporary, lest the formal temporary
5657 be reused out from under us. */
5658 real_part = get_initialized_tmp_var (real_part, pre_p, &post);
5659 append_to_statement_list (post, pre_p);
5661 imag_part = rs6000_gimplify_va_arg (valist, elem_type, pre_p,
5664 return build (COMPLEX_EXPR, type, real_part, imag_part);
5668 return std_gimplify_va_arg_expr (valist, type, pre_p, post_p);
5671 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
5672 f_fpr = TREE_CHAIN (f_gpr);
5673 f_res = TREE_CHAIN (f_fpr);
5674 f_ovf = TREE_CHAIN (f_res);
5675 f_sav = TREE_CHAIN (f_ovf);
5677 valist = build_va_arg_indirect_ref (valist);
5678 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
5679 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr, NULL_TREE);
5680 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf, NULL_TREE);
5681 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav, NULL_TREE);
5683 size = int_size_in_bytes (type);
5684 rsize = (size + 3) / 4;
5687 if (TARGET_HARD_FLOAT && TARGET_FPRS
5688 && (TYPE_MODE (type) == SFmode || TYPE_MODE (type) == DFmode))
5690 /* FP args go in FP registers, if present. */
5695 if (TYPE_MODE (type) == DFmode)
5700 /* Otherwise into GP registers. */
5709 /* Pull the value out of the saved registers.... */
5712 addr = create_tmp_var (ptr_type_node, "addr");
5713 DECL_POINTER_ALIAS_SET (addr) = get_varargs_alias_set ();
5715 /* AltiVec vectors never go in registers when -mabi=altivec. */
5716 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (TYPE_MODE (type)))
5720 lab_false = create_artificial_label ();
5721 lab_over = create_artificial_label ();
5723 /* Long long and SPE vectors are aligned in the registers.
5724 As are any other 2 gpr item such as complex int due to a
5725 historical mistake. */
5729 u = build2 (BIT_AND_EXPR, TREE_TYPE (reg), reg,
5730 size_int (n_reg - 1));
5731 u = build2 (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg, u);
5734 t = fold_convert (TREE_TYPE (reg), size_int (8 - n_reg + 1));
5735 t = build2 (GE_EXPR, boolean_type_node, u, t);
5736 u = build1 (GOTO_EXPR, void_type_node, lab_false);
5737 t = build3 (COND_EXPR, void_type_node, t, u, NULL_TREE);
5738 gimplify_and_add (t, pre_p);
5742 t = build2 (PLUS_EXPR, ptr_type_node, sav, size_int (sav_ofs));
5744 u = build2 (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg, size_int (n_reg));
5745 u = build1 (CONVERT_EXPR, integer_type_node, u);
5746 u = build2 (MULT_EXPR, integer_type_node, u, size_int (sav_scale));
5747 t = build2 (PLUS_EXPR, ptr_type_node, t, u);
5749 t = build2 (MODIFY_EXPR, void_type_node, addr, t);
5750 gimplify_and_add (t, pre_p);
5752 t = build1 (GOTO_EXPR, void_type_node, lab_over);
5753 gimplify_and_add (t, pre_p);
5755 t = build1 (LABEL_EXPR, void_type_node, lab_false);
5756 append_to_statement_list (t, pre_p);
5760 /* Ensure that we don't find any more args in regs.
5761 Alignment has taken care of the n_reg == 2 case. */
5762 t = build (MODIFY_EXPR, TREE_TYPE (reg), reg, size_int (8));
5763 gimplify_and_add (t, pre_p);
5767 /* ... otherwise out of the overflow area. */
5769 /* Care for on-stack alignment if needed. */
5773 t = build2 (PLUS_EXPR, TREE_TYPE (t), t, size_int (align - 1));
5774 t = build2 (BIT_AND_EXPR, TREE_TYPE (t), t,
5775 build_int_cst (NULL_TREE, -align));
5777 gimplify_expr (&t, pre_p, NULL, is_gimple_val, fb_rvalue);
5779 u = build2 (MODIFY_EXPR, void_type_node, addr, t);
5780 gimplify_and_add (u, pre_p);
5782 t = build2 (PLUS_EXPR, TREE_TYPE (t), t, size_int (size));
5783 t = build2 (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
5784 gimplify_and_add (t, pre_p);
5788 t = build1 (LABEL_EXPR, void_type_node, lab_over);
5789 append_to_statement_list (t, pre_p);
5792 addr = fold_convert (ptrtype, addr);
5793 return build_va_arg_indirect_ref (addr);
5798 #define def_builtin(MASK, NAME, TYPE, CODE) \
5800 if ((MASK) & target_flags) \
5801 lang_hooks.builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, \
5805 /* Simple ternary operations: VECd = foo (VECa, VECb, VECc). */
5807 static const struct builtin_description bdesc_3arg[] =
5809 { MASK_ALTIVEC, CODE_FOR_altivec_vmaddfp, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP },
5810 { MASK_ALTIVEC, CODE_FOR_altivec_vmhaddshs, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS },
5811 { MASK_ALTIVEC, CODE_FOR_altivec_vmhraddshs, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS },
5812 { MASK_ALTIVEC, CODE_FOR_altivec_vmladduhm, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM},
5813 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumubm, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM },
5814 { MASK_ALTIVEC, CODE_FOR_altivec_vmsummbm, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM },
5815 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhm, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM },
5816 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshm, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM },
5817 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhs, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS },
5818 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshs, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS },
5819 { MASK_ALTIVEC, CODE_FOR_altivec_vnmsubfp, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP },
5820 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4sf, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF },
5821 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4si, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI },
5822 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_8hi, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI },
5823 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_16qi, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI },
5824 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4sf, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF },
5825 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4si, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI },
5826 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_8hi, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI },
5827 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_16qi, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI },
5828 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_16qi, "__builtin_altivec_vsldoi_16qi", ALTIVEC_BUILTIN_VSLDOI_16QI },
5829 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_8hi, "__builtin_altivec_vsldoi_8hi", ALTIVEC_BUILTIN_VSLDOI_8HI },
5830 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4si, "__builtin_altivec_vsldoi_4si", ALTIVEC_BUILTIN_VSLDOI_4SI },
5831 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4sf, "__builtin_altivec_vsldoi_4sf", ALTIVEC_BUILTIN_VSLDOI_4SF },
5834 /* DST operations: void foo (void *, const int, const char). */
5836 static const struct builtin_description bdesc_dst[] =
5838 { MASK_ALTIVEC, CODE_FOR_altivec_dst, "__builtin_altivec_dst", ALTIVEC_BUILTIN_DST },
5839 { MASK_ALTIVEC, CODE_FOR_altivec_dstt, "__builtin_altivec_dstt", ALTIVEC_BUILTIN_DSTT },
5840 { MASK_ALTIVEC, CODE_FOR_altivec_dstst, "__builtin_altivec_dstst", ALTIVEC_BUILTIN_DSTST },
5841 { MASK_ALTIVEC, CODE_FOR_altivec_dststt, "__builtin_altivec_dststt", ALTIVEC_BUILTIN_DSTSTT }
5844 /* Simple binary operations: VECc = foo (VECa, VECb). */
5846 static struct builtin_description bdesc_2arg[] =
5848 { MASK_ALTIVEC, CODE_FOR_addv16qi3, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM },
5849 { MASK_ALTIVEC, CODE_FOR_addv8hi3, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM },
5850 { MASK_ALTIVEC, CODE_FOR_addv4si3, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM },
5851 { MASK_ALTIVEC, CODE_FOR_addv4sf3, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP },
5852 { MASK_ALTIVEC, CODE_FOR_altivec_vaddcuw, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW },
5853 { MASK_ALTIVEC, CODE_FOR_altivec_vaddubs, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS },
5854 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsbs, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS },
5855 { MASK_ALTIVEC, CODE_FOR_altivec_vadduhs, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS },
5856 { MASK_ALTIVEC, CODE_FOR_altivec_vaddshs, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS },
5857 { MASK_ALTIVEC, CODE_FOR_altivec_vadduws, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS },
5858 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsws, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS },
5859 { MASK_ALTIVEC, CODE_FOR_andv4si3, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND },
5860 { MASK_ALTIVEC, CODE_FOR_altivec_vandc, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC },
5861 { MASK_ALTIVEC, CODE_FOR_altivec_vavgub, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB },
5862 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsb, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB },
5863 { MASK_ALTIVEC, CODE_FOR_altivec_vavguh, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH },
5864 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsh, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH },
5865 { MASK_ALTIVEC, CODE_FOR_altivec_vavguw, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW },
5866 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsw, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW },
5867 { MASK_ALTIVEC, CODE_FOR_altivec_vcfux, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX },
5868 { MASK_ALTIVEC, CODE_FOR_altivec_vcfsx, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX },
5869 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpbfp, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP },
5870 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequb, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB },
5871 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequh, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH },
5872 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequw, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW },
5873 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpeqfp, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP },
5874 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgefp, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP },
5875 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtub, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB },
5876 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsb, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB },
5877 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuh, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH },
5878 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsh, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH },
5879 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuw, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW },
5880 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsw, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW },
5881 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtfp, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP },
5882 { MASK_ALTIVEC, CODE_FOR_altivec_vctsxs, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS },
5883 { MASK_ALTIVEC, CODE_FOR_altivec_vctuxs, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS },
5884 { MASK_ALTIVEC, CODE_FOR_umaxv16qi3, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB },
5885 { MASK_ALTIVEC, CODE_FOR_smaxv16qi3, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB },
5886 { MASK_ALTIVEC, CODE_FOR_umaxv8hi3, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH },
5887 { MASK_ALTIVEC, CODE_FOR_smaxv8hi3, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH },
5888 { MASK_ALTIVEC, CODE_FOR_umaxv4si3, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW },
5889 { MASK_ALTIVEC, CODE_FOR_smaxv4si3, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW },
5890 { MASK_ALTIVEC, CODE_FOR_smaxv4sf3, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP },
5891 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghb, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB },
5892 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghh, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH },
5893 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghw, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW },
5894 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglb, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB },
5895 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglh, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH },
5896 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglw, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW },
5897 { MASK_ALTIVEC, CODE_FOR_uminv16qi3, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB },
5898 { MASK_ALTIVEC, CODE_FOR_sminv16qi3, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB },
5899 { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH },
5900 { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH },
5901 { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW },
5902 { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW },
5903 { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP },
5904 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleub, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB },
5905 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesb, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB },
5906 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleuh, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH },
5907 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesh, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH },
5908 { MASK_ALTIVEC, CODE_FOR_altivec_vmuloub, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB },
5909 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosb, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB },
5910 { MASK_ALTIVEC, CODE_FOR_altivec_vmulouh, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH },
5911 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosh, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH },
5912 { MASK_ALTIVEC, CODE_FOR_altivec_vnor, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR },
5913 { MASK_ALTIVEC, CODE_FOR_iorv4si3, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR },
5914 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhum, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM },
5915 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwum, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM },
5916 { MASK_ALTIVEC, CODE_FOR_altivec_vpkpx, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX },
5917 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhss, "__builtin_altivec_vpkuhss", ALTIVEC_BUILTIN_VPKUHSS },
5918 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshss, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS },
5919 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwss, "__builtin_altivec_vpkuwss", ALTIVEC_BUILTIN_VPKUWSS },
5920 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswss, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS },
5921 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhus, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS },
5922 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshus, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS },
5923 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwus, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS },
5924 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswus, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS },
5925 { MASK_ALTIVEC, CODE_FOR_altivec_vrlb, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB },
5926 { MASK_ALTIVEC, CODE_FOR_altivec_vrlh, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH },
5927 { MASK_ALTIVEC, CODE_FOR_altivec_vrlw, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW },
5928 { MASK_ALTIVEC, CODE_FOR_altivec_vslb, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB },
5929 { MASK_ALTIVEC, CODE_FOR_altivec_vslh, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH },
5930 { MASK_ALTIVEC, CODE_FOR_altivec_vslw, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW },
5931 { MASK_ALTIVEC, CODE_FOR_altivec_vsl, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL },
5932 { MASK_ALTIVEC, CODE_FOR_altivec_vslo, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO },
5933 { MASK_ALTIVEC, CODE_FOR_altivec_vspltb, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB },
5934 { MASK_ALTIVEC, CODE_FOR_altivec_vsplth, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH },
5935 { MASK_ALTIVEC, CODE_FOR_altivec_vspltw, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW },
5936 { MASK_ALTIVEC, CODE_FOR_altivec_vsrb, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB },
5937 { MASK_ALTIVEC, CODE_FOR_altivec_vsrh, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH },
5938 { MASK_ALTIVEC, CODE_FOR_altivec_vsrw, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW },
5939 { MASK_ALTIVEC, CODE_FOR_altivec_vsrab, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB },
5940 { MASK_ALTIVEC, CODE_FOR_altivec_vsrah, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH },
5941 { MASK_ALTIVEC, CODE_FOR_altivec_vsraw, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW },
5942 { MASK_ALTIVEC, CODE_FOR_altivec_vsr, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR },
5943 { MASK_ALTIVEC, CODE_FOR_altivec_vsro, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO },
5944 { MASK_ALTIVEC, CODE_FOR_subv16qi3, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM },
5945 { MASK_ALTIVEC, CODE_FOR_subv8hi3, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM },
5946 { MASK_ALTIVEC, CODE_FOR_subv4si3, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM },
5947 { MASK_ALTIVEC, CODE_FOR_subv4sf3, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP },
5948 { MASK_ALTIVEC, CODE_FOR_altivec_vsubcuw, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW },
5949 { MASK_ALTIVEC, CODE_FOR_altivec_vsububs, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS },
5950 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsbs, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS },
5951 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuhs, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS },
5952 { MASK_ALTIVEC, CODE_FOR_altivec_vsubshs, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS },
5953 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuws, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS },
5954 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsws, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS },
5955 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4ubs, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS },
5956 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4sbs, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS },
5957 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4shs, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS },
5958 { MASK_ALTIVEC, CODE_FOR_altivec_vsum2sws, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS },
5959 { MASK_ALTIVEC, CODE_FOR_altivec_vsumsws, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS },
5960 { MASK_ALTIVEC, CODE_FOR_xorv4si3, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR },
5962 /* Place holder, leave as first spe builtin. */
5963 { 0, CODE_FOR_spe_evaddw, "__builtin_spe_evaddw", SPE_BUILTIN_EVADDW },
5964 { 0, CODE_FOR_spe_evand, "__builtin_spe_evand", SPE_BUILTIN_EVAND },
5965 { 0, CODE_FOR_spe_evandc, "__builtin_spe_evandc", SPE_BUILTIN_EVANDC },
5966 { 0, CODE_FOR_spe_evdivws, "__builtin_spe_evdivws", SPE_BUILTIN_EVDIVWS },
5967 { 0, CODE_FOR_spe_evdivwu, "__builtin_spe_evdivwu", SPE_BUILTIN_EVDIVWU },
5968 { 0, CODE_FOR_spe_eveqv, "__builtin_spe_eveqv", SPE_BUILTIN_EVEQV },
5969 { 0, CODE_FOR_spe_evfsadd, "__builtin_spe_evfsadd", SPE_BUILTIN_EVFSADD },
5970 { 0, CODE_FOR_spe_evfsdiv, "__builtin_spe_evfsdiv", SPE_BUILTIN_EVFSDIV },
5971 { 0, CODE_FOR_spe_evfsmul, "__builtin_spe_evfsmul", SPE_BUILTIN_EVFSMUL },
5972 { 0, CODE_FOR_spe_evfssub, "__builtin_spe_evfssub", SPE_BUILTIN_EVFSSUB },
5973 { 0, CODE_FOR_spe_evmergehi, "__builtin_spe_evmergehi", SPE_BUILTIN_EVMERGEHI },
5974 { 0, CODE_FOR_spe_evmergehilo, "__builtin_spe_evmergehilo", SPE_BUILTIN_EVMERGEHILO },
5975 { 0, CODE_FOR_spe_evmergelo, "__builtin_spe_evmergelo", SPE_BUILTIN_EVMERGELO },
5976 { 0, CODE_FOR_spe_evmergelohi, "__builtin_spe_evmergelohi", SPE_BUILTIN_EVMERGELOHI },
5977 { 0, CODE_FOR_spe_evmhegsmfaa, "__builtin_spe_evmhegsmfaa", SPE_BUILTIN_EVMHEGSMFAA },
5978 { 0, CODE_FOR_spe_evmhegsmfan, "__builtin_spe_evmhegsmfan", SPE_BUILTIN_EVMHEGSMFAN },
5979 { 0, CODE_FOR_spe_evmhegsmiaa, "__builtin_spe_evmhegsmiaa", SPE_BUILTIN_EVMHEGSMIAA },
5980 { 0, CODE_FOR_spe_evmhegsmian, "__builtin_spe_evmhegsmian", SPE_BUILTIN_EVMHEGSMIAN },
5981 { 0, CODE_FOR_spe_evmhegumiaa, "__builtin_spe_evmhegumiaa", SPE_BUILTIN_EVMHEGUMIAA },
5982 { 0, CODE_FOR_spe_evmhegumian, "__builtin_spe_evmhegumian", SPE_BUILTIN_EVMHEGUMIAN },
5983 { 0, CODE_FOR_spe_evmhesmf, "__builtin_spe_evmhesmf", SPE_BUILTIN_EVMHESMF },
5984 { 0, CODE_FOR_spe_evmhesmfa, "__builtin_spe_evmhesmfa", SPE_BUILTIN_EVMHESMFA },
5985 { 0, CODE_FOR_spe_evmhesmfaaw, "__builtin_spe_evmhesmfaaw", SPE_BUILTIN_EVMHESMFAAW },
5986 { 0, CODE_FOR_spe_evmhesmfanw, "__builtin_spe_evmhesmfanw", SPE_BUILTIN_EVMHESMFANW },
5987 { 0, CODE_FOR_spe_evmhesmi, "__builtin_spe_evmhesmi", SPE_BUILTIN_EVMHESMI },
5988 { 0, CODE_FOR_spe_evmhesmia, "__builtin_spe_evmhesmia", SPE_BUILTIN_EVMHESMIA },
5989 { 0, CODE_FOR_spe_evmhesmiaaw, "__builtin_spe_evmhesmiaaw", SPE_BUILTIN_EVMHESMIAAW },
5990 { 0, CODE_FOR_spe_evmhesmianw, "__builtin_spe_evmhesmianw", SPE_BUILTIN_EVMHESMIANW },
5991 { 0, CODE_FOR_spe_evmhessf, "__builtin_spe_evmhessf", SPE_BUILTIN_EVMHESSF },
5992 { 0, CODE_FOR_spe_evmhessfa, "__builtin_spe_evmhessfa", SPE_BUILTIN_EVMHESSFA },
5993 { 0, CODE_FOR_spe_evmhessfaaw, "__builtin_spe_evmhessfaaw", SPE_BUILTIN_EVMHESSFAAW },
5994 { 0, CODE_FOR_spe_evmhessfanw, "__builtin_spe_evmhessfanw", SPE_BUILTIN_EVMHESSFANW },
5995 { 0, CODE_FOR_spe_evmhessiaaw, "__builtin_spe_evmhessiaaw", SPE_BUILTIN_EVMHESSIAAW },
5996 { 0, CODE_FOR_spe_evmhessianw, "__builtin_spe_evmhessianw", SPE_BUILTIN_EVMHESSIANW },
5997 { 0, CODE_FOR_spe_evmheumi, "__builtin_spe_evmheumi", SPE_BUILTIN_EVMHEUMI },
5998 { 0, CODE_FOR_spe_evmheumia, "__builtin_spe_evmheumia", SPE_BUILTIN_EVMHEUMIA },
5999 { 0, CODE_FOR_spe_evmheumiaaw, "__builtin_spe_evmheumiaaw", SPE_BUILTIN_EVMHEUMIAAW },
6000 { 0, CODE_FOR_spe_evmheumianw, "__builtin_spe_evmheumianw", SPE_BUILTIN_EVMHEUMIANW },
6001 { 0, CODE_FOR_spe_evmheusiaaw, "__builtin_spe_evmheusiaaw", SPE_BUILTIN_EVMHEUSIAAW },
6002 { 0, CODE_FOR_spe_evmheusianw, "__builtin_spe_evmheusianw", SPE_BUILTIN_EVMHEUSIANW },
6003 { 0, CODE_FOR_spe_evmhogsmfaa, "__builtin_spe_evmhogsmfaa", SPE_BUILTIN_EVMHOGSMFAA },
6004 { 0, CODE_FOR_spe_evmhogsmfan, "__builtin_spe_evmhogsmfan", SPE_BUILTIN_EVMHOGSMFAN },
6005 { 0, CODE_FOR_spe_evmhogsmiaa, "__builtin_spe_evmhogsmiaa", SPE_BUILTIN_EVMHOGSMIAA },
6006 { 0, CODE_FOR_spe_evmhogsmian, "__builtin_spe_evmhogsmian", SPE_BUILTIN_EVMHOGSMIAN },
6007 { 0, CODE_FOR_spe_evmhogumiaa, "__builtin_spe_evmhogumiaa", SPE_BUILTIN_EVMHOGUMIAA },
6008 { 0, CODE_FOR_spe_evmhogumian, "__builtin_spe_evmhogumian", SPE_BUILTIN_EVMHOGUMIAN },
6009 { 0, CODE_FOR_spe_evmhosmf, "__builtin_spe_evmhosmf", SPE_BUILTIN_EVMHOSMF },
6010 { 0, CODE_FOR_spe_evmhosmfa, "__builtin_spe_evmhosmfa", SPE_BUILTIN_EVMHOSMFA },
6011 { 0, CODE_FOR_spe_evmhosmfaaw, "__builtin_spe_evmhosmfaaw", SPE_BUILTIN_EVMHOSMFAAW },
6012 { 0, CODE_FOR_spe_evmhosmfanw, "__builtin_spe_evmhosmfanw", SPE_BUILTIN_EVMHOSMFANW },
6013 { 0, CODE_FOR_spe_evmhosmi, "__builtin_spe_evmhosmi", SPE_BUILTIN_EVMHOSMI },
6014 { 0, CODE_FOR_spe_evmhosmia, "__builtin_spe_evmhosmia", SPE_BUILTIN_EVMHOSMIA },
6015 { 0, CODE_FOR_spe_evmhosmiaaw, "__builtin_spe_evmhosmiaaw", SPE_BUILTIN_EVMHOSMIAAW },
6016 { 0, CODE_FOR_spe_evmhosmianw, "__builtin_spe_evmhosmianw", SPE_BUILTIN_EVMHOSMIANW },
6017 { 0, CODE_FOR_spe_evmhossf, "__builtin_spe_evmhossf", SPE_BUILTIN_EVMHOSSF },
6018 { 0, CODE_FOR_spe_evmhossfa, "__builtin_spe_evmhossfa", SPE_BUILTIN_EVMHOSSFA },
6019 { 0, CODE_FOR_spe_evmhossfaaw, "__builtin_spe_evmhossfaaw", SPE_BUILTIN_EVMHOSSFAAW },
6020 { 0, CODE_FOR_spe_evmhossfanw, "__builtin_spe_evmhossfanw", SPE_BUILTIN_EVMHOSSFANW },
6021 { 0, CODE_FOR_spe_evmhossiaaw, "__builtin_spe_evmhossiaaw", SPE_BUILTIN_EVMHOSSIAAW },
6022 { 0, CODE_FOR_spe_evmhossianw, "__builtin_spe_evmhossianw", SPE_BUILTIN_EVMHOSSIANW },
6023 { 0, CODE_FOR_spe_evmhoumi, "__builtin_spe_evmhoumi", SPE_BUILTIN_EVMHOUMI },
6024 { 0, CODE_FOR_spe_evmhoumia, "__builtin_spe_evmhoumia", SPE_BUILTIN_EVMHOUMIA },
6025 { 0, CODE_FOR_spe_evmhoumiaaw, "__builtin_spe_evmhoumiaaw", SPE_BUILTIN_EVMHOUMIAAW },
6026 { 0, CODE_FOR_spe_evmhoumianw, "__builtin_spe_evmhoumianw", SPE_BUILTIN_EVMHOUMIANW },
6027 { 0, CODE_FOR_spe_evmhousiaaw, "__builtin_spe_evmhousiaaw", SPE_BUILTIN_EVMHOUSIAAW },
6028 { 0, CODE_FOR_spe_evmhousianw, "__builtin_spe_evmhousianw", SPE_BUILTIN_EVMHOUSIANW },
6029 { 0, CODE_FOR_spe_evmwhsmf, "__builtin_spe_evmwhsmf", SPE_BUILTIN_EVMWHSMF },
6030 { 0, CODE_FOR_spe_evmwhsmfa, "__builtin_spe_evmwhsmfa", SPE_BUILTIN_EVMWHSMFA },
6031 { 0, CODE_FOR_spe_evmwhsmi, "__builtin_spe_evmwhsmi", SPE_BUILTIN_EVMWHSMI },
6032 { 0, CODE_FOR_spe_evmwhsmia, "__builtin_spe_evmwhsmia", SPE_BUILTIN_EVMWHSMIA },
6033 { 0, CODE_FOR_spe_evmwhssf, "__builtin_spe_evmwhssf", SPE_BUILTIN_EVMWHSSF },
6034 { 0, CODE_FOR_spe_evmwhssfa, "__builtin_spe_evmwhssfa", SPE_BUILTIN_EVMWHSSFA },
6035 { 0, CODE_FOR_spe_evmwhumi, "__builtin_spe_evmwhumi", SPE_BUILTIN_EVMWHUMI },
6036 { 0, CODE_FOR_spe_evmwhumia, "__builtin_spe_evmwhumia", SPE_BUILTIN_EVMWHUMIA },
6037 { 0, CODE_FOR_spe_evmwlsmiaaw, "__builtin_spe_evmwlsmiaaw", SPE_BUILTIN_EVMWLSMIAAW },
6038 { 0, CODE_FOR_spe_evmwlsmianw, "__builtin_spe_evmwlsmianw", SPE_BUILTIN_EVMWLSMIANW },
6039 { 0, CODE_FOR_spe_evmwlssiaaw, "__builtin_spe_evmwlssiaaw", SPE_BUILTIN_EVMWLSSIAAW },
6040 { 0, CODE_FOR_spe_evmwlssianw, "__builtin_spe_evmwlssianw", SPE_BUILTIN_EVMWLSSIANW },
6041 { 0, CODE_FOR_spe_evmwlumi, "__builtin_spe_evmwlumi", SPE_BUILTIN_EVMWLUMI },
6042 { 0, CODE_FOR_spe_evmwlumia, "__builtin_spe_evmwlumia", SPE_BUILTIN_EVMWLUMIA },
6043 { 0, CODE_FOR_spe_evmwlumiaaw, "__builtin_spe_evmwlumiaaw", SPE_BUILTIN_EVMWLUMIAAW },
6044 { 0, CODE_FOR_spe_evmwlumianw, "__builtin_spe_evmwlumianw", SPE_BUILTIN_EVMWLUMIANW },
6045 { 0, CODE_FOR_spe_evmwlusiaaw, "__builtin_spe_evmwlusiaaw", SPE_BUILTIN_EVMWLUSIAAW },
6046 { 0, CODE_FOR_spe_evmwlusianw, "__builtin_spe_evmwlusianw", SPE_BUILTIN_EVMWLUSIANW },
6047 { 0, CODE_FOR_spe_evmwsmf, "__builtin_spe_evmwsmf", SPE_BUILTIN_EVMWSMF },
6048 { 0, CODE_FOR_spe_evmwsmfa, "__builtin_spe_evmwsmfa", SPE_BUILTIN_EVMWSMFA },
6049 { 0, CODE_FOR_spe_evmwsmfaa, "__builtin_spe_evmwsmfaa", SPE_BUILTIN_EVMWSMFAA },
6050 { 0, CODE_FOR_spe_evmwsmfan, "__builtin_spe_evmwsmfan", SPE_BUILTIN_EVMWSMFAN },
6051 { 0, CODE_FOR_spe_evmwsmi, "__builtin_spe_evmwsmi", SPE_BUILTIN_EVMWSMI },
6052 { 0, CODE_FOR_spe_evmwsmia, "__builtin_spe_evmwsmia", SPE_BUILTIN_EVMWSMIA },
6053 { 0, CODE_FOR_spe_evmwsmiaa, "__builtin_spe_evmwsmiaa", SPE_BUILTIN_EVMWSMIAA },
6054 { 0, CODE_FOR_spe_evmwsmian, "__builtin_spe_evmwsmian", SPE_BUILTIN_EVMWSMIAN },
6055 { 0, CODE_FOR_spe_evmwssf, "__builtin_spe_evmwssf", SPE_BUILTIN_EVMWSSF },
6056 { 0, CODE_FOR_spe_evmwssfa, "__builtin_spe_evmwssfa", SPE_BUILTIN_EVMWSSFA },
6057 { 0, CODE_FOR_spe_evmwssfaa, "__builtin_spe_evmwssfaa", SPE_BUILTIN_EVMWSSFAA },
6058 { 0, CODE_FOR_spe_evmwssfan, "__builtin_spe_evmwssfan", SPE_BUILTIN_EVMWSSFAN },
6059 { 0, CODE_FOR_spe_evmwumi, "__builtin_spe_evmwumi", SPE_BUILTIN_EVMWUMI },
6060 { 0, CODE_FOR_spe_evmwumia, "__builtin_spe_evmwumia", SPE_BUILTIN_EVMWUMIA },
6061 { 0, CODE_FOR_spe_evmwumiaa, "__builtin_spe_evmwumiaa", SPE_BUILTIN_EVMWUMIAA },
6062 { 0, CODE_FOR_spe_evmwumian, "__builtin_spe_evmwumian", SPE_BUILTIN_EVMWUMIAN },
6063 { 0, CODE_FOR_spe_evnand, "__builtin_spe_evnand", SPE_BUILTIN_EVNAND },
6064 { 0, CODE_FOR_spe_evnor, "__builtin_spe_evnor", SPE_BUILTIN_EVNOR },
6065 { 0, CODE_FOR_spe_evor, "__builtin_spe_evor", SPE_BUILTIN_EVOR },
6066 { 0, CODE_FOR_spe_evorc, "__builtin_spe_evorc", SPE_BUILTIN_EVORC },
6067 { 0, CODE_FOR_spe_evrlw, "__builtin_spe_evrlw", SPE_BUILTIN_EVRLW },
6068 { 0, CODE_FOR_spe_evslw, "__builtin_spe_evslw", SPE_BUILTIN_EVSLW },
6069 { 0, CODE_FOR_spe_evsrws, "__builtin_spe_evsrws", SPE_BUILTIN_EVSRWS },
6070 { 0, CODE_FOR_spe_evsrwu, "__builtin_spe_evsrwu", SPE_BUILTIN_EVSRWU },
6071 { 0, CODE_FOR_spe_evsubfw, "__builtin_spe_evsubfw", SPE_BUILTIN_EVSUBFW },
6073 /* SPE binary operations expecting a 5-bit unsigned literal. */
6074 { 0, CODE_FOR_spe_evaddiw, "__builtin_spe_evaddiw", SPE_BUILTIN_EVADDIW },
6076 { 0, CODE_FOR_spe_evrlwi, "__builtin_spe_evrlwi", SPE_BUILTIN_EVRLWI },
6077 { 0, CODE_FOR_spe_evslwi, "__builtin_spe_evslwi", SPE_BUILTIN_EVSLWI },
6078 { 0, CODE_FOR_spe_evsrwis, "__builtin_spe_evsrwis", SPE_BUILTIN_EVSRWIS },
6079 { 0, CODE_FOR_spe_evsrwiu, "__builtin_spe_evsrwiu", SPE_BUILTIN_EVSRWIU },
6080 { 0, CODE_FOR_spe_evsubifw, "__builtin_spe_evsubifw", SPE_BUILTIN_EVSUBIFW },
6081 { 0, CODE_FOR_spe_evmwhssfaa, "__builtin_spe_evmwhssfaa", SPE_BUILTIN_EVMWHSSFAA },
6082 { 0, CODE_FOR_spe_evmwhssmaa, "__builtin_spe_evmwhssmaa", SPE_BUILTIN_EVMWHSSMAA },
6083 { 0, CODE_FOR_spe_evmwhsmfaa, "__builtin_spe_evmwhsmfaa", SPE_BUILTIN_EVMWHSMFAA },
6084 { 0, CODE_FOR_spe_evmwhsmiaa, "__builtin_spe_evmwhsmiaa", SPE_BUILTIN_EVMWHSMIAA },
6085 { 0, CODE_FOR_spe_evmwhusiaa, "__builtin_spe_evmwhusiaa", SPE_BUILTIN_EVMWHUSIAA },
6086 { 0, CODE_FOR_spe_evmwhumiaa, "__builtin_spe_evmwhumiaa", SPE_BUILTIN_EVMWHUMIAA },
6087 { 0, CODE_FOR_spe_evmwhssfan, "__builtin_spe_evmwhssfan", SPE_BUILTIN_EVMWHSSFAN },
6088 { 0, CODE_FOR_spe_evmwhssian, "__builtin_spe_evmwhssian", SPE_BUILTIN_EVMWHSSIAN },
6089 { 0, CODE_FOR_spe_evmwhsmfan, "__builtin_spe_evmwhsmfan", SPE_BUILTIN_EVMWHSMFAN },
6090 { 0, CODE_FOR_spe_evmwhsmian, "__builtin_spe_evmwhsmian", SPE_BUILTIN_EVMWHSMIAN },
6091 { 0, CODE_FOR_spe_evmwhusian, "__builtin_spe_evmwhusian", SPE_BUILTIN_EVMWHUSIAN },
6092 { 0, CODE_FOR_spe_evmwhumian, "__builtin_spe_evmwhumian", SPE_BUILTIN_EVMWHUMIAN },
6093 { 0, CODE_FOR_spe_evmwhgssfaa, "__builtin_spe_evmwhgssfaa", SPE_BUILTIN_EVMWHGSSFAA },
6094 { 0, CODE_FOR_spe_evmwhgsmfaa, "__builtin_spe_evmwhgsmfaa", SPE_BUILTIN_EVMWHGSMFAA },
6095 { 0, CODE_FOR_spe_evmwhgsmiaa, "__builtin_spe_evmwhgsmiaa", SPE_BUILTIN_EVMWHGSMIAA },
6096 { 0, CODE_FOR_spe_evmwhgumiaa, "__builtin_spe_evmwhgumiaa", SPE_BUILTIN_EVMWHGUMIAA },
6097 { 0, CODE_FOR_spe_evmwhgssfan, "__builtin_spe_evmwhgssfan", SPE_BUILTIN_EVMWHGSSFAN },
6098 { 0, CODE_FOR_spe_evmwhgsmfan, "__builtin_spe_evmwhgsmfan", SPE_BUILTIN_EVMWHGSMFAN },
6099 { 0, CODE_FOR_spe_evmwhgsmian, "__builtin_spe_evmwhgsmian", SPE_BUILTIN_EVMWHGSMIAN },
6100 { 0, CODE_FOR_spe_evmwhgumian, "__builtin_spe_evmwhgumian", SPE_BUILTIN_EVMWHGUMIAN },
6101 { 0, CODE_FOR_spe_brinc, "__builtin_spe_brinc", SPE_BUILTIN_BRINC },
6103 /* Place-holder. Leave as last binary SPE builtin. */
6104 { 0, CODE_FOR_xorv2si3, "__builtin_spe_evxor", SPE_BUILTIN_EVXOR },
6107 /* AltiVec predicates. */
6109 struct builtin_description_predicates
6111 const unsigned int mask;
6112 const enum insn_code icode;
6114 const char *const name;
6115 const enum rs6000_builtins code;
6118 static const struct builtin_description_predicates bdesc_altivec_preds[] =
6120 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpbfp.", "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P },
6121 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpeqfp.", "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P },
6122 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgefp.", "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P },
6123 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgtfp.", "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P },
6124 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpequw.", "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P },
6125 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtsw.", "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P },
6126 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtuw.", "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P },
6127 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtuh.", "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P },
6128 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtsh.", "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P },
6129 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpequh.", "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P },
6130 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpequb.", "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P },
6131 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtsb.", "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P },
6132 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P }
6135 /* SPE predicates. */
6136 static struct builtin_description bdesc_spe_predicates[] =
6138 /* Place-holder. Leave as first. */
6139 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evcmpeq", SPE_BUILTIN_EVCMPEQ },
6140 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evcmpgts", SPE_BUILTIN_EVCMPGTS },
6141 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evcmpgtu", SPE_BUILTIN_EVCMPGTU },
6142 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evcmplts", SPE_BUILTIN_EVCMPLTS },
6143 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evcmpltu", SPE_BUILTIN_EVCMPLTU },
6144 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evfscmpeq", SPE_BUILTIN_EVFSCMPEQ },
6145 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evfscmpgt", SPE_BUILTIN_EVFSCMPGT },
6146 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evfscmplt", SPE_BUILTIN_EVFSCMPLT },
6147 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evfststeq", SPE_BUILTIN_EVFSTSTEQ },
6148 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evfststgt", SPE_BUILTIN_EVFSTSTGT },
6149 /* Place-holder. Leave as last. */
6150 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evfststlt", SPE_BUILTIN_EVFSTSTLT },
6153 /* SPE evsel predicates. */
6154 static struct builtin_description bdesc_spe_evsel[] =
6156 /* Place-holder. Leave as first. */
6157 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evsel_gts", SPE_BUILTIN_EVSEL_CMPGTS },
6158 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evsel_gtu", SPE_BUILTIN_EVSEL_CMPGTU },
6159 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evsel_lts", SPE_BUILTIN_EVSEL_CMPLTS },
6160 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evsel_ltu", SPE_BUILTIN_EVSEL_CMPLTU },
6161 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evsel_eq", SPE_BUILTIN_EVSEL_CMPEQ },
6162 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evsel_fsgt", SPE_BUILTIN_EVSEL_FSCMPGT },
6163 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evsel_fslt", SPE_BUILTIN_EVSEL_FSCMPLT },
6164 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evsel_fseq", SPE_BUILTIN_EVSEL_FSCMPEQ },
6165 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evsel_fststgt", SPE_BUILTIN_EVSEL_FSTSTGT },
6166 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evsel_fststlt", SPE_BUILTIN_EVSEL_FSTSTLT },
6167 /* Place-holder. Leave as last. */
6168 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evsel_fststeq", SPE_BUILTIN_EVSEL_FSTSTEQ },
6171 /* ABS* operations. */
6173 static const struct builtin_description bdesc_abs[] =
6175 { MASK_ALTIVEC, CODE_FOR_absv4si2, "__builtin_altivec_abs_v4si", ALTIVEC_BUILTIN_ABS_V4SI },
6176 { MASK_ALTIVEC, CODE_FOR_absv8hi2, "__builtin_altivec_abs_v8hi", ALTIVEC_BUILTIN_ABS_V8HI },
6177 { MASK_ALTIVEC, CODE_FOR_absv4sf2, "__builtin_altivec_abs_v4sf", ALTIVEC_BUILTIN_ABS_V4SF },
6178 { MASK_ALTIVEC, CODE_FOR_absv16qi2, "__builtin_altivec_abs_v16qi", ALTIVEC_BUILTIN_ABS_V16QI },
6179 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v4si, "__builtin_altivec_abss_v4si", ALTIVEC_BUILTIN_ABSS_V4SI },
6180 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v8hi, "__builtin_altivec_abss_v8hi", ALTIVEC_BUILTIN_ABSS_V8HI },
6181 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v16qi, "__builtin_altivec_abss_v16qi", ALTIVEC_BUILTIN_ABSS_V16QI }
6184 /* Simple unary operations: VECb = foo (unsigned literal) or VECb =
6187 static struct builtin_description bdesc_1arg[] =
6189 { MASK_ALTIVEC, CODE_FOR_altivec_vexptefp, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP },
6190 { MASK_ALTIVEC, CODE_FOR_altivec_vlogefp, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP },
6191 { MASK_ALTIVEC, CODE_FOR_altivec_vrefp, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP },
6192 { MASK_ALTIVEC, CODE_FOR_altivec_vrfim, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM },
6193 { MASK_ALTIVEC, CODE_FOR_altivec_vrfin, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN },
6194 { MASK_ALTIVEC, CODE_FOR_altivec_vrfip, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP },
6195 { MASK_ALTIVEC, CODE_FOR_ftruncv4sf2, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ },
6196 { MASK_ALTIVEC, CODE_FOR_altivec_vrsqrtefp, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP },
6197 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisb, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB },
6198 { MASK_ALTIVEC, CODE_FOR_altivec_vspltish, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH },
6199 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisw, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW },
6200 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsb, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB },
6201 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhpx, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX },
6202 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsh, "__builtin_altivec_vupkhsh", ALTIVEC_BUILTIN_VUPKHSH },
6203 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsb, "__builtin_altivec_vupklsb", ALTIVEC_BUILTIN_VUPKLSB },
6204 { MASK_ALTIVEC, CODE_FOR_altivec_vupklpx, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX },
6205 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsh, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH },
6207 /* The SPE unary builtins must start with SPE_BUILTIN_EVABS and
6208 end with SPE_BUILTIN_EVSUBFUSIAAW. */
6209 { 0, CODE_FOR_spe_evabs, "__builtin_spe_evabs", SPE_BUILTIN_EVABS },
6210 { 0, CODE_FOR_spe_evaddsmiaaw, "__builtin_spe_evaddsmiaaw", SPE_BUILTIN_EVADDSMIAAW },
6211 { 0, CODE_FOR_spe_evaddssiaaw, "__builtin_spe_evaddssiaaw", SPE_BUILTIN_EVADDSSIAAW },
6212 { 0, CODE_FOR_spe_evaddumiaaw, "__builtin_spe_evaddumiaaw", SPE_BUILTIN_EVADDUMIAAW },
6213 { 0, CODE_FOR_spe_evaddusiaaw, "__builtin_spe_evaddusiaaw", SPE_BUILTIN_EVADDUSIAAW },
6214 { 0, CODE_FOR_spe_evcntlsw, "__builtin_spe_evcntlsw", SPE_BUILTIN_EVCNTLSW },
6215 { 0, CODE_FOR_spe_evcntlzw, "__builtin_spe_evcntlzw", SPE_BUILTIN_EVCNTLZW },
6216 { 0, CODE_FOR_spe_evextsb, "__builtin_spe_evextsb", SPE_BUILTIN_EVEXTSB },
6217 { 0, CODE_FOR_spe_evextsh, "__builtin_spe_evextsh", SPE_BUILTIN_EVEXTSH },
6218 { 0, CODE_FOR_spe_evfsabs, "__builtin_spe_evfsabs", SPE_BUILTIN_EVFSABS },
6219 { 0, CODE_FOR_spe_evfscfsf, "__builtin_spe_evfscfsf", SPE_BUILTIN_EVFSCFSF },
6220 { 0, CODE_FOR_spe_evfscfsi, "__builtin_spe_evfscfsi", SPE_BUILTIN_EVFSCFSI },
6221 { 0, CODE_FOR_spe_evfscfuf, "__builtin_spe_evfscfuf", SPE_BUILTIN_EVFSCFUF },
6222 { 0, CODE_FOR_spe_evfscfui, "__builtin_spe_evfscfui", SPE_BUILTIN_EVFSCFUI },
6223 { 0, CODE_FOR_spe_evfsctsf, "__builtin_spe_evfsctsf", SPE_BUILTIN_EVFSCTSF },
6224 { 0, CODE_FOR_spe_evfsctsi, "__builtin_spe_evfsctsi", SPE_BUILTIN_EVFSCTSI },
6225 { 0, CODE_FOR_spe_evfsctsiz, "__builtin_spe_evfsctsiz", SPE_BUILTIN_EVFSCTSIZ },
6226 { 0, CODE_FOR_spe_evfsctuf, "__builtin_spe_evfsctuf", SPE_BUILTIN_EVFSCTUF },
6227 { 0, CODE_FOR_spe_evfsctui, "__builtin_spe_evfsctui", SPE_BUILTIN_EVFSCTUI },
6228 { 0, CODE_FOR_spe_evfsctuiz, "__builtin_spe_evfsctuiz", SPE_BUILTIN_EVFSCTUIZ },
6229 { 0, CODE_FOR_spe_evfsnabs, "__builtin_spe_evfsnabs", SPE_BUILTIN_EVFSNABS },
6230 { 0, CODE_FOR_spe_evfsneg, "__builtin_spe_evfsneg", SPE_BUILTIN_EVFSNEG },
6231 { 0, CODE_FOR_spe_evmra, "__builtin_spe_evmra", SPE_BUILTIN_EVMRA },
6232 { 0, CODE_FOR_negv2si2, "__builtin_spe_evneg", SPE_BUILTIN_EVNEG },
6233 { 0, CODE_FOR_spe_evrndw, "__builtin_spe_evrndw", SPE_BUILTIN_EVRNDW },
6234 { 0, CODE_FOR_spe_evsubfsmiaaw, "__builtin_spe_evsubfsmiaaw", SPE_BUILTIN_EVSUBFSMIAAW },
6235 { 0, CODE_FOR_spe_evsubfssiaaw, "__builtin_spe_evsubfssiaaw", SPE_BUILTIN_EVSUBFSSIAAW },
6236 { 0, CODE_FOR_spe_evsubfumiaaw, "__builtin_spe_evsubfumiaaw", SPE_BUILTIN_EVSUBFUMIAAW },
6238 /* Place-holder. Leave as last unary SPE builtin. */
6239 { 0, CODE_FOR_spe_evsubfusiaaw, "__builtin_spe_evsubfusiaaw", SPE_BUILTIN_EVSUBFUSIAAW },
6243 rs6000_expand_unop_builtin (enum insn_code icode, tree arglist, rtx target)
6246 tree arg0 = TREE_VALUE (arglist);
6247 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6248 enum machine_mode tmode = insn_data[icode].operand[0].mode;
6249 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
6251 if (icode == CODE_FOR_nothing)
6252 /* Builtin not supported on this processor. */
6255 /* If we got invalid arguments bail out before generating bad rtl. */
6256 if (arg0 == error_mark_node)
6259 if (icode == CODE_FOR_altivec_vspltisb
6260 || icode == CODE_FOR_altivec_vspltish
6261 || icode == CODE_FOR_altivec_vspltisw
6262 || icode == CODE_FOR_spe_evsplatfi
6263 || icode == CODE_FOR_spe_evsplati)
6265 /* Only allow 5-bit *signed* literals. */
6266 if (GET_CODE (op0) != CONST_INT
6267 || INTVAL (op0) > 0x1f
6268 || INTVAL (op0) < -0x1f)
6270 error ("argument 1 must be a 5-bit signed literal");
6276 || GET_MODE (target) != tmode
6277 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6278 target = gen_reg_rtx (tmode);
6280 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6281 op0 = copy_to_mode_reg (mode0, op0);
6283 pat = GEN_FCN (icode) (target, op0);
6292 altivec_expand_abs_builtin (enum insn_code icode, tree arglist, rtx target)
6294 rtx pat, scratch1, scratch2;
6295 tree arg0 = TREE_VALUE (arglist);
6296 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6297 enum machine_mode tmode = insn_data[icode].operand[0].mode;
6298 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
6300 /* If we have invalid arguments, bail out before generating bad rtl. */
6301 if (arg0 == error_mark_node)
6305 || GET_MODE (target) != tmode
6306 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6307 target = gen_reg_rtx (tmode);
6309 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6310 op0 = copy_to_mode_reg (mode0, op0);
6312 scratch1 = gen_reg_rtx (mode0);
6313 scratch2 = gen_reg_rtx (mode0);
6315 pat = GEN_FCN (icode) (target, op0, scratch1, scratch2);
6324 rs6000_expand_binop_builtin (enum insn_code icode, tree arglist, rtx target)
6327 tree arg0 = TREE_VALUE (arglist);
6328 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
6329 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6330 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6331 enum machine_mode tmode = insn_data[icode].operand[0].mode;
6332 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
6333 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
6335 if (icode == CODE_FOR_nothing)
6336 /* Builtin not supported on this processor. */
6339 /* If we got invalid arguments bail out before generating bad rtl. */
6340 if (arg0 == error_mark_node || arg1 == error_mark_node)
6343 if (icode == CODE_FOR_altivec_vcfux
6344 || icode == CODE_FOR_altivec_vcfsx
6345 || icode == CODE_FOR_altivec_vctsxs
6346 || icode == CODE_FOR_altivec_vctuxs
6347 || icode == CODE_FOR_altivec_vspltb
6348 || icode == CODE_FOR_altivec_vsplth
6349 || icode == CODE_FOR_altivec_vspltw
6350 || icode == CODE_FOR_spe_evaddiw
6351 || icode == CODE_FOR_spe_evldd
6352 || icode == CODE_FOR_spe_evldh
6353 || icode == CODE_FOR_spe_evldw
6354 || icode == CODE_FOR_spe_evlhhesplat
6355 || icode == CODE_FOR_spe_evlhhossplat
6356 || icode == CODE_FOR_spe_evlhhousplat
6357 || icode == CODE_FOR_spe_evlwhe
6358 || icode == CODE_FOR_spe_evlwhos
6359 || icode == CODE_FOR_spe_evlwhou
6360 || icode == CODE_FOR_spe_evlwhsplat
6361 || icode == CODE_FOR_spe_evlwwsplat
6362 || icode == CODE_FOR_spe_evrlwi
6363 || icode == CODE_FOR_spe_evslwi
6364 || icode == CODE_FOR_spe_evsrwis
6365 || icode == CODE_FOR_spe_evsubifw
6366 || icode == CODE_FOR_spe_evsrwiu)
6368 /* Only allow 5-bit unsigned literals. */
6370 if (TREE_CODE (arg1) != INTEGER_CST
6371 || TREE_INT_CST_LOW (arg1) & ~0x1f)
6373 error ("argument 2 must be a 5-bit unsigned literal");
6379 || GET_MODE (target) != tmode
6380 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6381 target = gen_reg_rtx (tmode);
6383 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6384 op0 = copy_to_mode_reg (mode0, op0);
6385 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
6386 op1 = copy_to_mode_reg (mode1, op1);
6388 pat = GEN_FCN (icode) (target, op0, op1);
6397 altivec_expand_predicate_builtin (enum insn_code icode, const char *opcode,
6398 tree arglist, rtx target)
6401 tree cr6_form = TREE_VALUE (arglist);
6402 tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
6403 tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6404 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6405 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6406 enum machine_mode tmode = SImode;
6407 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
6408 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
6411 if (TREE_CODE (cr6_form) != INTEGER_CST)
6413 error ("argument 1 of __builtin_altivec_predicate must be a constant");
6417 cr6_form_int = TREE_INT_CST_LOW (cr6_form);
6422 /* If we have invalid arguments, bail out before generating bad rtl. */
6423 if (arg0 == error_mark_node || arg1 == error_mark_node)
6427 || GET_MODE (target) != tmode
6428 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6429 target = gen_reg_rtx (tmode);
6431 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6432 op0 = copy_to_mode_reg (mode0, op0);
6433 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
6434 op1 = copy_to_mode_reg (mode1, op1);
6436 scratch = gen_reg_rtx (mode0);
6438 pat = GEN_FCN (icode) (scratch, op0, op1,
6439 gen_rtx_SYMBOL_REF (Pmode, opcode));
6444 /* The vec_any* and vec_all* predicates use the same opcodes for two
6445 different operations, but the bits in CR6 will be different
6446 depending on what information we want. So we have to play tricks
6447 with CR6 to get the right bits out.
6449 If you think this is disgusting, look at the specs for the
6450 AltiVec predicates. */
6452 switch (cr6_form_int)
6455 emit_insn (gen_cr6_test_for_zero (target));
6458 emit_insn (gen_cr6_test_for_zero_reverse (target));
6461 emit_insn (gen_cr6_test_for_lt (target));
6464 emit_insn (gen_cr6_test_for_lt_reverse (target));
6467 error ("argument 1 of __builtin_altivec_predicate is out of range");
6475 altivec_expand_lv_builtin (enum insn_code icode, tree arglist, rtx target)
6478 tree arg0 = TREE_VALUE (arglist);
6479 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
6480 enum machine_mode tmode = insn_data[icode].operand[0].mode;
6481 enum machine_mode mode0 = Pmode;
6482 enum machine_mode mode1 = Pmode;
6483 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6484 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6486 if (icode == CODE_FOR_nothing)
6487 /* Builtin not supported on this processor. */
6490 /* If we got invalid arguments bail out before generating bad rtl. */
6491 if (arg0 == error_mark_node || arg1 == error_mark_node)
6495 || GET_MODE (target) != tmode
6496 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6497 target = gen_reg_rtx (tmode);
6499 op1 = copy_to_mode_reg (mode1, op1);
6501 if (op0 == const0_rtx)
6503 addr = gen_rtx_MEM (tmode, op1);
6507 op0 = copy_to_mode_reg (mode0, op0);
6508 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op0, op1));
6511 pat = GEN_FCN (icode) (target, addr);
6521 spe_expand_stv_builtin (enum insn_code icode, tree arglist)
6523 tree arg0 = TREE_VALUE (arglist);
6524 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
6525 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6526 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6527 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6528 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
6530 enum machine_mode mode0 = insn_data[icode].operand[0].mode;
6531 enum machine_mode mode1 = insn_data[icode].operand[1].mode;
6532 enum machine_mode mode2 = insn_data[icode].operand[2].mode;
6534 /* Invalid arguments. Bail before doing anything stoopid! */
6535 if (arg0 == error_mark_node
6536 || arg1 == error_mark_node
6537 || arg2 == error_mark_node)
6540 if (! (*insn_data[icode].operand[2].predicate) (op0, mode2))
6541 op0 = copy_to_mode_reg (mode2, op0);
6542 if (! (*insn_data[icode].operand[0].predicate) (op1, mode0))
6543 op1 = copy_to_mode_reg (mode0, op1);
6544 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
6545 op2 = copy_to_mode_reg (mode1, op2);
6547 pat = GEN_FCN (icode) (op1, op2, op0);
6554 altivec_expand_stv_builtin (enum insn_code icode, tree arglist)
6556 tree arg0 = TREE_VALUE (arglist);
6557 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
6558 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6559 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6560 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6561 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
6563 enum machine_mode tmode = insn_data[icode].operand[0].mode;
6564 enum machine_mode mode1 = Pmode;
6565 enum machine_mode mode2 = Pmode;
6567 /* Invalid arguments. Bail before doing anything stoopid! */
6568 if (arg0 == error_mark_node
6569 || arg1 == error_mark_node
6570 || arg2 == error_mark_node)
6573 if (! (*insn_data[icode].operand[1].predicate) (op0, tmode))
6574 op0 = copy_to_mode_reg (tmode, op0);
6576 op2 = copy_to_mode_reg (mode2, op2);
6578 if (op1 == const0_rtx)
6580 addr = gen_rtx_MEM (tmode, op2);
6584 op1 = copy_to_mode_reg (mode1, op1);
6585 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op1, op2));
6588 pat = GEN_FCN (icode) (addr, op0);
6595 rs6000_expand_ternop_builtin (enum insn_code icode, tree arglist, rtx target)
6598 tree arg0 = TREE_VALUE (arglist);
6599 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
6600 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6601 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6602 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6603 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
6604 enum machine_mode tmode = insn_data[icode].operand[0].mode;
6605 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
6606 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
6607 enum machine_mode mode2 = insn_data[icode].operand[3].mode;
6609 if (icode == CODE_FOR_nothing)
6610 /* Builtin not supported on this processor. */
6613 /* If we got invalid arguments bail out before generating bad rtl. */
6614 if (arg0 == error_mark_node
6615 || arg1 == error_mark_node
6616 || arg2 == error_mark_node)
6619 if (icode == CODE_FOR_altivec_vsldoi_4sf
6620 || icode == CODE_FOR_altivec_vsldoi_4si
6621 || icode == CODE_FOR_altivec_vsldoi_8hi
6622 || icode == CODE_FOR_altivec_vsldoi_16qi)
6624 /* Only allow 4-bit unsigned literals. */
6626 if (TREE_CODE (arg2) != INTEGER_CST
6627 || TREE_INT_CST_LOW (arg2) & ~0xf)
6629 error ("argument 3 must be a 4-bit unsigned literal");
6635 || GET_MODE (target) != tmode
6636 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6637 target = gen_reg_rtx (tmode);
6639 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6640 op0 = copy_to_mode_reg (mode0, op0);
6641 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
6642 op1 = copy_to_mode_reg (mode1, op1);
6643 if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
6644 op2 = copy_to_mode_reg (mode2, op2);
6646 pat = GEN_FCN (icode) (target, op0, op1, op2);
6654 /* Expand the lvx builtins. */
6656 altivec_expand_ld_builtin (tree exp, rtx target, bool *expandedp)
6658 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6659 tree arglist = TREE_OPERAND (exp, 1);
6660 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
6662 enum machine_mode tmode, mode0;
6664 enum insn_code icode;
6668 case ALTIVEC_BUILTIN_LD_INTERNAL_16qi:
6669 icode = CODE_FOR_altivec_lvx_16qi;
6671 case ALTIVEC_BUILTIN_LD_INTERNAL_8hi:
6672 icode = CODE_FOR_altivec_lvx_8hi;
6674 case ALTIVEC_BUILTIN_LD_INTERNAL_4si:
6675 icode = CODE_FOR_altivec_lvx_4si;
6677 case ALTIVEC_BUILTIN_LD_INTERNAL_4sf:
6678 icode = CODE_FOR_altivec_lvx_4sf;
6687 arg0 = TREE_VALUE (arglist);
6688 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6689 tmode = insn_data[icode].operand[0].mode;
6690 mode0 = insn_data[icode].operand[1].mode;
6693 || GET_MODE (target) != tmode
6694 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6695 target = gen_reg_rtx (tmode);
6697 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6698 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
6700 pat = GEN_FCN (icode) (target, op0);
6707 /* Expand the stvx builtins. */
6709 altivec_expand_st_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
6712 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6713 tree arglist = TREE_OPERAND (exp, 1);
6714 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
6716 enum machine_mode mode0, mode1;
6718 enum insn_code icode;
6722 case ALTIVEC_BUILTIN_ST_INTERNAL_16qi:
6723 icode = CODE_FOR_altivec_stvx_16qi;
6725 case ALTIVEC_BUILTIN_ST_INTERNAL_8hi:
6726 icode = CODE_FOR_altivec_stvx_8hi;
6728 case ALTIVEC_BUILTIN_ST_INTERNAL_4si:
6729 icode = CODE_FOR_altivec_stvx_4si;
6731 case ALTIVEC_BUILTIN_ST_INTERNAL_4sf:
6732 icode = CODE_FOR_altivec_stvx_4sf;
6739 arg0 = TREE_VALUE (arglist);
6740 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
6741 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6742 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6743 mode0 = insn_data[icode].operand[0].mode;
6744 mode1 = insn_data[icode].operand[1].mode;
6746 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
6747 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
6748 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
6749 op1 = copy_to_mode_reg (mode1, op1);
6751 pat = GEN_FCN (icode) (op0, op1);
6759 /* Expand the dst builtins. */
6761 altivec_expand_dst_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
6764 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6765 tree arglist = TREE_OPERAND (exp, 1);
6766 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
6767 tree arg0, arg1, arg2;
6768 enum machine_mode mode0, mode1, mode2;
6769 rtx pat, op0, op1, op2;
6770 struct builtin_description *d;
6775 /* Handle DST variants. */
6776 d = (struct builtin_description *) bdesc_dst;
6777 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
6778 if (d->code == fcode)
6780 arg0 = TREE_VALUE (arglist);
6781 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
6782 arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6783 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6784 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6785 op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
6786 mode0 = insn_data[d->icode].operand[0].mode;
6787 mode1 = insn_data[d->icode].operand[1].mode;
6788 mode2 = insn_data[d->icode].operand[2].mode;
6790 /* Invalid arguments, bail out before generating bad rtl. */
6791 if (arg0 == error_mark_node
6792 || arg1 == error_mark_node
6793 || arg2 == error_mark_node)
6798 if (TREE_CODE (arg2) != INTEGER_CST
6799 || TREE_INT_CST_LOW (arg2) & ~0x3)
6801 error ("argument to `%s' must be a 2-bit unsigned literal", d->name);
6805 if (! (*insn_data[d->icode].operand[0].predicate) (op0, mode0))
6806 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
6807 if (! (*insn_data[d->icode].operand[1].predicate) (op1, mode1))
6808 op1 = copy_to_mode_reg (mode1, op1);
6810 pat = GEN_FCN (d->icode) (op0, op1, op2);
6820 /* Expand the builtin in EXP and store the result in TARGET. Store
6821 true in *EXPANDEDP if we found a builtin to expand. */
6823 altivec_expand_builtin (tree exp, rtx target, bool *expandedp)
6825 struct builtin_description *d;
6826 struct builtin_description_predicates *dp;
6828 enum insn_code icode;
6829 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6830 tree arglist = TREE_OPERAND (exp, 1);
6833 enum machine_mode tmode, mode0;
6834 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
6836 target = altivec_expand_ld_builtin (exp, target, expandedp);
6840 target = altivec_expand_st_builtin (exp, target, expandedp);
6844 target = altivec_expand_dst_builtin (exp, target, expandedp);
6852 case ALTIVEC_BUILTIN_STVX:
6853 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx, arglist);
6854 case ALTIVEC_BUILTIN_STVEBX:
6855 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx, arglist);
6856 case ALTIVEC_BUILTIN_STVEHX:
6857 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx, arglist);
6858 case ALTIVEC_BUILTIN_STVEWX:
6859 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvewx, arglist);
6860 case ALTIVEC_BUILTIN_STVXL:
6861 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvxl, arglist);
6863 case ALTIVEC_BUILTIN_MFVSCR:
6864 icode = CODE_FOR_altivec_mfvscr;
6865 tmode = insn_data[icode].operand[0].mode;
6868 || GET_MODE (target) != tmode
6869 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6870 target = gen_reg_rtx (tmode);
6872 pat = GEN_FCN (icode) (target);
6878 case ALTIVEC_BUILTIN_MTVSCR:
6879 icode = CODE_FOR_altivec_mtvscr;
6880 arg0 = TREE_VALUE (arglist);
6881 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6882 mode0 = insn_data[icode].operand[0].mode;
6884 /* If we got invalid arguments bail out before generating bad rtl. */
6885 if (arg0 == error_mark_node)
6888 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
6889 op0 = copy_to_mode_reg (mode0, op0);
6891 pat = GEN_FCN (icode) (op0);
6896 case ALTIVEC_BUILTIN_DSSALL:
6897 emit_insn (gen_altivec_dssall ());
6900 case ALTIVEC_BUILTIN_DSS:
6901 icode = CODE_FOR_altivec_dss;
6902 arg0 = TREE_VALUE (arglist);
6904 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6905 mode0 = insn_data[icode].operand[0].mode;
6907 /* If we got invalid arguments bail out before generating bad rtl. */
6908 if (arg0 == error_mark_node)
6911 if (TREE_CODE (arg0) != INTEGER_CST
6912 || TREE_INT_CST_LOW (arg0) & ~0x3)
6914 error ("argument to dss must be a 2-bit unsigned literal");
6918 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
6919 op0 = copy_to_mode_reg (mode0, op0);
6921 emit_insn (gen_altivec_dss (op0));
6924 case ALTIVEC_BUILTIN_COMPILETIME_ERROR:
6925 arg0 = TREE_VALUE (arglist);
6926 while (TREE_CODE (arg0) == NOP_EXPR || TREE_CODE (arg0) == ADDR_EXPR
6927 || TREE_CODE (arg0) == ARRAY_REF)
6928 arg0 = TREE_OPERAND (arg0, 0);
6929 error ("invalid parameter combination for `%s' AltiVec intrinsic",
6930 TREE_STRING_POINTER (arg0));
6935 /* Expand abs* operations. */
6936 d = (struct builtin_description *) bdesc_abs;
6937 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
6938 if (d->code == fcode)
6939 return altivec_expand_abs_builtin (d->icode, arglist, target);
6941 /* Expand the AltiVec predicates. */
6942 dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
6943 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
6944 if (dp->code == fcode)
6945 return altivec_expand_predicate_builtin (dp->icode, dp->opcode,
6948 /* LV* are funky. We initialized them differently. */
6951 case ALTIVEC_BUILTIN_LVSL:
6952 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvsl,
6954 case ALTIVEC_BUILTIN_LVSR:
6955 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvsr,
6957 case ALTIVEC_BUILTIN_LVEBX:
6958 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvebx,
6960 case ALTIVEC_BUILTIN_LVEHX:
6961 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvehx,
6963 case ALTIVEC_BUILTIN_LVEWX:
6964 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvewx,
6966 case ALTIVEC_BUILTIN_LVXL:
6967 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvxl,
6969 case ALTIVEC_BUILTIN_LVX:
6970 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvx,
6981 /* Binops that need to be initialized manually, but can be expanded
6982 automagically by rs6000_expand_binop_builtin. */
6983 static struct builtin_description bdesc_2arg_spe[] =
6985 { 0, CODE_FOR_spe_evlddx, "__builtin_spe_evlddx", SPE_BUILTIN_EVLDDX },
6986 { 0, CODE_FOR_spe_evldwx, "__builtin_spe_evldwx", SPE_BUILTIN_EVLDWX },
6987 { 0, CODE_FOR_spe_evldhx, "__builtin_spe_evldhx", SPE_BUILTIN_EVLDHX },
6988 { 0, CODE_FOR_spe_evlwhex, "__builtin_spe_evlwhex", SPE_BUILTIN_EVLWHEX },
6989 { 0, CODE_FOR_spe_evlwhoux, "__builtin_spe_evlwhoux", SPE_BUILTIN_EVLWHOUX },
6990 { 0, CODE_FOR_spe_evlwhosx, "__builtin_spe_evlwhosx", SPE_BUILTIN_EVLWHOSX },
6991 { 0, CODE_FOR_spe_evlwwsplatx, "__builtin_spe_evlwwsplatx", SPE_BUILTIN_EVLWWSPLATX },
6992 { 0, CODE_FOR_spe_evlwhsplatx, "__builtin_spe_evlwhsplatx", SPE_BUILTIN_EVLWHSPLATX },
6993 { 0, CODE_FOR_spe_evlhhesplatx, "__builtin_spe_evlhhesplatx", SPE_BUILTIN_EVLHHESPLATX },
6994 { 0, CODE_FOR_spe_evlhhousplatx, "__builtin_spe_evlhhousplatx", SPE_BUILTIN_EVLHHOUSPLATX },
6995 { 0, CODE_FOR_spe_evlhhossplatx, "__builtin_spe_evlhhossplatx", SPE_BUILTIN_EVLHHOSSPLATX },
6996 { 0, CODE_FOR_spe_evldd, "__builtin_spe_evldd", SPE_BUILTIN_EVLDD },
6997 { 0, CODE_FOR_spe_evldw, "__builtin_spe_evldw", SPE_BUILTIN_EVLDW },
6998 { 0, CODE_FOR_spe_evldh, "__builtin_spe_evldh", SPE_BUILTIN_EVLDH },
6999 { 0, CODE_FOR_spe_evlwhe, "__builtin_spe_evlwhe", SPE_BUILTIN_EVLWHE },
7000 { 0, CODE_FOR_spe_evlwhou, "__builtin_spe_evlwhou", SPE_BUILTIN_EVLWHOU },
7001 { 0, CODE_FOR_spe_evlwhos, "__builtin_spe_evlwhos", SPE_BUILTIN_EVLWHOS },
7002 { 0, CODE_FOR_spe_evlwwsplat, "__builtin_spe_evlwwsplat", SPE_BUILTIN_EVLWWSPLAT },
7003 { 0, CODE_FOR_spe_evlwhsplat, "__builtin_spe_evlwhsplat", SPE_BUILTIN_EVLWHSPLAT },
7004 { 0, CODE_FOR_spe_evlhhesplat, "__builtin_spe_evlhhesplat", SPE_BUILTIN_EVLHHESPLAT },
7005 { 0, CODE_FOR_spe_evlhhousplat, "__builtin_spe_evlhhousplat", SPE_BUILTIN_EVLHHOUSPLAT },
7006 { 0, CODE_FOR_spe_evlhhossplat, "__builtin_spe_evlhhossplat", SPE_BUILTIN_EVLHHOSSPLAT }
7009 /* Expand the builtin in EXP and store the result in TARGET. Store
7010 true in *EXPANDEDP if we found a builtin to expand.
7012 This expands the SPE builtins that are not simple unary and binary
7015 spe_expand_builtin (tree exp, rtx target, bool *expandedp)
7017 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7018 tree arglist = TREE_OPERAND (exp, 1);
7020 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
7021 enum insn_code icode;
7022 enum machine_mode tmode, mode0;
7024 struct builtin_description *d;
7029 /* Syntax check for a 5-bit unsigned immediate. */
7032 case SPE_BUILTIN_EVSTDD:
7033 case SPE_BUILTIN_EVSTDH:
7034 case SPE_BUILTIN_EVSTDW:
7035 case SPE_BUILTIN_EVSTWHE:
7036 case SPE_BUILTIN_EVSTWHO:
7037 case SPE_BUILTIN_EVSTWWE:
7038 case SPE_BUILTIN_EVSTWWO:
7039 arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
7040 if (TREE_CODE (arg1) != INTEGER_CST
7041 || TREE_INT_CST_LOW (arg1) & ~0x1f)
7043 error ("argument 2 must be a 5-bit unsigned literal");
7051 /* The evsplat*i instructions are not quite generic. */
7054 case SPE_BUILTIN_EVSPLATFI:
7055 return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplatfi,
7057 case SPE_BUILTIN_EVSPLATI:
7058 return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplati,
7064 d = (struct builtin_description *) bdesc_2arg_spe;
7065 for (i = 0; i < ARRAY_SIZE (bdesc_2arg_spe); ++i, ++d)
7066 if (d->code == fcode)
7067 return rs6000_expand_binop_builtin (d->icode, arglist, target);
7069 d = (struct builtin_description *) bdesc_spe_predicates;
7070 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, ++d)
7071 if (d->code == fcode)
7072 return spe_expand_predicate_builtin (d->icode, arglist, target);
7074 d = (struct builtin_description *) bdesc_spe_evsel;
7075 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, ++d)
7076 if (d->code == fcode)
7077 return spe_expand_evsel_builtin (d->icode, arglist, target);
7081 case SPE_BUILTIN_EVSTDDX:
7082 return spe_expand_stv_builtin (CODE_FOR_spe_evstddx, arglist);
7083 case SPE_BUILTIN_EVSTDHX:
7084 return spe_expand_stv_builtin (CODE_FOR_spe_evstdhx, arglist);
7085 case SPE_BUILTIN_EVSTDWX:
7086 return spe_expand_stv_builtin (CODE_FOR_spe_evstdwx, arglist);
7087 case SPE_BUILTIN_EVSTWHEX:
7088 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhex, arglist);
7089 case SPE_BUILTIN_EVSTWHOX:
7090 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhox, arglist);
7091 case SPE_BUILTIN_EVSTWWEX:
7092 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwex, arglist);
7093 case SPE_BUILTIN_EVSTWWOX:
7094 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwox, arglist);
7095 case SPE_BUILTIN_EVSTDD:
7096 return spe_expand_stv_builtin (CODE_FOR_spe_evstdd, arglist);
7097 case SPE_BUILTIN_EVSTDH:
7098 return spe_expand_stv_builtin (CODE_FOR_spe_evstdh, arglist);
7099 case SPE_BUILTIN_EVSTDW:
7100 return spe_expand_stv_builtin (CODE_FOR_spe_evstdw, arglist);
7101 case SPE_BUILTIN_EVSTWHE:
7102 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhe, arglist);
7103 case SPE_BUILTIN_EVSTWHO:
7104 return spe_expand_stv_builtin (CODE_FOR_spe_evstwho, arglist);
7105 case SPE_BUILTIN_EVSTWWE:
7106 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwe, arglist);
7107 case SPE_BUILTIN_EVSTWWO:
7108 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwo, arglist);
7109 case SPE_BUILTIN_MFSPEFSCR:
7110 icode = CODE_FOR_spe_mfspefscr;
7111 tmode = insn_data[icode].operand[0].mode;
7114 || GET_MODE (target) != tmode
7115 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7116 target = gen_reg_rtx (tmode);
7118 pat = GEN_FCN (icode) (target);
7123 case SPE_BUILTIN_MTSPEFSCR:
7124 icode = CODE_FOR_spe_mtspefscr;
7125 arg0 = TREE_VALUE (arglist);
7126 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
7127 mode0 = insn_data[icode].operand[0].mode;
7129 if (arg0 == error_mark_node)
7132 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
7133 op0 = copy_to_mode_reg (mode0, op0);
7135 pat = GEN_FCN (icode) (op0);
7148 spe_expand_predicate_builtin (enum insn_code icode, tree arglist, rtx target)
7150 rtx pat, scratch, tmp;
7151 tree form = TREE_VALUE (arglist);
7152 tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
7153 tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
7154 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
7155 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
7156 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7157 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
7161 if (TREE_CODE (form) != INTEGER_CST)
7163 error ("argument 1 of __builtin_spe_predicate must be a constant");
7167 form_int = TREE_INT_CST_LOW (form);
7172 if (arg0 == error_mark_node || arg1 == error_mark_node)
7176 || GET_MODE (target) != SImode
7177 || ! (*insn_data[icode].operand[0].predicate) (target, SImode))
7178 target = gen_reg_rtx (SImode);
7180 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7181 op0 = copy_to_mode_reg (mode0, op0);
7182 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
7183 op1 = copy_to_mode_reg (mode1, op1);
7185 scratch = gen_reg_rtx (CCmode);
7187 pat = GEN_FCN (icode) (scratch, op0, op1);
7192 /* There are 4 variants for each predicate: _any_, _all_, _upper_,
7193 _lower_. We use one compare, but look in different bits of the
7194 CR for each variant.
7196 There are 2 elements in each SPE simd type (upper/lower). The CR
7197 bits are set as follows:
7199 BIT0 | BIT 1 | BIT 2 | BIT 3
7200 U | L | (U | L) | (U & L)
7202 So, for an "all" relationship, BIT 3 would be set.
7203 For an "any" relationship, BIT 2 would be set. Etc.
7205 Following traditional nomenclature, these bits map to:
7207 BIT0 | BIT 1 | BIT 2 | BIT 3
7210 Later, we will generate rtl to look in the LT/EQ/EQ/OV bits.
7215 /* All variant. OV bit. */
7217 /* We need to get to the OV bit, which is the ORDERED bit. We
7218 could generate (ordered:SI (reg:CC xx) (const_int 0)), but
7219 that's ugly and will trigger a validate_condition_mode abort.
7220 So let's just use another pattern. */
7221 emit_insn (gen_move_from_CR_ov_bit (target, scratch));
7223 /* Any variant. EQ bit. */
7227 /* Upper variant. LT bit. */
7231 /* Lower variant. GT bit. */
7236 error ("argument 1 of __builtin_spe_predicate is out of range");
7240 tmp = gen_rtx_fmt_ee (code, SImode, scratch, const0_rtx);
7241 emit_move_insn (target, tmp);
7246 /* The evsel builtins look like this:
7248 e = __builtin_spe_evsel_OP (a, b, c, d);
7252 e[upper] = a[upper] *OP* b[upper] ? c[upper] : d[upper];
7253 e[lower] = a[lower] *OP* b[lower] ? c[lower] : d[lower];
7257 spe_expand_evsel_builtin (enum insn_code icode, tree arglist, rtx target)
7260 tree arg0 = TREE_VALUE (arglist);
7261 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
7262 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
7263 tree arg3 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arglist))));
7264 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
7265 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
7266 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
7267 rtx op3 = expand_expr (arg3, NULL_RTX, VOIDmode, 0);
7268 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7269 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
7274 if (arg0 == error_mark_node || arg1 == error_mark_node
7275 || arg2 == error_mark_node || arg3 == error_mark_node)
7279 || GET_MODE (target) != mode0
7280 || ! (*insn_data[icode].operand[0].predicate) (target, mode0))
7281 target = gen_reg_rtx (mode0);
7283 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7284 op0 = copy_to_mode_reg (mode0, op0);
7285 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
7286 op1 = copy_to_mode_reg (mode0, op1);
7287 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
7288 op2 = copy_to_mode_reg (mode0, op2);
7289 if (! (*insn_data[icode].operand[1].predicate) (op3, mode1))
7290 op3 = copy_to_mode_reg (mode0, op3);
7292 /* Generate the compare. */
7293 scratch = gen_reg_rtx (CCmode);
7294 pat = GEN_FCN (icode) (scratch, op0, op1);
7299 if (mode0 == V2SImode)
7300 emit_insn (gen_spe_evsel (target, op2, op3, scratch));
7302 emit_insn (gen_spe_evsel_fs (target, op2, op3, scratch));
7307 /* Expand an expression EXP that calls a built-in function,
7308 with result going to TARGET if that's convenient
7309 (and in mode MODE if that's convenient).
7310 SUBTARGET may be used as the target for computing one of EXP's operands.
7311 IGNORE is nonzero if the value is to be ignored. */
7314 rs6000_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
7315 enum machine_mode mode ATTRIBUTE_UNUSED,
7316 int ignore ATTRIBUTE_UNUSED)
7318 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7319 tree arglist = TREE_OPERAND (exp, 1);
7320 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
7321 struct builtin_description *d;
7326 if (fcode == ALTIVEC_BUILTIN_MASK_FOR_LOAD
7327 || fcode == ALTIVEC_BUILTIN_MASK_FOR_STORE)
7329 int icode = (int) CODE_FOR_altivec_lvsr;
7330 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7331 enum machine_mode mode = insn_data[icode].operand[1].mode;
7335 if (!TARGET_ALTIVEC)
7338 arg = TREE_VALUE (arglist);
7339 if (TREE_CODE (TREE_TYPE (arg)) != POINTER_TYPE)
7341 op = expand_expr (arg, NULL_RTX, Pmode, EXPAND_NORMAL);
7342 addr = memory_address (mode, op);
7343 if (fcode == ALTIVEC_BUILTIN_MASK_FOR_STORE)
7347 /* For the load case need to negate the address. */
7348 op = gen_reg_rtx (GET_MODE (addr));
7349 emit_insn (gen_rtx_SET (VOIDmode, op,
7350 gen_rtx_NEG (GET_MODE (addr), addr)));
7352 op = gen_rtx_MEM (mode, op);
7355 || GET_MODE (target) != tmode
7356 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7357 target = gen_reg_rtx (tmode);
7359 /*pat = gen_altivec_lvsr (target, op);*/
7360 pat = GEN_FCN (icode) (target, op);
7370 ret = altivec_expand_builtin (exp, target, &success);
7377 ret = spe_expand_builtin (exp, target, &success);
7383 if (TARGET_ALTIVEC || TARGET_SPE)
7385 /* Handle simple unary operations. */
7386 d = (struct builtin_description *) bdesc_1arg;
7387 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
7388 if (d->code == fcode)
7389 return rs6000_expand_unop_builtin (d->icode, arglist, target);
7391 /* Handle simple binary operations. */
7392 d = (struct builtin_description *) bdesc_2arg;
7393 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
7394 if (d->code == fcode)
7395 return rs6000_expand_binop_builtin (d->icode, arglist, target);
7397 /* Handle simple ternary operations. */
7398 d = (struct builtin_description *) bdesc_3arg;
7399 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
7400 if (d->code == fcode)
7401 return rs6000_expand_ternop_builtin (d->icode, arglist, target);
7409 build_opaque_vector_type (tree node, int nunits)
7411 node = copy_node (node);
7412 TYPE_MAIN_VARIANT (node) = node;
7413 return build_vector_type (node, nunits);
7417 rs6000_init_builtins (void)
7419 V2SI_type_node = build_vector_type (intSI_type_node, 2);
7420 V2SF_type_node = build_vector_type (float_type_node, 2);
7421 V4HI_type_node = build_vector_type (intHI_type_node, 4);
7422 V4SI_type_node = build_vector_type (intSI_type_node, 4);
7423 V4SF_type_node = build_vector_type (float_type_node, 4);
7424 V8HI_type_node = build_vector_type (intHI_type_node, 8);
7425 V16QI_type_node = build_vector_type (intQI_type_node, 16);
7427 unsigned_V16QI_type_node = build_vector_type (unsigned_intQI_type_node, 16);
7428 unsigned_V8HI_type_node = build_vector_type (unsigned_intHI_type_node, 8);
7429 unsigned_V4SI_type_node = build_vector_type (unsigned_intSI_type_node, 4);
7431 opaque_V2SF_type_node = build_opaque_vector_type (float_type_node, 2);
7432 opaque_V2SI_type_node = build_opaque_vector_type (intSI_type_node, 2);
7433 opaque_p_V2SI_type_node = build_pointer_type (opaque_V2SI_type_node);
7435 /* The 'vector bool ...' types must be kept distinct from 'vector unsigned ...'
7436 types, especially in C++ land. Similarly, 'vector pixel' is distinct from
7437 'vector unsigned short'. */
7439 bool_char_type_node = build_distinct_type_copy (unsigned_intQI_type_node);
7440 bool_short_type_node = build_distinct_type_copy (unsigned_intHI_type_node);
7441 bool_int_type_node = build_distinct_type_copy (unsigned_intSI_type_node);
7442 pixel_type_node = build_distinct_type_copy (unsigned_intHI_type_node);
7444 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7445 get_identifier ("__bool char"),
7446 bool_char_type_node));
7447 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7448 get_identifier ("__bool short"),
7449 bool_short_type_node));
7450 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7451 get_identifier ("__bool int"),
7452 bool_int_type_node));
7453 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7454 get_identifier ("__pixel"),
7457 bool_V16QI_type_node = build_vector_type (bool_char_type_node, 16);
7458 bool_V8HI_type_node = build_vector_type (bool_short_type_node, 8);
7459 bool_V4SI_type_node = build_vector_type (bool_int_type_node, 4);
7460 pixel_V8HI_type_node = build_vector_type (pixel_type_node, 8);
7462 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7463 get_identifier ("__vector unsigned char"),
7464 unsigned_V16QI_type_node));
7465 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7466 get_identifier ("__vector signed char"),
7468 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7469 get_identifier ("__vector __bool char"),
7470 bool_V16QI_type_node));
7472 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7473 get_identifier ("__vector unsigned short"),
7474 unsigned_V8HI_type_node));
7475 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7476 get_identifier ("__vector signed short"),
7478 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7479 get_identifier ("__vector __bool short"),
7480 bool_V8HI_type_node));
7482 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7483 get_identifier ("__vector unsigned int"),
7484 unsigned_V4SI_type_node));
7485 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7486 get_identifier ("__vector signed int"),
7488 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7489 get_identifier ("__vector __bool int"),
7490 bool_V4SI_type_node));
7492 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7493 get_identifier ("__vector float"),
7495 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7496 get_identifier ("__vector __pixel"),
7497 pixel_V8HI_type_node));
7500 spe_init_builtins ();
7502 altivec_init_builtins ();
7503 if (TARGET_ALTIVEC || TARGET_SPE)
7504 rs6000_common_init_builtins ();
7507 /* Search through a set of builtins and enable the mask bits.
7508 DESC is an array of builtins.
7509 SIZE is the total number of builtins.
7510 START is the builtin enum at which to start.
7511 END is the builtin enum at which to end. */
7513 enable_mask_for_builtins (struct builtin_description *desc, int size,
7514 enum rs6000_builtins start,
7515 enum rs6000_builtins end)
7519 for (i = 0; i < size; ++i)
7520 if (desc[i].code == start)
7526 for (; i < size; ++i)
7528 /* Flip all the bits on. */
7529 desc[i].mask = target_flags;
7530 if (desc[i].code == end)
7536 spe_init_builtins (void)
7538 tree endlink = void_list_node;
7539 tree puint_type_node = build_pointer_type (unsigned_type_node);
7540 tree pushort_type_node = build_pointer_type (short_unsigned_type_node);
7541 struct builtin_description *d;
7544 tree v2si_ftype_4_v2si
7545 = build_function_type
7546 (opaque_V2SI_type_node,
7547 tree_cons (NULL_TREE, opaque_V2SI_type_node,
7548 tree_cons (NULL_TREE, opaque_V2SI_type_node,
7549 tree_cons (NULL_TREE, opaque_V2SI_type_node,
7550 tree_cons (NULL_TREE, opaque_V2SI_type_node,
7553 tree v2sf_ftype_4_v2sf
7554 = build_function_type
7555 (opaque_V2SF_type_node,
7556 tree_cons (NULL_TREE, opaque_V2SF_type_node,
7557 tree_cons (NULL_TREE, opaque_V2SF_type_node,
7558 tree_cons (NULL_TREE, opaque_V2SF_type_node,
7559 tree_cons (NULL_TREE, opaque_V2SF_type_node,
7562 tree int_ftype_int_v2si_v2si
7563 = build_function_type
7565 tree_cons (NULL_TREE, integer_type_node,
7566 tree_cons (NULL_TREE, opaque_V2SI_type_node,
7567 tree_cons (NULL_TREE, opaque_V2SI_type_node,
7570 tree int_ftype_int_v2sf_v2sf
7571 = build_function_type
7573 tree_cons (NULL_TREE, integer_type_node,
7574 tree_cons (NULL_TREE, opaque_V2SF_type_node,
7575 tree_cons (NULL_TREE, opaque_V2SF_type_node,
7578 tree void_ftype_v2si_puint_int
7579 = build_function_type (void_type_node,
7580 tree_cons (NULL_TREE, opaque_V2SI_type_node,
7581 tree_cons (NULL_TREE, puint_type_node,
7582 tree_cons (NULL_TREE,
7586 tree void_ftype_v2si_puint_char
7587 = build_function_type (void_type_node,
7588 tree_cons (NULL_TREE, opaque_V2SI_type_node,
7589 tree_cons (NULL_TREE, puint_type_node,
7590 tree_cons (NULL_TREE,
7594 tree void_ftype_v2si_pv2si_int
7595 = build_function_type (void_type_node,
7596 tree_cons (NULL_TREE, opaque_V2SI_type_node,
7597 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
7598 tree_cons (NULL_TREE,
7602 tree void_ftype_v2si_pv2si_char
7603 = build_function_type (void_type_node,
7604 tree_cons (NULL_TREE, opaque_V2SI_type_node,
7605 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
7606 tree_cons (NULL_TREE,
7611 = build_function_type (void_type_node,
7612 tree_cons (NULL_TREE, integer_type_node, endlink));
7615 = build_function_type (integer_type_node, endlink);
7617 tree v2si_ftype_pv2si_int
7618 = build_function_type (opaque_V2SI_type_node,
7619 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
7620 tree_cons (NULL_TREE, integer_type_node,
7623 tree v2si_ftype_puint_int
7624 = build_function_type (opaque_V2SI_type_node,
7625 tree_cons (NULL_TREE, puint_type_node,
7626 tree_cons (NULL_TREE, integer_type_node,
7629 tree v2si_ftype_pushort_int
7630 = build_function_type (opaque_V2SI_type_node,
7631 tree_cons (NULL_TREE, pushort_type_node,
7632 tree_cons (NULL_TREE, integer_type_node,
7635 tree v2si_ftype_signed_char
7636 = build_function_type (opaque_V2SI_type_node,
7637 tree_cons (NULL_TREE, signed_char_type_node,
7640 /* The initialization of the simple binary and unary builtins is
7641 done in rs6000_common_init_builtins, but we have to enable the
7642 mask bits here manually because we have run out of `target_flags'
7643 bits. We really need to redesign this mask business. */
7645 enable_mask_for_builtins ((struct builtin_description *) bdesc_2arg,
7646 ARRAY_SIZE (bdesc_2arg),
7649 enable_mask_for_builtins ((struct builtin_description *) bdesc_1arg,
7650 ARRAY_SIZE (bdesc_1arg),
7652 SPE_BUILTIN_EVSUBFUSIAAW);
7653 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_predicates,
7654 ARRAY_SIZE (bdesc_spe_predicates),
7655 SPE_BUILTIN_EVCMPEQ,
7656 SPE_BUILTIN_EVFSTSTLT);
7657 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_evsel,
7658 ARRAY_SIZE (bdesc_spe_evsel),
7659 SPE_BUILTIN_EVSEL_CMPGTS,
7660 SPE_BUILTIN_EVSEL_FSTSTEQ);
7662 (*lang_hooks.decls.pushdecl)
7663 (build_decl (TYPE_DECL, get_identifier ("__ev64_opaque__"),
7664 opaque_V2SI_type_node));
7666 /* Initialize irregular SPE builtins. */
7668 def_builtin (target_flags, "__builtin_spe_mtspefscr", void_ftype_int, SPE_BUILTIN_MTSPEFSCR);
7669 def_builtin (target_flags, "__builtin_spe_mfspefscr", int_ftype_void, SPE_BUILTIN_MFSPEFSCR);
7670 def_builtin (target_flags, "__builtin_spe_evstddx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDDX);
7671 def_builtin (target_flags, "__builtin_spe_evstdhx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDHX);
7672 def_builtin (target_flags, "__builtin_spe_evstdwx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDWX);
7673 def_builtin (target_flags, "__builtin_spe_evstwhex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHEX);
7674 def_builtin (target_flags, "__builtin_spe_evstwhox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHOX);
7675 def_builtin (target_flags, "__builtin_spe_evstwwex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWEX);
7676 def_builtin (target_flags, "__builtin_spe_evstwwox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWOX);
7677 def_builtin (target_flags, "__builtin_spe_evstdd", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDD);
7678 def_builtin (target_flags, "__builtin_spe_evstdh", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDH);
7679 def_builtin (target_flags, "__builtin_spe_evstdw", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDW);
7680 def_builtin (target_flags, "__builtin_spe_evstwhe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHE);
7681 def_builtin (target_flags, "__builtin_spe_evstwho", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHO);
7682 def_builtin (target_flags, "__builtin_spe_evstwwe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWE);
7683 def_builtin (target_flags, "__builtin_spe_evstwwo", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWO);
7684 def_builtin (target_flags, "__builtin_spe_evsplatfi", v2si_ftype_signed_char, SPE_BUILTIN_EVSPLATFI);
7685 def_builtin (target_flags, "__builtin_spe_evsplati", v2si_ftype_signed_char, SPE_BUILTIN_EVSPLATI);
7688 def_builtin (target_flags, "__builtin_spe_evlddx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDDX);
7689 def_builtin (target_flags, "__builtin_spe_evldwx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDWX);
7690 def_builtin (target_flags, "__builtin_spe_evldhx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDHX);
7691 def_builtin (target_flags, "__builtin_spe_evlwhex", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHEX);
7692 def_builtin (target_flags, "__builtin_spe_evlwhoux", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOUX);
7693 def_builtin (target_flags, "__builtin_spe_evlwhosx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOSX);
7694 def_builtin (target_flags, "__builtin_spe_evlwwsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLATX);
7695 def_builtin (target_flags, "__builtin_spe_evlwhsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLATX);
7696 def_builtin (target_flags, "__builtin_spe_evlhhesplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLATX);
7697 def_builtin (target_flags, "__builtin_spe_evlhhousplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLATX);
7698 def_builtin (target_flags, "__builtin_spe_evlhhossplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLATX);
7699 def_builtin (target_flags, "__builtin_spe_evldd", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDD);
7700 def_builtin (target_flags, "__builtin_spe_evldw", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDW);
7701 def_builtin (target_flags, "__builtin_spe_evldh", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDH);
7702 def_builtin (target_flags, "__builtin_spe_evlhhesplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLAT);
7703 def_builtin (target_flags, "__builtin_spe_evlhhossplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLAT);
7704 def_builtin (target_flags, "__builtin_spe_evlhhousplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLAT);
7705 def_builtin (target_flags, "__builtin_spe_evlwhe", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHE);
7706 def_builtin (target_flags, "__builtin_spe_evlwhos", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOS);
7707 def_builtin (target_flags, "__builtin_spe_evlwhou", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOU);
7708 def_builtin (target_flags, "__builtin_spe_evlwhsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLAT);
7709 def_builtin (target_flags, "__builtin_spe_evlwwsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLAT);
7712 d = (struct builtin_description *) bdesc_spe_predicates;
7713 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, d++)
7717 switch (insn_data[d->icode].operand[1].mode)
7720 type = int_ftype_int_v2si_v2si;
7723 type = int_ftype_int_v2sf_v2sf;
7729 def_builtin (d->mask, d->name, type, d->code);
7732 /* Evsel predicates. */
7733 d = (struct builtin_description *) bdesc_spe_evsel;
7734 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, d++)
7738 switch (insn_data[d->icode].operand[1].mode)
7741 type = v2si_ftype_4_v2si;
7744 type = v2sf_ftype_4_v2sf;
7750 def_builtin (d->mask, d->name, type, d->code);
7755 altivec_init_builtins (void)
7757 struct builtin_description *d;
7758 struct builtin_description_predicates *dp;
7760 tree pfloat_type_node = build_pointer_type (float_type_node);
7761 tree pint_type_node = build_pointer_type (integer_type_node);
7762 tree pshort_type_node = build_pointer_type (short_integer_type_node);
7763 tree pchar_type_node = build_pointer_type (char_type_node);
7765 tree pvoid_type_node = build_pointer_type (void_type_node);
7767 tree pcfloat_type_node = build_pointer_type (build_qualified_type (float_type_node, TYPE_QUAL_CONST));
7768 tree pcint_type_node = build_pointer_type (build_qualified_type (integer_type_node, TYPE_QUAL_CONST));
7769 tree pcshort_type_node = build_pointer_type (build_qualified_type (short_integer_type_node, TYPE_QUAL_CONST));
7770 tree pcchar_type_node = build_pointer_type (build_qualified_type (char_type_node, TYPE_QUAL_CONST));
7772 tree pcvoid_type_node = build_pointer_type (build_qualified_type (void_type_node, TYPE_QUAL_CONST));
7774 tree int_ftype_int_v4si_v4si
7775 = build_function_type_list (integer_type_node,
7776 integer_type_node, V4SI_type_node,
7777 V4SI_type_node, NULL_TREE);
7778 tree v4sf_ftype_pcfloat
7779 = build_function_type_list (V4SF_type_node, pcfloat_type_node, NULL_TREE);
7780 tree void_ftype_pfloat_v4sf
7781 = build_function_type_list (void_type_node,
7782 pfloat_type_node, V4SF_type_node, NULL_TREE);
7783 tree v4si_ftype_pcint
7784 = build_function_type_list (V4SI_type_node, pcint_type_node, NULL_TREE);
7785 tree void_ftype_pint_v4si
7786 = build_function_type_list (void_type_node,
7787 pint_type_node, V4SI_type_node, NULL_TREE);
7788 tree v8hi_ftype_pcshort
7789 = build_function_type_list (V8HI_type_node, pcshort_type_node, NULL_TREE);
7790 tree void_ftype_pshort_v8hi
7791 = build_function_type_list (void_type_node,
7792 pshort_type_node, V8HI_type_node, NULL_TREE);
7793 tree v16qi_ftype_pcchar
7794 = build_function_type_list (V16QI_type_node, pcchar_type_node, NULL_TREE);
7795 tree void_ftype_pchar_v16qi
7796 = build_function_type_list (void_type_node,
7797 pchar_type_node, V16QI_type_node, NULL_TREE);
7798 tree void_ftype_v4si
7799 = build_function_type_list (void_type_node, V4SI_type_node, NULL_TREE);
7800 tree v8hi_ftype_void
7801 = build_function_type (V8HI_type_node, void_list_node);
7802 tree void_ftype_void
7803 = build_function_type (void_type_node, void_list_node);
7805 = build_function_type_list (void_type_node, integer_type_node, NULL_TREE);
7807 tree v16qi_ftype_long_pcvoid
7808 = build_function_type_list (V16QI_type_node,
7809 long_integer_type_node, pcvoid_type_node, NULL_TREE);
7810 tree v8hi_ftype_long_pcvoid
7811 = build_function_type_list (V8HI_type_node,
7812 long_integer_type_node, pcvoid_type_node, NULL_TREE);
7813 tree v4si_ftype_long_pcvoid
7814 = build_function_type_list (V4SI_type_node,
7815 long_integer_type_node, pcvoid_type_node, NULL_TREE);
7817 tree void_ftype_v4si_long_pvoid
7818 = build_function_type_list (void_type_node,
7819 V4SI_type_node, long_integer_type_node,
7820 pvoid_type_node, NULL_TREE);
7821 tree void_ftype_v16qi_long_pvoid
7822 = build_function_type_list (void_type_node,
7823 V16QI_type_node, long_integer_type_node,
7824 pvoid_type_node, NULL_TREE);
7825 tree void_ftype_v8hi_long_pvoid
7826 = build_function_type_list (void_type_node,
7827 V8HI_type_node, long_integer_type_node,
7828 pvoid_type_node, NULL_TREE);
7829 tree int_ftype_int_v8hi_v8hi
7830 = build_function_type_list (integer_type_node,
7831 integer_type_node, V8HI_type_node,
7832 V8HI_type_node, NULL_TREE);
7833 tree int_ftype_int_v16qi_v16qi
7834 = build_function_type_list (integer_type_node,
7835 integer_type_node, V16QI_type_node,
7836 V16QI_type_node, NULL_TREE);
7837 tree int_ftype_int_v4sf_v4sf
7838 = build_function_type_list (integer_type_node,
7839 integer_type_node, V4SF_type_node,
7840 V4SF_type_node, NULL_TREE);
7841 tree v4si_ftype_v4si
7842 = build_function_type_list (V4SI_type_node, V4SI_type_node, NULL_TREE);
7843 tree v8hi_ftype_v8hi
7844 = build_function_type_list (V8HI_type_node, V8HI_type_node, NULL_TREE);
7845 tree v16qi_ftype_v16qi
7846 = build_function_type_list (V16QI_type_node, V16QI_type_node, NULL_TREE);
7847 tree v4sf_ftype_v4sf
7848 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
7849 tree void_ftype_pcvoid_int_int
7850 = build_function_type_list (void_type_node,
7851 pcvoid_type_node, integer_type_node,
7852 integer_type_node, NULL_TREE);
7853 tree int_ftype_pcchar
7854 = build_function_type_list (integer_type_node,
7855 pcchar_type_node, NULL_TREE);
7857 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pcfloat,
7858 ALTIVEC_BUILTIN_LD_INTERNAL_4sf);
7859 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf,
7860 ALTIVEC_BUILTIN_ST_INTERNAL_4sf);
7861 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4si", v4si_ftype_pcint,
7862 ALTIVEC_BUILTIN_LD_INTERNAL_4si);
7863 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si,
7864 ALTIVEC_BUILTIN_ST_INTERNAL_4si);
7865 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pcshort,
7866 ALTIVEC_BUILTIN_LD_INTERNAL_8hi);
7867 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi,
7868 ALTIVEC_BUILTIN_ST_INTERNAL_8hi);
7869 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pcchar,
7870 ALTIVEC_BUILTIN_LD_INTERNAL_16qi);
7871 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi,
7872 ALTIVEC_BUILTIN_ST_INTERNAL_16qi);
7873 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mtvscr", void_ftype_v4si, ALTIVEC_BUILTIN_MTVSCR);
7874 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mfvscr", v8hi_ftype_void, ALTIVEC_BUILTIN_MFVSCR);
7875 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dssall", void_ftype_void, ALTIVEC_BUILTIN_DSSALL);
7876 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dss", void_ftype_int, ALTIVEC_BUILTIN_DSS);
7877 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVSL);
7878 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsr", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVSR);
7879 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvebx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEBX);
7880 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvehx", v8hi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEHX);
7881 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvewx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEWX);
7882 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvxl", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVXL);
7883 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVX);
7884 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvx", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVX);
7885 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvewx", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVEWX);
7886 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvxl", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVXL);
7887 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvebx", void_ftype_v16qi_long_pvoid, ALTIVEC_BUILTIN_STVEBX);
7888 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvehx", void_ftype_v8hi_long_pvoid, ALTIVEC_BUILTIN_STVEHX);
7890 /* See altivec.h for usage of "__builtin_altivec_compiletime_error". */
7891 def_builtin (MASK_ALTIVEC, "__builtin_altivec_compiletime_error", int_ftype_pcchar,
7892 ALTIVEC_BUILTIN_COMPILETIME_ERROR);
7894 /* Add the DST variants. */
7895 d = (struct builtin_description *) bdesc_dst;
7896 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
7897 def_builtin (d->mask, d->name, void_ftype_pcvoid_int_int, d->code);
7899 /* Initialize the predicates. */
7900 dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
7901 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
7903 enum machine_mode mode1;
7906 mode1 = insn_data[dp->icode].operand[1].mode;
7911 type = int_ftype_int_v4si_v4si;
7914 type = int_ftype_int_v8hi_v8hi;
7917 type = int_ftype_int_v16qi_v16qi;
7920 type = int_ftype_int_v4sf_v4sf;
7926 def_builtin (dp->mask, dp->name, type, dp->code);
7929 /* Initialize the abs* operators. */
7930 d = (struct builtin_description *) bdesc_abs;
7931 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
7933 enum machine_mode mode0;
7936 mode0 = insn_data[d->icode].operand[0].mode;
7941 type = v4si_ftype_v4si;
7944 type = v8hi_ftype_v8hi;
7947 type = v16qi_ftype_v16qi;
7950 type = v4sf_ftype_v4sf;
7956 def_builtin (d->mask, d->name, type, d->code);
7963 /* Initialize target builtin that implements
7964 targetm.vectorize.builtin_mask_for_load. */
7966 decl = lang_hooks.builtin_function ("__builtin_altivec_mask_for_load",
7967 v16qi_ftype_long_pcvoid,
7968 ALTIVEC_BUILTIN_MASK_FOR_LOAD,
7969 BUILT_IN_MD, NULL, NULL_TREE);
7970 /* Record the decl. Will be used by rs6000_builtin_mask_for_load. */
7971 altivec_builtin_mask_for_load = decl;
7974 /* Initialize target builtin that implements
7975 targetm.vectorize.builtin_mask_for_store. */
7977 decl = lang_hooks.builtin_function ("__builtin_altivec_mask_for_store",
7978 v16qi_ftype_long_pcvoid,
7979 ALTIVEC_BUILTIN_MASK_FOR_STORE,
7980 BUILT_IN_MD, NULL, NULL_TREE);
7981 /* Record the decl. Will be used by rs6000_builtin_mask_for_store. */
7982 altivec_builtin_mask_for_store = decl;
7987 rs6000_common_init_builtins (void)
7989 struct builtin_description *d;
7992 tree v4sf_ftype_v4sf_v4sf_v16qi
7993 = build_function_type_list (V4SF_type_node,
7994 V4SF_type_node, V4SF_type_node,
7995 V16QI_type_node, NULL_TREE);
7996 tree v4si_ftype_v4si_v4si_v16qi
7997 = build_function_type_list (V4SI_type_node,
7998 V4SI_type_node, V4SI_type_node,
7999 V16QI_type_node, NULL_TREE);
8000 tree v8hi_ftype_v8hi_v8hi_v16qi
8001 = build_function_type_list (V8HI_type_node,
8002 V8HI_type_node, V8HI_type_node,
8003 V16QI_type_node, NULL_TREE);
8004 tree v16qi_ftype_v16qi_v16qi_v16qi
8005 = build_function_type_list (V16QI_type_node,
8006 V16QI_type_node, V16QI_type_node,
8007 V16QI_type_node, NULL_TREE);
8009 = build_function_type_list (V4SI_type_node, integer_type_node, NULL_TREE);
8011 = build_function_type_list (V8HI_type_node, integer_type_node, NULL_TREE);
8012 tree v16qi_ftype_int
8013 = build_function_type_list (V16QI_type_node, integer_type_node, NULL_TREE);
8014 tree v8hi_ftype_v16qi
8015 = build_function_type_list (V8HI_type_node, V16QI_type_node, NULL_TREE);
8016 tree v4sf_ftype_v4sf
8017 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
8019 tree v2si_ftype_v2si_v2si
8020 = build_function_type_list (opaque_V2SI_type_node,
8021 opaque_V2SI_type_node,
8022 opaque_V2SI_type_node, NULL_TREE);
8024 tree v2sf_ftype_v2sf_v2sf
8025 = build_function_type_list (opaque_V2SF_type_node,
8026 opaque_V2SF_type_node,
8027 opaque_V2SF_type_node, NULL_TREE);
8029 tree v2si_ftype_int_int
8030 = build_function_type_list (opaque_V2SI_type_node,
8031 integer_type_node, integer_type_node,
8034 tree v2si_ftype_v2si
8035 = build_function_type_list (opaque_V2SI_type_node,
8036 opaque_V2SI_type_node, NULL_TREE);
8038 tree v2sf_ftype_v2sf
8039 = build_function_type_list (opaque_V2SF_type_node,
8040 opaque_V2SF_type_node, NULL_TREE);
8042 tree v2sf_ftype_v2si
8043 = build_function_type_list (opaque_V2SF_type_node,
8044 opaque_V2SI_type_node, NULL_TREE);
8046 tree v2si_ftype_v2sf
8047 = build_function_type_list (opaque_V2SI_type_node,
8048 opaque_V2SF_type_node, NULL_TREE);
8050 tree v2si_ftype_v2si_char
8051 = build_function_type_list (opaque_V2SI_type_node,
8052 opaque_V2SI_type_node,
8053 char_type_node, NULL_TREE);
8055 tree v2si_ftype_int_char
8056 = build_function_type_list (opaque_V2SI_type_node,
8057 integer_type_node, char_type_node, NULL_TREE);
8059 tree v2si_ftype_char
8060 = build_function_type_list (opaque_V2SI_type_node,
8061 char_type_node, NULL_TREE);
8063 tree int_ftype_int_int
8064 = build_function_type_list (integer_type_node,
8065 integer_type_node, integer_type_node,
8068 tree v4si_ftype_v4si_v4si
8069 = build_function_type_list (V4SI_type_node,
8070 V4SI_type_node, V4SI_type_node, NULL_TREE);
8071 tree v4sf_ftype_v4si_int
8072 = build_function_type_list (V4SF_type_node,
8073 V4SI_type_node, integer_type_node, NULL_TREE);
8074 tree v4si_ftype_v4sf_int
8075 = build_function_type_list (V4SI_type_node,
8076 V4SF_type_node, integer_type_node, NULL_TREE);
8077 tree v4si_ftype_v4si_int
8078 = build_function_type_list (V4SI_type_node,
8079 V4SI_type_node, integer_type_node, NULL_TREE);
8080 tree v8hi_ftype_v8hi_int
8081 = build_function_type_list (V8HI_type_node,
8082 V8HI_type_node, integer_type_node, NULL_TREE);
8083 tree v16qi_ftype_v16qi_int
8084 = build_function_type_list (V16QI_type_node,
8085 V16QI_type_node, integer_type_node, NULL_TREE);
8086 tree v16qi_ftype_v16qi_v16qi_int
8087 = build_function_type_list (V16QI_type_node,
8088 V16QI_type_node, V16QI_type_node,
8089 integer_type_node, NULL_TREE);
8090 tree v8hi_ftype_v8hi_v8hi_int
8091 = build_function_type_list (V8HI_type_node,
8092 V8HI_type_node, V8HI_type_node,
8093 integer_type_node, NULL_TREE);
8094 tree v4si_ftype_v4si_v4si_int
8095 = build_function_type_list (V4SI_type_node,
8096 V4SI_type_node, V4SI_type_node,
8097 integer_type_node, NULL_TREE);
8098 tree v4sf_ftype_v4sf_v4sf_int
8099 = build_function_type_list (V4SF_type_node,
8100 V4SF_type_node, V4SF_type_node,
8101 integer_type_node, NULL_TREE);
8102 tree v4sf_ftype_v4sf_v4sf
8103 = build_function_type_list (V4SF_type_node,
8104 V4SF_type_node, V4SF_type_node, NULL_TREE);
8105 tree v4sf_ftype_v4sf_v4sf_v4si
8106 = build_function_type_list (V4SF_type_node,
8107 V4SF_type_node, V4SF_type_node,
8108 V4SI_type_node, NULL_TREE);
8109 tree v4sf_ftype_v4sf_v4sf_v4sf
8110 = build_function_type_list (V4SF_type_node,
8111 V4SF_type_node, V4SF_type_node,
8112 V4SF_type_node, NULL_TREE);
8113 tree v4si_ftype_v4si_v4si_v4si
8114 = build_function_type_list (V4SI_type_node,
8115 V4SI_type_node, V4SI_type_node,
8116 V4SI_type_node, NULL_TREE);
8117 tree v8hi_ftype_v8hi_v8hi
8118 = build_function_type_list (V8HI_type_node,
8119 V8HI_type_node, V8HI_type_node, NULL_TREE);
8120 tree v8hi_ftype_v8hi_v8hi_v8hi
8121 = build_function_type_list (V8HI_type_node,
8122 V8HI_type_node, V8HI_type_node,
8123 V8HI_type_node, NULL_TREE);
8124 tree v4si_ftype_v8hi_v8hi_v4si
8125 = build_function_type_list (V4SI_type_node,
8126 V8HI_type_node, V8HI_type_node,
8127 V4SI_type_node, NULL_TREE);
8128 tree v4si_ftype_v16qi_v16qi_v4si
8129 = build_function_type_list (V4SI_type_node,
8130 V16QI_type_node, V16QI_type_node,
8131 V4SI_type_node, NULL_TREE);
8132 tree v16qi_ftype_v16qi_v16qi
8133 = build_function_type_list (V16QI_type_node,
8134 V16QI_type_node, V16QI_type_node, NULL_TREE);
8135 tree v4si_ftype_v4sf_v4sf
8136 = build_function_type_list (V4SI_type_node,
8137 V4SF_type_node, V4SF_type_node, NULL_TREE);
8138 tree v8hi_ftype_v16qi_v16qi
8139 = build_function_type_list (V8HI_type_node,
8140 V16QI_type_node, V16QI_type_node, NULL_TREE);
8141 tree v4si_ftype_v8hi_v8hi
8142 = build_function_type_list (V4SI_type_node,
8143 V8HI_type_node, V8HI_type_node, NULL_TREE);
8144 tree v8hi_ftype_v4si_v4si
8145 = build_function_type_list (V8HI_type_node,
8146 V4SI_type_node, V4SI_type_node, NULL_TREE);
8147 tree v16qi_ftype_v8hi_v8hi
8148 = build_function_type_list (V16QI_type_node,
8149 V8HI_type_node, V8HI_type_node, NULL_TREE);
8150 tree v4si_ftype_v16qi_v4si
8151 = build_function_type_list (V4SI_type_node,
8152 V16QI_type_node, V4SI_type_node, NULL_TREE);
8153 tree v4si_ftype_v16qi_v16qi
8154 = build_function_type_list (V4SI_type_node,
8155 V16QI_type_node, V16QI_type_node, NULL_TREE);
8156 tree v4si_ftype_v8hi_v4si
8157 = build_function_type_list (V4SI_type_node,
8158 V8HI_type_node, V4SI_type_node, NULL_TREE);
8159 tree v4si_ftype_v8hi
8160 = build_function_type_list (V4SI_type_node, V8HI_type_node, NULL_TREE);
8161 tree int_ftype_v4si_v4si
8162 = build_function_type_list (integer_type_node,
8163 V4SI_type_node, V4SI_type_node, NULL_TREE);
8164 tree int_ftype_v4sf_v4sf
8165 = build_function_type_list (integer_type_node,
8166 V4SF_type_node, V4SF_type_node, NULL_TREE);
8167 tree int_ftype_v16qi_v16qi
8168 = build_function_type_list (integer_type_node,
8169 V16QI_type_node, V16QI_type_node, NULL_TREE);
8170 tree int_ftype_v8hi_v8hi
8171 = build_function_type_list (integer_type_node,
8172 V8HI_type_node, V8HI_type_node, NULL_TREE);
8174 /* Add the simple ternary operators. */
8175 d = (struct builtin_description *) bdesc_3arg;
8176 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
8179 enum machine_mode mode0, mode1, mode2, mode3;
8182 if (d->name == 0 || d->icode == CODE_FOR_nothing)
8185 mode0 = insn_data[d->icode].operand[0].mode;
8186 mode1 = insn_data[d->icode].operand[1].mode;
8187 mode2 = insn_data[d->icode].operand[2].mode;
8188 mode3 = insn_data[d->icode].operand[3].mode;
8190 /* When all four are of the same mode. */
8191 if (mode0 == mode1 && mode1 == mode2 && mode2 == mode3)
8196 type = v4si_ftype_v4si_v4si_v4si;
8199 type = v4sf_ftype_v4sf_v4sf_v4sf;
8202 type = v8hi_ftype_v8hi_v8hi_v8hi;
8205 type = v16qi_ftype_v16qi_v16qi_v16qi;
8211 else if (mode0 == mode1 && mode1 == mode2 && mode3 == V16QImode)
8216 type = v4si_ftype_v4si_v4si_v16qi;
8219 type = v4sf_ftype_v4sf_v4sf_v16qi;
8222 type = v8hi_ftype_v8hi_v8hi_v16qi;
8225 type = v16qi_ftype_v16qi_v16qi_v16qi;
8231 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode
8232 && mode3 == V4SImode)
8233 type = v4si_ftype_v16qi_v16qi_v4si;
8234 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode
8235 && mode3 == V4SImode)
8236 type = v4si_ftype_v8hi_v8hi_v4si;
8237 else if (mode0 == V4SFmode && mode1 == V4SFmode && mode2 == V4SFmode
8238 && mode3 == V4SImode)
8239 type = v4sf_ftype_v4sf_v4sf_v4si;
8241 /* vchar, vchar, vchar, 4 bit literal. */
8242 else if (mode0 == V16QImode && mode1 == mode0 && mode2 == mode0
8244 type = v16qi_ftype_v16qi_v16qi_int;
8246 /* vshort, vshort, vshort, 4 bit literal. */
8247 else if (mode0 == V8HImode && mode1 == mode0 && mode2 == mode0
8249 type = v8hi_ftype_v8hi_v8hi_int;
8251 /* vint, vint, vint, 4 bit literal. */
8252 else if (mode0 == V4SImode && mode1 == mode0 && mode2 == mode0
8254 type = v4si_ftype_v4si_v4si_int;
8256 /* vfloat, vfloat, vfloat, 4 bit literal. */
8257 else if (mode0 == V4SFmode && mode1 == mode0 && mode2 == mode0
8259 type = v4sf_ftype_v4sf_v4sf_int;
8264 def_builtin (d->mask, d->name, type, d->code);
8267 /* Add the simple binary operators. */
8268 d = (struct builtin_description *) bdesc_2arg;
8269 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
8271 enum machine_mode mode0, mode1, mode2;
8274 if (d->name == 0 || d->icode == CODE_FOR_nothing)
8277 mode0 = insn_data[d->icode].operand[0].mode;
8278 mode1 = insn_data[d->icode].operand[1].mode;
8279 mode2 = insn_data[d->icode].operand[2].mode;
8281 /* When all three operands are of the same mode. */
8282 if (mode0 == mode1 && mode1 == mode2)
8287 type = v4sf_ftype_v4sf_v4sf;
8290 type = v4si_ftype_v4si_v4si;
8293 type = v16qi_ftype_v16qi_v16qi;
8296 type = v8hi_ftype_v8hi_v8hi;
8299 type = v2si_ftype_v2si_v2si;
8302 type = v2sf_ftype_v2sf_v2sf;
8305 type = int_ftype_int_int;
8312 /* A few other combos we really don't want to do manually. */
8314 /* vint, vfloat, vfloat. */
8315 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == V4SFmode)
8316 type = v4si_ftype_v4sf_v4sf;
8318 /* vshort, vchar, vchar. */
8319 else if (mode0 == V8HImode && mode1 == V16QImode && mode2 == V16QImode)
8320 type = v8hi_ftype_v16qi_v16qi;
8322 /* vint, vshort, vshort. */
8323 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode)
8324 type = v4si_ftype_v8hi_v8hi;
8326 /* vshort, vint, vint. */
8327 else if (mode0 == V8HImode && mode1 == V4SImode && mode2 == V4SImode)
8328 type = v8hi_ftype_v4si_v4si;
8330 /* vchar, vshort, vshort. */
8331 else if (mode0 == V16QImode && mode1 == V8HImode && mode2 == V8HImode)
8332 type = v16qi_ftype_v8hi_v8hi;
8334 /* vint, vchar, vint. */
8335 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V4SImode)
8336 type = v4si_ftype_v16qi_v4si;
8338 /* vint, vchar, vchar. */
8339 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode)
8340 type = v4si_ftype_v16qi_v16qi;
8342 /* vint, vshort, vint. */
8343 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V4SImode)
8344 type = v4si_ftype_v8hi_v4si;
8346 /* vint, vint, 5 bit literal. */
8347 else if (mode0 == V4SImode && mode1 == V4SImode && mode2 == QImode)
8348 type = v4si_ftype_v4si_int;
8350 /* vshort, vshort, 5 bit literal. */
8351 else if (mode0 == V8HImode && mode1 == V8HImode && mode2 == QImode)
8352 type = v8hi_ftype_v8hi_int;
8354 /* vchar, vchar, 5 bit literal. */
8355 else if (mode0 == V16QImode && mode1 == V16QImode && mode2 == QImode)
8356 type = v16qi_ftype_v16qi_int;
8358 /* vfloat, vint, 5 bit literal. */
8359 else if (mode0 == V4SFmode && mode1 == V4SImode && mode2 == QImode)
8360 type = v4sf_ftype_v4si_int;
8362 /* vint, vfloat, 5 bit literal. */
8363 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == QImode)
8364 type = v4si_ftype_v4sf_int;
8366 else if (mode0 == V2SImode && mode1 == SImode && mode2 == SImode)
8367 type = v2si_ftype_int_int;
8369 else if (mode0 == V2SImode && mode1 == V2SImode && mode2 == QImode)
8370 type = v2si_ftype_v2si_char;
8372 else if (mode0 == V2SImode && mode1 == SImode && mode2 == QImode)
8373 type = v2si_ftype_int_char;
8376 else if (mode0 == SImode)
8381 type = int_ftype_v4si_v4si;
8384 type = int_ftype_v4sf_v4sf;
8387 type = int_ftype_v16qi_v16qi;
8390 type = int_ftype_v8hi_v8hi;
8400 def_builtin (d->mask, d->name, type, d->code);
8403 /* Add the simple unary operators. */
8404 d = (struct builtin_description *) bdesc_1arg;
8405 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
8407 enum machine_mode mode0, mode1;
8410 if (d->name == 0 || d->icode == CODE_FOR_nothing)
8413 mode0 = insn_data[d->icode].operand[0].mode;
8414 mode1 = insn_data[d->icode].operand[1].mode;
8416 if (mode0 == V4SImode && mode1 == QImode)
8417 type = v4si_ftype_int;
8418 else if (mode0 == V8HImode && mode1 == QImode)
8419 type = v8hi_ftype_int;
8420 else if (mode0 == V16QImode && mode1 == QImode)
8421 type = v16qi_ftype_int;
8422 else if (mode0 == V4SFmode && mode1 == V4SFmode)
8423 type = v4sf_ftype_v4sf;
8424 else if (mode0 == V8HImode && mode1 == V16QImode)
8425 type = v8hi_ftype_v16qi;
8426 else if (mode0 == V4SImode && mode1 == V8HImode)
8427 type = v4si_ftype_v8hi;
8428 else if (mode0 == V2SImode && mode1 == V2SImode)
8429 type = v2si_ftype_v2si;
8430 else if (mode0 == V2SFmode && mode1 == V2SFmode)
8431 type = v2sf_ftype_v2sf;
8432 else if (mode0 == V2SFmode && mode1 == V2SImode)
8433 type = v2sf_ftype_v2si;
8434 else if (mode0 == V2SImode && mode1 == V2SFmode)
8435 type = v2si_ftype_v2sf;
8436 else if (mode0 == V2SImode && mode1 == QImode)
8437 type = v2si_ftype_char;
8441 def_builtin (d->mask, d->name, type, d->code);
8446 rs6000_init_libfuncs (void)
8448 if (!TARGET_HARD_FLOAT)
8451 if (DEFAULT_ABI != ABI_V4)
8453 if (TARGET_XCOFF && ! TARGET_POWER2 && ! TARGET_POWERPC)
8455 /* AIX library routines for float->int conversion. */
8456 set_conv_libfunc (sfix_optab, SImode, DFmode, "__itrunc");
8457 set_conv_libfunc (ufix_optab, SImode, DFmode, "__uitrunc");
8458 set_conv_libfunc (sfix_optab, SImode, TFmode, "_qitrunc");
8459 set_conv_libfunc (ufix_optab, SImode, TFmode, "_quitrunc");
8462 /* Standard AIX/Darwin/64-bit SVR4 quad floating point routines. */
8463 set_optab_libfunc (add_optab, TFmode, "_xlqadd");
8464 set_optab_libfunc (sub_optab, TFmode, "_xlqsub");
8465 set_optab_libfunc (smul_optab, TFmode, "_xlqmul");
8466 set_optab_libfunc (sdiv_optab, TFmode, "_xlqdiv");
8470 /* 32-bit SVR4 quad floating point routines. */
8472 set_optab_libfunc (add_optab, TFmode, "_q_add");
8473 set_optab_libfunc (sub_optab, TFmode, "_q_sub");
8474 set_optab_libfunc (neg_optab, TFmode, "_q_neg");
8475 set_optab_libfunc (smul_optab, TFmode, "_q_mul");
8476 set_optab_libfunc (sdiv_optab, TFmode, "_q_div");
8477 if (TARGET_PPC_GPOPT || TARGET_POWER2)
8478 set_optab_libfunc (sqrt_optab, TFmode, "_q_sqrt");
8480 set_optab_libfunc (eq_optab, TFmode, "_q_feq");
8481 set_optab_libfunc (ne_optab, TFmode, "_q_fne");
8482 set_optab_libfunc (gt_optab, TFmode, "_q_fgt");
8483 set_optab_libfunc (ge_optab, TFmode, "_q_fge");
8484 set_optab_libfunc (lt_optab, TFmode, "_q_flt");
8485 set_optab_libfunc (le_optab, TFmode, "_q_fle");
8487 set_conv_libfunc (sext_optab, TFmode, SFmode, "_q_stoq");
8488 set_conv_libfunc (sext_optab, TFmode, DFmode, "_q_dtoq");
8489 set_conv_libfunc (trunc_optab, SFmode, TFmode, "_q_qtos");
8490 set_conv_libfunc (trunc_optab, DFmode, TFmode, "_q_qtod");
8491 set_conv_libfunc (sfix_optab, SImode, TFmode, "_q_qtoi");
8492 set_conv_libfunc (ufix_optab, SImode, TFmode, "_q_qtou");
8493 set_conv_libfunc (sfloat_optab, TFmode, SImode, "_q_itoq");
8498 /* Expand a block clear operation, and return 1 if successful. Return 0
8499 if we should let the compiler generate normal code.
8501 operands[0] is the destination
8502 operands[1] is the length
8503 operands[2] is the alignment */
8506 expand_block_clear (rtx operands[])
8508 rtx orig_dest = operands[0];
8509 rtx bytes_rtx = operands[1];
8510 rtx align_rtx = operands[2];
8511 bool constp = (GET_CODE (bytes_rtx) == CONST_INT);
8512 HOST_WIDE_INT align;
8513 HOST_WIDE_INT bytes;
8518 /* If this is not a fixed size move, just call memcpy */
8522 /* If this is not a fixed size alignment, abort */
8523 if (GET_CODE (align_rtx) != CONST_INT)
8525 align = INTVAL (align_rtx) * BITS_PER_UNIT;
8527 /* Anything to clear? */
8528 bytes = INTVAL (bytes_rtx);
8532 /* Use the builtin memset after a point, to avoid huge code bloat.
8533 When optimize_size, avoid any significant code bloat; calling
8534 memset is about 4 instructions, so allow for one instruction to
8535 load zero and three to do clearing. */
8536 if (TARGET_ALTIVEC && align >= 128)
8538 else if (TARGET_POWERPC64 && align >= 32)
8543 if (optimize_size && bytes > 3 * clear_step)
8545 if (! optimize_size && bytes > 8 * clear_step)
8548 for (offset = 0; bytes > 0; offset += clear_bytes, bytes -= clear_bytes)
8550 enum machine_mode mode = BLKmode;
8553 if (bytes >= 16 && TARGET_ALTIVEC && align >= 128)
8558 else if (bytes >= 8 && TARGET_POWERPC64
8559 /* 64-bit loads and stores require word-aligned
8561 && (align >= 64 || (!STRICT_ALIGNMENT && align >= 32)))
8566 else if (bytes >= 4 && (align >= 32 || !STRICT_ALIGNMENT))
8567 { /* move 4 bytes */
8571 else if (bytes == 2 && (align >= 16 || !STRICT_ALIGNMENT))
8572 { /* move 2 bytes */
8576 else /* move 1 byte at a time */
8582 dest = adjust_address (orig_dest, mode, offset);
8584 emit_move_insn (dest, CONST0_RTX (mode));
8591 /* Expand a block move operation, and return 1 if successful. Return 0
8592 if we should let the compiler generate normal code.
8594 operands[0] is the destination
8595 operands[1] is the source
8596 operands[2] is the length
8597 operands[3] is the alignment */
8599 #define MAX_MOVE_REG 4
8602 expand_block_move (rtx operands[])
8604 rtx orig_dest = operands[0];
8605 rtx orig_src = operands[1];
8606 rtx bytes_rtx = operands[2];
8607 rtx align_rtx = operands[3];
8608 int constp = (GET_CODE (bytes_rtx) == CONST_INT);
8613 rtx stores[MAX_MOVE_REG];
8616 /* If this is not a fixed size move, just call memcpy */
8620 /* If this is not a fixed size alignment, abort */
8621 if (GET_CODE (align_rtx) != CONST_INT)
8623 align = INTVAL (align_rtx) * BITS_PER_UNIT;
8625 /* Anything to move? */
8626 bytes = INTVAL (bytes_rtx);
8630 /* store_one_arg depends on expand_block_move to handle at least the size of
8631 reg_parm_stack_space. */
8632 if (bytes > (TARGET_POWERPC64 ? 64 : 32))
8635 for (offset = 0; bytes > 0; offset += move_bytes, bytes -= move_bytes)
8638 rtx (*movmemsi) (rtx, rtx, rtx, rtx);
8639 rtx (*mov) (rtx, rtx);
8641 enum machine_mode mode = BLKmode;
8644 /* Altivec first, since it will be faster than a string move
8645 when it applies, and usually not significantly larger. */
8646 if (TARGET_ALTIVEC && bytes >= 16 && align >= 128)
8650 gen_func.mov = gen_movv4si;
8652 else if (TARGET_STRING
8653 && bytes > 24 /* move up to 32 bytes at a time */
8661 && ! fixed_regs[12])
8663 move_bytes = (bytes > 32) ? 32 : bytes;
8664 gen_func.movmemsi = gen_movmemsi_8reg;
8666 else if (TARGET_STRING
8667 && bytes > 16 /* move up to 24 bytes at a time */
8673 && ! fixed_regs[10])
8675 move_bytes = (bytes > 24) ? 24 : bytes;
8676 gen_func.movmemsi = gen_movmemsi_6reg;
8678 else if (TARGET_STRING
8679 && bytes > 8 /* move up to 16 bytes at a time */
8685 move_bytes = (bytes > 16) ? 16 : bytes;
8686 gen_func.movmemsi = gen_movmemsi_4reg;
8688 else if (bytes >= 8 && TARGET_POWERPC64
8689 /* 64-bit loads and stores require word-aligned
8691 && (align >= 64 || (!STRICT_ALIGNMENT && align >= 32)))
8695 gen_func.mov = gen_movdi;
8697 else if (TARGET_STRING && bytes > 4 && !TARGET_POWERPC64)
8698 { /* move up to 8 bytes at a time */
8699 move_bytes = (bytes > 8) ? 8 : bytes;
8700 gen_func.movmemsi = gen_movmemsi_2reg;
8702 else if (bytes >= 4 && (align >= 32 || !STRICT_ALIGNMENT))
8703 { /* move 4 bytes */
8706 gen_func.mov = gen_movsi;
8708 else if (bytes == 2 && (align >= 16 || !STRICT_ALIGNMENT))
8709 { /* move 2 bytes */
8712 gen_func.mov = gen_movhi;
8714 else if (TARGET_STRING && bytes > 1)
8715 { /* move up to 4 bytes at a time */
8716 move_bytes = (bytes > 4) ? 4 : bytes;
8717 gen_func.movmemsi = gen_movmemsi_1reg;
8719 else /* move 1 byte at a time */
8723 gen_func.mov = gen_movqi;
8726 src = adjust_address (orig_src, mode, offset);
8727 dest = adjust_address (orig_dest, mode, offset);
8729 if (mode != BLKmode)
8731 rtx tmp_reg = gen_reg_rtx (mode);
8733 emit_insn ((*gen_func.mov) (tmp_reg, src));
8734 stores[num_reg++] = (*gen_func.mov) (dest, tmp_reg);
8737 if (mode == BLKmode || num_reg >= MAX_MOVE_REG || bytes == move_bytes)
8740 for (i = 0; i < num_reg; i++)
8741 emit_insn (stores[i]);
8745 if (mode == BLKmode)
8747 /* Move the address into scratch registers. The movmemsi
8748 patterns require zero offset. */
8749 if (!REG_P (XEXP (src, 0)))
8751 rtx src_reg = copy_addr_to_reg (XEXP (src, 0));
8752 src = replace_equiv_address (src, src_reg);
8754 set_mem_size (src, GEN_INT (move_bytes));
8756 if (!REG_P (XEXP (dest, 0)))
8758 rtx dest_reg = copy_addr_to_reg (XEXP (dest, 0));
8759 dest = replace_equiv_address (dest, dest_reg);
8761 set_mem_size (dest, GEN_INT (move_bytes));
8763 emit_insn ((*gen_func.movmemsi) (dest, src,
8764 GEN_INT (move_bytes & 31),
8773 /* Return 1 if OP is suitable for a save_world call in prologue. It is
8774 known to be a PARALLEL. */
8776 save_world_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8781 int count = XVECLEN (op, 0);
8787 if (GET_CODE (XVECEXP (op, 0, index++)) != CLOBBER
8788 || GET_CODE (XVECEXP (op, 0, index++)) != USE)
8791 for (i=1; i <= 18; i++)
8793 elt = XVECEXP (op, 0, index++);
8794 if (GET_CODE (elt) != SET
8795 || GET_CODE (SET_DEST (elt)) != MEM
8796 || ! memory_operand (SET_DEST (elt), DFmode)
8797 || GET_CODE (SET_SRC (elt)) != REG
8798 || GET_MODE (SET_SRC (elt)) != DFmode)
8802 for (i=1; i <= 12; i++)
8804 elt = XVECEXP (op, 0, index++);
8805 if (GET_CODE (elt) != SET
8806 || GET_CODE (SET_DEST (elt)) != MEM
8807 || GET_CODE (SET_SRC (elt)) != REG
8808 || GET_MODE (SET_SRC (elt)) != V4SImode)
8812 for (i=1; i <= 19; i++)
8814 elt = XVECEXP (op, 0, index++);
8815 if (GET_CODE (elt) != SET
8816 || GET_CODE (SET_DEST (elt)) != MEM
8817 || ! memory_operand (SET_DEST (elt), Pmode)
8818 || GET_CODE (SET_SRC (elt)) != REG
8819 || GET_MODE (SET_SRC (elt)) != Pmode)
8823 elt = XVECEXP (op, 0, index++);
8824 if (GET_CODE (elt) != SET
8825 || GET_CODE (SET_DEST (elt)) != MEM
8826 || ! memory_operand (SET_DEST (elt), Pmode)
8827 || GET_CODE (SET_SRC (elt)) != REG
8828 || REGNO (SET_SRC (elt)) != CR2_REGNO
8829 || GET_MODE (SET_SRC (elt)) != Pmode)
8832 if (GET_CODE (XVECEXP (op, 0, index++)) != USE
8833 || GET_CODE (XVECEXP (op, 0, index++)) != USE
8834 || GET_CODE (XVECEXP (op, 0, index++)) != CLOBBER)
8839 /* Return 1 if OP is suitable for a save_world call in prologue. It is
8840 known to be a PARALLEL. */
8842 restore_world_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8847 int count = XVECLEN (op, 0);
8853 if (GET_CODE (XVECEXP (op, 0, index++)) != RETURN
8854 || GET_CODE (XVECEXP (op, 0, index++)) != USE
8855 || GET_CODE (XVECEXP (op, 0, index++)) != USE
8856 || GET_CODE (XVECEXP (op, 0, index++)) != CLOBBER)
8859 elt = XVECEXP (op, 0, index++);
8860 if (GET_CODE (elt) != SET
8861 || GET_CODE (SET_SRC (elt)) != MEM
8862 || ! memory_operand (SET_SRC (elt), Pmode)
8863 || GET_CODE (SET_DEST (elt)) != REG
8864 || REGNO (SET_DEST (elt)) != CR2_REGNO
8865 || GET_MODE (SET_DEST (elt)) != Pmode)
8868 for (i=1; i <= 19; i++)
8870 elt = XVECEXP (op, 0, index++);
8871 if (GET_CODE (elt) != SET
8872 || GET_CODE (SET_SRC (elt)) != MEM
8873 || ! memory_operand (SET_SRC (elt), Pmode)
8874 || GET_CODE (SET_DEST (elt)) != REG
8875 || GET_MODE (SET_DEST (elt)) != Pmode)
8879 for (i=1; i <= 12; i++)
8881 elt = XVECEXP (op, 0, index++);
8882 if (GET_CODE (elt) != SET
8883 || GET_CODE (SET_SRC (elt)) != MEM
8884 || GET_CODE (SET_DEST (elt)) != REG
8885 || GET_MODE (SET_DEST (elt)) != V4SImode)
8889 for (i=1; i <= 18; i++)
8891 elt = XVECEXP (op, 0, index++);
8892 if (GET_CODE (elt) != SET
8893 || GET_CODE (SET_SRC (elt)) != MEM
8894 || ! memory_operand (SET_SRC (elt), DFmode)
8895 || GET_CODE (SET_DEST (elt)) != REG
8896 || GET_MODE (SET_DEST (elt)) != DFmode)
8900 if (GET_CODE (XVECEXP (op, 0, index++)) != CLOBBER
8901 || GET_CODE (XVECEXP (op, 0, index++)) != CLOBBER
8902 || GET_CODE (XVECEXP (op, 0, index++)) != CLOBBER
8903 || GET_CODE (XVECEXP (op, 0, index++)) != CLOBBER
8904 || GET_CODE (XVECEXP (op, 0, index++)) != USE)
8910 /* Return 1 if OP is a load multiple operation. It is known to be a
8911 PARALLEL and the first section will be tested. */
8914 load_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8916 int count = XVECLEN (op, 0);
8917 unsigned int dest_regno;
8921 /* Perform a quick check so we don't blow up below. */
8923 || GET_CODE (XVECEXP (op, 0, 0)) != SET
8924 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
8925 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
8928 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
8929 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
8931 for (i = 1; i < count; i++)
8933 rtx elt = XVECEXP (op, 0, i);
8935 if (GET_CODE (elt) != SET
8936 || GET_CODE (SET_DEST (elt)) != REG
8937 || GET_MODE (SET_DEST (elt)) != SImode
8938 || REGNO (SET_DEST (elt)) != dest_regno + i
8939 || GET_CODE (SET_SRC (elt)) != MEM
8940 || GET_MODE (SET_SRC (elt)) != SImode
8941 || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
8942 || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
8943 || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
8944 || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != i * 4)
8951 /* Similar, but tests for store multiple. Here, the second vector element
8952 is a CLOBBER. It will be tested later. */
8955 store_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8957 int count = XVECLEN (op, 0) - 1;
8958 unsigned int src_regno;
8962 /* Perform a quick check so we don't blow up below. */
8964 || GET_CODE (XVECEXP (op, 0, 0)) != SET
8965 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
8966 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
8969 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
8970 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
8972 for (i = 1; i < count; i++)
8974 rtx elt = XVECEXP (op, 0, i + 1);
8976 if (GET_CODE (elt) != SET
8977 || GET_CODE (SET_SRC (elt)) != REG
8978 || GET_MODE (SET_SRC (elt)) != SImode
8979 || REGNO (SET_SRC (elt)) != src_regno + i
8980 || GET_CODE (SET_DEST (elt)) != MEM
8981 || GET_MODE (SET_DEST (elt)) != SImode
8982 || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
8983 || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
8984 || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
8985 || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != i * 4)
8992 /* Return a string to perform a load_multiple operation.
8993 operands[0] is the vector.
8994 operands[1] is the source address.
8995 operands[2] is the first destination register. */
8998 rs6000_output_load_multiple (rtx operands[3])
9000 /* We have to handle the case where the pseudo used to contain the address
9001 is assigned to one of the output registers. */
9003 int words = XVECLEN (operands[0], 0);
9006 if (XVECLEN (operands[0], 0) == 1)
9007 return "{l|lwz} %2,0(%1)";
9009 for (i = 0; i < words; i++)
9010 if (refers_to_regno_p (REGNO (operands[2]) + i,
9011 REGNO (operands[2]) + i + 1, operands[1], 0))
9015 xop[0] = GEN_INT (4 * (words-1));
9016 xop[1] = operands[1];
9017 xop[2] = operands[2];
9018 output_asm_insn ("{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,%0(%1)", xop);
9023 xop[0] = GEN_INT (4 * (words-1));
9024 xop[1] = operands[1];
9025 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
9026 output_asm_insn ("{cal %1,4(%1)|addi %1,%1,4}\n\t{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,-4(%1)", xop);
9031 for (j = 0; j < words; j++)
9034 xop[0] = GEN_INT (j * 4);
9035 xop[1] = operands[1];
9036 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + j);
9037 output_asm_insn ("{l|lwz} %2,%0(%1)", xop);
9039 xop[0] = GEN_INT (i * 4);
9040 xop[1] = operands[1];
9041 output_asm_insn ("{l|lwz} %1,%0(%1)", xop);
9046 return "{lsi|lswi} %2,%1,%N0";
9049 /* Return 1 for a parallel vrsave operation. */
9052 vrsave_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
9054 int count = XVECLEN (op, 0);
9055 unsigned int dest_regno, src_regno;
9059 || GET_CODE (XVECEXP (op, 0, 0)) != SET
9060 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
9061 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC_VOLATILE)
9064 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
9065 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
9067 if (dest_regno != VRSAVE_REGNO
9068 && src_regno != VRSAVE_REGNO)
9071 for (i = 1; i < count; i++)
9073 rtx elt = XVECEXP (op, 0, i);
9075 if (GET_CODE (elt) != CLOBBER
9076 && GET_CODE (elt) != SET)
9083 /* Return 1 for an PARALLEL suitable for mfcr. */
9086 mfcr_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
9088 int count = XVECLEN (op, 0);
9091 /* Perform a quick check so we don't blow up below. */
9093 || GET_CODE (XVECEXP (op, 0, 0)) != SET
9094 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC
9095 || XVECLEN (SET_SRC (XVECEXP (op, 0, 0)), 0) != 2)
9098 for (i = 0; i < count; i++)
9100 rtx exp = XVECEXP (op, 0, i);
9105 src_reg = XVECEXP (SET_SRC (exp), 0, 0);
9107 if (GET_CODE (src_reg) != REG
9108 || GET_MODE (src_reg) != CCmode
9109 || ! CR_REGNO_P (REGNO (src_reg)))
9112 if (GET_CODE (exp) != SET
9113 || GET_CODE (SET_DEST (exp)) != REG
9114 || GET_MODE (SET_DEST (exp)) != SImode
9115 || ! INT_REGNO_P (REGNO (SET_DEST (exp))))
9117 unspec = SET_SRC (exp);
9118 maskval = 1 << (MAX_CR_REGNO - REGNO (src_reg));
9120 if (GET_CODE (unspec) != UNSPEC
9121 || XINT (unspec, 1) != UNSPEC_MOVESI_FROM_CR
9122 || XVECLEN (unspec, 0) != 2
9123 || XVECEXP (unspec, 0, 0) != src_reg
9124 || GET_CODE (XVECEXP (unspec, 0, 1)) != CONST_INT
9125 || INTVAL (XVECEXP (unspec, 0, 1)) != maskval)
9131 /* Return 1 for an PARALLEL suitable for mtcrf. */
9134 mtcrf_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
9136 int count = XVECLEN (op, 0);
9140 /* Perform a quick check so we don't blow up below. */
9142 || GET_CODE (XVECEXP (op, 0, 0)) != SET
9143 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC
9144 || XVECLEN (SET_SRC (XVECEXP (op, 0, 0)), 0) != 2)
9146 src_reg = XVECEXP (SET_SRC (XVECEXP (op, 0, 0)), 0, 0);
9148 if (GET_CODE (src_reg) != REG
9149 || GET_MODE (src_reg) != SImode
9150 || ! INT_REGNO_P (REGNO (src_reg)))
9153 for (i = 0; i < count; i++)
9155 rtx exp = XVECEXP (op, 0, i);
9159 if (GET_CODE (exp) != SET
9160 || GET_CODE (SET_DEST (exp)) != REG
9161 || GET_MODE (SET_DEST (exp)) != CCmode
9162 || ! CR_REGNO_P (REGNO (SET_DEST (exp))))
9164 unspec = SET_SRC (exp);
9165 maskval = 1 << (MAX_CR_REGNO - REGNO (SET_DEST (exp)));
9167 if (GET_CODE (unspec) != UNSPEC
9168 || XINT (unspec, 1) != UNSPEC_MOVESI_TO_CR
9169 || XVECLEN (unspec, 0) != 2
9170 || XVECEXP (unspec, 0, 0) != src_reg
9171 || GET_CODE (XVECEXP (unspec, 0, 1)) != CONST_INT
9172 || INTVAL (XVECEXP (unspec, 0, 1)) != maskval)
9178 /* Return 1 for an PARALLEL suitable for lmw. */
9181 lmw_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
9183 int count = XVECLEN (op, 0);
9184 unsigned int dest_regno;
9186 unsigned int base_regno;
9187 HOST_WIDE_INT offset;
9190 /* Perform a quick check so we don't blow up below. */
9192 || GET_CODE (XVECEXP (op, 0, 0)) != SET
9193 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
9194 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
9197 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
9198 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
9201 || count != 32 - (int) dest_regno)
9204 if (legitimate_indirect_address_p (src_addr, 0))
9207 base_regno = REGNO (src_addr);
9208 if (base_regno == 0)
9211 else if (rs6000_legitimate_offset_address_p (SImode, src_addr, 0))
9213 offset = INTVAL (XEXP (src_addr, 1));
9214 base_regno = REGNO (XEXP (src_addr, 0));
9219 for (i = 0; i < count; i++)
9221 rtx elt = XVECEXP (op, 0, i);
9224 HOST_WIDE_INT newoffset;
9226 if (GET_CODE (elt) != SET
9227 || GET_CODE (SET_DEST (elt)) != REG
9228 || GET_MODE (SET_DEST (elt)) != SImode
9229 || REGNO (SET_DEST (elt)) != dest_regno + i
9230 || GET_CODE (SET_SRC (elt)) != MEM
9231 || GET_MODE (SET_SRC (elt)) != SImode)
9233 newaddr = XEXP (SET_SRC (elt), 0);
9234 if (legitimate_indirect_address_p (newaddr, 0))
9239 else if (rs6000_legitimate_offset_address_p (SImode, newaddr, 0))
9241 addr_reg = XEXP (newaddr, 0);
9242 newoffset = INTVAL (XEXP (newaddr, 1));
9246 if (REGNO (addr_reg) != base_regno
9247 || newoffset != offset + 4 * i)
9254 /* Return 1 for an PARALLEL suitable for stmw. */
9257 stmw_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
9259 int count = XVECLEN (op, 0);
9260 unsigned int src_regno;
9262 unsigned int base_regno;
9263 HOST_WIDE_INT offset;
9266 /* Perform a quick check so we don't blow up below. */
9268 || GET_CODE (XVECEXP (op, 0, 0)) != SET
9269 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
9270 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
9273 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
9274 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
9277 || count != 32 - (int) src_regno)
9280 if (legitimate_indirect_address_p (dest_addr, 0))
9283 base_regno = REGNO (dest_addr);
9284 if (base_regno == 0)
9287 else if (rs6000_legitimate_offset_address_p (SImode, dest_addr, 0))
9289 offset = INTVAL (XEXP (dest_addr, 1));
9290 base_regno = REGNO (XEXP (dest_addr, 0));
9295 for (i = 0; i < count; i++)
9297 rtx elt = XVECEXP (op, 0, i);
9300 HOST_WIDE_INT newoffset;
9302 if (GET_CODE (elt) != SET
9303 || GET_CODE (SET_SRC (elt)) != REG
9304 || GET_MODE (SET_SRC (elt)) != SImode
9305 || REGNO (SET_SRC (elt)) != src_regno + i
9306 || GET_CODE (SET_DEST (elt)) != MEM
9307 || GET_MODE (SET_DEST (elt)) != SImode)
9309 newaddr = XEXP (SET_DEST (elt), 0);
9310 if (legitimate_indirect_address_p (newaddr, 0))
9315 else if (rs6000_legitimate_offset_address_p (SImode, newaddr, 0))
9317 addr_reg = XEXP (newaddr, 0);
9318 newoffset = INTVAL (XEXP (newaddr, 1));
9322 if (REGNO (addr_reg) != base_regno
9323 || newoffset != offset + 4 * i)
9330 /* A validation routine: say whether CODE, a condition code, and MODE
9331 match. The other alternatives either don't make sense or should
9332 never be generated. */
9335 validate_condition_mode (enum rtx_code code, enum machine_mode mode)
9337 if ((GET_RTX_CLASS (code) != RTX_COMPARE
9338 && GET_RTX_CLASS (code) != RTX_COMM_COMPARE)
9339 || GET_MODE_CLASS (mode) != MODE_CC)
9342 /* These don't make sense. */
9343 if ((code == GT || code == LT || code == GE || code == LE)
9344 && mode == CCUNSmode)
9347 if ((code == GTU || code == LTU || code == GEU || code == LEU)
9348 && mode != CCUNSmode)
9351 if (mode != CCFPmode
9352 && (code == ORDERED || code == UNORDERED
9353 || code == UNEQ || code == LTGT
9354 || code == UNGT || code == UNLT
9355 || code == UNGE || code == UNLE))
9358 /* These should never be generated except for
9359 flag_finite_math_only. */
9360 if (mode == CCFPmode
9361 && ! flag_finite_math_only
9362 && (code == LE || code == GE
9363 || code == UNEQ || code == LTGT
9364 || code == UNGT || code == UNLT))
9367 /* These are invalid; the information is not there. */
9368 if (mode == CCEQmode
9369 && code != EQ && code != NE)
9373 /* Return 1 if OP is a comparison operation that is valid for a branch insn.
9374 We only check the opcode against the mode of the CC value here. */
9377 branch_comparison_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
9379 enum rtx_code code = GET_CODE (op);
9380 enum machine_mode cc_mode;
9382 if (!COMPARISON_P (op))
9385 cc_mode = GET_MODE (XEXP (op, 0));
9386 if (GET_MODE_CLASS (cc_mode) != MODE_CC)
9389 validate_condition_mode (code, cc_mode);
9394 /* Return 1 if OP is a comparison operation that is valid for a branch
9395 insn and which is true if the corresponding bit in the CC register
9399 branch_positive_comparison_operator (rtx op, enum machine_mode mode)
9403 if (! branch_comparison_operator (op, mode))
9406 code = GET_CODE (op);
9407 return (code == EQ || code == LT || code == GT
9408 || code == LTU || code == GTU
9409 || code == UNORDERED);
9412 /* Return 1 if OP is a comparison operation that is valid for an scc
9413 insn: it must be a positive comparison. */
9416 scc_comparison_operator (rtx op, enum machine_mode mode)
9418 return branch_positive_comparison_operator (op, mode);
9422 trap_comparison_operator (rtx op, enum machine_mode mode)
9424 if (mode != VOIDmode && mode != GET_MODE (op))
9426 return COMPARISON_P (op);
9430 boolean_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
9432 enum rtx_code code = GET_CODE (op);
9433 return (code == AND || code == IOR || code == XOR);
9437 boolean_or_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
9439 enum rtx_code code = GET_CODE (op);
9440 return (code == IOR || code == XOR);
9444 min_max_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
9446 enum rtx_code code = GET_CODE (op);
9447 return (code == SMIN || code == SMAX || code == UMIN || code == UMAX);
9450 /* Return 1 if ANDOP is a mask that has no bits on that are not in the
9451 mask required to convert the result of a rotate insn into a shift
9452 left insn of SHIFTOP bits. Both are known to be SImode CONST_INT. */
9455 includes_lshift_p (rtx shiftop, rtx andop)
9457 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
9459 shift_mask <<= INTVAL (shiftop);
9461 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
9464 /* Similar, but for right shift. */
9467 includes_rshift_p (rtx shiftop, rtx andop)
9469 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
9471 shift_mask >>= INTVAL (shiftop);
9473 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
9476 /* Return 1 if ANDOP is a mask suitable for use with an rldic insn
9477 to perform a left shift. It must have exactly SHIFTOP least
9478 significant 0's, then one or more 1's, then zero or more 0's. */
9481 includes_rldic_lshift_p (rtx shiftop, rtx andop)
9483 if (GET_CODE (andop) == CONST_INT)
9485 HOST_WIDE_INT c, lsb, shift_mask;
9488 if (c == 0 || c == ~0)
9492 shift_mask <<= INTVAL (shiftop);
9494 /* Find the least significant one bit. */
9497 /* It must coincide with the LSB of the shift mask. */
9498 if (-lsb != shift_mask)
9501 /* Invert to look for the next transition (if any). */
9504 /* Remove the low group of ones (originally low group of zeros). */
9507 /* Again find the lsb, and check we have all 1's above. */
9511 else if (GET_CODE (andop) == CONST_DOUBLE
9512 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
9514 HOST_WIDE_INT low, high, lsb;
9515 HOST_WIDE_INT shift_mask_low, shift_mask_high;
9517 low = CONST_DOUBLE_LOW (andop);
9518 if (HOST_BITS_PER_WIDE_INT < 64)
9519 high = CONST_DOUBLE_HIGH (andop);
9521 if ((low == 0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == 0))
9522 || (low == ~0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0)))
9525 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
9527 shift_mask_high = ~0;
9528 if (INTVAL (shiftop) > 32)
9529 shift_mask_high <<= INTVAL (shiftop) - 32;
9533 if (-lsb != shift_mask_high || INTVAL (shiftop) < 32)
9540 return high == -lsb;
9543 shift_mask_low = ~0;
9544 shift_mask_low <<= INTVAL (shiftop);
9548 if (-lsb != shift_mask_low)
9551 if (HOST_BITS_PER_WIDE_INT < 64)
9556 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
9559 return high == -lsb;
9563 return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
9569 /* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
9570 to perform a left shift. It must have SHIFTOP or more least
9571 significant 0's, with the remainder of the word 1's. */
9574 includes_rldicr_lshift_p (rtx shiftop, rtx andop)
9576 if (GET_CODE (andop) == CONST_INT)
9578 HOST_WIDE_INT c, lsb, shift_mask;
9581 shift_mask <<= INTVAL (shiftop);
9584 /* Find the least significant one bit. */
9587 /* It must be covered by the shift mask.
9588 This test also rejects c == 0. */
9589 if ((lsb & shift_mask) == 0)
9592 /* Check we have all 1's above the transition, and reject all 1's. */
9593 return c == -lsb && lsb != 1;
9595 else if (GET_CODE (andop) == CONST_DOUBLE
9596 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
9598 HOST_WIDE_INT low, lsb, shift_mask_low;
9600 low = CONST_DOUBLE_LOW (andop);
9602 if (HOST_BITS_PER_WIDE_INT < 64)
9604 HOST_WIDE_INT high, shift_mask_high;
9606 high = CONST_DOUBLE_HIGH (andop);
9610 shift_mask_high = ~0;
9611 if (INTVAL (shiftop) > 32)
9612 shift_mask_high <<= INTVAL (shiftop) - 32;
9616 if ((lsb & shift_mask_high) == 0)
9619 return high == -lsb;
9625 shift_mask_low = ~0;
9626 shift_mask_low <<= INTVAL (shiftop);
9630 if ((lsb & shift_mask_low) == 0)
9633 return low == -lsb && lsb != 1;
9639 /* Return 1 if operands will generate a valid arguments to rlwimi
9640 instruction for insert with right shift in 64-bit mode. The mask may
9641 not start on the first bit or stop on the last bit because wrap-around
9642 effects of instruction do not correspond to semantics of RTL insn. */
9645 insvdi_rshift_rlwimi_p (rtx sizeop, rtx startop, rtx shiftop)
9647 if (INTVAL (startop) < 64
9648 && INTVAL (startop) > 32
9649 && (INTVAL (sizeop) + INTVAL (startop) < 64)
9650 && (INTVAL (sizeop) + INTVAL (startop) > 33)
9651 && (INTVAL (sizeop) + INTVAL (startop) + INTVAL (shiftop) < 96)
9652 && (INTVAL (sizeop) + INTVAL (startop) + INTVAL (shiftop) >= 64)
9653 && (64 - (INTVAL (shiftop) & 63)) >= INTVAL (sizeop))
9659 /* Return 1 if REGNO (reg1) == REGNO (reg2) - 1 making them candidates
9660 for lfq and stfq insns iff the registers are hard registers. */
9663 registers_ok_for_quad_peep (rtx reg1, rtx reg2)
9665 /* We might have been passed a SUBREG. */
9666 if (GET_CODE (reg1) != REG || GET_CODE (reg2) != REG)
9669 /* We might have been passed non floating point registers. */
9670 if (!FP_REGNO_P (REGNO (reg1))
9671 || !FP_REGNO_P (REGNO (reg2)))
9674 return (REGNO (reg1) == REGNO (reg2) - 1);
9677 /* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
9678 addr1 and addr2 must be in consecutive memory locations
9679 (addr2 == addr1 + 8). */
9682 mems_ok_for_quad_peep (rtx mem1, rtx mem2)
9688 /* The mems cannot be volatile. */
9689 if (MEM_VOLATILE_P (mem1) || MEM_VOLATILE_P (mem2))
9692 addr1 = XEXP (mem1, 0);
9693 addr2 = XEXP (mem2, 0);
9695 /* Extract an offset (if used) from the first addr. */
9696 if (GET_CODE (addr1) == PLUS)
9698 /* If not a REG, return zero. */
9699 if (GET_CODE (XEXP (addr1, 0)) != REG)
9703 reg1 = REGNO (XEXP (addr1, 0));
9704 /* The offset must be constant! */
9705 if (GET_CODE (XEXP (addr1, 1)) != CONST_INT)
9707 offset1 = INTVAL (XEXP (addr1, 1));
9710 else if (GET_CODE (addr1) != REG)
9714 reg1 = REGNO (addr1);
9715 /* This was a simple (mem (reg)) expression. Offset is 0. */
9719 /* Make sure the second address is a (mem (plus (reg) (const_int)))
9720 or if it is (mem (reg)) then make sure that offset1 is -8 and the same
9721 register as addr1. */
9722 if (offset1 == -8 && GET_CODE (addr2) == REG && reg1 == REGNO (addr2))
9724 if (GET_CODE (addr2) != PLUS)
9727 if (GET_CODE (XEXP (addr2, 0)) != REG
9728 || GET_CODE (XEXP (addr2, 1)) != CONST_INT)
9731 if (reg1 != REGNO (XEXP (addr2, 0)))
9734 /* The offset for the second addr must be 8 more than the first addr. */
9735 if (INTVAL (XEXP (addr2, 1)) != offset1 + 8)
9738 /* All the tests passed. addr1 and addr2 are valid for lfq or stfq
9743 /* Return the register class of a scratch register needed to copy IN into
9744 or out of a register in CLASS in MODE. If it can be done directly,
9745 NO_REGS is returned. */
9748 secondary_reload_class (enum reg_class class,
9749 enum machine_mode mode ATTRIBUTE_UNUSED,
9754 if (TARGET_ELF || (DEFAULT_ABI == ABI_DARWIN
9756 && MACHOPIC_INDIRECT
9760 /* We cannot copy a symbolic operand directly into anything
9761 other than BASE_REGS for TARGET_ELF. So indicate that a
9762 register from BASE_REGS is needed as an intermediate
9765 On Darwin, pic addresses require a load from memory, which
9766 needs a base register. */
9767 if (class != BASE_REGS
9768 && (GET_CODE (in) == SYMBOL_REF
9769 || GET_CODE (in) == HIGH
9770 || GET_CODE (in) == LABEL_REF
9771 || GET_CODE (in) == CONST))
9775 if (GET_CODE (in) == REG)
9778 if (regno >= FIRST_PSEUDO_REGISTER)
9780 regno = true_regnum (in);
9781 if (regno >= FIRST_PSEUDO_REGISTER)
9785 else if (GET_CODE (in) == SUBREG)
9787 regno = true_regnum (in);
9788 if (regno >= FIRST_PSEUDO_REGISTER)
9794 /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
9796 if (class == GENERAL_REGS || class == BASE_REGS
9797 || (regno >= 0 && INT_REGNO_P (regno)))
9800 /* Constants, memory, and FP registers can go into FP registers. */
9801 if ((regno == -1 || FP_REGNO_P (regno))
9802 && (class == FLOAT_REGS || class == NON_SPECIAL_REGS))
9805 /* Memory, and AltiVec registers can go into AltiVec registers. */
9806 if ((regno == -1 || ALTIVEC_REGNO_P (regno))
9807 && class == ALTIVEC_REGS)
9810 /* We can copy among the CR registers. */
9811 if ((class == CR_REGS || class == CR0_REGS)
9812 && regno >= 0 && CR_REGNO_P (regno))
9815 /* Otherwise, we need GENERAL_REGS. */
9816 return GENERAL_REGS;
9819 /* Given a comparison operation, return the bit number in CCR to test. We
9820 know this is a valid comparison.
9822 SCC_P is 1 if this is for an scc. That means that %D will have been
9823 used instead of %C, so the bits will be in different places.
9825 Return -1 if OP isn't a valid comparison for some reason. */
9828 ccr_bit (rtx op, int scc_p)
9830 enum rtx_code code = GET_CODE (op);
9831 enum machine_mode cc_mode;
9836 if (!COMPARISON_P (op))
9841 if (GET_CODE (reg) != REG
9842 || ! CR_REGNO_P (REGNO (reg)))
9845 cc_mode = GET_MODE (reg);
9846 cc_regnum = REGNO (reg);
9847 base_bit = 4 * (cc_regnum - CR0_REGNO);
9849 validate_condition_mode (code, cc_mode);
9851 /* When generating a sCOND operation, only positive conditions are
9853 if (scc_p && code != EQ && code != GT && code != LT && code != UNORDERED
9854 && code != GTU && code != LTU)
9860 return scc_p ? base_bit + 3 : base_bit + 2;
9862 return base_bit + 2;
9863 case GT: case GTU: case UNLE:
9864 return base_bit + 1;
9865 case LT: case LTU: case UNGE:
9867 case ORDERED: case UNORDERED:
9868 return base_bit + 3;
9871 /* If scc, we will have done a cror to put the bit in the
9872 unordered position. So test that bit. For integer, this is ! LT
9873 unless this is an scc insn. */
9874 return scc_p ? base_bit + 3 : base_bit;
9877 return scc_p ? base_bit + 3 : base_bit + 1;
9884 /* Return the GOT register. */
9887 rs6000_got_register (rtx value ATTRIBUTE_UNUSED)
9889 /* The second flow pass currently (June 1999) can't update
9890 regs_ever_live without disturbing other parts of the compiler, so
9891 update it here to make the prolog/epilogue code happy. */
9892 if (no_new_pseudos && ! regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM])
9893 regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
9895 current_function_uses_pic_offset_table = 1;
9897 return pic_offset_table_rtx;
9900 /* Function to init struct machine_function.
9901 This will be called, via a pointer variable,
9902 from push_function_context. */
9904 static struct machine_function *
9905 rs6000_init_machine_status (void)
9907 return ggc_alloc_cleared (sizeof (machine_function));
9910 /* These macros test for integers and extract the low-order bits. */
9912 ((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE) \
9913 && GET_MODE (X) == VOIDmode)
9915 #define INT_LOWPART(X) \
9916 (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
9922 unsigned long val = INT_LOWPART (op);
9924 /* If the high bit is zero, the value is the first 1 bit we find
9926 if ((val & 0x80000000) == 0)
9928 if ((val & 0xffffffff) == 0)
9932 while (((val <<= 1) & 0x80000000) == 0)
9937 /* If the high bit is set and the low bit is not, or the mask is all
9938 1's, the value is zero. */
9939 if ((val & 1) == 0 || (val & 0xffffffff) == 0xffffffff)
9942 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
9945 while (((val >>= 1) & 1) != 0)
9955 unsigned long val = INT_LOWPART (op);
9957 /* If the low bit is zero, the value is the first 1 bit we find from
9961 if ((val & 0xffffffff) == 0)
9965 while (((val >>= 1) & 1) == 0)
9971 /* If the low bit is set and the high bit is not, or the mask is all
9972 1's, the value is 31. */
9973 if ((val & 0x80000000) == 0 || (val & 0xffffffff) == 0xffffffff)
9976 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
9979 while (((val <<= 1) & 0x80000000) != 0)
9985 /* Locate some local-dynamic symbol still in use by this function
9986 so that we can print its name in some tls_ld pattern. */
9989 rs6000_get_some_local_dynamic_name (void)
9993 if (cfun->machine->some_ld_name)
9994 return cfun->machine->some_ld_name;
9996 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
9998 && for_each_rtx (&PATTERN (insn),
9999 rs6000_get_some_local_dynamic_name_1, 0))
10000 return cfun->machine->some_ld_name;
10005 /* Helper function for rs6000_get_some_local_dynamic_name. */
10008 rs6000_get_some_local_dynamic_name_1 (rtx *px, void *data ATTRIBUTE_UNUSED)
10012 if (GET_CODE (x) == SYMBOL_REF)
10014 const char *str = XSTR (x, 0);
10015 if (SYMBOL_REF_TLS_MODEL (x) == TLS_MODEL_LOCAL_DYNAMIC)
10017 cfun->machine->some_ld_name = str;
10025 /* Write out a function code label. */
10028 rs6000_output_function_entry (FILE *file, const char *fname)
10030 if (fname[0] != '.')
10032 switch (DEFAULT_ABI)
10041 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "L.");
10050 RS6000_OUTPUT_BASENAME (file, fname);
10052 assemble_name (file, fname);
10055 /* Print an operand. Recognize special options, documented below. */
10058 #define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
10059 #define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
10061 #define SMALL_DATA_RELOC "sda21"
10062 #define SMALL_DATA_REG 0
10066 print_operand (FILE *file, rtx x, int code)
10070 unsigned HOST_WIDE_INT uval;
10075 /* Write out an instruction after the call which may be replaced
10076 with glue code by the loader. This depends on the AIX version. */
10077 asm_fprintf (file, RS6000_CALL_GLUE);
10080 /* %a is output_address. */
10083 /* If X is a constant integer whose low-order 5 bits are zero,
10084 write 'l'. Otherwise, write 'r'. This is a kludge to fix a bug
10085 in the AIX assembler where "sri" with a zero shift count
10086 writes a trash instruction. */
10087 if (GET_CODE (x) == CONST_INT && (INTVAL (x) & 31) == 0)
10094 /* If constant, low-order 16 bits of constant, unsigned.
10095 Otherwise, write normally. */
10097 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 0xffff);
10099 print_operand (file, x, 0);
10103 /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
10104 for 64-bit mask direction. */
10105 putc (((INT_LOWPART(x) & 1) == 0 ? 'r' : 'l'), file);
10108 /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
10112 /* X is a CR register. Print the number of the GT bit of the CR. */
10113 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
10114 output_operand_lossage ("invalid %%E value");
10116 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 1);
10120 /* Like 'J' but get to the EQ bit. */
10121 if (GET_CODE (x) != REG)
10124 /* Bit 1 is EQ bit. */
10125 i = 4 * (REGNO (x) - CR0_REGNO) + 2;
10127 /* If we want bit 31, write a shift count of zero, not 32. */
10128 fprintf (file, "%d", i == 31 ? 0 : i + 1);
10132 /* X is a CR register. Print the number of the EQ bit of the CR */
10133 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
10134 output_operand_lossage ("invalid %%E value");
10136 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 2);
10140 /* X is a CR register. Print the shift count needed to move it
10141 to the high-order four bits. */
10142 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
10143 output_operand_lossage ("invalid %%f value");
10145 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO));
10149 /* Similar, but print the count for the rotate in the opposite
10151 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
10152 output_operand_lossage ("invalid %%F value");
10154 fprintf (file, "%d", 32 - 4 * (REGNO (x) - CR0_REGNO));
10158 /* X is a constant integer. If it is negative, print "m",
10159 otherwise print "z". This is to make an aze or ame insn. */
10160 if (GET_CODE (x) != CONST_INT)
10161 output_operand_lossage ("invalid %%G value");
10162 else if (INTVAL (x) >= 0)
10169 /* If constant, output low-order five bits. Otherwise, write
10172 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 31);
10174 print_operand (file, x, 0);
10178 /* If constant, output low-order six bits. Otherwise, write
10181 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 63);
10183 print_operand (file, x, 0);
10187 /* Print `i' if this is a constant, else nothing. */
10193 /* Write the bit number in CCR for jump. */
10194 i = ccr_bit (x, 0);
10196 output_operand_lossage ("invalid %%j code");
10198 fprintf (file, "%d", i);
10202 /* Similar, but add one for shift count in rlinm for scc and pass
10203 scc flag to `ccr_bit'. */
10204 i = ccr_bit (x, 1);
10206 output_operand_lossage ("invalid %%J code");
10208 /* If we want bit 31, write a shift count of zero, not 32. */
10209 fprintf (file, "%d", i == 31 ? 0 : i + 1);
10213 /* X must be a constant. Write the 1's complement of the
10216 output_operand_lossage ("invalid %%k value");
10218 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~ INT_LOWPART (x));
10222 /* X must be a symbolic constant on ELF. Write an
10223 expression suitable for an 'addi' that adds in the low 16
10224 bits of the MEM. */
10225 if (GET_CODE (x) != CONST)
10227 print_operand_address (file, x);
10228 fputs ("@l", file);
10232 if (GET_CODE (XEXP (x, 0)) != PLUS
10233 || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
10234 && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
10235 || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
10236 output_operand_lossage ("invalid %%K value");
10237 print_operand_address (file, XEXP (XEXP (x, 0), 0));
10238 fputs ("@l", file);
10239 /* For GNU as, there must be a non-alphanumeric character
10240 between 'l' and the number. The '-' is added by
10241 print_operand() already. */
10242 if (INTVAL (XEXP (XEXP (x, 0), 1)) >= 0)
10244 print_operand (file, XEXP (XEXP (x, 0), 1), 0);
10248 /* %l is output_asm_label. */
10251 /* Write second word of DImode or DFmode reference. Works on register
10252 or non-indexed memory only. */
10253 if (GET_CODE (x) == REG)
10254 fputs (reg_names[REGNO (x) + 1], file);
10255 else if (GET_CODE (x) == MEM)
10257 /* Handle possible auto-increment. Since it is pre-increment and
10258 we have already done it, we can just use an offset of word. */
10259 if (GET_CODE (XEXP (x, 0)) == PRE_INC
10260 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
10261 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
10264 output_address (XEXP (adjust_address_nv (x, SImode,
10268 if (small_data_operand (x, GET_MODE (x)))
10269 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
10270 reg_names[SMALL_DATA_REG]);
10275 /* MB value for a mask operand. */
10276 if (! mask_operand (x, SImode))
10277 output_operand_lossage ("invalid %%m value");
10279 fprintf (file, "%d", extract_MB (x));
10283 /* ME value for a mask operand. */
10284 if (! mask_operand (x, SImode))
10285 output_operand_lossage ("invalid %%M value");
10287 fprintf (file, "%d", extract_ME (x));
10290 /* %n outputs the negative of its operand. */
10293 /* Write the number of elements in the vector times 4. */
10294 if (GET_CODE (x) != PARALLEL)
10295 output_operand_lossage ("invalid %%N value");
10297 fprintf (file, "%d", XVECLEN (x, 0) * 4);
10301 /* Similar, but subtract 1 first. */
10302 if (GET_CODE (x) != PARALLEL)
10303 output_operand_lossage ("invalid %%O value");
10305 fprintf (file, "%d", (XVECLEN (x, 0) - 1) * 4);
10309 /* X is a CONST_INT that is a power of two. Output the logarithm. */
10311 || INT_LOWPART (x) < 0
10312 || (i = exact_log2 (INT_LOWPART (x))) < 0)
10313 output_operand_lossage ("invalid %%p value");
10315 fprintf (file, "%d", i);
10319 /* The operand must be an indirect memory reference. The result
10320 is the register name. */
10321 if (GET_CODE (x) != MEM || GET_CODE (XEXP (x, 0)) != REG
10322 || REGNO (XEXP (x, 0)) >= 32)
10323 output_operand_lossage ("invalid %%P value");
10325 fputs (reg_names[REGNO (XEXP (x, 0))], file);
10329 /* This outputs the logical code corresponding to a boolean
10330 expression. The expression may have one or both operands
10331 negated (if one, only the first one). For condition register
10332 logical operations, it will also treat the negated
10333 CR codes as NOTs, but not handle NOTs of them. */
10335 const char *const *t = 0;
10337 enum rtx_code code = GET_CODE (x);
10338 static const char * const tbl[3][3] = {
10339 { "and", "andc", "nor" },
10340 { "or", "orc", "nand" },
10341 { "xor", "eqv", "xor" } };
10345 else if (code == IOR)
10347 else if (code == XOR)
10350 output_operand_lossage ("invalid %%q value");
10352 if (GET_CODE (XEXP (x, 0)) != NOT)
10356 if (GET_CODE (XEXP (x, 1)) == NOT)
10374 /* X is a CR register. Print the mask for `mtcrf'. */
10375 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
10376 output_operand_lossage ("invalid %%R value");
10378 fprintf (file, "%d", 128 >> (REGNO (x) - CR0_REGNO));
10382 /* Low 5 bits of 32 - value */
10384 output_operand_lossage ("invalid %%s value");
10386 fprintf (file, HOST_WIDE_INT_PRINT_DEC, (32 - INT_LOWPART (x)) & 31);
10390 /* PowerPC64 mask position. All 0's is excluded.
10391 CONST_INT 32-bit mask is considered sign-extended so any
10392 transition must occur within the CONST_INT, not on the boundary. */
10393 if (! mask64_operand (x, DImode))
10394 output_operand_lossage ("invalid %%S value");
10396 uval = INT_LOWPART (x);
10398 if (uval & 1) /* Clear Left */
10400 #if HOST_BITS_PER_WIDE_INT > 64
10401 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
10405 else /* Clear Right */
10408 #if HOST_BITS_PER_WIDE_INT > 64
10409 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
10417 fprintf (file, "%d", i);
10421 /* Like 'J' but get to the OVERFLOW/UNORDERED bit. */
10422 if (GET_CODE (x) != REG || GET_MODE (x) != CCmode)
10425 /* Bit 3 is OV bit. */
10426 i = 4 * (REGNO (x) - CR0_REGNO) + 3;
10428 /* If we want bit 31, write a shift count of zero, not 32. */
10429 fprintf (file, "%d", i == 31 ? 0 : i + 1);
10433 /* Print the symbolic name of a branch target register. */
10434 if (GET_CODE (x) != REG || (REGNO (x) != LINK_REGISTER_REGNUM
10435 && REGNO (x) != COUNT_REGISTER_REGNUM))
10436 output_operand_lossage ("invalid %%T value");
10437 else if (REGNO (x) == LINK_REGISTER_REGNUM)
10438 fputs (TARGET_NEW_MNEMONICS ? "lr" : "r", file);
10440 fputs ("ctr", file);
10444 /* High-order 16 bits of constant for use in unsigned operand. */
10446 output_operand_lossage ("invalid %%u value");
10448 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
10449 (INT_LOWPART (x) >> 16) & 0xffff);
10453 /* High-order 16 bits of constant for use in signed operand. */
10455 output_operand_lossage ("invalid %%v value");
10457 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
10458 (INT_LOWPART (x) >> 16) & 0xffff);
10462 /* Print `u' if this has an auto-increment or auto-decrement. */
10463 if (GET_CODE (x) == MEM
10464 && (GET_CODE (XEXP (x, 0)) == PRE_INC
10465 || GET_CODE (XEXP (x, 0)) == PRE_DEC))
10470 /* Print the trap code for this operand. */
10471 switch (GET_CODE (x))
10474 fputs ("eq", file); /* 4 */
10477 fputs ("ne", file); /* 24 */
10480 fputs ("lt", file); /* 16 */
10483 fputs ("le", file); /* 20 */
10486 fputs ("gt", file); /* 8 */
10489 fputs ("ge", file); /* 12 */
10492 fputs ("llt", file); /* 2 */
10495 fputs ("lle", file); /* 6 */
10498 fputs ("lgt", file); /* 1 */
10501 fputs ("lge", file); /* 5 */
10509 /* If constant, low-order 16 bits of constant, signed. Otherwise, write
10512 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
10513 ((INT_LOWPART (x) & 0xffff) ^ 0x8000) - 0x8000);
10515 print_operand (file, x, 0);
10519 /* MB value for a PowerPC64 rldic operand. */
10520 val = (GET_CODE (x) == CONST_INT
10521 ? INTVAL (x) : CONST_DOUBLE_HIGH (x));
10526 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
10527 if ((val <<= 1) < 0)
10530 #if HOST_BITS_PER_WIDE_INT == 32
10531 if (GET_CODE (x) == CONST_INT && i >= 0)
10532 i += 32; /* zero-extend high-part was all 0's */
10533 else if (GET_CODE (x) == CONST_DOUBLE && i == 32)
10535 val = CONST_DOUBLE_LOW (x);
10542 for ( ; i < 64; i++)
10543 if ((val <<= 1) < 0)
10548 fprintf (file, "%d", i + 1);
10552 if (GET_CODE (x) == MEM
10553 && legitimate_indexed_address_p (XEXP (x, 0), 0))
10558 /* Like 'L', for third word of TImode */
10559 if (GET_CODE (x) == REG)
10560 fputs (reg_names[REGNO (x) + 2], file);
10561 else if (GET_CODE (x) == MEM)
10563 if (GET_CODE (XEXP (x, 0)) == PRE_INC
10564 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
10565 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
10567 output_address (XEXP (adjust_address_nv (x, SImode, 8), 0));
10568 if (small_data_operand (x, GET_MODE (x)))
10569 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
10570 reg_names[SMALL_DATA_REG]);
10575 /* X is a SYMBOL_REF. Write out the name preceded by a
10576 period and without any trailing data in brackets. Used for function
10577 names. If we are configured for System V (or the embedded ABI) on
10578 the PowerPC, do not emit the period, since those systems do not use
10579 TOCs and the like. */
10580 if (GET_CODE (x) != SYMBOL_REF)
10583 /* Mark the decl as referenced so that cgraph will output the
10585 if (SYMBOL_REF_DECL (x))
10586 mark_decl_referenced (SYMBOL_REF_DECL (x));
10588 /* For macho, check to see if we need a stub. */
10591 const char *name = XSTR (x, 0);
10593 if (MACHOPIC_INDIRECT
10594 && machopic_classify_symbol (x) == MACHOPIC_UNDEFINED_FUNCTION)
10595 name = machopic_indirection_name (x, /*stub_p=*/true);
10597 assemble_name (file, name);
10599 else if (!DOT_SYMBOLS)
10600 assemble_name (file, XSTR (x, 0));
10602 rs6000_output_function_entry (file, XSTR (x, 0));
10606 /* Like 'L', for last word of TImode. */
10607 if (GET_CODE (x) == REG)
10608 fputs (reg_names[REGNO (x) + 3], file);
10609 else if (GET_CODE (x) == MEM)
10611 if (GET_CODE (XEXP (x, 0)) == PRE_INC
10612 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
10613 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
10615 output_address (XEXP (adjust_address_nv (x, SImode, 12), 0));
10616 if (small_data_operand (x, GET_MODE (x)))
10617 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
10618 reg_names[SMALL_DATA_REG]);
10622 /* Print AltiVec or SPE memory operand. */
10627 if (GET_CODE (x) != MEM)
10634 /* Handle [reg]. */
10635 if (GET_CODE (tmp) == REG)
10637 fprintf (file, "0(%s)", reg_names[REGNO (tmp)]);
10640 /* Handle [reg+UIMM]. */
10641 else if (GET_CODE (tmp) == PLUS &&
10642 GET_CODE (XEXP (tmp, 1)) == CONST_INT)
10646 if (GET_CODE (XEXP (tmp, 0)) != REG)
10649 x = INTVAL (XEXP (tmp, 1));
10650 fprintf (file, "%d(%s)", x, reg_names[REGNO (XEXP (tmp, 0))]);
10654 /* Fall through. Must be [reg+reg]. */
10657 && GET_CODE (tmp) == AND
10658 && GET_CODE (XEXP (tmp, 1)) == CONST_INT
10659 && INTVAL (XEXP (tmp, 1)) == -16)
10660 tmp = XEXP (tmp, 0);
10661 if (GET_CODE (tmp) == REG)
10662 fprintf (file, "0,%s", reg_names[REGNO (tmp)]);
10663 else if (GET_CODE (tmp) == PLUS && GET_CODE (XEXP (tmp, 1)) == REG)
10665 if (REGNO (XEXP (tmp, 0)) == 0)
10666 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 1)) ],
10667 reg_names[ REGNO (XEXP (tmp, 0)) ]);
10669 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 0)) ],
10670 reg_names[ REGNO (XEXP (tmp, 1)) ]);
10678 if (GET_CODE (x) == REG)
10679 fprintf (file, "%s", reg_names[REGNO (x)]);
10680 else if (GET_CODE (x) == MEM)
10682 /* We need to handle PRE_INC and PRE_DEC here, since we need to
10683 know the width from the mode. */
10684 if (GET_CODE (XEXP (x, 0)) == PRE_INC)
10685 fprintf (file, "%d(%s)", GET_MODE_SIZE (GET_MODE (x)),
10686 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
10687 else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
10688 fprintf (file, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x)),
10689 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
10691 output_address (XEXP (x, 0));
10694 output_addr_const (file, x);
10698 assemble_name (file, rs6000_get_some_local_dynamic_name ());
10702 output_operand_lossage ("invalid %%xn code");
10706 /* Print the address of an operand. */
10709 print_operand_address (FILE *file, rtx x)
10711 if (GET_CODE (x) == REG)
10712 fprintf (file, "0(%s)", reg_names[ REGNO (x) ]);
10713 else if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST
10714 || GET_CODE (x) == LABEL_REF)
10716 output_addr_const (file, x);
10717 if (small_data_operand (x, GET_MODE (x)))
10718 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
10719 reg_names[SMALL_DATA_REG]);
10720 else if (TARGET_TOC)
10723 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG)
10725 if (REGNO (XEXP (x, 0)) == 0)
10726 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 1)) ],
10727 reg_names[ REGNO (XEXP (x, 0)) ]);
10729 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 0)) ],
10730 reg_names[ REGNO (XEXP (x, 1)) ]);
10732 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
10733 fprintf (file, HOST_WIDE_INT_PRINT_DEC "(%s)",
10734 INTVAL (XEXP (x, 1)), reg_names[ REGNO (XEXP (x, 0)) ]);
10736 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
10737 && CONSTANT_P (XEXP (x, 1)))
10739 output_addr_const (file, XEXP (x, 1));
10740 fprintf (file, "@l(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
10744 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
10745 && CONSTANT_P (XEXP (x, 1)))
10747 fprintf (file, "lo16(");
10748 output_addr_const (file, XEXP (x, 1));
10749 fprintf (file, ")(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
10752 else if (legitimate_constant_pool_address_p (x))
10754 if (TARGET_AIX && (!TARGET_ELF || !TARGET_MINIMAL_TOC))
10756 rtx contains_minus = XEXP (x, 1);
10760 /* Find the (minus (sym) (toc)) buried in X, and temporarily
10761 turn it into (sym) for output_addr_const. */
10762 while (GET_CODE (XEXP (contains_minus, 0)) != MINUS)
10763 contains_minus = XEXP (contains_minus, 0);
10765 minus = XEXP (contains_minus, 0);
10766 symref = XEXP (minus, 0);
10767 XEXP (contains_minus, 0) = symref;
10772 name = XSTR (symref, 0);
10773 newname = alloca (strlen (name) + sizeof ("@toc"));
10774 strcpy (newname, name);
10775 strcat (newname, "@toc");
10776 XSTR (symref, 0) = newname;
10778 output_addr_const (file, XEXP (x, 1));
10780 XSTR (symref, 0) = name;
10781 XEXP (contains_minus, 0) = minus;
10784 output_addr_const (file, XEXP (x, 1));
10786 fprintf (file, "(%s)", reg_names[REGNO (XEXP (x, 0))]);
10792 /* Target hook for assembling integer objects. The PowerPC version has
10793 to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
10794 is defined. It also needs to handle DI-mode objects on 64-bit
10798 rs6000_assemble_integer (rtx x, unsigned int size, int aligned_p)
10800 #ifdef RELOCATABLE_NEEDS_FIXUP
10801 /* Special handling for SI values. */
10802 if (size == 4 && aligned_p)
10804 extern int in_toc_section (void);
10805 static int recurse = 0;
10807 /* For -mrelocatable, we mark all addresses that need to be fixed up
10808 in the .fixup section. */
10809 if (TARGET_RELOCATABLE
10810 && !in_toc_section ()
10811 && !in_text_section ()
10812 && !in_unlikely_text_section ()
10814 && GET_CODE (x) != CONST_INT
10815 && GET_CODE (x) != CONST_DOUBLE
10821 ASM_GENERATE_INTERNAL_LABEL (buf, "LCP", fixuplabelno);
10823 ASM_OUTPUT_LABEL (asm_out_file, buf);
10824 fprintf (asm_out_file, "\t.long\t(");
10825 output_addr_const (asm_out_file, x);
10826 fprintf (asm_out_file, ")@fixup\n");
10827 fprintf (asm_out_file, "\t.section\t\".fixup\",\"aw\"\n");
10828 ASM_OUTPUT_ALIGN (asm_out_file, 2);
10829 fprintf (asm_out_file, "\t.long\t");
10830 assemble_name (asm_out_file, buf);
10831 fprintf (asm_out_file, "\n\t.previous\n");
10835 /* Remove initial .'s to turn a -mcall-aixdesc function
10836 address into the address of the descriptor, not the function
10838 else if (GET_CODE (x) == SYMBOL_REF
10839 && XSTR (x, 0)[0] == '.'
10840 && DEFAULT_ABI == ABI_AIX)
10842 const char *name = XSTR (x, 0);
10843 while (*name == '.')
10846 fprintf (asm_out_file, "\t.long\t%s\n", name);
10850 #endif /* RELOCATABLE_NEEDS_FIXUP */
10851 return default_assemble_integer (x, size, aligned_p);
10854 #ifdef HAVE_GAS_HIDDEN
10855 /* Emit an assembler directive to set symbol visibility for DECL to
10856 VISIBILITY_TYPE. */
10859 rs6000_assemble_visibility (tree decl, int vis)
10861 /* Functions need to have their entry point symbol visibility set as
10862 well as their descriptor symbol visibility. */
10863 if (DEFAULT_ABI == ABI_AIX
10865 && TREE_CODE (decl) == FUNCTION_DECL)
10867 static const char * const visibility_types[] = {
10868 NULL, "internal", "hidden", "protected"
10871 const char *name, *type;
10873 name = ((* targetm.strip_name_encoding)
10874 (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl))));
10875 type = visibility_types[vis];
10877 fprintf (asm_out_file, "\t.%s\t%s\n", type, name);
10878 fprintf (asm_out_file, "\t.%s\t.%s\n", type, name);
10881 default_assemble_visibility (decl, vis);
10886 rs6000_reverse_condition (enum machine_mode mode, enum rtx_code code)
10888 /* Reversal of FP compares takes care -- an ordered compare
10889 becomes an unordered compare and vice versa. */
10890 if (mode == CCFPmode
10891 && (!flag_finite_math_only
10892 || code == UNLT || code == UNLE || code == UNGT || code == UNGE
10893 || code == UNEQ || code == LTGT))
10894 return reverse_condition_maybe_unordered (code);
10896 return reverse_condition (code);
10899 /* Generate a compare for CODE. Return a brand-new rtx that
10900 represents the result of the compare. */
10903 rs6000_generate_compare (enum rtx_code code)
10905 enum machine_mode comp_mode;
10906 rtx compare_result;
10908 if (rs6000_compare_fp_p)
10909 comp_mode = CCFPmode;
10910 else if (code == GTU || code == LTU
10911 || code == GEU || code == LEU)
10912 comp_mode = CCUNSmode;
10914 comp_mode = CCmode;
10916 /* First, the compare. */
10917 compare_result = gen_reg_rtx (comp_mode);
10919 /* SPE FP compare instructions on the GPRs. Yuck! */
10920 if ((TARGET_E500 && !TARGET_FPRS && TARGET_HARD_FLOAT)
10921 && rs6000_compare_fp_p)
10923 rtx cmp, or1, or2, or_result, compare_result2;
10924 enum machine_mode op_mode = GET_MODE (rs6000_compare_op0);
10926 if (op_mode == VOIDmode)
10927 op_mode = GET_MODE (rs6000_compare_op1);
10929 /* Note: The E500 comparison instructions set the GT bit (x +
10930 1), on success. This explains the mess. */
10934 case EQ: case UNEQ: case NE: case LTGT:
10935 if (op_mode == SFmode)
10936 cmp = flag_finite_math_only
10937 ? gen_tstsfeq_gpr (compare_result, rs6000_compare_op0,
10938 rs6000_compare_op1)
10939 : gen_cmpsfeq_gpr (compare_result, rs6000_compare_op0,
10940 rs6000_compare_op1);
10941 else if (op_mode == DFmode)
10942 cmp = flag_finite_math_only
10943 ? gen_tstdfeq_gpr (compare_result, rs6000_compare_op0,
10944 rs6000_compare_op1)
10945 : gen_cmpdfeq_gpr (compare_result, rs6000_compare_op0,
10946 rs6000_compare_op1);
10949 case GT: case GTU: case UNGT: case UNGE: case GE: case GEU:
10950 if (op_mode == SFmode)
10951 cmp = flag_finite_math_only
10952 ? gen_tstsfgt_gpr (compare_result, rs6000_compare_op0,
10953 rs6000_compare_op1)
10954 : gen_cmpsfgt_gpr (compare_result, rs6000_compare_op0,
10955 rs6000_compare_op1);
10956 else if (op_mode == DFmode)
10957 cmp = flag_finite_math_only
10958 ? gen_tstdfgt_gpr (compare_result, rs6000_compare_op0,
10959 rs6000_compare_op1)
10960 : gen_cmpdfgt_gpr (compare_result, rs6000_compare_op0,
10961 rs6000_compare_op1);
10964 case LT: case LTU: case UNLT: case UNLE: case LE: case LEU:
10965 if (op_mode == SFmode)
10966 cmp = flag_finite_math_only
10967 ? gen_tstsflt_gpr (compare_result, rs6000_compare_op0,
10968 rs6000_compare_op1)
10969 : gen_cmpsflt_gpr (compare_result, rs6000_compare_op0,
10970 rs6000_compare_op1);
10971 else if (op_mode == DFmode)
10972 cmp = flag_finite_math_only
10973 ? gen_tstdflt_gpr (compare_result, rs6000_compare_op0,
10974 rs6000_compare_op1)
10975 : gen_cmpdflt_gpr (compare_result, rs6000_compare_op0,
10976 rs6000_compare_op1);
10983 /* Synthesize LE and GE from LT/GT || EQ. */
10984 if (code == LE || code == GE || code == LEU || code == GEU)
10990 case LE: code = LT; break;
10991 case GE: code = GT; break;
10992 case LEU: code = LT; break;
10993 case GEU: code = GT; break;
10997 or1 = gen_reg_rtx (SImode);
10998 or2 = gen_reg_rtx (SImode);
10999 or_result = gen_reg_rtx (CCEQmode);
11000 compare_result2 = gen_reg_rtx (CCFPmode);
11003 if (op_mode == SFmode)
11004 cmp = flag_finite_math_only
11005 ? gen_tstsfeq_gpr (compare_result2, rs6000_compare_op0,
11006 rs6000_compare_op1)
11007 : gen_cmpsfeq_gpr (compare_result2, rs6000_compare_op0,
11008 rs6000_compare_op1);
11009 else if (op_mode == DFmode)
11010 cmp = flag_finite_math_only
11011 ? gen_tstdfeq_gpr (compare_result2, rs6000_compare_op0,
11012 rs6000_compare_op1)
11013 : gen_cmpdfeq_gpr (compare_result2, rs6000_compare_op0,
11014 rs6000_compare_op1);
11018 or1 = gen_rtx_GT (SImode, compare_result, const0_rtx);
11019 or2 = gen_rtx_GT (SImode, compare_result2, const0_rtx);
11021 /* OR them together. */
11022 cmp = gen_rtx_SET (VOIDmode, or_result,
11023 gen_rtx_COMPARE (CCEQmode,
11024 gen_rtx_IOR (SImode, or1, or2),
11026 compare_result = or_result;
11031 if (code == NE || code == LTGT)
11040 emit_insn (gen_rtx_SET (VOIDmode, compare_result,
11041 gen_rtx_COMPARE (comp_mode,
11042 rs6000_compare_op0,
11043 rs6000_compare_op1)));
11045 /* Some kinds of FP comparisons need an OR operation;
11046 under flag_finite_math_only we don't bother. */
11047 if (rs6000_compare_fp_p
11048 && ! flag_finite_math_only
11049 && ! (TARGET_HARD_FLOAT && TARGET_E500 && !TARGET_FPRS)
11050 && (code == LE || code == GE
11051 || code == UNEQ || code == LTGT
11052 || code == UNGT || code == UNLT))
11054 enum rtx_code or1, or2;
11055 rtx or1_rtx, or2_rtx, compare2_rtx;
11056 rtx or_result = gen_reg_rtx (CCEQmode);
11060 case LE: or1 = LT; or2 = EQ; break;
11061 case GE: or1 = GT; or2 = EQ; break;
11062 case UNEQ: or1 = UNORDERED; or2 = EQ; break;
11063 case LTGT: or1 = LT; or2 = GT; break;
11064 case UNGT: or1 = UNORDERED; or2 = GT; break;
11065 case UNLT: or1 = UNORDERED; or2 = LT; break;
11068 validate_condition_mode (or1, comp_mode);
11069 validate_condition_mode (or2, comp_mode);
11070 or1_rtx = gen_rtx_fmt_ee (or1, SImode, compare_result, const0_rtx);
11071 or2_rtx = gen_rtx_fmt_ee (or2, SImode, compare_result, const0_rtx);
11072 compare2_rtx = gen_rtx_COMPARE (CCEQmode,
11073 gen_rtx_IOR (SImode, or1_rtx, or2_rtx),
11075 emit_insn (gen_rtx_SET (VOIDmode, or_result, compare2_rtx));
11077 compare_result = or_result;
11081 validate_condition_mode (code, GET_MODE (compare_result));
11083 return gen_rtx_fmt_ee (code, VOIDmode, compare_result, const0_rtx);
11087 /* Emit the RTL for an sCOND pattern. */
11090 rs6000_emit_sCOND (enum rtx_code code, rtx result)
11093 enum machine_mode op_mode;
11094 enum rtx_code cond_code;
11096 condition_rtx = rs6000_generate_compare (code);
11097 cond_code = GET_CODE (condition_rtx);
11099 if (TARGET_E500 && rs6000_compare_fp_p
11100 && !TARGET_FPRS && TARGET_HARD_FLOAT)
11104 PUT_MODE (condition_rtx, SImode);
11105 t = XEXP (condition_rtx, 0);
11107 if (cond_code != NE && cond_code != EQ)
11110 if (cond_code == NE)
11111 emit_insn (gen_e500_flip_eq_bit (t, t));
11113 emit_insn (gen_move_from_CR_eq_bit (result, t));
11117 if (cond_code == NE
11118 || cond_code == GE || cond_code == LE
11119 || cond_code == GEU || cond_code == LEU
11120 || cond_code == ORDERED || cond_code == UNGE || cond_code == UNLE)
11122 rtx not_result = gen_reg_rtx (CCEQmode);
11123 rtx not_op, rev_cond_rtx;
11124 enum machine_mode cc_mode;
11126 cc_mode = GET_MODE (XEXP (condition_rtx, 0));
11128 rev_cond_rtx = gen_rtx_fmt_ee (rs6000_reverse_condition (cc_mode, cond_code),
11129 SImode, XEXP (condition_rtx, 0), const0_rtx);
11130 not_op = gen_rtx_COMPARE (CCEQmode, rev_cond_rtx, const0_rtx);
11131 emit_insn (gen_rtx_SET (VOIDmode, not_result, not_op));
11132 condition_rtx = gen_rtx_EQ (VOIDmode, not_result, const0_rtx);
11135 op_mode = GET_MODE (rs6000_compare_op0);
11136 if (op_mode == VOIDmode)
11137 op_mode = GET_MODE (rs6000_compare_op1);
11139 if (TARGET_POWERPC64 && (op_mode == DImode || rs6000_compare_fp_p))
11141 PUT_MODE (condition_rtx, DImode);
11142 convert_move (result, condition_rtx, 0);
11146 PUT_MODE (condition_rtx, SImode);
11147 emit_insn (gen_rtx_SET (VOIDmode, result, condition_rtx));
11151 /* Emit a branch of kind CODE to location LOC. */
11154 rs6000_emit_cbranch (enum rtx_code code, rtx loc)
11156 rtx condition_rtx, loc_ref;
11158 condition_rtx = rs6000_generate_compare (code);
11159 loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
11160 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
11161 gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
11162 loc_ref, pc_rtx)));
11165 /* Return the string to output a conditional branch to LABEL, which is
11166 the operand number of the label, or -1 if the branch is really a
11167 conditional return.
11169 OP is the conditional expression. XEXP (OP, 0) is assumed to be a
11170 condition code register and its mode specifies what kind of
11171 comparison we made.
11173 REVERSED is nonzero if we should reverse the sense of the comparison.
11175 INSN is the insn. */
11178 output_cbranch (rtx op, const char *label, int reversed, rtx insn)
11180 static char string[64];
11181 enum rtx_code code = GET_CODE (op);
11182 rtx cc_reg = XEXP (op, 0);
11183 enum machine_mode mode = GET_MODE (cc_reg);
11184 int cc_regno = REGNO (cc_reg) - CR0_REGNO;
11185 int need_longbranch = label != NULL && get_attr_length (insn) == 8;
11186 int really_reversed = reversed ^ need_longbranch;
11192 validate_condition_mode (code, mode);
11194 /* Work out which way this really branches. We could use
11195 reverse_condition_maybe_unordered here always but this
11196 makes the resulting assembler clearer. */
11197 if (really_reversed)
11199 /* Reversal of FP compares takes care -- an ordered compare
11200 becomes an unordered compare and vice versa. */
11201 if (mode == CCFPmode)
11202 code = reverse_condition_maybe_unordered (code);
11204 code = reverse_condition (code);
11207 if ((TARGET_E500 && !TARGET_FPRS && TARGET_HARD_FLOAT) && mode == CCFPmode)
11209 /* The efscmp/tst* instructions twiddle bit 2, which maps nicely
11212 /* Opposite of GT. */
11214 else if (code == NE)
11222 /* Not all of these are actually distinct opcodes, but
11223 we distinguish them for clarity of the resulting assembler. */
11224 case NE: case LTGT:
11225 ccode = "ne"; break;
11226 case EQ: case UNEQ:
11227 ccode = "eq"; break;
11229 ccode = "ge"; break;
11230 case GT: case GTU: case UNGT:
11231 ccode = "gt"; break;
11233 ccode = "le"; break;
11234 case LT: case LTU: case UNLT:
11235 ccode = "lt"; break;
11236 case UNORDERED: ccode = "un"; break;
11237 case ORDERED: ccode = "nu"; break;
11238 case UNGE: ccode = "nl"; break;
11239 case UNLE: ccode = "ng"; break;
11244 /* Maybe we have a guess as to how likely the branch is.
11245 The old mnemonics don't have a way to specify this information. */
11247 note = find_reg_note (insn, REG_BR_PROB, NULL_RTX);
11248 if (note != NULL_RTX)
11250 /* PROB is the difference from 50%. */
11251 int prob = INTVAL (XEXP (note, 0)) - REG_BR_PROB_BASE / 2;
11253 /* Only hint for highly probable/improbable branches on newer
11254 cpus as static prediction overrides processor dynamic
11255 prediction. For older cpus we may as well always hint, but
11256 assume not taken for branches that are very close to 50% as a
11257 mispredicted taken branch is more expensive than a
11258 mispredicted not-taken branch. */
11259 if (rs6000_always_hint
11260 || abs (prob) > REG_BR_PROB_BASE / 100 * 48)
11262 if (abs (prob) > REG_BR_PROB_BASE / 20
11263 && ((prob > 0) ^ need_longbranch))
11271 s += sprintf (s, "{b%sr|b%slr%s} ", ccode, ccode, pred);
11273 s += sprintf (s, "{b%s|b%s%s} ", ccode, ccode, pred);
11275 /* We need to escape any '%' characters in the reg_names string.
11276 Assume they'd only be the first character.... */
11277 if (reg_names[cc_regno + CR0_REGNO][0] == '%')
11279 s += sprintf (s, "%s", reg_names[cc_regno + CR0_REGNO]);
11283 /* If the branch distance was too far, we may have to use an
11284 unconditional branch to go the distance. */
11285 if (need_longbranch)
11286 s += sprintf (s, ",$+8\n\tb %s", label);
11288 s += sprintf (s, ",%s", label);
11294 /* Return the string to flip the EQ bit on a CR. */
11296 output_e500_flip_eq_bit (rtx dst, rtx src)
11298 static char string[64];
11301 if (GET_CODE (dst) != REG || ! CR_REGNO_P (REGNO (dst))
11302 || GET_CODE (src) != REG || ! CR_REGNO_P (REGNO (src)))
11306 a = 4 * (REGNO (dst) - CR0_REGNO) + 2;
11307 b = 4 * (REGNO (src) - CR0_REGNO) + 2;
11309 sprintf (string, "crnot %d,%d", a, b);
11313 /* Return insn index for the vector compare instruction for given CODE,
11314 and DEST_MODE, OP_MODE. Return INSN_NOT_AVAILABLE if valid insn is
11318 get_vec_cmp_insn (enum rtx_code code,
11319 enum machine_mode dest_mode,
11320 enum machine_mode op_mode)
11322 if (!TARGET_ALTIVEC)
11323 return INSN_NOT_AVAILABLE;
11328 if (dest_mode == V16QImode && op_mode == V16QImode)
11329 return UNSPEC_VCMPEQUB;
11330 if (dest_mode == V8HImode && op_mode == V8HImode)
11331 return UNSPEC_VCMPEQUH;
11332 if (dest_mode == V4SImode && op_mode == V4SImode)
11333 return UNSPEC_VCMPEQUW;
11334 if (dest_mode == V4SImode && op_mode == V4SFmode)
11335 return UNSPEC_VCMPEQFP;
11338 if (dest_mode == V4SImode && op_mode == V4SFmode)
11339 return UNSPEC_VCMPGEFP;
11341 if (dest_mode == V16QImode && op_mode == V16QImode)
11342 return UNSPEC_VCMPGTSB;
11343 if (dest_mode == V8HImode && op_mode == V8HImode)
11344 return UNSPEC_VCMPGTSH;
11345 if (dest_mode == V4SImode && op_mode == V4SImode)
11346 return UNSPEC_VCMPGTSW;
11347 if (dest_mode == V4SImode && op_mode == V4SFmode)
11348 return UNSPEC_VCMPGTFP;
11351 if (dest_mode == V16QImode && op_mode == V16QImode)
11352 return UNSPEC_VCMPGTUB;
11353 if (dest_mode == V8HImode && op_mode == V8HImode)
11354 return UNSPEC_VCMPGTUH;
11355 if (dest_mode == V4SImode && op_mode == V4SImode)
11356 return UNSPEC_VCMPGTUW;
11361 return INSN_NOT_AVAILABLE;
11364 /* Emit vector compare for operands OP0 and OP1 using code RCODE.
11365 DMODE is expected destination mode. This is a recursive function. */
11368 rs6000_emit_vector_compare (enum rtx_code rcode,
11370 enum machine_mode dmode)
11374 enum machine_mode dest_mode;
11375 enum machine_mode op_mode = GET_MODE (op1);
11377 #ifdef ENABLE_CHECKING
11378 if (!TARGET_ALTIVEC)
11381 if (GET_MODE (op0) != GET_MODE (op1))
11385 /* Floating point vector compare instructions uses destination V4SImode.
11386 Move destination to appropriate mode later. */
11387 if (dmode == V4SFmode)
11388 dest_mode = V4SImode;
11392 mask = gen_reg_rtx (dest_mode);
11393 vec_cmp_insn = get_vec_cmp_insn (rcode, dest_mode, op_mode);
11395 if (vec_cmp_insn == INSN_NOT_AVAILABLE)
11397 bool swap_operands = false;
11398 bool try_again = false;
11403 swap_operands = true;
11408 swap_operands = true;
11412 /* Treat A != B as ~(A==B). */
11414 enum insn_code nor_code;
11415 rtx eq_rtx = rs6000_emit_vector_compare (EQ, op0, op1,
11418 nor_code = one_cmpl_optab->handlers[(int)dest_mode].insn_code;
11419 if (nor_code == CODE_FOR_nothing)
11421 emit_insn (GEN_FCN (nor_code) (mask, eq_rtx));
11423 if (dmode != dest_mode)
11425 rtx temp = gen_reg_rtx (dest_mode);
11426 convert_move (temp, mask, 0);
11436 /* Try GT/GTU/LT/LTU OR EQ */
11439 enum insn_code ior_code;
11440 enum rtx_code new_code;
11444 else if (rcode == GEU)
11446 else if (rcode == LE)
11448 else if (rcode == LEU)
11453 c_rtx = rs6000_emit_vector_compare (new_code,
11454 op0, op1, dest_mode);
11455 eq_rtx = rs6000_emit_vector_compare (EQ, op0, op1,
11458 ior_code = ior_optab->handlers[(int)dest_mode].insn_code;
11459 if (ior_code == CODE_FOR_nothing)
11461 emit_insn (GEN_FCN (ior_code) (mask, c_rtx, eq_rtx));
11462 if (dmode != dest_mode)
11464 rtx temp = gen_reg_rtx (dest_mode);
11465 convert_move (temp, mask, 0);
11477 vec_cmp_insn = get_vec_cmp_insn (rcode, dest_mode, op_mode);
11478 if (vec_cmp_insn == INSN_NOT_AVAILABLE)
11479 /* You only get two chances. */
11492 emit_insn (gen_rtx_fmt_ee (SET,
11495 gen_rtx_fmt_Ei (UNSPEC, dest_mode,
11496 gen_rtvec (2, op0, op1),
11498 if (dmode != dest_mode)
11500 rtx temp = gen_reg_rtx (dest_mode);
11501 convert_move (temp, mask, 0);
11507 /* Return vector select instruction for MODE. Return INSN_NOT_AVAILABLE, if
11508 valid insn doesn exist for given mode. */
11511 get_vsel_insn (enum machine_mode mode)
11516 return UNSPEC_VSEL4SI;
11519 return UNSPEC_VSEL4SF;
11522 return UNSPEC_VSEL8HI;
11525 return UNSPEC_VSEL16QI;
11528 return INSN_NOT_AVAILABLE;
11531 return INSN_NOT_AVAILABLE;
11534 /* Emit vector select insn where DEST is destination using
11535 operands OP1, OP2 and MASK. */
11538 rs6000_emit_vector_select (rtx dest, rtx op1, rtx op2, rtx mask)
11541 enum machine_mode dest_mode = GET_MODE (dest);
11542 int vsel_insn_index = get_vsel_insn (GET_MODE (dest));
11544 temp = gen_reg_rtx (dest_mode);
11546 t = gen_rtx_fmt_ee (SET, VOIDmode, temp,
11547 gen_rtx_fmt_Ei (UNSPEC, dest_mode,
11548 gen_rtvec (3, op1, op2, mask),
11551 emit_move_insn (dest, temp);
11555 /* Emit vector conditional expression.
11556 DEST is destination. OP1 and OP2 are two VEC_COND_EXPR operands.
11557 CC_OP0 and CC_OP1 are the two operands for the relation operation COND. */
11560 rs6000_emit_vector_cond_expr (rtx dest, rtx op1, rtx op2,
11561 rtx cond, rtx cc_op0, rtx cc_op1)
11563 enum machine_mode dest_mode = GET_MODE (dest);
11564 enum rtx_code rcode = GET_CODE (cond);
11567 if (!TARGET_ALTIVEC)
11570 /* Get the vector mask for the given relational operations. */
11571 mask = rs6000_emit_vector_compare (rcode, cc_op0, cc_op1, dest_mode);
11573 rs6000_emit_vector_select (dest, op1, op2, mask);
11578 /* Emit a conditional move: move TRUE_COND to DEST if OP of the
11579 operands of the last comparison is nonzero/true, FALSE_COND if it
11580 is zero/false. Return 0 if the hardware has no such operation. */
11583 rs6000_emit_cmove (rtx dest, rtx op, rtx true_cond, rtx false_cond)
11585 enum rtx_code code = GET_CODE (op);
11586 rtx op0 = rs6000_compare_op0;
11587 rtx op1 = rs6000_compare_op1;
11588 REAL_VALUE_TYPE c1;
11589 enum machine_mode compare_mode = GET_MODE (op0);
11590 enum machine_mode result_mode = GET_MODE (dest);
11592 bool is_against_zero;
11594 /* These modes should always match. */
11595 if (GET_MODE (op1) != compare_mode
11596 /* In the isel case however, we can use a compare immediate, so
11597 op1 may be a small constant. */
11598 && (!TARGET_ISEL || !short_cint_operand (op1, VOIDmode)))
11600 if (GET_MODE (true_cond) != result_mode)
11602 if (GET_MODE (false_cond) != result_mode)
11605 /* First, work out if the hardware can do this at all, or
11606 if it's too slow.... */
11607 if (! rs6000_compare_fp_p)
11610 return rs6000_emit_int_cmove (dest, op, true_cond, false_cond);
11613 else if (TARGET_E500 && TARGET_HARD_FLOAT && !TARGET_FPRS
11614 && GET_MODE_CLASS (compare_mode) == MODE_FLOAT)
11617 is_against_zero = op1 == CONST0_RTX (compare_mode);
11619 /* A floating-point subtract might overflow, underflow, or produce
11620 an inexact result, thus changing the floating-point flags, so it
11621 can't be generated if we care about that. It's safe if one side
11622 of the construct is zero, since then no subtract will be
11624 if (GET_MODE_CLASS (compare_mode) == MODE_FLOAT
11625 && flag_trapping_math && ! is_against_zero)
11628 /* Eliminate half of the comparisons by switching operands, this
11629 makes the remaining code simpler. */
11630 if (code == UNLT || code == UNGT || code == UNORDERED || code == NE
11631 || code == LTGT || code == LT || code == UNLE)
11633 code = reverse_condition_maybe_unordered (code);
11635 true_cond = false_cond;
11639 /* UNEQ and LTGT take four instructions for a comparison with zero,
11640 it'll probably be faster to use a branch here too. */
11641 if (code == UNEQ && HONOR_NANS (compare_mode))
11644 if (GET_CODE (op1) == CONST_DOUBLE)
11645 REAL_VALUE_FROM_CONST_DOUBLE (c1, op1);
11647 /* We're going to try to implement comparisons by performing
11648 a subtract, then comparing against zero. Unfortunately,
11649 Inf - Inf is NaN which is not zero, and so if we don't
11650 know that the operand is finite and the comparison
11651 would treat EQ different to UNORDERED, we can't do it. */
11652 if (HONOR_INFINITIES (compare_mode)
11653 && code != GT && code != UNGE
11654 && (GET_CODE (op1) != CONST_DOUBLE || real_isinf (&c1))
11655 /* Constructs of the form (a OP b ? a : b) are safe. */
11656 && ((! rtx_equal_p (op0, false_cond) && ! rtx_equal_p (op1, false_cond))
11657 || (! rtx_equal_p (op0, true_cond)
11658 && ! rtx_equal_p (op1, true_cond))))
11661 /* At this point we know we can use fsel. */
11663 /* Reduce the comparison to a comparison against zero. */
11664 if (! is_against_zero)
11666 temp = gen_reg_rtx (compare_mode);
11667 emit_insn (gen_rtx_SET (VOIDmode, temp,
11668 gen_rtx_MINUS (compare_mode, op0, op1)));
11670 op1 = CONST0_RTX (compare_mode);
11673 /* If we don't care about NaNs we can reduce some of the comparisons
11674 down to faster ones. */
11675 if (! HONOR_NANS (compare_mode))
11681 true_cond = false_cond;
11694 /* Now, reduce everything down to a GE. */
11701 temp = gen_reg_rtx (compare_mode);
11702 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
11707 temp = gen_reg_rtx (compare_mode);
11708 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_ABS (compare_mode, op0)));
11713 temp = gen_reg_rtx (compare_mode);
11714 emit_insn (gen_rtx_SET (VOIDmode, temp,
11715 gen_rtx_NEG (compare_mode,
11716 gen_rtx_ABS (compare_mode, op0))));
11721 /* a UNGE 0 <-> (a GE 0 || -a UNLT 0) */
11722 temp = gen_reg_rtx (result_mode);
11723 emit_insn (gen_rtx_SET (VOIDmode, temp,
11724 gen_rtx_IF_THEN_ELSE (result_mode,
11725 gen_rtx_GE (VOIDmode,
11727 true_cond, false_cond)));
11728 false_cond = true_cond;
11731 temp = gen_reg_rtx (compare_mode);
11732 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
11737 /* a GT 0 <-> (a GE 0 && -a UNLT 0) */
11738 temp = gen_reg_rtx (result_mode);
11739 emit_insn (gen_rtx_SET (VOIDmode, temp,
11740 gen_rtx_IF_THEN_ELSE (result_mode,
11741 gen_rtx_GE (VOIDmode,
11743 true_cond, false_cond)));
11744 true_cond = false_cond;
11747 temp = gen_reg_rtx (compare_mode);
11748 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
11756 emit_insn (gen_rtx_SET (VOIDmode, dest,
11757 gen_rtx_IF_THEN_ELSE (result_mode,
11758 gen_rtx_GE (VOIDmode,
11760 true_cond, false_cond)));
11764 /* Same as above, but for ints (isel). */
11767 rs6000_emit_int_cmove (rtx dest, rtx op, rtx true_cond, rtx false_cond)
11769 rtx condition_rtx, cr;
11771 /* All isel implementations thus far are 32-bits. */
11772 if (GET_MODE (rs6000_compare_op0) != SImode)
11775 /* We still have to do the compare, because isel doesn't do a
11776 compare, it just looks at the CRx bits set by a previous compare
11778 condition_rtx = rs6000_generate_compare (GET_CODE (op));
11779 cr = XEXP (condition_rtx, 0);
11781 if (GET_MODE (cr) == CCmode)
11782 emit_insn (gen_isel_signed (dest, condition_rtx,
11783 true_cond, false_cond, cr));
11785 emit_insn (gen_isel_unsigned (dest, condition_rtx,
11786 true_cond, false_cond, cr));
11792 output_isel (rtx *operands)
11794 enum rtx_code code;
11796 code = GET_CODE (operands[1]);
11797 if (code == GE || code == GEU || code == LE || code == LEU || code == NE)
11799 PUT_CODE (operands[1], reverse_condition (code));
11800 return "isel %0,%3,%2,%j1";
11803 return "isel %0,%2,%3,%j1";
11807 rs6000_emit_minmax (rtx dest, enum rtx_code code, rtx op0, rtx op1)
11809 enum machine_mode mode = GET_MODE (op0);
11813 if (code == SMAX || code == SMIN)
11818 if (code == SMAX || code == UMAX)
11819 target = emit_conditional_move (dest, c, op0, op1, mode,
11820 op0, op1, mode, 0);
11822 target = emit_conditional_move (dest, c, op0, op1, mode,
11823 op1, op0, mode, 0);
11824 if (target == NULL_RTX)
11826 if (target != dest)
11827 emit_move_insn (dest, target);
11830 /* Emit instructions to move SRC to DST. Called by splitters for
11831 multi-register moves. It will emit at most one instruction for
11832 each register that is accessed; that is, it won't emit li/lis pairs
11833 (or equivalent for 64-bit code). One of SRC or DST must be a hard
11837 rs6000_split_multireg_move (rtx dst, rtx src)
11839 /* The register number of the first register being moved. */
11841 /* The mode that is to be moved. */
11842 enum machine_mode mode;
11843 /* The mode that the move is being done in, and its size. */
11844 enum machine_mode reg_mode;
11846 /* The number of registers that will be moved. */
11849 reg = REG_P (dst) ? REGNO (dst) : REGNO (src);
11850 mode = GET_MODE (dst);
11851 nregs = HARD_REGNO_NREGS (reg, mode);
11852 if (FP_REGNO_P (reg))
11854 else if (ALTIVEC_REGNO_P (reg))
11855 reg_mode = V16QImode;
11857 reg_mode = word_mode;
11858 reg_mode_size = GET_MODE_SIZE (reg_mode);
11860 if (reg_mode_size * nregs != GET_MODE_SIZE (mode))
11863 if (REG_P (src) && REG_P (dst) && (REGNO (src) < REGNO (dst)))
11865 /* Move register range backwards, if we might have destructive
11868 for (i = nregs - 1; i >= 0; i--)
11869 emit_insn (gen_rtx_SET (VOIDmode,
11870 simplify_gen_subreg (reg_mode, dst, mode,
11871 i * reg_mode_size),
11872 simplify_gen_subreg (reg_mode, src, mode,
11873 i * reg_mode_size)));
11879 bool used_update = false;
11881 if (MEM_P (src) && INT_REGNO_P (reg))
11885 if (GET_CODE (XEXP (src, 0)) == PRE_INC
11886 || GET_CODE (XEXP (src, 0)) == PRE_DEC)
11889 breg = XEXP (XEXP (src, 0), 0);
11890 delta_rtx = (GET_CODE (XEXP (src, 0)) == PRE_INC
11891 ? GEN_INT (GET_MODE_SIZE (GET_MODE (src)))
11892 : GEN_INT (-GET_MODE_SIZE (GET_MODE (src))));
11893 emit_insn (TARGET_32BIT
11894 ? gen_addsi3 (breg, breg, delta_rtx)
11895 : gen_adddi3 (breg, breg, delta_rtx));
11896 src = gen_rtx_MEM (mode, breg);
11898 else if (! offsettable_memref_p (src))
11900 rtx newsrc, basereg;
11901 basereg = gen_rtx_REG (Pmode, reg);
11902 emit_insn (gen_rtx_SET (VOIDmode, basereg, XEXP (src, 0)));
11903 newsrc = gen_rtx_MEM (GET_MODE (src), basereg);
11904 MEM_COPY_ATTRIBUTES (newsrc, src);
11908 breg = XEXP (src, 0);
11909 if (GET_CODE (breg) == PLUS || GET_CODE (breg) == LO_SUM)
11910 breg = XEXP (breg, 0);
11912 /* If the base register we are using to address memory is
11913 also a destination reg, then change that register last. */
11915 && REGNO (breg) >= REGNO (dst)
11916 && REGNO (breg) < REGNO (dst) + nregs)
11917 j = REGNO (breg) - REGNO (dst);
11920 if (GET_CODE (dst) == MEM && INT_REGNO_P (reg))
11924 if (GET_CODE (XEXP (dst, 0)) == PRE_INC
11925 || GET_CODE (XEXP (dst, 0)) == PRE_DEC)
11928 breg = XEXP (XEXP (dst, 0), 0);
11929 delta_rtx = (GET_CODE (XEXP (dst, 0)) == PRE_INC
11930 ? GEN_INT (GET_MODE_SIZE (GET_MODE (dst)))
11931 : GEN_INT (-GET_MODE_SIZE (GET_MODE (dst))));
11933 /* We have to update the breg before doing the store.
11934 Use store with update, if available. */
11938 rtx nsrc = simplify_gen_subreg (reg_mode, src, mode, 0);
11939 emit_insn (TARGET_32BIT
11940 ? (TARGET_POWERPC64
11941 ? gen_movdi_si_update (breg, breg, delta_rtx, nsrc)
11942 : gen_movsi_update (breg, breg, delta_rtx, nsrc))
11943 : gen_movdi_di_update (breg, breg, delta_rtx, nsrc));
11944 used_update = true;
11947 emit_insn (TARGET_32BIT
11948 ? gen_addsi3 (breg, breg, delta_rtx)
11949 : gen_adddi3 (breg, breg, delta_rtx));
11950 dst = gen_rtx_MEM (mode, breg);
11952 else if (! offsettable_memref_p (dst))
11956 for (i = 0; i < nregs; i++)
11958 /* Calculate index to next subword. */
11963 /* If compiler already emitted move of first word by
11964 store with update, no need to do anything. */
11965 if (j == 0 && used_update)
11968 emit_insn (gen_rtx_SET (VOIDmode,
11969 simplify_gen_subreg (reg_mode, dst, mode,
11970 j * reg_mode_size),
11971 simplify_gen_subreg (reg_mode, src, mode,
11972 j * reg_mode_size)));
11978 /* This page contains routines that are used to determine what the
11979 function prologue and epilogue code will do and write them out. */
11981 /* Return the first fixed-point register that is required to be
11982 saved. 32 if none. */
11985 first_reg_to_save (void)
11989 /* Find lowest numbered live register. */
11990 for (first_reg = 13; first_reg <= 31; first_reg++)
11991 if (regs_ever_live[first_reg]
11992 && (! call_used_regs[first_reg]
11993 || (first_reg == RS6000_PIC_OFFSET_TABLE_REGNUM
11994 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
11995 || (DEFAULT_ABI == ABI_DARWIN && flag_pic)
11996 || (TARGET_TOC && TARGET_MINIMAL_TOC)))))
12001 && current_function_uses_pic_offset_table
12002 && first_reg > RS6000_PIC_OFFSET_TABLE_REGNUM)
12003 return RS6000_PIC_OFFSET_TABLE_REGNUM;
12009 /* Similar, for FP regs. */
12012 first_fp_reg_to_save (void)
12016 /* Find lowest numbered live register. */
12017 for (first_reg = 14 + 32; first_reg <= 63; first_reg++)
12018 if (regs_ever_live[first_reg])
12024 /* Similar, for AltiVec regs. */
12027 first_altivec_reg_to_save (void)
12031 /* Stack frame remains as is unless we are in AltiVec ABI. */
12032 if (! TARGET_ALTIVEC_ABI)
12033 return LAST_ALTIVEC_REGNO + 1;
12035 /* Find lowest numbered live register. */
12036 for (i = FIRST_ALTIVEC_REGNO + 20; i <= LAST_ALTIVEC_REGNO; ++i)
12037 if (regs_ever_live[i])
12043 /* Return a 32-bit mask of the AltiVec registers we need to set in
12044 VRSAVE. Bit n of the return value is 1 if Vn is live. The MSB in
12045 the 32-bit word is 0. */
12047 static unsigned int
12048 compute_vrsave_mask (void)
12050 unsigned int i, mask = 0;
12052 /* First, find out if we use _any_ altivec registers. */
12053 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
12054 if (regs_ever_live[i])
12055 mask |= ALTIVEC_REG_BIT (i);
12060 /* Next, remove the argument registers from the set. These must
12061 be in the VRSAVE mask set by the caller, so we don't need to add
12062 them in again. More importantly, the mask we compute here is
12063 used to generate CLOBBERs in the set_vrsave insn, and we do not
12064 wish the argument registers to die. */
12065 for (i = cfun->args_info.vregno - 1; i >= ALTIVEC_ARG_MIN_REG; --i)
12066 mask &= ~ALTIVEC_REG_BIT (i);
12068 /* Similarly, remove the return value from the set. */
12071 diddle_return_value (is_altivec_return_reg, &yes);
12073 mask &= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN);
12079 /* For a very restricted set of circumstances, we can cut down the
12080 size of prologs/epilogs by calling our own save/restore-the-world
12084 compute_save_world_info(rs6000_stack_t *info_ptr)
12086 info_ptr->world_save_p =
12087 (DEFAULT_ABI == ABI_DARWIN)
12088 && ! (current_function_calls_setjmp && flag_exceptions)
12089 && info_ptr->first_fp_reg_save == FIRST_SAVED_FP_REGNO
12090 && info_ptr->first_gp_reg_save == FIRST_SAVED_GP_REGNO
12091 && info_ptr->first_altivec_reg_save == FIRST_SAVED_ALTIVEC_REGNO
12092 && info_ptr->cr_save_p;
12094 /* This will not work in conjunction with sibcalls. Make sure there
12095 are none. (This check is expensive, but seldom executed.) */
12096 if ( info_ptr->world_save_p )
12099 for ( insn = get_last_insn_anywhere (); insn; insn = PREV_INSN (insn))
12100 if ( GET_CODE (insn) == CALL_INSN
12101 && SIBLING_CALL_P (insn))
12103 info_ptr->world_save_p = 0;
12108 if (info_ptr->world_save_p)
12110 /* Even if we're not touching VRsave, make sure there's room on the
12111 stack for it, if it looks like we're calling SAVE_WORLD, which
12112 will attempt to save it. */
12113 info_ptr->vrsave_size = 4;
12115 /* "Save" the VRsave register too if we're saving the world. */
12116 if (info_ptr->vrsave_mask == 0)
12117 info_ptr->vrsave_mask = compute_vrsave_mask ();
12119 /* Because the Darwin register save/restore routines only handle
12120 F14 .. F31 and V20 .. V31 as per the ABI, perform a consistency
12121 check and abort if there's something worng. */
12122 if (info_ptr->first_fp_reg_save < FIRST_SAVED_FP_REGNO
12123 || info_ptr->first_altivec_reg_save < FIRST_SAVED_ALTIVEC_REGNO)
12131 is_altivec_return_reg (rtx reg, void *xyes)
12133 bool *yes = (bool *) xyes;
12134 if (REGNO (reg) == ALTIVEC_ARG_RETURN)
12139 /* Calculate the stack information for the current function. This is
12140 complicated by having two separate calling sequences, the AIX calling
12141 sequence and the V.4 calling sequence.
12143 AIX (and Darwin/Mac OS X) stack frames look like:
12145 SP----> +---------------------------------------+
12146 | back chain to caller | 0 0
12147 +---------------------------------------+
12148 | saved CR | 4 8 (8-11)
12149 +---------------------------------------+
12151 +---------------------------------------+
12152 | reserved for compilers | 12 24
12153 +---------------------------------------+
12154 | reserved for binders | 16 32
12155 +---------------------------------------+
12156 | saved TOC pointer | 20 40
12157 +---------------------------------------+
12158 | Parameter save area (P) | 24 48
12159 +---------------------------------------+
12160 | Alloca space (A) | 24+P etc.
12161 +---------------------------------------+
12162 | Local variable space (L) | 24+P+A
12163 +---------------------------------------+
12164 | Float/int conversion temporary (X) | 24+P+A+L
12165 +---------------------------------------+
12166 | Save area for AltiVec registers (W) | 24+P+A+L+X
12167 +---------------------------------------+
12168 | AltiVec alignment padding (Y) | 24+P+A+L+X+W
12169 +---------------------------------------+
12170 | Save area for VRSAVE register (Z) | 24+P+A+L+X+W+Y
12171 +---------------------------------------+
12172 | Save area for GP registers (G) | 24+P+A+X+L+X+W+Y+Z
12173 +---------------------------------------+
12174 | Save area for FP registers (F) | 24+P+A+X+L+X+W+Y+Z+G
12175 +---------------------------------------+
12176 old SP->| back chain to caller's caller |
12177 +---------------------------------------+
12179 The required alignment for AIX configurations is two words (i.e., 8
12183 V.4 stack frames look like:
12185 SP----> +---------------------------------------+
12186 | back chain to caller | 0
12187 +---------------------------------------+
12188 | caller's saved LR | 4
12189 +---------------------------------------+
12190 | Parameter save area (P) | 8
12191 +---------------------------------------+
12192 | Alloca space (A) | 8+P
12193 +---------------------------------------+
12194 | Varargs save area (V) | 8+P+A
12195 +---------------------------------------+
12196 | Local variable space (L) | 8+P+A+V
12197 +---------------------------------------+
12198 | Float/int conversion temporary (X) | 8+P+A+V+L
12199 +---------------------------------------+
12200 | Save area for AltiVec registers (W) | 8+P+A+V+L+X
12201 +---------------------------------------+
12202 | AltiVec alignment padding (Y) | 8+P+A+V+L+X+W
12203 +---------------------------------------+
12204 | Save area for VRSAVE register (Z) | 8+P+A+V+L+X+W+Y
12205 +---------------------------------------+
12206 | SPE: area for 64-bit GP registers |
12207 +---------------------------------------+
12208 | SPE alignment padding |
12209 +---------------------------------------+
12210 | saved CR (C) | 8+P+A+V+L+X+W+Y+Z
12211 +---------------------------------------+
12212 | Save area for GP registers (G) | 8+P+A+V+L+X+W+Y+Z+C
12213 +---------------------------------------+
12214 | Save area for FP registers (F) | 8+P+A+V+L+X+W+Y+Z+C+G
12215 +---------------------------------------+
12216 old SP->| back chain to caller's caller |
12217 +---------------------------------------+
12219 The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
12220 given. (But note below and in sysv4.h that we require only 8 and
12221 may round up the size of our stack frame anyways. The historical
12222 reason is early versions of powerpc-linux which didn't properly
12223 align the stack at program startup. A happy side-effect is that
12224 -mno-eabi libraries can be used with -meabi programs.)
12226 The EABI configuration defaults to the V.4 layout. However,
12227 the stack alignment requirements may differ. If -mno-eabi is not
12228 given, the required stack alignment is 8 bytes; if -mno-eabi is
12229 given, the required alignment is 16 bytes. (But see V.4 comment
12232 #ifndef ABI_STACK_BOUNDARY
12233 #define ABI_STACK_BOUNDARY STACK_BOUNDARY
12236 static rs6000_stack_t *
12237 rs6000_stack_info (void)
12239 static rs6000_stack_t info, zero_info;
12240 rs6000_stack_t *info_ptr = &info;
12241 int reg_size = TARGET_32BIT ? 4 : 8;
12244 HOST_WIDE_INT non_fixed_size;
12246 /* Zero all fields portably. */
12251 /* Cache value so we don't rescan instruction chain over and over. */
12252 if (cfun->machine->insn_chain_scanned_p == 0)
12254 cfun->machine->insn_chain_scanned_p = 1;
12255 info_ptr->spe_64bit_regs_used = (int) spe_func_has_64bit_regs_p ();
12259 /* Select which calling sequence. */
12260 info_ptr->abi = DEFAULT_ABI;
12262 /* Calculate which registers need to be saved & save area size. */
12263 info_ptr->first_gp_reg_save = first_reg_to_save ();
12264 /* Assume that we will have to save RS6000_PIC_OFFSET_TABLE_REGNUM,
12265 even if it currently looks like we won't. */
12266 if (((TARGET_TOC && TARGET_MINIMAL_TOC)
12267 || (flag_pic == 1 && DEFAULT_ABI == ABI_V4)
12268 || (flag_pic && DEFAULT_ABI == ABI_DARWIN))
12269 && info_ptr->first_gp_reg_save > RS6000_PIC_OFFSET_TABLE_REGNUM)
12270 info_ptr->gp_size = reg_size * (32 - RS6000_PIC_OFFSET_TABLE_REGNUM);
12272 info_ptr->gp_size = reg_size * (32 - info_ptr->first_gp_reg_save);
12274 /* For the SPE, we have an additional upper 32-bits on each GPR.
12275 Ideally we should save the entire 64-bits only when the upper
12276 half is used in SIMD instructions. Since we only record
12277 registers live (not the size they are used in), this proves
12278 difficult because we'd have to traverse the instruction chain at
12279 the right time, taking reload into account. This is a real pain,
12280 so we opt to save the GPRs in 64-bits always if but one register
12281 gets used in 64-bits. Otherwise, all the registers in the frame
12282 get saved in 32-bits.
12284 So... since when we save all GPRs (except the SP) in 64-bits, the
12285 traditional GP save area will be empty. */
12286 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
12287 info_ptr->gp_size = 0;
12289 info_ptr->first_fp_reg_save = first_fp_reg_to_save ();
12290 info_ptr->fp_size = 8 * (64 - info_ptr->first_fp_reg_save);
12292 info_ptr->first_altivec_reg_save = first_altivec_reg_to_save ();
12293 info_ptr->altivec_size = 16 * (LAST_ALTIVEC_REGNO + 1
12294 - info_ptr->first_altivec_reg_save);
12296 /* Does this function call anything? */
12297 info_ptr->calls_p = (! current_function_is_leaf
12298 || cfun->machine->ra_needs_full_frame);
12300 /* Determine if we need to save the link register. */
12301 if (rs6000_ra_ever_killed ()
12302 || (DEFAULT_ABI == ABI_AIX
12303 && current_function_profile
12304 && !TARGET_PROFILE_KERNEL)
12305 #ifdef TARGET_RELOCATABLE
12306 || (TARGET_RELOCATABLE && (get_pool_size () != 0))
12308 || (info_ptr->first_fp_reg_save != 64
12309 && !FP_SAVE_INLINE (info_ptr->first_fp_reg_save))
12310 || info_ptr->first_altivec_reg_save <= LAST_ALTIVEC_REGNO
12311 || (DEFAULT_ABI == ABI_V4 && current_function_calls_alloca)
12312 || (DEFAULT_ABI == ABI_DARWIN
12314 && current_function_uses_pic_offset_table)
12315 || info_ptr->calls_p)
12317 info_ptr->lr_save_p = 1;
12318 regs_ever_live[LINK_REGISTER_REGNUM] = 1;
12321 /* Determine if we need to save the condition code registers. */
12322 if (regs_ever_live[CR2_REGNO]
12323 || regs_ever_live[CR3_REGNO]
12324 || regs_ever_live[CR4_REGNO])
12326 info_ptr->cr_save_p = 1;
12327 if (DEFAULT_ABI == ABI_V4)
12328 info_ptr->cr_size = reg_size;
12331 /* If the current function calls __builtin_eh_return, then we need
12332 to allocate stack space for registers that will hold data for
12333 the exception handler. */
12334 if (current_function_calls_eh_return)
12337 for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM; ++i)
12340 /* SPE saves EH registers in 64-bits. */
12341 ehrd_size = i * (TARGET_SPE_ABI
12342 && info_ptr->spe_64bit_regs_used != 0
12343 ? UNITS_PER_SPE_WORD : UNITS_PER_WORD);
12348 /* Determine various sizes. */
12349 info_ptr->reg_size = reg_size;
12350 info_ptr->fixed_size = RS6000_SAVE_AREA;
12351 info_ptr->varargs_size = RS6000_VARARGS_AREA;
12352 info_ptr->vars_size = RS6000_ALIGN (get_frame_size (), 8);
12353 info_ptr->parm_size = RS6000_ALIGN (current_function_outgoing_args_size,
12354 TARGET_ALTIVEC ? 16 : 8);
12356 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
12357 info_ptr->spe_gp_size = 8 * (32 - info_ptr->first_gp_reg_save);
12359 info_ptr->spe_gp_size = 0;
12361 if (TARGET_ALTIVEC_ABI)
12362 info_ptr->vrsave_mask = compute_vrsave_mask ();
12364 info_ptr->vrsave_mask = 0;
12366 if (TARGET_ALTIVEC_VRSAVE && info_ptr->vrsave_mask)
12367 info_ptr->vrsave_size = 4;
12369 info_ptr->vrsave_size = 0;
12371 compute_save_world_info (info_ptr);
12373 /* Calculate the offsets. */
12374 switch (DEFAULT_ABI)
12382 info_ptr->fp_save_offset = - info_ptr->fp_size;
12383 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
12385 if (TARGET_ALTIVEC_ABI)
12387 info_ptr->vrsave_save_offset
12388 = info_ptr->gp_save_offset - info_ptr->vrsave_size;
12390 /* Align stack so vector save area is on a quadword boundary. */
12391 if (info_ptr->altivec_size != 0)
12392 info_ptr->altivec_padding_size
12393 = 16 - (-info_ptr->vrsave_save_offset % 16);
12395 info_ptr->altivec_padding_size = 0;
12397 info_ptr->altivec_save_offset
12398 = info_ptr->vrsave_save_offset
12399 - info_ptr->altivec_padding_size
12400 - info_ptr->altivec_size;
12402 /* Adjust for AltiVec case. */
12403 info_ptr->ehrd_offset = info_ptr->altivec_save_offset - ehrd_size;
12406 info_ptr->ehrd_offset = info_ptr->gp_save_offset - ehrd_size;
12407 info_ptr->cr_save_offset = reg_size; /* first word when 64-bit. */
12408 info_ptr->lr_save_offset = 2*reg_size;
12412 info_ptr->fp_save_offset = - info_ptr->fp_size;
12413 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
12414 info_ptr->cr_save_offset = info_ptr->gp_save_offset - info_ptr->cr_size;
12416 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
12418 /* Align stack so SPE GPR save area is aligned on a
12419 double-word boundary. */
12420 if (info_ptr->spe_gp_size != 0)
12421 info_ptr->spe_padding_size
12422 = 8 - (-info_ptr->cr_save_offset % 8);
12424 info_ptr->spe_padding_size = 0;
12426 info_ptr->spe_gp_save_offset
12427 = info_ptr->cr_save_offset
12428 - info_ptr->spe_padding_size
12429 - info_ptr->spe_gp_size;
12431 /* Adjust for SPE case. */
12432 info_ptr->toc_save_offset
12433 = info_ptr->spe_gp_save_offset - info_ptr->toc_size;
12435 else if (TARGET_ALTIVEC_ABI)
12437 info_ptr->vrsave_save_offset
12438 = info_ptr->cr_save_offset - info_ptr->vrsave_size;
12440 /* Align stack so vector save area is on a quadword boundary. */
12441 if (info_ptr->altivec_size != 0)
12442 info_ptr->altivec_padding_size
12443 = 16 - (-info_ptr->vrsave_save_offset % 16);
12445 info_ptr->altivec_padding_size = 0;
12447 info_ptr->altivec_save_offset
12448 = info_ptr->vrsave_save_offset
12449 - info_ptr->altivec_padding_size
12450 - info_ptr->altivec_size;
12452 /* Adjust for AltiVec case. */
12453 info_ptr->toc_save_offset
12454 = info_ptr->altivec_save_offset - info_ptr->toc_size;
12457 info_ptr->toc_save_offset = info_ptr->cr_save_offset - info_ptr->toc_size;
12458 info_ptr->ehrd_offset = info_ptr->toc_save_offset - ehrd_size;
12459 info_ptr->lr_save_offset = reg_size;
12463 save_align = (TARGET_ALTIVEC_ABI || DEFAULT_ABI == ABI_DARWIN) ? 16 : 8;
12464 info_ptr->save_size = RS6000_ALIGN (info_ptr->fp_size
12465 + info_ptr->gp_size
12466 + info_ptr->altivec_size
12467 + info_ptr->altivec_padding_size
12468 + info_ptr->spe_gp_size
12469 + info_ptr->spe_padding_size
12471 + info_ptr->cr_size
12472 + info_ptr->lr_size
12473 + info_ptr->vrsave_size
12474 + info_ptr->toc_size,
12477 non_fixed_size = (info_ptr->vars_size
12478 + info_ptr->parm_size
12479 + info_ptr->save_size
12480 + info_ptr->varargs_size);
12482 info_ptr->total_size = RS6000_ALIGN (non_fixed_size + info_ptr->fixed_size,
12483 ABI_STACK_BOUNDARY / BITS_PER_UNIT);
12485 /* Determine if we need to allocate any stack frame:
12487 For AIX we need to push the stack if a frame pointer is needed
12488 (because the stack might be dynamically adjusted), if we are
12489 debugging, if we make calls, or if the sum of fp_save, gp_save,
12490 and local variables are more than the space needed to save all
12491 non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
12492 + 18*8 = 288 (GPR13 reserved).
12494 For V.4 we don't have the stack cushion that AIX uses, but assume
12495 that the debugger can handle stackless frames. */
12497 if (info_ptr->calls_p)
12498 info_ptr->push_p = 1;
12500 else if (DEFAULT_ABI == ABI_V4)
12501 info_ptr->push_p = non_fixed_size != 0;
12503 else if (frame_pointer_needed)
12504 info_ptr->push_p = 1;
12506 else if (TARGET_XCOFF && write_symbols != NO_DEBUG)
12507 info_ptr->push_p = 1;
12510 info_ptr->push_p = non_fixed_size > (TARGET_32BIT ? 220 : 288);
12512 /* Zero offsets if we're not saving those registers. */
12513 if (info_ptr->fp_size == 0)
12514 info_ptr->fp_save_offset = 0;
12516 if (info_ptr->gp_size == 0)
12517 info_ptr->gp_save_offset = 0;
12519 if (! TARGET_ALTIVEC_ABI || info_ptr->altivec_size == 0)
12520 info_ptr->altivec_save_offset = 0;
12522 if (! TARGET_ALTIVEC_ABI || info_ptr->vrsave_mask == 0)
12523 info_ptr->vrsave_save_offset = 0;
12525 if (! TARGET_SPE_ABI
12526 || info_ptr->spe_64bit_regs_used == 0
12527 || info_ptr->spe_gp_size == 0)
12528 info_ptr->spe_gp_save_offset = 0;
12530 if (! info_ptr->lr_save_p)
12531 info_ptr->lr_save_offset = 0;
12533 if (! info_ptr->cr_save_p)
12534 info_ptr->cr_save_offset = 0;
12536 if (! info_ptr->toc_save_p)
12537 info_ptr->toc_save_offset = 0;
12542 /* Return true if the current function uses any GPRs in 64-bit SIMD
12546 spe_func_has_64bit_regs_p (void)
12550 /* Functions that save and restore all the call-saved registers will
12551 need to save/restore the registers in 64-bits. */
12552 if (current_function_calls_eh_return
12553 || current_function_calls_setjmp
12554 || current_function_has_nonlocal_goto)
12557 insns = get_insns ();
12559 for (insn = NEXT_INSN (insns); insn != NULL_RTX; insn = NEXT_INSN (insn))
12565 i = PATTERN (insn);
12566 if (GET_CODE (i) == SET)
12568 enum machine_mode mode = GET_MODE (SET_SRC (i));
12570 if (SPE_VECTOR_MODE (mode))
12572 if (TARGET_E500_DOUBLE && mode == DFmode)
12582 debug_stack_info (rs6000_stack_t *info)
12584 const char *abi_string;
12587 info = rs6000_stack_info ();
12589 fprintf (stderr, "\nStack information for function %s:\n",
12590 ((current_function_decl && DECL_NAME (current_function_decl))
12591 ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
12596 default: abi_string = "Unknown"; break;
12597 case ABI_NONE: abi_string = "NONE"; break;
12598 case ABI_AIX: abi_string = "AIX"; break;
12599 case ABI_DARWIN: abi_string = "Darwin"; break;
12600 case ABI_V4: abi_string = "V.4"; break;
12603 fprintf (stderr, "\tABI = %5s\n", abi_string);
12605 if (TARGET_ALTIVEC_ABI)
12606 fprintf (stderr, "\tALTIVEC ABI extensions enabled.\n");
12608 if (TARGET_SPE_ABI)
12609 fprintf (stderr, "\tSPE ABI extensions enabled.\n");
12611 if (info->first_gp_reg_save != 32)
12612 fprintf (stderr, "\tfirst_gp_reg_save = %5d\n", info->first_gp_reg_save);
12614 if (info->first_fp_reg_save != 64)
12615 fprintf (stderr, "\tfirst_fp_reg_save = %5d\n", info->first_fp_reg_save);
12617 if (info->first_altivec_reg_save <= LAST_ALTIVEC_REGNO)
12618 fprintf (stderr, "\tfirst_altivec_reg_save = %5d\n",
12619 info->first_altivec_reg_save);
12621 if (info->lr_save_p)
12622 fprintf (stderr, "\tlr_save_p = %5d\n", info->lr_save_p);
12624 if (info->cr_save_p)
12625 fprintf (stderr, "\tcr_save_p = %5d\n", info->cr_save_p);
12627 if (info->toc_save_p)
12628 fprintf (stderr, "\ttoc_save_p = %5d\n", info->toc_save_p);
12630 if (info->vrsave_mask)
12631 fprintf (stderr, "\tvrsave_mask = 0x%x\n", info->vrsave_mask);
12634 fprintf (stderr, "\tpush_p = %5d\n", info->push_p);
12637 fprintf (stderr, "\tcalls_p = %5d\n", info->calls_p);
12639 if (info->gp_save_offset)
12640 fprintf (stderr, "\tgp_save_offset = %5d\n", info->gp_save_offset);
12642 if (info->fp_save_offset)
12643 fprintf (stderr, "\tfp_save_offset = %5d\n", info->fp_save_offset);
12645 if (info->altivec_save_offset)
12646 fprintf (stderr, "\taltivec_save_offset = %5d\n",
12647 info->altivec_save_offset);
12649 if (info->spe_gp_save_offset)
12650 fprintf (stderr, "\tspe_gp_save_offset = %5d\n",
12651 info->spe_gp_save_offset);
12653 if (info->vrsave_save_offset)
12654 fprintf (stderr, "\tvrsave_save_offset = %5d\n",
12655 info->vrsave_save_offset);
12657 if (info->lr_save_offset)
12658 fprintf (stderr, "\tlr_save_offset = %5d\n", info->lr_save_offset);
12660 if (info->cr_save_offset)
12661 fprintf (stderr, "\tcr_save_offset = %5d\n", info->cr_save_offset);
12663 if (info->toc_save_offset)
12664 fprintf (stderr, "\ttoc_save_offset = %5d\n", info->toc_save_offset);
12666 if (info->varargs_save_offset)
12667 fprintf (stderr, "\tvarargs_save_offset = %5d\n", info->varargs_save_offset);
12669 if (info->total_size)
12670 fprintf (stderr, "\ttotal_size = "HOST_WIDE_INT_PRINT_DEC"\n",
12673 if (info->varargs_size)
12674 fprintf (stderr, "\tvarargs_size = %5d\n", info->varargs_size);
12676 if (info->vars_size)
12677 fprintf (stderr, "\tvars_size = "HOST_WIDE_INT_PRINT_DEC"\n",
12680 if (info->parm_size)
12681 fprintf (stderr, "\tparm_size = %5d\n", info->parm_size);
12683 if (info->fixed_size)
12684 fprintf (stderr, "\tfixed_size = %5d\n", info->fixed_size);
12687 fprintf (stderr, "\tgp_size = %5d\n", info->gp_size);
12689 if (info->spe_gp_size)
12690 fprintf (stderr, "\tspe_gp_size = %5d\n", info->spe_gp_size);
12693 fprintf (stderr, "\tfp_size = %5d\n", info->fp_size);
12695 if (info->altivec_size)
12696 fprintf (stderr, "\taltivec_size = %5d\n", info->altivec_size);
12698 if (info->vrsave_size)
12699 fprintf (stderr, "\tvrsave_size = %5d\n", info->vrsave_size);
12701 if (info->altivec_padding_size)
12702 fprintf (stderr, "\taltivec_padding_size= %5d\n",
12703 info->altivec_padding_size);
12705 if (info->spe_padding_size)
12706 fprintf (stderr, "\tspe_padding_size = %5d\n",
12707 info->spe_padding_size);
12710 fprintf (stderr, "\tlr_size = %5d\n", info->lr_size);
12713 fprintf (stderr, "\tcr_size = %5d\n", info->cr_size);
12715 if (info->toc_size)
12716 fprintf (stderr, "\ttoc_size = %5d\n", info->toc_size);
12718 if (info->save_size)
12719 fprintf (stderr, "\tsave_size = %5d\n", info->save_size);
12721 if (info->reg_size != 4)
12722 fprintf (stderr, "\treg_size = %5d\n", info->reg_size);
12724 fprintf (stderr, "\n");
12728 rs6000_return_addr (int count, rtx frame)
12730 /* Currently we don't optimize very well between prolog and body
12731 code and for PIC code the code can be actually quite bad, so
12732 don't try to be too clever here. */
12733 if (count != 0 || (DEFAULT_ABI != ABI_AIX && flag_pic))
12735 cfun->machine->ra_needs_full_frame = 1;
12742 plus_constant (copy_to_reg
12743 (gen_rtx_MEM (Pmode,
12744 memory_address (Pmode, frame))),
12745 RETURN_ADDRESS_OFFSET)));
12748 cfun->machine->ra_need_lr = 1;
12749 return get_hard_reg_initial_val (Pmode, LINK_REGISTER_REGNUM);
12752 /* Say whether a function is a candidate for sibcall handling or not.
12753 We do not allow indirect calls to be optimized into sibling calls.
12754 Also, we can't do it if there are any vector parameters; there's
12755 nowhere to put the VRsave code so it works; note that functions with
12756 vector parameters are required to have a prototype, so the argument
12757 type info must be available here. (The tail recursion case can work
12758 with vector parameters, but there's no way to distinguish here.) */
12760 rs6000_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
12765 if (TARGET_ALTIVEC_VRSAVE)
12767 for (type = TYPE_ARG_TYPES (TREE_TYPE (decl));
12768 type; type = TREE_CHAIN (type))
12770 if (TREE_CODE (TREE_VALUE (type)) == VECTOR_TYPE)
12774 if (DEFAULT_ABI == ABI_DARWIN
12775 || (*targetm.binds_local_p) (decl))
12777 tree attr_list = TYPE_ATTRIBUTES (TREE_TYPE (decl));
12779 if (!lookup_attribute ("longcall", attr_list)
12780 || lookup_attribute ("shortcall", attr_list))
12788 rs6000_ra_ever_killed (void)
12794 if (current_function_is_thunk)
12797 /* regs_ever_live has LR marked as used if any sibcalls are present,
12798 but this should not force saving and restoring in the
12799 pro/epilogue. Likewise, reg_set_between_p thinks a sibcall
12800 clobbers LR, so that is inappropriate. */
12802 /* Also, the prologue can generate a store into LR that
12803 doesn't really count, like this:
12806 bcl to set PIC register
12810 When we're called from the epilogue, we need to avoid counting
12811 this as a store. */
12813 push_topmost_sequence ();
12814 top = get_insns ();
12815 pop_topmost_sequence ();
12816 reg = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
12818 for (insn = NEXT_INSN (top); insn != NULL_RTX; insn = NEXT_INSN (insn))
12822 if (FIND_REG_INC_NOTE (insn, reg))
12824 else if (GET_CODE (insn) == CALL_INSN
12825 && !SIBLING_CALL_P (insn))
12827 else if (set_of (reg, insn) != NULL_RTX
12828 && !prologue_epilogue_contains (insn))
12835 /* Add a REG_MAYBE_DEAD note to the insn. */
12837 rs6000_maybe_dead (rtx insn)
12839 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD,
12844 /* Emit instructions needed to load the TOC register.
12845 This is only needed when TARGET_TOC, TARGET_MINIMAL_TOC, and there is
12846 a constant pool; or for SVR4 -fpic. */
12849 rs6000_emit_load_toc_table (int fromprolog)
12852 dest = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
12854 if (TARGET_ELF && DEFAULT_ABI == ABI_V4 && flag_pic == 1)
12856 rtx temp = (fromprolog
12857 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
12858 : gen_reg_rtx (Pmode));
12859 insn = emit_insn (gen_load_toc_v4_pic_si (temp));
12861 rs6000_maybe_dead (insn);
12862 insn = emit_move_insn (dest, temp);
12864 rs6000_maybe_dead (insn);
12866 else if (TARGET_ELF && DEFAULT_ABI != ABI_AIX && flag_pic == 2)
12869 rtx tempLR = (fromprolog
12870 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
12871 : gen_reg_rtx (Pmode));
12872 rtx temp0 = (fromprolog
12873 ? gen_rtx_REG (Pmode, 0)
12874 : gen_reg_rtx (Pmode));
12877 /* possibly create the toc section */
12878 if (! toc_initialized)
12881 function_section (current_function_decl);
12888 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
12889 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
12891 ASM_GENERATE_INTERNAL_LABEL (buf, "LCL", rs6000_pic_labelno);
12892 symL = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
12894 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1 (tempLR,
12896 rs6000_maybe_dead (emit_move_insn (dest, tempLR));
12897 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_2 (temp0, dest,
12904 static int reload_toc_labelno = 0;
12906 tocsym = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
12908 ASM_GENERATE_INTERNAL_LABEL (buf, "LCG", reload_toc_labelno++);
12909 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
12911 emit_insn (gen_load_toc_v4_PIC_1b (tempLR, symF, tocsym));
12912 emit_move_insn (dest, tempLR);
12913 emit_move_insn (temp0, gen_rtx_MEM (Pmode, dest));
12915 insn = emit_insn (gen_addsi3 (dest, temp0, dest));
12917 rs6000_maybe_dead (insn);
12919 else if (TARGET_ELF && !TARGET_AIX && flag_pic == 0 && TARGET_MINIMAL_TOC)
12921 /* This is for AIX code running in non-PIC ELF32. */
12924 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
12925 realsym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
12927 insn = emit_insn (gen_elf_high (dest, realsym));
12929 rs6000_maybe_dead (insn);
12930 insn = emit_insn (gen_elf_low (dest, dest, realsym));
12932 rs6000_maybe_dead (insn);
12934 else if (DEFAULT_ABI == ABI_AIX)
12937 insn = emit_insn (gen_load_toc_aix_si (dest));
12939 insn = emit_insn (gen_load_toc_aix_di (dest));
12941 rs6000_maybe_dead (insn);
12947 /* Emit instructions to restore the link register after determining where
12948 its value has been stored. */
12951 rs6000_emit_eh_reg_restore (rtx source, rtx scratch)
12953 rs6000_stack_t *info = rs6000_stack_info ();
12956 operands[0] = source;
12957 operands[1] = scratch;
12959 if (info->lr_save_p)
12961 rtx frame_rtx = stack_pointer_rtx;
12962 HOST_WIDE_INT sp_offset = 0;
12965 if (frame_pointer_needed
12966 || current_function_calls_alloca
12967 || info->total_size > 32767)
12969 emit_move_insn (operands[1], gen_rtx_MEM (Pmode, frame_rtx));
12970 frame_rtx = operands[1];
12972 else if (info->push_p)
12973 sp_offset = info->total_size;
12975 tmp = plus_constant (frame_rtx, info->lr_save_offset + sp_offset);
12976 tmp = gen_rtx_MEM (Pmode, tmp);
12977 emit_move_insn (tmp, operands[0]);
12980 emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM), operands[0]);
12983 static GTY(()) int set = -1;
12986 get_TOC_alias_set (void)
12989 set = new_alias_set ();
12993 /* This returns nonzero if the current function uses the TOC. This is
12994 determined by the presence of (use (unspec ... UNSPEC_TOC)), which
12995 is generated by the ABI_V4 load_toc_* patterns. */
13002 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
13005 rtx pat = PATTERN (insn);
13008 if (GET_CODE (pat) == PARALLEL)
13009 for (i = 0; i < XVECLEN (pat, 0); i++)
13011 rtx sub = XVECEXP (pat, 0, i);
13012 if (GET_CODE (sub) == USE)
13014 sub = XEXP (sub, 0);
13015 if (GET_CODE (sub) == UNSPEC
13016 && XINT (sub, 1) == UNSPEC_TOC)
13026 create_TOC_reference (rtx symbol)
13028 return gen_rtx_PLUS (Pmode,
13029 gen_rtx_REG (Pmode, TOC_REGISTER),
13030 gen_rtx_CONST (Pmode,
13031 gen_rtx_MINUS (Pmode, symbol,
13032 gen_rtx_SYMBOL_REF (Pmode, toc_label_name))));
13035 /* If _Unwind_* has been called from within the same module,
13036 toc register is not guaranteed to be saved to 40(1) on function
13037 entry. Save it there in that case. */
13040 rs6000_aix_emit_builtin_unwind_init (void)
13043 rtx stack_top = gen_reg_rtx (Pmode);
13044 rtx opcode_addr = gen_reg_rtx (Pmode);
13045 rtx opcode = gen_reg_rtx (SImode);
13046 rtx tocompare = gen_reg_rtx (SImode);
13047 rtx no_toc_save_needed = gen_label_rtx ();
13049 mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
13050 emit_move_insn (stack_top, mem);
13052 mem = gen_rtx_MEM (Pmode,
13053 gen_rtx_PLUS (Pmode, stack_top,
13054 GEN_INT (2 * GET_MODE_SIZE (Pmode))));
13055 emit_move_insn (opcode_addr, mem);
13056 emit_move_insn (opcode, gen_rtx_MEM (SImode, opcode_addr));
13057 emit_move_insn (tocompare, gen_int_mode (TARGET_32BIT ? 0x80410014
13058 : 0xE8410028, SImode));
13060 do_compare_rtx_and_jump (opcode, tocompare, EQ, 1,
13061 SImode, NULL_RTX, NULL_RTX,
13062 no_toc_save_needed);
13064 mem = gen_rtx_MEM (Pmode,
13065 gen_rtx_PLUS (Pmode, stack_top,
13066 GEN_INT (5 * GET_MODE_SIZE (Pmode))));
13067 emit_move_insn (mem, gen_rtx_REG (Pmode, 2));
13068 emit_label (no_toc_save_needed);
13071 /* This ties together stack memory (MEM with an alias set of
13072 rs6000_sr_alias_set) and the change to the stack pointer. */
13075 rs6000_emit_stack_tie (void)
13077 rtx mem = gen_rtx_MEM (BLKmode, gen_rtx_REG (Pmode, STACK_POINTER_REGNUM));
13079 set_mem_alias_set (mem, rs6000_sr_alias_set);
13080 emit_insn (gen_stack_tie (mem));
13083 /* Emit the correct code for allocating stack space, as insns.
13084 If COPY_R12, make sure a copy of the old frame is left in r12.
13085 The generated code may use hard register 0 as a temporary. */
13088 rs6000_emit_allocate_stack (HOST_WIDE_INT size, int copy_r12)
13091 rtx stack_reg = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
13092 rtx tmp_reg = gen_rtx_REG (Pmode, 0);
13093 rtx todec = gen_int_mode (-size, Pmode);
13095 if (INTVAL (todec) != -size)
13097 warning("stack frame too large");
13098 emit_insn (gen_trap ());
13102 if (current_function_limit_stack)
13104 if (REG_P (stack_limit_rtx)
13105 && REGNO (stack_limit_rtx) > 1
13106 && REGNO (stack_limit_rtx) <= 31)
13108 emit_insn (TARGET_32BIT
13109 ? gen_addsi3 (tmp_reg,
13112 : gen_adddi3 (tmp_reg,
13116 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
13119 else if (GET_CODE (stack_limit_rtx) == SYMBOL_REF
13121 && DEFAULT_ABI == ABI_V4)
13123 rtx toload = gen_rtx_CONST (VOIDmode,
13124 gen_rtx_PLUS (Pmode,
13128 emit_insn (gen_elf_high (tmp_reg, toload));
13129 emit_insn (gen_elf_low (tmp_reg, tmp_reg, toload));
13130 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
13134 warning ("stack limit expression is not supported");
13137 if (copy_r12 || ! TARGET_UPDATE)
13138 emit_move_insn (gen_rtx_REG (Pmode, 12), stack_reg);
13144 /* Need a note here so that try_split doesn't get confused. */
13145 if (get_last_insn() == NULL_RTX)
13146 emit_note (NOTE_INSN_DELETED);
13147 insn = emit_move_insn (tmp_reg, todec);
13148 try_split (PATTERN (insn), insn, 0);
13152 insn = emit_insn (TARGET_32BIT
13153 ? gen_movsi_update (stack_reg, stack_reg,
13155 : gen_movdi_di_update (stack_reg, stack_reg,
13156 todec, stack_reg));
13160 insn = emit_insn (TARGET_32BIT
13161 ? gen_addsi3 (stack_reg, stack_reg, todec)
13162 : gen_adddi3 (stack_reg, stack_reg, todec));
13163 emit_move_insn (gen_rtx_MEM (Pmode, stack_reg),
13164 gen_rtx_REG (Pmode, 12));
13167 RTX_FRAME_RELATED_P (insn) = 1;
13169 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
13170 gen_rtx_SET (VOIDmode, stack_reg,
13171 gen_rtx_PLUS (Pmode, stack_reg,
13176 /* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
13177 with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
13178 is not NULL. It would be nice if dwarf2out_frame_debug_expr could
13179 deduce these equivalences by itself so it wasn't necessary to hold
13180 its hand so much. */
13183 rs6000_frame_related (rtx insn, rtx reg, HOST_WIDE_INT val,
13184 rtx reg2, rtx rreg)
13188 /* copy_rtx will not make unique copies of registers, so we need to
13189 ensure we don't have unwanted sharing here. */
13191 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
13194 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
13196 real = copy_rtx (PATTERN (insn));
13198 if (reg2 != NULL_RTX)
13199 real = replace_rtx (real, reg2, rreg);
13201 real = replace_rtx (real, reg,
13202 gen_rtx_PLUS (Pmode, gen_rtx_REG (Pmode,
13203 STACK_POINTER_REGNUM),
13206 /* We expect that 'real' is either a SET or a PARALLEL containing
13207 SETs (and possibly other stuff). In a PARALLEL, all the SETs
13208 are important so they all have to be marked RTX_FRAME_RELATED_P. */
13210 if (GET_CODE (real) == SET)
13214 temp = simplify_rtx (SET_SRC (set));
13216 SET_SRC (set) = temp;
13217 temp = simplify_rtx (SET_DEST (set));
13219 SET_DEST (set) = temp;
13220 if (GET_CODE (SET_DEST (set)) == MEM)
13222 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
13224 XEXP (SET_DEST (set), 0) = temp;
13227 else if (GET_CODE (real) == PARALLEL)
13230 for (i = 0; i < XVECLEN (real, 0); i++)
13231 if (GET_CODE (XVECEXP (real, 0, i)) == SET)
13233 rtx set = XVECEXP (real, 0, i);
13235 temp = simplify_rtx (SET_SRC (set));
13237 SET_SRC (set) = temp;
13238 temp = simplify_rtx (SET_DEST (set));
13240 SET_DEST (set) = temp;
13241 if (GET_CODE (SET_DEST (set)) == MEM)
13243 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
13245 XEXP (SET_DEST (set), 0) = temp;
13247 RTX_FRAME_RELATED_P (set) = 1;
13254 real = spe_synthesize_frame_save (real);
13256 RTX_FRAME_RELATED_P (insn) = 1;
13257 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
13262 /* Given an SPE frame note, return a PARALLEL of SETs with the
13263 original note, plus a synthetic register save. */
13266 spe_synthesize_frame_save (rtx real)
13268 rtx synth, offset, reg, real2;
13270 if (GET_CODE (real) != SET
13271 || GET_MODE (SET_SRC (real)) != V2SImode)
13274 /* For the SPE, registers saved in 64-bits, get a PARALLEL for their
13275 frame related note. The parallel contains a set of the register
13276 being saved, and another set to a synthetic register (n+1200).
13277 This is so we can differentiate between 64-bit and 32-bit saves.
13278 Words cannot describe this nastiness. */
13280 if (GET_CODE (SET_DEST (real)) != MEM
13281 || GET_CODE (XEXP (SET_DEST (real), 0)) != PLUS
13282 || GET_CODE (SET_SRC (real)) != REG)
13286 (set (mem (plus (reg x) (const y)))
13289 (set (mem (plus (reg x) (const y+4)))
13293 real2 = copy_rtx (real);
13294 PUT_MODE (SET_DEST (real2), SImode);
13295 reg = SET_SRC (real2);
13296 real2 = replace_rtx (real2, reg, gen_rtx_REG (SImode, REGNO (reg)));
13297 synth = copy_rtx (real2);
13299 if (BYTES_BIG_ENDIAN)
13301 offset = XEXP (XEXP (SET_DEST (real2), 0), 1);
13302 real2 = replace_rtx (real2, offset, GEN_INT (INTVAL (offset) + 4));
13305 reg = SET_SRC (synth);
13307 synth = replace_rtx (synth, reg,
13308 gen_rtx_REG (SImode, REGNO (reg) + 1200));
13310 offset = XEXP (XEXP (SET_DEST (synth), 0), 1);
13311 synth = replace_rtx (synth, offset,
13312 GEN_INT (INTVAL (offset)
13313 + (BYTES_BIG_ENDIAN ? 0 : 4)));
13315 RTX_FRAME_RELATED_P (synth) = 1;
13316 RTX_FRAME_RELATED_P (real2) = 1;
13317 if (BYTES_BIG_ENDIAN)
13318 real = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, synth, real2));
13320 real = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, real2, synth));
13325 /* Returns an insn that has a vrsave set operation with the
13326 appropriate CLOBBERs. */
13329 generate_set_vrsave (rtx reg, rs6000_stack_t *info, int epiloguep)
13332 rtx insn, clobs[TOTAL_ALTIVEC_REGS + 1];
13333 rtx vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
13336 = gen_rtx_SET (VOIDmode,
13338 gen_rtx_UNSPEC_VOLATILE (SImode,
13339 gen_rtvec (2, reg, vrsave),
13344 /* We need to clobber the registers in the mask so the scheduler
13345 does not move sets to VRSAVE before sets of AltiVec registers.
13347 However, if the function receives nonlocal gotos, reload will set
13348 all call saved registers live. We will end up with:
13350 (set (reg 999) (mem))
13351 (parallel [ (set (reg vrsave) (unspec blah))
13352 (clobber (reg 999))])
13354 The clobber will cause the store into reg 999 to be dead, and
13355 flow will attempt to delete an epilogue insn. In this case, we
13356 need an unspec use/set of the register. */
13358 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
13359 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
13361 if (!epiloguep || call_used_regs [i])
13362 clobs[nclobs++] = gen_rtx_CLOBBER (VOIDmode,
13363 gen_rtx_REG (V4SImode, i));
13366 rtx reg = gen_rtx_REG (V4SImode, i);
13369 = gen_rtx_SET (VOIDmode,
13371 gen_rtx_UNSPEC (V4SImode,
13372 gen_rtvec (1, reg), 27));
13376 insn = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nclobs));
13378 for (i = 0; i < nclobs; ++i)
13379 XVECEXP (insn, 0, i) = clobs[i];
13384 /* Save a register into the frame, and emit RTX_FRAME_RELATED_P notes.
13385 Save REGNO into [FRAME_REG + OFFSET] in mode MODE. */
13388 emit_frame_save (rtx frame_reg, rtx frame_ptr, enum machine_mode mode,
13389 unsigned int regno, int offset, HOST_WIDE_INT total_size)
13391 rtx reg, offset_rtx, insn, mem, addr, int_rtx;
13392 rtx replacea, replaceb;
13394 int_rtx = GEN_INT (offset);
13396 /* Some cases that need register indexed addressing. */
13397 if ((TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
13398 || (TARGET_E500_DOUBLE && mode == DFmode)
13400 && SPE_VECTOR_MODE (mode)
13401 && !SPE_CONST_OFFSET_OK (offset)))
13403 /* Whomever calls us must make sure r11 is available in the
13404 flow path of instructions in the prologue. */
13405 offset_rtx = gen_rtx_REG (Pmode, 11);
13406 emit_move_insn (offset_rtx, int_rtx);
13408 replacea = offset_rtx;
13409 replaceb = int_rtx;
13413 offset_rtx = int_rtx;
13414 replacea = NULL_RTX;
13415 replaceb = NULL_RTX;
13418 reg = gen_rtx_REG (mode, regno);
13419 addr = gen_rtx_PLUS (Pmode, frame_reg, offset_rtx);
13420 mem = gen_rtx_MEM (mode, addr);
13421 set_mem_alias_set (mem, rs6000_sr_alias_set);
13423 insn = emit_move_insn (mem, reg);
13425 rs6000_frame_related (insn, frame_ptr, total_size, replacea, replaceb);
13428 /* Emit an offset memory reference suitable for a frame store, while
13429 converting to a valid addressing mode. */
13432 gen_frame_mem_offset (enum machine_mode mode, rtx reg, int offset)
13434 rtx int_rtx, offset_rtx;
13436 int_rtx = GEN_INT (offset);
13438 if ((TARGET_SPE_ABI && SPE_VECTOR_MODE (mode))
13439 || (TARGET_E500_DOUBLE && mode == DFmode))
13441 offset_rtx = gen_rtx_REG (Pmode, FIXED_SCRATCH);
13442 emit_move_insn (offset_rtx, int_rtx);
13445 offset_rtx = int_rtx;
13447 return gen_rtx_MEM (mode, gen_rtx_PLUS (Pmode, reg, offset_rtx));
13450 #ifndef TARGET_FIX_AND_CONTINUE
13451 #define TARGET_FIX_AND_CONTINUE 0
13454 /* Emit function prologue as insns. */
13457 rs6000_emit_prologue (void)
13459 rs6000_stack_t *info = rs6000_stack_info ();
13460 enum machine_mode reg_mode = Pmode;
13461 int reg_size = TARGET_32BIT ? 4 : 8;
13462 rtx sp_reg_rtx = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
13463 rtx frame_ptr_rtx = gen_rtx_REG (Pmode, 12);
13464 rtx frame_reg_rtx = sp_reg_rtx;
13465 rtx cr_save_rtx = NULL_RTX;
13467 int saving_FPRs_inline;
13468 int using_store_multiple;
13469 HOST_WIDE_INT sp_offset = 0;
13471 if (TARGET_FIX_AND_CONTINUE)
13473 /* gdb on darwin arranges to forward a function from the old
13474 address by modifying the first 4 instructions of the function
13475 to branch to the overriding function. This is necessary to
13476 permit function pointers that point to the old function to
13477 actually forward to the new function. */
13478 emit_insn (gen_nop ());
13479 emit_insn (gen_nop ());
13480 emit_insn (gen_nop ());
13481 emit_insn (gen_nop ());
13484 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
13486 reg_mode = V2SImode;
13490 using_store_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
13491 && (!TARGET_SPE_ABI
13492 || info->spe_64bit_regs_used == 0)
13493 && info->first_gp_reg_save < 31);
13494 saving_FPRs_inline = (info->first_fp_reg_save == 64
13495 || FP_SAVE_INLINE (info->first_fp_reg_save)
13496 || current_function_calls_eh_return
13497 || cfun->machine->ra_need_lr);
13499 /* For V.4, update stack before we do any saving and set back pointer. */
13501 && (DEFAULT_ABI == ABI_V4
13502 || current_function_calls_eh_return))
13504 if (info->total_size < 32767)
13505 sp_offset = info->total_size;
13507 frame_reg_rtx = frame_ptr_rtx;
13508 rs6000_emit_allocate_stack (info->total_size,
13509 (frame_reg_rtx != sp_reg_rtx
13510 && (info->cr_save_p
13512 || info->first_fp_reg_save < 64
13513 || info->first_gp_reg_save < 32
13515 if (frame_reg_rtx != sp_reg_rtx)
13516 rs6000_emit_stack_tie ();
13519 /* Handle world saves specially here. */
13520 if (info->world_save_p)
13526 /* save_world expects lr in r0. */
13527 if (info->lr_save_p)
13529 insn = emit_move_insn (gen_rtx_REG (Pmode, 0),
13530 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
13531 RTX_FRAME_RELATED_P (insn) = 1;
13534 /* The SAVE_WORLD and RESTORE_WORLD routines make a number of
13535 assumptions about the offsets of various bits of the stack
13536 frame. Abort if things aren't what they should be. */
13537 if (info->gp_save_offset != -220
13538 || info->fp_save_offset != -144
13539 || info->lr_save_offset != 8
13540 || info->cr_save_offset != 4
13542 || !info->lr_save_p
13543 || (current_function_calls_eh_return && info->ehrd_offset != -432)
13544 || (info->vrsave_save_offset != -224
13545 || info->altivec_save_offset != (-224 -16 -192)))
13548 treg = gen_rtx_REG (SImode, 11);
13549 emit_move_insn (treg, GEN_INT (-info->total_size));
13551 /* SAVE_WORLD takes the caller's LR in R0 and the frame size
13552 in R11. It also clobbers R12, so beware! */
13554 /* Preserve CR2 for save_world prologues */
13556 sz += 32 - info->first_gp_reg_save;
13557 sz += 64 - info->first_fp_reg_save;
13558 sz += LAST_ALTIVEC_REGNO - info->first_altivec_reg_save + 1;
13559 p = rtvec_alloc (sz);
13561 RTVEC_ELT (p, j++) = gen_rtx_CLOBBER (VOIDmode,
13562 gen_rtx_REG (Pmode,
13563 LINK_REGISTER_REGNUM));
13564 RTVEC_ELT (p, j++) = gen_rtx_USE (VOIDmode,
13565 gen_rtx_SYMBOL_REF (Pmode,
13567 /* We do floats first so that the instruction pattern matches
13569 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
13571 rtx reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
13572 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
13573 GEN_INT (info->fp_save_offset
13574 + sp_offset + 8 * i));
13575 rtx mem = gen_rtx_MEM (DFmode, addr);
13576 set_mem_alias_set (mem, rs6000_sr_alias_set);
13578 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
13580 for (i = 0; info->first_altivec_reg_save + i <= LAST_ALTIVEC_REGNO; i++)
13582 rtx reg = gen_rtx_REG (V4SImode, info->first_altivec_reg_save + i);
13583 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
13584 GEN_INT (info->altivec_save_offset
13585 + sp_offset + 16 * i));
13586 rtx mem = gen_rtx_MEM (V4SImode, addr);
13587 set_mem_alias_set (mem, rs6000_sr_alias_set);
13589 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
13591 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
13593 rtx reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
13594 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
13595 GEN_INT (info->gp_save_offset
13596 + sp_offset + reg_size * i));
13597 rtx mem = gen_rtx_MEM (reg_mode, addr);
13598 set_mem_alias_set (mem, rs6000_sr_alias_set);
13600 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
13604 /* CR register traditionally saved as CR2. */
13605 rtx reg = gen_rtx_REG (reg_mode, CR2_REGNO);
13606 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
13607 GEN_INT (info->cr_save_offset
13609 rtx mem = gen_rtx_MEM (reg_mode, addr);
13610 set_mem_alias_set (mem, rs6000_sr_alias_set);
13612 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
13614 /* Prevent any attempt to delete the setting of r0 and treg! */
13615 RTVEC_ELT (p, j++) = gen_rtx_USE (VOIDmode, gen_rtx_REG (Pmode, 0));
13616 RTVEC_ELT (p, j++) = gen_rtx_USE (VOIDmode, treg);
13617 RTVEC_ELT (p, j++) = gen_rtx_CLOBBER (VOIDmode, sp_reg_rtx);
13619 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
13620 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
13621 NULL_RTX, NULL_RTX);
13623 if (current_function_calls_eh_return)
13628 unsigned int regno = EH_RETURN_DATA_REGNO (i);
13629 if (regno == INVALID_REGNUM)
13631 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, reg_mode, regno,
13632 info->ehrd_offset + sp_offset
13633 + reg_size * (int) i,
13639 /* Save AltiVec registers if needed. */
13640 if (! info->world_save_p && TARGET_ALTIVEC_ABI && info->altivec_size != 0)
13644 /* There should be a non inline version of this, for when we
13645 are saving lots of vector registers. */
13646 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
13647 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
13649 rtx areg, savereg, mem;
13652 offset = info->altivec_save_offset + sp_offset
13653 + 16 * (i - info->first_altivec_reg_save);
13655 savereg = gen_rtx_REG (V4SImode, i);
13657 areg = gen_rtx_REG (Pmode, 0);
13658 emit_move_insn (areg, GEN_INT (offset));
13660 /* AltiVec addressing mode is [reg+reg]. */
13661 mem = gen_rtx_MEM (V4SImode,
13662 gen_rtx_PLUS (Pmode, frame_reg_rtx, areg));
13664 set_mem_alias_set (mem, rs6000_sr_alias_set);
13666 insn = emit_move_insn (mem, savereg);
13668 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
13669 areg, GEN_INT (offset));
13673 /* VRSAVE is a bit vector representing which AltiVec registers
13674 are used. The OS uses this to determine which vector
13675 registers to save on a context switch. We need to save
13676 VRSAVE on the stack frame, add whatever AltiVec registers we
13677 used in this function, and do the corresponding magic in the
13680 if (TARGET_ALTIVEC && TARGET_ALTIVEC_VRSAVE
13681 && ! info->world_save_p && info->vrsave_mask != 0)
13683 rtx reg, mem, vrsave;
13686 /* Get VRSAVE onto a GPR. */
13687 reg = gen_rtx_REG (SImode, 12);
13688 vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
13690 emit_insn (gen_get_vrsave_internal (reg));
13692 emit_insn (gen_rtx_SET (VOIDmode, reg, vrsave));
13695 offset = info->vrsave_save_offset + sp_offset;
13697 = gen_rtx_MEM (SImode,
13698 gen_rtx_PLUS (Pmode, frame_reg_rtx, GEN_INT (offset)));
13699 set_mem_alias_set (mem, rs6000_sr_alias_set);
13700 insn = emit_move_insn (mem, reg);
13702 /* Include the registers in the mask. */
13703 emit_insn (gen_iorsi3 (reg, reg, GEN_INT ((int) info->vrsave_mask)));
13705 insn = emit_insn (generate_set_vrsave (reg, info, 0));
13708 /* If we use the link register, get it into r0. */
13709 if (! info->world_save_p && info->lr_save_p)
13711 insn = emit_move_insn (gen_rtx_REG (Pmode, 0),
13712 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
13713 RTX_FRAME_RELATED_P (insn) = 1;
13716 /* If we need to save CR, put it into r12. */
13717 if (! info->world_save_p && info->cr_save_p && frame_reg_rtx != frame_ptr_rtx)
13721 cr_save_rtx = gen_rtx_REG (SImode, 12);
13722 insn = emit_insn (gen_movesi_from_cr (cr_save_rtx));
13723 RTX_FRAME_RELATED_P (insn) = 1;
13724 /* Now, there's no way that dwarf2out_frame_debug_expr is going
13725 to understand '(unspec:SI [(reg:CC 68) ...] UNSPEC_MOVESI_FROM_CR)'.
13726 But that's OK. All we have to do is specify that _one_ condition
13727 code register is saved in this stack slot. The thrower's epilogue
13728 will then restore all the call-saved registers.
13729 We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux. */
13730 set = gen_rtx_SET (VOIDmode, cr_save_rtx,
13731 gen_rtx_REG (SImode, CR2_REGNO));
13732 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
13737 /* Do any required saving of fpr's. If only one or two to save, do
13738 it ourselves. Otherwise, call function. */
13739 if (! info->world_save_p && saving_FPRs_inline)
13742 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
13743 if ((regs_ever_live[info->first_fp_reg_save+i]
13744 && ! call_used_regs[info->first_fp_reg_save+i]))
13745 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, DFmode,
13746 info->first_fp_reg_save + i,
13747 info->fp_save_offset + sp_offset + 8 * i,
13750 else if (! info->world_save_p && info->first_fp_reg_save != 64)
13754 const char *alloc_rname;
13756 p = rtvec_alloc (2 + 64 - info->first_fp_reg_save);
13758 RTVEC_ELT (p, 0) = gen_rtx_CLOBBER (VOIDmode,
13759 gen_rtx_REG (Pmode,
13760 LINK_REGISTER_REGNUM));
13761 sprintf (rname, "%s%d%s", SAVE_FP_PREFIX,
13762 info->first_fp_reg_save - 32, SAVE_FP_SUFFIX);
13763 alloc_rname = ggc_strdup (rname);
13764 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
13765 gen_rtx_SYMBOL_REF (Pmode,
13767 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
13769 rtx addr, reg, mem;
13770 reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
13771 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
13772 GEN_INT (info->fp_save_offset
13773 + sp_offset + 8*i));
13774 mem = gen_rtx_MEM (DFmode, addr);
13775 set_mem_alias_set (mem, rs6000_sr_alias_set);
13777 RTVEC_ELT (p, i + 2) = gen_rtx_SET (VOIDmode, mem, reg);
13779 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
13780 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
13781 NULL_RTX, NULL_RTX);
13784 /* Save GPRs. This is done as a PARALLEL if we are using
13785 the store-multiple instructions. */
13786 if (! info->world_save_p && using_store_multiple)
13790 p = rtvec_alloc (32 - info->first_gp_reg_save);
13791 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
13793 rtx addr, reg, mem;
13794 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
13795 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
13796 GEN_INT (info->gp_save_offset
13799 mem = gen_rtx_MEM (reg_mode, addr);
13800 set_mem_alias_set (mem, rs6000_sr_alias_set);
13802 RTVEC_ELT (p, i) = gen_rtx_SET (VOIDmode, mem, reg);
13804 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
13805 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
13806 NULL_RTX, NULL_RTX);
13808 else if (! info->world_save_p)
13811 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
13812 if ((regs_ever_live[info->first_gp_reg_save+i]
13813 && (! call_used_regs[info->first_gp_reg_save+i]
13814 || (i+info->first_gp_reg_save
13815 == RS6000_PIC_OFFSET_TABLE_REGNUM
13816 && TARGET_TOC && TARGET_MINIMAL_TOC)))
13817 || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
13818 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
13819 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
13821 rtx addr, reg, mem;
13822 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
13824 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
13826 int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
13829 if (!SPE_CONST_OFFSET_OK (offset))
13831 b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
13832 emit_move_insn (b, GEN_INT (offset));
13835 b = GEN_INT (offset);
13837 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
13838 mem = gen_rtx_MEM (V2SImode, addr);
13839 set_mem_alias_set (mem, rs6000_sr_alias_set);
13840 insn = emit_move_insn (mem, reg);
13842 if (GET_CODE (b) == CONST_INT)
13843 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
13844 NULL_RTX, NULL_RTX);
13846 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
13847 b, GEN_INT (offset));
13851 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
13852 GEN_INT (info->gp_save_offset
13855 mem = gen_rtx_MEM (reg_mode, addr);
13856 set_mem_alias_set (mem, rs6000_sr_alias_set);
13858 insn = emit_move_insn (mem, reg);
13859 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
13860 NULL_RTX, NULL_RTX);
13865 /* ??? There's no need to emit actual instructions here, but it's the
13866 easiest way to get the frame unwind information emitted. */
13867 if (! info->world_save_p && current_function_calls_eh_return)
13869 unsigned int i, regno;
13871 /* In AIX ABI we need to pretend we save r2 here. */
13874 rtx addr, reg, mem;
13876 reg = gen_rtx_REG (reg_mode, 2);
13877 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
13878 GEN_INT (sp_offset + 5 * reg_size));
13879 mem = gen_rtx_MEM (reg_mode, addr);
13880 set_mem_alias_set (mem, rs6000_sr_alias_set);
13882 insn = emit_move_insn (mem, reg);
13883 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
13884 NULL_RTX, NULL_RTX);
13885 PATTERN (insn) = gen_blockage ();
13890 regno = EH_RETURN_DATA_REGNO (i);
13891 if (regno == INVALID_REGNUM)
13894 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, reg_mode, regno,
13895 info->ehrd_offset + sp_offset
13896 + reg_size * (int) i,
13901 /* Save lr if we used it. */
13902 if (! info->world_save_p && info->lr_save_p)
13904 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
13905 GEN_INT (info->lr_save_offset + sp_offset));
13906 rtx reg = gen_rtx_REG (Pmode, 0);
13907 rtx mem = gen_rtx_MEM (Pmode, addr);
13908 /* This should not be of rs6000_sr_alias_set, because of
13909 __builtin_return_address. */
13911 insn = emit_move_insn (mem, reg);
13912 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
13913 NULL_RTX, NULL_RTX);
13916 /* Save CR if we use any that must be preserved. */
13917 if (! info->world_save_p && info->cr_save_p)
13919 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
13920 GEN_INT (info->cr_save_offset + sp_offset));
13921 rtx mem = gen_rtx_MEM (SImode, addr);
13922 /* See the large comment above about why CR2_REGNO is used. */
13923 rtx magic_eh_cr_reg = gen_rtx_REG (SImode, CR2_REGNO);
13925 set_mem_alias_set (mem, rs6000_sr_alias_set);
13927 /* If r12 was used to hold the original sp, copy cr into r0 now
13929 if (REGNO (frame_reg_rtx) == 12)
13933 cr_save_rtx = gen_rtx_REG (SImode, 0);
13934 insn = emit_insn (gen_movesi_from_cr (cr_save_rtx));
13935 RTX_FRAME_RELATED_P (insn) = 1;
13936 set = gen_rtx_SET (VOIDmode, cr_save_rtx, magic_eh_cr_reg);
13937 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
13942 insn = emit_move_insn (mem, cr_save_rtx);
13944 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
13945 NULL_RTX, NULL_RTX);
13948 /* Update stack and set back pointer unless this is V.4,
13949 for which it was done previously. */
13950 if (! info->world_save_p && info->push_p
13951 && !(DEFAULT_ABI == ABI_V4 || current_function_calls_eh_return))
13952 rs6000_emit_allocate_stack (info->total_size, FALSE);
13954 /* Set frame pointer, if needed. */
13955 if (frame_pointer_needed)
13957 insn = emit_move_insn (gen_rtx_REG (Pmode, FRAME_POINTER_REGNUM),
13959 RTX_FRAME_RELATED_P (insn) = 1;
13962 /* If we are using RS6000_PIC_OFFSET_TABLE_REGNUM, we need to set it up. */
13963 if ((TARGET_TOC && TARGET_MINIMAL_TOC && get_pool_size () != 0)
13964 || (DEFAULT_ABI == ABI_V4 && flag_pic == 1
13965 && regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM]))
13967 /* If emit_load_toc_table will use the link register, we need to save
13968 it. We use R12 for this purpose because emit_load_toc_table
13969 can use register 0. This allows us to use a plain 'blr' to return
13970 from the procedure more often. */
13971 int save_LR_around_toc_setup = (TARGET_ELF
13972 && DEFAULT_ABI != ABI_AIX
13974 && ! info->lr_save_p
13975 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0);
13976 if (save_LR_around_toc_setup)
13978 rtx lr = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
13980 insn = emit_move_insn (frame_ptr_rtx, lr);
13981 rs6000_maybe_dead (insn);
13982 RTX_FRAME_RELATED_P (insn) = 1;
13984 rs6000_emit_load_toc_table (TRUE);
13986 insn = emit_move_insn (lr, frame_ptr_rtx);
13987 rs6000_maybe_dead (insn);
13988 RTX_FRAME_RELATED_P (insn) = 1;
13991 rs6000_emit_load_toc_table (TRUE);
13995 if (DEFAULT_ABI == ABI_DARWIN
13996 && flag_pic && current_function_uses_pic_offset_table)
13998 rtx lr = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
13999 rtx src = machopic_function_base_sym ();
14001 rs6000_maybe_dead (emit_insn (gen_load_macho_picbase (lr, src)));
14003 insn = emit_move_insn (gen_rtx_REG (Pmode,
14004 RS6000_PIC_OFFSET_TABLE_REGNUM),
14006 rs6000_maybe_dead (insn);
14011 /* Write function prologue. */
14014 rs6000_output_function_prologue (FILE *file,
14015 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
14017 rs6000_stack_t *info = rs6000_stack_info ();
14019 if (TARGET_DEBUG_STACK)
14020 debug_stack_info (info);
14022 /* Write .extern for any function we will call to save and restore
14024 if (info->first_fp_reg_save < 64
14025 && !FP_SAVE_INLINE (info->first_fp_reg_save))
14026 fprintf (file, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
14027 SAVE_FP_PREFIX, info->first_fp_reg_save - 32, SAVE_FP_SUFFIX,
14028 RESTORE_FP_PREFIX, info->first_fp_reg_save - 32,
14029 RESTORE_FP_SUFFIX);
14031 /* Write .extern for AIX common mode routines, if needed. */
14032 if (! TARGET_POWER && ! TARGET_POWERPC && ! common_mode_defined)
14034 fputs ("\t.extern __mulh\n", file);
14035 fputs ("\t.extern __mull\n", file);
14036 fputs ("\t.extern __divss\n", file);
14037 fputs ("\t.extern __divus\n", file);
14038 fputs ("\t.extern __quoss\n", file);
14039 fputs ("\t.extern __quous\n", file);
14040 common_mode_defined = 1;
14043 if (! HAVE_prologue)
14047 /* A NOTE_INSN_DELETED is supposed to be at the start and end of
14048 the "toplevel" insn chain. */
14049 emit_note (NOTE_INSN_DELETED);
14050 rs6000_emit_prologue ();
14051 emit_note (NOTE_INSN_DELETED);
14053 /* Expand INSN_ADDRESSES so final() doesn't crash. */
14057 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
14059 INSN_ADDRESSES_NEW (insn, addr);
14064 if (TARGET_DEBUG_STACK)
14065 debug_rtx_list (get_insns (), 100);
14066 final (get_insns (), file, FALSE, FALSE);
14070 rs6000_pic_labelno++;
14073 /* Emit function epilogue as insns.
14075 At present, dwarf2out_frame_debug_expr doesn't understand
14076 register restores, so we don't bother setting RTX_FRAME_RELATED_P
14077 anywhere in the epilogue. Most of the insns below would in any case
14078 need special notes to explain where r11 is in relation to the stack. */
14081 rs6000_emit_epilogue (int sibcall)
14083 rs6000_stack_t *info;
14084 int restoring_FPRs_inline;
14085 int using_load_multiple;
14086 int using_mfcr_multiple;
14087 int use_backchain_to_restore_sp;
14089 rtx sp_reg_rtx = gen_rtx_REG (Pmode, 1);
14090 rtx frame_reg_rtx = sp_reg_rtx;
14091 enum machine_mode reg_mode = Pmode;
14092 int reg_size = TARGET_32BIT ? 4 : 8;
14095 info = rs6000_stack_info ();
14097 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
14099 reg_mode = V2SImode;
14103 using_load_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
14104 && (!TARGET_SPE_ABI
14105 || info->spe_64bit_regs_used == 0)
14106 && info->first_gp_reg_save < 31);
14107 restoring_FPRs_inline = (sibcall
14108 || current_function_calls_eh_return
14109 || info->first_fp_reg_save == 64
14110 || FP_SAVE_INLINE (info->first_fp_reg_save));
14111 use_backchain_to_restore_sp = (frame_pointer_needed
14112 || current_function_calls_alloca
14113 || info->total_size > 32767);
14114 using_mfcr_multiple = (rs6000_cpu == PROCESSOR_PPC601
14115 || rs6000_cpu == PROCESSOR_PPC603
14116 || rs6000_cpu == PROCESSOR_PPC750
14119 if (info->world_save_p)
14123 const char *alloc_rname;
14126 /* eh_rest_world_r10 will return to the location saved in the LR
14127 stack slot (which is not likely to be our caller.)
14128 Input: R10 -- stack adjustment. Clobbers R0, R11, R12, R7, R8.
14129 rest_world is similar, except any R10 parameter is ignored.
14130 The exception-handling stuff that was here in 2.95 is no
14131 longer necessary. */
14135 + 32 - info->first_gp_reg_save
14136 + LAST_ALTIVEC_REGNO + 1 - info->first_altivec_reg_save
14137 + 63 + 1 - info->first_fp_reg_save);
14139 strcpy (rname, ((current_function_calls_eh_return) ?
14140 "*eh_rest_world_r10" : "*rest_world"));
14141 alloc_rname = ggc_strdup (rname);
14144 RTVEC_ELT (p, j++) = gen_rtx_RETURN (VOIDmode);
14145 RTVEC_ELT (p, j++) = gen_rtx_USE (VOIDmode,
14146 gen_rtx_REG (Pmode,
14147 LINK_REGISTER_REGNUM));
14149 = gen_rtx_USE (VOIDmode, gen_rtx_SYMBOL_REF (Pmode, alloc_rname));
14150 /* The instruction pattern requires a clobber here;
14151 it is shared with the restVEC helper. */
14153 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 11));
14156 /* CR register traditionally saved as CR2. */
14157 rtx reg = gen_rtx_REG (reg_mode, CR2_REGNO);
14158 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14159 GEN_INT (info->cr_save_offset));
14160 rtx mem = gen_rtx_MEM (reg_mode, addr);
14161 set_mem_alias_set (mem, rs6000_sr_alias_set);
14163 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
14166 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
14168 rtx reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
14169 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14170 GEN_INT (info->gp_save_offset
14172 rtx mem = gen_rtx_MEM (reg_mode, addr);
14173 set_mem_alias_set (mem, rs6000_sr_alias_set);
14175 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
14177 for (i = 0; info->first_altivec_reg_save + i <= LAST_ALTIVEC_REGNO; i++)
14179 rtx reg = gen_rtx_REG (V4SImode, info->first_altivec_reg_save + i);
14180 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14181 GEN_INT (info->altivec_save_offset
14183 rtx mem = gen_rtx_MEM (V4SImode, addr);
14184 set_mem_alias_set (mem, rs6000_sr_alias_set);
14186 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
14188 for (i = 0; info->first_fp_reg_save + i <= 63; i++)
14190 rtx reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
14191 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14192 GEN_INT (info->fp_save_offset
14194 rtx mem = gen_rtx_MEM (DFmode, addr);
14195 set_mem_alias_set (mem, rs6000_sr_alias_set);
14197 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
14200 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 0));
14202 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, 12));
14204 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, 7));
14206 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, 8));
14208 = gen_rtx_USE (VOIDmode, gen_rtx_REG (SImode, 10));
14209 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
14214 /* If we have a frame pointer, a call to alloca, or a large stack
14215 frame, restore the old stack pointer using the backchain. Otherwise,
14216 we know what size to update it with. */
14217 if (use_backchain_to_restore_sp)
14219 /* Under V.4, don't reset the stack pointer until after we're done
14220 loading the saved registers. */
14221 if (DEFAULT_ABI == ABI_V4)
14222 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
14224 emit_move_insn (frame_reg_rtx,
14225 gen_rtx_MEM (Pmode, sp_reg_rtx));
14228 else if (info->push_p)
14230 if (DEFAULT_ABI == ABI_V4
14231 || current_function_calls_eh_return)
14232 sp_offset = info->total_size;
14235 emit_insn (TARGET_32BIT
14236 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
14237 GEN_INT (info->total_size))
14238 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
14239 GEN_INT (info->total_size)));
14243 /* Restore AltiVec registers if needed. */
14244 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
14248 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
14249 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
14251 rtx addr, areg, mem;
14253 areg = gen_rtx_REG (Pmode, 0);
14255 (areg, GEN_INT (info->altivec_save_offset
14257 + 16 * (i - info->first_altivec_reg_save)));
14259 /* AltiVec addressing mode is [reg+reg]. */
14260 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
14261 mem = gen_rtx_MEM (V4SImode, addr);
14262 set_mem_alias_set (mem, rs6000_sr_alias_set);
14264 emit_move_insn (gen_rtx_REG (V4SImode, i), mem);
14268 /* Restore VRSAVE if needed. */
14269 if (TARGET_ALTIVEC && TARGET_ALTIVEC_VRSAVE
14270 && info->vrsave_mask != 0)
14272 rtx addr, mem, reg;
14274 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14275 GEN_INT (info->vrsave_save_offset + sp_offset));
14276 mem = gen_rtx_MEM (SImode, addr);
14277 set_mem_alias_set (mem, rs6000_sr_alias_set);
14278 reg = gen_rtx_REG (SImode, 12);
14279 emit_move_insn (reg, mem);
14281 emit_insn (generate_set_vrsave (reg, info, 1));
14284 /* Get the old lr if we saved it. */
14285 if (info->lr_save_p)
14287 rtx mem = gen_frame_mem_offset (Pmode, frame_reg_rtx,
14288 info->lr_save_offset + sp_offset);
14290 set_mem_alias_set (mem, rs6000_sr_alias_set);
14292 emit_move_insn (gen_rtx_REG (Pmode, 0), mem);
14295 /* Get the old cr if we saved it. */
14296 if (info->cr_save_p)
14298 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14299 GEN_INT (info->cr_save_offset + sp_offset));
14300 rtx mem = gen_rtx_MEM (SImode, addr);
14302 set_mem_alias_set (mem, rs6000_sr_alias_set);
14304 emit_move_insn (gen_rtx_REG (SImode, 12), mem);
14307 /* Set LR here to try to overlap restores below. */
14308 if (info->lr_save_p)
14309 emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
14310 gen_rtx_REG (Pmode, 0));
14312 /* Load exception handler data registers, if needed. */
14313 if (current_function_calls_eh_return)
14315 unsigned int i, regno;
14319 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14320 GEN_INT (sp_offset + 5 * reg_size));
14321 rtx mem = gen_rtx_MEM (reg_mode, addr);
14323 set_mem_alias_set (mem, rs6000_sr_alias_set);
14325 emit_move_insn (gen_rtx_REG (reg_mode, 2), mem);
14332 regno = EH_RETURN_DATA_REGNO (i);
14333 if (regno == INVALID_REGNUM)
14336 mem = gen_frame_mem_offset (reg_mode, frame_reg_rtx,
14337 info->ehrd_offset + sp_offset
14338 + reg_size * (int) i);
14339 set_mem_alias_set (mem, rs6000_sr_alias_set);
14341 emit_move_insn (gen_rtx_REG (reg_mode, regno), mem);
14345 /* Restore GPRs. This is done as a PARALLEL if we are using
14346 the load-multiple instructions. */
14347 if (using_load_multiple)
14350 p = rtvec_alloc (32 - info->first_gp_reg_save);
14351 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
14353 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14354 GEN_INT (info->gp_save_offset
14357 rtx mem = gen_rtx_MEM (reg_mode, addr);
14359 set_mem_alias_set (mem, rs6000_sr_alias_set);
14362 gen_rtx_SET (VOIDmode,
14363 gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
14366 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
14369 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
14370 if ((regs_ever_live[info->first_gp_reg_save+i]
14371 && (! call_used_regs[info->first_gp_reg_save+i]
14372 || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
14373 && TARGET_TOC && TARGET_MINIMAL_TOC)))
14374 || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
14375 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
14376 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
14378 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14379 GEN_INT (info->gp_save_offset
14382 rtx mem = gen_rtx_MEM (reg_mode, addr);
14384 /* Restore 64-bit quantities for SPE. */
14385 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
14387 int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
14390 if (!SPE_CONST_OFFSET_OK (offset))
14392 b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
14393 emit_move_insn (b, GEN_INT (offset));
14396 b = GEN_INT (offset);
14398 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
14399 mem = gen_rtx_MEM (V2SImode, addr);
14402 set_mem_alias_set (mem, rs6000_sr_alias_set);
14404 emit_move_insn (gen_rtx_REG (reg_mode,
14405 info->first_gp_reg_save + i), mem);
14408 /* Restore fpr's if we need to do it without calling a function. */
14409 if (restoring_FPRs_inline)
14410 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
14411 if ((regs_ever_live[info->first_fp_reg_save+i]
14412 && ! call_used_regs[info->first_fp_reg_save+i]))
14415 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14416 GEN_INT (info->fp_save_offset
14419 mem = gen_rtx_MEM (DFmode, addr);
14420 set_mem_alias_set (mem, rs6000_sr_alias_set);
14422 emit_move_insn (gen_rtx_REG (DFmode,
14423 info->first_fp_reg_save + i),
14427 /* If we saved cr, restore it here. Just those that were used. */
14428 if (info->cr_save_p)
14430 rtx r12_rtx = gen_rtx_REG (SImode, 12);
14433 if (using_mfcr_multiple)
14435 for (i = 0; i < 8; i++)
14436 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
14442 if (using_mfcr_multiple && count > 1)
14447 p = rtvec_alloc (count);
14450 for (i = 0; i < 8; i++)
14451 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
14453 rtvec r = rtvec_alloc (2);
14454 RTVEC_ELT (r, 0) = r12_rtx;
14455 RTVEC_ELT (r, 1) = GEN_INT (1 << (7-i));
14456 RTVEC_ELT (p, ndx) =
14457 gen_rtx_SET (VOIDmode, gen_rtx_REG (CCmode, CR0_REGNO+i),
14458 gen_rtx_UNSPEC (CCmode, r, UNSPEC_MOVESI_TO_CR));
14461 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
14466 for (i = 0; i < 8; i++)
14467 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
14469 emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode,
14475 /* If this is V.4, unwind the stack pointer after all of the loads
14476 have been done. We need to emit a block here so that sched
14477 doesn't decide to move the sp change before the register restores
14478 (which may not have any obvious dependency on the stack). This
14479 doesn't hurt performance, because there is no scheduling that can
14480 be done after this point. */
14481 if (DEFAULT_ABI == ABI_V4
14482 || current_function_calls_eh_return)
14484 if (frame_reg_rtx != sp_reg_rtx)
14485 rs6000_emit_stack_tie ();
14487 if (use_backchain_to_restore_sp)
14489 emit_move_insn (sp_reg_rtx, frame_reg_rtx);
14491 else if (sp_offset != 0)
14493 emit_insn (TARGET_32BIT
14494 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
14495 GEN_INT (sp_offset))
14496 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
14497 GEN_INT (sp_offset)));
14501 if (current_function_calls_eh_return)
14503 rtx sa = EH_RETURN_STACKADJ_RTX;
14504 emit_insn (TARGET_32BIT
14505 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx, sa)
14506 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx, sa));
14512 if (! restoring_FPRs_inline)
14513 p = rtvec_alloc (3 + 64 - info->first_fp_reg_save);
14515 p = rtvec_alloc (2);
14517 RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
14518 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
14519 gen_rtx_REG (Pmode,
14520 LINK_REGISTER_REGNUM));
14522 /* If we have to restore more than two FP registers, branch to the
14523 restore function. It will return to our caller. */
14524 if (! restoring_FPRs_inline)
14528 const char *alloc_rname;
14530 sprintf (rname, "%s%d%s", RESTORE_FP_PREFIX,
14531 info->first_fp_reg_save - 32, RESTORE_FP_SUFFIX);
14532 alloc_rname = ggc_strdup (rname);
14533 RTVEC_ELT (p, 2) = gen_rtx_USE (VOIDmode,
14534 gen_rtx_SYMBOL_REF (Pmode,
14537 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
14540 addr = gen_rtx_PLUS (Pmode, sp_reg_rtx,
14541 GEN_INT (info->fp_save_offset + 8*i));
14542 mem = gen_rtx_MEM (DFmode, addr);
14543 set_mem_alias_set (mem, rs6000_sr_alias_set);
14545 RTVEC_ELT (p, i+3) =
14546 gen_rtx_SET (VOIDmode,
14547 gen_rtx_REG (DFmode, info->first_fp_reg_save + i),
14552 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
14556 /* Write function epilogue. */
14559 rs6000_output_function_epilogue (FILE *file,
14560 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
14562 rs6000_stack_t *info = rs6000_stack_info ();
14564 if (! HAVE_epilogue)
14566 rtx insn = get_last_insn ();
14567 /* If the last insn was a BARRIER, we don't have to write anything except
14568 the trace table. */
14569 if (GET_CODE (insn) == NOTE)
14570 insn = prev_nonnote_insn (insn);
14571 if (insn == 0 || GET_CODE (insn) != BARRIER)
14573 /* This is slightly ugly, but at least we don't have two
14574 copies of the epilogue-emitting code. */
14577 /* A NOTE_INSN_DELETED is supposed to be at the start
14578 and end of the "toplevel" insn chain. */
14579 emit_note (NOTE_INSN_DELETED);
14580 rs6000_emit_epilogue (FALSE);
14581 emit_note (NOTE_INSN_DELETED);
14583 /* Expand INSN_ADDRESSES so final() doesn't crash. */
14587 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
14589 INSN_ADDRESSES_NEW (insn, addr);
14594 if (TARGET_DEBUG_STACK)
14595 debug_rtx_list (get_insns (), 100);
14596 final (get_insns (), file, FALSE, FALSE);
14602 macho_branch_islands ();
14603 /* Mach-O doesn't support labels at the end of objects, so if
14604 it looks like we might want one, insert a NOP. */
14606 rtx insn = get_last_insn ();
14609 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_DELETED_LABEL)
14610 insn = PREV_INSN (insn);
14614 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_DELETED_LABEL)))
14615 fputs ("\tnop\n", file);
14619 /* Output a traceback table here. See /usr/include/sys/debug.h for info
14622 We don't output a traceback table if -finhibit-size-directive was
14623 used. The documentation for -finhibit-size-directive reads
14624 ``don't output a @code{.size} assembler directive, or anything
14625 else that would cause trouble if the function is split in the
14626 middle, and the two halves are placed at locations far apart in
14627 memory.'' The traceback table has this property, since it
14628 includes the offset from the start of the function to the
14629 traceback table itself.
14631 System V.4 Powerpc's (and the embedded ABI derived from it) use a
14632 different traceback table. */
14633 if (DEFAULT_ABI == ABI_AIX && ! flag_inhibit_size_directive
14634 && rs6000_traceback != traceback_none)
14636 const char *fname = NULL;
14637 const char *language_string = lang_hooks.name;
14638 int fixed_parms = 0, float_parms = 0, parm_info = 0;
14640 int optional_tbtab;
14642 if (rs6000_traceback == traceback_full)
14643 optional_tbtab = 1;
14644 else if (rs6000_traceback == traceback_part)
14645 optional_tbtab = 0;
14647 optional_tbtab = !optimize_size && !TARGET_ELF;
14649 if (optional_tbtab)
14651 fname = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
14652 while (*fname == '.') /* V.4 encodes . in the name */
14655 /* Need label immediately before tbtab, so we can compute
14656 its offset from the function start. */
14657 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
14658 ASM_OUTPUT_LABEL (file, fname);
14661 /* The .tbtab pseudo-op can only be used for the first eight
14662 expressions, since it can't handle the possibly variable
14663 length fields that follow. However, if you omit the optional
14664 fields, the assembler outputs zeros for all optional fields
14665 anyways, giving each variable length field is minimum length
14666 (as defined in sys/debug.h). Thus we can not use the .tbtab
14667 pseudo-op at all. */
14669 /* An all-zero word flags the start of the tbtab, for debuggers
14670 that have to find it by searching forward from the entry
14671 point or from the current pc. */
14672 fputs ("\t.long 0\n", file);
14674 /* Tbtab format type. Use format type 0. */
14675 fputs ("\t.byte 0,", file);
14677 /* Language type. Unfortunately, there does not seem to be any
14678 official way to discover the language being compiled, so we
14679 use language_string.
14680 C is 0. Fortran is 1. Pascal is 2. Ada is 3. C++ is 9.
14681 Java is 13. Objective-C is 14. */
14682 if (! strcmp (language_string, "GNU C"))
14684 else if (! strcmp (language_string, "GNU F77")
14685 || ! strcmp (language_string, "GNU F95"))
14687 else if (! strcmp (language_string, "GNU Pascal"))
14689 else if (! strcmp (language_string, "GNU Ada"))
14691 else if (! strcmp (language_string, "GNU C++"))
14693 else if (! strcmp (language_string, "GNU Java"))
14695 else if (! strcmp (language_string, "GNU Objective-C"))
14699 fprintf (file, "%d,", i);
14701 /* 8 single bit fields: global linkage (not set for C extern linkage,
14702 apparently a PL/I convention?), out-of-line epilogue/prologue, offset
14703 from start of procedure stored in tbtab, internal function, function
14704 has controlled storage, function has no toc, function uses fp,
14705 function logs/aborts fp operations. */
14706 /* Assume that fp operations are used if any fp reg must be saved. */
14707 fprintf (file, "%d,",
14708 (optional_tbtab << 5) | ((info->first_fp_reg_save != 64) << 1));
14710 /* 6 bitfields: function is interrupt handler, name present in
14711 proc table, function calls alloca, on condition directives
14712 (controls stack walks, 3 bits), saves condition reg, saves
14714 /* The `function calls alloca' bit seems to be set whenever reg 31 is
14715 set up as a frame pointer, even when there is no alloca call. */
14716 fprintf (file, "%d,",
14717 ((optional_tbtab << 6)
14718 | ((optional_tbtab & frame_pointer_needed) << 5)
14719 | (info->cr_save_p << 1)
14720 | (info->lr_save_p)));
14722 /* 3 bitfields: saves backchain, fixup code, number of fpr saved
14724 fprintf (file, "%d,",
14725 (info->push_p << 7) | (64 - info->first_fp_reg_save));
14727 /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits). */
14728 fprintf (file, "%d,", (32 - first_reg_to_save ()));
14730 if (optional_tbtab)
14732 /* Compute the parameter info from the function decl argument
14735 int next_parm_info_bit = 31;
14737 for (decl = DECL_ARGUMENTS (current_function_decl);
14738 decl; decl = TREE_CHAIN (decl))
14740 rtx parameter = DECL_INCOMING_RTL (decl);
14741 enum machine_mode mode = GET_MODE (parameter);
14743 if (GET_CODE (parameter) == REG)
14745 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
14751 if (mode == SFmode)
14753 else if (mode == DFmode || mode == TFmode)
14758 /* If only one bit will fit, don't or in this entry. */
14759 if (next_parm_info_bit > 0)
14760 parm_info |= (bits << (next_parm_info_bit - 1));
14761 next_parm_info_bit -= 2;
14765 fixed_parms += ((GET_MODE_SIZE (mode)
14766 + (UNITS_PER_WORD - 1))
14768 next_parm_info_bit -= 1;
14774 /* Number of fixed point parameters. */
14775 /* This is actually the number of words of fixed point parameters; thus
14776 an 8 byte struct counts as 2; and thus the maximum value is 8. */
14777 fprintf (file, "%d,", fixed_parms);
14779 /* 2 bitfields: number of floating point parameters (7 bits), parameters
14781 /* This is actually the number of fp registers that hold parameters;
14782 and thus the maximum value is 13. */
14783 /* Set parameters on stack bit if parameters are not in their original
14784 registers, regardless of whether they are on the stack? Xlc
14785 seems to set the bit when not optimizing. */
14786 fprintf (file, "%d\n", ((float_parms << 1) | (! optimize)));
14788 if (! optional_tbtab)
14791 /* Optional fields follow. Some are variable length. */
14793 /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
14794 11 double float. */
14795 /* There is an entry for each parameter in a register, in the order that
14796 they occur in the parameter list. Any intervening arguments on the
14797 stack are ignored. If the list overflows a long (max possible length
14798 34 bits) then completely leave off all elements that don't fit. */
14799 /* Only emit this long if there was at least one parameter. */
14800 if (fixed_parms || float_parms)
14801 fprintf (file, "\t.long %d\n", parm_info);
14803 /* Offset from start of code to tb table. */
14804 fputs ("\t.long ", file);
14805 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
14807 RS6000_OUTPUT_BASENAME (file, fname);
14809 assemble_name (file, fname);
14811 rs6000_output_function_entry (file, fname);
14814 /* Interrupt handler mask. */
14815 /* Omit this long, since we never set the interrupt handler bit
14818 /* Number of CTL (controlled storage) anchors. */
14819 /* Omit this long, since the has_ctl bit is never set above. */
14821 /* Displacement into stack of each CTL anchor. */
14822 /* Omit this list of longs, because there are no CTL anchors. */
14824 /* Length of function name. */
14827 fprintf (file, "\t.short %d\n", (int) strlen (fname));
14829 /* Function name. */
14830 assemble_string (fname, strlen (fname));
14832 /* Register for alloca automatic storage; this is always reg 31.
14833 Only emit this if the alloca bit was set above. */
14834 if (frame_pointer_needed)
14835 fputs ("\t.byte 31\n", file);
14837 fputs ("\t.align 2\n", file);
14841 /* A C compound statement that outputs the assembler code for a thunk
14842 function, used to implement C++ virtual function calls with
14843 multiple inheritance. The thunk acts as a wrapper around a virtual
14844 function, adjusting the implicit object parameter before handing
14845 control off to the real function.
14847 First, emit code to add the integer DELTA to the location that
14848 contains the incoming first argument. Assume that this argument
14849 contains a pointer, and is the one used to pass the `this' pointer
14850 in C++. This is the incoming argument *before* the function
14851 prologue, e.g. `%o0' on a sparc. The addition must preserve the
14852 values of all other incoming arguments.
14854 After the addition, emit code to jump to FUNCTION, which is a
14855 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does
14856 not touch the return address. Hence returning from FUNCTION will
14857 return to whoever called the current `thunk'.
14859 The effect must be as if FUNCTION had been called directly with the
14860 adjusted first argument. This macro is responsible for emitting
14861 all of the code for a thunk function; output_function_prologue()
14862 and output_function_epilogue() are not invoked.
14864 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already
14865 been extracted from it.) It might possibly be useful on some
14866 targets, but probably not.
14868 If you do not define this macro, the target-independent code in the
14869 C++ frontend will generate a less efficient heavyweight thunk that
14870 calls FUNCTION instead of jumping to it. The generic approach does
14871 not support varargs. */
14874 rs6000_output_mi_thunk (FILE *file, tree thunk_fndecl ATTRIBUTE_UNUSED,
14875 HOST_WIDE_INT delta, HOST_WIDE_INT vcall_offset,
14878 rtx this, insn, funexp;
14880 reload_completed = 1;
14881 epilogue_completed = 1;
14882 no_new_pseudos = 1;
14883 reset_block_changes ();
14885 /* Mark the end of the (empty) prologue. */
14886 emit_note (NOTE_INSN_PROLOGUE_END);
14888 /* Find the "this" pointer. If the function returns a structure,
14889 the structure return pointer is in r3. */
14890 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
14891 this = gen_rtx_REG (Pmode, 4);
14893 this = gen_rtx_REG (Pmode, 3);
14895 /* Apply the constant offset, if required. */
14898 rtx delta_rtx = GEN_INT (delta);
14899 emit_insn (TARGET_32BIT
14900 ? gen_addsi3 (this, this, delta_rtx)
14901 : gen_adddi3 (this, this, delta_rtx));
14904 /* Apply the offset from the vtable, if required. */
14907 rtx vcall_offset_rtx = GEN_INT (vcall_offset);
14908 rtx tmp = gen_rtx_REG (Pmode, 12);
14910 emit_move_insn (tmp, gen_rtx_MEM (Pmode, this));
14911 if (((unsigned HOST_WIDE_INT) vcall_offset) + 0x8000 >= 0x10000)
14913 emit_insn (TARGET_32BIT
14914 ? gen_addsi3 (tmp, tmp, vcall_offset_rtx)
14915 : gen_adddi3 (tmp, tmp, vcall_offset_rtx));
14916 emit_move_insn (tmp, gen_rtx_MEM (Pmode, tmp));
14920 rtx loc = gen_rtx_PLUS (Pmode, tmp, vcall_offset_rtx);
14922 emit_move_insn (tmp, gen_rtx_MEM (Pmode, loc));
14924 emit_insn (TARGET_32BIT
14925 ? gen_addsi3 (this, this, tmp)
14926 : gen_adddi3 (this, this, tmp));
14929 /* Generate a tail call to the target function. */
14930 if (!TREE_USED (function))
14932 assemble_external (function);
14933 TREE_USED (function) = 1;
14935 funexp = XEXP (DECL_RTL (function), 0);
14936 funexp = gen_rtx_MEM (FUNCTION_MODE, funexp);
14939 if (MACHOPIC_INDIRECT)
14940 funexp = machopic_indirect_call_target (funexp);
14943 /* gen_sibcall expects reload to convert scratch pseudo to LR so we must
14944 generate sibcall RTL explicitly to avoid constraint abort. */
14945 insn = emit_call_insn (
14946 gen_rtx_PARALLEL (VOIDmode,
14948 gen_rtx_CALL (VOIDmode,
14949 funexp, const0_rtx),
14950 gen_rtx_USE (VOIDmode, const0_rtx),
14951 gen_rtx_USE (VOIDmode,
14952 gen_rtx_REG (SImode,
14953 LINK_REGISTER_REGNUM)),
14954 gen_rtx_RETURN (VOIDmode))));
14955 SIBLING_CALL_P (insn) = 1;
14958 /* Run just enough of rest_of_compilation to get the insns emitted.
14959 There's not really enough bulk here to make other passes such as
14960 instruction scheduling worth while. Note that use_thunk calls
14961 assemble_start_function and assemble_end_function. */
14962 insn = get_insns ();
14963 insn_locators_initialize ();
14964 shorten_branches (insn);
14965 final_start_function (insn, file, 1);
14966 final (insn, file, 1, 0);
14967 final_end_function ();
14969 reload_completed = 0;
14970 epilogue_completed = 0;
14971 no_new_pseudos = 0;
14974 /* A quick summary of the various types of 'constant-pool tables'
14977 Target Flags Name One table per
14978 AIX (none) AIX TOC object file
14979 AIX -mfull-toc AIX TOC object file
14980 AIX -mminimal-toc AIX minimal TOC translation unit
14981 SVR4/EABI (none) SVR4 SDATA object file
14982 SVR4/EABI -fpic SVR4 pic object file
14983 SVR4/EABI -fPIC SVR4 PIC translation unit
14984 SVR4/EABI -mrelocatable EABI TOC function
14985 SVR4/EABI -maix AIX TOC object file
14986 SVR4/EABI -maix -mminimal-toc
14987 AIX minimal TOC translation unit
14989 Name Reg. Set by entries contains:
14990 made by addrs? fp? sum?
14992 AIX TOC 2 crt0 as Y option option
14993 AIX minimal TOC 30 prolog gcc Y Y option
14994 SVR4 SDATA 13 crt0 gcc N Y N
14995 SVR4 pic 30 prolog ld Y not yet N
14996 SVR4 PIC 30 prolog gcc Y option option
14997 EABI TOC 30 prolog gcc Y option option
15001 /* Hash functions for the hash table. */
15004 rs6000_hash_constant (rtx k)
15006 enum rtx_code code = GET_CODE (k);
15007 enum machine_mode mode = GET_MODE (k);
15008 unsigned result = (code << 3) ^ mode;
15009 const char *format;
15012 format = GET_RTX_FORMAT (code);
15013 flen = strlen (format);
15019 return result * 1231 + (unsigned) INSN_UID (XEXP (k, 0));
15022 if (mode != VOIDmode)
15023 return real_hash (CONST_DOUBLE_REAL_VALUE (k)) * result;
15035 for (; fidx < flen; fidx++)
15036 switch (format[fidx])
15041 const char *str = XSTR (k, fidx);
15042 len = strlen (str);
15043 result = result * 613 + len;
15044 for (i = 0; i < len; i++)
15045 result = result * 613 + (unsigned) str[i];
15050 result = result * 1231 + rs6000_hash_constant (XEXP (k, fidx));
15054 result = result * 613 + (unsigned) XINT (k, fidx);
15057 if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT))
15058 result = result * 613 + (unsigned) XWINT (k, fidx);
15062 for (i = 0; i < sizeof(HOST_WIDE_INT)/sizeof(unsigned); i++)
15063 result = result * 613 + (unsigned) (XWINT (k, fidx)
15077 toc_hash_function (const void *hash_entry)
15079 const struct toc_hash_struct *thc =
15080 (const struct toc_hash_struct *) hash_entry;
15081 return rs6000_hash_constant (thc->key) ^ thc->key_mode;
15084 /* Compare H1 and H2 for equivalence. */
15087 toc_hash_eq (const void *h1, const void *h2)
15089 rtx r1 = ((const struct toc_hash_struct *) h1)->key;
15090 rtx r2 = ((const struct toc_hash_struct *) h2)->key;
15092 if (((const struct toc_hash_struct *) h1)->key_mode
15093 != ((const struct toc_hash_struct *) h2)->key_mode)
15096 return rtx_equal_p (r1, r2);
15099 /* These are the names given by the C++ front-end to vtables, and
15100 vtable-like objects. Ideally, this logic should not be here;
15101 instead, there should be some programmatic way of inquiring as
15102 to whether or not an object is a vtable. */
15104 #define VTABLE_NAME_P(NAME) \
15105 (strncmp ("_vt.", name, strlen("_vt.")) == 0 \
15106 || strncmp ("_ZTV", name, strlen ("_ZTV")) == 0 \
15107 || strncmp ("_ZTT", name, strlen ("_ZTT")) == 0 \
15108 || strncmp ("_ZTI", name, strlen ("_ZTI")) == 0 \
15109 || strncmp ("_ZTC", name, strlen ("_ZTC")) == 0)
15112 rs6000_output_symbol_ref (FILE *file, rtx x)
15114 /* Currently C++ toc references to vtables can be emitted before it
15115 is decided whether the vtable is public or private. If this is
15116 the case, then the linker will eventually complain that there is
15117 a reference to an unknown section. Thus, for vtables only,
15118 we emit the TOC reference to reference the symbol and not the
15120 const char *name = XSTR (x, 0);
15122 if (VTABLE_NAME_P (name))
15124 RS6000_OUTPUT_BASENAME (file, name);
15127 assemble_name (file, name);
15130 /* Output a TOC entry. We derive the entry name from what is being
15134 output_toc (FILE *file, rtx x, int labelno, enum machine_mode mode)
15137 const char *name = buf;
15138 const char *real_name;
15145 /* When the linker won't eliminate them, don't output duplicate
15146 TOC entries (this happens on AIX if there is any kind of TOC,
15147 and on SVR4 under -fPIC or -mrelocatable). Don't do this for
15149 if (TARGET_TOC && GET_CODE (x) != LABEL_REF)
15151 struct toc_hash_struct *h;
15154 /* Create toc_hash_table. This can't be done at OVERRIDE_OPTIONS
15155 time because GGC is not initialized at that point. */
15156 if (toc_hash_table == NULL)
15157 toc_hash_table = htab_create_ggc (1021, toc_hash_function,
15158 toc_hash_eq, NULL);
15160 h = ggc_alloc (sizeof (*h));
15162 h->key_mode = mode;
15163 h->labelno = labelno;
15165 found = htab_find_slot (toc_hash_table, h, 1);
15166 if (*found == NULL)
15168 else /* This is indeed a duplicate.
15169 Set this label equal to that label. */
15171 fputs ("\t.set ", file);
15172 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
15173 fprintf (file, "%d,", labelno);
15174 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
15175 fprintf (file, "%d\n", ((*(const struct toc_hash_struct **)
15181 /* If we're going to put a double constant in the TOC, make sure it's
15182 aligned properly when strict alignment is on. */
15183 if (GET_CODE (x) == CONST_DOUBLE
15184 && STRICT_ALIGNMENT
15185 && GET_MODE_BITSIZE (mode) >= 64
15186 && ! (TARGET_NO_FP_IN_TOC && ! TARGET_MINIMAL_TOC)) {
15187 ASM_OUTPUT_ALIGN (file, 3);
15190 (*targetm.asm_out.internal_label) (file, "LC", labelno);
15192 /* Handle FP constants specially. Note that if we have a minimal
15193 TOC, things we put here aren't actually in the TOC, so we can allow
15195 if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == TFmode)
15197 REAL_VALUE_TYPE rv;
15200 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
15201 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);
15205 if (TARGET_MINIMAL_TOC)
15206 fputs (DOUBLE_INT_ASM_OP, file);
15208 fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
15209 k[0] & 0xffffffff, k[1] & 0xffffffff,
15210 k[2] & 0xffffffff, k[3] & 0xffffffff);
15211 fprintf (file, "0x%lx%08lx,0x%lx%08lx\n",
15212 k[0] & 0xffffffff, k[1] & 0xffffffff,
15213 k[2] & 0xffffffff, k[3] & 0xffffffff);
15218 if (TARGET_MINIMAL_TOC)
15219 fputs ("\t.long ", file);
15221 fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
15222 k[0] & 0xffffffff, k[1] & 0xffffffff,
15223 k[2] & 0xffffffff, k[3] & 0xffffffff);
15224 fprintf (file, "0x%lx,0x%lx,0x%lx,0x%lx\n",
15225 k[0] & 0xffffffff, k[1] & 0xffffffff,
15226 k[2] & 0xffffffff, k[3] & 0xffffffff);
15230 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
15232 REAL_VALUE_TYPE rv;
15235 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
15236 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
15240 if (TARGET_MINIMAL_TOC)
15241 fputs (DOUBLE_INT_ASM_OP, file);
15243 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
15244 k[0] & 0xffffffff, k[1] & 0xffffffff);
15245 fprintf (file, "0x%lx%08lx\n",
15246 k[0] & 0xffffffff, k[1] & 0xffffffff);
15251 if (TARGET_MINIMAL_TOC)
15252 fputs ("\t.long ", file);
15254 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
15255 k[0] & 0xffffffff, k[1] & 0xffffffff);
15256 fprintf (file, "0x%lx,0x%lx\n",
15257 k[0] & 0xffffffff, k[1] & 0xffffffff);
15261 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
15263 REAL_VALUE_TYPE rv;
15266 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
15267 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
15271 if (TARGET_MINIMAL_TOC)
15272 fputs (DOUBLE_INT_ASM_OP, file);
15274 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
15275 fprintf (file, "0x%lx00000000\n", l & 0xffffffff);
15280 if (TARGET_MINIMAL_TOC)
15281 fputs ("\t.long ", file);
15283 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
15284 fprintf (file, "0x%lx\n", l & 0xffffffff);
15288 else if (GET_MODE (x) == VOIDmode
15289 && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
15291 unsigned HOST_WIDE_INT low;
15292 HOST_WIDE_INT high;
15294 if (GET_CODE (x) == CONST_DOUBLE)
15296 low = CONST_DOUBLE_LOW (x);
15297 high = CONST_DOUBLE_HIGH (x);
15300 #if HOST_BITS_PER_WIDE_INT == 32
15303 high = (low & 0x80000000) ? ~0 : 0;
15307 low = INTVAL (x) & 0xffffffff;
15308 high = (HOST_WIDE_INT) INTVAL (x) >> 32;
15312 /* TOC entries are always Pmode-sized, but since this
15313 is a bigendian machine then if we're putting smaller
15314 integer constants in the TOC we have to pad them.
15315 (This is still a win over putting the constants in
15316 a separate constant pool, because then we'd have
15317 to have both a TOC entry _and_ the actual constant.)
15319 For a 32-bit target, CONST_INT values are loaded and shifted
15320 entirely within `low' and can be stored in one TOC entry. */
15322 if (TARGET_64BIT && POINTER_SIZE < GET_MODE_BITSIZE (mode))
15323 abort ();/* It would be easy to make this work, but it doesn't now. */
15325 if (POINTER_SIZE > GET_MODE_BITSIZE (mode))
15327 #if HOST_BITS_PER_WIDE_INT == 32
15328 lshift_double (low, high, POINTER_SIZE - GET_MODE_BITSIZE (mode),
15329 POINTER_SIZE, &low, &high, 0);
15332 low <<= POINTER_SIZE - GET_MODE_BITSIZE (mode);
15333 high = (HOST_WIDE_INT) low >> 32;
15340 if (TARGET_MINIMAL_TOC)
15341 fputs (DOUBLE_INT_ASM_OP, file);
15343 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
15344 (long) high & 0xffffffff, (long) low & 0xffffffff);
15345 fprintf (file, "0x%lx%08lx\n",
15346 (long) high & 0xffffffff, (long) low & 0xffffffff);
15351 if (POINTER_SIZE < GET_MODE_BITSIZE (mode))
15353 if (TARGET_MINIMAL_TOC)
15354 fputs ("\t.long ", file);
15356 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
15357 (long) high & 0xffffffff, (long) low & 0xffffffff);
15358 fprintf (file, "0x%lx,0x%lx\n",
15359 (long) high & 0xffffffff, (long) low & 0xffffffff);
15363 if (TARGET_MINIMAL_TOC)
15364 fputs ("\t.long ", file);
15366 fprintf (file, "\t.tc IS_%lx[TC],", (long) low & 0xffffffff);
15367 fprintf (file, "0x%lx\n", (long) low & 0xffffffff);
15373 if (GET_CODE (x) == CONST)
15375 if (GET_CODE (XEXP (x, 0)) != PLUS)
15378 base = XEXP (XEXP (x, 0), 0);
15379 offset = INTVAL (XEXP (XEXP (x, 0), 1));
15382 if (GET_CODE (base) == SYMBOL_REF)
15383 name = XSTR (base, 0);
15384 else if (GET_CODE (base) == LABEL_REF)
15385 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (XEXP (base, 0)));
15386 else if (GET_CODE (base) == CODE_LABEL)
15387 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (base));
15391 real_name = (*targetm.strip_name_encoding) (name);
15392 if (TARGET_MINIMAL_TOC)
15393 fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
15396 fprintf (file, "\t.tc %s", real_name);
15399 fprintf (file, ".N%d", - offset);
15401 fprintf (file, ".P%d", offset);
15403 fputs ("[TC],", file);
15406 /* Currently C++ toc references to vtables can be emitted before it
15407 is decided whether the vtable is public or private. If this is
15408 the case, then the linker will eventually complain that there is
15409 a TOC reference to an unknown section. Thus, for vtables only,
15410 we emit the TOC reference to reference the symbol and not the
15412 if (VTABLE_NAME_P (name))
15414 RS6000_OUTPUT_BASENAME (file, name);
15416 fprintf (file, "%d", offset);
15417 else if (offset > 0)
15418 fprintf (file, "+%d", offset);
15421 output_addr_const (file, x);
15425 /* Output an assembler pseudo-op to write an ASCII string of N characters
15426 starting at P to FILE.
15428 On the RS/6000, we have to do this using the .byte operation and
15429 write out special characters outside the quoted string.
15430 Also, the assembler is broken; very long strings are truncated,
15431 so we must artificially break them up early. */
15434 output_ascii (FILE *file, const char *p, int n)
15437 int i, count_string;
15438 const char *for_string = "\t.byte \"";
15439 const char *for_decimal = "\t.byte ";
15440 const char *to_close = NULL;
15443 for (i = 0; i < n; i++)
15446 if (c >= ' ' && c < 0177)
15449 fputs (for_string, file);
15452 /* Write two quotes to get one. */
15460 for_decimal = "\"\n\t.byte ";
15464 if (count_string >= 512)
15466 fputs (to_close, file);
15468 for_string = "\t.byte \"";
15469 for_decimal = "\t.byte ";
15477 fputs (for_decimal, file);
15478 fprintf (file, "%d", c);
15480 for_string = "\n\t.byte \"";
15481 for_decimal = ", ";
15487 /* Now close the string if we have written one. Then end the line. */
15489 fputs (to_close, file);
15492 /* Generate a unique section name for FILENAME for a section type
15493 represented by SECTION_DESC. Output goes into BUF.
15495 SECTION_DESC can be any string, as long as it is different for each
15496 possible section type.
15498 We name the section in the same manner as xlc. The name begins with an
15499 underscore followed by the filename (after stripping any leading directory
15500 names) with the last period replaced by the string SECTION_DESC. If
15501 FILENAME does not contain a period, SECTION_DESC is appended to the end of
15505 rs6000_gen_section_name (char **buf, const char *filename,
15506 const char *section_desc)
15508 const char *q, *after_last_slash, *last_period = 0;
15512 after_last_slash = filename;
15513 for (q = filename; *q; q++)
15516 after_last_slash = q + 1;
15517 else if (*q == '.')
15521 len = strlen (after_last_slash) + strlen (section_desc) + 2;
15522 *buf = (char *) xmalloc (len);
15527 for (q = after_last_slash; *q; q++)
15529 if (q == last_period)
15531 strcpy (p, section_desc);
15532 p += strlen (section_desc);
15536 else if (ISALNUM (*q))
15540 if (last_period == 0)
15541 strcpy (p, section_desc);
15546 /* Emit profile function. */
15549 output_profile_hook (int labelno ATTRIBUTE_UNUSED)
15551 if (TARGET_PROFILE_KERNEL)
15554 if (DEFAULT_ABI == ABI_AIX)
15556 #ifndef NO_PROFILE_COUNTERS
15557 # define NO_PROFILE_COUNTERS 0
15559 if (NO_PROFILE_COUNTERS)
15560 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 0);
15564 const char *label_name;
15567 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
15568 label_name = (*targetm.strip_name_encoding) (ggc_strdup (buf));
15569 fun = gen_rtx_SYMBOL_REF (Pmode, label_name);
15571 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 1,
15575 else if (DEFAULT_ABI == ABI_DARWIN)
15577 const char *mcount_name = RS6000_MCOUNT;
15578 int caller_addr_regno = LINK_REGISTER_REGNUM;
15580 /* Be conservative and always set this, at least for now. */
15581 current_function_uses_pic_offset_table = 1;
15584 /* For PIC code, set up a stub and collect the caller's address
15585 from r0, which is where the prologue puts it. */
15586 if (MACHOPIC_INDIRECT
15587 && current_function_uses_pic_offset_table)
15588 caller_addr_regno = 0;
15590 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, mcount_name),
15592 gen_rtx_REG (Pmode, caller_addr_regno), Pmode);
15596 /* Write function profiler code. */
15599 output_function_profiler (FILE *file, int labelno)
15604 switch (DEFAULT_ABI)
15613 warning ("no profiling of 64-bit code for this ABI");
15616 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
15617 fprintf (file, "\tmflr %s\n", reg_names[0]);
15620 fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file);
15621 asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
15622 reg_names[0], save_lr, reg_names[1]);
15623 asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
15624 asm_fprintf (file, "\t{l|lwz} %s,", reg_names[0]);
15625 assemble_name (file, buf);
15626 asm_fprintf (file, "@got(%s)\n", reg_names[12]);
15628 else if (flag_pic > 1)
15630 asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
15631 reg_names[0], save_lr, reg_names[1]);
15632 /* Now, we need to get the address of the label. */
15633 fputs ("\tbl 1f\n\t.long ", file);
15634 assemble_name (file, buf);
15635 fputs ("-.\n1:", file);
15636 asm_fprintf (file, "\tmflr %s\n", reg_names[11]);
15637 asm_fprintf (file, "\t{l|lwz} %s,0(%s)\n",
15638 reg_names[0], reg_names[11]);
15639 asm_fprintf (file, "\t{cax|add} %s,%s,%s\n",
15640 reg_names[0], reg_names[0], reg_names[11]);
15644 asm_fprintf (file, "\t{liu|lis} %s,", reg_names[12]);
15645 assemble_name (file, buf);
15646 fputs ("@ha\n", file);
15647 asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
15648 reg_names[0], save_lr, reg_names[1]);
15649 asm_fprintf (file, "\t{cal|la} %s,", reg_names[0]);
15650 assemble_name (file, buf);
15651 asm_fprintf (file, "@l(%s)\n", reg_names[12]);
15654 /* ABI_V4 saves the static chain reg with ASM_OUTPUT_REG_PUSH. */
15655 fprintf (file, "\tbl %s%s\n",
15656 RS6000_MCOUNT, flag_pic ? "@plt" : "");
15661 if (!TARGET_PROFILE_KERNEL)
15663 /* Don't do anything, done in output_profile_hook (). */
15670 asm_fprintf (file, "\tmflr %s\n", reg_names[0]);
15671 asm_fprintf (file, "\tstd %s,16(%s)\n", reg_names[0], reg_names[1]);
15673 if (cfun->static_chain_decl != NULL)
15675 asm_fprintf (file, "\tstd %s,24(%s)\n",
15676 reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
15677 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
15678 asm_fprintf (file, "\tld %s,24(%s)\n",
15679 reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
15682 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
15689 /* Power4 load update and store update instructions are cracked into a
15690 load or store and an integer insn which are executed in the same cycle.
15691 Branches have their own dispatch slot which does not count against the
15692 GCC issue rate, but it changes the program flow so there are no other
15693 instructions to issue in this cycle. */
15696 rs6000_variable_issue (FILE *stream ATTRIBUTE_UNUSED,
15697 int verbose ATTRIBUTE_UNUSED,
15698 rtx insn, int more)
15700 if (GET_CODE (PATTERN (insn)) == USE
15701 || GET_CODE (PATTERN (insn)) == CLOBBER)
15704 if (rs6000_sched_groups)
15706 if (is_microcoded_insn (insn))
15708 else if (is_cracked_insn (insn))
15709 return more > 2 ? more - 2 : 0;
15715 /* Adjust the cost of a scheduling dependency. Return the new cost of
15716 a dependency LINK or INSN on DEP_INSN. COST is the current cost. */
15719 rs6000_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost)
15721 if (! recog_memoized (insn))
15724 if (REG_NOTE_KIND (link) != 0)
15727 if (REG_NOTE_KIND (link) == 0)
15729 /* Data dependency; DEP_INSN writes a register that INSN reads
15730 some cycles later. */
15732 /* Separate a load from a narrower, dependent store. */
15733 if (rs6000_sched_groups
15734 && GET_CODE (PATTERN (insn)) == SET
15735 && GET_CODE (PATTERN (dep_insn)) == SET
15736 && GET_CODE (XEXP (PATTERN (insn), 1)) == MEM
15737 && GET_CODE (XEXP (PATTERN (dep_insn), 0)) == MEM
15738 && (GET_MODE_SIZE (GET_MODE (XEXP (PATTERN (insn), 1)))
15739 > GET_MODE_SIZE (GET_MODE (XEXP (PATTERN (dep_insn), 0)))))
15742 switch (get_attr_type (insn))
15745 /* Tell the first scheduling pass about the latency between
15746 a mtctr and bctr (and mtlr and br/blr). The first
15747 scheduling pass will not know about this latency since
15748 the mtctr instruction, which has the latency associated
15749 to it, will be generated by reload. */
15750 return TARGET_POWER ? 5 : 4;
15752 /* Leave some extra cycles between a compare and its
15753 dependent branch, to inhibit expensive mispredicts. */
15754 if ((rs6000_cpu_attr == CPU_PPC603
15755 || rs6000_cpu_attr == CPU_PPC604
15756 || rs6000_cpu_attr == CPU_PPC604E
15757 || rs6000_cpu_attr == CPU_PPC620
15758 || rs6000_cpu_attr == CPU_PPC630
15759 || rs6000_cpu_attr == CPU_PPC750
15760 || rs6000_cpu_attr == CPU_PPC7400
15761 || rs6000_cpu_attr == CPU_PPC7450
15762 || rs6000_cpu_attr == CPU_POWER4
15763 || rs6000_cpu_attr == CPU_POWER5)
15764 && recog_memoized (dep_insn)
15765 && (INSN_CODE (dep_insn) >= 0)
15766 && (get_attr_type (dep_insn) == TYPE_CMP
15767 || get_attr_type (dep_insn) == TYPE_COMPARE
15768 || get_attr_type (dep_insn) == TYPE_DELAYED_COMPARE
15769 || get_attr_type (dep_insn) == TYPE_IMUL_COMPARE
15770 || get_attr_type (dep_insn) == TYPE_LMUL_COMPARE
15771 || get_attr_type (dep_insn) == TYPE_FPCOMPARE
15772 || get_attr_type (dep_insn) == TYPE_CR_LOGICAL
15773 || get_attr_type (dep_insn) == TYPE_DELAYED_CR))
15778 /* Fall out to return default cost. */
15784 /* The function returns a true if INSN is microcoded.
15785 Return false otherwise. */
15788 is_microcoded_insn (rtx insn)
15790 if (!insn || !INSN_P (insn)
15791 || GET_CODE (PATTERN (insn)) == USE
15792 || GET_CODE (PATTERN (insn)) == CLOBBER)
15795 if (rs6000_sched_groups)
15797 enum attr_type type = get_attr_type (insn);
15798 if (type == TYPE_LOAD_EXT_U
15799 || type == TYPE_LOAD_EXT_UX
15800 || type == TYPE_LOAD_UX
15801 || type == TYPE_STORE_UX
15802 || type == TYPE_MFCR)
15809 /* The function returns a nonzero value if INSN can be scheduled only
15810 as the first insn in a dispatch group ("dispatch-slot restricted").
15811 In this case, the returned value indicates how many dispatch slots
15812 the insn occupies (at the beginning of the group).
15813 Return 0 otherwise. */
15816 is_dispatch_slot_restricted (rtx insn)
15818 enum attr_type type;
15820 if (!rs6000_sched_groups)
15824 || insn == NULL_RTX
15825 || GET_CODE (insn) == NOTE
15826 || GET_CODE (PATTERN (insn)) == USE
15827 || GET_CODE (PATTERN (insn)) == CLOBBER)
15830 type = get_attr_type (insn);
15837 case TYPE_DELAYED_CR:
15838 case TYPE_CR_LOGICAL:
15846 if (rs6000_cpu == PROCESSOR_POWER5
15847 && is_cracked_insn (insn))
15853 /* The function returns true if INSN is cracked into 2 instructions
15854 by the processor (and therefore occupies 2 issue slots). */
15857 is_cracked_insn (rtx insn)
15859 if (!insn || !INSN_P (insn)
15860 || GET_CODE (PATTERN (insn)) == USE
15861 || GET_CODE (PATTERN (insn)) == CLOBBER)
15864 if (rs6000_sched_groups)
15866 enum attr_type type = get_attr_type (insn);
15867 if (type == TYPE_LOAD_U || type == TYPE_STORE_U
15868 || type == TYPE_FPLOAD_U || type == TYPE_FPSTORE_U
15869 || type == TYPE_FPLOAD_UX || type == TYPE_FPSTORE_UX
15870 || type == TYPE_LOAD_EXT || type == TYPE_DELAYED_CR
15871 || type == TYPE_COMPARE || type == TYPE_DELAYED_COMPARE
15872 || type == TYPE_IMUL_COMPARE || type == TYPE_LMUL_COMPARE
15873 || type == TYPE_IDIV || type == TYPE_LDIV
15874 || type == TYPE_INSERT_WORD)
15881 /* The function returns true if INSN can be issued only from
15882 the branch slot. */
15885 is_branch_slot_insn (rtx insn)
15887 if (!insn || !INSN_P (insn)
15888 || GET_CODE (PATTERN (insn)) == USE
15889 || GET_CODE (PATTERN (insn)) == CLOBBER)
15892 if (rs6000_sched_groups)
15894 enum attr_type type = get_attr_type (insn);
15895 if (type == TYPE_BRANCH || type == TYPE_JMPREG)
15903 /* A C statement (sans semicolon) to update the integer scheduling
15904 priority INSN_PRIORITY (INSN). Increase the priority to execute the
15905 INSN earlier, reduce the priority to execute INSN later. Do not
15906 define this macro if you do not need to adjust the scheduling
15907 priorities of insns. */
15910 rs6000_adjust_priority (rtx insn ATTRIBUTE_UNUSED, int priority)
15912 /* On machines (like the 750) which have asymmetric integer units,
15913 where one integer unit can do multiply and divides and the other
15914 can't, reduce the priority of multiply/divide so it is scheduled
15915 before other integer operations. */
15918 if (! INSN_P (insn))
15921 if (GET_CODE (PATTERN (insn)) == USE)
15924 switch (rs6000_cpu_attr) {
15926 switch (get_attr_type (insn))
15933 fprintf (stderr, "priority was %#x (%d) before adjustment\n",
15934 priority, priority);
15935 if (priority >= 0 && priority < 0x01000000)
15942 if (is_dispatch_slot_restricted (insn)
15943 && reload_completed
15944 && current_sched_info->sched_max_insns_priority
15945 && rs6000_sched_restricted_insns_priority)
15948 /* Prioritize insns that can be dispatched only in the first
15950 if (rs6000_sched_restricted_insns_priority == 1)
15951 /* Attach highest priority to insn. This means that in
15952 haifa-sched.c:ready_sort(), dispatch-slot restriction considerations
15953 precede 'priority' (critical path) considerations. */
15954 return current_sched_info->sched_max_insns_priority;
15955 else if (rs6000_sched_restricted_insns_priority == 2)
15956 /* Increase priority of insn by a minimal amount. This means that in
15957 haifa-sched.c:ready_sort(), only 'priority' (critical path)
15958 considerations precede dispatch-slot restriction considerations. */
15959 return (priority + 1);
15965 /* Return how many instructions the machine can issue per cycle. */
15968 rs6000_issue_rate (void)
15970 /* Use issue rate of 1 for first scheduling pass to decrease degradation. */
15971 if (!reload_completed)
15974 switch (rs6000_cpu_attr) {
15975 case CPU_RIOS1: /* ? */
15977 case CPU_PPC601: /* ? */
16000 /* Return how many instructions to look ahead for better insn
16004 rs6000_use_sched_lookahead (void)
16006 if (rs6000_cpu_attr == CPU_PPC8540)
16011 /* Determine is PAT refers to memory. */
16014 is_mem_ref (rtx pat)
16020 if (GET_CODE (pat) == MEM)
16023 /* Recursively process the pattern. */
16024 fmt = GET_RTX_FORMAT (GET_CODE (pat));
16026 for (i = GET_RTX_LENGTH (GET_CODE (pat)) - 1; i >= 0 && !ret; i--)
16029 ret |= is_mem_ref (XEXP (pat, i));
16030 else if (fmt[i] == 'E')
16031 for (j = XVECLEN (pat, i) - 1; j >= 0; j--)
16032 ret |= is_mem_ref (XVECEXP (pat, i, j));
16038 /* Determine if PAT is a PATTERN of a load insn. */
16041 is_load_insn1 (rtx pat)
16043 if (!pat || pat == NULL_RTX)
16046 if (GET_CODE (pat) == SET)
16047 return is_mem_ref (SET_SRC (pat));
16049 if (GET_CODE (pat) == PARALLEL)
16053 for (i = 0; i < XVECLEN (pat, 0); i++)
16054 if (is_load_insn1 (XVECEXP (pat, 0, i)))
16061 /* Determine if INSN loads from memory. */
16064 is_load_insn (rtx insn)
16066 if (!insn || !INSN_P (insn))
16069 if (GET_CODE (insn) == CALL_INSN)
16072 return is_load_insn1 (PATTERN (insn));
16075 /* Determine if PAT is a PATTERN of a store insn. */
16078 is_store_insn1 (rtx pat)
16080 if (!pat || pat == NULL_RTX)
16083 if (GET_CODE (pat) == SET)
16084 return is_mem_ref (SET_DEST (pat));
16086 if (GET_CODE (pat) == PARALLEL)
16090 for (i = 0; i < XVECLEN (pat, 0); i++)
16091 if (is_store_insn1 (XVECEXP (pat, 0, i)))
16098 /* Determine if INSN stores to memory. */
16101 is_store_insn (rtx insn)
16103 if (!insn || !INSN_P (insn))
16106 return is_store_insn1 (PATTERN (insn));
16109 /* Returns whether the dependence between INSN and NEXT is considered
16110 costly by the given target. */
16113 rs6000_is_costly_dependence (rtx insn, rtx next, rtx link, int cost,
16116 /* If the flag is not enbled - no dependence is considered costly;
16117 allow all dependent insns in the same group.
16118 This is the most aggressive option. */
16119 if (rs6000_sched_costly_dep == no_dep_costly)
16122 /* If the flag is set to 1 - a dependence is always considered costly;
16123 do not allow dependent instructions in the same group.
16124 This is the most conservative option. */
16125 if (rs6000_sched_costly_dep == all_deps_costly)
16128 if (rs6000_sched_costly_dep == store_to_load_dep_costly
16129 && is_load_insn (next)
16130 && is_store_insn (insn))
16131 /* Prevent load after store in the same group. */
16134 if (rs6000_sched_costly_dep == true_store_to_load_dep_costly
16135 && is_load_insn (next)
16136 && is_store_insn (insn)
16137 && (!link || (int) REG_NOTE_KIND (link) == 0))
16138 /* Prevent load after store in the same group if it is a true
16142 /* The flag is set to X; dependences with latency >= X are considered costly,
16143 and will not be scheduled in the same group. */
16144 if (rs6000_sched_costly_dep <= max_dep_latency
16145 && ((cost - distance) >= (int)rs6000_sched_costly_dep))
16151 /* Return the next insn after INSN that is found before TAIL is reached,
16152 skipping any "non-active" insns - insns that will not actually occupy
16153 an issue slot. Return NULL_RTX if such an insn is not found. */
16156 get_next_active_insn (rtx insn, rtx tail)
16160 if (!insn || insn == tail)
16163 next_insn = NEXT_INSN (insn);
16166 && next_insn != tail
16167 && (GET_CODE(next_insn) == NOTE
16168 || GET_CODE (PATTERN (next_insn)) == USE
16169 || GET_CODE (PATTERN (next_insn)) == CLOBBER))
16171 next_insn = NEXT_INSN (next_insn);
16174 if (!next_insn || next_insn == tail)
16180 /* Return whether the presence of INSN causes a dispatch group termination
16181 of group WHICH_GROUP.
16183 If WHICH_GROUP == current_group, this function will return true if INSN
16184 causes the termination of the current group (i.e, the dispatch group to
16185 which INSN belongs). This means that INSN will be the last insn in the
16186 group it belongs to.
16188 If WHICH_GROUP == previous_group, this function will return true if INSN
16189 causes the termination of the previous group (i.e, the dispatch group that
16190 precedes the group to which INSN belongs). This means that INSN will be
16191 the first insn in the group it belongs to). */
16194 insn_terminates_group_p (rtx insn, enum group_termination which_group)
16196 enum attr_type type;
16201 type = get_attr_type (insn);
16203 if (is_microcoded_insn (insn))
16206 if (which_group == current_group)
16208 if (is_branch_slot_insn (insn))
16212 else if (which_group == previous_group)
16214 if (is_dispatch_slot_restricted (insn))
16222 /* Return true if it is recommended to keep NEXT_INSN "far" (in a separate
16223 dispatch group) from the insns in GROUP_INSNS. Return false otherwise. */
16226 is_costly_group (rtx *group_insns, rtx next_insn)
16231 int issue_rate = rs6000_issue_rate ();
16233 for (i = 0; i < issue_rate; i++)
16235 rtx insn = group_insns[i];
16238 for (link = INSN_DEPEND (insn); link != 0; link = XEXP (link, 1))
16240 rtx next = XEXP (link, 0);
16241 if (next == next_insn)
16243 cost = insn_cost (insn, link, next_insn);
16244 if (rs6000_is_costly_dependence (insn, next_insn, link, cost, 0))
16253 /* Utility of the function redefine_groups.
16254 Check if it is too costly to schedule NEXT_INSN together with GROUP_INSNS
16255 in the same dispatch group. If so, insert nops before NEXT_INSN, in order
16256 to keep it "far" (in a separate group) from GROUP_INSNS, following
16257 one of the following schemes, depending on the value of the flag
16258 -minsert_sched_nops = X:
16259 (1) X == sched_finish_regroup_exact: insert exactly as many nops as needed
16260 in order to force NEXT_INSN into a separate group.
16261 (2) X < sched_finish_regroup_exact: insert exactly X nops.
16262 GROUP_END, CAN_ISSUE_MORE and GROUP_COUNT record the state after nop
16263 insertion (has a group just ended, how many vacant issue slots remain in the
16264 last group, and how many dispatch groups were encountered so far). */
16267 force_new_group (int sched_verbose, FILE *dump, rtx *group_insns,
16268 rtx next_insn, bool *group_end, int can_issue_more,
16273 int issue_rate = rs6000_issue_rate ();
16274 bool end = *group_end;
16277 if (next_insn == NULL_RTX)
16278 return can_issue_more;
16280 if (rs6000_sched_insert_nops > sched_finish_regroup_exact)
16281 return can_issue_more;
16283 force = is_costly_group (group_insns, next_insn);
16285 return can_issue_more;
16287 if (sched_verbose > 6)
16288 fprintf (dump,"force: group count = %d, can_issue_more = %d\n",
16289 *group_count ,can_issue_more);
16291 if (rs6000_sched_insert_nops == sched_finish_regroup_exact)
16294 can_issue_more = 0;
16296 /* Since only a branch can be issued in the last issue_slot, it is
16297 sufficient to insert 'can_issue_more - 1' nops if next_insn is not
16298 a branch. If next_insn is a branch, we insert 'can_issue_more' nops;
16299 in this case the last nop will start a new group and the branch
16300 will be forced to the new group. */
16301 if (can_issue_more && !is_branch_slot_insn (next_insn))
16304 while (can_issue_more > 0)
16307 emit_insn_before (nop, next_insn);
16315 if (rs6000_sched_insert_nops < sched_finish_regroup_exact)
16317 int n_nops = rs6000_sched_insert_nops;
16319 /* Nops can't be issued from the branch slot, so the effective
16320 issue_rate for nops is 'issue_rate - 1'. */
16321 if (can_issue_more == 0)
16322 can_issue_more = issue_rate;
16324 if (can_issue_more == 0)
16326 can_issue_more = issue_rate - 1;
16329 for (i = 0; i < issue_rate; i++)
16331 group_insns[i] = 0;
16338 emit_insn_before (nop, next_insn);
16339 if (can_issue_more == issue_rate - 1) /* new group begins */
16342 if (can_issue_more == 0)
16344 can_issue_more = issue_rate - 1;
16347 for (i = 0; i < issue_rate; i++)
16349 group_insns[i] = 0;
16355 /* Scale back relative to 'issue_rate' (instead of 'issue_rate - 1'). */
16358 /* Is next_insn going to start a new group? */
16361 || (can_issue_more == 1 && !is_branch_slot_insn (next_insn))
16362 || (can_issue_more <= 2 && is_cracked_insn (next_insn))
16363 || (can_issue_more < issue_rate &&
16364 insn_terminates_group_p (next_insn, previous_group)));
16365 if (*group_end && end)
16368 if (sched_verbose > 6)
16369 fprintf (dump, "done force: group count = %d, can_issue_more = %d\n",
16370 *group_count, can_issue_more);
16371 return can_issue_more;
16374 return can_issue_more;
16377 /* This function tries to synch the dispatch groups that the compiler "sees"
16378 with the dispatch groups that the processor dispatcher is expected to
16379 form in practice. It tries to achieve this synchronization by forcing the
16380 estimated processor grouping on the compiler (as opposed to the function
16381 'pad_goups' which tries to force the scheduler's grouping on the processor).
16383 The function scans the insn sequence between PREV_HEAD_INSN and TAIL and
16384 examines the (estimated) dispatch groups that will be formed by the processor
16385 dispatcher. It marks these group boundaries to reflect the estimated
16386 processor grouping, overriding the grouping that the scheduler had marked.
16387 Depending on the value of the flag '-minsert-sched-nops' this function can
16388 force certain insns into separate groups or force a certain distance between
16389 them by inserting nops, for example, if there exists a "costly dependence"
16392 The function estimates the group boundaries that the processor will form as
16393 folllows: It keeps track of how many vacant issue slots are available after
16394 each insn. A subsequent insn will start a new group if one of the following
16396 - no more vacant issue slots remain in the current dispatch group.
16397 - only the last issue slot, which is the branch slot, is vacant, but the next
16398 insn is not a branch.
16399 - only the last 2 or less issue slots, including the branch slot, are vacant,
16400 which means that a cracked insn (which occupies two issue slots) can't be
16401 issued in this group.
16402 - less than 'issue_rate' slots are vacant, and the next insn always needs to
16403 start a new group. */
16406 redefine_groups (FILE *dump, int sched_verbose, rtx prev_head_insn, rtx tail)
16408 rtx insn, next_insn;
16410 int can_issue_more;
16413 int group_count = 0;
16417 issue_rate = rs6000_issue_rate ();
16418 group_insns = alloca (issue_rate * sizeof (rtx));
16419 for (i = 0; i < issue_rate; i++)
16421 group_insns[i] = 0;
16423 can_issue_more = issue_rate;
16425 insn = get_next_active_insn (prev_head_insn, tail);
16428 while (insn != NULL_RTX)
16430 slot = (issue_rate - can_issue_more);
16431 group_insns[slot] = insn;
16433 rs6000_variable_issue (dump, sched_verbose, insn, can_issue_more);
16434 if (insn_terminates_group_p (insn, current_group))
16435 can_issue_more = 0;
16437 next_insn = get_next_active_insn (insn, tail);
16438 if (next_insn == NULL_RTX)
16439 return group_count + 1;
16441 /* Is next_insn going to start a new group? */
16443 = (can_issue_more == 0
16444 || (can_issue_more == 1 && !is_branch_slot_insn (next_insn))
16445 || (can_issue_more <= 2 && is_cracked_insn (next_insn))
16446 || (can_issue_more < issue_rate &&
16447 insn_terminates_group_p (next_insn, previous_group)));
16449 can_issue_more = force_new_group (sched_verbose, dump, group_insns,
16450 next_insn, &group_end, can_issue_more,
16456 can_issue_more = 0;
16457 for (i = 0; i < issue_rate; i++)
16459 group_insns[i] = 0;
16463 if (GET_MODE (next_insn) == TImode && can_issue_more)
16464 PUT_MODE(next_insn, VOIDmode);
16465 else if (!can_issue_more && GET_MODE (next_insn) != TImode)
16466 PUT_MODE (next_insn, TImode);
16469 if (can_issue_more == 0)
16470 can_issue_more = issue_rate;
16473 return group_count;
16476 /* Scan the insn sequence between PREV_HEAD_INSN and TAIL and examine the
16477 dispatch group boundaries that the scheduler had marked. Pad with nops
16478 any dispatch groups which have vacant issue slots, in order to force the
16479 scheduler's grouping on the processor dispatcher. The function
16480 returns the number of dispatch groups found. */
16483 pad_groups (FILE *dump, int sched_verbose, rtx prev_head_insn, rtx tail)
16485 rtx insn, next_insn;
16488 int can_issue_more;
16490 int group_count = 0;
16492 /* Initialize issue_rate. */
16493 issue_rate = rs6000_issue_rate ();
16494 can_issue_more = issue_rate;
16496 insn = get_next_active_insn (prev_head_insn, tail);
16497 next_insn = get_next_active_insn (insn, tail);
16499 while (insn != NULL_RTX)
16502 rs6000_variable_issue (dump, sched_verbose, insn, can_issue_more);
16504 group_end = (next_insn == NULL_RTX || GET_MODE (next_insn) == TImode);
16506 if (next_insn == NULL_RTX)
16511 /* If the scheduler had marked group termination at this location
16512 (between insn and next_indn), and neither insn nor next_insn will
16513 force group termination, pad the group with nops to force group
16516 && (rs6000_sched_insert_nops == sched_finish_pad_groups)
16517 && !insn_terminates_group_p (insn, current_group)
16518 && !insn_terminates_group_p (next_insn, previous_group))
16520 if (!is_branch_slot_insn(next_insn))
16523 while (can_issue_more)
16526 emit_insn_before (nop, next_insn);
16531 can_issue_more = issue_rate;
16536 next_insn = get_next_active_insn (insn, tail);
16539 return group_count;
16542 /* The following function is called at the end of scheduling BB.
16543 After reload, it inserts nops at insn group bundling. */
16546 rs6000_sched_finish (FILE *dump, int sched_verbose)
16551 fprintf (dump, "=== Finishing schedule.\n");
16553 if (reload_completed && rs6000_sched_groups)
16555 if (rs6000_sched_insert_nops == sched_finish_none)
16558 if (rs6000_sched_insert_nops == sched_finish_pad_groups)
16559 n_groups = pad_groups (dump, sched_verbose,
16560 current_sched_info->prev_head,
16561 current_sched_info->next_tail);
16563 n_groups = redefine_groups (dump, sched_verbose,
16564 current_sched_info->prev_head,
16565 current_sched_info->next_tail);
16567 if (sched_verbose >= 6)
16569 fprintf (dump, "ngroups = %d\n", n_groups);
16570 print_rtl (dump, current_sched_info->prev_head);
16571 fprintf (dump, "Done finish_sched\n");
16576 /* Length in units of the trampoline for entering a nested function. */
16579 rs6000_trampoline_size (void)
16583 switch (DEFAULT_ABI)
16589 ret = (TARGET_32BIT) ? 12 : 24;
16594 ret = (TARGET_32BIT) ? 40 : 48;
16601 /* Emit RTL insns to initialize the variable parts of a trampoline.
16602 FNADDR is an RTX for the address of the function's pure code.
16603 CXT is an RTX for the static chain value for the function. */
16606 rs6000_initialize_trampoline (rtx addr, rtx fnaddr, rtx cxt)
16608 enum machine_mode pmode = Pmode;
16609 int regsize = (TARGET_32BIT) ? 4 : 8;
16610 rtx ctx_reg = force_reg (pmode, cxt);
16612 switch (DEFAULT_ABI)
16617 /* Macros to shorten the code expansions below. */
16618 #define MEM_DEREF(addr) gen_rtx_MEM (pmode, memory_address (pmode, addr))
16619 #define MEM_PLUS(addr,offset) \
16620 gen_rtx_MEM (pmode, memory_address (pmode, plus_constant (addr, offset)))
16622 /* Under AIX, just build the 3 word function descriptor */
16625 rtx fn_reg = gen_reg_rtx (pmode);
16626 rtx toc_reg = gen_reg_rtx (pmode);
16627 emit_move_insn (fn_reg, MEM_DEREF (fnaddr));
16628 emit_move_insn (toc_reg, MEM_PLUS (fnaddr, regsize));
16629 emit_move_insn (MEM_DEREF (addr), fn_reg);
16630 emit_move_insn (MEM_PLUS (addr, regsize), toc_reg);
16631 emit_move_insn (MEM_PLUS (addr, 2*regsize), ctx_reg);
16635 /* Under V.4/eabi/darwin, __trampoline_setup does the real work. */
16638 emit_library_call (gen_rtx_SYMBOL_REF (SImode, "__trampoline_setup"),
16639 FALSE, VOIDmode, 4,
16641 GEN_INT (rs6000_trampoline_size ()), SImode,
16651 /* Table of valid machine attributes. */
16653 const struct attribute_spec rs6000_attribute_table[] =
16655 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
16656 { "altivec", 1, 1, false, true, false, rs6000_handle_altivec_attribute },
16657 { "longcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
16658 { "shortcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
16659 #ifdef SUBTARGET_ATTRIBUTE_TABLE
16660 SUBTARGET_ATTRIBUTE_TABLE,
16662 { NULL, 0, 0, false, false, false, NULL }
16665 /* Handle the "altivec" attribute. The attribute may have
16666 arguments as follows:
16668 __attribute__((altivec(vector__)))
16669 __attribute__((altivec(pixel__))) (always followed by 'unsigned short')
16670 __attribute__((altivec(bool__))) (always followed by 'unsigned')
16672 and may appear more than once (e.g., 'vector bool char') in a
16673 given declaration. */
16676 rs6000_handle_altivec_attribute (tree *node, tree name, tree args,
16677 int flags ATTRIBUTE_UNUSED,
16678 bool *no_add_attrs)
16680 tree type = *node, result = NULL_TREE;
16681 enum machine_mode mode;
16684 = ((args && TREE_CODE (args) == TREE_LIST && TREE_VALUE (args)
16685 && TREE_CODE (TREE_VALUE (args)) == IDENTIFIER_NODE)
16686 ? *IDENTIFIER_POINTER (TREE_VALUE (args))
16689 while (POINTER_TYPE_P (type)
16690 || TREE_CODE (type) == FUNCTION_TYPE
16691 || TREE_CODE (type) == METHOD_TYPE
16692 || TREE_CODE (type) == ARRAY_TYPE)
16693 type = TREE_TYPE (type);
16695 mode = TYPE_MODE (type);
16697 if (rs6000_warn_altivec_long
16698 && (type == long_unsigned_type_node || type == long_integer_type_node))
16699 warning ("use of 'long' in AltiVec types is deprecated; use 'int'");
16701 switch (altivec_type)
16704 unsigned_p = TYPE_UNSIGNED (type);
16708 result = (unsigned_p ? unsigned_V4SI_type_node : V4SI_type_node);
16711 result = (unsigned_p ? unsigned_V8HI_type_node : V8HI_type_node);
16714 result = (unsigned_p ? unsigned_V16QI_type_node : V16QI_type_node);
16716 case SFmode: result = V4SF_type_node; break;
16717 /* If the user says 'vector int bool', we may be handed the 'bool'
16718 attribute _before_ the 'vector' attribute, and so select the
16719 proper type in the 'b' case below. */
16720 case V4SImode: case V8HImode: case V16QImode: case V4SFmode:
16728 case SImode: case V4SImode: result = bool_V4SI_type_node; break;
16729 case HImode: case V8HImode: result = bool_V8HI_type_node; break;
16730 case QImode: case V16QImode: result = bool_V16QI_type_node;
16737 case V8HImode: result = pixel_V8HI_type_node;
16743 if (result && result != type && TYPE_READONLY (type))
16744 result = build_qualified_type (result, TYPE_QUAL_CONST);
16746 *no_add_attrs = true; /* No need to hang on to the attribute. */
16749 warning ("`%s' attribute ignored", IDENTIFIER_POINTER (name));
16751 *node = reconstruct_complex_type (*node, result);
16756 /* AltiVec defines four built-in scalar types that serve as vector
16757 elements; we must teach the compiler how to mangle them. */
16759 static const char *
16760 rs6000_mangle_fundamental_type (tree type)
16762 if (type == bool_char_type_node) return "U6__boolc";
16763 if (type == bool_short_type_node) return "U6__bools";
16764 if (type == pixel_type_node) return "u7__pixel";
16765 if (type == bool_int_type_node) return "U6__booli";
16767 /* For all other types, use normal C++ mangling. */
16771 /* Handle a "longcall" or "shortcall" attribute; arguments as in
16772 struct attribute_spec.handler. */
16775 rs6000_handle_longcall_attribute (tree *node, tree name,
16776 tree args ATTRIBUTE_UNUSED,
16777 int flags ATTRIBUTE_UNUSED,
16778 bool *no_add_attrs)
16780 if (TREE_CODE (*node) != FUNCTION_TYPE
16781 && TREE_CODE (*node) != FIELD_DECL
16782 && TREE_CODE (*node) != TYPE_DECL)
16784 warning ("`%s' attribute only applies to functions",
16785 IDENTIFIER_POINTER (name));
16786 *no_add_attrs = true;
16792 /* Set longcall attributes on all functions declared when
16793 rs6000_default_long_calls is true. */
16795 rs6000_set_default_type_attributes (tree type)
16797 if (rs6000_default_long_calls
16798 && (TREE_CODE (type) == FUNCTION_TYPE
16799 || TREE_CODE (type) == METHOD_TYPE))
16800 TYPE_ATTRIBUTES (type) = tree_cons (get_identifier ("longcall"),
16802 TYPE_ATTRIBUTES (type));
16805 /* Return a reference suitable for calling a function with the
16806 longcall attribute. */
16809 rs6000_longcall_ref (rtx call_ref)
16811 const char *call_name;
16814 if (GET_CODE (call_ref) != SYMBOL_REF)
16817 /* System V adds '.' to the internal name, so skip them. */
16818 call_name = XSTR (call_ref, 0);
16819 if (*call_name == '.')
16821 while (*call_name == '.')
16824 node = get_identifier (call_name);
16825 call_ref = gen_rtx_SYMBOL_REF (VOIDmode, IDENTIFIER_POINTER (node));
16828 return force_reg (Pmode, call_ref);
16831 #ifdef USING_ELFOS_H
16833 /* A C statement or statements to switch to the appropriate section
16834 for output of RTX in mode MODE. You can assume that RTX is some
16835 kind of constant in RTL. The argument MODE is redundant except in
16836 the case of a `const_int' rtx. Select the section by calling
16837 `text_section' or one of the alternatives for other sections.
16839 Do not define this macro if you put all constants in the read-only
16843 rs6000_elf_select_rtx_section (enum machine_mode mode, rtx x,
16844 unsigned HOST_WIDE_INT align)
16846 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
16849 default_elf_select_rtx_section (mode, x, align);
16852 /* A C statement or statements to switch to the appropriate
16853 section for output of DECL. DECL is either a `VAR_DECL' node
16854 or a constant of some sort. RELOC indicates whether forming
16855 the initial value of DECL requires link-time relocations. */
16858 rs6000_elf_select_section (tree decl, int reloc,
16859 unsigned HOST_WIDE_INT align)
16861 /* Pretend that we're always building for a shared library when
16862 ABI_AIX, because otherwise we end up with dynamic relocations
16863 in read-only sections. This happens for function pointers,
16864 references to vtables in typeinfo, and probably other cases. */
16865 default_elf_select_section_1 (decl, reloc, align,
16866 flag_pic || DEFAULT_ABI == ABI_AIX);
16869 /* A C statement to build up a unique section name, expressed as a
16870 STRING_CST node, and assign it to DECL_SECTION_NAME (decl).
16871 RELOC indicates whether the initial value of EXP requires
16872 link-time relocations. If you do not define this macro, GCC will use
16873 the symbol name prefixed by `.' as the section name. Note - this
16874 macro can now be called for uninitialized data items as well as
16875 initialized data and functions. */
16878 rs6000_elf_unique_section (tree decl, int reloc)
16880 /* As above, pretend that we're always building for a shared library
16881 when ABI_AIX, to avoid dynamic relocations in read-only sections. */
16882 default_unique_section_1 (decl, reloc,
16883 flag_pic || DEFAULT_ABI == ABI_AIX);
16886 /* For a SYMBOL_REF, set generic flags and then perform some
16887 target-specific processing.
16889 When the AIX ABI is requested on a non-AIX system, replace the
16890 function name with the real name (with a leading .) rather than the
16891 function descriptor name. This saves a lot of overriding code to
16892 read the prefixes. */
16895 rs6000_elf_encode_section_info (tree decl, rtx rtl, int first)
16897 default_encode_section_info (decl, rtl, first);
16900 && TREE_CODE (decl) == FUNCTION_DECL
16902 && DEFAULT_ABI == ABI_AIX)
16904 rtx sym_ref = XEXP (rtl, 0);
16905 size_t len = strlen (XSTR (sym_ref, 0));
16906 char *str = alloca (len + 2);
16908 memcpy (str + 1, XSTR (sym_ref, 0), len + 1);
16909 XSTR (sym_ref, 0) = ggc_alloc_string (str, len + 1);
16914 rs6000_elf_in_small_data_p (tree decl)
16916 if (rs6000_sdata == SDATA_NONE)
16919 if (TREE_CODE (decl) == VAR_DECL && DECL_SECTION_NAME (decl))
16921 const char *section = TREE_STRING_POINTER (DECL_SECTION_NAME (decl));
16922 if (strcmp (section, ".sdata") == 0
16923 || strcmp (section, ".sdata2") == 0
16924 || strcmp (section, ".sbss") == 0
16925 || strcmp (section, ".sbss2") == 0
16926 || strcmp (section, ".PPC.EMB.sdata0") == 0
16927 || strcmp (section, ".PPC.EMB.sbss0") == 0)
16932 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
16935 && (unsigned HOST_WIDE_INT) size <= g_switch_value
16936 /* If it's not public, and we're not going to reference it there,
16937 there's no need to put it in the small data section. */
16938 && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)))
16945 #endif /* USING_ELFOS_H */
16948 /* Return a REG that occurs in ADDR with coefficient 1.
16949 ADDR can be effectively incremented by incrementing REG.
16951 r0 is special and we must not select it as an address
16952 register by this routine since our caller will try to
16953 increment the returned register via an "la" instruction. */
16956 find_addr_reg (rtx addr)
16958 while (GET_CODE (addr) == PLUS)
16960 if (GET_CODE (XEXP (addr, 0)) == REG
16961 && REGNO (XEXP (addr, 0)) != 0)
16962 addr = XEXP (addr, 0);
16963 else if (GET_CODE (XEXP (addr, 1)) == REG
16964 && REGNO (XEXP (addr, 1)) != 0)
16965 addr = XEXP (addr, 1);
16966 else if (CONSTANT_P (XEXP (addr, 0)))
16967 addr = XEXP (addr, 1);
16968 else if (CONSTANT_P (XEXP (addr, 1)))
16969 addr = XEXP (addr, 0);
16973 if (GET_CODE (addr) == REG && REGNO (addr) != 0)
16979 rs6000_fatal_bad_address (rtx op)
16981 fatal_insn ("bad address", op);
16986 static tree branch_island_list = 0;
16988 /* Remember to generate a branch island for far calls to the given
16992 add_compiler_branch_island (tree label_name, tree function_name,
16995 tree branch_island = build_tree_list (function_name, label_name);
16996 TREE_TYPE (branch_island) = build_int_cst (NULL_TREE, line_number);
16997 TREE_CHAIN (branch_island) = branch_island_list;
16998 branch_island_list = branch_island;
17001 #define BRANCH_ISLAND_LABEL_NAME(BRANCH_ISLAND) TREE_VALUE (BRANCH_ISLAND)
17002 #define BRANCH_ISLAND_FUNCTION_NAME(BRANCH_ISLAND) TREE_PURPOSE (BRANCH_ISLAND)
17003 #define BRANCH_ISLAND_LINE_NUMBER(BRANCH_ISLAND) \
17004 TREE_INT_CST_LOW (TREE_TYPE (BRANCH_ISLAND))
17006 /* Generate far-jump branch islands for everything on the
17007 branch_island_list. Invoked immediately after the last instruction
17008 of the epilogue has been emitted; the branch-islands must be
17009 appended to, and contiguous with, the function body. Mach-O stubs
17010 are generated in machopic_output_stub(). */
17013 macho_branch_islands (void)
17016 tree branch_island;
17018 for (branch_island = branch_island_list;
17020 branch_island = TREE_CHAIN (branch_island))
17022 const char *label =
17023 IDENTIFIER_POINTER (BRANCH_ISLAND_LABEL_NAME (branch_island));
17025 IDENTIFIER_POINTER (BRANCH_ISLAND_FUNCTION_NAME (branch_island));
17026 char name_buf[512];
17027 /* Cheap copy of the details from the Darwin ASM_OUTPUT_LABELREF(). */
17028 if (name[0] == '*' || name[0] == '&')
17029 strcpy (name_buf, name+1);
17033 strcpy (name_buf+1, name);
17035 strcpy (tmp_buf, "\n");
17036 strcat (tmp_buf, label);
17037 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
17038 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
17039 dbxout_stabd (N_SLINE, BRANCH_ISLAND_LINE_NUMBER (branch_island));
17040 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
17043 strcat (tmp_buf, ":\n\tmflr r0\n\tbcl 20,31,");
17044 strcat (tmp_buf, label);
17045 strcat (tmp_buf, "_pic\n");
17046 strcat (tmp_buf, label);
17047 strcat (tmp_buf, "_pic:\n\tmflr r11\n");
17049 strcat (tmp_buf, "\taddis r11,r11,ha16(");
17050 strcat (tmp_buf, name_buf);
17051 strcat (tmp_buf, " - ");
17052 strcat (tmp_buf, label);
17053 strcat (tmp_buf, "_pic)\n");
17055 strcat (tmp_buf, "\tmtlr r0\n");
17057 strcat (tmp_buf, "\taddi r12,r11,lo16(");
17058 strcat (tmp_buf, name_buf);
17059 strcat (tmp_buf, " - ");
17060 strcat (tmp_buf, label);
17061 strcat (tmp_buf, "_pic)\n");
17063 strcat (tmp_buf, "\tmtctr r12\n\tbctr\n");
17067 strcat (tmp_buf, ":\nlis r12,hi16(");
17068 strcat (tmp_buf, name_buf);
17069 strcat (tmp_buf, ")\n\tori r12,r12,lo16(");
17070 strcat (tmp_buf, name_buf);
17071 strcat (tmp_buf, ")\n\tmtctr r12\n\tbctr");
17073 output_asm_insn (tmp_buf, 0);
17074 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
17075 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
17076 dbxout_stabd (N_SLINE, BRANCH_ISLAND_LINE_NUMBER (branch_island));
17077 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
17080 branch_island_list = 0;
17083 /* NO_PREVIOUS_DEF checks in the link list whether the function name is
17084 already there or not. */
17087 no_previous_def (tree function_name)
17089 tree branch_island;
17090 for (branch_island = branch_island_list;
17092 branch_island = TREE_CHAIN (branch_island))
17093 if (function_name == BRANCH_ISLAND_FUNCTION_NAME (branch_island))
17098 /* GET_PREV_LABEL gets the label name from the previous definition of
17102 get_prev_label (tree function_name)
17104 tree branch_island;
17105 for (branch_island = branch_island_list;
17107 branch_island = TREE_CHAIN (branch_island))
17108 if (function_name == BRANCH_ISLAND_FUNCTION_NAME (branch_island))
17109 return BRANCH_ISLAND_LABEL_NAME (branch_island);
17113 /* INSN is either a function call or a millicode call. It may have an
17114 unconditional jump in its delay slot.
17116 CALL_DEST is the routine we are calling. */
17119 output_call (rtx insn, rtx *operands, int dest_operand_number,
17120 int cookie_operand_number)
17122 static char buf[256];
17123 if (GET_CODE (operands[dest_operand_number]) == SYMBOL_REF
17124 && (INTVAL (operands[cookie_operand_number]) & CALL_LONG))
17127 tree funname = get_identifier (XSTR (operands[dest_operand_number], 0));
17129 if (no_previous_def (funname))
17131 int line_number = 0;
17132 rtx label_rtx = gen_label_rtx ();
17133 char *label_buf, temp_buf[256];
17134 ASM_GENERATE_INTERNAL_LABEL (temp_buf, "L",
17135 CODE_LABEL_NUMBER (label_rtx));
17136 label_buf = temp_buf[0] == '*' ? temp_buf + 1 : temp_buf;
17137 labelname = get_identifier (label_buf);
17138 for (; insn && GET_CODE (insn) != NOTE; insn = PREV_INSN (insn));
17140 line_number = NOTE_LINE_NUMBER (insn);
17141 add_compiler_branch_island (labelname, funname, line_number);
17144 labelname = get_prev_label (funname);
17146 /* "jbsr foo, L42" is Mach-O for "Link as 'bl foo' if a 'bl'
17147 instruction will reach 'foo', otherwise link as 'bl L42'".
17148 "L42" should be a 'branch island', that will do a far jump to
17149 'foo'. Branch islands are generated in
17150 macho_branch_islands(). */
17151 sprintf (buf, "jbsr %%z%d,%.246s",
17152 dest_operand_number, IDENTIFIER_POINTER (labelname));
17155 sprintf (buf, "bl %%z%d", dest_operand_number);
17159 /* Generate PIC and indirect symbol stubs. */
17162 machopic_output_stub (FILE *file, const char *symb, const char *stub)
17164 unsigned int length;
17165 char *symbol_name, *lazy_ptr_name;
17166 char *local_label_0;
17167 static int label = 0;
17169 /* Lose our funky encoding stuff so it doesn't contaminate the stub. */
17170 symb = (*targetm.strip_name_encoding) (symb);
17173 length = strlen (symb);
17174 symbol_name = alloca (length + 32);
17175 GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name, symb, length);
17177 lazy_ptr_name = alloca (length + 32);
17178 GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name, symb, length);
17181 machopic_picsymbol_stub1_section ();
17183 machopic_symbol_stub1_section ();
17187 fprintf (file, "\t.align 5\n");
17189 fprintf (file, "%s:\n", stub);
17190 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
17193 local_label_0 = alloca (sizeof("\"L0000000000$spb\""));
17194 sprintf (local_label_0, "\"L%011d$spb\"", label);
17196 fprintf (file, "\tmflr r0\n");
17197 fprintf (file, "\tbcl 20,31,%s\n", local_label_0);
17198 fprintf (file, "%s:\n\tmflr r11\n", local_label_0);
17199 fprintf (file, "\taddis r11,r11,ha16(%s-%s)\n",
17200 lazy_ptr_name, local_label_0);
17201 fprintf (file, "\tmtlr r0\n");
17202 fprintf (file, "\t%s r12,lo16(%s-%s)(r11)\n",
17203 (TARGET_64BIT ? "ldu" : "lwzu"),
17204 lazy_ptr_name, local_label_0);
17205 fprintf (file, "\tmtctr r12\n");
17206 fprintf (file, "\tbctr\n");
17210 fprintf (file, "\t.align 4\n");
17212 fprintf (file, "%s:\n", stub);
17213 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
17215 fprintf (file, "\tlis r11,ha16(%s)\n", lazy_ptr_name);
17216 fprintf (file, "\tlwzu r12,lo16(%s)(r11)\n", lazy_ptr_name);
17217 fprintf (file, "\tmtctr r12\n");
17218 fprintf (file, "\tbctr\n");
17221 machopic_lazy_symbol_ptr_section ();
17222 fprintf (file, "%s:\n", lazy_ptr_name);
17223 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
17224 fprintf (file, "%sdyld_stub_binding_helper\n",
17225 (TARGET_64BIT ? DOUBLE_INT_ASM_OP : "\t.long\t"));
17228 /* Legitimize PIC addresses. If the address is already
17229 position-independent, we return ORIG. Newly generated
17230 position-independent addresses go into a reg. This is REG if non
17231 zero, otherwise we allocate register(s) as necessary. */
17233 #define SMALL_INT(X) ((unsigned) (INTVAL(X) + 0x8000) < 0x10000)
17236 rs6000_machopic_legitimize_pic_address (rtx orig, enum machine_mode mode,
17241 if (reg == NULL && ! reload_in_progress && ! reload_completed)
17242 reg = gen_reg_rtx (Pmode);
17244 if (GET_CODE (orig) == CONST)
17246 if (GET_CODE (XEXP (orig, 0)) == PLUS
17247 && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
17250 if (GET_CODE (XEXP (orig, 0)) == PLUS)
17252 /* Use a different reg for the intermediate value, as
17253 it will be marked UNCHANGING. */
17254 rtx reg_temp = no_new_pseudos ? reg : gen_reg_rtx (Pmode);
17257 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 0),
17260 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 1),
17266 if (GET_CODE (offset) == CONST_INT)
17268 if (SMALL_INT (offset))
17269 return plus_constant (base, INTVAL (offset));
17270 else if (! reload_in_progress && ! reload_completed)
17271 offset = force_reg (Pmode, offset);
17274 rtx mem = force_const_mem (Pmode, orig);
17275 return machopic_legitimize_pic_address (mem, Pmode, reg);
17278 return gen_rtx_PLUS (Pmode, base, offset);
17281 /* Fall back on generic machopic code. */
17282 return machopic_legitimize_pic_address (orig, mode, reg);
17285 /* This is just a placeholder to make linking work without having to
17286 add this to the generic Darwin EXTRA_SECTIONS. If -mcall-aix is
17287 ever needed for Darwin (not too likely!) this would have to get a
17288 real definition. */
17295 /* Output a .machine directive for the Darwin assembler, and call
17296 the generic start_file routine. */
17299 rs6000_darwin_file_start (void)
17301 static const struct
17307 { "ppc64", "ppc64", MASK_64BIT },
17308 { "970", "ppc970", MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64 },
17309 { "power4", "ppc970", 0 },
17310 { "G5", "ppc970", 0 },
17311 { "7450", "ppc7450", 0 },
17312 { "7400", "ppc7400", MASK_ALTIVEC },
17313 { "G4", "ppc7400", 0 },
17314 { "750", "ppc750", 0 },
17315 { "740", "ppc750", 0 },
17316 { "G3", "ppc750", 0 },
17317 { "604e", "ppc604e", 0 },
17318 { "604", "ppc604", 0 },
17319 { "603e", "ppc603", 0 },
17320 { "603", "ppc603", 0 },
17321 { "601", "ppc601", 0 },
17322 { NULL, "ppc", 0 } };
17323 const char *cpu_id = "";
17326 rs6000_file_start();
17328 /* Determine the argument to -mcpu=. Default to G3 if not specified. */
17329 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
17330 if (rs6000_select[i].set_arch_p && rs6000_select[i].string
17331 && rs6000_select[i].string[0] != '\0')
17332 cpu_id = rs6000_select[i].string;
17334 /* Look through the mapping array. Pick the first name that either
17335 matches the argument, has a bit set in IF_SET that is also set
17336 in the target flags, or has a NULL name. */
17339 while (mapping[i].arg != NULL
17340 && strcmp (mapping[i].arg, cpu_id) != 0
17341 && (mapping[i].if_set & target_flags) == 0)
17344 fprintf (asm_out_file, "\t.machine %s\n", mapping[i].name);
17347 #endif /* TARGET_MACHO */
17350 static unsigned int
17351 rs6000_elf_section_type_flags (tree decl, const char *name, int reloc)
17353 return default_section_type_flags_1 (decl, name, reloc,
17354 flag_pic || DEFAULT_ABI == ABI_AIX);
17357 /* Record an element in the table of global constructors. SYMBOL is
17358 a SYMBOL_REF of the function to be called; PRIORITY is a number
17359 between 0 and MAX_INIT_PRIORITY.
17361 This differs from default_named_section_asm_out_constructor in
17362 that we have special handling for -mrelocatable. */
17365 rs6000_elf_asm_out_constructor (rtx symbol, int priority)
17367 const char *section = ".ctors";
17370 if (priority != DEFAULT_INIT_PRIORITY)
17372 sprintf (buf, ".ctors.%.5u",
17373 /* Invert the numbering so the linker puts us in the proper
17374 order; constructors are run from right to left, and the
17375 linker sorts in increasing order. */
17376 MAX_INIT_PRIORITY - priority);
17380 named_section_flags (section, SECTION_WRITE);
17381 assemble_align (POINTER_SIZE);
17383 if (TARGET_RELOCATABLE)
17385 fputs ("\t.long (", asm_out_file);
17386 output_addr_const (asm_out_file, symbol);
17387 fputs (")@fixup\n", asm_out_file);
17390 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
17394 rs6000_elf_asm_out_destructor (rtx symbol, int priority)
17396 const char *section = ".dtors";
17399 if (priority != DEFAULT_INIT_PRIORITY)
17401 sprintf (buf, ".dtors.%.5u",
17402 /* Invert the numbering so the linker puts us in the proper
17403 order; constructors are run from right to left, and the
17404 linker sorts in increasing order. */
17405 MAX_INIT_PRIORITY - priority);
17409 named_section_flags (section, SECTION_WRITE);
17410 assemble_align (POINTER_SIZE);
17412 if (TARGET_RELOCATABLE)
17414 fputs ("\t.long (", asm_out_file);
17415 output_addr_const (asm_out_file, symbol);
17416 fputs (")@fixup\n", asm_out_file);
17419 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
17423 rs6000_elf_declare_function_name (FILE *file, const char *name, tree decl)
17427 fputs ("\t.section\t\".opd\",\"aw\"\n\t.align 3\n", file);
17428 ASM_OUTPUT_LABEL (file, name);
17429 fputs (DOUBLE_INT_ASM_OP, file);
17430 rs6000_output_function_entry (file, name);
17431 fputs (",.TOC.@tocbase,0\n\t.previous\n", file);
17434 fputs ("\t.size\t", file);
17435 assemble_name (file, name);
17436 fputs (",24\n\t.type\t.", file);
17437 assemble_name (file, name);
17438 fputs (",@function\n", file);
17439 if (TREE_PUBLIC (decl) && ! DECL_WEAK (decl))
17441 fputs ("\t.globl\t.", file);
17442 assemble_name (file, name);
17447 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
17448 ASM_DECLARE_RESULT (file, DECL_RESULT (decl));
17449 rs6000_output_function_entry (file, name);
17450 fputs (":\n", file);
17454 if (TARGET_RELOCATABLE
17455 && (get_pool_size () != 0 || current_function_profile)
17460 (*targetm.asm_out.internal_label) (file, "LCL", rs6000_pic_labelno);
17462 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
17463 fprintf (file, "\t.long ");
17464 assemble_name (file, buf);
17466 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
17467 assemble_name (file, buf);
17471 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
17472 ASM_DECLARE_RESULT (file, DECL_RESULT (decl));
17474 if (DEFAULT_ABI == ABI_AIX)
17476 const char *desc_name, *orig_name;
17478 orig_name = (*targetm.strip_name_encoding) (name);
17479 desc_name = orig_name;
17480 while (*desc_name == '.')
17483 if (TREE_PUBLIC (decl))
17484 fprintf (file, "\t.globl %s\n", desc_name);
17486 fprintf (file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
17487 fprintf (file, "%s:\n", desc_name);
17488 fprintf (file, "\t.long %s\n", orig_name);
17489 fputs ("\t.long _GLOBAL_OFFSET_TABLE_\n", file);
17490 if (DEFAULT_ABI == ABI_AIX)
17491 fputs ("\t.long 0\n", file);
17492 fprintf (file, "\t.previous\n");
17494 ASM_OUTPUT_LABEL (file, name);
17500 rs6000_xcoff_asm_globalize_label (FILE *stream, const char *name)
17502 fputs (GLOBAL_ASM_OP, stream);
17503 RS6000_OUTPUT_BASENAME (stream, name);
17504 putc ('\n', stream);
17508 rs6000_xcoff_asm_named_section (const char *name, unsigned int flags,
17509 tree decl ATTRIBUTE_UNUSED)
17512 static const char * const suffix[3] = { "PR", "RO", "RW" };
17514 if (flags & SECTION_CODE)
17516 else if (flags & SECTION_WRITE)
17521 fprintf (asm_out_file, "\t.csect %s%s[%s],%u\n",
17522 (flags & SECTION_CODE) ? "." : "",
17523 name, suffix[smclass], flags & SECTION_ENTSIZE);
17527 rs6000_xcoff_select_section (tree decl, int reloc,
17528 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
17530 if (decl_readonly_section_1 (decl, reloc, 1))
17532 if (TREE_PUBLIC (decl))
17533 read_only_data_section ();
17535 read_only_private_data_section ();
17539 if (TREE_PUBLIC (decl))
17542 private_data_section ();
17547 rs6000_xcoff_unique_section (tree decl, int reloc ATTRIBUTE_UNUSED)
17551 /* Use select_section for private and uninitialized data. */
17552 if (!TREE_PUBLIC (decl)
17553 || DECL_COMMON (decl)
17554 || DECL_INITIAL (decl) == NULL_TREE
17555 || DECL_INITIAL (decl) == error_mark_node
17556 || (flag_zero_initialized_in_bss
17557 && initializer_zerop (DECL_INITIAL (decl))))
17560 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
17561 name = (*targetm.strip_name_encoding) (name);
17562 DECL_SECTION_NAME (decl) = build_string (strlen (name), name);
17565 /* Select section for constant in constant pool.
17567 On RS/6000, all constants are in the private read-only data area.
17568 However, if this is being placed in the TOC it must be output as a
17572 rs6000_xcoff_select_rtx_section (enum machine_mode mode, rtx x,
17573 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
17575 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
17578 read_only_private_data_section ();
17581 /* Remove any trailing [DS] or the like from the symbol name. */
17583 static const char *
17584 rs6000_xcoff_strip_name_encoding (const char *name)
17589 len = strlen (name);
17590 if (name[len - 1] == ']')
17591 return ggc_alloc_string (name, len - 4);
17596 /* Section attributes. AIX is always PIC. */
17598 static unsigned int
17599 rs6000_xcoff_section_type_flags (tree decl, const char *name, int reloc)
17601 unsigned int align;
17602 unsigned int flags = default_section_type_flags_1 (decl, name, reloc, 1);
17604 /* Align to at least UNIT size. */
17605 if (flags & SECTION_CODE)
17606 align = MIN_UNITS_PER_WORD;
17608 /* Increase alignment of large objects if not already stricter. */
17609 align = MAX ((DECL_ALIGN (decl) / BITS_PER_UNIT),
17610 int_size_in_bytes (TREE_TYPE (decl)) > MIN_UNITS_PER_WORD
17611 ? UNITS_PER_FP_WORD : MIN_UNITS_PER_WORD);
17613 return flags | (exact_log2 (align) & SECTION_ENTSIZE);
17616 /* Output at beginning of assembler file.
17618 Initialize the section names for the RS/6000 at this point.
17620 Specify filename, including full path, to assembler.
17622 We want to go into the TOC section so at least one .toc will be emitted.
17623 Also, in order to output proper .bs/.es pairs, we need at least one static
17624 [RW] section emitted.
17626 Finally, declare mcount when profiling to make the assembler happy. */
17629 rs6000_xcoff_file_start (void)
17631 rs6000_gen_section_name (&xcoff_bss_section_name,
17632 main_input_filename, ".bss_");
17633 rs6000_gen_section_name (&xcoff_private_data_section_name,
17634 main_input_filename, ".rw_");
17635 rs6000_gen_section_name (&xcoff_read_only_section_name,
17636 main_input_filename, ".ro_");
17638 fputs ("\t.file\t", asm_out_file);
17639 output_quoted_string (asm_out_file, main_input_filename);
17640 fputc ('\n', asm_out_file);
17642 if (write_symbols != NO_DEBUG)
17643 private_data_section ();
17646 fprintf (asm_out_file, "\t.extern %s\n", RS6000_MCOUNT);
17647 rs6000_file_start ();
17650 /* Output at end of assembler file.
17651 On the RS/6000, referencing data should automatically pull in text. */
17654 rs6000_xcoff_file_end (void)
17657 fputs ("_section_.text:\n", asm_out_file);
17659 fputs (TARGET_32BIT
17660 ? "\t.long _section_.text\n" : "\t.llong _section_.text\n",
17663 #endif /* TARGET_XCOFF */
17666 /* Cross-module name binding. Darwin does not support overriding
17667 functions at dynamic-link time. */
17670 rs6000_binds_local_p (tree decl)
17672 return default_binds_local_p_1 (decl, 0);
17676 /* Compute a (partial) cost for rtx X. Return true if the complete
17677 cost has been computed, and false if subexpressions should be
17678 scanned. In either case, *TOTAL contains the cost result. */
17681 rs6000_rtx_costs (rtx x, int code, int outer_code, int *total)
17683 enum machine_mode mode = GET_MODE (x);
17687 /* On the RS/6000, if it is valid in the insn, it is free. */
17689 if (((outer_code == SET
17690 || outer_code == PLUS
17691 || outer_code == MINUS)
17692 && (CONST_OK_FOR_LETTER_P (INTVAL (x), 'I')
17693 || CONST_OK_FOR_LETTER_P (INTVAL (x), 'L')))
17694 || ((outer_code == IOR || outer_code == XOR)
17695 && (CONST_OK_FOR_LETTER_P (INTVAL (x), 'K')
17696 || CONST_OK_FOR_LETTER_P (INTVAL (x), 'L')))
17697 || ((outer_code == DIV || outer_code == UDIV
17698 || outer_code == MOD || outer_code == UMOD)
17699 && exact_log2 (INTVAL (x)) >= 0)
17700 || (outer_code == AND
17701 && (CONST_OK_FOR_LETTER_P (INTVAL (x), 'K')
17702 || CONST_OK_FOR_LETTER_P (INTVAL (x), 'L')
17703 || mask_operand (x, VOIDmode)))
17704 || outer_code == ASHIFT
17705 || outer_code == ASHIFTRT
17706 || outer_code == LSHIFTRT
17707 || outer_code == ROTATE
17708 || outer_code == ROTATERT
17709 || outer_code == ZERO_EXTRACT
17710 || (outer_code == MULT
17711 && CONST_OK_FOR_LETTER_P (INTVAL (x), 'I'))
17712 || (outer_code == COMPARE
17713 && (CONST_OK_FOR_LETTER_P (INTVAL (x), 'I')
17714 || CONST_OK_FOR_LETTER_P (INTVAL (x), 'K'))))
17719 else if ((outer_code == PLUS
17720 && reg_or_add_cint64_operand (x, VOIDmode))
17721 || (outer_code == MINUS
17722 && reg_or_sub_cint64_operand (x, VOIDmode))
17723 || ((outer_code == SET
17724 || outer_code == IOR
17725 || outer_code == XOR)
17727 & ~ (unsigned HOST_WIDE_INT) 0xffffffff) == 0))
17729 *total = COSTS_N_INSNS (1);
17736 && ((outer_code == AND
17737 && (CONST_OK_FOR_LETTER_P (INTVAL (x), 'K')
17738 || CONST_OK_FOR_LETTER_P (INTVAL (x), 'L')
17739 || mask64_operand (x, DImode)))
17740 || ((outer_code == IOR || outer_code == XOR)
17741 && CONST_DOUBLE_HIGH (x) == 0
17742 && (CONST_DOUBLE_LOW (x)
17743 & ~ (unsigned HOST_WIDE_INT) 0xffff) == 0)))
17748 else if (mode == DImode
17749 && (outer_code == SET
17750 || outer_code == IOR
17751 || outer_code == XOR)
17752 && CONST_DOUBLE_HIGH (x) == 0)
17754 *total = COSTS_N_INSNS (1);
17763 /* When optimizing for size, MEM should be slightly more expensive
17764 than generating address, e.g., (plus (reg) (const)).
17765 L1 cache latency is about two instructions. */
17766 *total = optimize_size ? COSTS_N_INSNS (1) + 1 : COSTS_N_INSNS (2);
17774 if (mode == DFmode)
17776 if (GET_CODE (XEXP (x, 0)) == MULT)
17778 /* FNMA accounted in outer NEG. */
17779 if (outer_code == NEG)
17780 *total = rs6000_cost->dmul - rs6000_cost->fp;
17782 *total = rs6000_cost->dmul;
17785 *total = rs6000_cost->fp;
17787 else if (mode == SFmode)
17789 /* FNMA accounted in outer NEG. */
17790 if (outer_code == NEG && GET_CODE (XEXP (x, 0)) == MULT)
17793 *total = rs6000_cost->fp;
17795 else if (GET_CODE (XEXP (x, 0)) == MULT)
17797 /* The rs6000 doesn't have shift-and-add instructions. */
17798 rs6000_rtx_costs (XEXP (x, 0), MULT, PLUS, total);
17799 *total += COSTS_N_INSNS (1);
17802 *total = COSTS_N_INSNS (1);
17806 if (mode == DFmode)
17808 if (GET_CODE (XEXP (x, 0)) == MULT)
17810 /* FNMA accounted in outer NEG. */
17811 if (outer_code == NEG)
17814 *total = rs6000_cost->dmul;
17817 *total = rs6000_cost->fp;
17819 else if (mode == SFmode)
17821 /* FNMA accounted in outer NEG. */
17822 if (outer_code == NEG && GET_CODE (XEXP (x, 0)) == MULT)
17825 *total = rs6000_cost->fp;
17827 else if (GET_CODE (XEXP (x, 0)) == MULT)
17829 /* The rs6000 doesn't have shift-and-sub instructions. */
17830 rs6000_rtx_costs (XEXP (x, 0), MULT, MINUS, total);
17831 *total += COSTS_N_INSNS (1);
17834 *total = COSTS_N_INSNS (1);
17838 if (GET_CODE (XEXP (x, 1)) == CONST_INT
17839 && CONST_OK_FOR_LETTER_P (INTVAL (XEXP (x, 1)), 'I'))
17841 if (INTVAL (XEXP (x, 1)) >= -256
17842 && INTVAL (XEXP (x, 1)) <= 255)
17843 *total = rs6000_cost->mulsi_const9;
17845 *total = rs6000_cost->mulsi_const;
17847 /* FMA accounted in outer PLUS/MINUS. */
17848 else if ((mode == DFmode || mode == SFmode)
17849 && (outer_code == PLUS || outer_code == MINUS))
17851 else if (mode == DFmode)
17852 *total = rs6000_cost->dmul;
17853 else if (mode == SFmode)
17854 *total = rs6000_cost->fp;
17855 else if (mode == DImode)
17856 *total = rs6000_cost->muldi;
17858 *total = rs6000_cost->mulsi;
17863 if (FLOAT_MODE_P (mode))
17865 *total = mode == DFmode ? rs6000_cost->ddiv
17866 : rs6000_cost->sdiv;
17873 if (GET_CODE (XEXP (x, 1)) == CONST_INT
17874 && exact_log2 (INTVAL (XEXP (x, 1))) >= 0)
17876 if (code == DIV || code == MOD)
17878 *total = COSTS_N_INSNS (2);
17881 *total = COSTS_N_INSNS (1);
17885 if (GET_MODE (XEXP (x, 1)) == DImode)
17886 *total = rs6000_cost->divdi;
17888 *total = rs6000_cost->divsi;
17890 /* Add in shift and subtract for MOD. */
17891 if (code == MOD || code == UMOD)
17892 *total += COSTS_N_INSNS (2);
17896 *total = COSTS_N_INSNS (4);
17900 if (outer_code == AND || outer_code == IOR || outer_code == XOR)
17911 *total = COSTS_N_INSNS (1);
17919 /* Handle mul_highpart. */
17920 if (outer_code == TRUNCATE
17921 && GET_CODE (XEXP (x, 0)) == MULT)
17923 if (mode == DImode)
17924 *total = rs6000_cost->muldi;
17926 *total = rs6000_cost->mulsi;
17929 else if (outer_code == AND)
17932 *total = COSTS_N_INSNS (1);
17937 if (GET_CODE (XEXP (x, 0)) == MEM)
17940 *total = COSTS_N_INSNS (1);
17946 if (!FLOAT_MODE_P (mode))
17948 *total = COSTS_N_INSNS (1);
17954 case UNSIGNED_FLOAT:
17958 case FLOAT_TRUNCATE:
17959 *total = rs6000_cost->fp;
17963 switch (XINT (x, 1))
17966 *total = rs6000_cost->fp;
17978 *total = COSTS_N_INSNS (1);
17981 else if (FLOAT_MODE_P (mode)
17982 && TARGET_PPC_GFXOPT && TARGET_HARD_FLOAT && TARGET_FPRS)
17984 *total = rs6000_cost->fp;
17997 /* A C expression returning the cost of moving data from a register of class
17998 CLASS1 to one of CLASS2. */
18001 rs6000_register_move_cost (enum machine_mode mode,
18002 enum reg_class from, enum reg_class to)
18004 /* Moves from/to GENERAL_REGS. */
18005 if (reg_classes_intersect_p (to, GENERAL_REGS)
18006 || reg_classes_intersect_p (from, GENERAL_REGS))
18008 if (! reg_classes_intersect_p (to, GENERAL_REGS))
18011 if (from == FLOAT_REGS || from == ALTIVEC_REGS)
18012 return (rs6000_memory_move_cost (mode, from, 0)
18013 + rs6000_memory_move_cost (mode, GENERAL_REGS, 0));
18015 /* It's more expensive to move CR_REGS than CR0_REGS because of the
18017 else if (from == CR_REGS)
18021 /* A move will cost one instruction per GPR moved. */
18022 return 2 * HARD_REGNO_NREGS (0, mode);
18025 /* Moving between two similar registers is just one instruction. */
18026 else if (reg_classes_intersect_p (to, from))
18027 return mode == TFmode ? 4 : 2;
18029 /* Everything else has to go through GENERAL_REGS. */
18031 return (rs6000_register_move_cost (mode, GENERAL_REGS, to)
18032 + rs6000_register_move_cost (mode, from, GENERAL_REGS));
18035 /* A C expressions returning the cost of moving data of MODE from a register to
18039 rs6000_memory_move_cost (enum machine_mode mode, enum reg_class class,
18040 int in ATTRIBUTE_UNUSED)
18042 if (reg_classes_intersect_p (class, GENERAL_REGS))
18043 return 4 * HARD_REGNO_NREGS (0, mode);
18044 else if (reg_classes_intersect_p (class, FLOAT_REGS))
18045 return 4 * HARD_REGNO_NREGS (32, mode);
18046 else if (reg_classes_intersect_p (class, ALTIVEC_REGS))
18047 return 4 * HARD_REGNO_NREGS (FIRST_ALTIVEC_REGNO, mode);
18049 return 4 + rs6000_register_move_cost (mode, class, GENERAL_REGS);
18052 /* Return an RTX representing where to find the function value of a
18053 function returning MODE. */
18055 rs6000_complex_function_value (enum machine_mode mode)
18057 unsigned int regno;
18059 enum machine_mode inner = GET_MODE_INNER (mode);
18060 unsigned int inner_bytes = GET_MODE_SIZE (inner);
18062 if (FLOAT_MODE_P (mode) && TARGET_HARD_FLOAT && TARGET_FPRS)
18063 regno = FP_ARG_RETURN;
18066 regno = GP_ARG_RETURN;
18068 /* 32-bit is OK since it'll go in r3/r4. */
18069 if (TARGET_32BIT && inner_bytes >= 4)
18070 return gen_rtx_REG (mode, regno);
18073 if (inner_bytes >= 8)
18074 return gen_rtx_REG (mode, regno);
18076 r1 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno),
18078 r2 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno + 1),
18079 GEN_INT (inner_bytes));
18080 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
18083 /* Define how to find the value returned by a function.
18084 VALTYPE is the data type of the value (as a tree).
18085 If the precise function being called is known, FUNC is its FUNCTION_DECL;
18086 otherwise, FUNC is 0.
18088 On the SPE, both FPs and vectors are returned in r3.
18090 On RS/6000 an integer value is in r3 and a floating-point value is in
18091 fp1, unless -msoft-float. */
18094 rs6000_function_value (tree valtype, tree func ATTRIBUTE_UNUSED)
18096 enum machine_mode mode;
18097 unsigned int regno;
18099 if (TARGET_32BIT && TARGET_POWERPC64 && TYPE_MODE (valtype) == DImode)
18101 /* Long long return value need be split in -mpowerpc64, 32bit ABI. */
18102 return gen_rtx_PARALLEL (DImode,
18104 gen_rtx_EXPR_LIST (VOIDmode,
18105 gen_rtx_REG (SImode, GP_ARG_RETURN),
18107 gen_rtx_EXPR_LIST (VOIDmode,
18108 gen_rtx_REG (SImode,
18109 GP_ARG_RETURN + 1),
18113 if ((INTEGRAL_TYPE_P (valtype)
18114 && TYPE_PRECISION (valtype) < BITS_PER_WORD)
18115 || POINTER_TYPE_P (valtype))
18116 mode = TARGET_32BIT ? SImode : DImode;
18118 mode = TYPE_MODE (valtype);
18120 if (SCALAR_FLOAT_TYPE_P (valtype) && TARGET_HARD_FLOAT && TARGET_FPRS)
18121 regno = FP_ARG_RETURN;
18122 else if (TREE_CODE (valtype) == COMPLEX_TYPE
18123 && targetm.calls.split_complex_arg)
18124 return rs6000_complex_function_value (mode);
18125 else if (TREE_CODE (valtype) == VECTOR_TYPE
18126 && TARGET_ALTIVEC && TARGET_ALTIVEC_ABI
18127 && ALTIVEC_VECTOR_MODE(mode))
18128 regno = ALTIVEC_ARG_RETURN;
18129 else if (TARGET_E500_DOUBLE && TARGET_HARD_FLOAT && mode == DFmode)
18130 return spe_build_register_parallel (DFmode, GP_ARG_RETURN);
18132 regno = GP_ARG_RETURN;
18134 return gen_rtx_REG (mode, regno);
18137 /* Define how to find the value returned by a library function
18138 assuming the value has mode MODE. */
18140 rs6000_libcall_value (enum machine_mode mode)
18142 unsigned int regno;
18144 if (TARGET_32BIT && TARGET_POWERPC64 && mode == DImode)
18146 /* Long long return value need be split in -mpowerpc64, 32bit ABI. */
18147 return gen_rtx_PARALLEL (DImode,
18149 gen_rtx_EXPR_LIST (VOIDmode,
18150 gen_rtx_REG (SImode, GP_ARG_RETURN),
18152 gen_rtx_EXPR_LIST (VOIDmode,
18153 gen_rtx_REG (SImode,
18154 GP_ARG_RETURN + 1),
18158 if (GET_MODE_CLASS (mode) == MODE_FLOAT
18159 && TARGET_HARD_FLOAT && TARGET_FPRS)
18160 regno = FP_ARG_RETURN;
18161 else if (ALTIVEC_VECTOR_MODE (mode)
18162 && TARGET_ALTIVEC && TARGET_ALTIVEC_ABI)
18163 regno = ALTIVEC_ARG_RETURN;
18164 else if (COMPLEX_MODE_P (mode) && targetm.calls.split_complex_arg)
18165 return rs6000_complex_function_value (mode);
18166 else if (TARGET_E500_DOUBLE && TARGET_HARD_FLOAT && mode == DFmode)
18167 return spe_build_register_parallel (DFmode, GP_ARG_RETURN);
18169 regno = GP_ARG_RETURN;
18171 return gen_rtx_REG (mode, regno);
18174 /* Define the offset between two registers, FROM to be eliminated and its
18175 replacement TO, at the start of a routine. */
18177 rs6000_initial_elimination_offset (int from, int to)
18179 rs6000_stack_t *info = rs6000_stack_info ();
18180 HOST_WIDE_INT offset;
18182 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
18183 offset = info->push_p ? 0 : -info->total_size;
18184 else if (from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
18185 offset = info->total_size;
18186 else if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
18187 offset = info->push_p ? info->total_size : 0;
18188 else if (from == RS6000_PIC_OFFSET_TABLE_REGNUM)
18196 /* Return true if TYPE is of type __ev64_opaque__. */
18199 is_ev64_opaque_type (tree type)
18202 && (type == opaque_V2SI_type_node
18203 || type == opaque_V2SF_type_node
18204 || type == opaque_p_V2SI_type_node));
18208 rs6000_dwarf_register_span (rtx reg)
18213 && (SPE_VECTOR_MODE (GET_MODE (reg))
18214 || (TARGET_E500_DOUBLE && GET_MODE (reg) == DFmode)))
18219 regno = REGNO (reg);
18221 /* The duality of the SPE register size wreaks all kinds of havoc.
18222 This is a way of distinguishing r0 in 32-bits from r0 in
18225 gen_rtx_PARALLEL (VOIDmode,
18228 gen_rtx_REG (SImode, regno + 1200),
18229 gen_rtx_REG (SImode, regno))
18231 gen_rtx_REG (SImode, regno),
18232 gen_rtx_REG (SImode, regno + 1200)));
18235 /* Map internal gcc register numbers to DWARF2 register numbers. */
18238 rs6000_dbx_register_number (unsigned int regno)
18240 if (regno <= 63 || write_symbols != DWARF2_DEBUG)
18242 if (regno == MQ_REGNO)
18244 if (regno == LINK_REGISTER_REGNUM)
18246 if (regno == COUNT_REGISTER_REGNUM)
18248 if (CR_REGNO_P (regno))
18249 return regno - CR0_REGNO + 86;
18250 if (regno == XER_REGNO)
18252 if (ALTIVEC_REGNO_P (regno))
18253 return regno - FIRST_ALTIVEC_REGNO + 1124;
18254 if (regno == VRSAVE_REGNO)
18256 if (regno == VSCR_REGNO)
18258 if (regno == SPE_ACC_REGNO)
18260 if (regno == SPEFSCR_REGNO)
18262 /* SPE high reg number. We get these values of regno from
18263 rs6000_dwarf_register_span. */
18264 if (regno >= 1200 && regno < 1232)
18270 /* target hook eh_return_filter_mode */
18271 static enum machine_mode
18272 rs6000_eh_return_filter_mode (void)
18274 return TARGET_32BIT ? SImode : word_mode;
18277 /* Target hook for vector_mode_supported_p. */
18279 rs6000_vector_mode_supported_p (enum machine_mode mode)
18282 if (TARGET_SPE && SPE_VECTOR_MODE (mode))
18285 else if (TARGET_ALTIVEC && ALTIVEC_VECTOR_MODE (mode))
18292 #include "gt-rs6000.h"