1 /* Subroutines used for code generation on IBM RS/6000.
2 Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
4 Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it
9 under the terms of the GNU General Public License as published
10 by the Free Software Foundation; either version 2, or (at your
11 option) any later version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT
14 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
15 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
16 License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the
20 Free Software Foundation, 59 Temple Place - Suite 330, Boston,
21 MA 02111-1307, USA. */
25 #include "coretypes.h"
29 #include "hard-reg-set.h"
31 #include "insn-config.h"
32 #include "conditions.h"
33 #include "insn-attr.h"
43 #include "basic-block.h"
44 #include "integrate.h"
50 #include "target-def.h"
51 #include "langhooks.h"
53 #include "cfglayout.h"
54 #include "sched-int.h"
55 #include "tree-gimple.h"
57 #include "xcoffout.h" /* get declarations of xcoff_*_section_name */
60 #ifndef TARGET_NO_PROTOTYPE
61 #define TARGET_NO_PROTOTYPE 0
64 #define EASY_VECTOR_15(n) ((n) >= -16 && (n) <= 15)
65 #define EASY_VECTOR_15_ADD_SELF(n) ((n) >= 0x10 && (n) <= 0x1e \
68 #define min(A,B) ((A) < (B) ? (A) : (B))
69 #define max(A,B) ((A) > (B) ? (A) : (B))
71 /* Structure used to define the rs6000 stack */
72 typedef struct rs6000_stack {
73 int first_gp_reg_save; /* first callee saved GP register used */
74 int first_fp_reg_save; /* first callee saved FP register used */
75 int first_altivec_reg_save; /* first callee saved AltiVec register used */
76 int lr_save_p; /* true if the link reg needs to be saved */
77 int cr_save_p; /* true if the CR reg needs to be saved */
78 unsigned int vrsave_mask; /* mask of vec registers to save */
79 int toc_save_p; /* true if the TOC needs to be saved */
80 int push_p; /* true if we need to allocate stack space */
81 int calls_p; /* true if the function makes any calls */
82 int world_save_p; /* true if we're saving *everything*:
83 r13-r31, cr, f14-f31, vrsave, v20-v31 */
84 enum rs6000_abi abi; /* which ABI to use */
85 int gp_save_offset; /* offset to save GP regs from initial SP */
86 int fp_save_offset; /* offset to save FP regs from initial SP */
87 int altivec_save_offset; /* offset to save AltiVec regs from initial SP */
88 int lr_save_offset; /* offset to save LR from initial SP */
89 int cr_save_offset; /* offset to save CR from initial SP */
90 int vrsave_save_offset; /* offset to save VRSAVE from initial SP */
91 int spe_gp_save_offset; /* offset to save spe 64-bit gprs */
92 int toc_save_offset; /* offset to save the TOC pointer */
93 int varargs_save_offset; /* offset to save the varargs registers */
94 int ehrd_offset; /* offset to EH return data */
95 int reg_size; /* register size (4 or 8) */
96 int varargs_size; /* size to hold V.4 args passed in regs */
97 HOST_WIDE_INT vars_size; /* variable save area size */
98 int parm_size; /* outgoing parameter size */
99 int save_size; /* save area size */
100 int fixed_size; /* fixed size of stack frame */
101 int gp_size; /* size of saved GP registers */
102 int fp_size; /* size of saved FP registers */
103 int altivec_size; /* size of saved AltiVec registers */
104 int cr_size; /* size to hold CR if not in save_size */
105 int lr_size; /* size to hold LR if not in save_size */
106 int vrsave_size; /* size to hold VRSAVE if not in save_size */
107 int altivec_padding_size; /* size of altivec alignment padding if
109 int spe_gp_size; /* size of 64-bit GPR save size for SPE */
110 int spe_padding_size;
111 int toc_size; /* size to hold TOC if not in save_size */
112 HOST_WIDE_INT total_size; /* total bytes allocated for stack */
113 int spe_64bit_regs_used;
116 /* Target cpu type */
118 enum processor_type rs6000_cpu;
119 struct rs6000_cpu_select rs6000_select[3] =
121 /* switch name, tune arch */
122 { (const char *)0, "--with-cpu=", 1, 1 },
123 { (const char *)0, "-mcpu=", 1, 1 },
124 { (const char *)0, "-mtune=", 1, 0 },
127 /* Always emit branch hint bits. */
128 static GTY(()) bool rs6000_always_hint;
130 /* Schedule instructions for group formation. */
131 static GTY(()) bool rs6000_sched_groups;
133 /* Support adjust_priority scheduler hook
134 and -mprioritize-restricted-insns= option. */
135 const char *rs6000_sched_restricted_insns_priority_str;
136 int rs6000_sched_restricted_insns_priority;
138 /* Support for -msched-costly-dep option. */
139 const char *rs6000_sched_costly_dep_str;
140 enum rs6000_dependence_cost rs6000_sched_costly_dep;
142 /* Support for -minsert-sched-nops option. */
143 const char *rs6000_sched_insert_nops_str;
144 enum rs6000_nop_insertion rs6000_sched_insert_nops;
146 /* Size of long double */
147 const char *rs6000_long_double_size_string;
148 int rs6000_long_double_type_size;
150 /* Whether -mabi=altivec has appeared */
151 int rs6000_altivec_abi;
153 /* Whether VRSAVE instructions should be generated. */
154 int rs6000_altivec_vrsave;
156 /* String from -mvrsave= option. */
157 const char *rs6000_altivec_vrsave_string;
159 /* Nonzero if we want SPE ABI extensions. */
162 /* Whether isel instructions should be generated. */
165 /* Whether SPE simd instructions should be generated. */
168 /* Nonzero if floating point operations are done in the GPRs. */
169 int rs6000_float_gprs = 0;
171 /* String from -mfloat-gprs=. */
172 const char *rs6000_float_gprs_string;
174 /* String from -misel=. */
175 const char *rs6000_isel_string;
177 /* String from -mspe=. */
178 const char *rs6000_spe_string;
180 /* Set to nonzero once AIX common-mode calls have been defined. */
181 static GTY(()) int common_mode_defined;
183 /* Save information from a "cmpxx" operation until the branch or scc is
185 rtx rs6000_compare_op0, rs6000_compare_op1;
186 int rs6000_compare_fp_p;
188 /* Label number of label created for -mrelocatable, to call to so we can
189 get the address of the GOT section */
190 int rs6000_pic_labelno;
193 /* Which abi to adhere to */
194 const char *rs6000_abi_name;
196 /* Semantics of the small data area */
197 enum rs6000_sdata_type rs6000_sdata = SDATA_DATA;
199 /* Which small data model to use */
200 const char *rs6000_sdata_name = (char *)0;
202 /* Counter for labels which are to be placed in .fixup. */
203 int fixuplabelno = 0;
206 /* Bit size of immediate TLS offsets and string from which it is decoded. */
207 int rs6000_tls_size = 32;
208 const char *rs6000_tls_size_string;
210 /* ABI enumeration available for subtarget to use. */
211 enum rs6000_abi rs6000_current_abi;
213 /* ABI string from -mabi= option. */
214 const char *rs6000_abi_string;
217 const char *rs6000_debug_name;
218 int rs6000_debug_stack; /* debug stack applications */
219 int rs6000_debug_arg; /* debug argument handling */
221 /* Value is TRUE if register/mode pair is accepatable. */
222 bool rs6000_hard_regno_mode_ok_p[NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
225 static GTY(()) tree opaque_V2SI_type_node;
226 static GTY(()) tree opaque_V2SF_type_node;
227 static GTY(()) tree opaque_p_V2SI_type_node;
228 static GTY(()) tree V16QI_type_node;
229 static GTY(()) tree V2SI_type_node;
230 static GTY(()) tree V2SF_type_node;
231 static GTY(()) tree V4HI_type_node;
232 static GTY(()) tree V4SI_type_node;
233 static GTY(()) tree V4SF_type_node;
234 static GTY(()) tree V8HI_type_node;
235 static GTY(()) tree unsigned_V16QI_type_node;
236 static GTY(()) tree unsigned_V8HI_type_node;
237 static GTY(()) tree unsigned_V4SI_type_node;
238 static GTY(()) tree bool_char_type_node; /* __bool char */
239 static GTY(()) tree bool_short_type_node; /* __bool short */
240 static GTY(()) tree bool_int_type_node; /* __bool int */
241 static GTY(()) tree pixel_type_node; /* __pixel */
242 static GTY(()) tree bool_V16QI_type_node; /* __vector __bool char */
243 static GTY(()) tree bool_V8HI_type_node; /* __vector __bool short */
244 static GTY(()) tree bool_V4SI_type_node; /* __vector __bool int */
245 static GTY(()) tree pixel_V8HI_type_node; /* __vector __pixel */
247 int rs6000_warn_altivec_long = 1; /* On by default. */
248 const char *rs6000_warn_altivec_long_switch;
250 const char *rs6000_traceback_name;
252 traceback_default = 0,
258 /* Flag to say the TOC is initialized */
260 char toc_label_name[10];
262 /* Alias set for saves and restores from the rs6000 stack. */
263 static GTY(()) int rs6000_sr_alias_set;
265 /* Call distance, overridden by -mlongcall and #pragma longcall(1).
266 The only place that looks at this is rs6000_set_default_type_attributes;
267 everywhere else should rely on the presence or absence of a longcall
268 attribute on the function declaration. */
269 int rs6000_default_long_calls;
270 const char *rs6000_longcall_switch;
272 /* Control alignment for fields within structures. */
273 /* String from -malign-XXXXX. */
274 const char *rs6000_alignment_string;
275 int rs6000_alignment_flags;
277 struct builtin_description
279 /* mask is not const because we're going to alter it below. This
280 nonsense will go away when we rewrite the -march infrastructure
281 to give us more target flag bits. */
283 const enum insn_code icode;
284 const char *const name;
285 const enum rs6000_builtins code;
288 /* Target cpu costs. */
290 struct processor_costs {
291 const int mulsi; /* cost of SImode multiplication. */
292 const int mulsi_const; /* cost of SImode multiplication by constant. */
293 const int mulsi_const9; /* cost of SImode mult by short constant. */
294 const int muldi; /* cost of DImode multiplication. */
295 const int divsi; /* cost of SImode division. */
296 const int divdi; /* cost of DImode division. */
297 const int fp; /* cost of simple SFmode and DFmode insns. */
298 const int dmul; /* cost of DFmode multiplication (and fmadd). */
299 const int sdiv; /* cost of SFmode division (fdivs). */
300 const int ddiv; /* cost of DFmode division (fdiv). */
303 const struct processor_costs *rs6000_cost;
305 /* Processor costs (relative to an add) */
307 /* Instruction size costs on 32bit processors. */
309 struct processor_costs size32_cost = {
310 COSTS_N_INSNS (1), /* mulsi */
311 COSTS_N_INSNS (1), /* mulsi_const */
312 COSTS_N_INSNS (1), /* mulsi_const9 */
313 COSTS_N_INSNS (1), /* muldi */
314 COSTS_N_INSNS (1), /* divsi */
315 COSTS_N_INSNS (1), /* divdi */
316 COSTS_N_INSNS (1), /* fp */
317 COSTS_N_INSNS (1), /* dmul */
318 COSTS_N_INSNS (1), /* sdiv */
319 COSTS_N_INSNS (1), /* ddiv */
322 /* Instruction size costs on 64bit processors. */
324 struct processor_costs size64_cost = {
325 COSTS_N_INSNS (1), /* mulsi */
326 COSTS_N_INSNS (1), /* mulsi_const */
327 COSTS_N_INSNS (1), /* mulsi_const9 */
328 COSTS_N_INSNS (1), /* muldi */
329 COSTS_N_INSNS (1), /* divsi */
330 COSTS_N_INSNS (1), /* divdi */
331 COSTS_N_INSNS (1), /* fp */
332 COSTS_N_INSNS (1), /* dmul */
333 COSTS_N_INSNS (1), /* sdiv */
334 COSTS_N_INSNS (1), /* ddiv */
337 /* Instruction costs on RIOS1 processors. */
339 struct processor_costs rios1_cost = {
340 COSTS_N_INSNS (5), /* mulsi */
341 COSTS_N_INSNS (4), /* mulsi_const */
342 COSTS_N_INSNS (3), /* mulsi_const9 */
343 COSTS_N_INSNS (5), /* muldi */
344 COSTS_N_INSNS (19), /* divsi */
345 COSTS_N_INSNS (19), /* divdi */
346 COSTS_N_INSNS (2), /* fp */
347 COSTS_N_INSNS (2), /* dmul */
348 COSTS_N_INSNS (19), /* sdiv */
349 COSTS_N_INSNS (19), /* ddiv */
352 /* Instruction costs on RIOS2 processors. */
354 struct processor_costs rios2_cost = {
355 COSTS_N_INSNS (2), /* mulsi */
356 COSTS_N_INSNS (2), /* mulsi_const */
357 COSTS_N_INSNS (2), /* mulsi_const9 */
358 COSTS_N_INSNS (2), /* muldi */
359 COSTS_N_INSNS (13), /* divsi */
360 COSTS_N_INSNS (13), /* divdi */
361 COSTS_N_INSNS (2), /* fp */
362 COSTS_N_INSNS (2), /* dmul */
363 COSTS_N_INSNS (17), /* sdiv */
364 COSTS_N_INSNS (17), /* ddiv */
367 /* Instruction costs on RS64A processors. */
369 struct processor_costs rs64a_cost = {
370 COSTS_N_INSNS (20), /* mulsi */
371 COSTS_N_INSNS (12), /* mulsi_const */
372 COSTS_N_INSNS (8), /* mulsi_const9 */
373 COSTS_N_INSNS (34), /* muldi */
374 COSTS_N_INSNS (65), /* divsi */
375 COSTS_N_INSNS (67), /* divdi */
376 COSTS_N_INSNS (4), /* fp */
377 COSTS_N_INSNS (4), /* dmul */
378 COSTS_N_INSNS (31), /* sdiv */
379 COSTS_N_INSNS (31), /* ddiv */
382 /* Instruction costs on MPCCORE processors. */
384 struct processor_costs mpccore_cost = {
385 COSTS_N_INSNS (2), /* mulsi */
386 COSTS_N_INSNS (2), /* mulsi_const */
387 COSTS_N_INSNS (2), /* mulsi_const9 */
388 COSTS_N_INSNS (2), /* muldi */
389 COSTS_N_INSNS (6), /* divsi */
390 COSTS_N_INSNS (6), /* divdi */
391 COSTS_N_INSNS (4), /* fp */
392 COSTS_N_INSNS (5), /* dmul */
393 COSTS_N_INSNS (10), /* sdiv */
394 COSTS_N_INSNS (17), /* ddiv */
397 /* Instruction costs on PPC403 processors. */
399 struct processor_costs ppc403_cost = {
400 COSTS_N_INSNS (4), /* mulsi */
401 COSTS_N_INSNS (4), /* mulsi_const */
402 COSTS_N_INSNS (4), /* mulsi_const9 */
403 COSTS_N_INSNS (4), /* muldi */
404 COSTS_N_INSNS (33), /* divsi */
405 COSTS_N_INSNS (33), /* divdi */
406 COSTS_N_INSNS (11), /* fp */
407 COSTS_N_INSNS (11), /* dmul */
408 COSTS_N_INSNS (11), /* sdiv */
409 COSTS_N_INSNS (11), /* ddiv */
412 /* Instruction costs on PPC405 processors. */
414 struct processor_costs ppc405_cost = {
415 COSTS_N_INSNS (5), /* mulsi */
416 COSTS_N_INSNS (4), /* mulsi_const */
417 COSTS_N_INSNS (3), /* mulsi_const9 */
418 COSTS_N_INSNS (5), /* muldi */
419 COSTS_N_INSNS (35), /* divsi */
420 COSTS_N_INSNS (35), /* divdi */
421 COSTS_N_INSNS (11), /* fp */
422 COSTS_N_INSNS (11), /* dmul */
423 COSTS_N_INSNS (11), /* sdiv */
424 COSTS_N_INSNS (11), /* ddiv */
427 /* Instruction costs on PPC440 processors. */
429 struct processor_costs ppc440_cost = {
430 COSTS_N_INSNS (3), /* mulsi */
431 COSTS_N_INSNS (2), /* mulsi_const */
432 COSTS_N_INSNS (2), /* mulsi_const9 */
433 COSTS_N_INSNS (3), /* muldi */
434 COSTS_N_INSNS (34), /* divsi */
435 COSTS_N_INSNS (34), /* divdi */
436 COSTS_N_INSNS (5), /* fp */
437 COSTS_N_INSNS (5), /* dmul */
438 COSTS_N_INSNS (19), /* sdiv */
439 COSTS_N_INSNS (33), /* ddiv */
442 /* Instruction costs on PPC601 processors. */
444 struct processor_costs ppc601_cost = {
445 COSTS_N_INSNS (5), /* mulsi */
446 COSTS_N_INSNS (5), /* mulsi_const */
447 COSTS_N_INSNS (5), /* mulsi_const9 */
448 COSTS_N_INSNS (5), /* muldi */
449 COSTS_N_INSNS (36), /* divsi */
450 COSTS_N_INSNS (36), /* divdi */
451 COSTS_N_INSNS (4), /* fp */
452 COSTS_N_INSNS (5), /* dmul */
453 COSTS_N_INSNS (17), /* sdiv */
454 COSTS_N_INSNS (31), /* ddiv */
457 /* Instruction costs on PPC603 processors. */
459 struct processor_costs ppc603_cost = {
460 COSTS_N_INSNS (5), /* mulsi */
461 COSTS_N_INSNS (3), /* mulsi_const */
462 COSTS_N_INSNS (2), /* mulsi_const9 */
463 COSTS_N_INSNS (5), /* muldi */
464 COSTS_N_INSNS (37), /* divsi */
465 COSTS_N_INSNS (37), /* divdi */
466 COSTS_N_INSNS (3), /* fp */
467 COSTS_N_INSNS (4), /* dmul */
468 COSTS_N_INSNS (18), /* sdiv */
469 COSTS_N_INSNS (33), /* ddiv */
472 /* Instruction costs on PPC604 processors. */
474 struct processor_costs ppc604_cost = {
475 COSTS_N_INSNS (4), /* mulsi */
476 COSTS_N_INSNS (4), /* mulsi_const */
477 COSTS_N_INSNS (4), /* mulsi_const9 */
478 COSTS_N_INSNS (4), /* muldi */
479 COSTS_N_INSNS (20), /* divsi */
480 COSTS_N_INSNS (20), /* divdi */
481 COSTS_N_INSNS (3), /* fp */
482 COSTS_N_INSNS (3), /* dmul */
483 COSTS_N_INSNS (18), /* sdiv */
484 COSTS_N_INSNS (32), /* ddiv */
487 /* Instruction costs on PPC604e processors. */
489 struct processor_costs ppc604e_cost = {
490 COSTS_N_INSNS (2), /* mulsi */
491 COSTS_N_INSNS (2), /* mulsi_const */
492 COSTS_N_INSNS (2), /* mulsi_const9 */
493 COSTS_N_INSNS (2), /* muldi */
494 COSTS_N_INSNS (20), /* divsi */
495 COSTS_N_INSNS (20), /* divdi */
496 COSTS_N_INSNS (3), /* fp */
497 COSTS_N_INSNS (3), /* dmul */
498 COSTS_N_INSNS (18), /* sdiv */
499 COSTS_N_INSNS (32), /* ddiv */
502 /* Instruction costs on PPC620 processors. */
504 struct processor_costs ppc620_cost = {
505 COSTS_N_INSNS (5), /* mulsi */
506 COSTS_N_INSNS (4), /* mulsi_const */
507 COSTS_N_INSNS (3), /* mulsi_const9 */
508 COSTS_N_INSNS (7), /* muldi */
509 COSTS_N_INSNS (21), /* divsi */
510 COSTS_N_INSNS (37), /* divdi */
511 COSTS_N_INSNS (3), /* fp */
512 COSTS_N_INSNS (3), /* dmul */
513 COSTS_N_INSNS (18), /* sdiv */
514 COSTS_N_INSNS (32), /* ddiv */
517 /* Instruction costs on PPC630 processors. */
519 struct processor_costs ppc630_cost = {
520 COSTS_N_INSNS (5), /* mulsi */
521 COSTS_N_INSNS (4), /* mulsi_const */
522 COSTS_N_INSNS (3), /* mulsi_const9 */
523 COSTS_N_INSNS (7), /* muldi */
524 COSTS_N_INSNS (21), /* divsi */
525 COSTS_N_INSNS (37), /* divdi */
526 COSTS_N_INSNS (3), /* fp */
527 COSTS_N_INSNS (3), /* dmul */
528 COSTS_N_INSNS (17), /* sdiv */
529 COSTS_N_INSNS (21), /* ddiv */
532 /* Instruction costs on PPC750 and PPC7400 processors. */
534 struct processor_costs ppc750_cost = {
535 COSTS_N_INSNS (5), /* mulsi */
536 COSTS_N_INSNS (3), /* mulsi_const */
537 COSTS_N_INSNS (2), /* mulsi_const9 */
538 COSTS_N_INSNS (5), /* muldi */
539 COSTS_N_INSNS (17), /* divsi */
540 COSTS_N_INSNS (17), /* divdi */
541 COSTS_N_INSNS (3), /* fp */
542 COSTS_N_INSNS (3), /* dmul */
543 COSTS_N_INSNS (17), /* sdiv */
544 COSTS_N_INSNS (31), /* ddiv */
547 /* Instruction costs on PPC7450 processors. */
549 struct processor_costs ppc7450_cost = {
550 COSTS_N_INSNS (4), /* mulsi */
551 COSTS_N_INSNS (3), /* mulsi_const */
552 COSTS_N_INSNS (3), /* mulsi_const9 */
553 COSTS_N_INSNS (4), /* muldi */
554 COSTS_N_INSNS (23), /* divsi */
555 COSTS_N_INSNS (23), /* divdi */
556 COSTS_N_INSNS (5), /* fp */
557 COSTS_N_INSNS (5), /* dmul */
558 COSTS_N_INSNS (21), /* sdiv */
559 COSTS_N_INSNS (35), /* ddiv */
562 /* Instruction costs on PPC8540 processors. */
564 struct processor_costs ppc8540_cost = {
565 COSTS_N_INSNS (4), /* mulsi */
566 COSTS_N_INSNS (4), /* mulsi_const */
567 COSTS_N_INSNS (4), /* mulsi_const9 */
568 COSTS_N_INSNS (4), /* muldi */
569 COSTS_N_INSNS (19), /* divsi */
570 COSTS_N_INSNS (19), /* divdi */
571 COSTS_N_INSNS (4), /* fp */
572 COSTS_N_INSNS (4), /* dmul */
573 COSTS_N_INSNS (29), /* sdiv */
574 COSTS_N_INSNS (29), /* ddiv */
577 /* Instruction costs on POWER4 and POWER5 processors. */
579 struct processor_costs power4_cost = {
580 COSTS_N_INSNS (3), /* mulsi */
581 COSTS_N_INSNS (2), /* mulsi_const */
582 COSTS_N_INSNS (2), /* mulsi_const9 */
583 COSTS_N_INSNS (4), /* muldi */
584 COSTS_N_INSNS (18), /* divsi */
585 COSTS_N_INSNS (34), /* divdi */
586 COSTS_N_INSNS (3), /* fp */
587 COSTS_N_INSNS (3), /* dmul */
588 COSTS_N_INSNS (17), /* sdiv */
589 COSTS_N_INSNS (17), /* ddiv */
593 static bool rs6000_function_ok_for_sibcall (tree, tree);
594 static int num_insns_constant_wide (HOST_WIDE_INT);
595 static void validate_condition_mode (enum rtx_code, enum machine_mode);
596 static rtx rs6000_generate_compare (enum rtx_code);
597 static void rs6000_maybe_dead (rtx);
598 static void rs6000_emit_stack_tie (void);
599 static void rs6000_frame_related (rtx, rtx, HOST_WIDE_INT, rtx, rtx);
600 static rtx spe_synthesize_frame_save (rtx);
601 static bool spe_func_has_64bit_regs_p (void);
602 static void emit_frame_save (rtx, rtx, enum machine_mode, unsigned int,
604 static rtx gen_frame_mem_offset (enum machine_mode, rtx, int);
605 static void rs6000_emit_allocate_stack (HOST_WIDE_INT, int);
606 static unsigned rs6000_hash_constant (rtx);
607 static unsigned toc_hash_function (const void *);
608 static int toc_hash_eq (const void *, const void *);
609 static int constant_pool_expr_1 (rtx, int *, int *);
610 static bool constant_pool_expr_p (rtx);
611 static bool toc_relative_expr_p (rtx);
612 static bool legitimate_small_data_p (enum machine_mode, rtx);
613 static bool legitimate_indexed_address_p (rtx, int);
614 static bool legitimate_indirect_address_p (rtx, int);
615 static bool macho_lo_sum_memory_operand (rtx x, enum machine_mode mode);
616 static bool legitimate_lo_sum_address_p (enum machine_mode, rtx, int);
617 static struct machine_function * rs6000_init_machine_status (void);
618 static bool rs6000_assemble_integer (rtx, unsigned int, int);
619 #ifdef HAVE_GAS_HIDDEN
620 static void rs6000_assemble_visibility (tree, int);
622 static int rs6000_ra_ever_killed (void);
623 static tree rs6000_handle_longcall_attribute (tree *, tree, tree, int, bool *);
624 static tree rs6000_handle_altivec_attribute (tree *, tree, tree, int, bool *);
625 static void rs6000_eliminate_indexed_memrefs (rtx operands[2]);
626 static const char *rs6000_mangle_fundamental_type (tree);
627 extern const struct attribute_spec rs6000_attribute_table[];
628 static void rs6000_set_default_type_attributes (tree);
629 static void rs6000_output_function_prologue (FILE *, HOST_WIDE_INT);
630 static void rs6000_output_function_epilogue (FILE *, HOST_WIDE_INT);
631 static void rs6000_output_mi_thunk (FILE *, tree, HOST_WIDE_INT, HOST_WIDE_INT,
633 static rtx rs6000_emit_set_long_const (rtx, HOST_WIDE_INT, HOST_WIDE_INT);
634 static bool rs6000_return_in_memory (tree, tree);
635 static void rs6000_file_start (void);
637 static unsigned int rs6000_elf_section_type_flags (tree, const char *, int);
638 static void rs6000_elf_asm_out_constructor (rtx, int);
639 static void rs6000_elf_asm_out_destructor (rtx, int);
640 static void rs6000_elf_select_section (tree, int, unsigned HOST_WIDE_INT);
641 static void rs6000_elf_unique_section (tree, int);
642 static void rs6000_elf_select_rtx_section (enum machine_mode, rtx,
643 unsigned HOST_WIDE_INT);
644 static void rs6000_elf_encode_section_info (tree, rtx, int)
646 static bool rs6000_elf_in_small_data_p (tree);
649 static void rs6000_xcoff_asm_globalize_label (FILE *, const char *);
650 static void rs6000_xcoff_asm_named_section (const char *, unsigned int);
651 static void rs6000_xcoff_select_section (tree, int, unsigned HOST_WIDE_INT);
652 static void rs6000_xcoff_unique_section (tree, int);
653 static void rs6000_xcoff_select_rtx_section (enum machine_mode, rtx,
654 unsigned HOST_WIDE_INT);
655 static const char * rs6000_xcoff_strip_name_encoding (const char *);
656 static unsigned int rs6000_xcoff_section_type_flags (tree, const char *, int);
657 static void rs6000_xcoff_file_start (void);
658 static void rs6000_xcoff_file_end (void);
661 static bool rs6000_binds_local_p (tree);
663 static int rs6000_variable_issue (FILE *, int, rtx, int);
664 static bool rs6000_rtx_costs (rtx, int, int, int *);
665 static int rs6000_adjust_cost (rtx, rtx, rtx, int);
666 static bool is_microcoded_insn (rtx);
667 static int is_dispatch_slot_restricted (rtx);
668 static bool is_cracked_insn (rtx);
669 static bool is_branch_slot_insn (rtx);
670 static int rs6000_adjust_priority (rtx, int);
671 static int rs6000_issue_rate (void);
672 static bool rs6000_is_costly_dependence (rtx, rtx, rtx, int, int);
673 static rtx get_next_active_insn (rtx, rtx);
674 static bool insn_terminates_group_p (rtx , enum group_termination);
675 static bool is_costly_group (rtx *, rtx);
676 static int force_new_group (int, FILE *, rtx *, rtx, bool *, int, int *);
677 static int redefine_groups (FILE *, int, rtx, rtx);
678 static int pad_groups (FILE *, int, rtx, rtx);
679 static void rs6000_sched_finish (FILE *, int);
680 static int rs6000_use_sched_lookahead (void);
682 static void rs6000_init_builtins (void);
683 static rtx rs6000_expand_unop_builtin (enum insn_code, tree, rtx);
684 static rtx rs6000_expand_binop_builtin (enum insn_code, tree, rtx);
685 static rtx rs6000_expand_ternop_builtin (enum insn_code, tree, rtx);
686 static rtx rs6000_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
687 static void altivec_init_builtins (void);
688 static void rs6000_common_init_builtins (void);
689 static void rs6000_init_libfuncs (void);
691 static void enable_mask_for_builtins (struct builtin_description *, int,
692 enum rs6000_builtins,
693 enum rs6000_builtins);
694 static tree build_opaque_vector_type (tree, int);
695 static void spe_init_builtins (void);
696 static rtx spe_expand_builtin (tree, rtx, bool *);
697 static rtx spe_expand_stv_builtin (enum insn_code, tree);
698 static rtx spe_expand_predicate_builtin (enum insn_code, tree, rtx);
699 static rtx spe_expand_evsel_builtin (enum insn_code, tree, rtx);
700 static int rs6000_emit_int_cmove (rtx, rtx, rtx, rtx);
701 static rs6000_stack_t *rs6000_stack_info (void);
702 static void debug_stack_info (rs6000_stack_t *);
704 static rtx altivec_expand_builtin (tree, rtx, bool *);
705 static rtx altivec_expand_ld_builtin (tree, rtx, bool *);
706 static rtx altivec_expand_st_builtin (tree, rtx, bool *);
707 static rtx altivec_expand_dst_builtin (tree, rtx, bool *);
708 static rtx altivec_expand_abs_builtin (enum insn_code, tree, rtx);
709 static rtx altivec_expand_predicate_builtin (enum insn_code,
710 const char *, tree, rtx);
711 static rtx altivec_expand_lv_builtin (enum insn_code, tree, rtx);
712 static rtx altivec_expand_stv_builtin (enum insn_code, tree);
713 static void rs6000_parse_abi_options (void);
714 static void rs6000_parse_alignment_option (void);
715 static void rs6000_parse_tls_size_option (void);
716 static void rs6000_parse_yes_no_option (const char *, const char *, int *);
717 static int first_altivec_reg_to_save (void);
718 static unsigned int compute_vrsave_mask (void);
719 static void compute_save_world_info(rs6000_stack_t *info_ptr);
720 static void is_altivec_return_reg (rtx, void *);
721 static rtx generate_set_vrsave (rtx, rs6000_stack_t *, int);
722 int easy_vector_constant (rtx, enum machine_mode);
723 static int easy_vector_same (rtx, enum machine_mode);
724 static int easy_vector_splat_const (int, enum machine_mode);
725 static bool is_ev64_opaque_type (tree);
726 static rtx rs6000_dwarf_register_span (rtx);
727 static rtx rs6000_legitimize_tls_address (rtx, enum tls_model);
728 static rtx rs6000_tls_get_addr (void);
729 static rtx rs6000_got_sym (void);
730 static inline int rs6000_tls_symbol_ref_1 (rtx *, void *);
731 static const char *rs6000_get_some_local_dynamic_name (void);
732 static int rs6000_get_some_local_dynamic_name_1 (rtx *, void *);
733 static rtx rs6000_complex_function_value (enum machine_mode);
734 static rtx rs6000_spe_function_arg (CUMULATIVE_ARGS *,
735 enum machine_mode, tree);
736 static rtx rs6000_mixed_function_arg (enum machine_mode, tree, int);
737 static void rs6000_move_block_from_reg (int regno, rtx x, int nregs);
738 static void setup_incoming_varargs (CUMULATIVE_ARGS *,
739 enum machine_mode, tree,
741 static bool rs6000_pass_by_reference (CUMULATIVE_ARGS *, enum machine_mode,
744 static void macho_branch_islands (void);
745 static void add_compiler_branch_island (tree, tree, int);
746 static int no_previous_def (tree function_name);
747 static tree get_prev_label (tree function_name);
750 static tree rs6000_build_builtin_va_list (void);
751 static tree rs6000_gimplify_va_arg (tree, tree, tree *, tree *);
752 static bool rs6000_must_pass_in_stack (enum machine_mode, tree);
754 static enum machine_mode rs6000_eh_return_filter_mode (void);
756 /* Hash table stuff for keeping track of TOC entries. */
758 struct toc_hash_struct GTY(())
760 /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
761 ASM_OUTPUT_SPECIAL_POOL_ENTRY_P. */
763 enum machine_mode key_mode;
767 static GTY ((param_is (struct toc_hash_struct))) htab_t toc_hash_table;
769 /* Default register names. */
770 char rs6000_reg_names[][8] =
772 "0", "1", "2", "3", "4", "5", "6", "7",
773 "8", "9", "10", "11", "12", "13", "14", "15",
774 "16", "17", "18", "19", "20", "21", "22", "23",
775 "24", "25", "26", "27", "28", "29", "30", "31",
776 "0", "1", "2", "3", "4", "5", "6", "7",
777 "8", "9", "10", "11", "12", "13", "14", "15",
778 "16", "17", "18", "19", "20", "21", "22", "23",
779 "24", "25", "26", "27", "28", "29", "30", "31",
780 "mq", "lr", "ctr","ap",
781 "0", "1", "2", "3", "4", "5", "6", "7",
783 /* AltiVec registers. */
784 "0", "1", "2", "3", "4", "5", "6", "7",
785 "8", "9", "10", "11", "12", "13", "14", "15",
786 "16", "17", "18", "19", "20", "21", "22", "23",
787 "24", "25", "26", "27", "28", "29", "30", "31",
793 #ifdef TARGET_REGNAMES
794 static const char alt_reg_names[][8] =
796 "%r0", "%r1", "%r2", "%r3", "%r4", "%r5", "%r6", "%r7",
797 "%r8", "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
798 "%r16", "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
799 "%r24", "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
800 "%f0", "%f1", "%f2", "%f3", "%f4", "%f5", "%f6", "%f7",
801 "%f8", "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
802 "%f16", "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
803 "%f24", "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
804 "mq", "lr", "ctr", "ap",
805 "%cr0", "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
807 /* AltiVec registers. */
808 "%v0", "%v1", "%v2", "%v3", "%v4", "%v5", "%v6", "%v7",
809 "%v8", "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
810 "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
811 "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
818 #ifndef MASK_STRICT_ALIGN
819 #define MASK_STRICT_ALIGN 0
821 #ifndef TARGET_PROFILE_KERNEL
822 #define TARGET_PROFILE_KERNEL 0
825 /* The VRSAVE bitmask puts bit %v0 as the most significant bit. */
826 #define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))
828 /* Return 1 for a symbol ref for a thread-local storage symbol. */
829 #define RS6000_SYMBOL_REF_TLS_P(RTX) \
830 (GET_CODE (RTX) == SYMBOL_REF && SYMBOL_REF_TLS_MODEL (RTX) != 0)
832 /* Initialize the GCC target structure. */
833 #undef TARGET_ATTRIBUTE_TABLE
834 #define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
835 #undef TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
836 #define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES rs6000_set_default_type_attributes
838 #undef TARGET_ASM_ALIGNED_DI_OP
839 #define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP
841 /* Default unaligned ops are only provided for ELF. Find the ops needed
842 for non-ELF systems. */
843 #ifndef OBJECT_FORMAT_ELF
845 /* For XCOFF. rs6000_assemble_integer will handle unaligned DIs on
847 #undef TARGET_ASM_UNALIGNED_HI_OP
848 #define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
849 #undef TARGET_ASM_UNALIGNED_SI_OP
850 #define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
851 #undef TARGET_ASM_UNALIGNED_DI_OP
852 #define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
855 #undef TARGET_ASM_UNALIGNED_HI_OP
856 #define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
857 #undef TARGET_ASM_UNALIGNED_SI_OP
858 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
859 #undef TARGET_ASM_UNALIGNED_DI_OP
860 #define TARGET_ASM_UNALIGNED_DI_OP "\t.quad\t"
861 #undef TARGET_ASM_ALIGNED_DI_OP
862 #define TARGET_ASM_ALIGNED_DI_OP "\t.quad\t"
866 /* This hook deals with fixups for relocatable code and DI-mode objects
868 #undef TARGET_ASM_INTEGER
869 #define TARGET_ASM_INTEGER rs6000_assemble_integer
871 #ifdef HAVE_GAS_HIDDEN
872 #undef TARGET_ASM_ASSEMBLE_VISIBILITY
873 #define TARGET_ASM_ASSEMBLE_VISIBILITY rs6000_assemble_visibility
876 #undef TARGET_HAVE_TLS
877 #define TARGET_HAVE_TLS HAVE_AS_TLS
879 #undef TARGET_CANNOT_FORCE_CONST_MEM
880 #define TARGET_CANNOT_FORCE_CONST_MEM rs6000_tls_referenced_p
882 #undef TARGET_ASM_FUNCTION_PROLOGUE
883 #define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
884 #undef TARGET_ASM_FUNCTION_EPILOGUE
885 #define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue
887 #undef TARGET_SCHED_VARIABLE_ISSUE
888 #define TARGET_SCHED_VARIABLE_ISSUE rs6000_variable_issue
890 #undef TARGET_SCHED_ISSUE_RATE
891 #define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
892 #undef TARGET_SCHED_ADJUST_COST
893 #define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
894 #undef TARGET_SCHED_ADJUST_PRIORITY
895 #define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority
896 #undef TARGET_SCHED_IS_COSTLY_DEPENDENCE
897 #define TARGET_SCHED_IS_COSTLY_DEPENDENCE rs6000_is_costly_dependence
898 #undef TARGET_SCHED_FINISH
899 #define TARGET_SCHED_FINISH rs6000_sched_finish
901 #undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
902 #define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD rs6000_use_sched_lookahead
904 #undef TARGET_INIT_BUILTINS
905 #define TARGET_INIT_BUILTINS rs6000_init_builtins
907 #undef TARGET_EXPAND_BUILTIN
908 #define TARGET_EXPAND_BUILTIN rs6000_expand_builtin
910 #undef TARGET_MANGLE_FUNDAMENTAL_TYPE
911 #define TARGET_MANGLE_FUNDAMENTAL_TYPE rs6000_mangle_fundamental_type
913 #undef TARGET_INIT_LIBFUNCS
914 #define TARGET_INIT_LIBFUNCS rs6000_init_libfuncs
917 #undef TARGET_BINDS_LOCAL_P
918 #define TARGET_BINDS_LOCAL_P rs6000_binds_local_p
921 #undef TARGET_ASM_OUTPUT_MI_THUNK
922 #define TARGET_ASM_OUTPUT_MI_THUNK rs6000_output_mi_thunk
924 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
925 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_tree_hwi_hwi_tree_true
927 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
928 #define TARGET_FUNCTION_OK_FOR_SIBCALL rs6000_function_ok_for_sibcall
930 #undef TARGET_RTX_COSTS
931 #define TARGET_RTX_COSTS rs6000_rtx_costs
932 #undef TARGET_ADDRESS_COST
933 #define TARGET_ADDRESS_COST hook_int_rtx_0
935 #undef TARGET_VECTOR_OPAQUE_P
936 #define TARGET_VECTOR_OPAQUE_P is_ev64_opaque_type
938 #undef TARGET_DWARF_REGISTER_SPAN
939 #define TARGET_DWARF_REGISTER_SPAN rs6000_dwarf_register_span
941 /* On rs6000, function arguments are promoted, as are function return
943 #undef TARGET_PROMOTE_FUNCTION_ARGS
944 #define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_tree_true
945 #undef TARGET_PROMOTE_FUNCTION_RETURN
946 #define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_tree_true
948 #undef TARGET_RETURN_IN_MEMORY
949 #define TARGET_RETURN_IN_MEMORY rs6000_return_in_memory
951 #undef TARGET_SETUP_INCOMING_VARARGS
952 #define TARGET_SETUP_INCOMING_VARARGS setup_incoming_varargs
954 /* Always strict argument naming on rs6000. */
955 #undef TARGET_STRICT_ARGUMENT_NAMING
956 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
957 #undef TARGET_PRETEND_OUTGOING_VARARGS_NAMED
958 #define TARGET_PRETEND_OUTGOING_VARARGS_NAMED hook_bool_CUMULATIVE_ARGS_true
959 #undef TARGET_SPLIT_COMPLEX_ARG
960 #define TARGET_SPLIT_COMPLEX_ARG hook_bool_tree_true
961 #undef TARGET_MUST_PASS_IN_STACK
962 #define TARGET_MUST_PASS_IN_STACK rs6000_must_pass_in_stack
963 #undef TARGET_PASS_BY_REFERENCE
964 #define TARGET_PASS_BY_REFERENCE rs6000_pass_by_reference
966 #undef TARGET_BUILD_BUILTIN_VA_LIST
967 #define TARGET_BUILD_BUILTIN_VA_LIST rs6000_build_builtin_va_list
969 #undef TARGET_GIMPLIFY_VA_ARG_EXPR
970 #define TARGET_GIMPLIFY_VA_ARG_EXPR rs6000_gimplify_va_arg
972 #undef TARGET_EH_RETURN_FILTER_MODE
973 #define TARGET_EH_RETURN_FILTER_MODE rs6000_eh_return_filter_mode
975 struct gcc_target targetm = TARGET_INITIALIZER;
978 /* Value is 1 if hard register REGNO can hold a value of machine-mode
981 rs6000_hard_regno_mode_ok (int regno, enum machine_mode mode)
983 /* The GPRs can hold any mode, but values bigger than one register
984 cannot go past R31. */
985 if (INT_REGNO_P (regno))
986 return INT_REGNO_P (regno + HARD_REGNO_NREGS (regno, mode) - 1);
988 /* The float registers can only hold floating modes and DImode. */
989 if (FP_REGNO_P (regno))
991 (GET_MODE_CLASS (mode) == MODE_FLOAT
992 && FP_REGNO_P (regno + HARD_REGNO_NREGS (regno, mode) - 1))
993 || (GET_MODE_CLASS (mode) == MODE_INT
994 && GET_MODE_SIZE (mode) == UNITS_PER_FP_WORD);
996 /* The CR register can only hold CC modes. */
997 if (CR_REGNO_P (regno))
998 return GET_MODE_CLASS (mode) == MODE_CC;
1000 if (XER_REGNO_P (regno))
1001 return mode == PSImode;
1003 /* AltiVec only in AldyVec registers. */
1004 if (ALTIVEC_REGNO_P (regno))
1005 return ALTIVEC_VECTOR_MODE (mode);
1007 /* ...but GPRs can hold SIMD data on the SPE in one register. */
1008 if (SPE_SIMD_REGNO_P (regno) && TARGET_SPE && SPE_VECTOR_MODE (mode))
1011 /* We cannot put TImode anywhere except general register and it must be
1012 able to fit within the register set. */
1014 return GET_MODE_SIZE (mode) <= UNITS_PER_WORD;
1017 /* Initialize rs6000_hard_regno_mode_ok_p table. */
1019 rs6000_init_hard_regno_mode_ok (void)
1023 for (r = 0; r < FIRST_PSEUDO_REGISTER; ++r)
1024 for (m = 0; m < NUM_MACHINE_MODES; ++m)
1025 if (rs6000_hard_regno_mode_ok (r, m))
1026 rs6000_hard_regno_mode_ok_p[m][r] = true;
1029 /* If not otherwise specified by a target, make 'long double' equivalent to
1032 #ifndef RS6000_DEFAULT_LONG_DOUBLE_SIZE
1033 #define RS6000_DEFAULT_LONG_DOUBLE_SIZE 64
1036 /* Override command line options. Mostly we process the processor
1037 type and sometimes adjust other TARGET_ options. */
1040 rs6000_override_options (const char *default_cpu)
1043 struct rs6000_cpu_select *ptr;
1046 /* Simplifications for entries below. */
1049 POWERPC_BASE_MASK = MASK_POWERPC | MASK_NEW_MNEMONICS,
1050 POWERPC_7400_MASK = POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_ALTIVEC
1053 /* This table occasionally claims that a processor does not support
1054 a particular feature even though it does, but the feature is slower
1055 than the alternative. Thus, it shouldn't be relied on as a
1056 complete description of the processor's support.
1058 Please keep this list in order, and don't forget to update the
1059 documentation in invoke.texi when adding a new processor or
1063 const char *const name; /* Canonical processor name. */
1064 const enum processor_type processor; /* Processor type enum value. */
1065 const int target_enable; /* Target flags to enable. */
1066 } const processor_target_table[]
1067 = {{"401", PROCESSOR_PPC403, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1068 {"403", PROCESSOR_PPC403,
1069 POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_STRICT_ALIGN},
1070 {"405", PROCESSOR_PPC405, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1071 {"405fp", PROCESSOR_PPC405, POWERPC_BASE_MASK},
1072 {"440", PROCESSOR_PPC440, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1073 {"440fp", PROCESSOR_PPC440, POWERPC_BASE_MASK},
1074 {"505", PROCESSOR_MPCCORE, POWERPC_BASE_MASK},
1075 {"601", PROCESSOR_PPC601,
1076 MASK_POWER | POWERPC_BASE_MASK | MASK_MULTIPLE | MASK_STRING},
1077 {"602", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1078 {"603", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1079 {"603e", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1080 {"604", PROCESSOR_PPC604, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1081 {"604e", PROCESSOR_PPC604e, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1082 {"620", PROCESSOR_PPC620,
1083 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
1084 {"630", PROCESSOR_PPC630,
1085 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
1086 {"740", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1087 {"7400", PROCESSOR_PPC7400, POWERPC_7400_MASK},
1088 {"7450", PROCESSOR_PPC7450, POWERPC_7400_MASK},
1089 {"750", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1090 {"801", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1091 {"821", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1092 {"823", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1093 {"8540", PROCESSOR_PPC8540, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1094 {"860", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1095 {"970", PROCESSOR_POWER4,
1096 POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
1097 {"common", PROCESSOR_COMMON, MASK_NEW_MNEMONICS},
1098 {"ec603e", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1099 {"G3", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1100 {"G4", PROCESSOR_PPC7450, POWERPC_7400_MASK},
1101 {"G5", PROCESSOR_POWER4,
1102 POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
1103 {"power", PROCESSOR_POWER, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1104 {"power2", PROCESSOR_POWER,
1105 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING},
1106 {"power3", PROCESSOR_PPC630,
1107 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
1108 {"power4", PROCESSOR_POWER4,
1109 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_MFCRF | MASK_POWERPC64},
1110 {"power5", PROCESSOR_POWER5,
1111 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_MFCRF | MASK_POWERPC64},
1112 {"powerpc", PROCESSOR_POWERPC, POWERPC_BASE_MASK},
1113 {"powerpc64", PROCESSOR_POWERPC64,
1114 POWERPC_BASE_MASK | MASK_POWERPC64},
1115 {"rios", PROCESSOR_RIOS1, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1116 {"rios1", PROCESSOR_RIOS1, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1117 {"rios2", PROCESSOR_RIOS2,
1118 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING},
1119 {"rsc", PROCESSOR_PPC601, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1120 {"rsc1", PROCESSOR_PPC601, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1121 {"rs64a", PROCESSOR_RS64A, POWERPC_BASE_MASK | MASK_POWERPC64},
1124 const size_t ptt_size = ARRAY_SIZE (processor_target_table);
1126 /* Some OSs don't support saving the high part of 64-bit registers on
1127 context switch. Other OSs don't support saving Altivec registers.
1128 On those OSs, we don't touch the MASK_POWERPC64 or MASK_ALTIVEC
1129 settings; if the user wants either, the user must explicitly specify
1130 them and we won't interfere with the user's specification. */
1133 POWER_MASKS = MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING,
1134 POWERPC_MASKS = (POWERPC_BASE_MASK | MASK_PPC_GPOPT
1135 | MASK_PPC_GFXOPT | MASK_POWERPC64 | MASK_ALTIVEC
1139 rs6000_init_hard_regno_mode_ok ();
1141 set_masks = POWER_MASKS | POWERPC_MASKS | MASK_SOFT_FLOAT;
1142 #ifdef OS_MISSING_POWERPC64
1143 if (OS_MISSING_POWERPC64)
1144 set_masks &= ~MASK_POWERPC64;
1146 #ifdef OS_MISSING_ALTIVEC
1147 if (OS_MISSING_ALTIVEC)
1148 set_masks &= ~MASK_ALTIVEC;
1151 /* Don't override these by the processor default if given explicitly. */
1152 set_masks &= ~(target_flags_explicit
1153 & (MASK_MULTIPLE | MASK_STRING | MASK_SOFT_FLOAT));
1155 /* Identify the processor type. */
1156 rs6000_select[0].string = default_cpu;
1157 rs6000_cpu = TARGET_POWERPC64 ? PROCESSOR_DEFAULT64 : PROCESSOR_DEFAULT;
1159 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
1161 ptr = &rs6000_select[i];
1162 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
1164 for (j = 0; j < ptt_size; j++)
1165 if (! strcmp (ptr->string, processor_target_table[j].name))
1167 if (ptr->set_tune_p)
1168 rs6000_cpu = processor_target_table[j].processor;
1170 if (ptr->set_arch_p)
1172 target_flags &= ~set_masks;
1173 target_flags |= (processor_target_table[j].target_enable
1180 error ("bad value (%s) for %s switch", ptr->string, ptr->name);
1187 /* If we are optimizing big endian systems for space, use the load/store
1188 multiple and string instructions. */
1189 if (BYTES_BIG_ENDIAN && optimize_size)
1190 target_flags |= ~target_flags_explicit & (MASK_MULTIPLE | MASK_STRING);
1192 /* Don't allow -mmultiple or -mstring on little endian systems
1193 unless the cpu is a 750, because the hardware doesn't support the
1194 instructions used in little endian mode, and causes an alignment
1195 trap. The 750 does not cause an alignment trap (except when the
1196 target is unaligned). */
1198 if (!BYTES_BIG_ENDIAN && rs6000_cpu != PROCESSOR_PPC750)
1200 if (TARGET_MULTIPLE)
1202 target_flags &= ~MASK_MULTIPLE;
1203 if ((target_flags_explicit & MASK_MULTIPLE) != 0)
1204 warning ("-mmultiple is not supported on little endian systems");
1209 target_flags &= ~MASK_STRING;
1210 if ((target_flags_explicit & MASK_STRING) != 0)
1211 warning ("-mstring is not supported on little endian systems");
1215 /* Set debug flags */
1216 if (rs6000_debug_name)
1218 if (! strcmp (rs6000_debug_name, "all"))
1219 rs6000_debug_stack = rs6000_debug_arg = 1;
1220 else if (! strcmp (rs6000_debug_name, "stack"))
1221 rs6000_debug_stack = 1;
1222 else if (! strcmp (rs6000_debug_name, "arg"))
1223 rs6000_debug_arg = 1;
1225 error ("unknown -mdebug-%s switch", rs6000_debug_name);
1228 if (rs6000_traceback_name)
1230 if (! strncmp (rs6000_traceback_name, "full", 4))
1231 rs6000_traceback = traceback_full;
1232 else if (! strncmp (rs6000_traceback_name, "part", 4))
1233 rs6000_traceback = traceback_part;
1234 else if (! strncmp (rs6000_traceback_name, "no", 2))
1235 rs6000_traceback = traceback_none;
1237 error ("unknown -mtraceback arg `%s'; expecting `full', `partial' or `none'",
1238 rs6000_traceback_name);
1241 /* Set size of long double */
1242 rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
1243 if (rs6000_long_double_size_string)
1246 int size = strtol (rs6000_long_double_size_string, &tail, 10);
1247 if (*tail != '\0' || (size != 64 && size != 128))
1248 error ("Unknown switch -mlong-double-%s",
1249 rs6000_long_double_size_string);
1251 rs6000_long_double_type_size = size;
1254 /* Set Altivec ABI as default for powerpc64 linux. */
1255 if (TARGET_ELF && TARGET_64BIT)
1257 rs6000_altivec_abi = 1;
1258 rs6000_altivec_vrsave = 1;
1261 /* Handle -mabi= options. */
1262 rs6000_parse_abi_options ();
1264 /* Handle -malign-XXXXX option. */
1265 rs6000_parse_alignment_option ();
1267 /* Handle generic -mFOO=YES/NO options. */
1268 rs6000_parse_yes_no_option ("vrsave", rs6000_altivec_vrsave_string,
1269 &rs6000_altivec_vrsave);
1270 rs6000_parse_yes_no_option ("isel", rs6000_isel_string,
1272 rs6000_parse_yes_no_option ("spe", rs6000_spe_string, &rs6000_spe);
1273 rs6000_parse_yes_no_option ("float-gprs", rs6000_float_gprs_string,
1274 &rs6000_float_gprs);
1276 /* Handle -mtls-size option. */
1277 rs6000_parse_tls_size_option ();
1279 #ifdef SUBTARGET_OVERRIDE_OPTIONS
1280 SUBTARGET_OVERRIDE_OPTIONS;
1282 #ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
1283 SUBSUBTARGET_OVERRIDE_OPTIONS;
1289 error ("AltiVec and E500 instructions cannot coexist");
1291 /* The e500 does not have string instructions, and we set
1292 MASK_STRING above when optimizing for size. */
1293 if ((target_flags & MASK_STRING) != 0)
1294 target_flags = target_flags & ~MASK_STRING;
1296 /* No SPE means 64-bit long doubles, even if an E500. */
1297 if (rs6000_spe_string != 0
1298 && !strcmp (rs6000_spe_string, "no"))
1299 rs6000_long_double_type_size = 64;
1301 else if (rs6000_select[1].string != NULL)
1303 /* For the powerpc-eabispe configuration, we set all these by
1304 default, so let's unset them if we manually set another
1305 CPU that is not the E500. */
1306 if (rs6000_abi_string == 0)
1308 if (rs6000_spe_string == 0)
1310 if (rs6000_float_gprs_string == 0)
1311 rs6000_float_gprs = 0;
1312 if (rs6000_isel_string == 0)
1314 if (rs6000_long_double_size_string == 0)
1315 rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
1318 rs6000_always_hint = (rs6000_cpu != PROCESSOR_POWER4
1319 && rs6000_cpu != PROCESSOR_POWER5);
1320 rs6000_sched_groups = (rs6000_cpu == PROCESSOR_POWER4
1321 || rs6000_cpu == PROCESSOR_POWER5);
1323 /* Handle -m(no-)longcall option. This is a bit of a cheap hack,
1324 using TARGET_OPTIONS to handle a toggle switch, but we're out of
1325 bits in target_flags so TARGET_SWITCHES cannot be used.
1326 Assumption here is that rs6000_longcall_switch points into the
1327 text of the complete option, rather than being a copy, so we can
1328 scan back for the presence or absence of the no- modifier. */
1329 if (rs6000_longcall_switch)
1331 const char *base = rs6000_longcall_switch;
1332 while (base[-1] != 'm') base--;
1334 if (*rs6000_longcall_switch != '\0')
1335 error ("invalid option `%s'", base);
1336 rs6000_default_long_calls = (base[0] != 'n');
1339 /* Handle -m(no-)warn-altivec-long similarly. */
1340 if (rs6000_warn_altivec_long_switch)
1342 const char *base = rs6000_warn_altivec_long_switch;
1343 while (base[-1] != 'm') base--;
1345 if (*rs6000_warn_altivec_long_switch != '\0')
1346 error ("invalid option `%s'", base);
1347 rs6000_warn_altivec_long = (base[0] != 'n');
1350 /* Handle -mprioritize-restricted-insns option. */
1351 rs6000_sched_restricted_insns_priority
1352 = (rs6000_sched_groups ? 1 : 0);
1353 if (rs6000_sched_restricted_insns_priority_str)
1354 rs6000_sched_restricted_insns_priority =
1355 atoi (rs6000_sched_restricted_insns_priority_str);
1357 /* Handle -msched-costly-dep option. */
1358 rs6000_sched_costly_dep
1359 = (rs6000_sched_groups ? store_to_load_dep_costly : no_dep_costly);
1360 if (rs6000_sched_costly_dep_str)
1362 if (! strcmp (rs6000_sched_costly_dep_str, "no"))
1363 rs6000_sched_costly_dep = no_dep_costly;
1364 else if (! strcmp (rs6000_sched_costly_dep_str, "all"))
1365 rs6000_sched_costly_dep = all_deps_costly;
1366 else if (! strcmp (rs6000_sched_costly_dep_str, "true_store_to_load"))
1367 rs6000_sched_costly_dep = true_store_to_load_dep_costly;
1368 else if (! strcmp (rs6000_sched_costly_dep_str, "store_to_load"))
1369 rs6000_sched_costly_dep = store_to_load_dep_costly;
1371 rs6000_sched_costly_dep = atoi (rs6000_sched_costly_dep_str);
1374 /* Handle -minsert-sched-nops option. */
1375 rs6000_sched_insert_nops
1376 = (rs6000_sched_groups ? sched_finish_regroup_exact : sched_finish_none);
1377 if (rs6000_sched_insert_nops_str)
1379 if (! strcmp (rs6000_sched_insert_nops_str, "no"))
1380 rs6000_sched_insert_nops = sched_finish_none;
1381 else if (! strcmp (rs6000_sched_insert_nops_str, "pad"))
1382 rs6000_sched_insert_nops = sched_finish_pad_groups;
1383 else if (! strcmp (rs6000_sched_insert_nops_str, "regroup_exact"))
1384 rs6000_sched_insert_nops = sched_finish_regroup_exact;
1386 rs6000_sched_insert_nops = atoi (rs6000_sched_insert_nops_str);
1389 #ifdef TARGET_REGNAMES
1390 /* If the user desires alternate register names, copy in the
1391 alternate names now. */
1392 if (TARGET_REGNAMES)
1393 memcpy (rs6000_reg_names, alt_reg_names, sizeof (rs6000_reg_names));
1396 /* Set TARGET_AIX_STRUCT_RET last, after the ABI is determined.
1397 If -maix-struct-return or -msvr4-struct-return was explicitly
1398 used, don't override with the ABI default. */
1399 if ((target_flags_explicit & MASK_AIX_STRUCT_RET) == 0)
1401 if (DEFAULT_ABI == ABI_V4 && !DRAFT_V4_STRUCT_RET)
1402 target_flags = (target_flags & ~MASK_AIX_STRUCT_RET);
1404 target_flags |= MASK_AIX_STRUCT_RET;
1407 if (TARGET_LONG_DOUBLE_128
1408 && (DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_DARWIN))
1409 REAL_MODE_FORMAT (TFmode) = &ibm_extended_format;
1411 /* Allocate an alias set for register saves & restores from stack. */
1412 rs6000_sr_alias_set = new_alias_set ();
1415 ASM_GENERATE_INTERNAL_LABEL (toc_label_name, "LCTOC", 1);
1417 /* We can only guarantee the availability of DI pseudo-ops when
1418 assembling for 64-bit targets. */
1421 targetm.asm_out.aligned_op.di = NULL;
1422 targetm.asm_out.unaligned_op.di = NULL;
1425 /* Set maximum branch target alignment at two instructions, eight bytes. */
1426 align_jumps_max_skip = 8;
1427 align_loops_max_skip = 8;
1429 /* Arrange to save and restore machine status around nested functions. */
1430 init_machine_status = rs6000_init_machine_status;
1432 /* We should always be splitting complex arguments, but we can't break
1433 Linux and Darwin ABIs at the moment. For now, only AIX is fixed. */
1434 if (DEFAULT_ABI != ABI_AIX)
1435 targetm.calls.split_complex_arg = NULL;
1437 /* Initialize rs6000_cost with the appropriate target costs. */
1439 rs6000_cost = TARGET_POWERPC64 ? &size64_cost : &size32_cost;
1443 case PROCESSOR_RIOS1:
1444 rs6000_cost = &rios1_cost;
1447 case PROCESSOR_RIOS2:
1448 rs6000_cost = &rios2_cost;
1451 case PROCESSOR_RS64A:
1452 rs6000_cost = &rs64a_cost;
1455 case PROCESSOR_MPCCORE:
1456 rs6000_cost = &mpccore_cost;
1459 case PROCESSOR_PPC403:
1460 rs6000_cost = &ppc403_cost;
1463 case PROCESSOR_PPC405:
1464 rs6000_cost = &ppc405_cost;
1467 case PROCESSOR_PPC440:
1468 rs6000_cost = &ppc440_cost;
1471 case PROCESSOR_PPC601:
1472 rs6000_cost = &ppc601_cost;
1475 case PROCESSOR_PPC603:
1476 rs6000_cost = &ppc603_cost;
1479 case PROCESSOR_PPC604:
1480 rs6000_cost = &ppc604_cost;
1483 case PROCESSOR_PPC604e:
1484 rs6000_cost = &ppc604e_cost;
1487 case PROCESSOR_PPC620:
1488 rs6000_cost = &ppc620_cost;
1491 case PROCESSOR_PPC630:
1492 rs6000_cost = &ppc630_cost;
1495 case PROCESSOR_PPC750:
1496 case PROCESSOR_PPC7400:
1497 rs6000_cost = &ppc750_cost;
1500 case PROCESSOR_PPC7450:
1501 rs6000_cost = &ppc7450_cost;
1504 case PROCESSOR_PPC8540:
1505 rs6000_cost = &ppc8540_cost;
1508 case PROCESSOR_POWER4:
1509 case PROCESSOR_POWER5:
1510 rs6000_cost = &power4_cost;
1518 /* Handle generic options of the form -mfoo=yes/no.
1519 NAME is the option name.
1520 VALUE is the option value.
1521 FLAG is the pointer to the flag where to store a 1 or 0, depending on
1522 whether the option value is 'yes' or 'no' respectively. */
1524 rs6000_parse_yes_no_option (const char *name, const char *value, int *flag)
1528 else if (!strcmp (value, "yes"))
1530 else if (!strcmp (value, "no"))
1533 error ("unknown -m%s= option specified: '%s'", name, value);
1536 /* Handle -mabi= options. */
1538 rs6000_parse_abi_options (void)
1540 if (rs6000_abi_string == 0)
1542 else if (! strcmp (rs6000_abi_string, "altivec"))
1544 rs6000_altivec_abi = 1;
1547 else if (! strcmp (rs6000_abi_string, "no-altivec"))
1548 rs6000_altivec_abi = 0;
1549 else if (! strcmp (rs6000_abi_string, "spe"))
1552 rs6000_altivec_abi = 0;
1553 if (!TARGET_SPE_ABI)
1554 error ("not configured for ABI: '%s'", rs6000_abi_string);
1557 else if (! strcmp (rs6000_abi_string, "no-spe"))
1560 error ("unknown ABI specified: '%s'", rs6000_abi_string);
1563 /* Handle -malign-XXXXXX options. */
1565 rs6000_parse_alignment_option (void)
1567 if (rs6000_alignment_string == 0)
1569 else if (! strcmp (rs6000_alignment_string, "power"))
1570 rs6000_alignment_flags = MASK_ALIGN_POWER;
1571 else if (! strcmp (rs6000_alignment_string, "natural"))
1572 rs6000_alignment_flags = MASK_ALIGN_NATURAL;
1574 error ("unknown -malign-XXXXX option specified: '%s'",
1575 rs6000_alignment_string);
1578 /* Validate and record the size specified with the -mtls-size option. */
1581 rs6000_parse_tls_size_option (void)
1583 if (rs6000_tls_size_string == 0)
1585 else if (strcmp (rs6000_tls_size_string, "16") == 0)
1586 rs6000_tls_size = 16;
1587 else if (strcmp (rs6000_tls_size_string, "32") == 0)
1588 rs6000_tls_size = 32;
1589 else if (strcmp (rs6000_tls_size_string, "64") == 0)
1590 rs6000_tls_size = 64;
1592 error ("bad value `%s' for -mtls-size switch", rs6000_tls_size_string);
1596 optimization_options (int level ATTRIBUTE_UNUSED, int size ATTRIBUTE_UNUSED)
1600 /* Do anything needed at the start of the asm file. */
1603 rs6000_file_start (void)
1607 const char *start = buffer;
1608 struct rs6000_cpu_select *ptr;
1609 const char *default_cpu = TARGET_CPU_DEFAULT;
1610 FILE *file = asm_out_file;
1612 default_file_start ();
1614 #ifdef TARGET_BI_ARCH
1615 if ((TARGET_DEFAULT ^ target_flags) & MASK_64BIT)
1619 if (flag_verbose_asm)
1621 sprintf (buffer, "\n%s rs6000/powerpc options:", ASM_COMMENT_START);
1622 rs6000_select[0].string = default_cpu;
1624 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
1626 ptr = &rs6000_select[i];
1627 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
1629 fprintf (file, "%s %s%s", start, ptr->name, ptr->string);
1634 #ifdef USING_ELFOS_H
1635 switch (rs6000_sdata)
1637 case SDATA_NONE: fprintf (file, "%s -msdata=none", start); start = ""; break;
1638 case SDATA_DATA: fprintf (file, "%s -msdata=data", start); start = ""; break;
1639 case SDATA_SYSV: fprintf (file, "%s -msdata=sysv", start); start = ""; break;
1640 case SDATA_EABI: fprintf (file, "%s -msdata=eabi", start); start = ""; break;
1643 if (rs6000_sdata && g_switch_value)
1645 fprintf (file, "%s -G " HOST_WIDE_INT_PRINT_UNSIGNED, start,
1656 /* Return nonzero if this function is known to have a null epilogue. */
1659 direct_return (void)
1661 if (reload_completed)
1663 rs6000_stack_t *info = rs6000_stack_info ();
1665 if (info->first_gp_reg_save == 32
1666 && info->first_fp_reg_save == 64
1667 && info->first_altivec_reg_save == LAST_ALTIVEC_REGNO + 1
1668 && ! info->lr_save_p
1669 && ! info->cr_save_p
1670 && info->vrsave_mask == 0
1678 /* Returns 1 always. */
1681 any_operand (rtx op ATTRIBUTE_UNUSED,
1682 enum machine_mode mode ATTRIBUTE_UNUSED)
1687 /* Returns 1 always. */
1690 any_parallel_operand (rtx op ATTRIBUTE_UNUSED,
1691 enum machine_mode mode ATTRIBUTE_UNUSED)
1696 /* Returns 1 if op is the count register. */
1699 count_register_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1701 if (GET_CODE (op) != REG)
1704 if (REGNO (op) == COUNT_REGISTER_REGNUM)
1707 if (REGNO (op) > FIRST_PSEUDO_REGISTER)
1713 /* Returns 1 if op is an altivec register. */
1716 altivec_register_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1719 return (register_operand (op, mode)
1720 && (GET_CODE (op) != REG
1721 || REGNO (op) > FIRST_PSEUDO_REGISTER
1722 || ALTIVEC_REGNO_P (REGNO (op))));
1726 xer_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1728 if (GET_CODE (op) != REG)
1731 if (XER_REGNO_P (REGNO (op)))
1737 /* Return 1 if OP is a signed 8-bit constant. Int multiplication
1738 by such constants completes more quickly. */
1741 s8bit_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1743 return ( GET_CODE (op) == CONST_INT
1744 && (INTVAL (op) >= -128 && INTVAL (op) <= 127));
1747 /* Return 1 if OP is a constant that can fit in a D field. */
1750 short_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1752 return (GET_CODE (op) == CONST_INT
1753 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'I'));
1756 /* Similar for an unsigned D field. */
1759 u_short_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1761 return (GET_CODE (op) == CONST_INT
1762 && CONST_OK_FOR_LETTER_P (INTVAL (op) & GET_MODE_MASK (mode), 'K'));
1765 /* Return 1 if OP is a CONST_INT that cannot fit in a signed D field. */
1768 non_short_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1770 return (GET_CODE (op) == CONST_INT
1771 && (unsigned HOST_WIDE_INT) (INTVAL (op) + 0x8000) >= 0x10000);
1774 /* Returns 1 if OP is a CONST_INT that is a positive value
1775 and an exact power of 2. */
1778 exact_log2_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1780 return (GET_CODE (op) == CONST_INT
1782 && exact_log2 (INTVAL (op)) >= 0);
1785 /* Returns 1 if OP is a register that is not special (i.e., not MQ,
1789 gpc_reg_operand (rtx op, enum machine_mode mode)
1791 return (register_operand (op, mode)
1792 && (GET_CODE (op) != REG
1793 || (REGNO (op) >= ARG_POINTER_REGNUM
1794 && !XER_REGNO_P (REGNO (op)))
1795 || REGNO (op) < MQ_REGNO));
1798 /* Returns 1 if OP is either a pseudo-register or a register denoting a
1802 cc_reg_operand (rtx op, enum machine_mode mode)
1804 return (register_operand (op, mode)
1805 && (GET_CODE (op) != REG
1806 || REGNO (op) >= FIRST_PSEUDO_REGISTER
1807 || CR_REGNO_P (REGNO (op))));
1810 /* Returns 1 if OP is either a pseudo-register or a register denoting a
1811 CR field that isn't CR0. */
1814 cc_reg_not_cr0_operand (rtx op, enum machine_mode mode)
1816 return (register_operand (op, mode)
1817 && (GET_CODE (op) != REG
1818 || REGNO (op) >= FIRST_PSEUDO_REGISTER
1819 || CR_REGNO_NOT_CR0_P (REGNO (op))));
1822 /* Returns 1 if OP is either a constant integer valid for a D-field or
1823 a non-special register. If a register, it must be in the proper
1824 mode unless MODE is VOIDmode. */
1827 reg_or_short_operand (rtx op, enum machine_mode mode)
1829 return short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
1832 /* Similar, except check if the negation of the constant would be
1833 valid for a D-field. Don't allow a constant zero, since all the
1834 patterns that call this predicate use "addic r1,r2,-constant" on
1835 a constant value to set a carry when r2 is greater or equal to
1836 "constant". That doesn't work for zero. */
1839 reg_or_neg_short_operand (rtx op, enum machine_mode mode)
1841 if (GET_CODE (op) == CONST_INT)
1842 return CONST_OK_FOR_LETTER_P (INTVAL (op), 'P') && INTVAL (op) != 0;
1844 return gpc_reg_operand (op, mode);
1847 /* Returns 1 if OP is either a constant integer valid for a DS-field or
1848 a non-special register. If a register, it must be in the proper
1849 mode unless MODE is VOIDmode. */
1852 reg_or_aligned_short_operand (rtx op, enum machine_mode mode)
1854 if (gpc_reg_operand (op, mode))
1856 else if (short_cint_operand (op, mode) && !(INTVAL (op) & 3))
1863 /* Return 1 if the operand is either a register or an integer whose
1864 high-order 16 bits are zero. */
1867 reg_or_u_short_operand (rtx op, enum machine_mode mode)
1869 return u_short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
1872 /* Return 1 is the operand is either a non-special register or ANY
1873 constant integer. */
1876 reg_or_cint_operand (rtx op, enum machine_mode mode)
1878 return (GET_CODE (op) == CONST_INT || gpc_reg_operand (op, mode));
1881 /* Return 1 is the operand is either a non-special register or ANY
1882 32-bit signed constant integer. */
1885 reg_or_arith_cint_operand (rtx op, enum machine_mode mode)
1887 return (gpc_reg_operand (op, mode)
1888 || (GET_CODE (op) == CONST_INT
1889 #if HOST_BITS_PER_WIDE_INT != 32
1890 && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80000000)
1891 < (unsigned HOST_WIDE_INT) 0x100000000ll)
1896 /* Return 1 is the operand is either a non-special register or a 32-bit
1897 signed constant integer valid for 64-bit addition. */
1900 reg_or_add_cint64_operand (rtx op, enum machine_mode mode)
1902 return (gpc_reg_operand (op, mode)
1903 || (GET_CODE (op) == CONST_INT
1904 #if HOST_BITS_PER_WIDE_INT == 32
1905 && INTVAL (op) < 0x7fff8000
1907 && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80008000)
1913 /* Return 1 is the operand is either a non-special register or a 32-bit
1914 signed constant integer valid for 64-bit subtraction. */
1917 reg_or_sub_cint64_operand (rtx op, enum machine_mode mode)
1919 return (gpc_reg_operand (op, mode)
1920 || (GET_CODE (op) == CONST_INT
1921 #if HOST_BITS_PER_WIDE_INT == 32
1922 && (- INTVAL (op)) < 0x7fff8000
1924 && ((unsigned HOST_WIDE_INT) ((- INTVAL (op)) + 0x80008000)
1930 /* Return 1 is the operand is either a non-special register or ANY
1931 32-bit unsigned constant integer. */
1934 reg_or_logical_cint_operand (rtx op, enum machine_mode mode)
1936 if (GET_CODE (op) == CONST_INT)
1938 if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
1940 if (GET_MODE_BITSIZE (mode) <= 32)
1943 if (INTVAL (op) < 0)
1947 return ((INTVAL (op) & GET_MODE_MASK (mode)
1948 & (~ (unsigned HOST_WIDE_INT) 0xffffffff)) == 0);
1950 else if (GET_CODE (op) == CONST_DOUBLE)
1952 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
1956 return CONST_DOUBLE_HIGH (op) == 0;
1959 return gpc_reg_operand (op, mode);
1962 /* Return 1 if the operand is an operand that can be loaded via the GOT. */
1965 got_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1967 return (GET_CODE (op) == SYMBOL_REF
1968 || GET_CODE (op) == CONST
1969 || GET_CODE (op) == LABEL_REF);
1972 /* Return 1 if the operand is a simple references that can be loaded via
1973 the GOT (labels involving addition aren't allowed). */
1976 got_no_const_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1978 return (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF);
1981 /* Return the number of instructions it takes to form a constant in an
1982 integer register. */
1985 num_insns_constant_wide (HOST_WIDE_INT value)
1987 /* signed constant loadable with {cal|addi} */
1988 if (CONST_OK_FOR_LETTER_P (value, 'I'))
1991 /* constant loadable with {cau|addis} */
1992 else if (CONST_OK_FOR_LETTER_P (value, 'L'))
1995 #if HOST_BITS_PER_WIDE_INT == 64
1996 else if (TARGET_POWERPC64)
1998 HOST_WIDE_INT low = ((value & 0xffffffff) ^ 0x80000000) - 0x80000000;
1999 HOST_WIDE_INT high = value >> 31;
2001 if (high == 0 || high == -1)
2007 return num_insns_constant_wide (high) + 1;
2009 return (num_insns_constant_wide (high)
2010 + num_insns_constant_wide (low) + 1);
2019 num_insns_constant (rtx op, enum machine_mode mode)
2021 if (GET_CODE (op) == CONST_INT)
2023 #if HOST_BITS_PER_WIDE_INT == 64
2024 if ((INTVAL (op) >> 31) != 0 && (INTVAL (op) >> 31) != -1
2025 && mask64_operand (op, mode))
2029 return num_insns_constant_wide (INTVAL (op));
2032 else if (GET_CODE (op) == CONST_DOUBLE && mode == SFmode)
2037 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
2038 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
2039 return num_insns_constant_wide ((HOST_WIDE_INT) l);
2042 else if (GET_CODE (op) == CONST_DOUBLE)
2048 int endian = (WORDS_BIG_ENDIAN == 0);
2050 if (mode == VOIDmode || mode == DImode)
2052 high = CONST_DOUBLE_HIGH (op);
2053 low = CONST_DOUBLE_LOW (op);
2057 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
2058 REAL_VALUE_TO_TARGET_DOUBLE (rv, l);
2060 low = l[1 - endian];
2064 return (num_insns_constant_wide (low)
2065 + num_insns_constant_wide (high));
2069 if (high == 0 && low >= 0)
2070 return num_insns_constant_wide (low);
2072 else if (high == -1 && low < 0)
2073 return num_insns_constant_wide (low);
2075 else if (mask64_operand (op, mode))
2079 return num_insns_constant_wide (high) + 1;
2082 return (num_insns_constant_wide (high)
2083 + num_insns_constant_wide (low) + 1);
2091 /* Return 1 if the operand is a CONST_DOUBLE and it can be put into a
2092 register with one instruction per word. We only do this if we can
2093 safely read CONST_DOUBLE_{LOW,HIGH}. */
2096 easy_fp_constant (rtx op, enum machine_mode mode)
2098 if (GET_CODE (op) != CONST_DOUBLE
2099 || GET_MODE (op) != mode
2100 || (GET_MODE_CLASS (mode) != MODE_FLOAT && mode != DImode))
2103 /* Consider all constants with -msoft-float to be easy. */
2104 if ((TARGET_SOFT_FLOAT || !TARGET_FPRS)
2108 /* If we are using V.4 style PIC, consider all constants to be hard. */
2109 if (flag_pic && DEFAULT_ABI == ABI_V4)
2112 #ifdef TARGET_RELOCATABLE
2113 /* Similarly if we are using -mrelocatable, consider all constants
2115 if (TARGET_RELOCATABLE)
2124 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
2125 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);
2127 return (num_insns_constant_wide ((HOST_WIDE_INT) k[0]) == 1
2128 && num_insns_constant_wide ((HOST_WIDE_INT) k[1]) == 1
2129 && num_insns_constant_wide ((HOST_WIDE_INT) k[2]) == 1
2130 && num_insns_constant_wide ((HOST_WIDE_INT) k[3]) == 1);
2133 else if (mode == DFmode)
2138 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
2139 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
2141 return (num_insns_constant_wide ((HOST_WIDE_INT) k[0]) == 1
2142 && num_insns_constant_wide ((HOST_WIDE_INT) k[1]) == 1);
2145 else if (mode == SFmode)
2150 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
2151 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
2153 return num_insns_constant_wide (l) == 1;
2156 else if (mode == DImode)
2157 return ((TARGET_POWERPC64
2158 && GET_CODE (op) == CONST_DOUBLE && CONST_DOUBLE_LOW (op) == 0)
2159 || (num_insns_constant (op, DImode) <= 2));
2161 else if (mode == SImode)
2167 /* Returns the constant for the splat instruction, if exists. */
2170 easy_vector_splat_const (int cst, enum machine_mode mode)
2175 if (EASY_VECTOR_15 (cst)
2176 || EASY_VECTOR_15_ADD_SELF (cst))
2178 if ((cst & 0xffff) != ((cst >> 16) & 0xffff))
2182 if (EASY_VECTOR_15 (cst)
2183 || EASY_VECTOR_15_ADD_SELF (cst))
2185 if ((cst & 0xff) != ((cst >> 8) & 0xff))
2189 if (EASY_VECTOR_15 (cst)
2190 || EASY_VECTOR_15_ADD_SELF (cst))
2199 /* Return nonzero if all elements of a vector have the same value. */
2202 easy_vector_same (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2206 units = CONST_VECTOR_NUNITS (op);
2208 cst = INTVAL (CONST_VECTOR_ELT (op, 0));
2209 for (i = 1; i < units; ++i)
2210 if (INTVAL (CONST_VECTOR_ELT (op, i)) != cst)
2212 if (i == units && easy_vector_splat_const (cst, mode))
2217 /* Return 1 if the operand is a CONST_INT and can be put into a
2218 register without using memory. */
2221 easy_vector_constant (rtx op, enum machine_mode mode)
2225 if (GET_CODE (op) != CONST_VECTOR
2230 if (zero_constant (op, mode)
2231 && ((TARGET_ALTIVEC && ALTIVEC_VECTOR_MODE (mode))
2232 || (TARGET_SPE && SPE_VECTOR_MODE (mode))))
2235 if (GET_MODE_CLASS (mode) != MODE_VECTOR_INT)
2238 if (TARGET_SPE && mode == V1DImode)
2241 cst = INTVAL (CONST_VECTOR_ELT (op, 0));
2242 cst2 = INTVAL (CONST_VECTOR_ELT (op, 1));
2244 /* Limit SPE vectors to 15 bits signed. These we can generate with:
2246 evmergelo r0, r0, r0
2249 I don't know how efficient it would be to allow bigger constants,
2250 considering we'll have an extra 'ori' for every 'li'. I doubt 5
2251 instructions is better than a 64-bit memory load, but I don't
2252 have the e500 timing specs. */
2253 if (TARGET_SPE && mode == V2SImode
2254 && cst >= -0x7fff && cst <= 0x7fff
2255 && cst2 >= -0x7fff && cst2 <= 0x7fff)
2259 && easy_vector_same (op, mode))
2261 cst = easy_vector_splat_const (cst, mode);
2262 if (EASY_VECTOR_15_ADD_SELF (cst)
2263 || EASY_VECTOR_15 (cst))
2269 /* Same as easy_vector_constant but only for EASY_VECTOR_15_ADD_SELF. */
2272 easy_vector_constant_add_self (rtx op, enum machine_mode mode)
2276 && GET_CODE (op) == CONST_VECTOR
2277 && easy_vector_same (op, mode))
2279 cst = easy_vector_splat_const (INTVAL (CONST_VECTOR_ELT (op, 0)), mode);
2280 if (EASY_VECTOR_15_ADD_SELF (cst))
2286 /* Generate easy_vector_constant out of a easy_vector_constant_add_self. */
2289 gen_easy_vector_constant_add_self (rtx op)
2293 units = GET_MODE_NUNITS (GET_MODE (op));
2294 v = rtvec_alloc (units);
2296 for (i = 0; i < units; i++)
2298 GEN_INT (INTVAL (CONST_VECTOR_ELT (op, i)) >> 1);
2299 return gen_rtx_raw_CONST_VECTOR (GET_MODE (op), v);
2303 output_vec_const_move (rtx *operands)
2306 enum machine_mode mode;
2312 cst = INTVAL (CONST_VECTOR_ELT (vec, 0));
2313 cst2 = INTVAL (CONST_VECTOR_ELT (vec, 1));
2314 mode = GET_MODE (dest);
2318 if (zero_constant (vec, mode))
2319 return "vxor %0,%0,%0";
2320 else if (easy_vector_constant (vec, mode))
2322 operands[1] = GEN_INT (cst);
2326 if (EASY_VECTOR_15 (cst))
2328 operands[1] = GEN_INT (cst);
2329 return "vspltisw %0,%1";
2331 else if (EASY_VECTOR_15_ADD_SELF (cst))
2335 if (EASY_VECTOR_15 (cst))
2337 operands[1] = GEN_INT (cst);
2338 return "vspltish %0,%1";
2340 else if (EASY_VECTOR_15_ADD_SELF (cst))
2344 if (EASY_VECTOR_15 (cst))
2346 operands[1] = GEN_INT (cst);
2347 return "vspltisb %0,%1";
2349 else if (EASY_VECTOR_15_ADD_SELF (cst))
2361 /* Vector constant 0 is handled as a splitter of V2SI, and in the
2362 pattern of V1DI, V4HI, and V2SF.
2364 FIXME: We should probably return # and add post reload
2365 splitters for these, but this way is so easy ;-).
2367 operands[1] = GEN_INT (cst);
2368 operands[2] = GEN_INT (cst2);
2370 return "li %0,%1\n\tevmergelo %0,%0,%0";
2372 return "li %0,%1\n\tevmergelo %0,%0,%0\n\tli %0,%2";
2378 /* Return 1 if the operand is the constant 0. This works for scalars
2379 as well as vectors. */
2381 zero_constant (rtx op, enum machine_mode mode)
2383 return op == CONST0_RTX (mode);
2386 /* Return 1 if the operand is 0.0. */
2388 zero_fp_constant (rtx op, enum machine_mode mode)
2390 return GET_MODE_CLASS (mode) == MODE_FLOAT && op == CONST0_RTX (mode);
2393 /* Return 1 if the operand is in volatile memory. Note that during
2394 the RTL generation phase, memory_operand does not return TRUE for
2395 volatile memory references. So this function allows us to
2396 recognize volatile references where its safe. */
2399 volatile_mem_operand (rtx op, enum machine_mode mode)
2401 if (GET_CODE (op) != MEM)
2404 if (!MEM_VOLATILE_P (op))
2407 if (mode != GET_MODE (op))
2410 if (reload_completed)
2411 return memory_operand (op, mode);
2413 if (reload_in_progress)
2414 return strict_memory_address_p (mode, XEXP (op, 0));
2416 return memory_address_p (mode, XEXP (op, 0));
2419 /* Return 1 if the operand is an offsettable memory operand. */
2422 offsettable_mem_operand (rtx op, enum machine_mode mode)
2424 return ((GET_CODE (op) == MEM)
2425 && offsettable_address_p (reload_completed || reload_in_progress,
2426 mode, XEXP (op, 0)));
2429 /* Return 1 if the operand is either an easy FP constant (see above) or
2433 mem_or_easy_const_operand (rtx op, enum machine_mode mode)
2435 return memory_operand (op, mode) || easy_fp_constant (op, mode);
2438 /* Return 1 if the operand is either a non-special register or an item
2439 that can be used as the operand of a `mode' add insn. */
2442 add_operand (rtx op, enum machine_mode mode)
2444 if (GET_CODE (op) == CONST_INT)
2445 return (CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
2446 || CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
2448 return gpc_reg_operand (op, mode);
2451 /* Return 1 if OP is a constant but not a valid add_operand. */
2454 non_add_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2456 return (GET_CODE (op) == CONST_INT
2457 && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
2458 && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
2461 /* Return 1 if the operand is a non-special register or a constant that
2462 can be used as the operand of an OR or XOR insn on the RS/6000. */
2465 logical_operand (rtx op, enum machine_mode mode)
2467 HOST_WIDE_INT opl, oph;
2469 if (gpc_reg_operand (op, mode))
2472 if (GET_CODE (op) == CONST_INT)
2474 opl = INTVAL (op) & GET_MODE_MASK (mode);
2476 #if HOST_BITS_PER_WIDE_INT <= 32
2477 if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT && opl < 0)
2481 else if (GET_CODE (op) == CONST_DOUBLE)
2483 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
2486 opl = CONST_DOUBLE_LOW (op);
2487 oph = CONST_DOUBLE_HIGH (op);
2494 return ((opl & ~ (unsigned HOST_WIDE_INT) 0xffff) == 0
2495 || (opl & ~ (unsigned HOST_WIDE_INT) 0xffff0000) == 0);
2498 /* Return 1 if C is a constant that is not a logical operand (as
2499 above), but could be split into one. */
2502 non_logical_cint_operand (rtx op, enum machine_mode mode)
2504 return ((GET_CODE (op) == CONST_INT || GET_CODE (op) == CONST_DOUBLE)
2505 && ! logical_operand (op, mode)
2506 && reg_or_logical_cint_operand (op, mode));
2509 /* Return 1 if C is a constant that can be encoded in a 32-bit mask on the
2510 RS/6000. It is if there are no more than two 1->0 or 0->1 transitions.
2511 Reject all ones and all zeros, since these should have been optimized
2512 away and confuse the making of MB and ME. */
2515 mask_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2517 HOST_WIDE_INT c, lsb;
2519 if (GET_CODE (op) != CONST_INT)
2524 /* Fail in 64-bit mode if the mask wraps around because the upper
2525 32-bits of the mask will all be 1s, contrary to GCC's internal view. */
2526 if (TARGET_POWERPC64 && (c & 0x80000001) == 0x80000001)
2529 /* We don't change the number of transitions by inverting,
2530 so make sure we start with the LS bit zero. */
2534 /* Reject all zeros or all ones. */
2538 /* Find the first transition. */
2541 /* Invert to look for a second transition. */
2544 /* Erase first transition. */
2547 /* Find the second transition (if any). */
2550 /* Match if all the bits above are 1's (or c is zero). */
2554 /* Return 1 for the PowerPC64 rlwinm corner case. */
2557 mask_operand_wrap (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2559 HOST_WIDE_INT c, lsb;
2561 if (GET_CODE (op) != CONST_INT)
2566 if ((c & 0x80000001) != 0x80000001)
2580 /* Return 1 if the operand is a constant that is a PowerPC64 mask.
2581 It is if there are no more than one 1->0 or 0->1 transitions.
2582 Reject all zeros, since zero should have been optimized away and
2583 confuses the making of MB and ME. */
2586 mask64_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2588 if (GET_CODE (op) == CONST_INT)
2590 HOST_WIDE_INT c, lsb;
2594 /* Reject all zeros. */
2598 /* We don't change the number of transitions by inverting,
2599 so make sure we start with the LS bit zero. */
2603 /* Find the transition, and check that all bits above are 1's. */
2606 /* Match if all the bits above are 1's (or c is zero). */
2612 /* Like mask64_operand, but allow up to three transitions. This
2613 predicate is used by insn patterns that generate two rldicl or
2614 rldicr machine insns. */
2617 mask64_2_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2619 if (GET_CODE (op) == CONST_INT)
2621 HOST_WIDE_INT c, lsb;
2625 /* Disallow all zeros. */
2629 /* We don't change the number of transitions by inverting,
2630 so make sure we start with the LS bit zero. */
2634 /* Find the first transition. */
2637 /* Invert to look for a second transition. */
2640 /* Erase first transition. */
2643 /* Find the second transition. */
2646 /* Invert to look for a third transition. */
2649 /* Erase second transition. */
2652 /* Find the third transition (if any). */
2655 /* Match if all the bits above are 1's (or c is zero). */
2661 /* Generates shifts and masks for a pair of rldicl or rldicr insns to
2662 implement ANDing by the mask IN. */
2664 build_mask64_2_operands (rtx in, rtx *out)
2666 #if HOST_BITS_PER_WIDE_INT >= 64
2667 unsigned HOST_WIDE_INT c, lsb, m1, m2;
2670 if (GET_CODE (in) != CONST_INT)
2676 /* Assume c initially something like 0x00fff000000fffff. The idea
2677 is to rotate the word so that the middle ^^^^^^ group of zeros
2678 is at the MS end and can be cleared with an rldicl mask. We then
2679 rotate back and clear off the MS ^^ group of zeros with a
2681 c = ~c; /* c == 0xff000ffffff00000 */
2682 lsb = c & -c; /* lsb == 0x0000000000100000 */
2683 m1 = -lsb; /* m1 == 0xfffffffffff00000 */
2684 c = ~c; /* c == 0x00fff000000fffff */
2685 c &= -lsb; /* c == 0x00fff00000000000 */
2686 lsb = c & -c; /* lsb == 0x0000100000000000 */
2687 c = ~c; /* c == 0xff000fffffffffff */
2688 c &= -lsb; /* c == 0xff00000000000000 */
2690 while ((lsb >>= 1) != 0)
2691 shift++; /* shift == 44 on exit from loop */
2692 m1 <<= 64 - shift; /* m1 == 0xffffff0000000000 */
2693 m1 = ~m1; /* m1 == 0x000000ffffffffff */
2694 m2 = ~c; /* m2 == 0x00ffffffffffffff */
2698 /* Assume c initially something like 0xff000f0000000000. The idea
2699 is to rotate the word so that the ^^^ middle group of zeros
2700 is at the LS end and can be cleared with an rldicr mask. We then
2701 rotate back and clear off the LS group of ^^^^^^^^^^ zeros with
2703 lsb = c & -c; /* lsb == 0x0000010000000000 */
2704 m2 = -lsb; /* m2 == 0xffffff0000000000 */
2705 c = ~c; /* c == 0x00fff0ffffffffff */
2706 c &= -lsb; /* c == 0x00fff00000000000 */
2707 lsb = c & -c; /* lsb == 0x0000100000000000 */
2708 c = ~c; /* c == 0xff000fffffffffff */
2709 c &= -lsb; /* c == 0xff00000000000000 */
2711 while ((lsb >>= 1) != 0)
2712 shift++; /* shift == 44 on exit from loop */
2713 m1 = ~c; /* m1 == 0x00ffffffffffffff */
2714 m1 >>= shift; /* m1 == 0x0000000000000fff */
2715 m1 = ~m1; /* m1 == 0xfffffffffffff000 */
2718 /* Note that when we only have two 0->1 and 1->0 transitions, one of the
2719 masks will be all 1's. We are guaranteed more than one transition. */
2720 out[0] = GEN_INT (64 - shift);
2721 out[1] = GEN_INT (m1);
2722 out[2] = GEN_INT (shift);
2723 out[3] = GEN_INT (m2);
2731 /* Return 1 if the operand is either a non-special register or a constant
2732 that can be used as the operand of a PowerPC64 logical AND insn. */
2735 and64_operand (rtx op, enum machine_mode mode)
2737 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
2738 return (gpc_reg_operand (op, mode) || mask64_operand (op, mode));
2740 return (logical_operand (op, mode) || mask64_operand (op, mode));
2743 /* Like the above, but also match constants that can be implemented
2744 with two rldicl or rldicr insns. */
2747 and64_2_operand (rtx op, enum machine_mode mode)
2749 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
2750 return gpc_reg_operand (op, mode) || mask64_2_operand (op, mode);
2752 return logical_operand (op, mode) || mask64_2_operand (op, mode);
2755 /* Return 1 if the operand is either a non-special register or a
2756 constant that can be used as the operand of an RS/6000 logical AND insn. */
2759 and_operand (rtx op, enum machine_mode mode)
2761 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
2762 return (gpc_reg_operand (op, mode) || mask_operand (op, mode));
2764 return (logical_operand (op, mode) || mask_operand (op, mode));
2767 /* Return 1 if the operand is a general register or memory operand. */
2770 reg_or_mem_operand (rtx op, enum machine_mode mode)
2772 return (gpc_reg_operand (op, mode)
2773 || memory_operand (op, mode)
2774 || macho_lo_sum_memory_operand (op, mode)
2775 || volatile_mem_operand (op, mode));
2778 /* Return 1 if the operand is a general register or memory operand without
2779 pre_inc or pre_dec which produces invalid form of PowerPC lwa
2783 lwa_operand (rtx op, enum machine_mode mode)
2787 if (reload_completed && GET_CODE (inner) == SUBREG)
2788 inner = SUBREG_REG (inner);
2790 return gpc_reg_operand (inner, mode)
2791 || (memory_operand (inner, mode)
2792 && GET_CODE (XEXP (inner, 0)) != PRE_INC
2793 && GET_CODE (XEXP (inner, 0)) != PRE_DEC
2794 && (GET_CODE (XEXP (inner, 0)) != PLUS
2795 || GET_CODE (XEXP (XEXP (inner, 0), 1)) != CONST_INT
2796 || INTVAL (XEXP (XEXP (inner, 0), 1)) % 4 == 0));
2799 /* Return 1 if the operand, used inside a MEM, is a SYMBOL_REF. */
2802 symbol_ref_operand (rtx op, enum machine_mode mode)
2804 if (mode != VOIDmode && GET_MODE (op) != mode)
2807 return (GET_CODE (op) == SYMBOL_REF
2808 && (DEFAULT_ABI != ABI_AIX || SYMBOL_REF_FUNCTION_P (op)));
2811 /* Return 1 if the operand, used inside a MEM, is a valid first argument
2812 to CALL. This is a SYMBOL_REF, a pseudo-register, LR or CTR. */
2815 call_operand (rtx op, enum machine_mode mode)
2817 if (mode != VOIDmode && GET_MODE (op) != mode)
2820 return (GET_CODE (op) == SYMBOL_REF
2821 || (GET_CODE (op) == REG
2822 && (REGNO (op) == LINK_REGISTER_REGNUM
2823 || REGNO (op) == COUNT_REGISTER_REGNUM
2824 || REGNO (op) >= FIRST_PSEUDO_REGISTER)));
2827 /* Return 1 if the operand is a SYMBOL_REF for a function known to be in
2831 current_file_function_operand (rtx op,
2832 enum machine_mode mode ATTRIBUTE_UNUSED)
2834 return (GET_CODE (op) == SYMBOL_REF
2835 && (DEFAULT_ABI != ABI_AIX || SYMBOL_REF_FUNCTION_P (op))
2836 && (SYMBOL_REF_LOCAL_P (op)
2837 || (op == XEXP (DECL_RTL (current_function_decl), 0))));
2840 /* Return 1 if this operand is a valid input for a move insn. */
2843 input_operand (rtx op, enum machine_mode mode)
2845 /* Memory is always valid. */
2846 if (memory_operand (op, mode))
2849 /* For floating-point, easy constants are valid. */
2850 if (GET_MODE_CLASS (mode) == MODE_FLOAT
2852 && easy_fp_constant (op, mode))
2855 /* Allow any integer constant. */
2856 if (GET_MODE_CLASS (mode) == MODE_INT
2857 && (GET_CODE (op) == CONST_INT
2858 || GET_CODE (op) == CONST_DOUBLE))
2861 /* Allow easy vector constants. */
2862 if (GET_CODE (op) == CONST_VECTOR
2863 && easy_vector_constant (op, mode))
2866 /* For floating-point or multi-word mode, the only remaining valid type
2868 if (GET_MODE_CLASS (mode) == MODE_FLOAT
2869 || GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2870 return register_operand (op, mode);
2872 /* The only cases left are integral modes one word or smaller (we
2873 do not get called for MODE_CC values). These can be in any
2875 if (register_operand (op, mode))
2878 /* A SYMBOL_REF referring to the TOC is valid. */
2879 if (legitimate_constant_pool_address_p (op))
2882 /* A constant pool expression (relative to the TOC) is valid */
2883 if (toc_relative_expr_p (op))
2886 /* V.4 allows SYMBOL_REFs and CONSTs that are in the small data region
2888 if (DEFAULT_ABI == ABI_V4
2889 && (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == CONST)
2890 && small_data_operand (op, Pmode))
2897 /* Darwin, AIX increases natural record alignment to doubleword if the first
2898 field is an FP double while the FP fields remain word aligned. */
2901 rs6000_special_round_type_align (tree type, int computed, int specified)
2903 tree field = TYPE_FIELDS (type);
2905 /* Skip all the static variables only if ABI is greater than
2907 while (field != NULL && TREE_CODE (field) == VAR_DECL)
2908 field = TREE_CHAIN (field);
2910 if (field == NULL || field == type || DECL_MODE (field) != DFmode)
2911 return MAX (computed, specified);
2913 return MAX (MAX (computed, specified), 64);
2916 /* Return 1 for an operand in small memory on V.4/eabi. */
2919 small_data_operand (rtx op ATTRIBUTE_UNUSED,
2920 enum machine_mode mode ATTRIBUTE_UNUSED)
2925 if (rs6000_sdata == SDATA_NONE || rs6000_sdata == SDATA_DATA)
2928 if (DEFAULT_ABI != ABI_V4)
2931 if (GET_CODE (op) == SYMBOL_REF)
2934 else if (GET_CODE (op) != CONST
2935 || GET_CODE (XEXP (op, 0)) != PLUS
2936 || GET_CODE (XEXP (XEXP (op, 0), 0)) != SYMBOL_REF
2937 || GET_CODE (XEXP (XEXP (op, 0), 1)) != CONST_INT)
2942 rtx sum = XEXP (op, 0);
2943 HOST_WIDE_INT summand;
2945 /* We have to be careful here, because it is the referenced address
2946 that must be 32k from _SDA_BASE_, not just the symbol. */
2947 summand = INTVAL (XEXP (sum, 1));
2948 if (summand < 0 || (unsigned HOST_WIDE_INT) summand > g_switch_value)
2951 sym_ref = XEXP (sum, 0);
2954 return SYMBOL_REF_SMALL_P (sym_ref);
2960 /* Return true, if operand is a memory operand and has a
2961 displacement divisible by 4. */
2964 word_offset_memref_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2969 if (!memory_operand (op, mode))
2972 addr = XEXP (op, 0);
2973 if (GET_CODE (addr) == PLUS
2974 && GET_CODE (XEXP (addr, 0)) == REG
2975 && GET_CODE (XEXP (addr, 1)) == CONST_INT)
2976 off = INTVAL (XEXP (addr, 1));
2978 return (off % 4) == 0;
2981 /* Return true if either operand is a general purpose register. */
2984 gpr_or_gpr_p (rtx op0, rtx op1)
2986 return ((REG_P (op0) && INT_REGNO_P (REGNO (op0)))
2987 || (REG_P (op1) && INT_REGNO_P (REGNO (op1))));
2991 /* Subroutines of rs6000_legitimize_address and rs6000_legitimate_address. */
2994 constant_pool_expr_1 (rtx op, int *have_sym, int *have_toc)
2996 switch (GET_CODE(op))
2999 if (RS6000_SYMBOL_REF_TLS_P (op))
3001 else if (CONSTANT_POOL_ADDRESS_P (op))
3003 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (op), Pmode))
3011 else if (! strcmp (XSTR (op, 0), toc_label_name))
3020 return (constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc)
3021 && constant_pool_expr_1 (XEXP (op, 1), have_sym, have_toc));
3023 return constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc);
3032 constant_pool_expr_p (rtx op)
3036 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_sym;
3040 toc_relative_expr_p (rtx op)
3044 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_toc;
3048 legitimate_constant_pool_address_p (rtx x)
3051 && GET_CODE (x) == PLUS
3052 && GET_CODE (XEXP (x, 0)) == REG
3053 && (TARGET_MINIMAL_TOC || REGNO (XEXP (x, 0)) == TOC_REGISTER)
3054 && constant_pool_expr_p (XEXP (x, 1)));
3058 legitimate_small_data_p (enum machine_mode mode, rtx x)
3060 return (DEFAULT_ABI == ABI_V4
3061 && !flag_pic && !TARGET_TOC
3062 && (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST)
3063 && small_data_operand (x, mode));
3066 /* SPE offset addressing is limited to 5-bits worth of double words. */
3067 #define SPE_CONST_OFFSET_OK(x) (((x) & ~0xf8) == 0)
3070 rs6000_legitimate_offset_address_p (enum machine_mode mode, rtx x, int strict)
3072 unsigned HOST_WIDE_INT offset, extra;
3074 if (GET_CODE (x) != PLUS)
3076 if (GET_CODE (XEXP (x, 0)) != REG)
3078 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
3080 if (legitimate_constant_pool_address_p (x))
3082 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
3085 offset = INTVAL (XEXP (x, 1));
3093 /* AltiVec vector modes. Only reg+reg addressing is valid here,
3094 which leaves the only valid constant offset of zero, which by
3095 canonicalization rules is also invalid. */
3102 /* SPE vector modes. */
3103 return SPE_CONST_OFFSET_OK (offset);
3107 if (mode == DFmode || !TARGET_POWERPC64)
3109 else if (offset & 3)
3115 if (mode == TFmode || !TARGET_POWERPC64)
3117 else if (offset & 3)
3128 return (offset < 0x10000) && (offset + extra < 0x10000);
3132 legitimate_indexed_address_p (rtx x, int strict)
3136 if (GET_CODE (x) != PLUS)
3141 if (!REG_P (op0) || !REG_P (op1))
3144 return ((INT_REG_OK_FOR_BASE_P (op0, strict)
3145 && INT_REG_OK_FOR_INDEX_P (op1, strict))
3146 || (INT_REG_OK_FOR_BASE_P (op1, strict)
3147 && INT_REG_OK_FOR_INDEX_P (op0, strict)));
3151 legitimate_indirect_address_p (rtx x, int strict)
3153 return GET_CODE (x) == REG && INT_REG_OK_FOR_BASE_P (x, strict);
3157 macho_lo_sum_memory_operand (rtx x, enum machine_mode mode)
3159 if (!TARGET_MACHO || !flag_pic
3160 || mode != SImode || GET_CODE(x) != MEM)
3164 if (GET_CODE (x) != LO_SUM)
3166 if (GET_CODE (XEXP (x, 0)) != REG)
3168 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), 0))
3172 return CONSTANT_P (x);
3176 legitimate_lo_sum_address_p (enum machine_mode mode, rtx x, int strict)
3178 if (GET_CODE (x) != LO_SUM)
3180 if (GET_CODE (XEXP (x, 0)) != REG)
3182 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
3186 if (TARGET_ELF || TARGET_MACHO)
3188 if (DEFAULT_ABI != ABI_AIX && DEFAULT_ABI != ABI_DARWIN && flag_pic)
3192 if (GET_MODE_NUNITS (mode) != 1)
3194 if (GET_MODE_BITSIZE (mode) > 64)
3197 return CONSTANT_P (x);
3204 /* Try machine-dependent ways of modifying an illegitimate address
3205 to be legitimate. If we find one, return the new, valid address.
3206 This is used from only one place: `memory_address' in explow.c.
3208 OLDX is the address as it was before break_out_memory_refs was
3209 called. In some cases it is useful to look at this to decide what
3212 MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.
3214 It is always safe for this function to do nothing. It exists to
3215 recognize opportunities to optimize the output.
3217 On RS/6000, first check for the sum of a register with a constant
3218 integer that is out of range. If so, generate code to add the
3219 constant with the low-order 16 bits masked to the register and force
3220 this result into another register (this can be done with `cau').
3221 Then generate an address of REG+(CONST&0xffff), allowing for the
3222 possibility of bit 16 being a one.
3224 Then check for the sum of a register and something not constant, try to
3225 load the other things into a register and return the sum. */
3228 rs6000_legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
3229 enum machine_mode mode)
3231 if (GET_CODE (x) == SYMBOL_REF)
3233 enum tls_model model = SYMBOL_REF_TLS_MODEL (x);
3235 return rs6000_legitimize_tls_address (x, model);
3238 if (GET_CODE (x) == PLUS
3239 && GET_CODE (XEXP (x, 0)) == REG
3240 && GET_CODE (XEXP (x, 1)) == CONST_INT
3241 && (unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1)) + 0x8000) >= 0x10000)
3243 HOST_WIDE_INT high_int, low_int;
3245 low_int = ((INTVAL (XEXP (x, 1)) & 0xffff) ^ 0x8000) - 0x8000;
3246 high_int = INTVAL (XEXP (x, 1)) - low_int;
3247 sum = force_operand (gen_rtx_PLUS (Pmode, XEXP (x, 0),
3248 GEN_INT (high_int)), 0);
3249 return gen_rtx_PLUS (Pmode, sum, GEN_INT (low_int));
3251 else if (GET_CODE (x) == PLUS
3252 && GET_CODE (XEXP (x, 0)) == REG
3253 && GET_CODE (XEXP (x, 1)) != CONST_INT
3254 && GET_MODE_NUNITS (mode) == 1
3255 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
3257 || (mode != DFmode && mode != TFmode))
3258 && (TARGET_POWERPC64 || mode != DImode)
3261 return gen_rtx_PLUS (Pmode, XEXP (x, 0),
3262 force_reg (Pmode, force_operand (XEXP (x, 1), 0)));
3264 else if (ALTIVEC_VECTOR_MODE (mode))
3268 /* Make sure both operands are registers. */
3269 if (GET_CODE (x) == PLUS)
3270 return gen_rtx_PLUS (Pmode, force_reg (Pmode, XEXP (x, 0)),
3271 force_reg (Pmode, XEXP (x, 1)));
3273 reg = force_reg (Pmode, x);
3276 else if (SPE_VECTOR_MODE (mode))
3278 /* We accept [reg + reg] and [reg + OFFSET]. */
3280 if (GET_CODE (x) == PLUS)
3282 rtx op1 = XEXP (x, 0);
3283 rtx op2 = XEXP (x, 1);
3285 op1 = force_reg (Pmode, op1);
3287 if (GET_CODE (op2) != REG
3288 && (GET_CODE (op2) != CONST_INT
3289 || !SPE_CONST_OFFSET_OK (INTVAL (op2))))
3290 op2 = force_reg (Pmode, op2);
3292 return gen_rtx_PLUS (Pmode, op1, op2);
3295 return force_reg (Pmode, x);
3301 && GET_CODE (x) != CONST_INT
3302 && GET_CODE (x) != CONST_DOUBLE
3304 && GET_MODE_NUNITS (mode) == 1
3305 && (GET_MODE_BITSIZE (mode) <= 32
3306 || ((TARGET_HARD_FLOAT && TARGET_FPRS) && mode == DFmode)))
3308 rtx reg = gen_reg_rtx (Pmode);
3309 emit_insn (gen_elf_high (reg, x));
3310 return gen_rtx_LO_SUM (Pmode, reg, x);
3312 else if (TARGET_MACHO && TARGET_32BIT && TARGET_NO_TOC
3315 && ! MACHO_DYNAMIC_NO_PIC_P
3317 && GET_CODE (x) != CONST_INT
3318 && GET_CODE (x) != CONST_DOUBLE
3320 && ((TARGET_HARD_FLOAT && TARGET_FPRS) || mode != DFmode)
3324 rtx reg = gen_reg_rtx (Pmode);
3325 emit_insn (gen_macho_high (reg, x));
3326 return gen_rtx_LO_SUM (Pmode, reg, x);
3329 && constant_pool_expr_p (x)
3330 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), Pmode))
3332 return create_TOC_reference (x);
3338 /* This is called from dwarf2out.c via ASM_OUTPUT_DWARF_DTPREL.
3339 We need to emit DTP-relative relocations. */
3342 rs6000_output_dwarf_dtprel (FILE *file, int size, rtx x)
3347 fputs ("\t.long\t", file);
3350 fputs (DOUBLE_INT_ASM_OP, file);
3355 output_addr_const (file, x);
3356 fputs ("@dtprel+0x8000", file);
3359 /* Construct the SYMBOL_REF for the tls_get_addr function. */
3361 static GTY(()) rtx rs6000_tls_symbol;
3363 rs6000_tls_get_addr (void)
3365 if (!rs6000_tls_symbol)
3366 rs6000_tls_symbol = init_one_libfunc ("__tls_get_addr");
3368 return rs6000_tls_symbol;
3371 /* Construct the SYMBOL_REF for TLS GOT references. */
3373 static GTY(()) rtx rs6000_got_symbol;
3375 rs6000_got_sym (void)
3377 if (!rs6000_got_symbol)
3379 rs6000_got_symbol = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
3380 SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_LOCAL;
3381 SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_EXTERNAL;
3384 return rs6000_got_symbol;
3387 /* ADDR contains a thread-local SYMBOL_REF. Generate code to compute
3388 this (thread-local) address. */
3391 rs6000_legitimize_tls_address (rtx addr, enum tls_model model)
3395 dest = gen_reg_rtx (Pmode);
3396 if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 16)
3402 tlsreg = gen_rtx_REG (Pmode, 13);
3403 insn = gen_tls_tprel_64 (dest, tlsreg, addr);
3407 tlsreg = gen_rtx_REG (Pmode, 2);
3408 insn = gen_tls_tprel_32 (dest, tlsreg, addr);
3412 else if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 32)
3416 tmp = gen_reg_rtx (Pmode);
3419 tlsreg = gen_rtx_REG (Pmode, 13);
3420 insn = gen_tls_tprel_ha_64 (tmp, tlsreg, addr);
3424 tlsreg = gen_rtx_REG (Pmode, 2);
3425 insn = gen_tls_tprel_ha_32 (tmp, tlsreg, addr);
3429 insn = gen_tls_tprel_lo_64 (dest, tmp, addr);
3431 insn = gen_tls_tprel_lo_32 (dest, tmp, addr);
3436 rtx r3, got, tga, tmp1, tmp2, eqv;
3439 got = gen_rtx_REG (Pmode, TOC_REGISTER);
3443 got = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
3446 rtx gsym = rs6000_got_sym ();
3447 got = gen_reg_rtx (Pmode);
3449 rs6000_emit_move (got, gsym, Pmode);
3453 static int tls_got_labelno = 0;
3454 rtx tempLR, lab, tmp3, mem;
3457 ASM_GENERATE_INTERNAL_LABEL (buf, "LTLS", tls_got_labelno++);
3458 lab = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
3459 tempLR = gen_reg_rtx (Pmode);
3460 tmp1 = gen_reg_rtx (Pmode);
3461 tmp2 = gen_reg_rtx (Pmode);
3462 tmp3 = gen_reg_rtx (Pmode);
3463 mem = gen_const_mem (Pmode, tmp1);
3465 first = emit_insn (gen_load_toc_v4_PIC_1b (tempLR, lab,
3467 emit_move_insn (tmp1, tempLR);
3468 emit_move_insn (tmp2, mem);
3469 emit_insn (gen_addsi3 (tmp3, tmp1, tmp2));
3470 last = emit_move_insn (got, tmp3);
3471 REG_NOTES (last) = gen_rtx_EXPR_LIST (REG_EQUAL, gsym,
3473 REG_NOTES (first) = gen_rtx_INSN_LIST (REG_LIBCALL, last,
3475 REG_NOTES (last) = gen_rtx_INSN_LIST (REG_RETVAL, first,
3481 if (model == TLS_MODEL_GLOBAL_DYNAMIC)
3483 r3 = gen_rtx_REG (Pmode, 3);
3485 insn = gen_tls_gd_64 (r3, got, addr);
3487 insn = gen_tls_gd_32 (r3, got, addr);
3490 tga = gen_rtx_MEM (Pmode, rs6000_tls_get_addr ());
3491 insn = gen_call_value (r3, tga, const0_rtx, const0_rtx);
3492 insn = emit_call_insn (insn);
3493 CONST_OR_PURE_CALL_P (insn) = 1;
3494 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
3495 insn = get_insns ();
3497 emit_libcall_block (insn, dest, r3, addr);
3499 else if (model == TLS_MODEL_LOCAL_DYNAMIC)
3501 r3 = gen_rtx_REG (Pmode, 3);
3503 insn = gen_tls_ld_64 (r3, got);
3505 insn = gen_tls_ld_32 (r3, got);
3508 tga = gen_rtx_MEM (Pmode, rs6000_tls_get_addr ());
3509 insn = gen_call_value (r3, tga, const0_rtx, const0_rtx);
3510 insn = emit_call_insn (insn);
3511 CONST_OR_PURE_CALL_P (insn) = 1;
3512 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
3513 insn = get_insns ();
3515 tmp1 = gen_reg_rtx (Pmode);
3516 eqv = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx),
3518 emit_libcall_block (insn, tmp1, r3, eqv);
3519 if (rs6000_tls_size == 16)
3522 insn = gen_tls_dtprel_64 (dest, tmp1, addr);
3524 insn = gen_tls_dtprel_32 (dest, tmp1, addr);
3526 else if (rs6000_tls_size == 32)
3528 tmp2 = gen_reg_rtx (Pmode);
3530 insn = gen_tls_dtprel_ha_64 (tmp2, tmp1, addr);
3532 insn = gen_tls_dtprel_ha_32 (tmp2, tmp1, addr);
3535 insn = gen_tls_dtprel_lo_64 (dest, tmp2, addr);
3537 insn = gen_tls_dtprel_lo_32 (dest, tmp2, addr);
3541 tmp2 = gen_reg_rtx (Pmode);
3543 insn = gen_tls_got_dtprel_64 (tmp2, got, addr);
3545 insn = gen_tls_got_dtprel_32 (tmp2, got, addr);
3547 insn = gen_rtx_SET (Pmode, dest,
3548 gen_rtx_PLUS (Pmode, tmp2, tmp1));
3554 /* IE, or 64 bit offset LE. */
3555 tmp2 = gen_reg_rtx (Pmode);
3557 insn = gen_tls_got_tprel_64 (tmp2, got, addr);
3559 insn = gen_tls_got_tprel_32 (tmp2, got, addr);
3562 insn = gen_tls_tls_64 (dest, tmp2, addr);
3564 insn = gen_tls_tls_32 (dest, tmp2, addr);
3572 /* Return 1 if X is a SYMBOL_REF for a TLS symbol. This is used in
3573 instruction definitions. */
3576 rs6000_tls_symbol_ref (rtx x, enum machine_mode mode ATTRIBUTE_UNUSED)
3578 return RS6000_SYMBOL_REF_TLS_P (x);
3581 /* Return 1 if X contains a thread-local symbol. */
3584 rs6000_tls_referenced_p (rtx x)
3586 if (! TARGET_HAVE_TLS)
3589 return for_each_rtx (&x, &rs6000_tls_symbol_ref_1, 0);
3592 /* Return 1 if *X is a thread-local symbol. This is the same as
3593 rs6000_tls_symbol_ref except for the type of the unused argument. */
3596 rs6000_tls_symbol_ref_1 (rtx *x, void *data ATTRIBUTE_UNUSED)
3598 return RS6000_SYMBOL_REF_TLS_P (*x);
3601 /* The convention appears to be to define this wherever it is used.
3602 With legitimize_reload_address now defined here, REG_MODE_OK_FOR_BASE_P
3603 is now used here. */
3604 #ifndef REG_MODE_OK_FOR_BASE_P
3605 #define REG_MODE_OK_FOR_BASE_P(REGNO, MODE) REG_OK_FOR_BASE_P (REGNO)
3608 /* Our implementation of LEGITIMIZE_RELOAD_ADDRESS. Returns a value to
3609 replace the input X, or the original X if no replacement is called for.
3610 The output parameter *WIN is 1 if the calling macro should goto WIN,
3613 For RS/6000, we wish to handle large displacements off a base
3614 register by splitting the addend across an addiu/addis and the mem insn.
3615 This cuts number of extra insns needed from 3 to 1.
3617 On Darwin, we use this to generate code for floating point constants.
3618 A movsf_low is generated so we wind up with 2 instructions rather than 3.
3619 The Darwin code is inside #if TARGET_MACHO because only then is
3620 machopic_function_base_name() defined. */
3622 rs6000_legitimize_reload_address (rtx x, enum machine_mode mode,
3623 int opnum, int type, int ind_levels ATTRIBUTE_UNUSED, int *win)
3625 /* We must recognize output that we have already generated ourselves. */
3626 if (GET_CODE (x) == PLUS
3627 && GET_CODE (XEXP (x, 0)) == PLUS
3628 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
3629 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3630 && GET_CODE (XEXP (x, 1)) == CONST_INT)
3632 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
3633 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
3634 opnum, (enum reload_type)type);
3640 if (DEFAULT_ABI == ABI_DARWIN && flag_pic
3641 && GET_CODE (x) == LO_SUM
3642 && GET_CODE (XEXP (x, 0)) == PLUS
3643 && XEXP (XEXP (x, 0), 0) == pic_offset_table_rtx
3644 && GET_CODE (XEXP (XEXP (x, 0), 1)) == HIGH
3645 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 1), 0)) == CONST
3646 && XEXP (XEXP (XEXP (x, 0), 1), 0) == XEXP (x, 1)
3647 && GET_CODE (XEXP (XEXP (x, 1), 0)) == MINUS
3648 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == SYMBOL_REF
3649 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == SYMBOL_REF)
3651 /* Result of previous invocation of this function on Darwin
3652 floating point constant. */
3653 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
3654 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
3655 opnum, (enum reload_type)type);
3660 if (GET_CODE (x) == PLUS
3661 && GET_CODE (XEXP (x, 0)) == REG
3662 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
3663 && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
3664 && GET_CODE (XEXP (x, 1)) == CONST_INT
3665 && !SPE_VECTOR_MODE (mode)
3666 && !ALTIVEC_VECTOR_MODE (mode))
3668 HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
3669 HOST_WIDE_INT low = ((val & 0xffff) ^ 0x8000) - 0x8000;
3671 = (((val - low) & 0xffffffff) ^ 0x80000000) - 0x80000000;
3673 /* Check for 32-bit overflow. */
3674 if (high + low != val)
3680 /* Reload the high part into a base reg; leave the low part
3681 in the mem directly. */
3683 x = gen_rtx_PLUS (GET_MODE (x),
3684 gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0),
3688 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
3689 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
3690 opnum, (enum reload_type)type);
3695 if (GET_CODE (x) == SYMBOL_REF
3696 && DEFAULT_ABI == ABI_DARWIN
3697 && !ALTIVEC_VECTOR_MODE (mode)
3698 && (flag_pic || MACHO_DYNAMIC_NO_PIC_P)
3699 /* Don't do this for TFmode, since the result isn't offsettable. */
3704 rtx offset = gen_rtx_CONST (Pmode,
3705 gen_rtx_MINUS (Pmode, x,
3706 machopic_function_base_sym ()));
3707 x = gen_rtx_LO_SUM (GET_MODE (x),
3708 gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
3709 gen_rtx_HIGH (Pmode, offset)), offset);
3712 x = gen_rtx_LO_SUM (GET_MODE (x),
3713 gen_rtx_HIGH (Pmode, x), x);
3715 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
3716 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
3717 opnum, (enum reload_type)type);
3723 && constant_pool_expr_p (x)
3724 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), mode))
3726 (x) = create_TOC_reference (x);
3734 /* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
3735 that is a valid memory address for an instruction.
3736 The MODE argument is the machine mode for the MEM expression
3737 that wants to use this address.
3739 On the RS/6000, there are four valid address: a SYMBOL_REF that
3740 refers to a constant pool entry of an address (or the sum of it
3741 plus a constant), a short (16-bit signed) constant plus a register,
3742 the sum of two registers, or a register indirect, possibly with an
3743 auto-increment. For DFmode and DImode with a constant plus register,
3744 we must ensure that both words are addressable or PowerPC64 with offset
3747 For modes spanning multiple registers (DFmode in 32-bit GPRs,
3748 32-bit DImode, TImode, TFmode), indexed addressing cannot be used because
3749 adjacent memory cells are accessed by adding word-sized offsets
3750 during assembly output. */
3752 rs6000_legitimate_address (enum machine_mode mode, rtx x, int reg_ok_strict)
3754 if (RS6000_SYMBOL_REF_TLS_P (x))
3756 if (legitimate_indirect_address_p (x, reg_ok_strict))
3758 if ((GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
3759 && !ALTIVEC_VECTOR_MODE (mode)
3760 && !SPE_VECTOR_MODE (mode)
3762 && legitimate_indirect_address_p (XEXP (x, 0), reg_ok_strict))
3764 if (legitimate_small_data_p (mode, x))
3766 if (legitimate_constant_pool_address_p (x))
3768 /* If not REG_OK_STRICT (before reload) let pass any stack offset. */
3770 && GET_CODE (x) == PLUS
3771 && GET_CODE (XEXP (x, 0)) == REG
3772 && (XEXP (x, 0) == virtual_stack_vars_rtx
3773 || XEXP (x, 0) == arg_pointer_rtx)
3774 && GET_CODE (XEXP (x, 1)) == CONST_INT)
3776 if (rs6000_legitimate_offset_address_p (mode, x, reg_ok_strict))
3780 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
3782 || (mode != DFmode && mode != TFmode))
3783 && (TARGET_POWERPC64 || mode != DImode)
3784 && legitimate_indexed_address_p (x, reg_ok_strict))
3786 if (legitimate_lo_sum_address_p (mode, x, reg_ok_strict))
3791 /* Go to LABEL if ADDR (a legitimate address expression)
3792 has an effect that depends on the machine mode it is used for.
3794 On the RS/6000 this is true of all integral offsets (since AltiVec
3795 modes don't allow them) or is a pre-increment or decrement.
3797 ??? Except that due to conceptual problems in offsettable_address_p
3798 we can't really report the problems of integral offsets. So leave
3799 this assuming that the adjustable offset must be valid for the
3800 sub-words of a TFmode operand, which is what we had before. */
3803 rs6000_mode_dependent_address (rtx addr)
3805 switch (GET_CODE (addr))
3808 if (GET_CODE (XEXP (addr, 1)) == CONST_INT)
3810 unsigned HOST_WIDE_INT val = INTVAL (XEXP (addr, 1));
3811 return val + 12 + 0x8000 >= 0x10000;
3820 return TARGET_UPDATE;
3829 /* Return number of consecutive hard regs needed starting at reg REGNO
3830 to hold something of mode MODE.
3831 This is ordinarily the length in words of a value of mode MODE
3832 but can be less for certain modes in special long registers.
3834 For the SPE, GPRs are 64 bits but only 32 bits are visible in
3835 scalar instructions. The upper 32 bits are only available to the
3838 POWER and PowerPC GPRs hold 32 bits worth;
3839 PowerPC64 GPRs and FPRs point register holds 64 bits worth. */
3842 rs6000_hard_regno_nregs (int regno, enum machine_mode mode)
3844 if (FP_REGNO_P (regno))
3845 return (GET_MODE_SIZE (mode) + UNITS_PER_FP_WORD - 1) / UNITS_PER_FP_WORD;
3847 if (SPE_SIMD_REGNO_P (regno) && TARGET_SPE && SPE_VECTOR_MODE (mode))
3848 return (GET_MODE_SIZE (mode) + UNITS_PER_SPE_WORD - 1) / UNITS_PER_SPE_WORD;
3850 if (ALTIVEC_REGNO_P (regno))
3852 (GET_MODE_SIZE (mode) + UNITS_PER_ALTIVEC_WORD - 1) / UNITS_PER_ALTIVEC_WORD;
3854 return (GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
3857 /* Change register usage conditional on target flags. */
3859 rs6000_conditional_register_usage (void)
3863 /* Set MQ register fixed (already call_used) if not POWER
3864 architecture (RIOS1, RIOS2, RSC, and PPC601) so that it will not
3869 /* 64-bit AIX reserves GPR13 for thread-private data. */
3871 fixed_regs[13] = call_used_regs[13]
3872 = call_really_used_regs[13] = 1;
3874 /* Conditionally disable FPRs. */
3875 if (TARGET_SOFT_FLOAT || !TARGET_FPRS)
3876 for (i = 32; i < 64; i++)
3877 fixed_regs[i] = call_used_regs[i]
3878 = call_really_used_regs[i] = 1;
3880 if (DEFAULT_ABI == ABI_V4
3881 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
3883 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
3885 if (DEFAULT_ABI == ABI_V4
3886 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
3888 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
3889 = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
3890 = call_really_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
3892 if (DEFAULT_ABI == ABI_DARWIN
3893 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
3894 global_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
3895 = fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
3896 = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
3897 = call_really_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
3899 if (TARGET_TOC && TARGET_MINIMAL_TOC)
3900 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
3901 = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
3904 global_regs[VSCR_REGNO] = 1;
3908 global_regs[SPEFSCR_REGNO] = 1;
3909 fixed_regs[FIXED_SCRATCH]
3910 = call_used_regs[FIXED_SCRATCH]
3911 = call_really_used_regs[FIXED_SCRATCH] = 1;
3914 if (! TARGET_ALTIVEC)
3916 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
3917 fixed_regs[i] = call_used_regs[i] = call_really_used_regs[i] = 1;
3918 call_really_used_regs[VRSAVE_REGNO] = 1;
3921 if (TARGET_ALTIVEC_ABI)
3922 for (i = FIRST_ALTIVEC_REGNO; i < FIRST_ALTIVEC_REGNO + 20; ++i)
3923 call_used_regs[i] = call_really_used_regs[i] = 1;
3926 /* Try to output insns to set TARGET equal to the constant C if it can
3927 be done in less than N insns. Do all computations in MODE.
3928 Returns the place where the output has been placed if it can be
3929 done and the insns have been emitted. If it would take more than N
3930 insns, zero is returned and no insns and emitted. */
3933 rs6000_emit_set_const (rtx dest, enum machine_mode mode,
3934 rtx source, int n ATTRIBUTE_UNUSED)
3936 rtx result, insn, set;
3937 HOST_WIDE_INT c0, c1;
3939 if (mode == QImode || mode == HImode)
3942 dest = gen_reg_rtx (mode);
3943 emit_insn (gen_rtx_SET (VOIDmode, dest, source));
3946 else if (mode == SImode)
3948 result = no_new_pseudos ? dest : gen_reg_rtx (SImode);
3950 emit_insn (gen_rtx_SET (VOIDmode, result,
3951 GEN_INT (INTVAL (source)
3952 & (~ (HOST_WIDE_INT) 0xffff))));
3953 emit_insn (gen_rtx_SET (VOIDmode, dest,
3954 gen_rtx_IOR (SImode, result,
3955 GEN_INT (INTVAL (source) & 0xffff))));
3958 else if (mode == DImode)
3960 if (GET_CODE (source) == CONST_INT)
3962 c0 = INTVAL (source);
3965 else if (GET_CODE (source) == CONST_DOUBLE)
3967 #if HOST_BITS_PER_WIDE_INT >= 64
3968 c0 = CONST_DOUBLE_LOW (source);
3971 c0 = CONST_DOUBLE_LOW (source);
3972 c1 = CONST_DOUBLE_HIGH (source);
3978 result = rs6000_emit_set_long_const (dest, c0, c1);
3983 insn = get_last_insn ();
3984 set = single_set (insn);
3985 if (! CONSTANT_P (SET_SRC (set)))
3986 set_unique_reg_note (insn, REG_EQUAL, source);
3991 /* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
3992 fall back to a straight forward decomposition. We do this to avoid
3993 exponential run times encountered when looking for longer sequences
3994 with rs6000_emit_set_const. */
3996 rs6000_emit_set_long_const (rtx dest, HOST_WIDE_INT c1, HOST_WIDE_INT c2)
3998 if (!TARGET_POWERPC64)
4000 rtx operand1, operand2;
4002 operand1 = operand_subword_force (dest, WORDS_BIG_ENDIAN == 0,
4004 operand2 = operand_subword_force (dest, WORDS_BIG_ENDIAN != 0,
4006 emit_move_insn (operand1, GEN_INT (c1));
4007 emit_move_insn (operand2, GEN_INT (c2));
4011 HOST_WIDE_INT ud1, ud2, ud3, ud4;
4014 ud2 = (c1 & 0xffff0000) >> 16;
4015 #if HOST_BITS_PER_WIDE_INT >= 64
4019 ud4 = (c2 & 0xffff0000) >> 16;
4021 if ((ud4 == 0xffff && ud3 == 0xffff && ud2 == 0xffff && (ud1 & 0x8000))
4022 || (ud4 == 0 && ud3 == 0 && ud2 == 0 && ! (ud1 & 0x8000)))
4025 emit_move_insn (dest, GEN_INT (((ud1 ^ 0x8000) - 0x8000)));
4027 emit_move_insn (dest, GEN_INT (ud1));
4030 else if ((ud4 == 0xffff && ud3 == 0xffff && (ud2 & 0x8000))
4031 || (ud4 == 0 && ud3 == 0 && ! (ud2 & 0x8000)))
4034 emit_move_insn (dest, GEN_INT (((ud2 << 16) ^ 0x80000000)
4037 emit_move_insn (dest, GEN_INT (ud2 << 16));
4039 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
4041 else if ((ud4 == 0xffff && (ud3 & 0x8000))
4042 || (ud4 == 0 && ! (ud3 & 0x8000)))
4045 emit_move_insn (dest, GEN_INT (((ud3 << 16) ^ 0x80000000)
4048 emit_move_insn (dest, GEN_INT (ud3 << 16));
4051 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud2)));
4052 emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (16)));
4054 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
4059 emit_move_insn (dest, GEN_INT (((ud4 << 16) ^ 0x80000000)
4062 emit_move_insn (dest, GEN_INT (ud4 << 16));
4065 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud3)));
4067 emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (32)));
4069 emit_move_insn (dest, gen_rtx_IOR (DImode, dest,
4070 GEN_INT (ud2 << 16)));
4072 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
4078 /* Helper for the following. Get rid of [r+r] memory refs
4079 in cases where it won't work (TImode, TFmode). */
4082 rs6000_eliminate_indexed_memrefs (rtx operands[2])
4084 if (GET_CODE (operands[0]) == MEM
4085 && GET_CODE (XEXP (operands[0], 0)) != REG
4086 && ! reload_in_progress)
4088 = replace_equiv_address (operands[0],
4089 copy_addr_to_reg (XEXP (operands[0], 0)));
4091 if (GET_CODE (operands[1]) == MEM
4092 && GET_CODE (XEXP (operands[1], 0)) != REG
4093 && ! reload_in_progress)
4095 = replace_equiv_address (operands[1],
4096 copy_addr_to_reg (XEXP (operands[1], 0)));
4099 /* Emit a move from SOURCE to DEST in mode MODE. */
4101 rs6000_emit_move (rtx dest, rtx source, enum machine_mode mode)
4105 operands[1] = source;
4107 /* Sanity checks. Check that we get CONST_DOUBLE only when we should. */
4108 if (GET_CODE (operands[1]) == CONST_DOUBLE
4109 && ! FLOAT_MODE_P (mode)
4110 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
4112 /* FIXME. This should never happen. */
4113 /* Since it seems that it does, do the safe thing and convert
4115 operands[1] = gen_int_mode (CONST_DOUBLE_LOW (operands[1]), mode);
4117 if (GET_CODE (operands[1]) == CONST_DOUBLE
4118 && ! FLOAT_MODE_P (mode)
4119 && ((CONST_DOUBLE_HIGH (operands[1]) == 0
4120 && CONST_DOUBLE_LOW (operands[1]) >= 0)
4121 || (CONST_DOUBLE_HIGH (operands[1]) == -1
4122 && CONST_DOUBLE_LOW (operands[1]) < 0)))
4125 /* Check if GCC is setting up a block move that will end up using FP
4126 registers as temporaries. We must make sure this is acceptable. */
4127 if (GET_CODE (operands[0]) == MEM
4128 && GET_CODE (operands[1]) == MEM
4130 && (SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[0]))
4131 || SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[1])))
4132 && ! (SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[0]) > 32
4133 ? 32 : MEM_ALIGN (operands[0])))
4134 || SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[1]) > 32
4136 : MEM_ALIGN (operands[1]))))
4137 && ! MEM_VOLATILE_P (operands [0])
4138 && ! MEM_VOLATILE_P (operands [1]))
4140 emit_move_insn (adjust_address (operands[0], SImode, 0),
4141 adjust_address (operands[1], SImode, 0));
4142 emit_move_insn (adjust_address (operands[0], SImode, 4),
4143 adjust_address (operands[1], SImode, 4));
4147 if (!no_new_pseudos)
4149 if (GET_CODE (operands[1]) == MEM && optimize > 0
4150 && (mode == QImode || mode == HImode || mode == SImode)
4151 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode))
4153 rtx reg = gen_reg_rtx (word_mode);
4155 emit_insn (gen_rtx_SET (word_mode, reg,
4156 gen_rtx_ZERO_EXTEND (word_mode,
4158 operands[1] = gen_lowpart (mode, reg);
4160 if (GET_CODE (operands[0]) != REG)
4161 operands[1] = force_reg (mode, operands[1]);
4164 if (mode == SFmode && ! TARGET_POWERPC
4165 && TARGET_HARD_FLOAT && TARGET_FPRS
4166 && GET_CODE (operands[0]) == MEM)
4170 if (reload_in_progress || reload_completed)
4171 regnum = true_regnum (operands[1]);
4172 else if (GET_CODE (operands[1]) == REG)
4173 regnum = REGNO (operands[1]);
4177 /* If operands[1] is a register, on POWER it may have
4178 double-precision data in it, so truncate it to single
4180 if (FP_REGNO_P (regnum) || regnum >= FIRST_PSEUDO_REGISTER)
4183 newreg = (no_new_pseudos ? operands[1] : gen_reg_rtx (mode));
4184 emit_insn (gen_aux_truncdfsf2 (newreg, operands[1]));
4185 operands[1] = newreg;
4189 /* Recognize the case where operand[1] is a reference to thread-local
4190 data and load its address to a register. */
4191 if (GET_CODE (operands[1]) == SYMBOL_REF)
4193 enum tls_model model = SYMBOL_REF_TLS_MODEL (operands[1]);
4195 operands[1] = rs6000_legitimize_tls_address (operands[1], model);
4198 /* Handle the case where reload calls us with an invalid address. */
4199 if (reload_in_progress && mode == Pmode
4200 && (! general_operand (operands[1], mode)
4201 || ! nonimmediate_operand (operands[0], mode)))
4204 /* 128-bit constant floating-point values on Darwin should really be
4205 loaded as two parts. */
4206 if ((DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_DARWIN)
4207 && TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_LONG_DOUBLE_128
4208 && mode == TFmode && GET_CODE (operands[1]) == CONST_DOUBLE)
4210 /* DImode is used, not DFmode, because simplify_gen_subreg doesn't
4211 know how to get a DFmode SUBREG of a TFmode. */
4212 rs6000_emit_move (simplify_gen_subreg (DImode, operands[0], mode, 0),
4213 simplify_gen_subreg (DImode, operands[1], mode, 0),
4215 rs6000_emit_move (simplify_gen_subreg (DImode, operands[0], mode,
4216 GET_MODE_SIZE (DImode)),
4217 simplify_gen_subreg (DImode, operands[1], mode,
4218 GET_MODE_SIZE (DImode)),
4223 /* FIXME: In the long term, this switch statement should go away
4224 and be replaced by a sequence of tests based on things like
4230 if (CONSTANT_P (operands[1])
4231 && GET_CODE (operands[1]) != CONST_INT)
4232 operands[1] = force_const_mem (mode, operands[1]);
4236 rs6000_eliminate_indexed_memrefs (operands);
4241 if (CONSTANT_P (operands[1])
4242 && ! easy_fp_constant (operands[1], mode))
4243 operands[1] = force_const_mem (mode, operands[1]);
4254 if (CONSTANT_P (operands[1])
4255 && !easy_vector_constant (operands[1], mode))
4256 operands[1] = force_const_mem (mode, operands[1]);
4261 /* Use default pattern for address of ELF small data */
4264 && DEFAULT_ABI == ABI_V4
4265 && (GET_CODE (operands[1]) == SYMBOL_REF
4266 || GET_CODE (operands[1]) == CONST)
4267 && small_data_operand (operands[1], mode))
4269 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
4273 if (DEFAULT_ABI == ABI_V4
4274 && mode == Pmode && mode == SImode
4275 && flag_pic == 1 && got_operand (operands[1], mode))
4277 emit_insn (gen_movsi_got (operands[0], operands[1]));
4281 if ((TARGET_ELF || DEFAULT_ABI == ABI_DARWIN)
4285 && CONSTANT_P (operands[1])
4286 && GET_CODE (operands[1]) != HIGH
4287 && GET_CODE (operands[1]) != CONST_INT)
4289 rtx target = (no_new_pseudos ? operands[0] : gen_reg_rtx (mode));
4291 /* If this is a function address on -mcall-aixdesc,
4292 convert it to the address of the descriptor. */
4293 if (DEFAULT_ABI == ABI_AIX
4294 && GET_CODE (operands[1]) == SYMBOL_REF
4295 && XSTR (operands[1], 0)[0] == '.')
4297 const char *name = XSTR (operands[1], 0);
4299 while (*name == '.')
4301 new_ref = gen_rtx_SYMBOL_REF (Pmode, name);
4302 CONSTANT_POOL_ADDRESS_P (new_ref)
4303 = CONSTANT_POOL_ADDRESS_P (operands[1]);
4304 SYMBOL_REF_FLAGS (new_ref) = SYMBOL_REF_FLAGS (operands[1]);
4305 SYMBOL_REF_USED (new_ref) = SYMBOL_REF_USED (operands[1]);
4306 SYMBOL_REF_DECL (new_ref) = SYMBOL_REF_DECL (operands[1]);
4307 operands[1] = new_ref;
4310 if (DEFAULT_ABI == ABI_DARWIN)
4313 if (MACHO_DYNAMIC_NO_PIC_P)
4315 /* Take care of any required data indirection. */
4316 operands[1] = rs6000_machopic_legitimize_pic_address (
4317 operands[1], mode, operands[0]);
4318 if (operands[0] != operands[1])
4319 emit_insn (gen_rtx_SET (VOIDmode,
4320 operands[0], operands[1]));
4326 emit_insn (gen_macho_high_di (target, operands[1]));
4327 emit_insn (gen_macho_low_di (operands[0], target, operands[1]));
4331 emit_insn (gen_macho_high (target, operands[1]));
4332 emit_insn (gen_macho_low (operands[0], target, operands[1]));
4337 emit_insn (gen_elf_high (target, operands[1]));
4338 emit_insn (gen_elf_low (operands[0], target, operands[1]));
4342 /* If this is a SYMBOL_REF that refers to a constant pool entry,
4343 and we have put it in the TOC, we just need to make a TOC-relative
4346 && GET_CODE (operands[1]) == SYMBOL_REF
4347 && constant_pool_expr_p (operands[1])
4348 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands[1]),
4349 get_pool_mode (operands[1])))
4351 operands[1] = create_TOC_reference (operands[1]);
4353 else if (mode == Pmode
4354 && CONSTANT_P (operands[1])
4355 && ((GET_CODE (operands[1]) != CONST_INT
4356 && ! easy_fp_constant (operands[1], mode))
4357 || (GET_CODE (operands[1]) == CONST_INT
4358 && num_insns_constant (operands[1], mode) > 2)
4359 || (GET_CODE (operands[0]) == REG
4360 && FP_REGNO_P (REGNO (operands[0]))))
4361 && GET_CODE (operands[1]) != HIGH
4362 && ! legitimate_constant_pool_address_p (operands[1])
4363 && ! toc_relative_expr_p (operands[1]))
4365 /* Emit a USE operation so that the constant isn't deleted if
4366 expensive optimizations are turned on because nobody
4367 references it. This should only be done for operands that
4368 contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
4369 This should not be done for operands that contain LABEL_REFs.
4370 For now, we just handle the obvious case. */
4371 if (GET_CODE (operands[1]) != LABEL_REF)
4372 emit_insn (gen_rtx_USE (VOIDmode, operands[1]));
4375 /* Darwin uses a special PIC legitimizer. */
4376 if (DEFAULT_ABI == ABI_DARWIN && MACHOPIC_INDIRECT)
4379 rs6000_machopic_legitimize_pic_address (operands[1], mode,
4381 if (operands[0] != operands[1])
4382 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
4387 /* If we are to limit the number of things we put in the TOC and
4388 this is a symbol plus a constant we can add in one insn,
4389 just put the symbol in the TOC and add the constant. Don't do
4390 this if reload is in progress. */
4391 if (GET_CODE (operands[1]) == CONST
4392 && TARGET_NO_SUM_IN_TOC && ! reload_in_progress
4393 && GET_CODE (XEXP (operands[1], 0)) == PLUS
4394 && add_operand (XEXP (XEXP (operands[1], 0), 1), mode)
4395 && (GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == LABEL_REF
4396 || GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF)
4397 && ! side_effects_p (operands[0]))
4400 force_const_mem (mode, XEXP (XEXP (operands[1], 0), 0));
4401 rtx other = XEXP (XEXP (operands[1], 0), 1);
4403 sym = force_reg (mode, sym);
4405 emit_insn (gen_addsi3 (operands[0], sym, other));
4407 emit_insn (gen_adddi3 (operands[0], sym, other));
4411 operands[1] = force_const_mem (mode, operands[1]);
4414 && constant_pool_expr_p (XEXP (operands[1], 0))
4415 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
4416 get_pool_constant (XEXP (operands[1], 0)),
4417 get_pool_mode (XEXP (operands[1], 0))))
4420 = gen_const_mem (mode,
4421 create_TOC_reference (XEXP (operands[1], 0)));
4422 set_mem_alias_set (operands[1], get_TOC_alias_set ());
4428 rs6000_eliminate_indexed_memrefs (operands);
4432 emit_insn (gen_rtx_PARALLEL (VOIDmode,
4434 gen_rtx_SET (VOIDmode,
4435 operands[0], operands[1]),
4436 gen_rtx_CLOBBER (VOIDmode,
4437 gen_rtx_SCRATCH (SImode)))));
4446 /* Above, we may have called force_const_mem which may have returned
4447 an invalid address. If we can, fix this up; otherwise, reload will
4448 have to deal with it. */
4449 if (GET_CODE (operands[1]) == MEM && ! reload_in_progress)
4450 operands[1] = validize_mem (operands[1]);
4453 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
4456 /* Nonzero if we can use a floating-point register to pass this arg. */
4457 #define USE_FP_FOR_ARG_P(CUM,MODE,TYPE) \
4458 (GET_MODE_CLASS (MODE) == MODE_FLOAT \
4459 && (CUM)->fregno <= FP_ARG_MAX_REG \
4460 && TARGET_HARD_FLOAT && TARGET_FPRS)
4462 /* Nonzero if we can use an AltiVec register to pass this arg. */
4463 #define USE_ALTIVEC_FOR_ARG_P(CUM,MODE,TYPE,NAMED) \
4464 (ALTIVEC_VECTOR_MODE (MODE) \
4465 && (CUM)->vregno <= ALTIVEC_ARG_MAX_REG \
4466 && TARGET_ALTIVEC_ABI \
4469 /* Return a nonzero value to say to return the function value in
4470 memory, just as large structures are always returned. TYPE will be
4471 the data type of the value, and FNTYPE will be the type of the
4472 function doing the returning, or @code{NULL} for libcalls.
4474 The AIX ABI for the RS/6000 specifies that all structures are
4475 returned in memory. The Darwin ABI does the same. The SVR4 ABI
4476 specifies that structures <= 8 bytes are returned in r3/r4, but a
4477 draft put them in memory, and GCC used to implement the draft
4478 instead of the final standard. Therefore, TARGET_AIX_STRUCT_RET
4479 controls this instead of DEFAULT_ABI; V.4 targets needing backward
4480 compatibility can change DRAFT_V4_STRUCT_RET to override the
4481 default, and -m switches get the final word. See
4482 rs6000_override_options for more details.
4484 The PPC32 SVR4 ABI uses IEEE double extended for long double, if 128-bit
4485 long double support is enabled. These values are returned in memory.
4487 int_size_in_bytes returns -1 for variable size objects, which go in
4488 memory always. The cast to unsigned makes -1 > 8. */
4491 rs6000_return_in_memory (tree type, tree fntype ATTRIBUTE_UNUSED)
4493 if (AGGREGATE_TYPE_P (type)
4494 && (TARGET_AIX_STRUCT_RET
4495 || (unsigned HOST_WIDE_INT) int_size_in_bytes (type) > 8))
4497 if (DEFAULT_ABI == ABI_V4 && TYPE_MODE (type) == TFmode)
4502 /* Initialize a variable CUM of type CUMULATIVE_ARGS
4503 for a call to a function whose data type is FNTYPE.
4504 For a library call, FNTYPE is 0.
4506 For incoming args we set the number of arguments in the prototype large
4507 so we never return a PARALLEL. */
4510 init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype,
4511 rtx libname ATTRIBUTE_UNUSED, int incoming,
4512 int libcall, int n_named_args)
4514 static CUMULATIVE_ARGS zero_cumulative;
4516 *cum = zero_cumulative;
4518 cum->fregno = FP_ARG_MIN_REG;
4519 cum->vregno = ALTIVEC_ARG_MIN_REG;
4520 cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
4521 cum->call_cookie = ((DEFAULT_ABI == ABI_V4 && libcall)
4522 ? CALL_LIBCALL : CALL_NORMAL);
4523 cum->sysv_gregno = GP_ARG_MIN_REG;
4524 cum->stdarg = fntype
4525 && (TYPE_ARG_TYPES (fntype) != 0
4526 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
4527 != void_type_node));
4529 cum->nargs_prototype = 0;
4530 if (incoming || cum->prototype)
4531 cum->nargs_prototype = n_named_args;
4533 /* Check for a longcall attribute. */
4535 && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype))
4536 && !lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype)))
4537 cum->call_cookie = CALL_LONG;
4539 if (TARGET_DEBUG_ARG)
4541 fprintf (stderr, "\ninit_cumulative_args:");
4544 tree ret_type = TREE_TYPE (fntype);
4545 fprintf (stderr, " ret code = %s,",
4546 tree_code_name[ (int)TREE_CODE (ret_type) ]);
4549 if (cum->call_cookie & CALL_LONG)
4550 fprintf (stderr, " longcall,");
4552 fprintf (stderr, " proto = %d, nargs = %d\n",
4553 cum->prototype, cum->nargs_prototype);
4558 && TARGET_ALTIVEC_ABI
4559 && ALTIVEC_VECTOR_MODE (TYPE_MODE (TREE_TYPE (fntype))))
4561 error ("Cannot return value in vector register because"
4562 " altivec instructions are disabled, use -maltivec"
4563 " to enable them.");
4567 /* Return true if TYPE must be passed on the stack and not in registers. */
4570 rs6000_must_pass_in_stack (enum machine_mode mode, tree type)
4572 if (DEFAULT_ABI == ABI_AIX || TARGET_64BIT)
4573 return must_pass_in_stack_var_size (mode, type);
4575 return must_pass_in_stack_var_size_or_pad (mode, type);
4578 /* If defined, a C expression which determines whether, and in which
4579 direction, to pad out an argument with extra space. The value
4580 should be of type `enum direction': either `upward' to pad above
4581 the argument, `downward' to pad below, or `none' to inhibit
4584 For the AIX ABI structs are always stored left shifted in their
4588 function_arg_padding (enum machine_mode mode, tree type)
4590 #ifndef AGGREGATE_PADDING_FIXED
4591 #define AGGREGATE_PADDING_FIXED 0
4593 #ifndef AGGREGATES_PAD_UPWARD_ALWAYS
4594 #define AGGREGATES_PAD_UPWARD_ALWAYS 0
4597 if (!AGGREGATE_PADDING_FIXED)
4599 /* GCC used to pass structures of the same size as integer types as
4600 if they were in fact integers, ignoring FUNCTION_ARG_PADDING.
4601 ie. Structures of size 1 or 2 (or 4 when TARGET_64BIT) were
4602 passed padded downward, except that -mstrict-align further
4603 muddied the water in that multi-component structures of 2 and 4
4604 bytes in size were passed padded upward.
4606 The following arranges for best compatibility with previous
4607 versions of gcc, but removes the -mstrict-align dependency. */
4608 if (BYTES_BIG_ENDIAN)
4610 HOST_WIDE_INT size = 0;
4612 if (mode == BLKmode)
4614 if (type && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
4615 size = int_size_in_bytes (type);
4618 size = GET_MODE_SIZE (mode);
4620 if (size == 1 || size == 2 || size == 4)
4626 if (AGGREGATES_PAD_UPWARD_ALWAYS)
4628 if (type != 0 && AGGREGATE_TYPE_P (type))
4632 /* Fall back to the default. */
4633 return DEFAULT_FUNCTION_ARG_PADDING (mode, type);
4636 /* If defined, a C expression that gives the alignment boundary, in bits,
4637 of an argument with the specified mode and type. If it is not defined,
4638 PARM_BOUNDARY is used for all arguments.
4640 V.4 wants long longs to be double word aligned. */
4643 function_arg_boundary (enum machine_mode mode, tree type ATTRIBUTE_UNUSED)
4645 if (DEFAULT_ABI == ABI_V4 && GET_MODE_SIZE (mode) == 8)
4647 else if (SPE_VECTOR_MODE (mode))
4649 else if (ALTIVEC_VECTOR_MODE (mode))
4652 return PARM_BOUNDARY;
4655 /* Compute the size (in words) of a function argument. */
4657 static unsigned long
4658 rs6000_arg_size (enum machine_mode mode, tree type)
4662 if (mode != BLKmode)
4663 size = GET_MODE_SIZE (mode);
4665 size = int_size_in_bytes (type);
4668 return (size + 3) >> 2;
4670 return (size + 7) >> 3;
4673 /* Update the data in CUM to advance over an argument
4674 of mode MODE and data type TYPE.
4675 (TYPE is null for libcalls where that information may not be available.)
4677 Note that for args passed by reference, function_arg will be called
4678 with MODE and TYPE set to that of the pointer to the arg, not the arg
4682 function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
4683 tree type, int named)
4685 cum->nargs_prototype--;
4687 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
4691 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named))
4694 if (!TARGET_ALTIVEC)
4695 error ("Cannot pass argument in vector register because"
4696 " altivec instructions are disabled, use -maltivec"
4697 " to enable them.");
4699 /* PowerPC64 Linux and AIX allocate GPRs for a vector argument
4700 even if it is going to be passed in a vector register.
4701 Darwin does the same for variable-argument functions. */
4702 if ((DEFAULT_ABI == ABI_AIX && TARGET_64BIT)
4703 || (cum->stdarg && DEFAULT_ABI != ABI_V4))
4713 /* Vector parameters must be 16-byte aligned. This places
4714 them at 2 mod 4 in terms of words in 32-bit mode, since
4715 the parameter save area starts at offset 24 from the
4716 stack. In 64-bit mode, they just have to start on an
4717 even word, since the parameter save area is 16-byte
4718 aligned. Space for GPRs is reserved even if the argument
4719 will be passed in memory. */
4721 align = (2 - cum->words) & 3;
4723 align = cum->words & 1;
4724 cum->words += align + rs6000_arg_size (mode, type);
4726 if (TARGET_DEBUG_ARG)
4728 fprintf (stderr, "function_adv: words = %2d, align=%d, ",
4730 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s\n",
4731 cum->nargs_prototype, cum->prototype,
4732 GET_MODE_NAME (mode));
4736 else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode)
4738 && cum->sysv_gregno <= GP_ARG_MAX_REG)
4740 else if (DEFAULT_ABI == ABI_V4)
4742 if (TARGET_HARD_FLOAT && TARGET_FPRS
4743 && (mode == SFmode || mode == DFmode))
4745 if (cum->fregno <= FP_ARG_V4_MAX_REG)
4750 cum->words += cum->words & 1;
4751 cum->words += rs6000_arg_size (mode, type);
4756 int n_words = rs6000_arg_size (mode, type);
4757 int gregno = cum->sysv_gregno;
4759 /* Long long and SPE vectors are put in (r3,r4), (r5,r6),
4760 (r7,r8) or (r9,r10). As does any other 2 word item such
4761 as complex int due to a historical mistake. */
4763 gregno += (1 - gregno) & 1;
4765 /* Multi-reg args are not split between registers and stack. */
4766 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
4768 /* Long long and SPE vectors are aligned on the stack.
4769 So are other 2 word items such as complex int due to
4770 a historical mistake. */
4772 cum->words += cum->words & 1;
4773 cum->words += n_words;
4776 /* Note: continuing to accumulate gregno past when we've started
4777 spilling to the stack indicates the fact that we've started
4778 spilling to the stack to expand_builtin_saveregs. */
4779 cum->sysv_gregno = gregno + n_words;
4782 if (TARGET_DEBUG_ARG)
4784 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
4785 cum->words, cum->fregno);
4786 fprintf (stderr, "gregno = %2d, nargs = %4d, proto = %d, ",
4787 cum->sysv_gregno, cum->nargs_prototype, cum->prototype);
4788 fprintf (stderr, "mode = %4s, named = %d\n",
4789 GET_MODE_NAME (mode), named);
4794 int n_words = rs6000_arg_size (mode, type);
4795 int align = function_arg_boundary (mode, type) / PARM_BOUNDARY - 1;
4797 /* The simple alignment calculation here works because
4798 function_arg_boundary / PARM_BOUNDARY will only be 1 or 2.
4799 If we ever want to handle alignments larger than 8 bytes for
4800 32-bit or 16 bytes for 64-bit, then we'll need to take into
4801 account the offset to the start of the parm save area. */
4802 align &= cum->words;
4803 cum->words += align + n_words;
4805 if (GET_MODE_CLASS (mode) == MODE_FLOAT
4806 && TARGET_HARD_FLOAT && TARGET_FPRS)
4807 cum->fregno += (GET_MODE_SIZE (mode) + 7) >> 3;
4809 if (TARGET_DEBUG_ARG)
4811 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
4812 cum->words, cum->fregno);
4813 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s, ",
4814 cum->nargs_prototype, cum->prototype, GET_MODE_NAME (mode));
4815 fprintf (stderr, "named = %d, align = %d\n", named, align);
4820 /* Determine where to put a SIMD argument on the SPE. */
4823 rs6000_spe_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
4828 int gregno = cum->sysv_gregno;
4829 int n_words = rs6000_arg_size (mode, type);
4831 /* SPE vectors are put in odd registers. */
4832 if (n_words == 2 && (gregno & 1) == 0)
4835 if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
4838 enum machine_mode m = SImode;
4840 r1 = gen_rtx_REG (m, gregno);
4841 r1 = gen_rtx_EXPR_LIST (m, r1, const0_rtx);
4842 r2 = gen_rtx_REG (m, gregno + 1);
4843 r2 = gen_rtx_EXPR_LIST (m, r2, GEN_INT (4));
4844 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
4851 if (cum->sysv_gregno <= GP_ARG_MAX_REG)
4852 return gen_rtx_REG (mode, cum->sysv_gregno);
4858 /* Determine where to place an argument in 64-bit mode with 32-bit ABI. */
4861 rs6000_mixed_function_arg (enum machine_mode mode, tree type, int align_words)
4865 rtx rvec[GP_ARG_NUM_REG + 1];
4867 if (align_words >= GP_ARG_NUM_REG)
4870 n_units = rs6000_arg_size (mode, type);
4872 /* Optimize the simple case where the arg fits in one gpr, except in
4873 the case of BLKmode due to assign_parms assuming that registers are
4874 BITS_PER_WORD wide. */
4876 || (n_units == 1 && mode != BLKmode))
4877 return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
4880 if (align_words + n_units > GP_ARG_NUM_REG)
4881 /* Not all of the arg fits in gprs. Say that it goes in memory too,
4882 using a magic NULL_RTX component.
4883 FIXME: This is not strictly correct. Only some of the arg
4884 belongs in memory, not all of it. However, there isn't any way
4885 to do this currently, apart from building rtx descriptions for
4886 the pieces of memory we want stored. Due to bugs in the generic
4887 code we can't use the normal function_arg_partial_nregs scheme
4888 with the PARALLEL arg description we emit here.
4889 In any case, the code to store the whole arg to memory is often
4890 more efficient than code to store pieces, and we know that space
4891 is available in the right place for the whole arg. */
4892 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
4897 rtx r = gen_rtx_REG (SImode, GP_ARG_MIN_REG + align_words);
4898 rtx off = GEN_INT (i++ * 4);
4899 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, off);
4901 while (++align_words < GP_ARG_NUM_REG && --n_units != 0);
4903 return gen_rtx_PARALLEL (mode, gen_rtvec_v (k, rvec));
4906 /* Determine where to put an argument to a function.
4907 Value is zero to push the argument on the stack,
4908 or a hard register in which to store the argument.
4910 MODE is the argument's machine mode.
4911 TYPE is the data type of the argument (as a tree).
4912 This is null for libcalls where that information may
4914 CUM is a variable of type CUMULATIVE_ARGS which gives info about
4915 the preceding args and about the function being called.
4916 NAMED is nonzero if this argument is a named parameter
4917 (otherwise it is an extra parameter matching an ellipsis).
4919 On RS/6000 the first eight words of non-FP are normally in registers
4920 and the rest are pushed. Under AIX, the first 13 FP args are in registers.
4921 Under V.4, the first 8 FP args are in registers.
4923 If this is floating-point and no prototype is specified, we use
4924 both an FP and integer register (or possibly FP reg and stack). Library
4925 functions (when CALL_LIBCALL is set) always have the proper types for args,
4926 so we can pass the FP value just in one register. emit_library_function
4927 doesn't support PARALLEL anyway.
4929 Note that for args passed by reference, function_arg will be called
4930 with MODE and TYPE set to that of the pointer to the arg, not the arg
4934 function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
4935 tree type, int named)
4937 enum rs6000_abi abi = DEFAULT_ABI;
4939 /* Return a marker to indicate whether CR1 needs to set or clear the
4940 bit that V.4 uses to say fp args were passed in registers.
4941 Assume that we don't need the marker for software floating point,
4942 or compiler generated library calls. */
4943 if (mode == VOIDmode)
4946 && cum->nargs_prototype < 0
4947 && (cum->call_cookie & CALL_LIBCALL) == 0
4948 && (cum->prototype || TARGET_NO_PROTOTYPE))
4950 /* For the SPE, we need to crxor CR6 always. */
4952 return GEN_INT (cum->call_cookie | CALL_V4_SET_FP_ARGS);
4953 else if (TARGET_HARD_FLOAT && TARGET_FPRS)
4954 return GEN_INT (cum->call_cookie
4955 | ((cum->fregno == FP_ARG_MIN_REG)
4956 ? CALL_V4_SET_FP_ARGS
4957 : CALL_V4_CLEAR_FP_ARGS));
4960 return GEN_INT (cum->call_cookie);
4963 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named))
4964 if (TARGET_64BIT && ! cum->prototype)
4966 /* Vector parameters get passed in vector register
4967 and also in GPRs or memory, in absence of prototype. */
4970 align_words = (cum->words + 1) & ~1;
4972 if (align_words >= GP_ARG_NUM_REG)
4978 slot = gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
4980 return gen_rtx_PARALLEL (mode,
4982 gen_rtx_EXPR_LIST (VOIDmode,
4984 gen_rtx_EXPR_LIST (VOIDmode,
4985 gen_rtx_REG (mode, cum->vregno),
4989 return gen_rtx_REG (mode, cum->vregno);
4990 else if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
4992 if (named || abi == ABI_V4)
4996 /* Vector parameters to varargs functions under AIX or Darwin
4997 get passed in memory and possibly also in GPRs. */
4998 int align, align_words, n_words;
4999 enum machine_mode part_mode;
5001 /* Vector parameters must be 16-byte aligned. This places them at
5002 2 mod 4 in terms of words in 32-bit mode, since the parameter
5003 save area starts at offset 24 from the stack. In 64-bit mode,
5004 they just have to start on an even word, since the parameter
5005 save area is 16-byte aligned. */
5007 align = (2 - cum->words) & 3;
5009 align = cum->words & 1;
5010 align_words = cum->words + align;
5012 /* Out of registers? Memory, then. */
5013 if (align_words >= GP_ARG_NUM_REG)
5016 if (TARGET_32BIT && TARGET_POWERPC64)
5017 return rs6000_mixed_function_arg (mode, type, align_words);
5019 /* The vector value goes in GPRs. Only the part of the
5020 value in GPRs is reported here. */
5022 n_words = rs6000_arg_size (mode, type);
5023 if (align_words + n_words > GP_ARG_NUM_REG)
5024 /* Fortunately, there are only two possibilities, the value
5025 is either wholly in GPRs or half in GPRs and half not. */
5028 return gen_rtx_REG (part_mode, GP_ARG_MIN_REG + align_words);
5031 else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode))
5032 return rs6000_spe_function_arg (cum, mode, type);
5033 else if (abi == ABI_V4)
5035 if (TARGET_HARD_FLOAT && TARGET_FPRS
5036 && (mode == SFmode || mode == DFmode))
5038 if (cum->fregno <= FP_ARG_V4_MAX_REG)
5039 return gen_rtx_REG (mode, cum->fregno);
5045 int n_words = rs6000_arg_size (mode, type);
5046 int gregno = cum->sysv_gregno;
5048 /* Long long and SPE vectors are put in (r3,r4), (r5,r6),
5049 (r7,r8) or (r9,r10). As does any other 2 word item such
5050 as complex int due to a historical mistake. */
5052 gregno += (1 - gregno) & 1;
5054 /* Multi-reg args are not split between registers and stack. */
5055 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
5058 if (TARGET_32BIT && TARGET_POWERPC64)
5059 return rs6000_mixed_function_arg (mode, type,
5060 gregno - GP_ARG_MIN_REG);
5061 return gen_rtx_REG (mode, gregno);
5066 int align = function_arg_boundary (mode, type) / PARM_BOUNDARY - 1;
5067 int align_words = cum->words + (cum->words & align);
5069 if (USE_FP_FOR_ARG_P (cum, mode, type))
5071 rtx rvec[GP_ARG_NUM_REG + 1];
5075 enum machine_mode fmode = mode;
5076 unsigned long n_fpreg = (GET_MODE_SIZE (mode) + 7) >> 3;
5078 if (cum->fregno + n_fpreg > FP_ARG_MAX_REG + 1)
5080 /* Currently, we only ever need one reg here because complex
5081 doubles are split. */
5082 if (cum->fregno != FP_ARG_MAX_REG || fmode != TFmode)
5085 /* Long double split over regs and memory. */
5089 /* Do we also need to pass this arg in the parameter save
5092 && (cum->nargs_prototype <= 0
5093 || (DEFAULT_ABI == ABI_AIX
5095 && align_words >= GP_ARG_NUM_REG)));
5097 if (!needs_psave && mode == fmode)
5098 return gen_rtx_REG (fmode, cum->fregno);
5103 /* Describe the part that goes in gprs or the stack.
5104 This piece must come first, before the fprs. */
5105 if (align_words < GP_ARG_NUM_REG)
5107 unsigned long n_words = rs6000_arg_size (mode, type);
5109 if (align_words + n_words > GP_ARG_NUM_REG
5110 || (TARGET_32BIT && TARGET_POWERPC64))
5112 /* If this is partially on the stack, then we only
5113 include the portion actually in registers here. */
5114 enum machine_mode rmode = TARGET_32BIT ? SImode : DImode;
5118 r = gen_rtx_REG (rmode,
5119 GP_ARG_MIN_REG + align_words);
5120 off = GEN_INT (k * GET_MODE_SIZE (rmode));
5121 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, off);
5123 while (++align_words < GP_ARG_NUM_REG && --n_words != 0);
5127 /* The whole arg fits in gprs. */
5128 r = gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
5129 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, const0_rtx);
5133 /* It's entirely in memory. */
5134 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
5137 /* Describe where this piece goes in the fprs. */
5138 r = gen_rtx_REG (fmode, cum->fregno);
5139 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, const0_rtx);
5141 return gen_rtx_PARALLEL (mode, gen_rtvec_v (k, rvec));
5143 else if (align_words < GP_ARG_NUM_REG)
5145 if (TARGET_32BIT && TARGET_POWERPC64)
5146 return rs6000_mixed_function_arg (mode, type, align_words);
5148 return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
5155 /* For an arg passed partly in registers and partly in memory, this is
5156 the number of registers used. For args passed entirely in registers
5157 or entirely in memory, zero. When an arg is described by a PARALLEL,
5158 perhaps using more than one register type, this function returns the
5159 number of registers used by the first element of the PARALLEL. */
5162 function_arg_partial_nregs (CUMULATIVE_ARGS *cum, enum machine_mode mode,
5163 tree type, int named)
5170 if (DEFAULT_ABI == ABI_V4)
5173 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named)
5174 && cum->nargs_prototype >= 0)
5177 align = function_arg_boundary (mode, type) / PARM_BOUNDARY - 1;
5178 parm_offset = TARGET_32BIT ? 2 : 0;
5179 align_words = cum->words + ((parm_offset - cum->words) & align);
5181 if (USE_FP_FOR_ARG_P (cum, mode, type)
5182 /* If we are passing this arg in gprs as well, then this function
5183 should return the number of gprs (or memory) partially passed,
5184 *not* the number of fprs. */
5186 && (cum->nargs_prototype <= 0
5187 || (DEFAULT_ABI == ABI_AIX
5189 && align_words >= GP_ARG_NUM_REG))))
5191 if (cum->fregno + ((GET_MODE_SIZE (mode) + 7) >> 3) > FP_ARG_MAX_REG + 1)
5192 ret = FP_ARG_MAX_REG + 1 - cum->fregno;
5193 else if (cum->nargs_prototype >= 0)
5197 if (align_words < GP_ARG_NUM_REG
5198 && GP_ARG_NUM_REG < align_words + rs6000_arg_size (mode, type))
5199 ret = GP_ARG_NUM_REG - align_words;
5201 if (ret != 0 && TARGET_DEBUG_ARG)
5202 fprintf (stderr, "function_arg_partial_nregs: %d\n", ret);
5207 /* A C expression that indicates when an argument must be passed by
5208 reference. If nonzero for an argument, a copy of that argument is
5209 made in memory and a pointer to the argument is passed instead of
5210 the argument itself. The pointer is passed in whatever way is
5211 appropriate for passing a pointer to that type.
5213 Under V.4, aggregates and long double are passed by reference.
5215 As an extension to all 32-bit ABIs, AltiVec vectors are passed by
5216 reference unless the AltiVec vector extension ABI is in force.
5218 As an extension to all ABIs, variable sized types are passed by
5222 rs6000_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
5223 enum machine_mode mode ATTRIBUTE_UNUSED,
5224 tree type, bool named ATTRIBUTE_UNUSED)
5226 if ((DEFAULT_ABI == ABI_V4
5227 && ((type && AGGREGATE_TYPE_P (type))
5229 || (TARGET_32BIT && !TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
5230 || (type && int_size_in_bytes (type) < 0))
5232 if (TARGET_DEBUG_ARG)
5233 fprintf (stderr, "function_arg_pass_by_reference\n");
5241 rs6000_move_block_from_reg (int regno, rtx x, int nregs)
5244 enum machine_mode reg_mode = TARGET_32BIT ? SImode : DImode;
5249 for (i = 0; i < nregs; i++)
5251 rtx tem = adjust_address_nv (x, reg_mode, i*GET_MODE_SIZE(reg_mode));
5252 if (reload_completed)
5254 if (! strict_memory_address_p (reg_mode, XEXP (tem, 0)))
5257 tem = simplify_gen_subreg (reg_mode, x, BLKmode,
5258 i * GET_MODE_SIZE(reg_mode));
5261 tem = replace_equiv_address (tem, XEXP (tem, 0));
5263 if (tem == NULL_RTX)
5266 emit_move_insn (tem, gen_rtx_REG (reg_mode, regno + i));
5271 /* Perform any needed actions needed for a function that is receiving a
5272 variable number of arguments.
5276 MODE and TYPE are the mode and type of the current parameter.
5278 PRETEND_SIZE is a variable that should be set to the amount of stack
5279 that must be pushed by the prolog to pretend that our caller pushed
5282 Normally, this macro will push all remaining incoming registers on the
5283 stack and set PRETEND_SIZE to the length of the registers pushed. */
5286 setup_incoming_varargs (CUMULATIVE_ARGS *cum, enum machine_mode mode,
5287 tree type, int *pretend_size ATTRIBUTE_UNUSED, int no_rtl)
5289 CUMULATIVE_ARGS next_cum;
5290 int reg_size = TARGET_32BIT ? 4 : 8;
5291 rtx save_area = NULL_RTX, mem;
5292 int first_reg_offset, set;
5294 /* Skip the last named argument. */
5296 function_arg_advance (&next_cum, mode, type, 1);
5298 if (DEFAULT_ABI == ABI_V4)
5300 /* Indicate to allocate space on the stack for varargs save area. */
5301 cfun->machine->sysv_varargs_p = 1;
5303 save_area = plus_constant (virtual_stack_vars_rtx,
5304 - RS6000_VARARGS_SIZE);
5306 first_reg_offset = next_cum.sysv_gregno - GP_ARG_MIN_REG;
5310 first_reg_offset = next_cum.words;
5311 save_area = virtual_incoming_args_rtx;
5312 cfun->machine->sysv_varargs_p = 0;
5314 if (targetm.calls.must_pass_in_stack (mode, type))
5315 first_reg_offset += rs6000_arg_size (TYPE_MODE (type), type);
5318 set = get_varargs_alias_set ();
5319 if (! no_rtl && first_reg_offset < GP_ARG_NUM_REG)
5321 mem = gen_rtx_MEM (BLKmode,
5322 plus_constant (save_area,
5323 first_reg_offset * reg_size)),
5324 set_mem_alias_set (mem, set);
5325 set_mem_align (mem, BITS_PER_WORD);
5327 rs6000_move_block_from_reg (GP_ARG_MIN_REG + first_reg_offset, mem,
5328 GP_ARG_NUM_REG - first_reg_offset);
5331 /* Save FP registers if needed. */
5332 if (DEFAULT_ABI == ABI_V4
5333 && TARGET_HARD_FLOAT && TARGET_FPRS
5335 && next_cum.fregno <= FP_ARG_V4_MAX_REG)
5337 int fregno = next_cum.fregno;
5338 rtx cr1 = gen_rtx_REG (CCmode, CR1_REGNO);
5339 rtx lab = gen_label_rtx ();
5340 int off = (GP_ARG_NUM_REG * reg_size) + ((fregno - FP_ARG_MIN_REG) * 8);
5342 emit_jump_insn (gen_rtx_SET (VOIDmode,
5344 gen_rtx_IF_THEN_ELSE (VOIDmode,
5345 gen_rtx_NE (VOIDmode, cr1,
5347 gen_rtx_LABEL_REF (VOIDmode, lab),
5350 while (fregno <= FP_ARG_V4_MAX_REG)
5352 mem = gen_rtx_MEM (DFmode, plus_constant (save_area, off));
5353 set_mem_alias_set (mem, set);
5354 emit_move_insn (mem, gen_rtx_REG (DFmode, fregno));
5363 /* Create the va_list data type. */
5366 rs6000_build_builtin_va_list (void)
5368 tree f_gpr, f_fpr, f_res, f_ovf, f_sav, record, type_decl;
5370 /* For AIX, prefer 'char *' because that's what the system
5371 header files like. */
5372 if (DEFAULT_ABI != ABI_V4)
5373 return build_pointer_type (char_type_node);
5375 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
5376 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
5378 f_gpr = build_decl (FIELD_DECL, get_identifier ("gpr"),
5379 unsigned_char_type_node);
5380 f_fpr = build_decl (FIELD_DECL, get_identifier ("fpr"),
5381 unsigned_char_type_node);
5382 /* Give the two bytes of padding a name, so that -Wpadded won't warn on
5384 f_res = build_decl (FIELD_DECL, get_identifier ("reserved"),
5385 short_unsigned_type_node);
5386 f_ovf = build_decl (FIELD_DECL, get_identifier ("overflow_arg_area"),
5388 f_sav = build_decl (FIELD_DECL, get_identifier ("reg_save_area"),
5391 DECL_FIELD_CONTEXT (f_gpr) = record;
5392 DECL_FIELD_CONTEXT (f_fpr) = record;
5393 DECL_FIELD_CONTEXT (f_res) = record;
5394 DECL_FIELD_CONTEXT (f_ovf) = record;
5395 DECL_FIELD_CONTEXT (f_sav) = record;
5397 TREE_CHAIN (record) = type_decl;
5398 TYPE_NAME (record) = type_decl;
5399 TYPE_FIELDS (record) = f_gpr;
5400 TREE_CHAIN (f_gpr) = f_fpr;
5401 TREE_CHAIN (f_fpr) = f_res;
5402 TREE_CHAIN (f_res) = f_ovf;
5403 TREE_CHAIN (f_ovf) = f_sav;
5405 layout_type (record);
5407 /* The correct type is an array type of one element. */
5408 return build_array_type (record, build_index_type (size_zero_node));
5411 /* Implement va_start. */
5414 rs6000_va_start (tree valist, rtx nextarg)
5416 HOST_WIDE_INT words, n_gpr, n_fpr;
5417 tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
5418 tree gpr, fpr, ovf, sav, t;
5420 /* Only SVR4 needs something special. */
5421 if (DEFAULT_ABI != ABI_V4)
5423 std_expand_builtin_va_start (valist, nextarg);
5427 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
5428 f_fpr = TREE_CHAIN (f_gpr);
5429 f_res = TREE_CHAIN (f_fpr);
5430 f_ovf = TREE_CHAIN (f_res);
5431 f_sav = TREE_CHAIN (f_ovf);
5433 valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
5434 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
5435 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr, NULL_TREE);
5436 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf, NULL_TREE);
5437 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav, NULL_TREE);
5439 /* Count number of gp and fp argument registers used. */
5440 words = current_function_args_info.words;
5441 n_gpr = current_function_args_info.sysv_gregno - GP_ARG_MIN_REG;
5442 n_fpr = current_function_args_info.fregno - FP_ARG_MIN_REG;
5444 if (TARGET_DEBUG_ARG)
5445 fprintf (stderr, "va_start: words = "HOST_WIDE_INT_PRINT_DEC", n_gpr = "
5446 HOST_WIDE_INT_PRINT_DEC", n_fpr = "HOST_WIDE_INT_PRINT_DEC"\n",
5447 words, n_gpr, n_fpr);
5449 t = build (MODIFY_EXPR, TREE_TYPE (gpr), gpr,
5450 build_int_cst (NULL_TREE, n_gpr, 0));
5451 TREE_SIDE_EFFECTS (t) = 1;
5452 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5454 t = build (MODIFY_EXPR, TREE_TYPE (fpr), fpr,
5455 build_int_cst (NULL_TREE, n_fpr, 0));
5456 TREE_SIDE_EFFECTS (t) = 1;
5457 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5459 /* Find the overflow area. */
5460 t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
5462 t = build (PLUS_EXPR, TREE_TYPE (ovf), t,
5463 build_int_cst (NULL_TREE, words * UNITS_PER_WORD, 0));
5464 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
5465 TREE_SIDE_EFFECTS (t) = 1;
5466 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5468 /* Find the register save area. */
5469 t = make_tree (TREE_TYPE (sav), virtual_stack_vars_rtx);
5470 t = build (PLUS_EXPR, TREE_TYPE (sav), t,
5471 build_int_cst (NULL_TREE, -RS6000_VARARGS_SIZE, -1));
5472 t = build (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
5473 TREE_SIDE_EFFECTS (t) = 1;
5474 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5477 /* Implement va_arg. */
5480 rs6000_gimplify_va_arg (tree valist, tree type, tree *pre_p, tree *post_p)
5482 tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
5483 tree gpr, fpr, ovf, sav, reg, t, u;
5484 int size, rsize, n_reg, sav_ofs, sav_scale;
5485 tree lab_false, lab_over, addr;
5487 tree ptrtype = build_pointer_type (type);
5489 if (pass_by_reference (NULL, TYPE_MODE (type), type, false))
5491 t = rs6000_gimplify_va_arg (valist, ptrtype, pre_p, post_p);
5492 return build_fold_indirect_ref (t);
5495 if (DEFAULT_ABI != ABI_V4)
5497 if (targetm.calls.split_complex_arg && TREE_CODE (type) == COMPLEX_TYPE)
5499 tree elem_type = TREE_TYPE (type);
5500 enum machine_mode elem_mode = TYPE_MODE (elem_type);
5501 int elem_size = GET_MODE_SIZE (elem_mode);
5503 if (elem_size < UNITS_PER_WORD)
5505 tree real_part, imag_part;
5506 tree post = NULL_TREE;
5508 real_part = rs6000_gimplify_va_arg (valist, elem_type, pre_p,
5510 /* Copy the value into a temporary, lest the formal temporary
5511 be reused out from under us. */
5512 real_part = get_initialized_tmp_var (real_part, pre_p, &post);
5513 append_to_statement_list (post, pre_p);
5515 imag_part = rs6000_gimplify_va_arg (valist, elem_type, pre_p,
5518 return build (COMPLEX_EXPR, type, real_part, imag_part);
5522 return std_gimplify_va_arg_expr (valist, type, pre_p, post_p);
5525 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
5526 f_fpr = TREE_CHAIN (f_gpr);
5527 f_res = TREE_CHAIN (f_fpr);
5528 f_ovf = TREE_CHAIN (f_res);
5529 f_sav = TREE_CHAIN (f_ovf);
5531 valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
5532 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
5533 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr, NULL_TREE);
5534 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf, NULL_TREE);
5535 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav, NULL_TREE);
5537 size = int_size_in_bytes (type);
5538 rsize = (size + 3) / 4;
5541 if (TARGET_HARD_FLOAT && TARGET_FPRS
5542 && (TYPE_MODE (type) == SFmode || TYPE_MODE (type) == DFmode))
5544 /* FP args go in FP registers, if present. */
5549 if (TYPE_MODE (type) == DFmode)
5554 /* Otherwise into GP registers. */
5563 /* Pull the value out of the saved registers.... */
5566 addr = create_tmp_var (ptr_type_node, "addr");
5567 DECL_POINTER_ALIAS_SET (addr) = get_varargs_alias_set ();
5569 /* AltiVec vectors never go in registers when -mabi=altivec. */
5570 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (TYPE_MODE (type)))
5574 lab_false = create_artificial_label ();
5575 lab_over = create_artificial_label ();
5577 /* Long long and SPE vectors are aligned in the registers.
5578 As are any other 2 gpr item such as complex int due to a
5579 historical mistake. */
5583 u = build2 (BIT_AND_EXPR, TREE_TYPE (reg), reg,
5584 size_int (n_reg - 1));
5585 u = build2 (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg, u);
5588 t = fold_convert (TREE_TYPE (reg), size_int (8 - n_reg + 1));
5589 t = build2 (GE_EXPR, boolean_type_node, u, t);
5590 u = build1 (GOTO_EXPR, void_type_node, lab_false);
5591 t = build3 (COND_EXPR, void_type_node, t, u, NULL_TREE);
5592 gimplify_and_add (t, pre_p);
5596 t = build2 (PLUS_EXPR, ptr_type_node, sav, size_int (sav_ofs));
5598 u = build2 (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg, size_int (n_reg));
5599 u = build1 (CONVERT_EXPR, integer_type_node, u);
5600 u = build2 (MULT_EXPR, integer_type_node, u, size_int (sav_scale));
5601 t = build2 (PLUS_EXPR, ptr_type_node, t, u);
5603 t = build2 (MODIFY_EXPR, void_type_node, addr, t);
5604 gimplify_and_add (t, pre_p);
5606 t = build1 (GOTO_EXPR, void_type_node, lab_over);
5607 gimplify_and_add (t, pre_p);
5609 t = build1 (LABEL_EXPR, void_type_node, lab_false);
5610 append_to_statement_list (t, pre_p);
5614 /* Ensure that we don't find any more args in regs.
5615 Alignment has taken care of the n_reg == 2 case. */
5616 t = build (MODIFY_EXPR, TREE_TYPE (reg), reg, size_int (8));
5617 gimplify_and_add (t, pre_p);
5621 /* ... otherwise out of the overflow area. */
5623 /* Care for on-stack alignment if needed. */
5627 t = build2 (PLUS_EXPR, TREE_TYPE (t), t, size_int (align - 1));
5628 t = build2 (BIT_AND_EXPR, TREE_TYPE (t), t,
5629 build_int_cst (NULL_TREE, -align, -1));
5631 gimplify_expr (&t, pre_p, NULL, is_gimple_val, fb_rvalue);
5633 u = build2 (MODIFY_EXPR, void_type_node, addr, t);
5634 gimplify_and_add (u, pre_p);
5636 t = build2 (PLUS_EXPR, TREE_TYPE (t), t, size_int (size));
5637 t = build2 (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
5638 gimplify_and_add (t, pre_p);
5642 t = build1 (LABEL_EXPR, void_type_node, lab_over);
5643 append_to_statement_list (t, pre_p);
5646 addr = fold_convert (ptrtype, addr);
5647 return build_fold_indirect_ref (addr);
5652 #define def_builtin(MASK, NAME, TYPE, CODE) \
5654 if ((MASK) & target_flags) \
5655 lang_hooks.builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, \
5659 /* Simple ternary operations: VECd = foo (VECa, VECb, VECc). */
5661 static const struct builtin_description bdesc_3arg[] =
5663 { MASK_ALTIVEC, CODE_FOR_altivec_vmaddfp, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP },
5664 { MASK_ALTIVEC, CODE_FOR_altivec_vmhaddshs, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS },
5665 { MASK_ALTIVEC, CODE_FOR_altivec_vmhraddshs, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS },
5666 { MASK_ALTIVEC, CODE_FOR_altivec_vmladduhm, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM},
5667 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumubm, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM },
5668 { MASK_ALTIVEC, CODE_FOR_altivec_vmsummbm, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM },
5669 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhm, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM },
5670 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshm, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM },
5671 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhs, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS },
5672 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshs, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS },
5673 { MASK_ALTIVEC, CODE_FOR_altivec_vnmsubfp, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP },
5674 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4sf, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF },
5675 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4si, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI },
5676 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_8hi, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI },
5677 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_16qi, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI },
5678 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4sf, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF },
5679 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4si, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI },
5680 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_8hi, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI },
5681 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_16qi, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI },
5682 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_16qi, "__builtin_altivec_vsldoi_16qi", ALTIVEC_BUILTIN_VSLDOI_16QI },
5683 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_8hi, "__builtin_altivec_vsldoi_8hi", ALTIVEC_BUILTIN_VSLDOI_8HI },
5684 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4si, "__builtin_altivec_vsldoi_4si", ALTIVEC_BUILTIN_VSLDOI_4SI },
5685 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4sf, "__builtin_altivec_vsldoi_4sf", ALTIVEC_BUILTIN_VSLDOI_4SF },
5688 /* DST operations: void foo (void *, const int, const char). */
5690 static const struct builtin_description bdesc_dst[] =
5692 { MASK_ALTIVEC, CODE_FOR_altivec_dst, "__builtin_altivec_dst", ALTIVEC_BUILTIN_DST },
5693 { MASK_ALTIVEC, CODE_FOR_altivec_dstt, "__builtin_altivec_dstt", ALTIVEC_BUILTIN_DSTT },
5694 { MASK_ALTIVEC, CODE_FOR_altivec_dstst, "__builtin_altivec_dstst", ALTIVEC_BUILTIN_DSTST },
5695 { MASK_ALTIVEC, CODE_FOR_altivec_dststt, "__builtin_altivec_dststt", ALTIVEC_BUILTIN_DSTSTT }
5698 /* Simple binary operations: VECc = foo (VECa, VECb). */
5700 static struct builtin_description bdesc_2arg[] =
5702 { MASK_ALTIVEC, CODE_FOR_addv16qi3, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM },
5703 { MASK_ALTIVEC, CODE_FOR_addv8hi3, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM },
5704 { MASK_ALTIVEC, CODE_FOR_addv4si3, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM },
5705 { MASK_ALTIVEC, CODE_FOR_addv4sf3, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP },
5706 { MASK_ALTIVEC, CODE_FOR_altivec_vaddcuw, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW },
5707 { MASK_ALTIVEC, CODE_FOR_altivec_vaddubs, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS },
5708 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsbs, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS },
5709 { MASK_ALTIVEC, CODE_FOR_altivec_vadduhs, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS },
5710 { MASK_ALTIVEC, CODE_FOR_altivec_vaddshs, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS },
5711 { MASK_ALTIVEC, CODE_FOR_altivec_vadduws, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS },
5712 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsws, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS },
5713 { MASK_ALTIVEC, CODE_FOR_andv4si3, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND },
5714 { MASK_ALTIVEC, CODE_FOR_altivec_vandc, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC },
5715 { MASK_ALTIVEC, CODE_FOR_altivec_vavgub, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB },
5716 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsb, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB },
5717 { MASK_ALTIVEC, CODE_FOR_altivec_vavguh, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH },
5718 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsh, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH },
5719 { MASK_ALTIVEC, CODE_FOR_altivec_vavguw, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW },
5720 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsw, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW },
5721 { MASK_ALTIVEC, CODE_FOR_altivec_vcfux, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX },
5722 { MASK_ALTIVEC, CODE_FOR_altivec_vcfsx, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX },
5723 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpbfp, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP },
5724 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequb, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB },
5725 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequh, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH },
5726 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequw, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW },
5727 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpeqfp, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP },
5728 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgefp, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP },
5729 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtub, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB },
5730 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsb, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB },
5731 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuh, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH },
5732 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsh, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH },
5733 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuw, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW },
5734 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsw, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW },
5735 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtfp, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP },
5736 { MASK_ALTIVEC, CODE_FOR_altivec_vctsxs, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS },
5737 { MASK_ALTIVEC, CODE_FOR_altivec_vctuxs, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS },
5738 { MASK_ALTIVEC, CODE_FOR_umaxv16qi3, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB },
5739 { MASK_ALTIVEC, CODE_FOR_smaxv16qi3, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB },
5740 { MASK_ALTIVEC, CODE_FOR_umaxv8hi3, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH },
5741 { MASK_ALTIVEC, CODE_FOR_smaxv8hi3, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH },
5742 { MASK_ALTIVEC, CODE_FOR_umaxv4si3, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW },
5743 { MASK_ALTIVEC, CODE_FOR_smaxv4si3, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW },
5744 { MASK_ALTIVEC, CODE_FOR_smaxv4sf3, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP },
5745 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghb, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB },
5746 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghh, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH },
5747 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghw, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW },
5748 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglb, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB },
5749 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglh, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH },
5750 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglw, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW },
5751 { MASK_ALTIVEC, CODE_FOR_uminv16qi3, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB },
5752 { MASK_ALTIVEC, CODE_FOR_sminv16qi3, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB },
5753 { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH },
5754 { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH },
5755 { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW },
5756 { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW },
5757 { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP },
5758 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleub, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB },
5759 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesb, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB },
5760 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleuh, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH },
5761 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesh, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH },
5762 { MASK_ALTIVEC, CODE_FOR_altivec_vmuloub, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB },
5763 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosb, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB },
5764 { MASK_ALTIVEC, CODE_FOR_altivec_vmulouh, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH },
5765 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosh, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH },
5766 { MASK_ALTIVEC, CODE_FOR_altivec_vnor, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR },
5767 { MASK_ALTIVEC, CODE_FOR_iorv4si3, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR },
5768 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhum, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM },
5769 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwum, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM },
5770 { MASK_ALTIVEC, CODE_FOR_altivec_vpkpx, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX },
5771 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhss, "__builtin_altivec_vpkuhss", ALTIVEC_BUILTIN_VPKUHSS },
5772 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshss, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS },
5773 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwss, "__builtin_altivec_vpkuwss", ALTIVEC_BUILTIN_VPKUWSS },
5774 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswss, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS },
5775 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhus, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS },
5776 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshus, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS },
5777 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwus, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS },
5778 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswus, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS },
5779 { MASK_ALTIVEC, CODE_FOR_altivec_vrlb, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB },
5780 { MASK_ALTIVEC, CODE_FOR_altivec_vrlh, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH },
5781 { MASK_ALTIVEC, CODE_FOR_altivec_vrlw, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW },
5782 { MASK_ALTIVEC, CODE_FOR_altivec_vslb, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB },
5783 { MASK_ALTIVEC, CODE_FOR_altivec_vslh, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH },
5784 { MASK_ALTIVEC, CODE_FOR_altivec_vslw, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW },
5785 { MASK_ALTIVEC, CODE_FOR_altivec_vsl, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL },
5786 { MASK_ALTIVEC, CODE_FOR_altivec_vslo, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO },
5787 { MASK_ALTIVEC, CODE_FOR_altivec_vspltb, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB },
5788 { MASK_ALTIVEC, CODE_FOR_altivec_vsplth, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH },
5789 { MASK_ALTIVEC, CODE_FOR_altivec_vspltw, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW },
5790 { MASK_ALTIVEC, CODE_FOR_altivec_vsrb, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB },
5791 { MASK_ALTIVEC, CODE_FOR_altivec_vsrh, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH },
5792 { MASK_ALTIVEC, CODE_FOR_altivec_vsrw, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW },
5793 { MASK_ALTIVEC, CODE_FOR_altivec_vsrab, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB },
5794 { MASK_ALTIVEC, CODE_FOR_altivec_vsrah, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH },
5795 { MASK_ALTIVEC, CODE_FOR_altivec_vsraw, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW },
5796 { MASK_ALTIVEC, CODE_FOR_altivec_vsr, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR },
5797 { MASK_ALTIVEC, CODE_FOR_altivec_vsro, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO },
5798 { MASK_ALTIVEC, CODE_FOR_subv16qi3, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM },
5799 { MASK_ALTIVEC, CODE_FOR_subv8hi3, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM },
5800 { MASK_ALTIVEC, CODE_FOR_subv4si3, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM },
5801 { MASK_ALTIVEC, CODE_FOR_subv4sf3, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP },
5802 { MASK_ALTIVEC, CODE_FOR_altivec_vsubcuw, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW },
5803 { MASK_ALTIVEC, CODE_FOR_altivec_vsububs, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS },
5804 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsbs, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS },
5805 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuhs, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS },
5806 { MASK_ALTIVEC, CODE_FOR_altivec_vsubshs, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS },
5807 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuws, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS },
5808 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsws, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS },
5809 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4ubs, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS },
5810 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4sbs, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS },
5811 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4shs, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS },
5812 { MASK_ALTIVEC, CODE_FOR_altivec_vsum2sws, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS },
5813 { MASK_ALTIVEC, CODE_FOR_altivec_vsumsws, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS },
5814 { MASK_ALTIVEC, CODE_FOR_xorv4si3, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR },
5816 /* Place holder, leave as first spe builtin. */
5817 { 0, CODE_FOR_spe_evaddw, "__builtin_spe_evaddw", SPE_BUILTIN_EVADDW },
5818 { 0, CODE_FOR_spe_evand, "__builtin_spe_evand", SPE_BUILTIN_EVAND },
5819 { 0, CODE_FOR_spe_evandc, "__builtin_spe_evandc", SPE_BUILTIN_EVANDC },
5820 { 0, CODE_FOR_spe_evdivws, "__builtin_spe_evdivws", SPE_BUILTIN_EVDIVWS },
5821 { 0, CODE_FOR_spe_evdivwu, "__builtin_spe_evdivwu", SPE_BUILTIN_EVDIVWU },
5822 { 0, CODE_FOR_spe_eveqv, "__builtin_spe_eveqv", SPE_BUILTIN_EVEQV },
5823 { 0, CODE_FOR_spe_evfsadd, "__builtin_spe_evfsadd", SPE_BUILTIN_EVFSADD },
5824 { 0, CODE_FOR_spe_evfsdiv, "__builtin_spe_evfsdiv", SPE_BUILTIN_EVFSDIV },
5825 { 0, CODE_FOR_spe_evfsmul, "__builtin_spe_evfsmul", SPE_BUILTIN_EVFSMUL },
5826 { 0, CODE_FOR_spe_evfssub, "__builtin_spe_evfssub", SPE_BUILTIN_EVFSSUB },
5827 { 0, CODE_FOR_spe_evmergehi, "__builtin_spe_evmergehi", SPE_BUILTIN_EVMERGEHI },
5828 { 0, CODE_FOR_spe_evmergehilo, "__builtin_spe_evmergehilo", SPE_BUILTIN_EVMERGEHILO },
5829 { 0, CODE_FOR_spe_evmergelo, "__builtin_spe_evmergelo", SPE_BUILTIN_EVMERGELO },
5830 { 0, CODE_FOR_spe_evmergelohi, "__builtin_spe_evmergelohi", SPE_BUILTIN_EVMERGELOHI },
5831 { 0, CODE_FOR_spe_evmhegsmfaa, "__builtin_spe_evmhegsmfaa", SPE_BUILTIN_EVMHEGSMFAA },
5832 { 0, CODE_FOR_spe_evmhegsmfan, "__builtin_spe_evmhegsmfan", SPE_BUILTIN_EVMHEGSMFAN },
5833 { 0, CODE_FOR_spe_evmhegsmiaa, "__builtin_spe_evmhegsmiaa", SPE_BUILTIN_EVMHEGSMIAA },
5834 { 0, CODE_FOR_spe_evmhegsmian, "__builtin_spe_evmhegsmian", SPE_BUILTIN_EVMHEGSMIAN },
5835 { 0, CODE_FOR_spe_evmhegumiaa, "__builtin_spe_evmhegumiaa", SPE_BUILTIN_EVMHEGUMIAA },
5836 { 0, CODE_FOR_spe_evmhegumian, "__builtin_spe_evmhegumian", SPE_BUILTIN_EVMHEGUMIAN },
5837 { 0, CODE_FOR_spe_evmhesmf, "__builtin_spe_evmhesmf", SPE_BUILTIN_EVMHESMF },
5838 { 0, CODE_FOR_spe_evmhesmfa, "__builtin_spe_evmhesmfa", SPE_BUILTIN_EVMHESMFA },
5839 { 0, CODE_FOR_spe_evmhesmfaaw, "__builtin_spe_evmhesmfaaw", SPE_BUILTIN_EVMHESMFAAW },
5840 { 0, CODE_FOR_spe_evmhesmfanw, "__builtin_spe_evmhesmfanw", SPE_BUILTIN_EVMHESMFANW },
5841 { 0, CODE_FOR_spe_evmhesmi, "__builtin_spe_evmhesmi", SPE_BUILTIN_EVMHESMI },
5842 { 0, CODE_FOR_spe_evmhesmia, "__builtin_spe_evmhesmia", SPE_BUILTIN_EVMHESMIA },
5843 { 0, CODE_FOR_spe_evmhesmiaaw, "__builtin_spe_evmhesmiaaw", SPE_BUILTIN_EVMHESMIAAW },
5844 { 0, CODE_FOR_spe_evmhesmianw, "__builtin_spe_evmhesmianw", SPE_BUILTIN_EVMHESMIANW },
5845 { 0, CODE_FOR_spe_evmhessf, "__builtin_spe_evmhessf", SPE_BUILTIN_EVMHESSF },
5846 { 0, CODE_FOR_spe_evmhessfa, "__builtin_spe_evmhessfa", SPE_BUILTIN_EVMHESSFA },
5847 { 0, CODE_FOR_spe_evmhessfaaw, "__builtin_spe_evmhessfaaw", SPE_BUILTIN_EVMHESSFAAW },
5848 { 0, CODE_FOR_spe_evmhessfanw, "__builtin_spe_evmhessfanw", SPE_BUILTIN_EVMHESSFANW },
5849 { 0, CODE_FOR_spe_evmhessiaaw, "__builtin_spe_evmhessiaaw", SPE_BUILTIN_EVMHESSIAAW },
5850 { 0, CODE_FOR_spe_evmhessianw, "__builtin_spe_evmhessianw", SPE_BUILTIN_EVMHESSIANW },
5851 { 0, CODE_FOR_spe_evmheumi, "__builtin_spe_evmheumi", SPE_BUILTIN_EVMHEUMI },
5852 { 0, CODE_FOR_spe_evmheumia, "__builtin_spe_evmheumia", SPE_BUILTIN_EVMHEUMIA },
5853 { 0, CODE_FOR_spe_evmheumiaaw, "__builtin_spe_evmheumiaaw", SPE_BUILTIN_EVMHEUMIAAW },
5854 { 0, CODE_FOR_spe_evmheumianw, "__builtin_spe_evmheumianw", SPE_BUILTIN_EVMHEUMIANW },
5855 { 0, CODE_FOR_spe_evmheusiaaw, "__builtin_spe_evmheusiaaw", SPE_BUILTIN_EVMHEUSIAAW },
5856 { 0, CODE_FOR_spe_evmheusianw, "__builtin_spe_evmheusianw", SPE_BUILTIN_EVMHEUSIANW },
5857 { 0, CODE_FOR_spe_evmhogsmfaa, "__builtin_spe_evmhogsmfaa", SPE_BUILTIN_EVMHOGSMFAA },
5858 { 0, CODE_FOR_spe_evmhogsmfan, "__builtin_spe_evmhogsmfan", SPE_BUILTIN_EVMHOGSMFAN },
5859 { 0, CODE_FOR_spe_evmhogsmiaa, "__builtin_spe_evmhogsmiaa", SPE_BUILTIN_EVMHOGSMIAA },
5860 { 0, CODE_FOR_spe_evmhogsmian, "__builtin_spe_evmhogsmian", SPE_BUILTIN_EVMHOGSMIAN },
5861 { 0, CODE_FOR_spe_evmhogumiaa, "__builtin_spe_evmhogumiaa", SPE_BUILTIN_EVMHOGUMIAA },
5862 { 0, CODE_FOR_spe_evmhogumian, "__builtin_spe_evmhogumian", SPE_BUILTIN_EVMHOGUMIAN },
5863 { 0, CODE_FOR_spe_evmhosmf, "__builtin_spe_evmhosmf", SPE_BUILTIN_EVMHOSMF },
5864 { 0, CODE_FOR_spe_evmhosmfa, "__builtin_spe_evmhosmfa", SPE_BUILTIN_EVMHOSMFA },
5865 { 0, CODE_FOR_spe_evmhosmfaaw, "__builtin_spe_evmhosmfaaw", SPE_BUILTIN_EVMHOSMFAAW },
5866 { 0, CODE_FOR_spe_evmhosmfanw, "__builtin_spe_evmhosmfanw", SPE_BUILTIN_EVMHOSMFANW },
5867 { 0, CODE_FOR_spe_evmhosmi, "__builtin_spe_evmhosmi", SPE_BUILTIN_EVMHOSMI },
5868 { 0, CODE_FOR_spe_evmhosmia, "__builtin_spe_evmhosmia", SPE_BUILTIN_EVMHOSMIA },
5869 { 0, CODE_FOR_spe_evmhosmiaaw, "__builtin_spe_evmhosmiaaw", SPE_BUILTIN_EVMHOSMIAAW },
5870 { 0, CODE_FOR_spe_evmhosmianw, "__builtin_spe_evmhosmianw", SPE_BUILTIN_EVMHOSMIANW },
5871 { 0, CODE_FOR_spe_evmhossf, "__builtin_spe_evmhossf", SPE_BUILTIN_EVMHOSSF },
5872 { 0, CODE_FOR_spe_evmhossfa, "__builtin_spe_evmhossfa", SPE_BUILTIN_EVMHOSSFA },
5873 { 0, CODE_FOR_spe_evmhossfaaw, "__builtin_spe_evmhossfaaw", SPE_BUILTIN_EVMHOSSFAAW },
5874 { 0, CODE_FOR_spe_evmhossfanw, "__builtin_spe_evmhossfanw", SPE_BUILTIN_EVMHOSSFANW },
5875 { 0, CODE_FOR_spe_evmhossiaaw, "__builtin_spe_evmhossiaaw", SPE_BUILTIN_EVMHOSSIAAW },
5876 { 0, CODE_FOR_spe_evmhossianw, "__builtin_spe_evmhossianw", SPE_BUILTIN_EVMHOSSIANW },
5877 { 0, CODE_FOR_spe_evmhoumi, "__builtin_spe_evmhoumi", SPE_BUILTIN_EVMHOUMI },
5878 { 0, CODE_FOR_spe_evmhoumia, "__builtin_spe_evmhoumia", SPE_BUILTIN_EVMHOUMIA },
5879 { 0, CODE_FOR_spe_evmhoumiaaw, "__builtin_spe_evmhoumiaaw", SPE_BUILTIN_EVMHOUMIAAW },
5880 { 0, CODE_FOR_spe_evmhoumianw, "__builtin_spe_evmhoumianw", SPE_BUILTIN_EVMHOUMIANW },
5881 { 0, CODE_FOR_spe_evmhousiaaw, "__builtin_spe_evmhousiaaw", SPE_BUILTIN_EVMHOUSIAAW },
5882 { 0, CODE_FOR_spe_evmhousianw, "__builtin_spe_evmhousianw", SPE_BUILTIN_EVMHOUSIANW },
5883 { 0, CODE_FOR_spe_evmwhsmf, "__builtin_spe_evmwhsmf", SPE_BUILTIN_EVMWHSMF },
5884 { 0, CODE_FOR_spe_evmwhsmfa, "__builtin_spe_evmwhsmfa", SPE_BUILTIN_EVMWHSMFA },
5885 { 0, CODE_FOR_spe_evmwhsmi, "__builtin_spe_evmwhsmi", SPE_BUILTIN_EVMWHSMI },
5886 { 0, CODE_FOR_spe_evmwhsmia, "__builtin_spe_evmwhsmia", SPE_BUILTIN_EVMWHSMIA },
5887 { 0, CODE_FOR_spe_evmwhssf, "__builtin_spe_evmwhssf", SPE_BUILTIN_EVMWHSSF },
5888 { 0, CODE_FOR_spe_evmwhssfa, "__builtin_spe_evmwhssfa", SPE_BUILTIN_EVMWHSSFA },
5889 { 0, CODE_FOR_spe_evmwhumi, "__builtin_spe_evmwhumi", SPE_BUILTIN_EVMWHUMI },
5890 { 0, CODE_FOR_spe_evmwhumia, "__builtin_spe_evmwhumia", SPE_BUILTIN_EVMWHUMIA },
5891 { 0, CODE_FOR_spe_evmwlsmiaaw, "__builtin_spe_evmwlsmiaaw", SPE_BUILTIN_EVMWLSMIAAW },
5892 { 0, CODE_FOR_spe_evmwlsmianw, "__builtin_spe_evmwlsmianw", SPE_BUILTIN_EVMWLSMIANW },
5893 { 0, CODE_FOR_spe_evmwlssiaaw, "__builtin_spe_evmwlssiaaw", SPE_BUILTIN_EVMWLSSIAAW },
5894 { 0, CODE_FOR_spe_evmwlssianw, "__builtin_spe_evmwlssianw", SPE_BUILTIN_EVMWLSSIANW },
5895 { 0, CODE_FOR_spe_evmwlumi, "__builtin_spe_evmwlumi", SPE_BUILTIN_EVMWLUMI },
5896 { 0, CODE_FOR_spe_evmwlumia, "__builtin_spe_evmwlumia", SPE_BUILTIN_EVMWLUMIA },
5897 { 0, CODE_FOR_spe_evmwlumiaaw, "__builtin_spe_evmwlumiaaw", SPE_BUILTIN_EVMWLUMIAAW },
5898 { 0, CODE_FOR_spe_evmwlumianw, "__builtin_spe_evmwlumianw", SPE_BUILTIN_EVMWLUMIANW },
5899 { 0, CODE_FOR_spe_evmwlusiaaw, "__builtin_spe_evmwlusiaaw", SPE_BUILTIN_EVMWLUSIAAW },
5900 { 0, CODE_FOR_spe_evmwlusianw, "__builtin_spe_evmwlusianw", SPE_BUILTIN_EVMWLUSIANW },
5901 { 0, CODE_FOR_spe_evmwsmf, "__builtin_spe_evmwsmf", SPE_BUILTIN_EVMWSMF },
5902 { 0, CODE_FOR_spe_evmwsmfa, "__builtin_spe_evmwsmfa", SPE_BUILTIN_EVMWSMFA },
5903 { 0, CODE_FOR_spe_evmwsmfaa, "__builtin_spe_evmwsmfaa", SPE_BUILTIN_EVMWSMFAA },
5904 { 0, CODE_FOR_spe_evmwsmfan, "__builtin_spe_evmwsmfan", SPE_BUILTIN_EVMWSMFAN },
5905 { 0, CODE_FOR_spe_evmwsmi, "__builtin_spe_evmwsmi", SPE_BUILTIN_EVMWSMI },
5906 { 0, CODE_FOR_spe_evmwsmia, "__builtin_spe_evmwsmia", SPE_BUILTIN_EVMWSMIA },
5907 { 0, CODE_FOR_spe_evmwsmiaa, "__builtin_spe_evmwsmiaa", SPE_BUILTIN_EVMWSMIAA },
5908 { 0, CODE_FOR_spe_evmwsmian, "__builtin_spe_evmwsmian", SPE_BUILTIN_EVMWSMIAN },
5909 { 0, CODE_FOR_spe_evmwssf, "__builtin_spe_evmwssf", SPE_BUILTIN_EVMWSSF },
5910 { 0, CODE_FOR_spe_evmwssfa, "__builtin_spe_evmwssfa", SPE_BUILTIN_EVMWSSFA },
5911 { 0, CODE_FOR_spe_evmwssfaa, "__builtin_spe_evmwssfaa", SPE_BUILTIN_EVMWSSFAA },
5912 { 0, CODE_FOR_spe_evmwssfan, "__builtin_spe_evmwssfan", SPE_BUILTIN_EVMWSSFAN },
5913 { 0, CODE_FOR_spe_evmwumi, "__builtin_spe_evmwumi", SPE_BUILTIN_EVMWUMI },
5914 { 0, CODE_FOR_spe_evmwumia, "__builtin_spe_evmwumia", SPE_BUILTIN_EVMWUMIA },
5915 { 0, CODE_FOR_spe_evmwumiaa, "__builtin_spe_evmwumiaa", SPE_BUILTIN_EVMWUMIAA },
5916 { 0, CODE_FOR_spe_evmwumian, "__builtin_spe_evmwumian", SPE_BUILTIN_EVMWUMIAN },
5917 { 0, CODE_FOR_spe_evnand, "__builtin_spe_evnand", SPE_BUILTIN_EVNAND },
5918 { 0, CODE_FOR_spe_evnor, "__builtin_spe_evnor", SPE_BUILTIN_EVNOR },
5919 { 0, CODE_FOR_spe_evor, "__builtin_spe_evor", SPE_BUILTIN_EVOR },
5920 { 0, CODE_FOR_spe_evorc, "__builtin_spe_evorc", SPE_BUILTIN_EVORC },
5921 { 0, CODE_FOR_spe_evrlw, "__builtin_spe_evrlw", SPE_BUILTIN_EVRLW },
5922 { 0, CODE_FOR_spe_evslw, "__builtin_spe_evslw", SPE_BUILTIN_EVSLW },
5923 { 0, CODE_FOR_spe_evsrws, "__builtin_spe_evsrws", SPE_BUILTIN_EVSRWS },
5924 { 0, CODE_FOR_spe_evsrwu, "__builtin_spe_evsrwu", SPE_BUILTIN_EVSRWU },
5925 { 0, CODE_FOR_spe_evsubfw, "__builtin_spe_evsubfw", SPE_BUILTIN_EVSUBFW },
5927 /* SPE binary operations expecting a 5-bit unsigned literal. */
5928 { 0, CODE_FOR_spe_evaddiw, "__builtin_spe_evaddiw", SPE_BUILTIN_EVADDIW },
5930 { 0, CODE_FOR_spe_evrlwi, "__builtin_spe_evrlwi", SPE_BUILTIN_EVRLWI },
5931 { 0, CODE_FOR_spe_evslwi, "__builtin_spe_evslwi", SPE_BUILTIN_EVSLWI },
5932 { 0, CODE_FOR_spe_evsrwis, "__builtin_spe_evsrwis", SPE_BUILTIN_EVSRWIS },
5933 { 0, CODE_FOR_spe_evsrwiu, "__builtin_spe_evsrwiu", SPE_BUILTIN_EVSRWIU },
5934 { 0, CODE_FOR_spe_evsubifw, "__builtin_spe_evsubifw", SPE_BUILTIN_EVSUBIFW },
5935 { 0, CODE_FOR_spe_evmwhssfaa, "__builtin_spe_evmwhssfaa", SPE_BUILTIN_EVMWHSSFAA },
5936 { 0, CODE_FOR_spe_evmwhssmaa, "__builtin_spe_evmwhssmaa", SPE_BUILTIN_EVMWHSSMAA },
5937 { 0, CODE_FOR_spe_evmwhsmfaa, "__builtin_spe_evmwhsmfaa", SPE_BUILTIN_EVMWHSMFAA },
5938 { 0, CODE_FOR_spe_evmwhsmiaa, "__builtin_spe_evmwhsmiaa", SPE_BUILTIN_EVMWHSMIAA },
5939 { 0, CODE_FOR_spe_evmwhusiaa, "__builtin_spe_evmwhusiaa", SPE_BUILTIN_EVMWHUSIAA },
5940 { 0, CODE_FOR_spe_evmwhumiaa, "__builtin_spe_evmwhumiaa", SPE_BUILTIN_EVMWHUMIAA },
5941 { 0, CODE_FOR_spe_evmwhssfan, "__builtin_spe_evmwhssfan", SPE_BUILTIN_EVMWHSSFAN },
5942 { 0, CODE_FOR_spe_evmwhssian, "__builtin_spe_evmwhssian", SPE_BUILTIN_EVMWHSSIAN },
5943 { 0, CODE_FOR_spe_evmwhsmfan, "__builtin_spe_evmwhsmfan", SPE_BUILTIN_EVMWHSMFAN },
5944 { 0, CODE_FOR_spe_evmwhsmian, "__builtin_spe_evmwhsmian", SPE_BUILTIN_EVMWHSMIAN },
5945 { 0, CODE_FOR_spe_evmwhusian, "__builtin_spe_evmwhusian", SPE_BUILTIN_EVMWHUSIAN },
5946 { 0, CODE_FOR_spe_evmwhumian, "__builtin_spe_evmwhumian", SPE_BUILTIN_EVMWHUMIAN },
5947 { 0, CODE_FOR_spe_evmwhgssfaa, "__builtin_spe_evmwhgssfaa", SPE_BUILTIN_EVMWHGSSFAA },
5948 { 0, CODE_FOR_spe_evmwhgsmfaa, "__builtin_spe_evmwhgsmfaa", SPE_BUILTIN_EVMWHGSMFAA },
5949 { 0, CODE_FOR_spe_evmwhgsmiaa, "__builtin_spe_evmwhgsmiaa", SPE_BUILTIN_EVMWHGSMIAA },
5950 { 0, CODE_FOR_spe_evmwhgumiaa, "__builtin_spe_evmwhgumiaa", SPE_BUILTIN_EVMWHGUMIAA },
5951 { 0, CODE_FOR_spe_evmwhgssfan, "__builtin_spe_evmwhgssfan", SPE_BUILTIN_EVMWHGSSFAN },
5952 { 0, CODE_FOR_spe_evmwhgsmfan, "__builtin_spe_evmwhgsmfan", SPE_BUILTIN_EVMWHGSMFAN },
5953 { 0, CODE_FOR_spe_evmwhgsmian, "__builtin_spe_evmwhgsmian", SPE_BUILTIN_EVMWHGSMIAN },
5954 { 0, CODE_FOR_spe_evmwhgumian, "__builtin_spe_evmwhgumian", SPE_BUILTIN_EVMWHGUMIAN },
5955 { 0, CODE_FOR_spe_brinc, "__builtin_spe_brinc", SPE_BUILTIN_BRINC },
5957 /* Place-holder. Leave as last binary SPE builtin. */
5958 { 0, CODE_FOR_xorv2si3, "__builtin_spe_evxor", SPE_BUILTIN_EVXOR },
5961 /* AltiVec predicates. */
5963 struct builtin_description_predicates
5965 const unsigned int mask;
5966 const enum insn_code icode;
5968 const char *const name;
5969 const enum rs6000_builtins code;
5972 static const struct builtin_description_predicates bdesc_altivec_preds[] =
5974 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpbfp.", "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P },
5975 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpeqfp.", "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P },
5976 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgefp.", "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P },
5977 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgtfp.", "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P },
5978 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpequw.", "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P },
5979 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtsw.", "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P },
5980 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtuw.", "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P },
5981 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtuh.", "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P },
5982 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtsh.", "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P },
5983 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpequh.", "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P },
5984 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpequb.", "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P },
5985 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtsb.", "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P },
5986 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P }
5989 /* SPE predicates. */
5990 static struct builtin_description bdesc_spe_predicates[] =
5992 /* Place-holder. Leave as first. */
5993 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evcmpeq", SPE_BUILTIN_EVCMPEQ },
5994 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evcmpgts", SPE_BUILTIN_EVCMPGTS },
5995 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evcmpgtu", SPE_BUILTIN_EVCMPGTU },
5996 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evcmplts", SPE_BUILTIN_EVCMPLTS },
5997 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evcmpltu", SPE_BUILTIN_EVCMPLTU },
5998 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evfscmpeq", SPE_BUILTIN_EVFSCMPEQ },
5999 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evfscmpgt", SPE_BUILTIN_EVFSCMPGT },
6000 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evfscmplt", SPE_BUILTIN_EVFSCMPLT },
6001 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evfststeq", SPE_BUILTIN_EVFSTSTEQ },
6002 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evfststgt", SPE_BUILTIN_EVFSTSTGT },
6003 /* Place-holder. Leave as last. */
6004 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evfststlt", SPE_BUILTIN_EVFSTSTLT },
6007 /* SPE evsel predicates. */
6008 static struct builtin_description bdesc_spe_evsel[] =
6010 /* Place-holder. Leave as first. */
6011 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evsel_gts", SPE_BUILTIN_EVSEL_CMPGTS },
6012 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evsel_gtu", SPE_BUILTIN_EVSEL_CMPGTU },
6013 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evsel_lts", SPE_BUILTIN_EVSEL_CMPLTS },
6014 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evsel_ltu", SPE_BUILTIN_EVSEL_CMPLTU },
6015 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evsel_eq", SPE_BUILTIN_EVSEL_CMPEQ },
6016 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evsel_fsgt", SPE_BUILTIN_EVSEL_FSCMPGT },
6017 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evsel_fslt", SPE_BUILTIN_EVSEL_FSCMPLT },
6018 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evsel_fseq", SPE_BUILTIN_EVSEL_FSCMPEQ },
6019 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evsel_fststgt", SPE_BUILTIN_EVSEL_FSTSTGT },
6020 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evsel_fststlt", SPE_BUILTIN_EVSEL_FSTSTLT },
6021 /* Place-holder. Leave as last. */
6022 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evsel_fststeq", SPE_BUILTIN_EVSEL_FSTSTEQ },
6025 /* ABS* operations. */
6027 static const struct builtin_description bdesc_abs[] =
6029 { MASK_ALTIVEC, CODE_FOR_absv4si2, "__builtin_altivec_abs_v4si", ALTIVEC_BUILTIN_ABS_V4SI },
6030 { MASK_ALTIVEC, CODE_FOR_absv8hi2, "__builtin_altivec_abs_v8hi", ALTIVEC_BUILTIN_ABS_V8HI },
6031 { MASK_ALTIVEC, CODE_FOR_absv4sf2, "__builtin_altivec_abs_v4sf", ALTIVEC_BUILTIN_ABS_V4SF },
6032 { MASK_ALTIVEC, CODE_FOR_absv16qi2, "__builtin_altivec_abs_v16qi", ALTIVEC_BUILTIN_ABS_V16QI },
6033 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v4si, "__builtin_altivec_abss_v4si", ALTIVEC_BUILTIN_ABSS_V4SI },
6034 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v8hi, "__builtin_altivec_abss_v8hi", ALTIVEC_BUILTIN_ABSS_V8HI },
6035 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v16qi, "__builtin_altivec_abss_v16qi", ALTIVEC_BUILTIN_ABSS_V16QI }
6038 /* Simple unary operations: VECb = foo (unsigned literal) or VECb =
6041 static struct builtin_description bdesc_1arg[] =
6043 { MASK_ALTIVEC, CODE_FOR_altivec_vexptefp, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP },
6044 { MASK_ALTIVEC, CODE_FOR_altivec_vlogefp, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP },
6045 { MASK_ALTIVEC, CODE_FOR_altivec_vrefp, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP },
6046 { MASK_ALTIVEC, CODE_FOR_altivec_vrfim, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM },
6047 { MASK_ALTIVEC, CODE_FOR_altivec_vrfin, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN },
6048 { MASK_ALTIVEC, CODE_FOR_altivec_vrfip, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP },
6049 { MASK_ALTIVEC, CODE_FOR_ftruncv4sf2, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ },
6050 { MASK_ALTIVEC, CODE_FOR_altivec_vrsqrtefp, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP },
6051 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisb, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB },
6052 { MASK_ALTIVEC, CODE_FOR_altivec_vspltish, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH },
6053 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisw, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW },
6054 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsb, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB },
6055 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhpx, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX },
6056 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsh, "__builtin_altivec_vupkhsh", ALTIVEC_BUILTIN_VUPKHSH },
6057 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsb, "__builtin_altivec_vupklsb", ALTIVEC_BUILTIN_VUPKLSB },
6058 { MASK_ALTIVEC, CODE_FOR_altivec_vupklpx, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX },
6059 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsh, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH },
6061 /* The SPE unary builtins must start with SPE_BUILTIN_EVABS and
6062 end with SPE_BUILTIN_EVSUBFUSIAAW. */
6063 { 0, CODE_FOR_spe_evabs, "__builtin_spe_evabs", SPE_BUILTIN_EVABS },
6064 { 0, CODE_FOR_spe_evaddsmiaaw, "__builtin_spe_evaddsmiaaw", SPE_BUILTIN_EVADDSMIAAW },
6065 { 0, CODE_FOR_spe_evaddssiaaw, "__builtin_spe_evaddssiaaw", SPE_BUILTIN_EVADDSSIAAW },
6066 { 0, CODE_FOR_spe_evaddumiaaw, "__builtin_spe_evaddumiaaw", SPE_BUILTIN_EVADDUMIAAW },
6067 { 0, CODE_FOR_spe_evaddusiaaw, "__builtin_spe_evaddusiaaw", SPE_BUILTIN_EVADDUSIAAW },
6068 { 0, CODE_FOR_spe_evcntlsw, "__builtin_spe_evcntlsw", SPE_BUILTIN_EVCNTLSW },
6069 { 0, CODE_FOR_spe_evcntlzw, "__builtin_spe_evcntlzw", SPE_BUILTIN_EVCNTLZW },
6070 { 0, CODE_FOR_spe_evextsb, "__builtin_spe_evextsb", SPE_BUILTIN_EVEXTSB },
6071 { 0, CODE_FOR_spe_evextsh, "__builtin_spe_evextsh", SPE_BUILTIN_EVEXTSH },
6072 { 0, CODE_FOR_spe_evfsabs, "__builtin_spe_evfsabs", SPE_BUILTIN_EVFSABS },
6073 { 0, CODE_FOR_spe_evfscfsf, "__builtin_spe_evfscfsf", SPE_BUILTIN_EVFSCFSF },
6074 { 0, CODE_FOR_spe_evfscfsi, "__builtin_spe_evfscfsi", SPE_BUILTIN_EVFSCFSI },
6075 { 0, CODE_FOR_spe_evfscfuf, "__builtin_spe_evfscfuf", SPE_BUILTIN_EVFSCFUF },
6076 { 0, CODE_FOR_spe_evfscfui, "__builtin_spe_evfscfui", SPE_BUILTIN_EVFSCFUI },
6077 { 0, CODE_FOR_spe_evfsctsf, "__builtin_spe_evfsctsf", SPE_BUILTIN_EVFSCTSF },
6078 { 0, CODE_FOR_spe_evfsctsi, "__builtin_spe_evfsctsi", SPE_BUILTIN_EVFSCTSI },
6079 { 0, CODE_FOR_spe_evfsctsiz, "__builtin_spe_evfsctsiz", SPE_BUILTIN_EVFSCTSIZ },
6080 { 0, CODE_FOR_spe_evfsctuf, "__builtin_spe_evfsctuf", SPE_BUILTIN_EVFSCTUF },
6081 { 0, CODE_FOR_spe_evfsctui, "__builtin_spe_evfsctui", SPE_BUILTIN_EVFSCTUI },
6082 { 0, CODE_FOR_spe_evfsctuiz, "__builtin_spe_evfsctuiz", SPE_BUILTIN_EVFSCTUIZ },
6083 { 0, CODE_FOR_spe_evfsnabs, "__builtin_spe_evfsnabs", SPE_BUILTIN_EVFSNABS },
6084 { 0, CODE_FOR_spe_evfsneg, "__builtin_spe_evfsneg", SPE_BUILTIN_EVFSNEG },
6085 { 0, CODE_FOR_spe_evmra, "__builtin_spe_evmra", SPE_BUILTIN_EVMRA },
6086 { 0, CODE_FOR_negv2si2, "__builtin_spe_evneg", SPE_BUILTIN_EVNEG },
6087 { 0, CODE_FOR_spe_evrndw, "__builtin_spe_evrndw", SPE_BUILTIN_EVRNDW },
6088 { 0, CODE_FOR_spe_evsubfsmiaaw, "__builtin_spe_evsubfsmiaaw", SPE_BUILTIN_EVSUBFSMIAAW },
6089 { 0, CODE_FOR_spe_evsubfssiaaw, "__builtin_spe_evsubfssiaaw", SPE_BUILTIN_EVSUBFSSIAAW },
6090 { 0, CODE_FOR_spe_evsubfumiaaw, "__builtin_spe_evsubfumiaaw", SPE_BUILTIN_EVSUBFUMIAAW },
6092 /* Place-holder. Leave as last unary SPE builtin. */
6093 { 0, CODE_FOR_spe_evsubfusiaaw, "__builtin_spe_evsubfusiaaw", SPE_BUILTIN_EVSUBFUSIAAW },
6097 rs6000_expand_unop_builtin (enum insn_code icode, tree arglist, rtx target)
6100 tree arg0 = TREE_VALUE (arglist);
6101 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6102 enum machine_mode tmode = insn_data[icode].operand[0].mode;
6103 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
6105 if (icode == CODE_FOR_nothing)
6106 /* Builtin not supported on this processor. */
6109 /* If we got invalid arguments bail out before generating bad rtl. */
6110 if (arg0 == error_mark_node)
6113 if (icode == CODE_FOR_altivec_vspltisb
6114 || icode == CODE_FOR_altivec_vspltish
6115 || icode == CODE_FOR_altivec_vspltisw
6116 || icode == CODE_FOR_spe_evsplatfi
6117 || icode == CODE_FOR_spe_evsplati)
6119 /* Only allow 5-bit *signed* literals. */
6120 if (GET_CODE (op0) != CONST_INT
6121 || INTVAL (op0) > 0x1f
6122 || INTVAL (op0) < -0x1f)
6124 error ("argument 1 must be a 5-bit signed literal");
6130 || GET_MODE (target) != tmode
6131 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6132 target = gen_reg_rtx (tmode);
6134 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6135 op0 = copy_to_mode_reg (mode0, op0);
6137 pat = GEN_FCN (icode) (target, op0);
6146 altivec_expand_abs_builtin (enum insn_code icode, tree arglist, rtx target)
6148 rtx pat, scratch1, scratch2;
6149 tree arg0 = TREE_VALUE (arglist);
6150 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6151 enum machine_mode tmode = insn_data[icode].operand[0].mode;
6152 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
6154 /* If we have invalid arguments, bail out before generating bad rtl. */
6155 if (arg0 == error_mark_node)
6159 || GET_MODE (target) != tmode
6160 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6161 target = gen_reg_rtx (tmode);
6163 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6164 op0 = copy_to_mode_reg (mode0, op0);
6166 scratch1 = gen_reg_rtx (mode0);
6167 scratch2 = gen_reg_rtx (mode0);
6169 pat = GEN_FCN (icode) (target, op0, scratch1, scratch2);
6178 rs6000_expand_binop_builtin (enum insn_code icode, tree arglist, rtx target)
6181 tree arg0 = TREE_VALUE (arglist);
6182 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
6183 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6184 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6185 enum machine_mode tmode = insn_data[icode].operand[0].mode;
6186 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
6187 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
6189 if (icode == CODE_FOR_nothing)
6190 /* Builtin not supported on this processor. */
6193 /* If we got invalid arguments bail out before generating bad rtl. */
6194 if (arg0 == error_mark_node || arg1 == error_mark_node)
6197 if (icode == CODE_FOR_altivec_vcfux
6198 || icode == CODE_FOR_altivec_vcfsx
6199 || icode == CODE_FOR_altivec_vctsxs
6200 || icode == CODE_FOR_altivec_vctuxs
6201 || icode == CODE_FOR_altivec_vspltb
6202 || icode == CODE_FOR_altivec_vsplth
6203 || icode == CODE_FOR_altivec_vspltw
6204 || icode == CODE_FOR_spe_evaddiw
6205 || icode == CODE_FOR_spe_evldd
6206 || icode == CODE_FOR_spe_evldh
6207 || icode == CODE_FOR_spe_evldw
6208 || icode == CODE_FOR_spe_evlhhesplat
6209 || icode == CODE_FOR_spe_evlhhossplat
6210 || icode == CODE_FOR_spe_evlhhousplat
6211 || icode == CODE_FOR_spe_evlwhe
6212 || icode == CODE_FOR_spe_evlwhos
6213 || icode == CODE_FOR_spe_evlwhou
6214 || icode == CODE_FOR_spe_evlwhsplat
6215 || icode == CODE_FOR_spe_evlwwsplat
6216 || icode == CODE_FOR_spe_evrlwi
6217 || icode == CODE_FOR_spe_evslwi
6218 || icode == CODE_FOR_spe_evsrwis
6219 || icode == CODE_FOR_spe_evsubifw
6220 || icode == CODE_FOR_spe_evsrwiu)
6222 /* Only allow 5-bit unsigned literals. */
6224 if (TREE_CODE (arg1) != INTEGER_CST
6225 || TREE_INT_CST_LOW (arg1) & ~0x1f)
6227 error ("argument 2 must be a 5-bit unsigned literal");
6233 || GET_MODE (target) != tmode
6234 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6235 target = gen_reg_rtx (tmode);
6237 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6238 op0 = copy_to_mode_reg (mode0, op0);
6239 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
6240 op1 = copy_to_mode_reg (mode1, op1);
6242 pat = GEN_FCN (icode) (target, op0, op1);
6251 altivec_expand_predicate_builtin (enum insn_code icode, const char *opcode,
6252 tree arglist, rtx target)
6255 tree cr6_form = TREE_VALUE (arglist);
6256 tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
6257 tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6258 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6259 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6260 enum machine_mode tmode = SImode;
6261 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
6262 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
6265 if (TREE_CODE (cr6_form) != INTEGER_CST)
6267 error ("argument 1 of __builtin_altivec_predicate must be a constant");
6271 cr6_form_int = TREE_INT_CST_LOW (cr6_form);
6276 /* If we have invalid arguments, bail out before generating bad rtl. */
6277 if (arg0 == error_mark_node || arg1 == error_mark_node)
6281 || GET_MODE (target) != tmode
6282 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6283 target = gen_reg_rtx (tmode);
6285 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6286 op0 = copy_to_mode_reg (mode0, op0);
6287 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
6288 op1 = copy_to_mode_reg (mode1, op1);
6290 scratch = gen_reg_rtx (mode0);
6292 pat = GEN_FCN (icode) (scratch, op0, op1,
6293 gen_rtx_SYMBOL_REF (Pmode, opcode));
6298 /* The vec_any* and vec_all* predicates use the same opcodes for two
6299 different operations, but the bits in CR6 will be different
6300 depending on what information we want. So we have to play tricks
6301 with CR6 to get the right bits out.
6303 If you think this is disgusting, look at the specs for the
6304 AltiVec predicates. */
6306 switch (cr6_form_int)
6309 emit_insn (gen_cr6_test_for_zero (target));
6312 emit_insn (gen_cr6_test_for_zero_reverse (target));
6315 emit_insn (gen_cr6_test_for_lt (target));
6318 emit_insn (gen_cr6_test_for_lt_reverse (target));
6321 error ("argument 1 of __builtin_altivec_predicate is out of range");
6329 altivec_expand_lv_builtin (enum insn_code icode, tree arglist, rtx target)
6332 tree arg0 = TREE_VALUE (arglist);
6333 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
6334 enum machine_mode tmode = insn_data[icode].operand[0].mode;
6335 enum machine_mode mode0 = Pmode;
6336 enum machine_mode mode1 = Pmode;
6337 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6338 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6340 if (icode == CODE_FOR_nothing)
6341 /* Builtin not supported on this processor. */
6344 /* If we got invalid arguments bail out before generating bad rtl. */
6345 if (arg0 == error_mark_node || arg1 == error_mark_node)
6349 || GET_MODE (target) != tmode
6350 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6351 target = gen_reg_rtx (tmode);
6353 op1 = copy_to_mode_reg (mode1, op1);
6355 if (op0 == const0_rtx)
6357 addr = gen_rtx_MEM (tmode, op1);
6361 op0 = copy_to_mode_reg (mode0, op0);
6362 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op0, op1));
6365 pat = GEN_FCN (icode) (target, addr);
6375 spe_expand_stv_builtin (enum insn_code icode, tree arglist)
6377 tree arg0 = TREE_VALUE (arglist);
6378 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
6379 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6380 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6381 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6382 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
6384 enum machine_mode mode0 = insn_data[icode].operand[0].mode;
6385 enum machine_mode mode1 = insn_data[icode].operand[1].mode;
6386 enum machine_mode mode2 = insn_data[icode].operand[2].mode;
6388 /* Invalid arguments. Bail before doing anything stoopid! */
6389 if (arg0 == error_mark_node
6390 || arg1 == error_mark_node
6391 || arg2 == error_mark_node)
6394 if (! (*insn_data[icode].operand[2].predicate) (op0, mode2))
6395 op0 = copy_to_mode_reg (mode2, op0);
6396 if (! (*insn_data[icode].operand[0].predicate) (op1, mode0))
6397 op1 = copy_to_mode_reg (mode0, op1);
6398 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
6399 op2 = copy_to_mode_reg (mode1, op2);
6401 pat = GEN_FCN (icode) (op1, op2, op0);
6408 altivec_expand_stv_builtin (enum insn_code icode, tree arglist)
6410 tree arg0 = TREE_VALUE (arglist);
6411 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
6412 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6413 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6414 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6415 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
6417 enum machine_mode tmode = insn_data[icode].operand[0].mode;
6418 enum machine_mode mode1 = Pmode;
6419 enum machine_mode mode2 = Pmode;
6421 /* Invalid arguments. Bail before doing anything stoopid! */
6422 if (arg0 == error_mark_node
6423 || arg1 == error_mark_node
6424 || arg2 == error_mark_node)
6427 if (! (*insn_data[icode].operand[1].predicate) (op0, tmode))
6428 op0 = copy_to_mode_reg (tmode, op0);
6430 op2 = copy_to_mode_reg (mode2, op2);
6432 if (op1 == const0_rtx)
6434 addr = gen_rtx_MEM (tmode, op2);
6438 op1 = copy_to_mode_reg (mode1, op1);
6439 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op1, op2));
6442 pat = GEN_FCN (icode) (addr, op0);
6449 rs6000_expand_ternop_builtin (enum insn_code icode, tree arglist, rtx target)
6452 tree arg0 = TREE_VALUE (arglist);
6453 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
6454 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6455 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6456 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6457 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
6458 enum machine_mode tmode = insn_data[icode].operand[0].mode;
6459 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
6460 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
6461 enum machine_mode mode2 = insn_data[icode].operand[3].mode;
6463 if (icode == CODE_FOR_nothing)
6464 /* Builtin not supported on this processor. */
6467 /* If we got invalid arguments bail out before generating bad rtl. */
6468 if (arg0 == error_mark_node
6469 || arg1 == error_mark_node
6470 || arg2 == error_mark_node)
6473 if (icode == CODE_FOR_altivec_vsldoi_4sf
6474 || icode == CODE_FOR_altivec_vsldoi_4si
6475 || icode == CODE_FOR_altivec_vsldoi_8hi
6476 || icode == CODE_FOR_altivec_vsldoi_16qi)
6478 /* Only allow 4-bit unsigned literals. */
6480 if (TREE_CODE (arg2) != INTEGER_CST
6481 || TREE_INT_CST_LOW (arg2) & ~0xf)
6483 error ("argument 3 must be a 4-bit unsigned literal");
6489 || GET_MODE (target) != tmode
6490 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6491 target = gen_reg_rtx (tmode);
6493 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6494 op0 = copy_to_mode_reg (mode0, op0);
6495 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
6496 op1 = copy_to_mode_reg (mode1, op1);
6497 if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
6498 op2 = copy_to_mode_reg (mode2, op2);
6500 pat = GEN_FCN (icode) (target, op0, op1, op2);
6508 /* Expand the lvx builtins. */
6510 altivec_expand_ld_builtin (tree exp, rtx target, bool *expandedp)
6512 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6513 tree arglist = TREE_OPERAND (exp, 1);
6514 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
6516 enum machine_mode tmode, mode0;
6518 enum insn_code icode;
6522 case ALTIVEC_BUILTIN_LD_INTERNAL_16qi:
6523 icode = CODE_FOR_altivec_lvx_16qi;
6525 case ALTIVEC_BUILTIN_LD_INTERNAL_8hi:
6526 icode = CODE_FOR_altivec_lvx_8hi;
6528 case ALTIVEC_BUILTIN_LD_INTERNAL_4si:
6529 icode = CODE_FOR_altivec_lvx_4si;
6531 case ALTIVEC_BUILTIN_LD_INTERNAL_4sf:
6532 icode = CODE_FOR_altivec_lvx_4sf;
6541 arg0 = TREE_VALUE (arglist);
6542 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6543 tmode = insn_data[icode].operand[0].mode;
6544 mode0 = insn_data[icode].operand[1].mode;
6547 || GET_MODE (target) != tmode
6548 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6549 target = gen_reg_rtx (tmode);
6551 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6552 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
6554 pat = GEN_FCN (icode) (target, op0);
6561 /* Expand the stvx builtins. */
6563 altivec_expand_st_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
6566 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6567 tree arglist = TREE_OPERAND (exp, 1);
6568 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
6570 enum machine_mode mode0, mode1;
6572 enum insn_code icode;
6576 case ALTIVEC_BUILTIN_ST_INTERNAL_16qi:
6577 icode = CODE_FOR_altivec_stvx_16qi;
6579 case ALTIVEC_BUILTIN_ST_INTERNAL_8hi:
6580 icode = CODE_FOR_altivec_stvx_8hi;
6582 case ALTIVEC_BUILTIN_ST_INTERNAL_4si:
6583 icode = CODE_FOR_altivec_stvx_4si;
6585 case ALTIVEC_BUILTIN_ST_INTERNAL_4sf:
6586 icode = CODE_FOR_altivec_stvx_4sf;
6593 arg0 = TREE_VALUE (arglist);
6594 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
6595 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6596 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6597 mode0 = insn_data[icode].operand[0].mode;
6598 mode1 = insn_data[icode].operand[1].mode;
6600 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
6601 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
6602 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
6603 op1 = copy_to_mode_reg (mode1, op1);
6605 pat = GEN_FCN (icode) (op0, op1);
6613 /* Expand the dst builtins. */
6615 altivec_expand_dst_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
6618 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6619 tree arglist = TREE_OPERAND (exp, 1);
6620 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
6621 tree arg0, arg1, arg2;
6622 enum machine_mode mode0, mode1, mode2;
6623 rtx pat, op0, op1, op2;
6624 struct builtin_description *d;
6629 /* Handle DST variants. */
6630 d = (struct builtin_description *) bdesc_dst;
6631 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
6632 if (d->code == fcode)
6634 arg0 = TREE_VALUE (arglist);
6635 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
6636 arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6637 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6638 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6639 op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
6640 mode0 = insn_data[d->icode].operand[0].mode;
6641 mode1 = insn_data[d->icode].operand[1].mode;
6642 mode2 = insn_data[d->icode].operand[2].mode;
6644 /* Invalid arguments, bail out before generating bad rtl. */
6645 if (arg0 == error_mark_node
6646 || arg1 == error_mark_node
6647 || arg2 == error_mark_node)
6652 if (TREE_CODE (arg2) != INTEGER_CST
6653 || TREE_INT_CST_LOW (arg2) & ~0x3)
6655 error ("argument to `%s' must be a 2-bit unsigned literal", d->name);
6659 if (! (*insn_data[d->icode].operand[0].predicate) (op0, mode0))
6660 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
6661 if (! (*insn_data[d->icode].operand[1].predicate) (op1, mode1))
6662 op1 = copy_to_mode_reg (mode1, op1);
6664 pat = GEN_FCN (d->icode) (op0, op1, op2);
6674 /* Expand the builtin in EXP and store the result in TARGET. Store
6675 true in *EXPANDEDP if we found a builtin to expand. */
6677 altivec_expand_builtin (tree exp, rtx target, bool *expandedp)
6679 struct builtin_description *d;
6680 struct builtin_description_predicates *dp;
6682 enum insn_code icode;
6683 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6684 tree arglist = TREE_OPERAND (exp, 1);
6687 enum machine_mode tmode, mode0;
6688 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
6690 target = altivec_expand_ld_builtin (exp, target, expandedp);
6694 target = altivec_expand_st_builtin (exp, target, expandedp);
6698 target = altivec_expand_dst_builtin (exp, target, expandedp);
6706 case ALTIVEC_BUILTIN_STVX:
6707 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx, arglist);
6708 case ALTIVEC_BUILTIN_STVEBX:
6709 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx, arglist);
6710 case ALTIVEC_BUILTIN_STVEHX:
6711 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx, arglist);
6712 case ALTIVEC_BUILTIN_STVEWX:
6713 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvewx, arglist);
6714 case ALTIVEC_BUILTIN_STVXL:
6715 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvxl, arglist);
6717 case ALTIVEC_BUILTIN_MFVSCR:
6718 icode = CODE_FOR_altivec_mfvscr;
6719 tmode = insn_data[icode].operand[0].mode;
6722 || GET_MODE (target) != tmode
6723 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6724 target = gen_reg_rtx (tmode);
6726 pat = GEN_FCN (icode) (target);
6732 case ALTIVEC_BUILTIN_MTVSCR:
6733 icode = CODE_FOR_altivec_mtvscr;
6734 arg0 = TREE_VALUE (arglist);
6735 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6736 mode0 = insn_data[icode].operand[0].mode;
6738 /* If we got invalid arguments bail out before generating bad rtl. */
6739 if (arg0 == error_mark_node)
6742 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
6743 op0 = copy_to_mode_reg (mode0, op0);
6745 pat = GEN_FCN (icode) (op0);
6750 case ALTIVEC_BUILTIN_DSSALL:
6751 emit_insn (gen_altivec_dssall ());
6754 case ALTIVEC_BUILTIN_DSS:
6755 icode = CODE_FOR_altivec_dss;
6756 arg0 = TREE_VALUE (arglist);
6758 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6759 mode0 = insn_data[icode].operand[0].mode;
6761 /* If we got invalid arguments bail out before generating bad rtl. */
6762 if (arg0 == error_mark_node)
6765 if (TREE_CODE (arg0) != INTEGER_CST
6766 || TREE_INT_CST_LOW (arg0) & ~0x3)
6768 error ("argument to dss must be a 2-bit unsigned literal");
6772 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
6773 op0 = copy_to_mode_reg (mode0, op0);
6775 emit_insn (gen_altivec_dss (op0));
6778 case ALTIVEC_BUILTIN_COMPILETIME_ERROR:
6779 arg0 = TREE_VALUE (arglist);
6780 while (TREE_CODE (arg0) == NOP_EXPR || TREE_CODE (arg0) == ADDR_EXPR)
6781 arg0 = TREE_OPERAND (arg0, 0);
6782 error ("invalid parameter combination for `%s' AltiVec intrinsic",
6783 TREE_STRING_POINTER (arg0));
6788 /* Expand abs* operations. */
6789 d = (struct builtin_description *) bdesc_abs;
6790 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
6791 if (d->code == fcode)
6792 return altivec_expand_abs_builtin (d->icode, arglist, target);
6794 /* Expand the AltiVec predicates. */
6795 dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
6796 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
6797 if (dp->code == fcode)
6798 return altivec_expand_predicate_builtin (dp->icode, dp->opcode, arglist, target);
6800 /* LV* are funky. We initialized them differently. */
6803 case ALTIVEC_BUILTIN_LVSL:
6804 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvsl,
6806 case ALTIVEC_BUILTIN_LVSR:
6807 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvsr,
6809 case ALTIVEC_BUILTIN_LVEBX:
6810 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvebx,
6812 case ALTIVEC_BUILTIN_LVEHX:
6813 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvehx,
6815 case ALTIVEC_BUILTIN_LVEWX:
6816 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvewx,
6818 case ALTIVEC_BUILTIN_LVXL:
6819 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvxl,
6821 case ALTIVEC_BUILTIN_LVX:
6822 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvx,
6833 /* Binops that need to be initialized manually, but can be expanded
6834 automagically by rs6000_expand_binop_builtin. */
6835 static struct builtin_description bdesc_2arg_spe[] =
6837 { 0, CODE_FOR_spe_evlddx, "__builtin_spe_evlddx", SPE_BUILTIN_EVLDDX },
6838 { 0, CODE_FOR_spe_evldwx, "__builtin_spe_evldwx", SPE_BUILTIN_EVLDWX },
6839 { 0, CODE_FOR_spe_evldhx, "__builtin_spe_evldhx", SPE_BUILTIN_EVLDHX },
6840 { 0, CODE_FOR_spe_evlwhex, "__builtin_spe_evlwhex", SPE_BUILTIN_EVLWHEX },
6841 { 0, CODE_FOR_spe_evlwhoux, "__builtin_spe_evlwhoux", SPE_BUILTIN_EVLWHOUX },
6842 { 0, CODE_FOR_spe_evlwhosx, "__builtin_spe_evlwhosx", SPE_BUILTIN_EVLWHOSX },
6843 { 0, CODE_FOR_spe_evlwwsplatx, "__builtin_spe_evlwwsplatx", SPE_BUILTIN_EVLWWSPLATX },
6844 { 0, CODE_FOR_spe_evlwhsplatx, "__builtin_spe_evlwhsplatx", SPE_BUILTIN_EVLWHSPLATX },
6845 { 0, CODE_FOR_spe_evlhhesplatx, "__builtin_spe_evlhhesplatx", SPE_BUILTIN_EVLHHESPLATX },
6846 { 0, CODE_FOR_spe_evlhhousplatx, "__builtin_spe_evlhhousplatx", SPE_BUILTIN_EVLHHOUSPLATX },
6847 { 0, CODE_FOR_spe_evlhhossplatx, "__builtin_spe_evlhhossplatx", SPE_BUILTIN_EVLHHOSSPLATX },
6848 { 0, CODE_FOR_spe_evldd, "__builtin_spe_evldd", SPE_BUILTIN_EVLDD },
6849 { 0, CODE_FOR_spe_evldw, "__builtin_spe_evldw", SPE_BUILTIN_EVLDW },
6850 { 0, CODE_FOR_spe_evldh, "__builtin_spe_evldh", SPE_BUILTIN_EVLDH },
6851 { 0, CODE_FOR_spe_evlwhe, "__builtin_spe_evlwhe", SPE_BUILTIN_EVLWHE },
6852 { 0, CODE_FOR_spe_evlwhou, "__builtin_spe_evlwhou", SPE_BUILTIN_EVLWHOU },
6853 { 0, CODE_FOR_spe_evlwhos, "__builtin_spe_evlwhos", SPE_BUILTIN_EVLWHOS },
6854 { 0, CODE_FOR_spe_evlwwsplat, "__builtin_spe_evlwwsplat", SPE_BUILTIN_EVLWWSPLAT },
6855 { 0, CODE_FOR_spe_evlwhsplat, "__builtin_spe_evlwhsplat", SPE_BUILTIN_EVLWHSPLAT },
6856 { 0, CODE_FOR_spe_evlhhesplat, "__builtin_spe_evlhhesplat", SPE_BUILTIN_EVLHHESPLAT },
6857 { 0, CODE_FOR_spe_evlhhousplat, "__builtin_spe_evlhhousplat", SPE_BUILTIN_EVLHHOUSPLAT },
6858 { 0, CODE_FOR_spe_evlhhossplat, "__builtin_spe_evlhhossplat", SPE_BUILTIN_EVLHHOSSPLAT }
6861 /* Expand the builtin in EXP and store the result in TARGET. Store
6862 true in *EXPANDEDP if we found a builtin to expand.
6864 This expands the SPE builtins that are not simple unary and binary
6867 spe_expand_builtin (tree exp, rtx target, bool *expandedp)
6869 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6870 tree arglist = TREE_OPERAND (exp, 1);
6872 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
6873 enum insn_code icode;
6874 enum machine_mode tmode, mode0;
6876 struct builtin_description *d;
6881 /* Syntax check for a 5-bit unsigned immediate. */
6884 case SPE_BUILTIN_EVSTDD:
6885 case SPE_BUILTIN_EVSTDH:
6886 case SPE_BUILTIN_EVSTDW:
6887 case SPE_BUILTIN_EVSTWHE:
6888 case SPE_BUILTIN_EVSTWHO:
6889 case SPE_BUILTIN_EVSTWWE:
6890 case SPE_BUILTIN_EVSTWWO:
6891 arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6892 if (TREE_CODE (arg1) != INTEGER_CST
6893 || TREE_INT_CST_LOW (arg1) & ~0x1f)
6895 error ("argument 2 must be a 5-bit unsigned literal");
6903 /* The evsplat*i instructions are not quite generic. */
6906 case SPE_BUILTIN_EVSPLATFI:
6907 return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplatfi,
6909 case SPE_BUILTIN_EVSPLATI:
6910 return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplati,
6916 d = (struct builtin_description *) bdesc_2arg_spe;
6917 for (i = 0; i < ARRAY_SIZE (bdesc_2arg_spe); ++i, ++d)
6918 if (d->code == fcode)
6919 return rs6000_expand_binop_builtin (d->icode, arglist, target);
6921 d = (struct builtin_description *) bdesc_spe_predicates;
6922 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, ++d)
6923 if (d->code == fcode)
6924 return spe_expand_predicate_builtin (d->icode, arglist, target);
6926 d = (struct builtin_description *) bdesc_spe_evsel;
6927 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, ++d)
6928 if (d->code == fcode)
6929 return spe_expand_evsel_builtin (d->icode, arglist, target);
6933 case SPE_BUILTIN_EVSTDDX:
6934 return spe_expand_stv_builtin (CODE_FOR_spe_evstddx, arglist);
6935 case SPE_BUILTIN_EVSTDHX:
6936 return spe_expand_stv_builtin (CODE_FOR_spe_evstdhx, arglist);
6937 case SPE_BUILTIN_EVSTDWX:
6938 return spe_expand_stv_builtin (CODE_FOR_spe_evstdwx, arglist);
6939 case SPE_BUILTIN_EVSTWHEX:
6940 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhex, arglist);
6941 case SPE_BUILTIN_EVSTWHOX:
6942 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhox, arglist);
6943 case SPE_BUILTIN_EVSTWWEX:
6944 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwex, arglist);
6945 case SPE_BUILTIN_EVSTWWOX:
6946 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwox, arglist);
6947 case SPE_BUILTIN_EVSTDD:
6948 return spe_expand_stv_builtin (CODE_FOR_spe_evstdd, arglist);
6949 case SPE_BUILTIN_EVSTDH:
6950 return spe_expand_stv_builtin (CODE_FOR_spe_evstdh, arglist);
6951 case SPE_BUILTIN_EVSTDW:
6952 return spe_expand_stv_builtin (CODE_FOR_spe_evstdw, arglist);
6953 case SPE_BUILTIN_EVSTWHE:
6954 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhe, arglist);
6955 case SPE_BUILTIN_EVSTWHO:
6956 return spe_expand_stv_builtin (CODE_FOR_spe_evstwho, arglist);
6957 case SPE_BUILTIN_EVSTWWE:
6958 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwe, arglist);
6959 case SPE_BUILTIN_EVSTWWO:
6960 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwo, arglist);
6961 case SPE_BUILTIN_MFSPEFSCR:
6962 icode = CODE_FOR_spe_mfspefscr;
6963 tmode = insn_data[icode].operand[0].mode;
6966 || GET_MODE (target) != tmode
6967 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6968 target = gen_reg_rtx (tmode);
6970 pat = GEN_FCN (icode) (target);
6975 case SPE_BUILTIN_MTSPEFSCR:
6976 icode = CODE_FOR_spe_mtspefscr;
6977 arg0 = TREE_VALUE (arglist);
6978 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6979 mode0 = insn_data[icode].operand[0].mode;
6981 if (arg0 == error_mark_node)
6984 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
6985 op0 = copy_to_mode_reg (mode0, op0);
6987 pat = GEN_FCN (icode) (op0);
7000 spe_expand_predicate_builtin (enum insn_code icode, tree arglist, rtx target)
7002 rtx pat, scratch, tmp;
7003 tree form = TREE_VALUE (arglist);
7004 tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
7005 tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
7006 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
7007 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
7008 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7009 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
7013 if (TREE_CODE (form) != INTEGER_CST)
7015 error ("argument 1 of __builtin_spe_predicate must be a constant");
7019 form_int = TREE_INT_CST_LOW (form);
7024 if (arg0 == error_mark_node || arg1 == error_mark_node)
7028 || GET_MODE (target) != SImode
7029 || ! (*insn_data[icode].operand[0].predicate) (target, SImode))
7030 target = gen_reg_rtx (SImode);
7032 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7033 op0 = copy_to_mode_reg (mode0, op0);
7034 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
7035 op1 = copy_to_mode_reg (mode1, op1);
7037 scratch = gen_reg_rtx (CCmode);
7039 pat = GEN_FCN (icode) (scratch, op0, op1);
7044 /* There are 4 variants for each predicate: _any_, _all_, _upper_,
7045 _lower_. We use one compare, but look in different bits of the
7046 CR for each variant.
7048 There are 2 elements in each SPE simd type (upper/lower). The CR
7049 bits are set as follows:
7051 BIT0 | BIT 1 | BIT 2 | BIT 3
7052 U | L | (U | L) | (U & L)
7054 So, for an "all" relationship, BIT 3 would be set.
7055 For an "any" relationship, BIT 2 would be set. Etc.
7057 Following traditional nomenclature, these bits map to:
7059 BIT0 | BIT 1 | BIT 2 | BIT 3
7062 Later, we will generate rtl to look in the LT/EQ/EQ/OV bits.
7067 /* All variant. OV bit. */
7069 /* We need to get to the OV bit, which is the ORDERED bit. We
7070 could generate (ordered:SI (reg:CC xx) (const_int 0)), but
7071 that's ugly and will trigger a validate_condition_mode abort.
7072 So let's just use another pattern. */
7073 emit_insn (gen_move_from_CR_ov_bit (target, scratch));
7075 /* Any variant. EQ bit. */
7079 /* Upper variant. LT bit. */
7083 /* Lower variant. GT bit. */
7088 error ("argument 1 of __builtin_spe_predicate is out of range");
7092 tmp = gen_rtx_fmt_ee (code, SImode, scratch, const0_rtx);
7093 emit_move_insn (target, tmp);
7098 /* The evsel builtins look like this:
7100 e = __builtin_spe_evsel_OP (a, b, c, d);
7104 e[upper] = a[upper] *OP* b[upper] ? c[upper] : d[upper];
7105 e[lower] = a[lower] *OP* b[lower] ? c[lower] : d[lower];
7109 spe_expand_evsel_builtin (enum insn_code icode, tree arglist, rtx target)
7112 tree arg0 = TREE_VALUE (arglist);
7113 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
7114 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
7115 tree arg3 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arglist))));
7116 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
7117 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
7118 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
7119 rtx op3 = expand_expr (arg3, NULL_RTX, VOIDmode, 0);
7120 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7121 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
7126 if (arg0 == error_mark_node || arg1 == error_mark_node
7127 || arg2 == error_mark_node || arg3 == error_mark_node)
7131 || GET_MODE (target) != mode0
7132 || ! (*insn_data[icode].operand[0].predicate) (target, mode0))
7133 target = gen_reg_rtx (mode0);
7135 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7136 op0 = copy_to_mode_reg (mode0, op0);
7137 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
7138 op1 = copy_to_mode_reg (mode0, op1);
7139 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
7140 op2 = copy_to_mode_reg (mode0, op2);
7141 if (! (*insn_data[icode].operand[1].predicate) (op3, mode1))
7142 op3 = copy_to_mode_reg (mode0, op3);
7144 /* Generate the compare. */
7145 scratch = gen_reg_rtx (CCmode);
7146 pat = GEN_FCN (icode) (scratch, op0, op1);
7151 if (mode0 == V2SImode)
7152 emit_insn (gen_spe_evsel (target, op2, op3, scratch));
7154 emit_insn (gen_spe_evsel_fs (target, op2, op3, scratch));
7159 /* Expand an expression EXP that calls a built-in function,
7160 with result going to TARGET if that's convenient
7161 (and in mode MODE if that's convenient).
7162 SUBTARGET may be used as the target for computing one of EXP's operands.
7163 IGNORE is nonzero if the value is to be ignored. */
7166 rs6000_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
7167 enum machine_mode mode ATTRIBUTE_UNUSED,
7168 int ignore ATTRIBUTE_UNUSED)
7170 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7171 tree arglist = TREE_OPERAND (exp, 1);
7172 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
7173 struct builtin_description *d;
7180 ret = altivec_expand_builtin (exp, target, &success);
7187 ret = spe_expand_builtin (exp, target, &success);
7193 if (TARGET_ALTIVEC || TARGET_SPE)
7195 /* Handle simple unary operations. */
7196 d = (struct builtin_description *) bdesc_1arg;
7197 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
7198 if (d->code == fcode)
7199 return rs6000_expand_unop_builtin (d->icode, arglist, target);
7201 /* Handle simple binary operations. */
7202 d = (struct builtin_description *) bdesc_2arg;
7203 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
7204 if (d->code == fcode)
7205 return rs6000_expand_binop_builtin (d->icode, arglist, target);
7207 /* Handle simple ternary operations. */
7208 d = (struct builtin_description *) bdesc_3arg;
7209 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
7210 if (d->code == fcode)
7211 return rs6000_expand_ternop_builtin (d->icode, arglist, target);
7219 build_opaque_vector_type (tree node, int nunits)
7221 node = copy_node (node);
7222 TYPE_MAIN_VARIANT (node) = node;
7223 return build_vector_type (node, nunits);
7227 rs6000_init_builtins (void)
7229 V2SI_type_node = build_vector_type (intSI_type_node, 2);
7230 V2SF_type_node = build_vector_type (float_type_node, 2);
7231 V4HI_type_node = build_vector_type (intHI_type_node, 4);
7232 V4SI_type_node = build_vector_type (intSI_type_node, 4);
7233 V4SF_type_node = build_vector_type (float_type_node, 4);
7234 V8HI_type_node = build_vector_type (intHI_type_node, 8);
7235 V16QI_type_node = build_vector_type (intQI_type_node, 16);
7237 unsigned_V16QI_type_node = build_vector_type (unsigned_intQI_type_node, 16);
7238 unsigned_V8HI_type_node = build_vector_type (unsigned_intHI_type_node, 8);
7239 unsigned_V4SI_type_node = build_vector_type (unsigned_intSI_type_node, 4);
7241 opaque_V2SF_type_node = build_opaque_vector_type (float_type_node, 2);
7242 opaque_V2SI_type_node = build_opaque_vector_type (intSI_type_node, 2);
7243 opaque_p_V2SI_type_node = build_pointer_type (opaque_V2SI_type_node);
7245 /* The 'vector bool ...' types must be kept distinct from 'vector unsigned ...'
7246 types, especially in C++ land. Similarly, 'vector pixel' is distinct from
7247 'vector unsigned short'. */
7249 bool_char_type_node = copy_node (unsigned_intQI_type_node);
7250 TYPE_MAIN_VARIANT (bool_char_type_node) = bool_char_type_node;
7251 bool_short_type_node = copy_node (unsigned_intHI_type_node);
7252 TYPE_MAIN_VARIANT (bool_short_type_node) = bool_short_type_node;
7253 bool_int_type_node = copy_node (unsigned_intSI_type_node);
7254 TYPE_MAIN_VARIANT (bool_int_type_node) = bool_int_type_node;
7255 pixel_type_node = copy_node (unsigned_intHI_type_node);
7256 TYPE_MAIN_VARIANT (pixel_type_node) = pixel_type_node;
7258 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7259 get_identifier ("__bool char"),
7260 bool_char_type_node));
7261 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7262 get_identifier ("__bool short"),
7263 bool_short_type_node));
7264 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7265 get_identifier ("__bool int"),
7266 bool_int_type_node));
7267 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7268 get_identifier ("__pixel"),
7271 bool_V16QI_type_node = build_vector_type (bool_char_type_node, 16);
7272 bool_V8HI_type_node = build_vector_type (bool_short_type_node, 8);
7273 bool_V4SI_type_node = build_vector_type (bool_int_type_node, 4);
7274 pixel_V8HI_type_node = build_vector_type (pixel_type_node, 8);
7276 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7277 get_identifier ("__vector unsigned char"),
7278 unsigned_V16QI_type_node));
7279 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7280 get_identifier ("__vector signed char"),
7282 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7283 get_identifier ("__vector __bool char"),
7284 bool_V16QI_type_node));
7286 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7287 get_identifier ("__vector unsigned short"),
7288 unsigned_V8HI_type_node));
7289 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7290 get_identifier ("__vector signed short"),
7292 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7293 get_identifier ("__vector __bool short"),
7294 bool_V8HI_type_node));
7296 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7297 get_identifier ("__vector unsigned int"),
7298 unsigned_V4SI_type_node));
7299 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7300 get_identifier ("__vector signed int"),
7302 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7303 get_identifier ("__vector __bool int"),
7304 bool_V4SI_type_node));
7306 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7307 get_identifier ("__vector float"),
7309 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7310 get_identifier ("__vector __pixel"),
7311 pixel_V8HI_type_node));
7314 spe_init_builtins ();
7316 altivec_init_builtins ();
7317 if (TARGET_ALTIVEC || TARGET_SPE)
7318 rs6000_common_init_builtins ();
7321 /* Search through a set of builtins and enable the mask bits.
7322 DESC is an array of builtins.
7323 SIZE is the total number of builtins.
7324 START is the builtin enum at which to start.
7325 END is the builtin enum at which to end. */
7327 enable_mask_for_builtins (struct builtin_description *desc, int size,
7328 enum rs6000_builtins start,
7329 enum rs6000_builtins end)
7333 for (i = 0; i < size; ++i)
7334 if (desc[i].code == start)
7340 for (; i < size; ++i)
7342 /* Flip all the bits on. */
7343 desc[i].mask = target_flags;
7344 if (desc[i].code == end)
7350 spe_init_builtins (void)
7352 tree endlink = void_list_node;
7353 tree puint_type_node = build_pointer_type (unsigned_type_node);
7354 tree pushort_type_node = build_pointer_type (short_unsigned_type_node);
7355 struct builtin_description *d;
7358 tree v2si_ftype_4_v2si
7359 = build_function_type
7360 (opaque_V2SI_type_node,
7361 tree_cons (NULL_TREE, opaque_V2SI_type_node,
7362 tree_cons (NULL_TREE, opaque_V2SI_type_node,
7363 tree_cons (NULL_TREE, opaque_V2SI_type_node,
7364 tree_cons (NULL_TREE, opaque_V2SI_type_node,
7367 tree v2sf_ftype_4_v2sf
7368 = build_function_type
7369 (opaque_V2SF_type_node,
7370 tree_cons (NULL_TREE, opaque_V2SF_type_node,
7371 tree_cons (NULL_TREE, opaque_V2SF_type_node,
7372 tree_cons (NULL_TREE, opaque_V2SF_type_node,
7373 tree_cons (NULL_TREE, opaque_V2SF_type_node,
7376 tree int_ftype_int_v2si_v2si
7377 = build_function_type
7379 tree_cons (NULL_TREE, integer_type_node,
7380 tree_cons (NULL_TREE, opaque_V2SI_type_node,
7381 tree_cons (NULL_TREE, opaque_V2SI_type_node,
7384 tree int_ftype_int_v2sf_v2sf
7385 = build_function_type
7387 tree_cons (NULL_TREE, integer_type_node,
7388 tree_cons (NULL_TREE, opaque_V2SF_type_node,
7389 tree_cons (NULL_TREE, opaque_V2SF_type_node,
7392 tree void_ftype_v2si_puint_int
7393 = build_function_type (void_type_node,
7394 tree_cons (NULL_TREE, opaque_V2SI_type_node,
7395 tree_cons (NULL_TREE, puint_type_node,
7396 tree_cons (NULL_TREE,
7400 tree void_ftype_v2si_puint_char
7401 = build_function_type (void_type_node,
7402 tree_cons (NULL_TREE, opaque_V2SI_type_node,
7403 tree_cons (NULL_TREE, puint_type_node,
7404 tree_cons (NULL_TREE,
7408 tree void_ftype_v2si_pv2si_int
7409 = build_function_type (void_type_node,
7410 tree_cons (NULL_TREE, opaque_V2SI_type_node,
7411 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
7412 tree_cons (NULL_TREE,
7416 tree void_ftype_v2si_pv2si_char
7417 = build_function_type (void_type_node,
7418 tree_cons (NULL_TREE, opaque_V2SI_type_node,
7419 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
7420 tree_cons (NULL_TREE,
7425 = build_function_type (void_type_node,
7426 tree_cons (NULL_TREE, integer_type_node, endlink));
7429 = build_function_type (integer_type_node, endlink);
7431 tree v2si_ftype_pv2si_int
7432 = build_function_type (opaque_V2SI_type_node,
7433 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
7434 tree_cons (NULL_TREE, integer_type_node,
7437 tree v2si_ftype_puint_int
7438 = build_function_type (opaque_V2SI_type_node,
7439 tree_cons (NULL_TREE, puint_type_node,
7440 tree_cons (NULL_TREE, integer_type_node,
7443 tree v2si_ftype_pushort_int
7444 = build_function_type (opaque_V2SI_type_node,
7445 tree_cons (NULL_TREE, pushort_type_node,
7446 tree_cons (NULL_TREE, integer_type_node,
7449 tree v2si_ftype_signed_char
7450 = build_function_type (opaque_V2SI_type_node,
7451 tree_cons (NULL_TREE, signed_char_type_node,
7454 /* The initialization of the simple binary and unary builtins is
7455 done in rs6000_common_init_builtins, but we have to enable the
7456 mask bits here manually because we have run out of `target_flags'
7457 bits. We really need to redesign this mask business. */
7459 enable_mask_for_builtins ((struct builtin_description *) bdesc_2arg,
7460 ARRAY_SIZE (bdesc_2arg),
7463 enable_mask_for_builtins ((struct builtin_description *) bdesc_1arg,
7464 ARRAY_SIZE (bdesc_1arg),
7466 SPE_BUILTIN_EVSUBFUSIAAW);
7467 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_predicates,
7468 ARRAY_SIZE (bdesc_spe_predicates),
7469 SPE_BUILTIN_EVCMPEQ,
7470 SPE_BUILTIN_EVFSTSTLT);
7471 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_evsel,
7472 ARRAY_SIZE (bdesc_spe_evsel),
7473 SPE_BUILTIN_EVSEL_CMPGTS,
7474 SPE_BUILTIN_EVSEL_FSTSTEQ);
7476 (*lang_hooks.decls.pushdecl)
7477 (build_decl (TYPE_DECL, get_identifier ("__ev64_opaque__"),
7478 opaque_V2SI_type_node));
7480 /* Initialize irregular SPE builtins. */
7482 def_builtin (target_flags, "__builtin_spe_mtspefscr", void_ftype_int, SPE_BUILTIN_MTSPEFSCR);
7483 def_builtin (target_flags, "__builtin_spe_mfspefscr", int_ftype_void, SPE_BUILTIN_MFSPEFSCR);
7484 def_builtin (target_flags, "__builtin_spe_evstddx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDDX);
7485 def_builtin (target_flags, "__builtin_spe_evstdhx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDHX);
7486 def_builtin (target_flags, "__builtin_spe_evstdwx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDWX);
7487 def_builtin (target_flags, "__builtin_spe_evstwhex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHEX);
7488 def_builtin (target_flags, "__builtin_spe_evstwhox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHOX);
7489 def_builtin (target_flags, "__builtin_spe_evstwwex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWEX);
7490 def_builtin (target_flags, "__builtin_spe_evstwwox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWOX);
7491 def_builtin (target_flags, "__builtin_spe_evstdd", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDD);
7492 def_builtin (target_flags, "__builtin_spe_evstdh", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDH);
7493 def_builtin (target_flags, "__builtin_spe_evstdw", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDW);
7494 def_builtin (target_flags, "__builtin_spe_evstwhe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHE);
7495 def_builtin (target_flags, "__builtin_spe_evstwho", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHO);
7496 def_builtin (target_flags, "__builtin_spe_evstwwe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWE);
7497 def_builtin (target_flags, "__builtin_spe_evstwwo", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWO);
7498 def_builtin (target_flags, "__builtin_spe_evsplatfi", v2si_ftype_signed_char, SPE_BUILTIN_EVSPLATFI);
7499 def_builtin (target_flags, "__builtin_spe_evsplati", v2si_ftype_signed_char, SPE_BUILTIN_EVSPLATI);
7502 def_builtin (target_flags, "__builtin_spe_evlddx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDDX);
7503 def_builtin (target_flags, "__builtin_spe_evldwx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDWX);
7504 def_builtin (target_flags, "__builtin_spe_evldhx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDHX);
7505 def_builtin (target_flags, "__builtin_spe_evlwhex", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHEX);
7506 def_builtin (target_flags, "__builtin_spe_evlwhoux", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOUX);
7507 def_builtin (target_flags, "__builtin_spe_evlwhosx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOSX);
7508 def_builtin (target_flags, "__builtin_spe_evlwwsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLATX);
7509 def_builtin (target_flags, "__builtin_spe_evlwhsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLATX);
7510 def_builtin (target_flags, "__builtin_spe_evlhhesplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLATX);
7511 def_builtin (target_flags, "__builtin_spe_evlhhousplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLATX);
7512 def_builtin (target_flags, "__builtin_spe_evlhhossplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLATX);
7513 def_builtin (target_flags, "__builtin_spe_evldd", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDD);
7514 def_builtin (target_flags, "__builtin_spe_evldw", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDW);
7515 def_builtin (target_flags, "__builtin_spe_evldh", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDH);
7516 def_builtin (target_flags, "__builtin_spe_evlhhesplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLAT);
7517 def_builtin (target_flags, "__builtin_spe_evlhhossplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLAT);
7518 def_builtin (target_flags, "__builtin_spe_evlhhousplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLAT);
7519 def_builtin (target_flags, "__builtin_spe_evlwhe", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHE);
7520 def_builtin (target_flags, "__builtin_spe_evlwhos", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOS);
7521 def_builtin (target_flags, "__builtin_spe_evlwhou", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOU);
7522 def_builtin (target_flags, "__builtin_spe_evlwhsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLAT);
7523 def_builtin (target_flags, "__builtin_spe_evlwwsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLAT);
7526 d = (struct builtin_description *) bdesc_spe_predicates;
7527 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, d++)
7531 switch (insn_data[d->icode].operand[1].mode)
7534 type = int_ftype_int_v2si_v2si;
7537 type = int_ftype_int_v2sf_v2sf;
7543 def_builtin (d->mask, d->name, type, d->code);
7546 /* Evsel predicates. */
7547 d = (struct builtin_description *) bdesc_spe_evsel;
7548 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, d++)
7552 switch (insn_data[d->icode].operand[1].mode)
7555 type = v2si_ftype_4_v2si;
7558 type = v2sf_ftype_4_v2sf;
7564 def_builtin (d->mask, d->name, type, d->code);
7569 altivec_init_builtins (void)
7571 struct builtin_description *d;
7572 struct builtin_description_predicates *dp;
7574 tree pfloat_type_node = build_pointer_type (float_type_node);
7575 tree pint_type_node = build_pointer_type (integer_type_node);
7576 tree pshort_type_node = build_pointer_type (short_integer_type_node);
7577 tree pchar_type_node = build_pointer_type (char_type_node);
7579 tree pvoid_type_node = build_pointer_type (void_type_node);
7581 tree pcfloat_type_node = build_pointer_type (build_qualified_type (float_type_node, TYPE_QUAL_CONST));
7582 tree pcint_type_node = build_pointer_type (build_qualified_type (integer_type_node, TYPE_QUAL_CONST));
7583 tree pcshort_type_node = build_pointer_type (build_qualified_type (short_integer_type_node, TYPE_QUAL_CONST));
7584 tree pcchar_type_node = build_pointer_type (build_qualified_type (char_type_node, TYPE_QUAL_CONST));
7586 tree pcvoid_type_node = build_pointer_type (build_qualified_type (void_type_node, TYPE_QUAL_CONST));
7588 tree int_ftype_int_v4si_v4si
7589 = build_function_type_list (integer_type_node,
7590 integer_type_node, V4SI_type_node,
7591 V4SI_type_node, NULL_TREE);
7592 tree v4sf_ftype_pcfloat
7593 = build_function_type_list (V4SF_type_node, pcfloat_type_node, NULL_TREE);
7594 tree void_ftype_pfloat_v4sf
7595 = build_function_type_list (void_type_node,
7596 pfloat_type_node, V4SF_type_node, NULL_TREE);
7597 tree v4si_ftype_pcint
7598 = build_function_type_list (V4SI_type_node, pcint_type_node, NULL_TREE);
7599 tree void_ftype_pint_v4si
7600 = build_function_type_list (void_type_node,
7601 pint_type_node, V4SI_type_node, NULL_TREE);
7602 tree v8hi_ftype_pcshort
7603 = build_function_type_list (V8HI_type_node, pcshort_type_node, NULL_TREE);
7604 tree void_ftype_pshort_v8hi
7605 = build_function_type_list (void_type_node,
7606 pshort_type_node, V8HI_type_node, NULL_TREE);
7607 tree v16qi_ftype_pcchar
7608 = build_function_type_list (V16QI_type_node, pcchar_type_node, NULL_TREE);
7609 tree void_ftype_pchar_v16qi
7610 = build_function_type_list (void_type_node,
7611 pchar_type_node, V16QI_type_node, NULL_TREE);
7612 tree void_ftype_v4si
7613 = build_function_type_list (void_type_node, V4SI_type_node, NULL_TREE);
7614 tree v8hi_ftype_void
7615 = build_function_type (V8HI_type_node, void_list_node);
7616 tree void_ftype_void
7617 = build_function_type (void_type_node, void_list_node);
7619 = build_function_type_list (void_type_node, integer_type_node, NULL_TREE);
7621 tree v16qi_ftype_long_pcvoid
7622 = build_function_type_list (V16QI_type_node,
7623 long_integer_type_node, pcvoid_type_node, NULL_TREE);
7624 tree v8hi_ftype_long_pcvoid
7625 = build_function_type_list (V8HI_type_node,
7626 long_integer_type_node, pcvoid_type_node, NULL_TREE);
7627 tree v4si_ftype_long_pcvoid
7628 = build_function_type_list (V4SI_type_node,
7629 long_integer_type_node, pcvoid_type_node, NULL_TREE);
7631 tree void_ftype_v4si_long_pvoid
7632 = build_function_type_list (void_type_node,
7633 V4SI_type_node, long_integer_type_node,
7634 pvoid_type_node, NULL_TREE);
7635 tree void_ftype_v16qi_long_pvoid
7636 = build_function_type_list (void_type_node,
7637 V16QI_type_node, long_integer_type_node,
7638 pvoid_type_node, NULL_TREE);
7639 tree void_ftype_v8hi_long_pvoid
7640 = build_function_type_list (void_type_node,
7641 V8HI_type_node, long_integer_type_node,
7642 pvoid_type_node, NULL_TREE);
7643 tree int_ftype_int_v8hi_v8hi
7644 = build_function_type_list (integer_type_node,
7645 integer_type_node, V8HI_type_node,
7646 V8HI_type_node, NULL_TREE);
7647 tree int_ftype_int_v16qi_v16qi
7648 = build_function_type_list (integer_type_node,
7649 integer_type_node, V16QI_type_node,
7650 V16QI_type_node, NULL_TREE);
7651 tree int_ftype_int_v4sf_v4sf
7652 = build_function_type_list (integer_type_node,
7653 integer_type_node, V4SF_type_node,
7654 V4SF_type_node, NULL_TREE);
7655 tree v4si_ftype_v4si
7656 = build_function_type_list (V4SI_type_node, V4SI_type_node, NULL_TREE);
7657 tree v8hi_ftype_v8hi
7658 = build_function_type_list (V8HI_type_node, V8HI_type_node, NULL_TREE);
7659 tree v16qi_ftype_v16qi
7660 = build_function_type_list (V16QI_type_node, V16QI_type_node, NULL_TREE);
7661 tree v4sf_ftype_v4sf
7662 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
7663 tree void_ftype_pcvoid_int_int
7664 = build_function_type_list (void_type_node,
7665 pcvoid_type_node, integer_type_node,
7666 integer_type_node, NULL_TREE);
7667 tree int_ftype_pcchar
7668 = build_function_type_list (integer_type_node,
7669 pcchar_type_node, NULL_TREE);
7671 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pcfloat,
7672 ALTIVEC_BUILTIN_LD_INTERNAL_4sf);
7673 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf,
7674 ALTIVEC_BUILTIN_ST_INTERNAL_4sf);
7675 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4si", v4si_ftype_pcint,
7676 ALTIVEC_BUILTIN_LD_INTERNAL_4si);
7677 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si,
7678 ALTIVEC_BUILTIN_ST_INTERNAL_4si);
7679 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pcshort,
7680 ALTIVEC_BUILTIN_LD_INTERNAL_8hi);
7681 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi,
7682 ALTIVEC_BUILTIN_ST_INTERNAL_8hi);
7683 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pcchar,
7684 ALTIVEC_BUILTIN_LD_INTERNAL_16qi);
7685 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi,
7686 ALTIVEC_BUILTIN_ST_INTERNAL_16qi);
7687 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mtvscr", void_ftype_v4si, ALTIVEC_BUILTIN_MTVSCR);
7688 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mfvscr", v8hi_ftype_void, ALTIVEC_BUILTIN_MFVSCR);
7689 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dssall", void_ftype_void, ALTIVEC_BUILTIN_DSSALL);
7690 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dss", void_ftype_int, ALTIVEC_BUILTIN_DSS);
7691 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVSL);
7692 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsr", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVSR);
7693 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvebx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEBX);
7694 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvehx", v8hi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEHX);
7695 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvewx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEWX);
7696 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvxl", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVXL);
7697 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVX);
7698 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvx", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVX);
7699 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvewx", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVEWX);
7700 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvxl", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVXL);
7701 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvebx", void_ftype_v16qi_long_pvoid, ALTIVEC_BUILTIN_STVEBX);
7702 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvehx", void_ftype_v8hi_long_pvoid, ALTIVEC_BUILTIN_STVEHX);
7704 /* See altivec.h for usage of "__builtin_altivec_compiletime_error". */
7705 def_builtin (MASK_ALTIVEC, "__builtin_altivec_compiletime_error", int_ftype_pcchar,
7706 ALTIVEC_BUILTIN_COMPILETIME_ERROR);
7708 /* Add the DST variants. */
7709 d = (struct builtin_description *) bdesc_dst;
7710 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
7711 def_builtin (d->mask, d->name, void_ftype_pcvoid_int_int, d->code);
7713 /* Initialize the predicates. */
7714 dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
7715 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
7717 enum machine_mode mode1;
7720 mode1 = insn_data[dp->icode].operand[1].mode;
7725 type = int_ftype_int_v4si_v4si;
7728 type = int_ftype_int_v8hi_v8hi;
7731 type = int_ftype_int_v16qi_v16qi;
7734 type = int_ftype_int_v4sf_v4sf;
7740 def_builtin (dp->mask, dp->name, type, dp->code);
7743 /* Initialize the abs* operators. */
7744 d = (struct builtin_description *) bdesc_abs;
7745 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
7747 enum machine_mode mode0;
7750 mode0 = insn_data[d->icode].operand[0].mode;
7755 type = v4si_ftype_v4si;
7758 type = v8hi_ftype_v8hi;
7761 type = v16qi_ftype_v16qi;
7764 type = v4sf_ftype_v4sf;
7770 def_builtin (d->mask, d->name, type, d->code);
7775 rs6000_common_init_builtins (void)
7777 struct builtin_description *d;
7780 tree v4sf_ftype_v4sf_v4sf_v16qi
7781 = build_function_type_list (V4SF_type_node,
7782 V4SF_type_node, V4SF_type_node,
7783 V16QI_type_node, NULL_TREE);
7784 tree v4si_ftype_v4si_v4si_v16qi
7785 = build_function_type_list (V4SI_type_node,
7786 V4SI_type_node, V4SI_type_node,
7787 V16QI_type_node, NULL_TREE);
7788 tree v8hi_ftype_v8hi_v8hi_v16qi
7789 = build_function_type_list (V8HI_type_node,
7790 V8HI_type_node, V8HI_type_node,
7791 V16QI_type_node, NULL_TREE);
7792 tree v16qi_ftype_v16qi_v16qi_v16qi
7793 = build_function_type_list (V16QI_type_node,
7794 V16QI_type_node, V16QI_type_node,
7795 V16QI_type_node, NULL_TREE);
7797 = build_function_type_list (V4SI_type_node, integer_type_node, NULL_TREE);
7799 = build_function_type_list (V8HI_type_node, integer_type_node, NULL_TREE);
7800 tree v16qi_ftype_int
7801 = build_function_type_list (V16QI_type_node, integer_type_node, NULL_TREE);
7802 tree v8hi_ftype_v16qi
7803 = build_function_type_list (V8HI_type_node, V16QI_type_node, NULL_TREE);
7804 tree v4sf_ftype_v4sf
7805 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
7807 tree v2si_ftype_v2si_v2si
7808 = build_function_type_list (opaque_V2SI_type_node,
7809 opaque_V2SI_type_node,
7810 opaque_V2SI_type_node, NULL_TREE);
7812 tree v2sf_ftype_v2sf_v2sf
7813 = build_function_type_list (opaque_V2SF_type_node,
7814 opaque_V2SF_type_node,
7815 opaque_V2SF_type_node, NULL_TREE);
7817 tree v2si_ftype_int_int
7818 = build_function_type_list (opaque_V2SI_type_node,
7819 integer_type_node, integer_type_node,
7822 tree v2si_ftype_v2si
7823 = build_function_type_list (opaque_V2SI_type_node,
7824 opaque_V2SI_type_node, NULL_TREE);
7826 tree v2sf_ftype_v2sf
7827 = build_function_type_list (opaque_V2SF_type_node,
7828 opaque_V2SF_type_node, NULL_TREE);
7830 tree v2sf_ftype_v2si
7831 = build_function_type_list (opaque_V2SF_type_node,
7832 opaque_V2SI_type_node, NULL_TREE);
7834 tree v2si_ftype_v2sf
7835 = build_function_type_list (opaque_V2SI_type_node,
7836 opaque_V2SF_type_node, NULL_TREE);
7838 tree v2si_ftype_v2si_char
7839 = build_function_type_list (opaque_V2SI_type_node,
7840 opaque_V2SI_type_node,
7841 char_type_node, NULL_TREE);
7843 tree v2si_ftype_int_char
7844 = build_function_type_list (opaque_V2SI_type_node,
7845 integer_type_node, char_type_node, NULL_TREE);
7847 tree v2si_ftype_char
7848 = build_function_type_list (opaque_V2SI_type_node,
7849 char_type_node, NULL_TREE);
7851 tree int_ftype_int_int
7852 = build_function_type_list (integer_type_node,
7853 integer_type_node, integer_type_node,
7856 tree v4si_ftype_v4si_v4si
7857 = build_function_type_list (V4SI_type_node,
7858 V4SI_type_node, V4SI_type_node, NULL_TREE);
7859 tree v4sf_ftype_v4si_int
7860 = build_function_type_list (V4SF_type_node,
7861 V4SI_type_node, integer_type_node, NULL_TREE);
7862 tree v4si_ftype_v4sf_int
7863 = build_function_type_list (V4SI_type_node,
7864 V4SF_type_node, integer_type_node, NULL_TREE);
7865 tree v4si_ftype_v4si_int
7866 = build_function_type_list (V4SI_type_node,
7867 V4SI_type_node, integer_type_node, NULL_TREE);
7868 tree v8hi_ftype_v8hi_int
7869 = build_function_type_list (V8HI_type_node,
7870 V8HI_type_node, integer_type_node, NULL_TREE);
7871 tree v16qi_ftype_v16qi_int
7872 = build_function_type_list (V16QI_type_node,
7873 V16QI_type_node, integer_type_node, NULL_TREE);
7874 tree v16qi_ftype_v16qi_v16qi_int
7875 = build_function_type_list (V16QI_type_node,
7876 V16QI_type_node, V16QI_type_node,
7877 integer_type_node, NULL_TREE);
7878 tree v8hi_ftype_v8hi_v8hi_int
7879 = build_function_type_list (V8HI_type_node,
7880 V8HI_type_node, V8HI_type_node,
7881 integer_type_node, NULL_TREE);
7882 tree v4si_ftype_v4si_v4si_int
7883 = build_function_type_list (V4SI_type_node,
7884 V4SI_type_node, V4SI_type_node,
7885 integer_type_node, NULL_TREE);
7886 tree v4sf_ftype_v4sf_v4sf_int
7887 = build_function_type_list (V4SF_type_node,
7888 V4SF_type_node, V4SF_type_node,
7889 integer_type_node, NULL_TREE);
7890 tree v4sf_ftype_v4sf_v4sf
7891 = build_function_type_list (V4SF_type_node,
7892 V4SF_type_node, V4SF_type_node, NULL_TREE);
7893 tree v4sf_ftype_v4sf_v4sf_v4si
7894 = build_function_type_list (V4SF_type_node,
7895 V4SF_type_node, V4SF_type_node,
7896 V4SI_type_node, NULL_TREE);
7897 tree v4sf_ftype_v4sf_v4sf_v4sf
7898 = build_function_type_list (V4SF_type_node,
7899 V4SF_type_node, V4SF_type_node,
7900 V4SF_type_node, NULL_TREE);
7901 tree v4si_ftype_v4si_v4si_v4si
7902 = build_function_type_list (V4SI_type_node,
7903 V4SI_type_node, V4SI_type_node,
7904 V4SI_type_node, NULL_TREE);
7905 tree v8hi_ftype_v8hi_v8hi
7906 = build_function_type_list (V8HI_type_node,
7907 V8HI_type_node, V8HI_type_node, NULL_TREE);
7908 tree v8hi_ftype_v8hi_v8hi_v8hi
7909 = build_function_type_list (V8HI_type_node,
7910 V8HI_type_node, V8HI_type_node,
7911 V8HI_type_node, NULL_TREE);
7912 tree v4si_ftype_v8hi_v8hi_v4si
7913 = build_function_type_list (V4SI_type_node,
7914 V8HI_type_node, V8HI_type_node,
7915 V4SI_type_node, NULL_TREE);
7916 tree v4si_ftype_v16qi_v16qi_v4si
7917 = build_function_type_list (V4SI_type_node,
7918 V16QI_type_node, V16QI_type_node,
7919 V4SI_type_node, NULL_TREE);
7920 tree v16qi_ftype_v16qi_v16qi
7921 = build_function_type_list (V16QI_type_node,
7922 V16QI_type_node, V16QI_type_node, NULL_TREE);
7923 tree v4si_ftype_v4sf_v4sf
7924 = build_function_type_list (V4SI_type_node,
7925 V4SF_type_node, V4SF_type_node, NULL_TREE);
7926 tree v8hi_ftype_v16qi_v16qi
7927 = build_function_type_list (V8HI_type_node,
7928 V16QI_type_node, V16QI_type_node, NULL_TREE);
7929 tree v4si_ftype_v8hi_v8hi
7930 = build_function_type_list (V4SI_type_node,
7931 V8HI_type_node, V8HI_type_node, NULL_TREE);
7932 tree v8hi_ftype_v4si_v4si
7933 = build_function_type_list (V8HI_type_node,
7934 V4SI_type_node, V4SI_type_node, NULL_TREE);
7935 tree v16qi_ftype_v8hi_v8hi
7936 = build_function_type_list (V16QI_type_node,
7937 V8HI_type_node, V8HI_type_node, NULL_TREE);
7938 tree v4si_ftype_v16qi_v4si
7939 = build_function_type_list (V4SI_type_node,
7940 V16QI_type_node, V4SI_type_node, NULL_TREE);
7941 tree v4si_ftype_v16qi_v16qi
7942 = build_function_type_list (V4SI_type_node,
7943 V16QI_type_node, V16QI_type_node, NULL_TREE);
7944 tree v4si_ftype_v8hi_v4si
7945 = build_function_type_list (V4SI_type_node,
7946 V8HI_type_node, V4SI_type_node, NULL_TREE);
7947 tree v4si_ftype_v8hi
7948 = build_function_type_list (V4SI_type_node, V8HI_type_node, NULL_TREE);
7949 tree int_ftype_v4si_v4si
7950 = build_function_type_list (integer_type_node,
7951 V4SI_type_node, V4SI_type_node, NULL_TREE);
7952 tree int_ftype_v4sf_v4sf
7953 = build_function_type_list (integer_type_node,
7954 V4SF_type_node, V4SF_type_node, NULL_TREE);
7955 tree int_ftype_v16qi_v16qi
7956 = build_function_type_list (integer_type_node,
7957 V16QI_type_node, V16QI_type_node, NULL_TREE);
7958 tree int_ftype_v8hi_v8hi
7959 = build_function_type_list (integer_type_node,
7960 V8HI_type_node, V8HI_type_node, NULL_TREE);
7962 /* Add the simple ternary operators. */
7963 d = (struct builtin_description *) bdesc_3arg;
7964 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
7967 enum machine_mode mode0, mode1, mode2, mode3;
7970 if (d->name == 0 || d->icode == CODE_FOR_nothing)
7973 mode0 = insn_data[d->icode].operand[0].mode;
7974 mode1 = insn_data[d->icode].operand[1].mode;
7975 mode2 = insn_data[d->icode].operand[2].mode;
7976 mode3 = insn_data[d->icode].operand[3].mode;
7978 /* When all four are of the same mode. */
7979 if (mode0 == mode1 && mode1 == mode2 && mode2 == mode3)
7984 type = v4si_ftype_v4si_v4si_v4si;
7987 type = v4sf_ftype_v4sf_v4sf_v4sf;
7990 type = v8hi_ftype_v8hi_v8hi_v8hi;
7993 type = v16qi_ftype_v16qi_v16qi_v16qi;
7999 else if (mode0 == mode1 && mode1 == mode2 && mode3 == V16QImode)
8004 type = v4si_ftype_v4si_v4si_v16qi;
8007 type = v4sf_ftype_v4sf_v4sf_v16qi;
8010 type = v8hi_ftype_v8hi_v8hi_v16qi;
8013 type = v16qi_ftype_v16qi_v16qi_v16qi;
8019 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode
8020 && mode3 == V4SImode)
8021 type = v4si_ftype_v16qi_v16qi_v4si;
8022 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode
8023 && mode3 == V4SImode)
8024 type = v4si_ftype_v8hi_v8hi_v4si;
8025 else if (mode0 == V4SFmode && mode1 == V4SFmode && mode2 == V4SFmode
8026 && mode3 == V4SImode)
8027 type = v4sf_ftype_v4sf_v4sf_v4si;
8029 /* vchar, vchar, vchar, 4 bit literal. */
8030 else if (mode0 == V16QImode && mode1 == mode0 && mode2 == mode0
8032 type = v16qi_ftype_v16qi_v16qi_int;
8034 /* vshort, vshort, vshort, 4 bit literal. */
8035 else if (mode0 == V8HImode && mode1 == mode0 && mode2 == mode0
8037 type = v8hi_ftype_v8hi_v8hi_int;
8039 /* vint, vint, vint, 4 bit literal. */
8040 else if (mode0 == V4SImode && mode1 == mode0 && mode2 == mode0
8042 type = v4si_ftype_v4si_v4si_int;
8044 /* vfloat, vfloat, vfloat, 4 bit literal. */
8045 else if (mode0 == V4SFmode && mode1 == mode0 && mode2 == mode0
8047 type = v4sf_ftype_v4sf_v4sf_int;
8052 def_builtin (d->mask, d->name, type, d->code);
8055 /* Add the simple binary operators. */
8056 d = (struct builtin_description *) bdesc_2arg;
8057 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
8059 enum machine_mode mode0, mode1, mode2;
8062 if (d->name == 0 || d->icode == CODE_FOR_nothing)
8065 mode0 = insn_data[d->icode].operand[0].mode;
8066 mode1 = insn_data[d->icode].operand[1].mode;
8067 mode2 = insn_data[d->icode].operand[2].mode;
8069 /* When all three operands are of the same mode. */
8070 if (mode0 == mode1 && mode1 == mode2)
8075 type = v4sf_ftype_v4sf_v4sf;
8078 type = v4si_ftype_v4si_v4si;
8081 type = v16qi_ftype_v16qi_v16qi;
8084 type = v8hi_ftype_v8hi_v8hi;
8087 type = v2si_ftype_v2si_v2si;
8090 type = v2sf_ftype_v2sf_v2sf;
8093 type = int_ftype_int_int;
8100 /* A few other combos we really don't want to do manually. */
8102 /* vint, vfloat, vfloat. */
8103 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == V4SFmode)
8104 type = v4si_ftype_v4sf_v4sf;
8106 /* vshort, vchar, vchar. */
8107 else if (mode0 == V8HImode && mode1 == V16QImode && mode2 == V16QImode)
8108 type = v8hi_ftype_v16qi_v16qi;
8110 /* vint, vshort, vshort. */
8111 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode)
8112 type = v4si_ftype_v8hi_v8hi;
8114 /* vshort, vint, vint. */
8115 else if (mode0 == V8HImode && mode1 == V4SImode && mode2 == V4SImode)
8116 type = v8hi_ftype_v4si_v4si;
8118 /* vchar, vshort, vshort. */
8119 else if (mode0 == V16QImode && mode1 == V8HImode && mode2 == V8HImode)
8120 type = v16qi_ftype_v8hi_v8hi;
8122 /* vint, vchar, vint. */
8123 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V4SImode)
8124 type = v4si_ftype_v16qi_v4si;
8126 /* vint, vchar, vchar. */
8127 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode)
8128 type = v4si_ftype_v16qi_v16qi;
8130 /* vint, vshort, vint. */
8131 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V4SImode)
8132 type = v4si_ftype_v8hi_v4si;
8134 /* vint, vint, 5 bit literal. */
8135 else if (mode0 == V4SImode && mode1 == V4SImode && mode2 == QImode)
8136 type = v4si_ftype_v4si_int;
8138 /* vshort, vshort, 5 bit literal. */
8139 else if (mode0 == V8HImode && mode1 == V8HImode && mode2 == QImode)
8140 type = v8hi_ftype_v8hi_int;
8142 /* vchar, vchar, 5 bit literal. */
8143 else if (mode0 == V16QImode && mode1 == V16QImode && mode2 == QImode)
8144 type = v16qi_ftype_v16qi_int;
8146 /* vfloat, vint, 5 bit literal. */
8147 else if (mode0 == V4SFmode && mode1 == V4SImode && mode2 == QImode)
8148 type = v4sf_ftype_v4si_int;
8150 /* vint, vfloat, 5 bit literal. */
8151 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == QImode)
8152 type = v4si_ftype_v4sf_int;
8154 else if (mode0 == V2SImode && mode1 == SImode && mode2 == SImode)
8155 type = v2si_ftype_int_int;
8157 else if (mode0 == V2SImode && mode1 == V2SImode && mode2 == QImode)
8158 type = v2si_ftype_v2si_char;
8160 else if (mode0 == V2SImode && mode1 == SImode && mode2 == QImode)
8161 type = v2si_ftype_int_char;
8164 else if (mode0 == SImode)
8169 type = int_ftype_v4si_v4si;
8172 type = int_ftype_v4sf_v4sf;
8175 type = int_ftype_v16qi_v16qi;
8178 type = int_ftype_v8hi_v8hi;
8188 def_builtin (d->mask, d->name, type, d->code);
8191 /* Add the simple unary operators. */
8192 d = (struct builtin_description *) bdesc_1arg;
8193 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
8195 enum machine_mode mode0, mode1;
8198 if (d->name == 0 || d->icode == CODE_FOR_nothing)
8201 mode0 = insn_data[d->icode].operand[0].mode;
8202 mode1 = insn_data[d->icode].operand[1].mode;
8204 if (mode0 == V4SImode && mode1 == QImode)
8205 type = v4si_ftype_int;
8206 else if (mode0 == V8HImode && mode1 == QImode)
8207 type = v8hi_ftype_int;
8208 else if (mode0 == V16QImode && mode1 == QImode)
8209 type = v16qi_ftype_int;
8210 else if (mode0 == V4SFmode && mode1 == V4SFmode)
8211 type = v4sf_ftype_v4sf;
8212 else if (mode0 == V8HImode && mode1 == V16QImode)
8213 type = v8hi_ftype_v16qi;
8214 else if (mode0 == V4SImode && mode1 == V8HImode)
8215 type = v4si_ftype_v8hi;
8216 else if (mode0 == V2SImode && mode1 == V2SImode)
8217 type = v2si_ftype_v2si;
8218 else if (mode0 == V2SFmode && mode1 == V2SFmode)
8219 type = v2sf_ftype_v2sf;
8220 else if (mode0 == V2SFmode && mode1 == V2SImode)
8221 type = v2sf_ftype_v2si;
8222 else if (mode0 == V2SImode && mode1 == V2SFmode)
8223 type = v2si_ftype_v2sf;
8224 else if (mode0 == V2SImode && mode1 == QImode)
8225 type = v2si_ftype_char;
8229 def_builtin (d->mask, d->name, type, d->code);
8234 rs6000_init_libfuncs (void)
8236 if (!TARGET_HARD_FLOAT)
8239 if (DEFAULT_ABI != ABI_V4)
8241 if (TARGET_XCOFF && ! TARGET_POWER2 && ! TARGET_POWERPC)
8243 /* AIX library routines for float->int conversion. */
8244 set_conv_libfunc (sfix_optab, SImode, DFmode, "__itrunc");
8245 set_conv_libfunc (ufix_optab, SImode, DFmode, "__uitrunc");
8246 set_conv_libfunc (sfix_optab, SImode, TFmode, "_qitrunc");
8247 set_conv_libfunc (ufix_optab, SImode, TFmode, "_quitrunc");
8250 /* Standard AIX/Darwin/64-bit SVR4 quad floating point routines. */
8251 set_optab_libfunc (add_optab, TFmode, "_xlqadd");
8252 set_optab_libfunc (sub_optab, TFmode, "_xlqsub");
8253 set_optab_libfunc (smul_optab, TFmode, "_xlqmul");
8254 set_optab_libfunc (sdiv_optab, TFmode, "_xlqdiv");
8258 /* 32-bit SVR4 quad floating point routines. */
8260 set_optab_libfunc (add_optab, TFmode, "_q_add");
8261 set_optab_libfunc (sub_optab, TFmode, "_q_sub");
8262 set_optab_libfunc (neg_optab, TFmode, "_q_neg");
8263 set_optab_libfunc (smul_optab, TFmode, "_q_mul");
8264 set_optab_libfunc (sdiv_optab, TFmode, "_q_div");
8265 if (TARGET_PPC_GPOPT || TARGET_POWER2)
8266 set_optab_libfunc (sqrt_optab, TFmode, "_q_sqrt");
8268 set_optab_libfunc (eq_optab, TFmode, "_q_feq");
8269 set_optab_libfunc (ne_optab, TFmode, "_q_fne");
8270 set_optab_libfunc (gt_optab, TFmode, "_q_fgt");
8271 set_optab_libfunc (ge_optab, TFmode, "_q_fge");
8272 set_optab_libfunc (lt_optab, TFmode, "_q_flt");
8273 set_optab_libfunc (le_optab, TFmode, "_q_fle");
8275 set_conv_libfunc (sext_optab, TFmode, SFmode, "_q_stoq");
8276 set_conv_libfunc (sext_optab, TFmode, DFmode, "_q_dtoq");
8277 set_conv_libfunc (trunc_optab, SFmode, TFmode, "_q_qtos");
8278 set_conv_libfunc (trunc_optab, DFmode, TFmode, "_q_qtod");
8279 set_conv_libfunc (sfix_optab, SImode, TFmode, "_q_qtoi");
8280 set_conv_libfunc (ufix_optab, SImode, TFmode, "_q_qtou");
8281 set_conv_libfunc (sfloat_optab, TFmode, SImode, "_q_itoq");
8286 /* Expand a block clear operation, and return 1 if successful. Return 0
8287 if we should let the compiler generate normal code.
8289 operands[0] is the destination
8290 operands[1] is the length
8291 operands[2] is the alignment */
8294 expand_block_clear (rtx operands[])
8296 rtx orig_dest = operands[0];
8297 rtx bytes_rtx = operands[1];
8298 rtx align_rtx = operands[2];
8299 int constp = (GET_CODE (bytes_rtx) == CONST_INT);
8305 /* If this is not a fixed size move, just call memcpy */
8309 /* If this is not a fixed size alignment, abort */
8310 if (GET_CODE (align_rtx) != CONST_INT)
8312 align = INTVAL (align_rtx) * BITS_PER_UNIT;
8314 /* Anything to clear? */
8315 bytes = INTVAL (bytes_rtx);
8319 if (bytes > (TARGET_POWERPC64 && align >= 32 ? 64 : 32))
8322 if (optimize_size && bytes > 16)
8325 for (offset = 0; bytes > 0; offset += clear_bytes, bytes -= clear_bytes)
8327 rtx (*mov) (rtx, rtx);
8328 enum machine_mode mode = BLKmode;
8331 if (bytes >= 8 && TARGET_POWERPC64
8332 /* 64-bit loads and stores require word-aligned
8334 && (align >= 64 || (!STRICT_ALIGNMENT && align >= 32)))
8340 else if (bytes >= 4 && !STRICT_ALIGNMENT)
8341 { /* move 4 bytes */
8346 else if (bytes == 2 && !STRICT_ALIGNMENT)
8347 { /* move 2 bytes */
8352 else /* move 1 byte at a time */
8359 dest = adjust_address (orig_dest, mode, offset);
8361 emit_insn ((*mov) (dest, const0_rtx));
8368 /* Expand a block move operation, and return 1 if successful. Return 0
8369 if we should let the compiler generate normal code.
8371 operands[0] is the destination
8372 operands[1] is the source
8373 operands[2] is the length
8374 operands[3] is the alignment */
8376 #define MAX_MOVE_REG 4
8379 expand_block_move (rtx operands[])
8381 rtx orig_dest = operands[0];
8382 rtx orig_src = operands[1];
8383 rtx bytes_rtx = operands[2];
8384 rtx align_rtx = operands[3];
8385 int constp = (GET_CODE (bytes_rtx) == CONST_INT);
8390 rtx stores[MAX_MOVE_REG];
8393 /* If this is not a fixed size move, just call memcpy */
8397 /* If this is not a fixed size alignment, abort */
8398 if (GET_CODE (align_rtx) != CONST_INT)
8400 align = INTVAL (align_rtx) * BITS_PER_UNIT;
8402 /* Anything to move? */
8403 bytes = INTVAL (bytes_rtx);
8407 /* store_one_arg depends on expand_block_move to handle at least the size of
8408 reg_parm_stack_space. */
8409 if (bytes > (TARGET_POWERPC64 ? 64 : 32))
8412 for (offset = 0; bytes > 0; offset += move_bytes, bytes -= move_bytes)
8415 rtx (*movmemsi) (rtx, rtx, rtx, rtx);
8416 rtx (*mov) (rtx, rtx);
8418 enum machine_mode mode = BLKmode;
8422 && bytes > 24 /* move up to 32 bytes at a time */
8430 && ! fixed_regs[12])
8432 move_bytes = (bytes > 32) ? 32 : bytes;
8433 gen_func.movmemsi = gen_movmemsi_8reg;
8435 else if (TARGET_STRING
8436 && bytes > 16 /* move up to 24 bytes at a time */
8442 && ! fixed_regs[10])
8444 move_bytes = (bytes > 24) ? 24 : bytes;
8445 gen_func.movmemsi = gen_movmemsi_6reg;
8447 else if (TARGET_STRING
8448 && bytes > 8 /* move up to 16 bytes at a time */
8454 move_bytes = (bytes > 16) ? 16 : bytes;
8455 gen_func.movmemsi = gen_movmemsi_4reg;
8457 else if (bytes >= 8 && TARGET_POWERPC64
8458 /* 64-bit loads and stores require word-aligned
8460 && (align >= 64 || (!STRICT_ALIGNMENT && align >= 32)))
8464 gen_func.mov = gen_movdi;
8466 else if (TARGET_STRING && bytes > 4 && !TARGET_POWERPC64)
8467 { /* move up to 8 bytes at a time */
8468 move_bytes = (bytes > 8) ? 8 : bytes;
8469 gen_func.movmemsi = gen_movmemsi_2reg;
8471 else if (bytes >= 4 && !STRICT_ALIGNMENT)
8472 { /* move 4 bytes */
8475 gen_func.mov = gen_movsi;
8477 else if (bytes == 2 && !STRICT_ALIGNMENT)
8478 { /* move 2 bytes */
8481 gen_func.mov = gen_movhi;
8483 else if (TARGET_STRING && bytes > 1)
8484 { /* move up to 4 bytes at a time */
8485 move_bytes = (bytes > 4) ? 4 : bytes;
8486 gen_func.movmemsi = gen_movmemsi_1reg;
8488 else /* move 1 byte at a time */
8492 gen_func.mov = gen_movqi;
8495 src = adjust_address (orig_src, mode, offset);
8496 dest = adjust_address (orig_dest, mode, offset);
8498 if (mode != BLKmode)
8500 rtx tmp_reg = gen_reg_rtx (mode);
8502 emit_insn ((*gen_func.mov) (tmp_reg, src));
8503 stores[num_reg++] = (*gen_func.mov) (dest, tmp_reg);
8506 if (mode == BLKmode || num_reg >= MAX_MOVE_REG || bytes == move_bytes)
8509 for (i = 0; i < num_reg; i++)
8510 emit_insn (stores[i]);
8514 if (mode == BLKmode)
8516 /* Move the address into scratch registers. The movmemsi
8517 patterns require zero offset. */
8518 if (!REG_P (XEXP (src, 0)))
8520 rtx src_reg = copy_addr_to_reg (XEXP (src, 0));
8521 src = replace_equiv_address (src, src_reg);
8523 set_mem_size (src, GEN_INT (move_bytes));
8525 if (!REG_P (XEXP (dest, 0)))
8527 rtx dest_reg = copy_addr_to_reg (XEXP (dest, 0));
8528 dest = replace_equiv_address (dest, dest_reg);
8530 set_mem_size (dest, GEN_INT (move_bytes));
8532 emit_insn ((*gen_func.movmemsi) (dest, src,
8533 GEN_INT (move_bytes & 31),
8542 /* Return 1 if OP is suitable for a save_world call in prologue. It is
8543 known to be a PARALLEL. */
8545 save_world_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8550 int count = XVECLEN (op, 0);
8556 if (GET_CODE (XVECEXP (op, 0, index++)) != CLOBBER
8557 || GET_CODE (XVECEXP (op, 0, index++)) != USE)
8560 for (i=1; i <= 18; i++)
8562 elt = XVECEXP (op, 0, index++);
8563 if (GET_CODE (elt) != SET
8564 || GET_CODE (SET_DEST (elt)) != MEM
8565 || ! memory_operand (SET_DEST (elt), DFmode)
8566 || GET_CODE (SET_SRC (elt)) != REG
8567 || GET_MODE (SET_SRC (elt)) != DFmode)
8571 for (i=1; i <= 12; i++)
8573 elt = XVECEXP (op, 0, index++);
8574 if (GET_CODE (elt) != SET
8575 || GET_CODE (SET_DEST (elt)) != MEM
8576 || GET_CODE (SET_SRC (elt)) != REG
8577 || GET_MODE (SET_SRC (elt)) != V4SImode)
8581 for (i=1; i <= 19; i++)
8583 elt = XVECEXP (op, 0, index++);
8584 if (GET_CODE (elt) != SET
8585 || GET_CODE (SET_DEST (elt)) != MEM
8586 || ! memory_operand (SET_DEST (elt), Pmode)
8587 || GET_CODE (SET_SRC (elt)) != REG
8588 || GET_MODE (SET_SRC (elt)) != Pmode)
8592 elt = XVECEXP (op, 0, index++);
8593 if (GET_CODE (elt) != SET
8594 || GET_CODE (SET_DEST (elt)) != MEM
8595 || ! memory_operand (SET_DEST (elt), Pmode)
8596 || GET_CODE (SET_SRC (elt)) != REG
8597 || REGNO (SET_SRC (elt)) != CR2_REGNO
8598 || GET_MODE (SET_SRC (elt)) != Pmode)
8601 if (GET_CODE (XVECEXP (op, 0, index++)) != USE
8602 || GET_CODE (XVECEXP (op, 0, index++)) != USE
8603 || GET_CODE (XVECEXP (op, 0, index++)) != CLOBBER)
8608 /* Return 1 if OP is suitable for a save_world call in prologue. It is
8609 known to be a PARALLEL. */
8611 restore_world_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8616 int count = XVECLEN (op, 0);
8622 if (GET_CODE (XVECEXP (op, 0, index++)) != RETURN
8623 || GET_CODE (XVECEXP (op, 0, index++)) != USE
8624 || GET_CODE (XVECEXP (op, 0, index++)) != USE
8625 || GET_CODE (XVECEXP (op, 0, index++)) != CLOBBER)
8628 elt = XVECEXP (op, 0, index++);
8629 if (GET_CODE (elt) != SET
8630 || GET_CODE (SET_SRC (elt)) != MEM
8631 || ! memory_operand (SET_SRC (elt), Pmode)
8632 || GET_CODE (SET_DEST (elt)) != REG
8633 || REGNO (SET_DEST (elt)) != CR2_REGNO
8634 || GET_MODE (SET_DEST (elt)) != Pmode)
8637 for (i=1; i <= 19; i++)
8639 elt = XVECEXP (op, 0, index++);
8640 if (GET_CODE (elt) != SET
8641 || GET_CODE (SET_SRC (elt)) != MEM
8642 || ! memory_operand (SET_SRC (elt), Pmode)
8643 || GET_CODE (SET_DEST (elt)) != REG
8644 || GET_MODE (SET_DEST (elt)) != Pmode)
8648 for (i=1; i <= 12; i++)
8650 elt = XVECEXP (op, 0, index++);
8651 if (GET_CODE (elt) != SET
8652 || GET_CODE (SET_SRC (elt)) != MEM
8653 || GET_CODE (SET_DEST (elt)) != REG
8654 || GET_MODE (SET_DEST (elt)) != V4SImode)
8658 for (i=1; i <= 18; i++)
8660 elt = XVECEXP (op, 0, index++);
8661 if (GET_CODE (elt) != SET
8662 || GET_CODE (SET_SRC (elt)) != MEM
8663 || ! memory_operand (SET_SRC (elt), DFmode)
8664 || GET_CODE (SET_DEST (elt)) != REG
8665 || GET_MODE (SET_DEST (elt)) != DFmode)
8669 if (GET_CODE (XVECEXP (op, 0, index++)) != CLOBBER
8670 || GET_CODE (XVECEXP (op, 0, index++)) != CLOBBER
8671 || GET_CODE (XVECEXP (op, 0, index++)) != CLOBBER
8672 || GET_CODE (XVECEXP (op, 0, index++)) != CLOBBER
8673 || GET_CODE (XVECEXP (op, 0, index++)) != USE)
8679 /* Return 1 if OP is a load multiple operation. It is known to be a
8680 PARALLEL and the first section will be tested. */
8683 load_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8685 int count = XVECLEN (op, 0);
8686 unsigned int dest_regno;
8690 /* Perform a quick check so we don't blow up below. */
8692 || GET_CODE (XVECEXP (op, 0, 0)) != SET
8693 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
8694 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
8697 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
8698 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
8700 for (i = 1; i < count; i++)
8702 rtx elt = XVECEXP (op, 0, i);
8704 if (GET_CODE (elt) != SET
8705 || GET_CODE (SET_DEST (elt)) != REG
8706 || GET_MODE (SET_DEST (elt)) != SImode
8707 || REGNO (SET_DEST (elt)) != dest_regno + i
8708 || GET_CODE (SET_SRC (elt)) != MEM
8709 || GET_MODE (SET_SRC (elt)) != SImode
8710 || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
8711 || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
8712 || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
8713 || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != i * 4)
8720 /* Similar, but tests for store multiple. Here, the second vector element
8721 is a CLOBBER. It will be tested later. */
8724 store_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8726 int count = XVECLEN (op, 0) - 1;
8727 unsigned int src_regno;
8731 /* Perform a quick check so we don't blow up below. */
8733 || GET_CODE (XVECEXP (op, 0, 0)) != SET
8734 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
8735 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
8738 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
8739 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
8741 for (i = 1; i < count; i++)
8743 rtx elt = XVECEXP (op, 0, i + 1);
8745 if (GET_CODE (elt) != SET
8746 || GET_CODE (SET_SRC (elt)) != REG
8747 || GET_MODE (SET_SRC (elt)) != SImode
8748 || REGNO (SET_SRC (elt)) != src_regno + i
8749 || GET_CODE (SET_DEST (elt)) != MEM
8750 || GET_MODE (SET_DEST (elt)) != SImode
8751 || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
8752 || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
8753 || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
8754 || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != i * 4)
8761 /* Return a string to perform a load_multiple operation.
8762 operands[0] is the vector.
8763 operands[1] is the source address.
8764 operands[2] is the first destination register. */
8767 rs6000_output_load_multiple (rtx operands[3])
8769 /* We have to handle the case where the pseudo used to contain the address
8770 is assigned to one of the output registers. */
8772 int words = XVECLEN (operands[0], 0);
8775 if (XVECLEN (operands[0], 0) == 1)
8776 return "{l|lwz} %2,0(%1)";
8778 for (i = 0; i < words; i++)
8779 if (refers_to_regno_p (REGNO (operands[2]) + i,
8780 REGNO (operands[2]) + i + 1, operands[1], 0))
8784 xop[0] = GEN_INT (4 * (words-1));
8785 xop[1] = operands[1];
8786 xop[2] = operands[2];
8787 output_asm_insn ("{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,%0(%1)", xop);
8792 xop[0] = GEN_INT (4 * (words-1));
8793 xop[1] = operands[1];
8794 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
8795 output_asm_insn ("{cal %1,4(%1)|addi %1,%1,4}\n\t{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,-4(%1)", xop);
8800 for (j = 0; j < words; j++)
8803 xop[0] = GEN_INT (j * 4);
8804 xop[1] = operands[1];
8805 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + j);
8806 output_asm_insn ("{l|lwz} %2,%0(%1)", xop);
8808 xop[0] = GEN_INT (i * 4);
8809 xop[1] = operands[1];
8810 output_asm_insn ("{l|lwz} %1,%0(%1)", xop);
8815 return "{lsi|lswi} %2,%1,%N0";
8818 /* Return 1 for a parallel vrsave operation. */
8821 vrsave_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8823 int count = XVECLEN (op, 0);
8824 unsigned int dest_regno, src_regno;
8828 || GET_CODE (XVECEXP (op, 0, 0)) != SET
8829 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
8830 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC_VOLATILE)
8833 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
8834 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
8836 if (dest_regno != VRSAVE_REGNO
8837 && src_regno != VRSAVE_REGNO)
8840 for (i = 1; i < count; i++)
8842 rtx elt = XVECEXP (op, 0, i);
8844 if (GET_CODE (elt) != CLOBBER
8845 && GET_CODE (elt) != SET)
8852 /* Return 1 for an PARALLEL suitable for mfcr. */
8855 mfcr_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8857 int count = XVECLEN (op, 0);
8860 /* Perform a quick check so we don't blow up below. */
8862 || GET_CODE (XVECEXP (op, 0, 0)) != SET
8863 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC
8864 || XVECLEN (SET_SRC (XVECEXP (op, 0, 0)), 0) != 2)
8867 for (i = 0; i < count; i++)
8869 rtx exp = XVECEXP (op, 0, i);
8874 src_reg = XVECEXP (SET_SRC (exp), 0, 0);
8876 if (GET_CODE (src_reg) != REG
8877 || GET_MODE (src_reg) != CCmode
8878 || ! CR_REGNO_P (REGNO (src_reg)))
8881 if (GET_CODE (exp) != SET
8882 || GET_CODE (SET_DEST (exp)) != REG
8883 || GET_MODE (SET_DEST (exp)) != SImode
8884 || ! INT_REGNO_P (REGNO (SET_DEST (exp))))
8886 unspec = SET_SRC (exp);
8887 maskval = 1 << (MAX_CR_REGNO - REGNO (src_reg));
8889 if (GET_CODE (unspec) != UNSPEC
8890 || XINT (unspec, 1) != UNSPEC_MOVESI_FROM_CR
8891 || XVECLEN (unspec, 0) != 2
8892 || XVECEXP (unspec, 0, 0) != src_reg
8893 || GET_CODE (XVECEXP (unspec, 0, 1)) != CONST_INT
8894 || INTVAL (XVECEXP (unspec, 0, 1)) != maskval)
8900 /* Return 1 for an PARALLEL suitable for mtcrf. */
8903 mtcrf_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8905 int count = XVECLEN (op, 0);
8909 /* Perform a quick check so we don't blow up below. */
8911 || GET_CODE (XVECEXP (op, 0, 0)) != SET
8912 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC
8913 || XVECLEN (SET_SRC (XVECEXP (op, 0, 0)), 0) != 2)
8915 src_reg = XVECEXP (SET_SRC (XVECEXP (op, 0, 0)), 0, 0);
8917 if (GET_CODE (src_reg) != REG
8918 || GET_MODE (src_reg) != SImode
8919 || ! INT_REGNO_P (REGNO (src_reg)))
8922 for (i = 0; i < count; i++)
8924 rtx exp = XVECEXP (op, 0, i);
8928 if (GET_CODE (exp) != SET
8929 || GET_CODE (SET_DEST (exp)) != REG
8930 || GET_MODE (SET_DEST (exp)) != CCmode
8931 || ! CR_REGNO_P (REGNO (SET_DEST (exp))))
8933 unspec = SET_SRC (exp);
8934 maskval = 1 << (MAX_CR_REGNO - REGNO (SET_DEST (exp)));
8936 if (GET_CODE (unspec) != UNSPEC
8937 || XINT (unspec, 1) != UNSPEC_MOVESI_TO_CR
8938 || XVECLEN (unspec, 0) != 2
8939 || XVECEXP (unspec, 0, 0) != src_reg
8940 || GET_CODE (XVECEXP (unspec, 0, 1)) != CONST_INT
8941 || INTVAL (XVECEXP (unspec, 0, 1)) != maskval)
8947 /* Return 1 for an PARALLEL suitable for lmw. */
8950 lmw_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8952 int count = XVECLEN (op, 0);
8953 unsigned int dest_regno;
8955 unsigned int base_regno;
8956 HOST_WIDE_INT offset;
8959 /* Perform a quick check so we don't blow up below. */
8961 || GET_CODE (XVECEXP (op, 0, 0)) != SET
8962 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
8963 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
8966 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
8967 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
8970 || count != 32 - (int) dest_regno)
8973 if (legitimate_indirect_address_p (src_addr, 0))
8976 base_regno = REGNO (src_addr);
8977 if (base_regno == 0)
8980 else if (rs6000_legitimate_offset_address_p (SImode, src_addr, 0))
8982 offset = INTVAL (XEXP (src_addr, 1));
8983 base_regno = REGNO (XEXP (src_addr, 0));
8988 for (i = 0; i < count; i++)
8990 rtx elt = XVECEXP (op, 0, i);
8993 HOST_WIDE_INT newoffset;
8995 if (GET_CODE (elt) != SET
8996 || GET_CODE (SET_DEST (elt)) != REG
8997 || GET_MODE (SET_DEST (elt)) != SImode
8998 || REGNO (SET_DEST (elt)) != dest_regno + i
8999 || GET_CODE (SET_SRC (elt)) != MEM
9000 || GET_MODE (SET_SRC (elt)) != SImode)
9002 newaddr = XEXP (SET_SRC (elt), 0);
9003 if (legitimate_indirect_address_p (newaddr, 0))
9008 else if (rs6000_legitimate_offset_address_p (SImode, newaddr, 0))
9010 addr_reg = XEXP (newaddr, 0);
9011 newoffset = INTVAL (XEXP (newaddr, 1));
9015 if (REGNO (addr_reg) != base_regno
9016 || newoffset != offset + 4 * i)
9023 /* Return 1 for an PARALLEL suitable for stmw. */
9026 stmw_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
9028 int count = XVECLEN (op, 0);
9029 unsigned int src_regno;
9031 unsigned int base_regno;
9032 HOST_WIDE_INT offset;
9035 /* Perform a quick check so we don't blow up below. */
9037 || GET_CODE (XVECEXP (op, 0, 0)) != SET
9038 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
9039 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
9042 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
9043 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
9046 || count != 32 - (int) src_regno)
9049 if (legitimate_indirect_address_p (dest_addr, 0))
9052 base_regno = REGNO (dest_addr);
9053 if (base_regno == 0)
9056 else if (rs6000_legitimate_offset_address_p (SImode, dest_addr, 0))
9058 offset = INTVAL (XEXP (dest_addr, 1));
9059 base_regno = REGNO (XEXP (dest_addr, 0));
9064 for (i = 0; i < count; i++)
9066 rtx elt = XVECEXP (op, 0, i);
9069 HOST_WIDE_INT newoffset;
9071 if (GET_CODE (elt) != SET
9072 || GET_CODE (SET_SRC (elt)) != REG
9073 || GET_MODE (SET_SRC (elt)) != SImode
9074 || REGNO (SET_SRC (elt)) != src_regno + i
9075 || GET_CODE (SET_DEST (elt)) != MEM
9076 || GET_MODE (SET_DEST (elt)) != SImode)
9078 newaddr = XEXP (SET_DEST (elt), 0);
9079 if (legitimate_indirect_address_p (newaddr, 0))
9084 else if (rs6000_legitimate_offset_address_p (SImode, newaddr, 0))
9086 addr_reg = XEXP (newaddr, 0);
9087 newoffset = INTVAL (XEXP (newaddr, 1));
9091 if (REGNO (addr_reg) != base_regno
9092 || newoffset != offset + 4 * i)
9099 /* A validation routine: say whether CODE, a condition code, and MODE
9100 match. The other alternatives either don't make sense or should
9101 never be generated. */
9104 validate_condition_mode (enum rtx_code code, enum machine_mode mode)
9106 if ((GET_RTX_CLASS (code) != RTX_COMPARE
9107 && GET_RTX_CLASS (code) != RTX_COMM_COMPARE)
9108 || GET_MODE_CLASS (mode) != MODE_CC)
9111 /* These don't make sense. */
9112 if ((code == GT || code == LT || code == GE || code == LE)
9113 && mode == CCUNSmode)
9116 if ((code == GTU || code == LTU || code == GEU || code == LEU)
9117 && mode != CCUNSmode)
9120 if (mode != CCFPmode
9121 && (code == ORDERED || code == UNORDERED
9122 || code == UNEQ || code == LTGT
9123 || code == UNGT || code == UNLT
9124 || code == UNGE || code == UNLE))
9127 /* These should never be generated except for
9128 flag_finite_math_only. */
9129 if (mode == CCFPmode
9130 && ! flag_finite_math_only
9131 && (code == LE || code == GE
9132 || code == UNEQ || code == LTGT
9133 || code == UNGT || code == UNLT))
9136 /* These are invalid; the information is not there. */
9137 if (mode == CCEQmode
9138 && code != EQ && code != NE)
9142 /* Return 1 if OP is a comparison operation that is valid for a branch insn.
9143 We only check the opcode against the mode of the CC value here. */
9146 branch_comparison_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
9148 enum rtx_code code = GET_CODE (op);
9149 enum machine_mode cc_mode;
9151 if (!COMPARISON_P (op))
9154 cc_mode = GET_MODE (XEXP (op, 0));
9155 if (GET_MODE_CLASS (cc_mode) != MODE_CC)
9158 validate_condition_mode (code, cc_mode);
9163 /* Return 1 if OP is a comparison operation that is valid for a branch
9164 insn and which is true if the corresponding bit in the CC register
9168 branch_positive_comparison_operator (rtx op, enum machine_mode mode)
9172 if (! branch_comparison_operator (op, mode))
9175 code = GET_CODE (op);
9176 return (code == EQ || code == LT || code == GT
9177 || code == LTU || code == GTU
9178 || code == UNORDERED);
9181 /* Return 1 if OP is a comparison operation that is valid for an scc
9182 insn: it must be a positive comparison. */
9185 scc_comparison_operator (rtx op, enum machine_mode mode)
9187 return branch_positive_comparison_operator (op, mode);
9191 trap_comparison_operator (rtx op, enum machine_mode mode)
9193 if (mode != VOIDmode && mode != GET_MODE (op))
9195 return COMPARISON_P (op);
9199 boolean_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
9201 enum rtx_code code = GET_CODE (op);
9202 return (code == AND || code == IOR || code == XOR);
9206 boolean_or_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
9208 enum rtx_code code = GET_CODE (op);
9209 return (code == IOR || code == XOR);
9213 min_max_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
9215 enum rtx_code code = GET_CODE (op);
9216 return (code == SMIN || code == SMAX || code == UMIN || code == UMAX);
9219 /* Return 1 if ANDOP is a mask that has no bits on that are not in the
9220 mask required to convert the result of a rotate insn into a shift
9221 left insn of SHIFTOP bits. Both are known to be SImode CONST_INT. */
9224 includes_lshift_p (rtx shiftop, rtx andop)
9226 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
9228 shift_mask <<= INTVAL (shiftop);
9230 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
9233 /* Similar, but for right shift. */
9236 includes_rshift_p (rtx shiftop, rtx andop)
9238 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
9240 shift_mask >>= INTVAL (shiftop);
9242 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
9245 /* Return 1 if ANDOP is a mask suitable for use with an rldic insn
9246 to perform a left shift. It must have exactly SHIFTOP least
9247 significant 0's, then one or more 1's, then zero or more 0's. */
9250 includes_rldic_lshift_p (rtx shiftop, rtx andop)
9252 if (GET_CODE (andop) == CONST_INT)
9254 HOST_WIDE_INT c, lsb, shift_mask;
9257 if (c == 0 || c == ~0)
9261 shift_mask <<= INTVAL (shiftop);
9263 /* Find the least significant one bit. */
9266 /* It must coincide with the LSB of the shift mask. */
9267 if (-lsb != shift_mask)
9270 /* Invert to look for the next transition (if any). */
9273 /* Remove the low group of ones (originally low group of zeros). */
9276 /* Again find the lsb, and check we have all 1's above. */
9280 else if (GET_CODE (andop) == CONST_DOUBLE
9281 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
9283 HOST_WIDE_INT low, high, lsb;
9284 HOST_WIDE_INT shift_mask_low, shift_mask_high;
9286 low = CONST_DOUBLE_LOW (andop);
9287 if (HOST_BITS_PER_WIDE_INT < 64)
9288 high = CONST_DOUBLE_HIGH (andop);
9290 if ((low == 0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == 0))
9291 || (low == ~0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0)))
9294 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
9296 shift_mask_high = ~0;
9297 if (INTVAL (shiftop) > 32)
9298 shift_mask_high <<= INTVAL (shiftop) - 32;
9302 if (-lsb != shift_mask_high || INTVAL (shiftop) < 32)
9309 return high == -lsb;
9312 shift_mask_low = ~0;
9313 shift_mask_low <<= INTVAL (shiftop);
9317 if (-lsb != shift_mask_low)
9320 if (HOST_BITS_PER_WIDE_INT < 64)
9325 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
9328 return high == -lsb;
9332 return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
9338 /* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
9339 to perform a left shift. It must have SHIFTOP or more least
9340 significant 0's, with the remainder of the word 1's. */
9343 includes_rldicr_lshift_p (rtx shiftop, rtx andop)
9345 if (GET_CODE (andop) == CONST_INT)
9347 HOST_WIDE_INT c, lsb, shift_mask;
9350 shift_mask <<= INTVAL (shiftop);
9353 /* Find the least significant one bit. */
9356 /* It must be covered by the shift mask.
9357 This test also rejects c == 0. */
9358 if ((lsb & shift_mask) == 0)
9361 /* Check we have all 1's above the transition, and reject all 1's. */
9362 return c == -lsb && lsb != 1;
9364 else if (GET_CODE (andop) == CONST_DOUBLE
9365 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
9367 HOST_WIDE_INT low, lsb, shift_mask_low;
9369 low = CONST_DOUBLE_LOW (andop);
9371 if (HOST_BITS_PER_WIDE_INT < 64)
9373 HOST_WIDE_INT high, shift_mask_high;
9375 high = CONST_DOUBLE_HIGH (andop);
9379 shift_mask_high = ~0;
9380 if (INTVAL (shiftop) > 32)
9381 shift_mask_high <<= INTVAL (shiftop) - 32;
9385 if ((lsb & shift_mask_high) == 0)
9388 return high == -lsb;
9394 shift_mask_low = ~0;
9395 shift_mask_low <<= INTVAL (shiftop);
9399 if ((lsb & shift_mask_low) == 0)
9402 return low == -lsb && lsb != 1;
9408 /* Return 1 if operands will generate a valid arguments to rlwimi
9409 instruction for insert with right shift in 64-bit mode. The mask may
9410 not start on the first bit or stop on the last bit because wrap-around
9411 effects of instruction do not correspond to semantics of RTL insn. */
9414 insvdi_rshift_rlwimi_p (rtx sizeop, rtx startop, rtx shiftop)
9416 if (INTVAL (startop) < 64
9417 && INTVAL (startop) > 32
9418 && (INTVAL (sizeop) + INTVAL (startop) < 64)
9419 && (INTVAL (sizeop) + INTVAL (startop) > 33)
9420 && (INTVAL (sizeop) + INTVAL (startop) + INTVAL (shiftop) < 96)
9421 && (INTVAL (sizeop) + INTVAL (startop) + INTVAL (shiftop) >= 64)
9422 && (64 - (INTVAL (shiftop) & 63)) >= INTVAL (sizeop))
9428 /* Return 1 if REGNO (reg1) == REGNO (reg2) - 1 making them candidates
9429 for lfq and stfq insns iff the registers are hard registers. */
9432 registers_ok_for_quad_peep (rtx reg1, rtx reg2)
9434 /* We might have been passed a SUBREG. */
9435 if (GET_CODE (reg1) != REG || GET_CODE (reg2) != REG)
9438 /* We might have been passed non floating point registers. */
9439 if (!FP_REGNO_P (REGNO (reg1))
9440 || !FP_REGNO_P (REGNO (reg2)))
9443 return (REGNO (reg1) == REGNO (reg2) - 1);
9446 /* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
9447 addr1 and addr2 must be in consecutive memory locations
9448 (addr2 == addr1 + 8). */
9451 mems_ok_for_quad_peep (rtx mem1, rtx mem2)
9457 /* The mems cannot be volatile. */
9458 if (MEM_VOLATILE_P (mem1) || MEM_VOLATILE_P (mem2))
9461 addr1 = XEXP (mem1, 0);
9462 addr2 = XEXP (mem2, 0);
9464 /* Extract an offset (if used) from the first addr. */
9465 if (GET_CODE (addr1) == PLUS)
9467 /* If not a REG, return zero. */
9468 if (GET_CODE (XEXP (addr1, 0)) != REG)
9472 reg1 = REGNO (XEXP (addr1, 0));
9473 /* The offset must be constant! */
9474 if (GET_CODE (XEXP (addr1, 1)) != CONST_INT)
9476 offset1 = INTVAL (XEXP (addr1, 1));
9479 else if (GET_CODE (addr1) != REG)
9483 reg1 = REGNO (addr1);
9484 /* This was a simple (mem (reg)) expression. Offset is 0. */
9488 /* Make sure the second address is a (mem (plus (reg) (const_int)))
9489 or if it is (mem (reg)) then make sure that offset1 is -8 and the same
9490 register as addr1. */
9491 if (offset1 == -8 && GET_CODE (addr2) == REG && reg1 == REGNO (addr2))
9493 if (GET_CODE (addr2) != PLUS)
9496 if (GET_CODE (XEXP (addr2, 0)) != REG
9497 || GET_CODE (XEXP (addr2, 1)) != CONST_INT)
9500 if (reg1 != REGNO (XEXP (addr2, 0)))
9503 /* The offset for the second addr must be 8 more than the first addr. */
9504 if (INTVAL (XEXP (addr2, 1)) != offset1 + 8)
9507 /* All the tests passed. addr1 and addr2 are valid for lfq or stfq
9512 /* Return the register class of a scratch register needed to copy IN into
9513 or out of a register in CLASS in MODE. If it can be done directly,
9514 NO_REGS is returned. */
9517 secondary_reload_class (enum reg_class class,
9518 enum machine_mode mode ATTRIBUTE_UNUSED,
9523 if (TARGET_ELF || (DEFAULT_ABI == ABI_DARWIN
9525 && MACHOPIC_INDIRECT
9529 /* We cannot copy a symbolic operand directly into anything
9530 other than BASE_REGS for TARGET_ELF. So indicate that a
9531 register from BASE_REGS is needed as an intermediate
9534 On Darwin, pic addresses require a load from memory, which
9535 needs a base register. */
9536 if (class != BASE_REGS
9537 && (GET_CODE (in) == SYMBOL_REF
9538 || GET_CODE (in) == HIGH
9539 || GET_CODE (in) == LABEL_REF
9540 || GET_CODE (in) == CONST))
9544 if (GET_CODE (in) == REG)
9547 if (regno >= FIRST_PSEUDO_REGISTER)
9549 regno = true_regnum (in);
9550 if (regno >= FIRST_PSEUDO_REGISTER)
9554 else if (GET_CODE (in) == SUBREG)
9556 regno = true_regnum (in);
9557 if (regno >= FIRST_PSEUDO_REGISTER)
9563 /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
9565 if (class == GENERAL_REGS || class == BASE_REGS
9566 || (regno >= 0 && INT_REGNO_P (regno)))
9569 /* Constants, memory, and FP registers can go into FP registers. */
9570 if ((regno == -1 || FP_REGNO_P (regno))
9571 && (class == FLOAT_REGS || class == NON_SPECIAL_REGS))
9574 /* Memory, and AltiVec registers can go into AltiVec registers. */
9575 if ((regno == -1 || ALTIVEC_REGNO_P (regno))
9576 && class == ALTIVEC_REGS)
9579 /* We can copy among the CR registers. */
9580 if ((class == CR_REGS || class == CR0_REGS)
9581 && regno >= 0 && CR_REGNO_P (regno))
9584 /* Otherwise, we need GENERAL_REGS. */
9585 return GENERAL_REGS;
9588 /* Given a comparison operation, return the bit number in CCR to test. We
9589 know this is a valid comparison.
9591 SCC_P is 1 if this is for an scc. That means that %D will have been
9592 used instead of %C, so the bits will be in different places.
9594 Return -1 if OP isn't a valid comparison for some reason. */
9597 ccr_bit (rtx op, int scc_p)
9599 enum rtx_code code = GET_CODE (op);
9600 enum machine_mode cc_mode;
9605 if (!COMPARISON_P (op))
9610 if (GET_CODE (reg) != REG
9611 || ! CR_REGNO_P (REGNO (reg)))
9614 cc_mode = GET_MODE (reg);
9615 cc_regnum = REGNO (reg);
9616 base_bit = 4 * (cc_regnum - CR0_REGNO);
9618 validate_condition_mode (code, cc_mode);
9620 /* When generating a sCOND operation, only positive conditions are
9622 if (scc_p && code != EQ && code != GT && code != LT && code != UNORDERED
9623 && code != GTU && code != LTU)
9629 return scc_p ? base_bit + 3 : base_bit + 2;
9631 return base_bit + 2;
9632 case GT: case GTU: case UNLE:
9633 return base_bit + 1;
9634 case LT: case LTU: case UNGE:
9636 case ORDERED: case UNORDERED:
9637 return base_bit + 3;
9640 /* If scc, we will have done a cror to put the bit in the
9641 unordered position. So test that bit. For integer, this is ! LT
9642 unless this is an scc insn. */
9643 return scc_p ? base_bit + 3 : base_bit;
9646 return scc_p ? base_bit + 3 : base_bit + 1;
9653 /* Return the GOT register. */
9656 rs6000_got_register (rtx value ATTRIBUTE_UNUSED)
9658 /* The second flow pass currently (June 1999) can't update
9659 regs_ever_live without disturbing other parts of the compiler, so
9660 update it here to make the prolog/epilogue code happy. */
9661 if (no_new_pseudos && ! regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM])
9662 regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
9664 current_function_uses_pic_offset_table = 1;
9666 return pic_offset_table_rtx;
9669 /* Function to init struct machine_function.
9670 This will be called, via a pointer variable,
9671 from push_function_context. */
9673 static struct machine_function *
9674 rs6000_init_machine_status (void)
9676 return ggc_alloc_cleared (sizeof (machine_function));
9679 /* These macros test for integers and extract the low-order bits. */
9681 ((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE) \
9682 && GET_MODE (X) == VOIDmode)
9684 #define INT_LOWPART(X) \
9685 (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
9691 unsigned long val = INT_LOWPART (op);
9693 /* If the high bit is zero, the value is the first 1 bit we find
9695 if ((val & 0x80000000) == 0)
9697 if ((val & 0xffffffff) == 0)
9701 while (((val <<= 1) & 0x80000000) == 0)
9706 /* If the high bit is set and the low bit is not, or the mask is all
9707 1's, the value is zero. */
9708 if ((val & 1) == 0 || (val & 0xffffffff) == 0xffffffff)
9711 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
9714 while (((val >>= 1) & 1) != 0)
9724 unsigned long val = INT_LOWPART (op);
9726 /* If the low bit is zero, the value is the first 1 bit we find from
9730 if ((val & 0xffffffff) == 0)
9734 while (((val >>= 1) & 1) == 0)
9740 /* If the low bit is set and the high bit is not, or the mask is all
9741 1's, the value is 31. */
9742 if ((val & 0x80000000) == 0 || (val & 0xffffffff) == 0xffffffff)
9745 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
9748 while (((val <<= 1) & 0x80000000) != 0)
9754 /* Locate some local-dynamic symbol still in use by this function
9755 so that we can print its name in some tls_ld pattern. */
9758 rs6000_get_some_local_dynamic_name (void)
9762 if (cfun->machine->some_ld_name)
9763 return cfun->machine->some_ld_name;
9765 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
9767 && for_each_rtx (&PATTERN (insn),
9768 rs6000_get_some_local_dynamic_name_1, 0))
9769 return cfun->machine->some_ld_name;
9774 /* Helper function for rs6000_get_some_local_dynamic_name. */
9777 rs6000_get_some_local_dynamic_name_1 (rtx *px, void *data ATTRIBUTE_UNUSED)
9781 if (GET_CODE (x) == SYMBOL_REF)
9783 const char *str = XSTR (x, 0);
9784 if (SYMBOL_REF_TLS_MODEL (x) == TLS_MODEL_LOCAL_DYNAMIC)
9786 cfun->machine->some_ld_name = str;
9794 /* Print an operand. Recognize special options, documented below. */
9797 #define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
9798 #define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
9800 #define SMALL_DATA_RELOC "sda21"
9801 #define SMALL_DATA_REG 0
9805 print_operand (FILE *file, rtx x, int code)
9809 unsigned HOST_WIDE_INT uval;
9814 /* Write out an instruction after the call which may be replaced
9815 with glue code by the loader. This depends on the AIX version. */
9816 asm_fprintf (file, RS6000_CALL_GLUE);
9819 /* %a is output_address. */
9822 /* If X is a constant integer whose low-order 5 bits are zero,
9823 write 'l'. Otherwise, write 'r'. This is a kludge to fix a bug
9824 in the AIX assembler where "sri" with a zero shift count
9825 writes a trash instruction. */
9826 if (GET_CODE (x) == CONST_INT && (INTVAL (x) & 31) == 0)
9833 /* If constant, low-order 16 bits of constant, unsigned.
9834 Otherwise, write normally. */
9836 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 0xffff);
9838 print_operand (file, x, 0);
9842 /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
9843 for 64-bit mask direction. */
9844 putc (((INT_LOWPART(x) & 1) == 0 ? 'r' : 'l'), file);
9847 /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
9851 /* X is a CR register. Print the number of the GT bit of the CR. */
9852 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
9853 output_operand_lossage ("invalid %%E value");
9855 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 1);
9859 /* Like 'J' but get to the EQ bit. */
9860 if (GET_CODE (x) != REG)
9863 /* Bit 1 is EQ bit. */
9864 i = 4 * (REGNO (x) - CR0_REGNO) + 2;
9866 /* If we want bit 31, write a shift count of zero, not 32. */
9867 fprintf (file, "%d", i == 31 ? 0 : i + 1);
9871 /* X is a CR register. Print the number of the EQ bit of the CR */
9872 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
9873 output_operand_lossage ("invalid %%E value");
9875 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 2);
9879 /* X is a CR register. Print the shift count needed to move it
9880 to the high-order four bits. */
9881 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
9882 output_operand_lossage ("invalid %%f value");
9884 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO));
9888 /* Similar, but print the count for the rotate in the opposite
9890 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
9891 output_operand_lossage ("invalid %%F value");
9893 fprintf (file, "%d", 32 - 4 * (REGNO (x) - CR0_REGNO));
9897 /* X is a constant integer. If it is negative, print "m",
9898 otherwise print "z". This is to make an aze or ame insn. */
9899 if (GET_CODE (x) != CONST_INT)
9900 output_operand_lossage ("invalid %%G value");
9901 else if (INTVAL (x) >= 0)
9908 /* If constant, output low-order five bits. Otherwise, write
9911 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 31);
9913 print_operand (file, x, 0);
9917 /* If constant, output low-order six bits. Otherwise, write
9920 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 63);
9922 print_operand (file, x, 0);
9926 /* Print `i' if this is a constant, else nothing. */
9932 /* Write the bit number in CCR for jump. */
9935 output_operand_lossage ("invalid %%j code");
9937 fprintf (file, "%d", i);
9941 /* Similar, but add one for shift count in rlinm for scc and pass
9942 scc flag to `ccr_bit'. */
9945 output_operand_lossage ("invalid %%J code");
9947 /* If we want bit 31, write a shift count of zero, not 32. */
9948 fprintf (file, "%d", i == 31 ? 0 : i + 1);
9952 /* X must be a constant. Write the 1's complement of the
9955 output_operand_lossage ("invalid %%k value");
9957 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~ INT_LOWPART (x));
9961 /* X must be a symbolic constant on ELF. Write an
9962 expression suitable for an 'addi' that adds in the low 16
9964 if (GET_CODE (x) != CONST)
9966 print_operand_address (file, x);
9971 if (GET_CODE (XEXP (x, 0)) != PLUS
9972 || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
9973 && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
9974 || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
9975 output_operand_lossage ("invalid %%K value");
9976 print_operand_address (file, XEXP (XEXP (x, 0), 0));
9978 /* For GNU as, there must be a non-alphanumeric character
9979 between 'l' and the number. The '-' is added by
9980 print_operand() already. */
9981 if (INTVAL (XEXP (XEXP (x, 0), 1)) >= 0)
9983 print_operand (file, XEXP (XEXP (x, 0), 1), 0);
9987 /* %l is output_asm_label. */
9990 /* Write second word of DImode or DFmode reference. Works on register
9991 or non-indexed memory only. */
9992 if (GET_CODE (x) == REG)
9993 fprintf (file, "%s", reg_names[REGNO (x) + 1]);
9994 else if (GET_CODE (x) == MEM)
9996 /* Handle possible auto-increment. Since it is pre-increment and
9997 we have already done it, we can just use an offset of word. */
9998 if (GET_CODE (XEXP (x, 0)) == PRE_INC
9999 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
10000 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
10003 output_address (XEXP (adjust_address_nv (x, SImode,
10007 if (small_data_operand (x, GET_MODE (x)))
10008 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
10009 reg_names[SMALL_DATA_REG]);
10014 /* MB value for a mask operand. */
10015 if (! mask_operand (x, SImode))
10016 output_operand_lossage ("invalid %%m value");
10018 fprintf (file, "%d", extract_MB (x));
10022 /* ME value for a mask operand. */
10023 if (! mask_operand (x, SImode))
10024 output_operand_lossage ("invalid %%M value");
10026 fprintf (file, "%d", extract_ME (x));
10029 /* %n outputs the negative of its operand. */
10032 /* Write the number of elements in the vector times 4. */
10033 if (GET_CODE (x) != PARALLEL)
10034 output_operand_lossage ("invalid %%N value");
10036 fprintf (file, "%d", XVECLEN (x, 0) * 4);
10040 /* Similar, but subtract 1 first. */
10041 if (GET_CODE (x) != PARALLEL)
10042 output_operand_lossage ("invalid %%O value");
10044 fprintf (file, "%d", (XVECLEN (x, 0) - 1) * 4);
10048 /* X is a CONST_INT that is a power of two. Output the logarithm. */
10050 || INT_LOWPART (x) < 0
10051 || (i = exact_log2 (INT_LOWPART (x))) < 0)
10052 output_operand_lossage ("invalid %%p value");
10054 fprintf (file, "%d", i);
10058 /* The operand must be an indirect memory reference. The result
10059 is the register name. */
10060 if (GET_CODE (x) != MEM || GET_CODE (XEXP (x, 0)) != REG
10061 || REGNO (XEXP (x, 0)) >= 32)
10062 output_operand_lossage ("invalid %%P value");
10064 fprintf (file, "%s", reg_names[REGNO (XEXP (x, 0))]);
10068 /* This outputs the logical code corresponding to a boolean
10069 expression. The expression may have one or both operands
10070 negated (if one, only the first one). For condition register
10071 logical operations, it will also treat the negated
10072 CR codes as NOTs, but not handle NOTs of them. */
10074 const char *const *t = 0;
10076 enum rtx_code code = GET_CODE (x);
10077 static const char * const tbl[3][3] = {
10078 { "and", "andc", "nor" },
10079 { "or", "orc", "nand" },
10080 { "xor", "eqv", "xor" } };
10084 else if (code == IOR)
10086 else if (code == XOR)
10089 output_operand_lossage ("invalid %%q value");
10091 if (GET_CODE (XEXP (x, 0)) != NOT)
10095 if (GET_CODE (XEXP (x, 1)) == NOT)
10113 /* X is a CR register. Print the mask for `mtcrf'. */
10114 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
10115 output_operand_lossage ("invalid %%R value");
10117 fprintf (file, "%d", 128 >> (REGNO (x) - CR0_REGNO));
10121 /* Low 5 bits of 32 - value */
10123 output_operand_lossage ("invalid %%s value");
10125 fprintf (file, HOST_WIDE_INT_PRINT_DEC, (32 - INT_LOWPART (x)) & 31);
10129 /* PowerPC64 mask position. All 0's is excluded.
10130 CONST_INT 32-bit mask is considered sign-extended so any
10131 transition must occur within the CONST_INT, not on the boundary. */
10132 if (! mask64_operand (x, DImode))
10133 output_operand_lossage ("invalid %%S value");
10135 uval = INT_LOWPART (x);
10137 if (uval & 1) /* Clear Left */
10139 #if HOST_BITS_PER_WIDE_INT > 64
10140 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
10144 else /* Clear Right */
10147 #if HOST_BITS_PER_WIDE_INT > 64
10148 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
10156 fprintf (file, "%d", i);
10160 /* Like 'J' but get to the OVERFLOW/UNORDERED bit. */
10161 if (GET_CODE (x) != REG || GET_MODE (x) != CCmode)
10164 /* Bit 3 is OV bit. */
10165 i = 4 * (REGNO (x) - CR0_REGNO) + 3;
10167 /* If we want bit 31, write a shift count of zero, not 32. */
10168 fprintf (file, "%d", i == 31 ? 0 : i + 1);
10172 /* Print the symbolic name of a branch target register. */
10173 if (GET_CODE (x) != REG || (REGNO (x) != LINK_REGISTER_REGNUM
10174 && REGNO (x) != COUNT_REGISTER_REGNUM))
10175 output_operand_lossage ("invalid %%T value");
10176 else if (REGNO (x) == LINK_REGISTER_REGNUM)
10177 fputs (TARGET_NEW_MNEMONICS ? "lr" : "r", file);
10179 fputs ("ctr", file);
10183 /* High-order 16 bits of constant for use in unsigned operand. */
10185 output_operand_lossage ("invalid %%u value");
10187 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
10188 (INT_LOWPART (x) >> 16) & 0xffff);
10192 /* High-order 16 bits of constant for use in signed operand. */
10194 output_operand_lossage ("invalid %%v value");
10196 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
10197 (INT_LOWPART (x) >> 16) & 0xffff);
10201 /* Print `u' if this has an auto-increment or auto-decrement. */
10202 if (GET_CODE (x) == MEM
10203 && (GET_CODE (XEXP (x, 0)) == PRE_INC
10204 || GET_CODE (XEXP (x, 0)) == PRE_DEC))
10209 /* Print the trap code for this operand. */
10210 switch (GET_CODE (x))
10213 fputs ("eq", file); /* 4 */
10216 fputs ("ne", file); /* 24 */
10219 fputs ("lt", file); /* 16 */
10222 fputs ("le", file); /* 20 */
10225 fputs ("gt", file); /* 8 */
10228 fputs ("ge", file); /* 12 */
10231 fputs ("llt", file); /* 2 */
10234 fputs ("lle", file); /* 6 */
10237 fputs ("lgt", file); /* 1 */
10240 fputs ("lge", file); /* 5 */
10248 /* If constant, low-order 16 bits of constant, signed. Otherwise, write
10251 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
10252 ((INT_LOWPART (x) & 0xffff) ^ 0x8000) - 0x8000);
10254 print_operand (file, x, 0);
10258 /* MB value for a PowerPC64 rldic operand. */
10259 val = (GET_CODE (x) == CONST_INT
10260 ? INTVAL (x) : CONST_DOUBLE_HIGH (x));
10265 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
10266 if ((val <<= 1) < 0)
10269 #if HOST_BITS_PER_WIDE_INT == 32
10270 if (GET_CODE (x) == CONST_INT && i >= 0)
10271 i += 32; /* zero-extend high-part was all 0's */
10272 else if (GET_CODE (x) == CONST_DOUBLE && i == 32)
10274 val = CONST_DOUBLE_LOW (x);
10281 for ( ; i < 64; i++)
10282 if ((val <<= 1) < 0)
10287 fprintf (file, "%d", i + 1);
10291 if (GET_CODE (x) == MEM
10292 && legitimate_indexed_address_p (XEXP (x, 0), 0))
10297 /* Like 'L', for third word of TImode */
10298 if (GET_CODE (x) == REG)
10299 fprintf (file, "%s", reg_names[REGNO (x) + 2]);
10300 else if (GET_CODE (x) == MEM)
10302 if (GET_CODE (XEXP (x, 0)) == PRE_INC
10303 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
10304 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
10306 output_address (XEXP (adjust_address_nv (x, SImode, 8), 0));
10307 if (small_data_operand (x, GET_MODE (x)))
10308 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
10309 reg_names[SMALL_DATA_REG]);
10314 /* X is a SYMBOL_REF. Write out the name preceded by a
10315 period and without any trailing data in brackets. Used for function
10316 names. If we are configured for System V (or the embedded ABI) on
10317 the PowerPC, do not emit the period, since those systems do not use
10318 TOCs and the like. */
10319 if (GET_CODE (x) != SYMBOL_REF)
10322 /* Mark the decl as referenced so that cgraph will output the function. */
10323 if (SYMBOL_REF_DECL (x))
10324 mark_decl_referenced (SYMBOL_REF_DECL (x));
10326 if (XSTR (x, 0)[0] != '.')
10328 switch (DEFAULT_ABI)
10342 /* For macho, we need to check it see if we need a stub. */
10345 const char *name = XSTR (x, 0);
10347 if (MACHOPIC_INDIRECT
10348 && machopic_classify_symbol (x) == MACHOPIC_UNDEFINED_FUNCTION)
10349 name = machopic_indirection_name (x, /*stub_p=*/true);
10351 assemble_name (file, name);
10353 else if (TARGET_AIX)
10354 RS6000_OUTPUT_BASENAME (file, XSTR (x, 0));
10356 assemble_name (file, XSTR (x, 0));
10360 /* Like 'L', for last word of TImode. */
10361 if (GET_CODE (x) == REG)
10362 fprintf (file, "%s", reg_names[REGNO (x) + 3]);
10363 else if (GET_CODE (x) == MEM)
10365 if (GET_CODE (XEXP (x, 0)) == PRE_INC
10366 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
10367 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
10369 output_address (XEXP (adjust_address_nv (x, SImode, 12), 0));
10370 if (small_data_operand (x, GET_MODE (x)))
10371 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
10372 reg_names[SMALL_DATA_REG]);
10376 /* Print AltiVec or SPE memory operand. */
10381 if (GET_CODE (x) != MEM)
10388 /* Handle [reg]. */
10389 if (GET_CODE (tmp) == REG)
10391 fprintf (file, "0(%s)", reg_names[REGNO (tmp)]);
10394 /* Handle [reg+UIMM]. */
10395 else if (GET_CODE (tmp) == PLUS &&
10396 GET_CODE (XEXP (tmp, 1)) == CONST_INT)
10400 if (GET_CODE (XEXP (tmp, 0)) != REG)
10403 x = INTVAL (XEXP (tmp, 1));
10404 fprintf (file, "%d(%s)", x, reg_names[REGNO (XEXP (tmp, 0))]);
10408 /* Fall through. Must be [reg+reg]. */
10410 if (GET_CODE (tmp) == REG)
10411 fprintf (file, "0,%s", reg_names[REGNO (tmp)]);
10412 else if (GET_CODE (tmp) == PLUS && GET_CODE (XEXP (tmp, 1)) == REG)
10414 if (REGNO (XEXP (tmp, 0)) == 0)
10415 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 1)) ],
10416 reg_names[ REGNO (XEXP (tmp, 0)) ]);
10418 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 0)) ],
10419 reg_names[ REGNO (XEXP (tmp, 1)) ]);
10427 if (GET_CODE (x) == REG)
10428 fprintf (file, "%s", reg_names[REGNO (x)]);
10429 else if (GET_CODE (x) == MEM)
10431 /* We need to handle PRE_INC and PRE_DEC here, since we need to
10432 know the width from the mode. */
10433 if (GET_CODE (XEXP (x, 0)) == PRE_INC)
10434 fprintf (file, "%d(%s)", GET_MODE_SIZE (GET_MODE (x)),
10435 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
10436 else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
10437 fprintf (file, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x)),
10438 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
10440 output_address (XEXP (x, 0));
10443 output_addr_const (file, x);
10447 assemble_name (file, rs6000_get_some_local_dynamic_name ());
10451 output_operand_lossage ("invalid %%xn code");
10455 /* Print the address of an operand. */
10458 print_operand_address (FILE *file, rtx x)
10460 if (GET_CODE (x) == REG)
10461 fprintf (file, "0(%s)", reg_names[ REGNO (x) ]);
10462 else if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST
10463 || GET_CODE (x) == LABEL_REF)
10465 output_addr_const (file, x);
10466 if (small_data_operand (x, GET_MODE (x)))
10467 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
10468 reg_names[SMALL_DATA_REG]);
10469 else if (TARGET_TOC)
10472 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG)
10474 if (REGNO (XEXP (x, 0)) == 0)
10475 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 1)) ],
10476 reg_names[ REGNO (XEXP (x, 0)) ]);
10478 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 0)) ],
10479 reg_names[ REGNO (XEXP (x, 1)) ]);
10481 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
10482 fprintf (file, HOST_WIDE_INT_PRINT_DEC "(%s)",
10483 INTVAL (XEXP (x, 1)), reg_names[ REGNO (XEXP (x, 0)) ]);
10485 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
10486 && CONSTANT_P (XEXP (x, 1)))
10488 output_addr_const (file, XEXP (x, 1));
10489 fprintf (file, "@l(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
10493 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
10494 && CONSTANT_P (XEXP (x, 1)))
10496 fprintf (file, "lo16(");
10497 output_addr_const (file, XEXP (x, 1));
10498 fprintf (file, ")(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
10501 else if (legitimate_constant_pool_address_p (x))
10503 if (TARGET_AIX && (!TARGET_ELF || !TARGET_MINIMAL_TOC))
10505 rtx contains_minus = XEXP (x, 1);
10509 /* Find the (minus (sym) (toc)) buried in X, and temporarily
10510 turn it into (sym) for output_addr_const. */
10511 while (GET_CODE (XEXP (contains_minus, 0)) != MINUS)
10512 contains_minus = XEXP (contains_minus, 0);
10514 minus = XEXP (contains_minus, 0);
10515 symref = XEXP (minus, 0);
10516 XEXP (contains_minus, 0) = symref;
10521 name = XSTR (symref, 0);
10522 newname = alloca (strlen (name) + sizeof ("@toc"));
10523 strcpy (newname, name);
10524 strcat (newname, "@toc");
10525 XSTR (symref, 0) = newname;
10527 output_addr_const (file, XEXP (x, 1));
10529 XSTR (symref, 0) = name;
10530 XEXP (contains_minus, 0) = minus;
10533 output_addr_const (file, XEXP (x, 1));
10535 fprintf (file, "(%s)", reg_names[REGNO (XEXP (x, 0))]);
10541 /* Target hook for assembling integer objects. The PowerPC version has
10542 to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
10543 is defined. It also needs to handle DI-mode objects on 64-bit
10547 rs6000_assemble_integer (rtx x, unsigned int size, int aligned_p)
10549 #ifdef RELOCATABLE_NEEDS_FIXUP
10550 /* Special handling for SI values. */
10551 if (size == 4 && aligned_p)
10553 extern int in_toc_section (void);
10554 static int recurse = 0;
10556 /* For -mrelocatable, we mark all addresses that need to be fixed up
10557 in the .fixup section. */
10558 if (TARGET_RELOCATABLE
10559 && !in_toc_section ()
10560 && !in_text_section ()
10561 && !in_unlikely_text_section ()
10563 && GET_CODE (x) != CONST_INT
10564 && GET_CODE (x) != CONST_DOUBLE
10570 ASM_GENERATE_INTERNAL_LABEL (buf, "LCP", fixuplabelno);
10572 ASM_OUTPUT_LABEL (asm_out_file, buf);
10573 fprintf (asm_out_file, "\t.long\t(");
10574 output_addr_const (asm_out_file, x);
10575 fprintf (asm_out_file, ")@fixup\n");
10576 fprintf (asm_out_file, "\t.section\t\".fixup\",\"aw\"\n");
10577 ASM_OUTPUT_ALIGN (asm_out_file, 2);
10578 fprintf (asm_out_file, "\t.long\t");
10579 assemble_name (asm_out_file, buf);
10580 fprintf (asm_out_file, "\n\t.previous\n");
10584 /* Remove initial .'s to turn a -mcall-aixdesc function
10585 address into the address of the descriptor, not the function
10587 else if (GET_CODE (x) == SYMBOL_REF
10588 && XSTR (x, 0)[0] == '.'
10589 && DEFAULT_ABI == ABI_AIX)
10591 const char *name = XSTR (x, 0);
10592 while (*name == '.')
10595 fprintf (asm_out_file, "\t.long\t%s\n", name);
10599 #endif /* RELOCATABLE_NEEDS_FIXUP */
10600 return default_assemble_integer (x, size, aligned_p);
10603 #ifdef HAVE_GAS_HIDDEN
10604 /* Emit an assembler directive to set symbol visibility for DECL to
10605 VISIBILITY_TYPE. */
10608 rs6000_assemble_visibility (tree decl, int vis)
10610 /* Functions need to have their entry point symbol visibility set as
10611 well as their descriptor symbol visibility. */
10612 if (DEFAULT_ABI == ABI_AIX && TREE_CODE (decl) == FUNCTION_DECL)
10614 static const char * const visibility_types[] = {
10615 NULL, "internal", "hidden", "protected"
10618 const char *name, *type;
10620 name = ((* targetm.strip_name_encoding)
10621 (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl))));
10622 type = visibility_types[vis];
10624 fprintf (asm_out_file, "\t.%s\t%s\n", type, name);
10625 fprintf (asm_out_file, "\t.%s\t.%s\n", type, name);
10628 default_assemble_visibility (decl, vis);
10633 rs6000_reverse_condition (enum machine_mode mode, enum rtx_code code)
10635 /* Reversal of FP compares takes care -- an ordered compare
10636 becomes an unordered compare and vice versa. */
10637 if (mode == CCFPmode
10638 && (!flag_finite_math_only
10639 || code == UNLT || code == UNLE || code == UNGT || code == UNGE
10640 || code == UNEQ || code == LTGT))
10641 return reverse_condition_maybe_unordered (code);
10643 return reverse_condition (code);
10646 /* Generate a compare for CODE. Return a brand-new rtx that
10647 represents the result of the compare. */
10650 rs6000_generate_compare (enum rtx_code code)
10652 enum machine_mode comp_mode;
10653 rtx compare_result;
10655 if (rs6000_compare_fp_p)
10656 comp_mode = CCFPmode;
10657 else if (code == GTU || code == LTU
10658 || code == GEU || code == LEU)
10659 comp_mode = CCUNSmode;
10661 comp_mode = CCmode;
10663 /* First, the compare. */
10664 compare_result = gen_reg_rtx (comp_mode);
10666 /* SPE FP compare instructions on the GPRs. Yuck! */
10667 if ((TARGET_E500 && !TARGET_FPRS && TARGET_HARD_FLOAT)
10668 && rs6000_compare_fp_p)
10670 rtx cmp, or1, or2, or_result, compare_result2;
10672 /* Note: The E500 comparison instructions set the GT bit (x +
10673 1), on success. This explains the mess. */
10677 case EQ: case UNEQ: case NE: case LTGT:
10678 cmp = flag_finite_math_only
10679 ? gen_tstsfeq_gpr (compare_result, rs6000_compare_op0,
10680 rs6000_compare_op1)
10681 : gen_cmpsfeq_gpr (compare_result, rs6000_compare_op0,
10682 rs6000_compare_op1);
10684 case GT: case GTU: case UNGT: case UNGE: case GE: case GEU:
10685 cmp = flag_finite_math_only
10686 ? gen_tstsfgt_gpr (compare_result, rs6000_compare_op0,
10687 rs6000_compare_op1)
10688 : gen_cmpsfgt_gpr (compare_result, rs6000_compare_op0,
10689 rs6000_compare_op1);
10691 case LT: case LTU: case UNLT: case UNLE: case LE: case LEU:
10692 cmp = flag_finite_math_only
10693 ? gen_tstsflt_gpr (compare_result, rs6000_compare_op0,
10694 rs6000_compare_op1)
10695 : gen_cmpsflt_gpr (compare_result, rs6000_compare_op0,
10696 rs6000_compare_op1);
10702 /* Synthesize LE and GE from LT/GT || EQ. */
10703 if (code == LE || code == GE || code == LEU || code == GEU)
10709 case LE: code = LT; break;
10710 case GE: code = GT; break;
10711 case LEU: code = LT; break;
10712 case GEU: code = GT; break;
10716 or1 = gen_reg_rtx (SImode);
10717 or2 = gen_reg_rtx (SImode);
10718 or_result = gen_reg_rtx (CCEQmode);
10719 compare_result2 = gen_reg_rtx (CCFPmode);
10722 cmp = flag_finite_math_only
10723 ? gen_tstsfeq_gpr (compare_result2, rs6000_compare_op0,
10724 rs6000_compare_op1)
10725 : gen_cmpsfeq_gpr (compare_result2, rs6000_compare_op0,
10726 rs6000_compare_op1);
10729 or1 = gen_rtx_GT (SImode, compare_result, const0_rtx);
10730 or2 = gen_rtx_GT (SImode, compare_result2, const0_rtx);
10732 /* OR them together. */
10733 cmp = gen_rtx_SET (VOIDmode, or_result,
10734 gen_rtx_COMPARE (CCEQmode,
10735 gen_rtx_IOR (SImode, or1, or2),
10737 compare_result = or_result;
10742 if (code == NE || code == LTGT)
10751 emit_insn (gen_rtx_SET (VOIDmode, compare_result,
10752 gen_rtx_COMPARE (comp_mode,
10753 rs6000_compare_op0,
10754 rs6000_compare_op1)));
10756 /* Some kinds of FP comparisons need an OR operation;
10757 under flag_finite_math_only we don't bother. */
10758 if (rs6000_compare_fp_p
10759 && ! flag_finite_math_only
10760 && ! (TARGET_HARD_FLOAT && TARGET_E500 && !TARGET_FPRS)
10761 && (code == LE || code == GE
10762 || code == UNEQ || code == LTGT
10763 || code == UNGT || code == UNLT))
10765 enum rtx_code or1, or2;
10766 rtx or1_rtx, or2_rtx, compare2_rtx;
10767 rtx or_result = gen_reg_rtx (CCEQmode);
10771 case LE: or1 = LT; or2 = EQ; break;
10772 case GE: or1 = GT; or2 = EQ; break;
10773 case UNEQ: or1 = UNORDERED; or2 = EQ; break;
10774 case LTGT: or1 = LT; or2 = GT; break;
10775 case UNGT: or1 = UNORDERED; or2 = GT; break;
10776 case UNLT: or1 = UNORDERED; or2 = LT; break;
10779 validate_condition_mode (or1, comp_mode);
10780 validate_condition_mode (or2, comp_mode);
10781 or1_rtx = gen_rtx_fmt_ee (or1, SImode, compare_result, const0_rtx);
10782 or2_rtx = gen_rtx_fmt_ee (or2, SImode, compare_result, const0_rtx);
10783 compare2_rtx = gen_rtx_COMPARE (CCEQmode,
10784 gen_rtx_IOR (SImode, or1_rtx, or2_rtx),
10786 emit_insn (gen_rtx_SET (VOIDmode, or_result, compare2_rtx));
10788 compare_result = or_result;
10792 validate_condition_mode (code, GET_MODE (compare_result));
10794 return gen_rtx_fmt_ee (code, VOIDmode, compare_result, const0_rtx);
10798 /* Emit the RTL for an sCOND pattern. */
10801 rs6000_emit_sCOND (enum rtx_code code, rtx result)
10804 enum machine_mode op_mode;
10805 enum rtx_code cond_code;
10807 condition_rtx = rs6000_generate_compare (code);
10808 cond_code = GET_CODE (condition_rtx);
10810 if (TARGET_E500 && rs6000_compare_fp_p
10811 && !TARGET_FPRS && TARGET_HARD_FLOAT)
10815 PUT_MODE (condition_rtx, SImode);
10816 t = XEXP (condition_rtx, 0);
10818 if (cond_code != NE && cond_code != EQ)
10821 if (cond_code == NE)
10822 emit_insn (gen_e500_flip_eq_bit (t, t));
10824 emit_insn (gen_move_from_CR_eq_bit (result, t));
10828 if (cond_code == NE
10829 || cond_code == GE || cond_code == LE
10830 || cond_code == GEU || cond_code == LEU
10831 || cond_code == ORDERED || cond_code == UNGE || cond_code == UNLE)
10833 rtx not_result = gen_reg_rtx (CCEQmode);
10834 rtx not_op, rev_cond_rtx;
10835 enum machine_mode cc_mode;
10837 cc_mode = GET_MODE (XEXP (condition_rtx, 0));
10839 rev_cond_rtx = gen_rtx_fmt_ee (rs6000_reverse_condition (cc_mode, cond_code),
10840 SImode, XEXP (condition_rtx, 0), const0_rtx);
10841 not_op = gen_rtx_COMPARE (CCEQmode, rev_cond_rtx, const0_rtx);
10842 emit_insn (gen_rtx_SET (VOIDmode, not_result, not_op));
10843 condition_rtx = gen_rtx_EQ (VOIDmode, not_result, const0_rtx);
10846 op_mode = GET_MODE (rs6000_compare_op0);
10847 if (op_mode == VOIDmode)
10848 op_mode = GET_MODE (rs6000_compare_op1);
10850 if (TARGET_POWERPC64 && (op_mode == DImode || rs6000_compare_fp_p))
10852 PUT_MODE (condition_rtx, DImode);
10853 convert_move (result, condition_rtx, 0);
10857 PUT_MODE (condition_rtx, SImode);
10858 emit_insn (gen_rtx_SET (VOIDmode, result, condition_rtx));
10862 /* Emit a branch of kind CODE to location LOC. */
10865 rs6000_emit_cbranch (enum rtx_code code, rtx loc)
10867 rtx condition_rtx, loc_ref;
10869 condition_rtx = rs6000_generate_compare (code);
10870 loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
10871 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
10872 gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
10873 loc_ref, pc_rtx)));
10876 /* Return the string to output a conditional branch to LABEL, which is
10877 the operand number of the label, or -1 if the branch is really a
10878 conditional return.
10880 OP is the conditional expression. XEXP (OP, 0) is assumed to be a
10881 condition code register and its mode specifies what kind of
10882 comparison we made.
10884 REVERSED is nonzero if we should reverse the sense of the comparison.
10886 INSN is the insn. */
10889 output_cbranch (rtx op, const char *label, int reversed, rtx insn)
10891 static char string[64];
10892 enum rtx_code code = GET_CODE (op);
10893 rtx cc_reg = XEXP (op, 0);
10894 enum machine_mode mode = GET_MODE (cc_reg);
10895 int cc_regno = REGNO (cc_reg) - CR0_REGNO;
10896 int need_longbranch = label != NULL && get_attr_length (insn) == 8;
10897 int really_reversed = reversed ^ need_longbranch;
10903 validate_condition_mode (code, mode);
10905 /* Work out which way this really branches. We could use
10906 reverse_condition_maybe_unordered here always but this
10907 makes the resulting assembler clearer. */
10908 if (really_reversed)
10910 /* Reversal of FP compares takes care -- an ordered compare
10911 becomes an unordered compare and vice versa. */
10912 if (mode == CCFPmode)
10913 code = reverse_condition_maybe_unordered (code);
10915 code = reverse_condition (code);
10918 if ((TARGET_E500 && !TARGET_FPRS && TARGET_HARD_FLOAT) && mode == CCFPmode)
10920 /* The efscmp/tst* instructions twiddle bit 2, which maps nicely
10923 /* Opposite of GT. */
10925 else if (code == NE)
10933 /* Not all of these are actually distinct opcodes, but
10934 we distinguish them for clarity of the resulting assembler. */
10935 case NE: case LTGT:
10936 ccode = "ne"; break;
10937 case EQ: case UNEQ:
10938 ccode = "eq"; break;
10940 ccode = "ge"; break;
10941 case GT: case GTU: case UNGT:
10942 ccode = "gt"; break;
10944 ccode = "le"; break;
10945 case LT: case LTU: case UNLT:
10946 ccode = "lt"; break;
10947 case UNORDERED: ccode = "un"; break;
10948 case ORDERED: ccode = "nu"; break;
10949 case UNGE: ccode = "nl"; break;
10950 case UNLE: ccode = "ng"; break;
10955 /* Maybe we have a guess as to how likely the branch is.
10956 The old mnemonics don't have a way to specify this information. */
10958 note = find_reg_note (insn, REG_BR_PROB, NULL_RTX);
10959 if (note != NULL_RTX)
10961 /* PROB is the difference from 50%. */
10962 int prob = INTVAL (XEXP (note, 0)) - REG_BR_PROB_BASE / 2;
10964 /* Only hint for highly probable/improbable branches on newer
10965 cpus as static prediction overrides processor dynamic
10966 prediction. For older cpus we may as well always hint, but
10967 assume not taken for branches that are very close to 50% as a
10968 mispredicted taken branch is more expensive than a
10969 mispredicted not-taken branch. */
10970 if (rs6000_always_hint
10971 || abs (prob) > REG_BR_PROB_BASE / 100 * 48)
10973 if (abs (prob) > REG_BR_PROB_BASE / 20
10974 && ((prob > 0) ^ need_longbranch))
10982 s += sprintf (s, "{b%sr|b%slr%s} ", ccode, ccode, pred);
10984 s += sprintf (s, "{b%s|b%s%s} ", ccode, ccode, pred);
10986 /* We need to escape any '%' characters in the reg_names string.
10987 Assume they'd only be the first character.... */
10988 if (reg_names[cc_regno + CR0_REGNO][0] == '%')
10990 s += sprintf (s, "%s", reg_names[cc_regno + CR0_REGNO]);
10994 /* If the branch distance was too far, we may have to use an
10995 unconditional branch to go the distance. */
10996 if (need_longbranch)
10997 s += sprintf (s, ",$+8\n\tb %s", label);
10999 s += sprintf (s, ",%s", label);
11005 /* Return the string to flip the EQ bit on a CR. */
11007 output_e500_flip_eq_bit (rtx dst, rtx src)
11009 static char string[64];
11012 if (GET_CODE (dst) != REG || ! CR_REGNO_P (REGNO (dst))
11013 || GET_CODE (src) != REG || ! CR_REGNO_P (REGNO (src)))
11017 a = 4 * (REGNO (dst) - CR0_REGNO) + 2;
11018 b = 4 * (REGNO (src) - CR0_REGNO) + 2;
11020 sprintf (string, "crnot %d,%d", a, b);
11024 /* Emit a conditional move: move TRUE_COND to DEST if OP of the
11025 operands of the last comparison is nonzero/true, FALSE_COND if it
11026 is zero/false. Return 0 if the hardware has no such operation. */
11029 rs6000_emit_cmove (rtx dest, rtx op, rtx true_cond, rtx false_cond)
11031 enum rtx_code code = GET_CODE (op);
11032 rtx op0 = rs6000_compare_op0;
11033 rtx op1 = rs6000_compare_op1;
11034 REAL_VALUE_TYPE c1;
11035 enum machine_mode compare_mode = GET_MODE (op0);
11036 enum machine_mode result_mode = GET_MODE (dest);
11039 /* These modes should always match. */
11040 if (GET_MODE (op1) != compare_mode
11041 /* In the isel case however, we can use a compare immediate, so
11042 op1 may be a small constant. */
11043 && (!TARGET_ISEL || !short_cint_operand (op1, VOIDmode)))
11045 if (GET_MODE (true_cond) != result_mode)
11047 if (GET_MODE (false_cond) != result_mode)
11050 /* First, work out if the hardware can do this at all, or
11051 if it's too slow.... */
11052 if (! rs6000_compare_fp_p)
11055 return rs6000_emit_int_cmove (dest, op, true_cond, false_cond);
11058 else if (TARGET_E500 && TARGET_HARD_FLOAT && !TARGET_FPRS
11059 && GET_MODE_CLASS (compare_mode) == MODE_FLOAT)
11062 /* Eliminate half of the comparisons by switching operands, this
11063 makes the remaining code simpler. */
11064 if (code == UNLT || code == UNGT || code == UNORDERED || code == NE
11065 || code == LTGT || code == LT || code == UNLE)
11067 code = reverse_condition_maybe_unordered (code);
11069 true_cond = false_cond;
11073 /* UNEQ and LTGT take four instructions for a comparison with zero,
11074 it'll probably be faster to use a branch here too. */
11075 if (code == UNEQ && HONOR_NANS (compare_mode))
11078 if (GET_CODE (op1) == CONST_DOUBLE)
11079 REAL_VALUE_FROM_CONST_DOUBLE (c1, op1);
11081 /* We're going to try to implement comparisons by performing
11082 a subtract, then comparing against zero. Unfortunately,
11083 Inf - Inf is NaN which is not zero, and so if we don't
11084 know that the operand is finite and the comparison
11085 would treat EQ different to UNORDERED, we can't do it. */
11086 if (HONOR_INFINITIES (compare_mode)
11087 && code != GT && code != UNGE
11088 && (GET_CODE (op1) != CONST_DOUBLE || real_isinf (&c1))
11089 /* Constructs of the form (a OP b ? a : b) are safe. */
11090 && ((! rtx_equal_p (op0, false_cond) && ! rtx_equal_p (op1, false_cond))
11091 || (! rtx_equal_p (op0, true_cond)
11092 && ! rtx_equal_p (op1, true_cond))))
11094 /* At this point we know we can use fsel. */
11096 /* Reduce the comparison to a comparison against zero. */
11097 temp = gen_reg_rtx (compare_mode);
11098 emit_insn (gen_rtx_SET (VOIDmode, temp,
11099 gen_rtx_MINUS (compare_mode, op0, op1)));
11101 op1 = CONST0_RTX (compare_mode);
11103 /* If we don't care about NaNs we can reduce some of the comparisons
11104 down to faster ones. */
11105 if (! HONOR_NANS (compare_mode))
11111 true_cond = false_cond;
11124 /* Now, reduce everything down to a GE. */
11131 temp = gen_reg_rtx (compare_mode);
11132 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
11137 temp = gen_reg_rtx (compare_mode);
11138 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_ABS (compare_mode, op0)));
11143 temp = gen_reg_rtx (compare_mode);
11144 emit_insn (gen_rtx_SET (VOIDmode, temp,
11145 gen_rtx_NEG (compare_mode,
11146 gen_rtx_ABS (compare_mode, op0))));
11151 /* a UNGE 0 <-> (a GE 0 || -a UNLT 0) */
11152 temp = gen_reg_rtx (result_mode);
11153 emit_insn (gen_rtx_SET (VOIDmode, temp,
11154 gen_rtx_IF_THEN_ELSE (result_mode,
11155 gen_rtx_GE (VOIDmode,
11157 true_cond, false_cond)));
11158 false_cond = true_cond;
11161 temp = gen_reg_rtx (compare_mode);
11162 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
11167 /* a GT 0 <-> (a GE 0 && -a UNLT 0) */
11168 temp = gen_reg_rtx (result_mode);
11169 emit_insn (gen_rtx_SET (VOIDmode, temp,
11170 gen_rtx_IF_THEN_ELSE (result_mode,
11171 gen_rtx_GE (VOIDmode,
11173 true_cond, false_cond)));
11174 true_cond = false_cond;
11177 temp = gen_reg_rtx (compare_mode);
11178 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
11186 emit_insn (gen_rtx_SET (VOIDmode, dest,
11187 gen_rtx_IF_THEN_ELSE (result_mode,
11188 gen_rtx_GE (VOIDmode,
11190 true_cond, false_cond)));
11194 /* Same as above, but for ints (isel). */
11197 rs6000_emit_int_cmove (rtx dest, rtx op, rtx true_cond, rtx false_cond)
11199 rtx condition_rtx, cr;
11201 /* All isel implementations thus far are 32-bits. */
11202 if (GET_MODE (rs6000_compare_op0) != SImode)
11205 /* We still have to do the compare, because isel doesn't do a
11206 compare, it just looks at the CRx bits set by a previous compare
11208 condition_rtx = rs6000_generate_compare (GET_CODE (op));
11209 cr = XEXP (condition_rtx, 0);
11211 if (GET_MODE (cr) == CCmode)
11212 emit_insn (gen_isel_signed (dest, condition_rtx,
11213 true_cond, false_cond, cr));
11215 emit_insn (gen_isel_unsigned (dest, condition_rtx,
11216 true_cond, false_cond, cr));
11222 output_isel (rtx *operands)
11224 enum rtx_code code;
11226 code = GET_CODE (operands[1]);
11227 if (code == GE || code == GEU || code == LE || code == LEU || code == NE)
11229 PUT_CODE (operands[1], reverse_condition (code));
11230 return "isel %0,%3,%2,%j1";
11233 return "isel %0,%2,%3,%j1";
11237 rs6000_emit_minmax (rtx dest, enum rtx_code code, rtx op0, rtx op1)
11239 enum machine_mode mode = GET_MODE (op0);
11243 if (code == SMAX || code == SMIN)
11248 if (code == SMAX || code == UMAX)
11249 target = emit_conditional_move (dest, c, op0, op1, mode,
11250 op0, op1, mode, 0);
11252 target = emit_conditional_move (dest, c, op0, op1, mode,
11253 op1, op0, mode, 0);
11254 if (target == NULL_RTX)
11256 if (target != dest)
11257 emit_move_insn (dest, target);
11260 /* Emit instructions to move SRC to DST. Called by splitters for
11261 multi-register moves. It will emit at most one instruction for
11262 each register that is accessed; that is, it won't emit li/lis pairs
11263 (or equivalent for 64-bit code). One of SRC or DST must be a hard
11267 rs6000_split_multireg_move (rtx dst, rtx src)
11269 /* The register number of the first register being moved. */
11271 /* The mode that is to be moved. */
11272 enum machine_mode mode;
11273 /* The mode that the move is being done in, and its size. */
11274 enum machine_mode reg_mode;
11276 /* The number of registers that will be moved. */
11279 reg = REG_P (dst) ? REGNO (dst) : REGNO (src);
11280 mode = GET_MODE (dst);
11281 nregs = HARD_REGNO_NREGS (reg, mode);
11282 if (FP_REGNO_P (reg))
11284 else if (ALTIVEC_REGNO_P (reg))
11285 reg_mode = V16QImode;
11287 reg_mode = word_mode;
11288 reg_mode_size = GET_MODE_SIZE (reg_mode);
11290 if (reg_mode_size * nregs != GET_MODE_SIZE (mode))
11293 if (REG_P (src) && REG_P (dst) && (REGNO (src) < REGNO (dst)))
11295 /* Move register range backwards, if we might have destructive
11298 for (i = nregs - 1; i >= 0; i--)
11299 emit_insn (gen_rtx_SET (VOIDmode,
11300 simplify_gen_subreg (reg_mode, dst, mode,
11301 i * reg_mode_size),
11302 simplify_gen_subreg (reg_mode, src, mode,
11303 i * reg_mode_size)));
11309 bool used_update = false;
11311 if (MEM_P (src) && INT_REGNO_P (reg))
11315 if (GET_CODE (XEXP (src, 0)) == PRE_INC
11316 || GET_CODE (XEXP (src, 0)) == PRE_DEC)
11319 breg = XEXP (XEXP (src, 0), 0);
11320 delta_rtx = GET_CODE (XEXP (src, 0)) == PRE_INC
11321 ? GEN_INT (GET_MODE_SIZE (GET_MODE (src)))
11322 : GEN_INT (-GET_MODE_SIZE (GET_MODE (src)));
11323 emit_insn (TARGET_32BIT
11324 ? gen_addsi3 (breg, breg, delta_rtx)
11325 : gen_adddi3 (breg, breg, delta_rtx));
11326 src = gen_rtx_MEM (mode, breg);
11328 else if (! offsettable_memref_p (src))
11330 rtx newsrc, basereg;
11331 basereg = gen_rtx_REG (Pmode, reg);
11332 emit_insn (gen_rtx_SET (VOIDmode, basereg, XEXP (src, 0)));
11333 newsrc = gen_rtx_MEM (GET_MODE (src), basereg);
11334 MEM_COPY_ATTRIBUTES (newsrc, src);
11338 /* We have now address involving an base register only.
11339 If we use one of the registers to address memory,
11340 we have change that register last. */
11342 breg = (GET_CODE (XEXP (src, 0)) == PLUS
11343 ? XEXP (XEXP (src, 0), 0)
11349 if (REGNO (breg) >= REGNO (dst)
11350 && REGNO (breg) < REGNO (dst) + nregs)
11351 j = REGNO (breg) - REGNO (dst);
11354 if (GET_CODE (dst) == MEM && INT_REGNO_P (reg))
11358 if (GET_CODE (XEXP (dst, 0)) == PRE_INC
11359 || GET_CODE (XEXP (dst, 0)) == PRE_DEC)
11362 breg = XEXP (XEXP (dst, 0), 0);
11363 delta_rtx = GET_CODE (XEXP (dst, 0)) == PRE_INC
11364 ? GEN_INT (GET_MODE_SIZE (GET_MODE (dst)))
11365 : GEN_INT (-GET_MODE_SIZE (GET_MODE (dst)));
11367 /* We have to update the breg before doing the store.
11368 Use store with update, if available. */
11372 rtx nsrc = simplify_gen_subreg (reg_mode, src, mode, 0);
11373 emit_insn (TARGET_32BIT
11374 ? gen_movsi_update (breg, breg, delta_rtx, nsrc)
11375 : gen_movdi_update (breg, breg, delta_rtx, nsrc));
11376 used_update = true;
11379 emit_insn (TARGET_32BIT
11380 ? gen_addsi3 (breg, breg, delta_rtx)
11381 : gen_adddi3 (breg, breg, delta_rtx));
11382 dst = gen_rtx_MEM (mode, breg);
11384 else if (! offsettable_memref_p (dst))
11388 for (i = 0; i < nregs; i++)
11390 /* Calculate index to next subword. */
11395 /* If compiler already emited move of first word by
11396 store with update, no need to do anything. */
11397 if (j == 0 && used_update)
11400 emit_insn (gen_rtx_SET (VOIDmode,
11401 simplify_gen_subreg (reg_mode, dst, mode,
11402 j * reg_mode_size),
11403 simplify_gen_subreg (reg_mode, src, mode,
11404 j * reg_mode_size)));
11410 /* This page contains routines that are used to determine what the
11411 function prologue and epilogue code will do and write them out. */
11413 /* Return the first fixed-point register that is required to be
11414 saved. 32 if none. */
11417 first_reg_to_save (void)
11421 /* Find lowest numbered live register. */
11422 for (first_reg = 13; first_reg <= 31; first_reg++)
11423 if (regs_ever_live[first_reg]
11424 && (! call_used_regs[first_reg]
11425 || (first_reg == RS6000_PIC_OFFSET_TABLE_REGNUM
11426 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
11427 || (DEFAULT_ABI == ABI_DARWIN && flag_pic)
11428 || (TARGET_TOC && TARGET_MINIMAL_TOC)))))
11433 && current_function_uses_pic_offset_table
11434 && first_reg > RS6000_PIC_OFFSET_TABLE_REGNUM)
11435 return RS6000_PIC_OFFSET_TABLE_REGNUM;
11441 /* Similar, for FP regs. */
11444 first_fp_reg_to_save (void)
11448 /* Find lowest numbered live register. */
11449 for (first_reg = 14 + 32; first_reg <= 63; first_reg++)
11450 if (regs_ever_live[first_reg])
11456 /* Similar, for AltiVec regs. */
11459 first_altivec_reg_to_save (void)
11463 /* Stack frame remains as is unless we are in AltiVec ABI. */
11464 if (! TARGET_ALTIVEC_ABI)
11465 return LAST_ALTIVEC_REGNO + 1;
11467 /* Find lowest numbered live register. */
11468 for (i = FIRST_ALTIVEC_REGNO + 20; i <= LAST_ALTIVEC_REGNO; ++i)
11469 if (regs_ever_live[i])
11475 /* Return a 32-bit mask of the AltiVec registers we need to set in
11476 VRSAVE. Bit n of the return value is 1 if Vn is live. The MSB in
11477 the 32-bit word is 0. */
11479 static unsigned int
11480 compute_vrsave_mask (void)
11482 unsigned int i, mask = 0;
11484 /* First, find out if we use _any_ altivec registers. */
11485 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
11486 if (regs_ever_live[i])
11487 mask |= ALTIVEC_REG_BIT (i);
11492 /* Next, remove the argument registers from the set. These must
11493 be in the VRSAVE mask set by the caller, so we don't need to add
11494 them in again. More importantly, the mask we compute here is
11495 used to generate CLOBBERs in the set_vrsave insn, and we do not
11496 wish the argument registers to die. */
11497 for (i = cfun->args_info.vregno - 1; i >= ALTIVEC_ARG_MIN_REG; --i)
11498 mask &= ~ALTIVEC_REG_BIT (i);
11500 /* Similarly, remove the return value from the set. */
11503 diddle_return_value (is_altivec_return_reg, &yes);
11505 mask &= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN);
11511 /* For a very restricted set of circumstances, we can cut down the
11512 size of prologs/epilogs by calling our own save/restore-the-world
11516 compute_save_world_info(rs6000_stack_t *info_ptr)
11518 info_ptr->world_save_p =
11519 (DEFAULT_ABI == ABI_DARWIN)
11520 && ! (current_function_calls_setjmp && flag_exceptions)
11521 && info_ptr->first_fp_reg_save == FIRST_SAVED_FP_REGNO
11522 && info_ptr->first_gp_reg_save == FIRST_SAVED_GP_REGNO
11523 && info_ptr->first_altivec_reg_save == FIRST_SAVED_ALTIVEC_REGNO
11524 && info_ptr->cr_save_p;
11526 /* This will not work in conjunction with sibcalls. Make sure there
11527 are none. (This check is expensive, but seldom executed.) */
11528 if ( info_ptr->world_save_p )
11531 for ( insn = get_last_insn_anywhere (); insn; insn = PREV_INSN (insn))
11532 if ( GET_CODE (insn) == CALL_INSN
11533 && SIBLING_CALL_P (insn))
11535 info_ptr->world_save_p = 0;
11540 if (info_ptr->world_save_p)
11542 /* Even if we're not touching VRsave, make sure there's room on the
11543 stack for it, if it looks like we're calling SAVE_WORLD, which
11544 will attempt to save it. */
11545 info_ptr->vrsave_size = 4;
11547 /* "Save" the VRsave register too if we're saving the world. */
11548 if (info_ptr->vrsave_mask == 0)
11549 info_ptr->vrsave_mask = compute_vrsave_mask ();
11551 /* Because the Darwin register save/restore routines only handle
11552 F14 .. F31 and V20 .. V31 as per the ABI, perform a consistancy
11553 check and abort if there's something worng. */
11554 if (info_ptr->first_fp_reg_save < FIRST_SAVED_FP_REGNO
11555 || info_ptr->first_altivec_reg_save < FIRST_SAVED_ALTIVEC_REGNO)
11563 is_altivec_return_reg (rtx reg, void *xyes)
11565 bool *yes = (bool *) xyes;
11566 if (REGNO (reg) == ALTIVEC_ARG_RETURN)
11571 /* Calculate the stack information for the current function. This is
11572 complicated by having two separate calling sequences, the AIX calling
11573 sequence and the V.4 calling sequence.
11575 AIX (and Darwin/Mac OS X) stack frames look like:
11577 SP----> +---------------------------------------+
11578 | back chain to caller | 0 0
11579 +---------------------------------------+
11580 | saved CR | 4 8 (8-11)
11581 +---------------------------------------+
11583 +---------------------------------------+
11584 | reserved for compilers | 12 24
11585 +---------------------------------------+
11586 | reserved for binders | 16 32
11587 +---------------------------------------+
11588 | saved TOC pointer | 20 40
11589 +---------------------------------------+
11590 | Parameter save area (P) | 24 48
11591 +---------------------------------------+
11592 | Alloca space (A) | 24+P etc.
11593 +---------------------------------------+
11594 | Local variable space (L) | 24+P+A
11595 +---------------------------------------+
11596 | Float/int conversion temporary (X) | 24+P+A+L
11597 +---------------------------------------+
11598 | Save area for AltiVec registers (W) | 24+P+A+L+X
11599 +---------------------------------------+
11600 | AltiVec alignment padding (Y) | 24+P+A+L+X+W
11601 +---------------------------------------+
11602 | Save area for VRSAVE register (Z) | 24+P+A+L+X+W+Y
11603 +---------------------------------------+
11604 | Save area for GP registers (G) | 24+P+A+X+L+X+W+Y+Z
11605 +---------------------------------------+
11606 | Save area for FP registers (F) | 24+P+A+X+L+X+W+Y+Z+G
11607 +---------------------------------------+
11608 old SP->| back chain to caller's caller |
11609 +---------------------------------------+
11611 The required alignment for AIX configurations is two words (i.e., 8
11615 V.4 stack frames look like:
11617 SP----> +---------------------------------------+
11618 | back chain to caller | 0
11619 +---------------------------------------+
11620 | caller's saved LR | 4
11621 +---------------------------------------+
11622 | Parameter save area (P) | 8
11623 +---------------------------------------+
11624 | Alloca space (A) | 8+P
11625 +---------------------------------------+
11626 | Varargs save area (V) | 8+P+A
11627 +---------------------------------------+
11628 | Local variable space (L) | 8+P+A+V
11629 +---------------------------------------+
11630 | Float/int conversion temporary (X) | 8+P+A+V+L
11631 +---------------------------------------+
11632 | Save area for AltiVec registers (W) | 8+P+A+V+L+X
11633 +---------------------------------------+
11634 | AltiVec alignment padding (Y) | 8+P+A+V+L+X+W
11635 +---------------------------------------+
11636 | Save area for VRSAVE register (Z) | 8+P+A+V+L+X+W+Y
11637 +---------------------------------------+
11638 | SPE: area for 64-bit GP registers |
11639 +---------------------------------------+
11640 | SPE alignment padding |
11641 +---------------------------------------+
11642 | saved CR (C) | 8+P+A+V+L+X+W+Y+Z
11643 +---------------------------------------+
11644 | Save area for GP registers (G) | 8+P+A+V+L+X+W+Y+Z+C
11645 +---------------------------------------+
11646 | Save area for FP registers (F) | 8+P+A+V+L+X+W+Y+Z+C+G
11647 +---------------------------------------+
11648 old SP->| back chain to caller's caller |
11649 +---------------------------------------+
11651 The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
11652 given. (But note below and in sysv4.h that we require only 8 and
11653 may round up the size of our stack frame anyways. The historical
11654 reason is early versions of powerpc-linux which didn't properly
11655 align the stack at program startup. A happy side-effect is that
11656 -mno-eabi libraries can be used with -meabi programs.)
11658 The EABI configuration defaults to the V.4 layout. However,
11659 the stack alignment requirements may differ. If -mno-eabi is not
11660 given, the required stack alignment is 8 bytes; if -mno-eabi is
11661 given, the required alignment is 16 bytes. (But see V.4 comment
11664 #ifndef ABI_STACK_BOUNDARY
11665 #define ABI_STACK_BOUNDARY STACK_BOUNDARY
11668 static rs6000_stack_t *
11669 rs6000_stack_info (void)
11671 static rs6000_stack_t info, zero_info;
11672 rs6000_stack_t *info_ptr = &info;
11673 int reg_size = TARGET_32BIT ? 4 : 8;
11675 HOST_WIDE_INT non_fixed_size;
11677 /* Zero all fields portably. */
11682 /* Cache value so we don't rescan instruction chain over and over. */
11683 if (cfun->machine->insn_chain_scanned_p == 0)
11685 cfun->machine->insn_chain_scanned_p = 1;
11686 info_ptr->spe_64bit_regs_used = (int) spe_func_has_64bit_regs_p ();
11690 /* Select which calling sequence. */
11691 info_ptr->abi = DEFAULT_ABI;
11693 /* Calculate which registers need to be saved & save area size. */
11694 info_ptr->first_gp_reg_save = first_reg_to_save ();
11695 /* Assume that we will have to save RS6000_PIC_OFFSET_TABLE_REGNUM,
11696 even if it currently looks like we won't. */
11697 if (((TARGET_TOC && TARGET_MINIMAL_TOC)
11698 || (flag_pic == 1 && DEFAULT_ABI == ABI_V4)
11699 || (flag_pic && DEFAULT_ABI == ABI_DARWIN))
11700 && info_ptr->first_gp_reg_save > RS6000_PIC_OFFSET_TABLE_REGNUM)
11701 info_ptr->gp_size = reg_size * (32 - RS6000_PIC_OFFSET_TABLE_REGNUM);
11703 info_ptr->gp_size = reg_size * (32 - info_ptr->first_gp_reg_save);
11705 /* For the SPE, we have an additional upper 32-bits on each GPR.
11706 Ideally we should save the entire 64-bits only when the upper
11707 half is used in SIMD instructions. Since we only record
11708 registers live (not the size they are used in), this proves
11709 difficult because we'd have to traverse the instruction chain at
11710 the right time, taking reload into account. This is a real pain,
11711 so we opt to save the GPRs in 64-bits always if but one register
11712 gets used in 64-bits. Otherwise, all the registers in the frame
11713 get saved in 32-bits.
11715 So... since when we save all GPRs (except the SP) in 64-bits, the
11716 traditional GP save area will be empty. */
11717 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
11718 info_ptr->gp_size = 0;
11720 info_ptr->first_fp_reg_save = first_fp_reg_to_save ();
11721 info_ptr->fp_size = 8 * (64 - info_ptr->first_fp_reg_save);
11723 info_ptr->first_altivec_reg_save = first_altivec_reg_to_save ();
11724 info_ptr->altivec_size = 16 * (LAST_ALTIVEC_REGNO + 1
11725 - info_ptr->first_altivec_reg_save);
11727 /* Does this function call anything? */
11728 info_ptr->calls_p = (! current_function_is_leaf
11729 || cfun->machine->ra_needs_full_frame);
11731 /* Determine if we need to save the link register. */
11732 if (rs6000_ra_ever_killed ()
11733 || (DEFAULT_ABI == ABI_AIX
11734 && current_function_profile
11735 && !TARGET_PROFILE_KERNEL)
11736 #ifdef TARGET_RELOCATABLE
11737 || (TARGET_RELOCATABLE && (get_pool_size () != 0))
11739 || (info_ptr->first_fp_reg_save != 64
11740 && !FP_SAVE_INLINE (info_ptr->first_fp_reg_save))
11741 || info_ptr->first_altivec_reg_save <= LAST_ALTIVEC_REGNO
11742 || (DEFAULT_ABI == ABI_V4 && current_function_calls_alloca)
11743 || (DEFAULT_ABI == ABI_DARWIN
11745 && current_function_uses_pic_offset_table)
11746 || info_ptr->calls_p)
11748 info_ptr->lr_save_p = 1;
11749 regs_ever_live[LINK_REGISTER_REGNUM] = 1;
11752 /* Determine if we need to save the condition code registers. */
11753 if (regs_ever_live[CR2_REGNO]
11754 || regs_ever_live[CR3_REGNO]
11755 || regs_ever_live[CR4_REGNO])
11757 info_ptr->cr_save_p = 1;
11758 if (DEFAULT_ABI == ABI_V4)
11759 info_ptr->cr_size = reg_size;
11762 /* If the current function calls __builtin_eh_return, then we need
11763 to allocate stack space for registers that will hold data for
11764 the exception handler. */
11765 if (current_function_calls_eh_return)
11768 for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM; ++i)
11771 /* SPE saves EH registers in 64-bits. */
11772 ehrd_size = i * (TARGET_SPE_ABI
11773 && info_ptr->spe_64bit_regs_used != 0
11774 ? UNITS_PER_SPE_WORD : UNITS_PER_WORD);
11779 /* Determine various sizes. */
11780 info_ptr->reg_size = reg_size;
11781 info_ptr->fixed_size = RS6000_SAVE_AREA;
11782 info_ptr->varargs_size = RS6000_VARARGS_AREA;
11783 info_ptr->vars_size = RS6000_ALIGN (get_frame_size (), 8);
11784 info_ptr->parm_size = RS6000_ALIGN (current_function_outgoing_args_size,
11785 TARGET_ALTIVEC ? 16 : 8);
11787 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
11788 info_ptr->spe_gp_size = 8 * (32 - info_ptr->first_gp_reg_save);
11790 info_ptr->spe_gp_size = 0;
11792 if (TARGET_ALTIVEC_ABI)
11793 info_ptr->vrsave_mask = compute_vrsave_mask ();
11795 info_ptr->vrsave_mask = 0;
11797 if (TARGET_ALTIVEC_VRSAVE && info_ptr->vrsave_mask)
11798 info_ptr->vrsave_size = 4;
11800 info_ptr->vrsave_size = 0;
11802 compute_save_world_info (info_ptr);
11804 /* Calculate the offsets. */
11805 switch (DEFAULT_ABI)
11813 info_ptr->fp_save_offset = - info_ptr->fp_size;
11814 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
11816 if (TARGET_ALTIVEC_ABI)
11818 info_ptr->vrsave_save_offset
11819 = info_ptr->gp_save_offset - info_ptr->vrsave_size;
11821 /* Align stack so vector save area is on a quadword boundary. */
11822 if (info_ptr->altivec_size != 0)
11823 info_ptr->altivec_padding_size
11824 = 16 - (-info_ptr->vrsave_save_offset % 16);
11826 info_ptr->altivec_padding_size = 0;
11828 info_ptr->altivec_save_offset
11829 = info_ptr->vrsave_save_offset
11830 - info_ptr->altivec_padding_size
11831 - info_ptr->altivec_size;
11833 /* Adjust for AltiVec case. */
11834 info_ptr->ehrd_offset = info_ptr->altivec_save_offset - ehrd_size;
11837 info_ptr->ehrd_offset = info_ptr->gp_save_offset - ehrd_size;
11838 info_ptr->cr_save_offset = reg_size; /* first word when 64-bit. */
11839 info_ptr->lr_save_offset = 2*reg_size;
11843 info_ptr->fp_save_offset = - info_ptr->fp_size;
11844 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
11845 info_ptr->cr_save_offset = info_ptr->gp_save_offset - info_ptr->cr_size;
11847 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
11849 /* Align stack so SPE GPR save area is aligned on a
11850 double-word boundary. */
11851 if (info_ptr->spe_gp_size != 0)
11852 info_ptr->spe_padding_size
11853 = 8 - (-info_ptr->cr_save_offset % 8);
11855 info_ptr->spe_padding_size = 0;
11857 info_ptr->spe_gp_save_offset
11858 = info_ptr->cr_save_offset
11859 - info_ptr->spe_padding_size
11860 - info_ptr->spe_gp_size;
11862 /* Adjust for SPE case. */
11863 info_ptr->toc_save_offset
11864 = info_ptr->spe_gp_save_offset - info_ptr->toc_size;
11866 else if (TARGET_ALTIVEC_ABI)
11868 info_ptr->vrsave_save_offset
11869 = info_ptr->cr_save_offset - info_ptr->vrsave_size;
11871 /* Align stack so vector save area is on a quadword boundary. */
11872 if (info_ptr->altivec_size != 0)
11873 info_ptr->altivec_padding_size
11874 = 16 - (-info_ptr->vrsave_save_offset % 16);
11876 info_ptr->altivec_padding_size = 0;
11878 info_ptr->altivec_save_offset
11879 = info_ptr->vrsave_save_offset
11880 - info_ptr->altivec_padding_size
11881 - info_ptr->altivec_size;
11883 /* Adjust for AltiVec case. */
11884 info_ptr->toc_save_offset
11885 = info_ptr->altivec_save_offset - info_ptr->toc_size;
11888 info_ptr->toc_save_offset = info_ptr->cr_save_offset - info_ptr->toc_size;
11889 info_ptr->ehrd_offset = info_ptr->toc_save_offset - ehrd_size;
11890 info_ptr->lr_save_offset = reg_size;
11894 info_ptr->save_size = RS6000_ALIGN (info_ptr->fp_size
11895 + info_ptr->gp_size
11896 + info_ptr->altivec_size
11897 + info_ptr->altivec_padding_size
11898 + info_ptr->spe_gp_size
11899 + info_ptr->spe_padding_size
11901 + info_ptr->cr_size
11902 + info_ptr->lr_size
11903 + info_ptr->vrsave_size
11904 + info_ptr->toc_size,
11905 (TARGET_ALTIVEC_ABI || ABI_DARWIN)
11908 non_fixed_size = (info_ptr->vars_size
11909 + info_ptr->parm_size
11910 + info_ptr->save_size
11911 + info_ptr->varargs_size);
11913 info_ptr->total_size = RS6000_ALIGN (non_fixed_size + info_ptr->fixed_size,
11914 ABI_STACK_BOUNDARY / BITS_PER_UNIT);
11916 /* Determine if we need to allocate any stack frame:
11918 For AIX we need to push the stack if a frame pointer is needed
11919 (because the stack might be dynamically adjusted), if we are
11920 debugging, if we make calls, or if the sum of fp_save, gp_save,
11921 and local variables are more than the space needed to save all
11922 non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
11923 + 18*8 = 288 (GPR13 reserved).
11925 For V.4 we don't have the stack cushion that AIX uses, but assume
11926 that the debugger can handle stackless frames. */
11928 if (info_ptr->calls_p)
11929 info_ptr->push_p = 1;
11931 else if (DEFAULT_ABI == ABI_V4)
11932 info_ptr->push_p = non_fixed_size != 0;
11934 else if (frame_pointer_needed)
11935 info_ptr->push_p = 1;
11937 else if (TARGET_XCOFF && write_symbols != NO_DEBUG)
11938 info_ptr->push_p = 1;
11941 info_ptr->push_p = non_fixed_size > (TARGET_32BIT ? 220 : 288);
11943 /* Zero offsets if we're not saving those registers. */
11944 if (info_ptr->fp_size == 0)
11945 info_ptr->fp_save_offset = 0;
11947 if (info_ptr->gp_size == 0)
11948 info_ptr->gp_save_offset = 0;
11950 if (! TARGET_ALTIVEC_ABI || info_ptr->altivec_size == 0)
11951 info_ptr->altivec_save_offset = 0;
11953 if (! TARGET_ALTIVEC_ABI || info_ptr->vrsave_mask == 0)
11954 info_ptr->vrsave_save_offset = 0;
11956 if (! TARGET_SPE_ABI
11957 || info_ptr->spe_64bit_regs_used == 0
11958 || info_ptr->spe_gp_size == 0)
11959 info_ptr->spe_gp_save_offset = 0;
11961 if (! info_ptr->lr_save_p)
11962 info_ptr->lr_save_offset = 0;
11964 if (! info_ptr->cr_save_p)
11965 info_ptr->cr_save_offset = 0;
11967 if (! info_ptr->toc_save_p)
11968 info_ptr->toc_save_offset = 0;
11973 /* Return true if the current function uses any GPRs in 64-bit SIMD
11977 spe_func_has_64bit_regs_p (void)
11981 /* Functions that save and restore all the call-saved registers will
11982 need to save/restore the registers in 64-bits. */
11983 if (current_function_calls_eh_return
11984 || current_function_calls_setjmp
11985 || current_function_has_nonlocal_goto)
11988 insns = get_insns ();
11990 for (insn = NEXT_INSN (insns); insn != NULL_RTX; insn = NEXT_INSN (insn))
11996 i = PATTERN (insn);
11997 if (GET_CODE (i) == SET
11998 && SPE_VECTOR_MODE (GET_MODE (SET_SRC (i))))
12007 debug_stack_info (rs6000_stack_t *info)
12009 const char *abi_string;
12012 info = rs6000_stack_info ();
12014 fprintf (stderr, "\nStack information for function %s:\n",
12015 ((current_function_decl && DECL_NAME (current_function_decl))
12016 ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
12021 default: abi_string = "Unknown"; break;
12022 case ABI_NONE: abi_string = "NONE"; break;
12023 case ABI_AIX: abi_string = "AIX"; break;
12024 case ABI_DARWIN: abi_string = "Darwin"; break;
12025 case ABI_V4: abi_string = "V.4"; break;
12028 fprintf (stderr, "\tABI = %5s\n", abi_string);
12030 if (TARGET_ALTIVEC_ABI)
12031 fprintf (stderr, "\tALTIVEC ABI extensions enabled.\n");
12033 if (TARGET_SPE_ABI)
12034 fprintf (stderr, "\tSPE ABI extensions enabled.\n");
12036 if (info->first_gp_reg_save != 32)
12037 fprintf (stderr, "\tfirst_gp_reg_save = %5d\n", info->first_gp_reg_save);
12039 if (info->first_fp_reg_save != 64)
12040 fprintf (stderr, "\tfirst_fp_reg_save = %5d\n", info->first_fp_reg_save);
12042 if (info->first_altivec_reg_save <= LAST_ALTIVEC_REGNO)
12043 fprintf (stderr, "\tfirst_altivec_reg_save = %5d\n",
12044 info->first_altivec_reg_save);
12046 if (info->lr_save_p)
12047 fprintf (stderr, "\tlr_save_p = %5d\n", info->lr_save_p);
12049 if (info->cr_save_p)
12050 fprintf (stderr, "\tcr_save_p = %5d\n", info->cr_save_p);
12052 if (info->toc_save_p)
12053 fprintf (stderr, "\ttoc_save_p = %5d\n", info->toc_save_p);
12055 if (info->vrsave_mask)
12056 fprintf (stderr, "\tvrsave_mask = 0x%x\n", info->vrsave_mask);
12059 fprintf (stderr, "\tpush_p = %5d\n", info->push_p);
12062 fprintf (stderr, "\tcalls_p = %5d\n", info->calls_p);
12064 if (info->gp_save_offset)
12065 fprintf (stderr, "\tgp_save_offset = %5d\n", info->gp_save_offset);
12067 if (info->fp_save_offset)
12068 fprintf (stderr, "\tfp_save_offset = %5d\n", info->fp_save_offset);
12070 if (info->altivec_save_offset)
12071 fprintf (stderr, "\taltivec_save_offset = %5d\n",
12072 info->altivec_save_offset);
12074 if (info->spe_gp_save_offset)
12075 fprintf (stderr, "\tspe_gp_save_offset = %5d\n",
12076 info->spe_gp_save_offset);
12078 if (info->vrsave_save_offset)
12079 fprintf (stderr, "\tvrsave_save_offset = %5d\n",
12080 info->vrsave_save_offset);
12082 if (info->lr_save_offset)
12083 fprintf (stderr, "\tlr_save_offset = %5d\n", info->lr_save_offset);
12085 if (info->cr_save_offset)
12086 fprintf (stderr, "\tcr_save_offset = %5d\n", info->cr_save_offset);
12088 if (info->toc_save_offset)
12089 fprintf (stderr, "\ttoc_save_offset = %5d\n", info->toc_save_offset);
12091 if (info->varargs_save_offset)
12092 fprintf (stderr, "\tvarargs_save_offset = %5d\n", info->varargs_save_offset);
12094 if (info->total_size)
12095 fprintf (stderr, "\ttotal_size = "HOST_WIDE_INT_PRINT_DEC"\n",
12098 if (info->varargs_size)
12099 fprintf (stderr, "\tvarargs_size = %5d\n", info->varargs_size);
12101 if (info->vars_size)
12102 fprintf (stderr, "\tvars_size = "HOST_WIDE_INT_PRINT_DEC"\n",
12105 if (info->parm_size)
12106 fprintf (stderr, "\tparm_size = %5d\n", info->parm_size);
12108 if (info->fixed_size)
12109 fprintf (stderr, "\tfixed_size = %5d\n", info->fixed_size);
12112 fprintf (stderr, "\tgp_size = %5d\n", info->gp_size);
12114 if (info->spe_gp_size)
12115 fprintf (stderr, "\tspe_gp_size = %5d\n", info->spe_gp_size);
12118 fprintf (stderr, "\tfp_size = %5d\n", info->fp_size);
12120 if (info->altivec_size)
12121 fprintf (stderr, "\taltivec_size = %5d\n", info->altivec_size);
12123 if (info->vrsave_size)
12124 fprintf (stderr, "\tvrsave_size = %5d\n", info->vrsave_size);
12126 if (info->altivec_padding_size)
12127 fprintf (stderr, "\taltivec_padding_size= %5d\n",
12128 info->altivec_padding_size);
12130 if (info->spe_padding_size)
12131 fprintf (stderr, "\tspe_padding_size = %5d\n",
12132 info->spe_padding_size);
12135 fprintf (stderr, "\tlr_size = %5d\n", info->lr_size);
12138 fprintf (stderr, "\tcr_size = %5d\n", info->cr_size);
12140 if (info->toc_size)
12141 fprintf (stderr, "\ttoc_size = %5d\n", info->toc_size);
12143 if (info->save_size)
12144 fprintf (stderr, "\tsave_size = %5d\n", info->save_size);
12146 if (info->reg_size != 4)
12147 fprintf (stderr, "\treg_size = %5d\n", info->reg_size);
12149 fprintf (stderr, "\n");
12153 rs6000_return_addr (int count, rtx frame)
12155 /* Currently we don't optimize very well between prolog and body
12156 code and for PIC code the code can be actually quite bad, so
12157 don't try to be too clever here. */
12158 if (count != 0 || (DEFAULT_ABI != ABI_AIX && flag_pic))
12160 cfun->machine->ra_needs_full_frame = 1;
12167 plus_constant (copy_to_reg
12168 (gen_rtx_MEM (Pmode,
12169 memory_address (Pmode, frame))),
12170 RETURN_ADDRESS_OFFSET)));
12173 cfun->machine->ra_need_lr = 1;
12174 return get_hard_reg_initial_val (Pmode, LINK_REGISTER_REGNUM);
12177 /* Say whether a function is a candidate for sibcall handling or not.
12178 We do not allow indirect calls to be optimized into sibling calls.
12179 Also, we can't do it if there are any vector parameters; there's
12180 nowhere to put the VRsave code so it works; note that functions with
12181 vector parameters are required to have a prototype, so the argument
12182 type info must be available here. (The tail recursion case can work
12183 with vector parameters, but there's no way to distinguish here.) */
12185 rs6000_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
12190 if (TARGET_ALTIVEC_VRSAVE)
12192 for (type = TYPE_ARG_TYPES (TREE_TYPE (decl));
12193 type; type = TREE_CHAIN (type))
12195 if (TREE_CODE (TREE_VALUE (type)) == VECTOR_TYPE)
12199 if (DEFAULT_ABI == ABI_DARWIN
12200 || (*targetm.binds_local_p) (decl))
12202 tree attr_list = TYPE_ATTRIBUTES (TREE_TYPE (decl));
12204 if (!lookup_attribute ("longcall", attr_list)
12205 || lookup_attribute ("shortcall", attr_list))
12213 rs6000_ra_ever_killed (void)
12219 if (current_function_is_thunk)
12222 /* regs_ever_live has LR marked as used if any sibcalls are present,
12223 but this should not force saving and restoring in the
12224 pro/epilogue. Likewise, reg_set_between_p thinks a sibcall
12225 clobbers LR, so that is inappropriate. */
12227 /* Also, the prologue can generate a store into LR that
12228 doesn't really count, like this:
12231 bcl to set PIC register
12235 When we're called from the epilogue, we need to avoid counting
12236 this as a store. */
12238 push_topmost_sequence ();
12239 top = get_insns ();
12240 pop_topmost_sequence ();
12241 reg = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
12243 for (insn = NEXT_INSN (top); insn != NULL_RTX; insn = NEXT_INSN (insn))
12247 if (FIND_REG_INC_NOTE (insn, reg))
12249 else if (GET_CODE (insn) == CALL_INSN
12250 && !SIBLING_CALL_P (insn))
12252 else if (set_of (reg, insn) != NULL_RTX
12253 && !prologue_epilogue_contains (insn))
12260 /* Add a REG_MAYBE_DEAD note to the insn. */
12262 rs6000_maybe_dead (rtx insn)
12264 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD,
12269 /* Emit instructions needed to load the TOC register.
12270 This is only needed when TARGET_TOC, TARGET_MINIMAL_TOC, and there is
12271 a constant pool; or for SVR4 -fpic. */
12274 rs6000_emit_load_toc_table (int fromprolog)
12277 dest = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
12279 if (TARGET_ELF && DEFAULT_ABI == ABI_V4 && flag_pic == 1)
12281 rtx temp = (fromprolog
12282 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
12283 : gen_reg_rtx (Pmode));
12284 insn = emit_insn (gen_load_toc_v4_pic_si (temp));
12286 rs6000_maybe_dead (insn);
12287 insn = emit_move_insn (dest, temp);
12289 rs6000_maybe_dead (insn);
12291 else if (TARGET_ELF && DEFAULT_ABI != ABI_AIX && flag_pic == 2)
12294 rtx tempLR = (fromprolog
12295 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
12296 : gen_reg_rtx (Pmode));
12297 rtx temp0 = (fromprolog
12298 ? gen_rtx_REG (Pmode, 0)
12299 : gen_reg_rtx (Pmode));
12302 /* possibly create the toc section */
12303 if (! toc_initialized)
12306 function_section (current_function_decl);
12313 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
12314 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
12316 ASM_GENERATE_INTERNAL_LABEL (buf, "LCL", rs6000_pic_labelno);
12317 symL = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
12319 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1 (tempLR,
12321 rs6000_maybe_dead (emit_move_insn (dest, tempLR));
12322 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_2 (temp0, dest,
12329 static int reload_toc_labelno = 0;
12331 tocsym = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
12333 ASM_GENERATE_INTERNAL_LABEL (buf, "LCG", reload_toc_labelno++);
12334 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
12336 emit_insn (gen_load_toc_v4_PIC_1b (tempLR, symF, tocsym));
12337 emit_move_insn (dest, tempLR);
12338 emit_move_insn (temp0, gen_rtx_MEM (Pmode, dest));
12340 insn = emit_insn (gen_addsi3 (dest, temp0, dest));
12342 rs6000_maybe_dead (insn);
12344 else if (TARGET_ELF && !TARGET_AIX && flag_pic == 0 && TARGET_MINIMAL_TOC)
12346 /* This is for AIX code running in non-PIC ELF32. */
12349 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
12350 realsym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
12352 insn = emit_insn (gen_elf_high (dest, realsym));
12354 rs6000_maybe_dead (insn);
12355 insn = emit_insn (gen_elf_low (dest, dest, realsym));
12357 rs6000_maybe_dead (insn);
12359 else if (DEFAULT_ABI == ABI_AIX)
12362 insn = emit_insn (gen_load_toc_aix_si (dest));
12364 insn = emit_insn (gen_load_toc_aix_di (dest));
12366 rs6000_maybe_dead (insn);
12372 /* Emit instructions to restore the link register after determining where
12373 its value has been stored. */
12376 rs6000_emit_eh_reg_restore (rtx source, rtx scratch)
12378 rs6000_stack_t *info = rs6000_stack_info ();
12381 operands[0] = source;
12382 operands[1] = scratch;
12384 if (info->lr_save_p)
12386 rtx frame_rtx = stack_pointer_rtx;
12387 HOST_WIDE_INT sp_offset = 0;
12390 if (frame_pointer_needed
12391 || current_function_calls_alloca
12392 || info->total_size > 32767)
12394 emit_move_insn (operands[1], gen_rtx_MEM (Pmode, frame_rtx));
12395 frame_rtx = operands[1];
12397 else if (info->push_p)
12398 sp_offset = info->total_size;
12400 tmp = plus_constant (frame_rtx, info->lr_save_offset + sp_offset);
12401 tmp = gen_rtx_MEM (Pmode, tmp);
12402 emit_move_insn (tmp, operands[0]);
12405 emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM), operands[0]);
12408 static GTY(()) int set = -1;
12411 get_TOC_alias_set (void)
12414 set = new_alias_set ();
12418 /* This returns nonzero if the current function uses the TOC. This is
12419 determined by the presence of (use (unspec ... UNSPEC_TOC)), which
12420 is generated by the ABI_V4 load_toc_* patterns. */
12427 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
12430 rtx pat = PATTERN (insn);
12433 if (GET_CODE (pat) == PARALLEL)
12434 for (i = 0; i < XVECLEN (pat, 0); i++)
12436 rtx sub = XVECEXP (pat, 0, i);
12437 if (GET_CODE (sub) == USE)
12439 sub = XEXP (sub, 0);
12440 if (GET_CODE (sub) == UNSPEC
12441 && XINT (sub, 1) == UNSPEC_TOC)
12451 create_TOC_reference (rtx symbol)
12453 return gen_rtx_PLUS (Pmode,
12454 gen_rtx_REG (Pmode, TOC_REGISTER),
12455 gen_rtx_CONST (Pmode,
12456 gen_rtx_MINUS (Pmode, symbol,
12457 gen_rtx_SYMBOL_REF (Pmode, toc_label_name))));
12460 /* If _Unwind_* has been called from within the same module,
12461 toc register is not guaranteed to be saved to 40(1) on function
12462 entry. Save it there in that case. */
12465 rs6000_aix_emit_builtin_unwind_init (void)
12468 rtx stack_top = gen_reg_rtx (Pmode);
12469 rtx opcode_addr = gen_reg_rtx (Pmode);
12470 rtx opcode = gen_reg_rtx (SImode);
12471 rtx tocompare = gen_reg_rtx (SImode);
12472 rtx no_toc_save_needed = gen_label_rtx ();
12474 mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
12475 emit_move_insn (stack_top, mem);
12477 mem = gen_rtx_MEM (Pmode,
12478 gen_rtx_PLUS (Pmode, stack_top,
12479 GEN_INT (2 * GET_MODE_SIZE (Pmode))));
12480 emit_move_insn (opcode_addr, mem);
12481 emit_move_insn (opcode, gen_rtx_MEM (SImode, opcode_addr));
12482 emit_move_insn (tocompare, gen_int_mode (TARGET_32BIT ? 0x80410014
12483 : 0xE8410028, SImode));
12485 do_compare_rtx_and_jump (opcode, tocompare, EQ, 1,
12486 SImode, NULL_RTX, NULL_RTX,
12487 no_toc_save_needed);
12489 mem = gen_rtx_MEM (Pmode,
12490 gen_rtx_PLUS (Pmode, stack_top,
12491 GEN_INT (5 * GET_MODE_SIZE (Pmode))));
12492 emit_move_insn (mem, gen_rtx_REG (Pmode, 2));
12493 emit_label (no_toc_save_needed);
12496 /* This ties together stack memory (MEM with an alias set of
12497 rs6000_sr_alias_set) and the change to the stack pointer. */
12500 rs6000_emit_stack_tie (void)
12502 rtx mem = gen_rtx_MEM (BLKmode, gen_rtx_REG (Pmode, STACK_POINTER_REGNUM));
12504 set_mem_alias_set (mem, rs6000_sr_alias_set);
12505 emit_insn (gen_stack_tie (mem));
12508 /* Emit the correct code for allocating stack space, as insns.
12509 If COPY_R12, make sure a copy of the old frame is left in r12.
12510 The generated code may use hard register 0 as a temporary. */
12513 rs6000_emit_allocate_stack (HOST_WIDE_INT size, int copy_r12)
12516 rtx stack_reg = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
12517 rtx tmp_reg = gen_rtx_REG (Pmode, 0);
12518 rtx todec = GEN_INT (-size);
12520 if (current_function_limit_stack)
12522 if (REG_P (stack_limit_rtx)
12523 && REGNO (stack_limit_rtx) > 1
12524 && REGNO (stack_limit_rtx) <= 31)
12526 emit_insn (TARGET_32BIT
12527 ? gen_addsi3 (tmp_reg,
12530 : gen_adddi3 (tmp_reg,
12534 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
12537 else if (GET_CODE (stack_limit_rtx) == SYMBOL_REF
12539 && DEFAULT_ABI == ABI_V4)
12541 rtx toload = gen_rtx_CONST (VOIDmode,
12542 gen_rtx_PLUS (Pmode,
12546 emit_insn (gen_elf_high (tmp_reg, toload));
12547 emit_insn (gen_elf_low (tmp_reg, tmp_reg, toload));
12548 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
12552 warning ("stack limit expression is not supported");
12555 if (copy_r12 || ! TARGET_UPDATE)
12556 emit_move_insn (gen_rtx_REG (Pmode, 12), stack_reg);
12562 /* Need a note here so that try_split doesn't get confused. */
12563 if (get_last_insn() == NULL_RTX)
12564 emit_note (NOTE_INSN_DELETED);
12565 insn = emit_move_insn (tmp_reg, todec);
12566 try_split (PATTERN (insn), insn, 0);
12570 insn = emit_insn (TARGET_32BIT
12571 ? gen_movsi_update (stack_reg, stack_reg,
12573 : gen_movdi_update (stack_reg, stack_reg,
12574 todec, stack_reg));
12578 insn = emit_insn (TARGET_32BIT
12579 ? gen_addsi3 (stack_reg, stack_reg, todec)
12580 : gen_adddi3 (stack_reg, stack_reg, todec));
12581 emit_move_insn (gen_rtx_MEM (Pmode, stack_reg),
12582 gen_rtx_REG (Pmode, 12));
12585 RTX_FRAME_RELATED_P (insn) = 1;
12587 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
12588 gen_rtx_SET (VOIDmode, stack_reg,
12589 gen_rtx_PLUS (Pmode, stack_reg,
12594 /* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
12595 with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
12596 is not NULL. It would be nice if dwarf2out_frame_debug_expr could
12597 deduce these equivalences by itself so it wasn't necessary to hold
12598 its hand so much. */
12601 rs6000_frame_related (rtx insn, rtx reg, HOST_WIDE_INT val,
12602 rtx reg2, rtx rreg)
12606 /* copy_rtx will not make unique copies of registers, so we need to
12607 ensure we don't have unwanted sharing here. */
12609 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
12612 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
12614 real = copy_rtx (PATTERN (insn));
12616 if (reg2 != NULL_RTX)
12617 real = replace_rtx (real, reg2, rreg);
12619 real = replace_rtx (real, reg,
12620 gen_rtx_PLUS (Pmode, gen_rtx_REG (Pmode,
12621 STACK_POINTER_REGNUM),
12624 /* We expect that 'real' is either a SET or a PARALLEL containing
12625 SETs (and possibly other stuff). In a PARALLEL, all the SETs
12626 are important so they all have to be marked RTX_FRAME_RELATED_P. */
12628 if (GET_CODE (real) == SET)
12632 temp = simplify_rtx (SET_SRC (set));
12634 SET_SRC (set) = temp;
12635 temp = simplify_rtx (SET_DEST (set));
12637 SET_DEST (set) = temp;
12638 if (GET_CODE (SET_DEST (set)) == MEM)
12640 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
12642 XEXP (SET_DEST (set), 0) = temp;
12645 else if (GET_CODE (real) == PARALLEL)
12648 for (i = 0; i < XVECLEN (real, 0); i++)
12649 if (GET_CODE (XVECEXP (real, 0, i)) == SET)
12651 rtx set = XVECEXP (real, 0, i);
12653 temp = simplify_rtx (SET_SRC (set));
12655 SET_SRC (set) = temp;
12656 temp = simplify_rtx (SET_DEST (set));
12658 SET_DEST (set) = temp;
12659 if (GET_CODE (SET_DEST (set)) == MEM)
12661 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
12663 XEXP (SET_DEST (set), 0) = temp;
12665 RTX_FRAME_RELATED_P (set) = 1;
12672 real = spe_synthesize_frame_save (real);
12674 RTX_FRAME_RELATED_P (insn) = 1;
12675 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
12680 /* Given an SPE frame note, return a PARALLEL of SETs with the
12681 original note, plus a synthetic register save. */
12684 spe_synthesize_frame_save (rtx real)
12686 rtx synth, offset, reg, real2;
12688 if (GET_CODE (real) != SET
12689 || GET_MODE (SET_SRC (real)) != V2SImode)
12692 /* For the SPE, registers saved in 64-bits, get a PARALLEL for their
12693 frame related note. The parallel contains a set of the register
12694 being saved, and another set to a synthetic register (n+1200).
12695 This is so we can differentiate between 64-bit and 32-bit saves.
12696 Words cannot describe this nastiness. */
12698 if (GET_CODE (SET_DEST (real)) != MEM
12699 || GET_CODE (XEXP (SET_DEST (real), 0)) != PLUS
12700 || GET_CODE (SET_SRC (real)) != REG)
12704 (set (mem (plus (reg x) (const y)))
12707 (set (mem (plus (reg x) (const y+4)))
12711 real2 = copy_rtx (real);
12712 PUT_MODE (SET_DEST (real2), SImode);
12713 reg = SET_SRC (real2);
12714 real2 = replace_rtx (real2, reg, gen_rtx_REG (SImode, REGNO (reg)));
12715 synth = copy_rtx (real2);
12717 if (BYTES_BIG_ENDIAN)
12719 offset = XEXP (XEXP (SET_DEST (real2), 0), 1);
12720 real2 = replace_rtx (real2, offset, GEN_INT (INTVAL (offset) + 4));
12723 reg = SET_SRC (synth);
12725 synth = replace_rtx (synth, reg,
12726 gen_rtx_REG (SImode, REGNO (reg) + 1200));
12728 offset = XEXP (XEXP (SET_DEST (synth), 0), 1);
12729 synth = replace_rtx (synth, offset,
12730 GEN_INT (INTVAL (offset)
12731 + (BYTES_BIG_ENDIAN ? 0 : 4)));
12733 RTX_FRAME_RELATED_P (synth) = 1;
12734 RTX_FRAME_RELATED_P (real2) = 1;
12735 if (BYTES_BIG_ENDIAN)
12736 real = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, synth, real2));
12738 real = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, real2, synth));
12743 /* Returns an insn that has a vrsave set operation with the
12744 appropriate CLOBBERs. */
12747 generate_set_vrsave (rtx reg, rs6000_stack_t *info, int epiloguep)
12750 rtx insn, clobs[TOTAL_ALTIVEC_REGS + 1];
12751 rtx vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
12754 = gen_rtx_SET (VOIDmode,
12756 gen_rtx_UNSPEC_VOLATILE (SImode,
12757 gen_rtvec (2, reg, vrsave),
12762 /* We need to clobber the registers in the mask so the scheduler
12763 does not move sets to VRSAVE before sets of AltiVec registers.
12765 However, if the function receives nonlocal gotos, reload will set
12766 all call saved registers live. We will end up with:
12768 (set (reg 999) (mem))
12769 (parallel [ (set (reg vrsave) (unspec blah))
12770 (clobber (reg 999))])
12772 The clobber will cause the store into reg 999 to be dead, and
12773 flow will attempt to delete an epilogue insn. In this case, we
12774 need an unspec use/set of the register. */
12776 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
12777 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
12779 if (!epiloguep || call_used_regs [i])
12780 clobs[nclobs++] = gen_rtx_CLOBBER (VOIDmode,
12781 gen_rtx_REG (V4SImode, i));
12784 rtx reg = gen_rtx_REG (V4SImode, i);
12787 = gen_rtx_SET (VOIDmode,
12789 gen_rtx_UNSPEC (V4SImode,
12790 gen_rtvec (1, reg), 27));
12794 insn = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nclobs));
12796 for (i = 0; i < nclobs; ++i)
12797 XVECEXP (insn, 0, i) = clobs[i];
12802 /* Save a register into the frame, and emit RTX_FRAME_RELATED_P notes.
12803 Save REGNO into [FRAME_REG + OFFSET] in mode MODE. */
12806 emit_frame_save (rtx frame_reg, rtx frame_ptr, enum machine_mode mode,
12807 unsigned int regno, int offset, HOST_WIDE_INT total_size)
12809 rtx reg, offset_rtx, insn, mem, addr, int_rtx;
12810 rtx replacea, replaceb;
12812 int_rtx = GEN_INT (offset);
12814 /* Some cases that need register indexed addressing. */
12815 if ((TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
12817 && SPE_VECTOR_MODE (mode)
12818 && !SPE_CONST_OFFSET_OK (offset)))
12820 /* Whomever calls us must make sure r11 is available in the
12821 flow path of instructions in the prologue. */
12822 offset_rtx = gen_rtx_REG (Pmode, 11);
12823 emit_move_insn (offset_rtx, int_rtx);
12825 replacea = offset_rtx;
12826 replaceb = int_rtx;
12830 offset_rtx = int_rtx;
12831 replacea = NULL_RTX;
12832 replaceb = NULL_RTX;
12835 reg = gen_rtx_REG (mode, regno);
12836 addr = gen_rtx_PLUS (Pmode, frame_reg, offset_rtx);
12837 mem = gen_rtx_MEM (mode, addr);
12838 set_mem_alias_set (mem, rs6000_sr_alias_set);
12840 insn = emit_move_insn (mem, reg);
12842 rs6000_frame_related (insn, frame_ptr, total_size, replacea, replaceb);
12845 /* Emit an offset memory reference suitable for a frame store, while
12846 converting to a valid addressing mode. */
12849 gen_frame_mem_offset (enum machine_mode mode, rtx reg, int offset)
12851 rtx int_rtx, offset_rtx;
12853 int_rtx = GEN_INT (offset);
12855 if (TARGET_SPE_ABI && SPE_VECTOR_MODE (mode))
12857 offset_rtx = gen_rtx_REG (Pmode, FIXED_SCRATCH);
12858 emit_move_insn (offset_rtx, int_rtx);
12861 offset_rtx = int_rtx;
12863 return gen_rtx_MEM (mode, gen_rtx_PLUS (Pmode, reg, offset_rtx));
12866 #ifndef TARGET_FIX_AND_CONTINUE
12867 #define TARGET_FIX_AND_CONTINUE 0
12870 /* Emit function prologue as insns. */
12873 rs6000_emit_prologue (void)
12875 rs6000_stack_t *info = rs6000_stack_info ();
12876 enum machine_mode reg_mode = Pmode;
12877 int reg_size = TARGET_32BIT ? 4 : 8;
12878 rtx sp_reg_rtx = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
12879 rtx frame_ptr_rtx = gen_rtx_REG (Pmode, 12);
12880 rtx frame_reg_rtx = sp_reg_rtx;
12881 rtx cr_save_rtx = NULL_RTX;
12883 int saving_FPRs_inline;
12884 int using_store_multiple;
12885 HOST_WIDE_INT sp_offset = 0;
12887 if (TARGET_FIX_AND_CONTINUE)
12889 /* gdb on darwin arranges to forward a function from the old
12890 address by modifying the first 4 instructions of the function
12891 to branch to the overriding function. This is necessary to
12892 permit function pointers that point to the old function to
12893 actually forward to the new function. */
12894 emit_insn (gen_nop ());
12895 emit_insn (gen_nop ());
12896 emit_insn (gen_nop ());
12897 emit_insn (gen_nop ());
12900 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
12902 reg_mode = V2SImode;
12906 using_store_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
12907 && (!TARGET_SPE_ABI
12908 || info->spe_64bit_regs_used == 0)
12909 && info->first_gp_reg_save < 31);
12910 saving_FPRs_inline = (info->first_fp_reg_save == 64
12911 || FP_SAVE_INLINE (info->first_fp_reg_save)
12912 || current_function_calls_eh_return
12913 || cfun->machine->ra_need_lr);
12915 /* For V.4, update stack before we do any saving and set back pointer. */
12917 && (DEFAULT_ABI == ABI_V4
12918 || current_function_calls_eh_return))
12920 if (info->total_size < 32767)
12921 sp_offset = info->total_size;
12923 frame_reg_rtx = frame_ptr_rtx;
12924 rs6000_emit_allocate_stack (info->total_size,
12925 (frame_reg_rtx != sp_reg_rtx
12926 && (info->cr_save_p
12928 || info->first_fp_reg_save < 64
12929 || info->first_gp_reg_save < 32
12931 if (frame_reg_rtx != sp_reg_rtx)
12932 rs6000_emit_stack_tie ();
12935 /* Handle world saves specially here. */
12936 if (info->world_save_p)
12942 /* save_world expects lr in r0. */
12943 if (info->lr_save_p)
12945 insn = emit_move_insn (gen_rtx_REG (Pmode, 0),
12946 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
12947 RTX_FRAME_RELATED_P (insn) = 1;
12950 /* The SAVE_WORLD and RESTORE_WORLD routines make a number of
12951 assumptions about the offsets of various bits of the stack
12952 frame. Abort if things aren't what they should be. */
12953 if (info->gp_save_offset != -220
12954 || info->fp_save_offset != -144
12955 || info->lr_save_offset != 8
12956 || info->cr_save_offset != 4
12958 || !info->lr_save_p
12959 || (current_function_calls_eh_return && info->ehrd_offset != -432)
12960 || (info->vrsave_save_offset != -224
12961 || info->altivec_save_offset != (-224 -16 -192)))
12964 treg = gen_rtx_REG (SImode, 11);
12965 emit_move_insn (treg, GEN_INT (-info->total_size));
12967 /* SAVE_WORLD takes the caller's LR in R0 and the frame size
12968 in R11. It also clobbers R12, so beware! */
12970 /* Preserve CR2 for save_world prologues */
12972 sz += 32 - info->first_gp_reg_save;
12973 sz += 64 - info->first_fp_reg_save;
12974 sz += LAST_ALTIVEC_REGNO - info->first_altivec_reg_save + 1;
12975 p = rtvec_alloc (sz);
12977 RTVEC_ELT (p, j++) = gen_rtx_CLOBBER (VOIDmode,
12978 gen_rtx_REG (Pmode,
12979 LINK_REGISTER_REGNUM));
12980 RTVEC_ELT (p, j++) = gen_rtx_USE (VOIDmode,
12981 gen_rtx_SYMBOL_REF (Pmode,
12983 /* We do floats first so that the instruction pattern matches
12985 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
12987 rtx reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
12988 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12989 GEN_INT (info->fp_save_offset
12990 + sp_offset + 8 * i));
12991 rtx mem = gen_rtx_MEM (DFmode, addr);
12992 set_mem_alias_set (mem, rs6000_sr_alias_set);
12994 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
12996 for (i = 0; info->first_altivec_reg_save + i <= LAST_ALTIVEC_REGNO; i++)
12998 rtx reg = gen_rtx_REG (V4SImode, info->first_altivec_reg_save + i);
12999 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
13000 GEN_INT (info->altivec_save_offset
13001 + sp_offset + 16 * i));
13002 rtx mem = gen_rtx_MEM (V4SImode, addr);
13003 set_mem_alias_set (mem, rs6000_sr_alias_set);
13005 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
13007 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
13009 rtx reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
13010 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
13011 GEN_INT (info->gp_save_offset
13012 + sp_offset + reg_size * i));
13013 rtx mem = gen_rtx_MEM (reg_mode, addr);
13014 set_mem_alias_set (mem, rs6000_sr_alias_set);
13016 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
13020 /* CR register traditionally saved as CR2. */
13021 rtx reg = gen_rtx_REG (reg_mode, CR2_REGNO);
13022 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
13023 GEN_INT (info->cr_save_offset
13025 rtx mem = gen_rtx_MEM (reg_mode, addr);
13026 set_mem_alias_set (mem, rs6000_sr_alias_set);
13028 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
13030 /* Prevent any attempt to delete the setting of r0 and treg! */
13031 RTVEC_ELT (p, j++) = gen_rtx_USE (VOIDmode, gen_rtx_REG (Pmode, 0));
13032 RTVEC_ELT (p, j++) = gen_rtx_USE (VOIDmode, treg);
13033 RTVEC_ELT (p, j++) = gen_rtx_CLOBBER (VOIDmode, sp_reg_rtx);
13035 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
13036 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
13037 NULL_RTX, NULL_RTX);
13039 if (current_function_calls_eh_return)
13044 unsigned int regno = EH_RETURN_DATA_REGNO (i);
13045 if (regno == INVALID_REGNUM)
13047 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, reg_mode, regno,
13048 info->ehrd_offset + sp_offset
13049 + reg_size * (int) i,
13055 /* Save AltiVec registers if needed. */
13056 if (! info->world_save_p && TARGET_ALTIVEC_ABI && info->altivec_size != 0)
13060 /* There should be a non inline version of this, for when we
13061 are saving lots of vector registers. */
13062 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
13063 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
13065 rtx areg, savereg, mem;
13068 offset = info->altivec_save_offset + sp_offset
13069 + 16 * (i - info->first_altivec_reg_save);
13071 savereg = gen_rtx_REG (V4SImode, i);
13073 areg = gen_rtx_REG (Pmode, 0);
13074 emit_move_insn (areg, GEN_INT (offset));
13076 /* AltiVec addressing mode is [reg+reg]. */
13077 mem = gen_rtx_MEM (V4SImode,
13078 gen_rtx_PLUS (Pmode, frame_reg_rtx, areg));
13080 set_mem_alias_set (mem, rs6000_sr_alias_set);
13082 insn = emit_move_insn (mem, savereg);
13084 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
13085 areg, GEN_INT (offset));
13089 /* VRSAVE is a bit vector representing which AltiVec registers
13090 are used. The OS uses this to determine which vector
13091 registers to save on a context switch. We need to save
13092 VRSAVE on the stack frame, add whatever AltiVec registers we
13093 used in this function, and do the corresponding magic in the
13096 if (TARGET_ALTIVEC && TARGET_ALTIVEC_VRSAVE
13097 && ! info->world_save_p && info->vrsave_mask != 0)
13099 rtx reg, mem, vrsave;
13102 /* Get VRSAVE onto a GPR. */
13103 reg = gen_rtx_REG (SImode, 12);
13104 vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
13106 emit_insn (gen_get_vrsave_internal (reg));
13108 emit_insn (gen_rtx_SET (VOIDmode, reg, vrsave));
13111 offset = info->vrsave_save_offset + sp_offset;
13113 = gen_rtx_MEM (SImode,
13114 gen_rtx_PLUS (Pmode, frame_reg_rtx, GEN_INT (offset)));
13115 set_mem_alias_set (mem, rs6000_sr_alias_set);
13116 insn = emit_move_insn (mem, reg);
13118 /* Include the registers in the mask. */
13119 emit_insn (gen_iorsi3 (reg, reg, GEN_INT ((int) info->vrsave_mask)));
13121 insn = emit_insn (generate_set_vrsave (reg, info, 0));
13124 /* If we use the link register, get it into r0. */
13125 if (! info->world_save_p && info->lr_save_p)
13127 insn = emit_move_insn (gen_rtx_REG (Pmode, 0),
13128 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
13129 RTX_FRAME_RELATED_P (insn) = 1;
13132 /* If we need to save CR, put it into r12. */
13133 if (! info->world_save_p && info->cr_save_p && frame_reg_rtx != frame_ptr_rtx)
13137 cr_save_rtx = gen_rtx_REG (SImode, 12);
13138 insn = emit_insn (gen_movesi_from_cr (cr_save_rtx));
13139 RTX_FRAME_RELATED_P (insn) = 1;
13140 /* Now, there's no way that dwarf2out_frame_debug_expr is going
13141 to understand '(unspec:SI [(reg:CC 68) ...] UNSPEC_MOVESI_FROM_CR)'.
13142 But that's OK. All we have to do is specify that _one_ condition
13143 code register is saved in this stack slot. The thrower's epilogue
13144 will then restore all the call-saved registers.
13145 We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux. */
13146 set = gen_rtx_SET (VOIDmode, cr_save_rtx,
13147 gen_rtx_REG (SImode, CR2_REGNO));
13148 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
13153 /* Do any required saving of fpr's. If only one or two to save, do
13154 it ourselves. Otherwise, call function. */
13155 if (! info->world_save_p && saving_FPRs_inline)
13158 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
13159 if ((regs_ever_live[info->first_fp_reg_save+i]
13160 && ! call_used_regs[info->first_fp_reg_save+i]))
13161 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, DFmode,
13162 info->first_fp_reg_save + i,
13163 info->fp_save_offset + sp_offset + 8 * i,
13166 else if (! info->world_save_p && info->first_fp_reg_save != 64)
13170 const char *alloc_rname;
13172 p = rtvec_alloc (2 + 64 - info->first_fp_reg_save);
13174 RTVEC_ELT (p, 0) = gen_rtx_CLOBBER (VOIDmode,
13175 gen_rtx_REG (Pmode,
13176 LINK_REGISTER_REGNUM));
13177 sprintf (rname, "%s%d%s", SAVE_FP_PREFIX,
13178 info->first_fp_reg_save - 32, SAVE_FP_SUFFIX);
13179 alloc_rname = ggc_strdup (rname);
13180 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
13181 gen_rtx_SYMBOL_REF (Pmode,
13183 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
13185 rtx addr, reg, mem;
13186 reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
13187 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
13188 GEN_INT (info->fp_save_offset
13189 + sp_offset + 8*i));
13190 mem = gen_rtx_MEM (DFmode, addr);
13191 set_mem_alias_set (mem, rs6000_sr_alias_set);
13193 RTVEC_ELT (p, i + 2) = gen_rtx_SET (VOIDmode, mem, reg);
13195 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
13196 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
13197 NULL_RTX, NULL_RTX);
13200 /* Save GPRs. This is done as a PARALLEL if we are using
13201 the store-multiple instructions. */
13202 if (! info->world_save_p && using_store_multiple)
13206 p = rtvec_alloc (32 - info->first_gp_reg_save);
13207 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
13209 rtx addr, reg, mem;
13210 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
13211 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
13212 GEN_INT (info->gp_save_offset
13215 mem = gen_rtx_MEM (reg_mode, addr);
13216 set_mem_alias_set (mem, rs6000_sr_alias_set);
13218 RTVEC_ELT (p, i) = gen_rtx_SET (VOIDmode, mem, reg);
13220 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
13221 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
13222 NULL_RTX, NULL_RTX);
13224 else if (! info->world_save_p)
13227 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
13228 if ((regs_ever_live[info->first_gp_reg_save+i]
13229 && (! call_used_regs[info->first_gp_reg_save+i]
13230 || (i+info->first_gp_reg_save
13231 == RS6000_PIC_OFFSET_TABLE_REGNUM
13232 && TARGET_TOC && TARGET_MINIMAL_TOC)))
13233 || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
13234 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
13235 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
13237 rtx addr, reg, mem;
13238 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
13240 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
13242 int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
13245 if (!SPE_CONST_OFFSET_OK (offset))
13247 b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
13248 emit_move_insn (b, GEN_INT (offset));
13251 b = GEN_INT (offset);
13253 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
13254 mem = gen_rtx_MEM (V2SImode, addr);
13255 set_mem_alias_set (mem, rs6000_sr_alias_set);
13256 insn = emit_move_insn (mem, reg);
13258 if (GET_CODE (b) == CONST_INT)
13259 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
13260 NULL_RTX, NULL_RTX);
13262 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
13263 b, GEN_INT (offset));
13267 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
13268 GEN_INT (info->gp_save_offset
13271 mem = gen_rtx_MEM (reg_mode, addr);
13272 set_mem_alias_set (mem, rs6000_sr_alias_set);
13274 insn = emit_move_insn (mem, reg);
13275 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
13276 NULL_RTX, NULL_RTX);
13281 /* ??? There's no need to emit actual instructions here, but it's the
13282 easiest way to get the frame unwind information emitted. */
13283 if (! info->world_save_p && current_function_calls_eh_return)
13285 unsigned int i, regno;
13287 /* In AIX ABI we need to pretend we save r2 here. */
13290 rtx addr, reg, mem;
13292 reg = gen_rtx_REG (reg_mode, 2);
13293 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
13294 GEN_INT (sp_offset + 5 * reg_size));
13295 mem = gen_rtx_MEM (reg_mode, addr);
13296 set_mem_alias_set (mem, rs6000_sr_alias_set);
13298 insn = emit_move_insn (mem, reg);
13299 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
13300 NULL_RTX, NULL_RTX);
13301 PATTERN (insn) = gen_blockage ();
13306 regno = EH_RETURN_DATA_REGNO (i);
13307 if (regno == INVALID_REGNUM)
13310 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, reg_mode, regno,
13311 info->ehrd_offset + sp_offset
13312 + reg_size * (int) i,
13317 /* Save lr if we used it. */
13318 if (! info->world_save_p && info->lr_save_p)
13320 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
13321 GEN_INT (info->lr_save_offset + sp_offset));
13322 rtx reg = gen_rtx_REG (Pmode, 0);
13323 rtx mem = gen_rtx_MEM (Pmode, addr);
13324 /* This should not be of rs6000_sr_alias_set, because of
13325 __builtin_return_address. */
13327 insn = emit_move_insn (mem, reg);
13328 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
13329 NULL_RTX, NULL_RTX);
13332 /* Save CR if we use any that must be preserved. */
13333 if (! info->world_save_p && info->cr_save_p)
13335 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
13336 GEN_INT (info->cr_save_offset + sp_offset));
13337 rtx mem = gen_rtx_MEM (SImode, addr);
13338 /* See the large comment above about why CR2_REGNO is used. */
13339 rtx magic_eh_cr_reg = gen_rtx_REG (SImode, CR2_REGNO);
13341 set_mem_alias_set (mem, rs6000_sr_alias_set);
13343 /* If r12 was used to hold the original sp, copy cr into r0 now
13345 if (REGNO (frame_reg_rtx) == 12)
13349 cr_save_rtx = gen_rtx_REG (SImode, 0);
13350 insn = emit_insn (gen_movesi_from_cr (cr_save_rtx));
13351 RTX_FRAME_RELATED_P (insn) = 1;
13352 set = gen_rtx_SET (VOIDmode, cr_save_rtx, magic_eh_cr_reg);
13353 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
13358 insn = emit_move_insn (mem, cr_save_rtx);
13360 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
13361 NULL_RTX, NULL_RTX);
13364 /* Update stack and set back pointer unless this is V.4,
13365 for which it was done previously. */
13366 if (! info->world_save_p && info->push_p
13367 && !(DEFAULT_ABI == ABI_V4 || current_function_calls_eh_return))
13368 rs6000_emit_allocate_stack (info->total_size, FALSE);
13370 /* Set frame pointer, if needed. */
13371 if (frame_pointer_needed)
13373 insn = emit_move_insn (gen_rtx_REG (Pmode, FRAME_POINTER_REGNUM),
13375 RTX_FRAME_RELATED_P (insn) = 1;
13378 /* If we are using RS6000_PIC_OFFSET_TABLE_REGNUM, we need to set it up. */
13379 if ((TARGET_TOC && TARGET_MINIMAL_TOC && get_pool_size () != 0)
13380 || (DEFAULT_ABI == ABI_V4 && flag_pic == 1
13381 && regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM]))
13383 /* If emit_load_toc_table will use the link register, we need to save
13384 it. We use R12 for this purpose because emit_load_toc_table
13385 can use register 0. This allows us to use a plain 'blr' to return
13386 from the procedure more often. */
13387 int save_LR_around_toc_setup = (TARGET_ELF
13388 && DEFAULT_ABI != ABI_AIX
13390 && ! info->lr_save_p
13391 && EXIT_BLOCK_PTR->pred != NULL);
13392 if (save_LR_around_toc_setup)
13394 rtx lr = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
13396 insn = emit_move_insn (frame_ptr_rtx, lr);
13397 rs6000_maybe_dead (insn);
13398 RTX_FRAME_RELATED_P (insn) = 1;
13400 rs6000_emit_load_toc_table (TRUE);
13402 insn = emit_move_insn (lr, frame_ptr_rtx);
13403 rs6000_maybe_dead (insn);
13404 RTX_FRAME_RELATED_P (insn) = 1;
13407 rs6000_emit_load_toc_table (TRUE);
13411 if (DEFAULT_ABI == ABI_DARWIN
13412 && flag_pic && current_function_uses_pic_offset_table)
13414 rtx lr = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
13415 rtx src = machopic_function_base_sym ();
13417 rs6000_maybe_dead (emit_insn (gen_load_macho_picbase (lr, src)));
13419 insn = emit_move_insn (gen_rtx_REG (Pmode,
13420 RS6000_PIC_OFFSET_TABLE_REGNUM),
13422 rs6000_maybe_dead (insn);
13427 /* Write function prologue. */
13430 rs6000_output_function_prologue (FILE *file,
13431 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
13433 rs6000_stack_t *info = rs6000_stack_info ();
13435 if (TARGET_DEBUG_STACK)
13436 debug_stack_info (info);
13438 /* Write .extern for any function we will call to save and restore
13440 if (info->first_fp_reg_save < 64
13441 && !FP_SAVE_INLINE (info->first_fp_reg_save))
13442 fprintf (file, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
13443 SAVE_FP_PREFIX, info->first_fp_reg_save - 32, SAVE_FP_SUFFIX,
13444 RESTORE_FP_PREFIX, info->first_fp_reg_save - 32,
13445 RESTORE_FP_SUFFIX);
13447 /* Write .extern for AIX common mode routines, if needed. */
13448 if (! TARGET_POWER && ! TARGET_POWERPC && ! common_mode_defined)
13450 fputs ("\t.extern __mulh\n", file);
13451 fputs ("\t.extern __mull\n", file);
13452 fputs ("\t.extern __divss\n", file);
13453 fputs ("\t.extern __divus\n", file);
13454 fputs ("\t.extern __quoss\n", file);
13455 fputs ("\t.extern __quous\n", file);
13456 common_mode_defined = 1;
13459 if (! HAVE_prologue)
13463 /* A NOTE_INSN_DELETED is supposed to be at the start and end of
13464 the "toplevel" insn chain. */
13465 emit_note (NOTE_INSN_DELETED);
13466 rs6000_emit_prologue ();
13467 emit_note (NOTE_INSN_DELETED);
13469 /* Expand INSN_ADDRESSES so final() doesn't crash. */
13473 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
13475 INSN_ADDRESSES_NEW (insn, addr);
13480 if (TARGET_DEBUG_STACK)
13481 debug_rtx_list (get_insns (), 100);
13482 final (get_insns (), file, FALSE, FALSE);
13486 rs6000_pic_labelno++;
13489 /* Emit function epilogue as insns.
13491 At present, dwarf2out_frame_debug_expr doesn't understand
13492 register restores, so we don't bother setting RTX_FRAME_RELATED_P
13493 anywhere in the epilogue. Most of the insns below would in any case
13494 need special notes to explain where r11 is in relation to the stack. */
13497 rs6000_emit_epilogue (int sibcall)
13499 rs6000_stack_t *info;
13500 int restoring_FPRs_inline;
13501 int using_load_multiple;
13502 int using_mfcr_multiple;
13503 int use_backchain_to_restore_sp;
13505 rtx sp_reg_rtx = gen_rtx_REG (Pmode, 1);
13506 rtx frame_reg_rtx = sp_reg_rtx;
13507 enum machine_mode reg_mode = Pmode;
13508 int reg_size = TARGET_32BIT ? 4 : 8;
13511 info = rs6000_stack_info ();
13513 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
13515 reg_mode = V2SImode;
13519 using_load_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
13520 && (!TARGET_SPE_ABI
13521 || info->spe_64bit_regs_used == 0)
13522 && info->first_gp_reg_save < 31);
13523 restoring_FPRs_inline = (sibcall
13524 || current_function_calls_eh_return
13525 || info->first_fp_reg_save == 64
13526 || FP_SAVE_INLINE (info->first_fp_reg_save));
13527 use_backchain_to_restore_sp = (frame_pointer_needed
13528 || current_function_calls_alloca
13529 || info->total_size > 32767);
13530 using_mfcr_multiple = (rs6000_cpu == PROCESSOR_PPC601
13531 || rs6000_cpu == PROCESSOR_PPC603
13532 || rs6000_cpu == PROCESSOR_PPC750
13535 if (info->world_save_p)
13539 const char *alloc_rname;
13542 /* eh_rest_world_r10 will return to the location saved in the LR
13543 stack slot (which is not likely to be our caller.)
13544 Input: R10 -- stack adjustment. Clobbers R0, R11, R12, R7, R8.
13545 rest_world is similar, except any R10 parameter is ignored.
13546 The exception-handling stuff that was here in 2.95 is no
13547 longer necessary. */
13551 + 32 - info->first_gp_reg_save
13552 + LAST_ALTIVEC_REGNO + 1 - info->first_altivec_reg_save
13553 + 63 + 1 - info->first_fp_reg_save);
13555 strcpy (rname, (current_function_calls_eh_return) ?
13556 "*eh_rest_world_r10" : "*rest_world");
13557 alloc_rname = ggc_strdup (rname);
13560 RTVEC_ELT (p, j++) = gen_rtx_RETURN (VOIDmode);
13561 RTVEC_ELT (p, j++) = gen_rtx_USE (VOIDmode,
13562 gen_rtx_REG (Pmode,
13563 LINK_REGISTER_REGNUM));
13565 = gen_rtx_USE (VOIDmode, gen_rtx_SYMBOL_REF (Pmode, alloc_rname));
13566 /* The instruction pattern requires a clobber here;
13567 it is shared with the restVEC helper. */
13569 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 11));
13572 /* CR register traditionally saved as CR2. */
13573 rtx reg = gen_rtx_REG (reg_mode, CR2_REGNO);
13574 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
13575 GEN_INT (info->cr_save_offset));
13576 rtx mem = gen_rtx_MEM (reg_mode, addr);
13577 set_mem_alias_set (mem, rs6000_sr_alias_set);
13579 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
13582 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
13584 rtx reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
13585 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
13586 GEN_INT (info->gp_save_offset
13588 rtx mem = gen_rtx_MEM (reg_mode, addr);
13589 set_mem_alias_set (mem, rs6000_sr_alias_set);
13591 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
13593 for (i = 0; info->first_altivec_reg_save + i <= LAST_ALTIVEC_REGNO; i++)
13595 rtx reg = gen_rtx_REG (V4SImode, info->first_altivec_reg_save + i);
13596 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
13597 GEN_INT (info->altivec_save_offset
13599 rtx mem = gen_rtx_MEM (V4SImode, addr);
13600 set_mem_alias_set (mem, rs6000_sr_alias_set);
13602 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
13604 for (i = 0; info->first_fp_reg_save + i <= 63; i++)
13606 rtx reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
13607 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
13608 GEN_INT (info->fp_save_offset
13610 rtx mem = gen_rtx_MEM (DFmode, addr);
13611 set_mem_alias_set (mem, rs6000_sr_alias_set);
13613 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
13616 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 0));
13618 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, 12));
13620 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, 7));
13622 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, 8));
13624 = gen_rtx_USE (VOIDmode, gen_rtx_REG (SImode, 10));
13625 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
13630 /* If we have a frame pointer, a call to alloca, or a large stack
13631 frame, restore the old stack pointer using the backchain. Otherwise,
13632 we know what size to update it with. */
13633 if (use_backchain_to_restore_sp)
13635 /* Under V.4, don't reset the stack pointer until after we're done
13636 loading the saved registers. */
13637 if (DEFAULT_ABI == ABI_V4)
13638 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
13640 emit_move_insn (frame_reg_rtx,
13641 gen_rtx_MEM (Pmode, sp_reg_rtx));
13644 else if (info->push_p)
13646 if (DEFAULT_ABI == ABI_V4
13647 || current_function_calls_eh_return)
13648 sp_offset = info->total_size;
13651 emit_insn (TARGET_32BIT
13652 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
13653 GEN_INT (info->total_size))
13654 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
13655 GEN_INT (info->total_size)));
13659 /* Restore AltiVec registers if needed. */
13660 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
13664 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
13665 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
13667 rtx addr, areg, mem;
13669 areg = gen_rtx_REG (Pmode, 0);
13671 (areg, GEN_INT (info->altivec_save_offset
13673 + 16 * (i - info->first_altivec_reg_save)));
13675 /* AltiVec addressing mode is [reg+reg]. */
13676 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
13677 mem = gen_rtx_MEM (V4SImode, addr);
13678 set_mem_alias_set (mem, rs6000_sr_alias_set);
13680 emit_move_insn (gen_rtx_REG (V4SImode, i), mem);
13684 /* Restore VRSAVE if needed. */
13685 if (TARGET_ALTIVEC && TARGET_ALTIVEC_VRSAVE
13686 && info->vrsave_mask != 0)
13688 rtx addr, mem, reg;
13690 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
13691 GEN_INT (info->vrsave_save_offset + sp_offset));
13692 mem = gen_rtx_MEM (SImode, addr);
13693 set_mem_alias_set (mem, rs6000_sr_alias_set);
13694 reg = gen_rtx_REG (SImode, 12);
13695 emit_move_insn (reg, mem);
13697 emit_insn (generate_set_vrsave (reg, info, 1));
13700 /* Get the old lr if we saved it. */
13701 if (info->lr_save_p)
13703 rtx mem = gen_frame_mem_offset (Pmode, frame_reg_rtx,
13704 info->lr_save_offset + sp_offset);
13706 set_mem_alias_set (mem, rs6000_sr_alias_set);
13708 emit_move_insn (gen_rtx_REG (Pmode, 0), mem);
13711 /* Get the old cr if we saved it. */
13712 if (info->cr_save_p)
13714 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
13715 GEN_INT (info->cr_save_offset + sp_offset));
13716 rtx mem = gen_rtx_MEM (SImode, addr);
13718 set_mem_alias_set (mem, rs6000_sr_alias_set);
13720 emit_move_insn (gen_rtx_REG (SImode, 12), mem);
13723 /* Set LR here to try to overlap restores below. */
13724 if (info->lr_save_p)
13725 emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
13726 gen_rtx_REG (Pmode, 0));
13728 /* Load exception handler data registers, if needed. */
13729 if (current_function_calls_eh_return)
13731 unsigned int i, regno;
13735 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
13736 GEN_INT (sp_offset + 5 * reg_size));
13737 rtx mem = gen_rtx_MEM (reg_mode, addr);
13739 set_mem_alias_set (mem, rs6000_sr_alias_set);
13741 emit_move_insn (gen_rtx_REG (reg_mode, 2), mem);
13748 regno = EH_RETURN_DATA_REGNO (i);
13749 if (regno == INVALID_REGNUM)
13752 mem = gen_frame_mem_offset (reg_mode, frame_reg_rtx,
13753 info->ehrd_offset + sp_offset
13754 + reg_size * (int) i);
13755 set_mem_alias_set (mem, rs6000_sr_alias_set);
13757 emit_move_insn (gen_rtx_REG (reg_mode, regno), mem);
13761 /* Restore GPRs. This is done as a PARALLEL if we are using
13762 the load-multiple instructions. */
13763 if (using_load_multiple)
13766 p = rtvec_alloc (32 - info->first_gp_reg_save);
13767 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
13769 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
13770 GEN_INT (info->gp_save_offset
13773 rtx mem = gen_rtx_MEM (reg_mode, addr);
13775 set_mem_alias_set (mem, rs6000_sr_alias_set);
13778 gen_rtx_SET (VOIDmode,
13779 gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
13782 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
13785 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
13786 if ((regs_ever_live[info->first_gp_reg_save+i]
13787 && (! call_used_regs[info->first_gp_reg_save+i]
13788 || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
13789 && TARGET_TOC && TARGET_MINIMAL_TOC)))
13790 || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
13791 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
13792 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
13794 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
13795 GEN_INT (info->gp_save_offset
13798 rtx mem = gen_rtx_MEM (reg_mode, addr);
13800 /* Restore 64-bit quantities for SPE. */
13801 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
13803 int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
13806 if (!SPE_CONST_OFFSET_OK (offset))
13808 b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
13809 emit_move_insn (b, GEN_INT (offset));
13812 b = GEN_INT (offset);
13814 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
13815 mem = gen_rtx_MEM (V2SImode, addr);
13818 set_mem_alias_set (mem, rs6000_sr_alias_set);
13820 emit_move_insn (gen_rtx_REG (reg_mode,
13821 info->first_gp_reg_save + i), mem);
13824 /* Restore fpr's if we need to do it without calling a function. */
13825 if (restoring_FPRs_inline)
13826 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
13827 if ((regs_ever_live[info->first_fp_reg_save+i]
13828 && ! call_used_regs[info->first_fp_reg_save+i]))
13831 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
13832 GEN_INT (info->fp_save_offset
13835 mem = gen_rtx_MEM (DFmode, addr);
13836 set_mem_alias_set (mem, rs6000_sr_alias_set);
13838 emit_move_insn (gen_rtx_REG (DFmode,
13839 info->first_fp_reg_save + i),
13843 /* If we saved cr, restore it here. Just those that were used. */
13844 if (info->cr_save_p)
13846 rtx r12_rtx = gen_rtx_REG (SImode, 12);
13849 if (using_mfcr_multiple)
13851 for (i = 0; i < 8; i++)
13852 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
13858 if (using_mfcr_multiple && count > 1)
13863 p = rtvec_alloc (count);
13866 for (i = 0; i < 8; i++)
13867 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
13869 rtvec r = rtvec_alloc (2);
13870 RTVEC_ELT (r, 0) = r12_rtx;
13871 RTVEC_ELT (r, 1) = GEN_INT (1 << (7-i));
13872 RTVEC_ELT (p, ndx) =
13873 gen_rtx_SET (VOIDmode, gen_rtx_REG (CCmode, CR0_REGNO+i),
13874 gen_rtx_UNSPEC (CCmode, r, UNSPEC_MOVESI_TO_CR));
13877 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
13882 for (i = 0; i < 8; i++)
13883 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
13885 emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode,
13891 /* If this is V.4, unwind the stack pointer after all of the loads
13892 have been done. We need to emit a block here so that sched
13893 doesn't decide to move the sp change before the register restores
13894 (which may not have any obvious dependency on the stack). This
13895 doesn't hurt performance, because there is no scheduling that can
13896 be done after this point. */
13897 if (DEFAULT_ABI == ABI_V4
13898 || current_function_calls_eh_return)
13900 if (frame_reg_rtx != sp_reg_rtx)
13901 rs6000_emit_stack_tie ();
13903 if (use_backchain_to_restore_sp)
13905 emit_move_insn (sp_reg_rtx, frame_reg_rtx);
13907 else if (sp_offset != 0)
13909 emit_insn (TARGET_32BIT
13910 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
13911 GEN_INT (sp_offset))
13912 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
13913 GEN_INT (sp_offset)));
13917 if (current_function_calls_eh_return)
13919 rtx sa = EH_RETURN_STACKADJ_RTX;
13920 emit_insn (TARGET_32BIT
13921 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx, sa)
13922 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx, sa));
13928 if (! restoring_FPRs_inline)
13929 p = rtvec_alloc (3 + 64 - info->first_fp_reg_save);
13931 p = rtvec_alloc (2);
13933 RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
13934 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
13935 gen_rtx_REG (Pmode,
13936 LINK_REGISTER_REGNUM));
13938 /* If we have to restore more than two FP registers, branch to the
13939 restore function. It will return to our caller. */
13940 if (! restoring_FPRs_inline)
13944 const char *alloc_rname;
13946 sprintf (rname, "%s%d%s", RESTORE_FP_PREFIX,
13947 info->first_fp_reg_save - 32, RESTORE_FP_SUFFIX);
13948 alloc_rname = ggc_strdup (rname);
13949 RTVEC_ELT (p, 2) = gen_rtx_USE (VOIDmode,
13950 gen_rtx_SYMBOL_REF (Pmode,
13953 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
13956 addr = gen_rtx_PLUS (Pmode, sp_reg_rtx,
13957 GEN_INT (info->fp_save_offset + 8*i));
13958 mem = gen_rtx_MEM (DFmode, addr);
13959 set_mem_alias_set (mem, rs6000_sr_alias_set);
13961 RTVEC_ELT (p, i+3) =
13962 gen_rtx_SET (VOIDmode,
13963 gen_rtx_REG (DFmode, info->first_fp_reg_save + i),
13968 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
13972 /* Write function epilogue. */
13975 rs6000_output_function_epilogue (FILE *file,
13976 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
13978 rs6000_stack_t *info = rs6000_stack_info ();
13980 if (! HAVE_epilogue)
13982 rtx insn = get_last_insn ();
13983 /* If the last insn was a BARRIER, we don't have to write anything except
13984 the trace table. */
13985 if (GET_CODE (insn) == NOTE)
13986 insn = prev_nonnote_insn (insn);
13987 if (insn == 0 || GET_CODE (insn) != BARRIER)
13989 /* This is slightly ugly, but at least we don't have two
13990 copies of the epilogue-emitting code. */
13993 /* A NOTE_INSN_DELETED is supposed to be at the start
13994 and end of the "toplevel" insn chain. */
13995 emit_note (NOTE_INSN_DELETED);
13996 rs6000_emit_epilogue (FALSE);
13997 emit_note (NOTE_INSN_DELETED);
13999 /* Expand INSN_ADDRESSES so final() doesn't crash. */
14003 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
14005 INSN_ADDRESSES_NEW (insn, addr);
14010 if (TARGET_DEBUG_STACK)
14011 debug_rtx_list (get_insns (), 100);
14012 final (get_insns (), file, FALSE, FALSE);
14018 macho_branch_islands ();
14019 /* Mach-O doesn't support labels at the end of objects, so if
14020 it looks like we might want one, insert a NOP. */
14022 rtx insn = get_last_insn ();
14025 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_DELETED_LABEL)
14026 insn = PREV_INSN (insn);
14030 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_DELETED_LABEL)))
14031 fputs ("\tnop\n", file);
14035 /* Output a traceback table here. See /usr/include/sys/debug.h for info
14038 We don't output a traceback table if -finhibit-size-directive was
14039 used. The documentation for -finhibit-size-directive reads
14040 ``don't output a @code{.size} assembler directive, or anything
14041 else that would cause trouble if the function is split in the
14042 middle, and the two halves are placed at locations far apart in
14043 memory.'' The traceback table has this property, since it
14044 includes the offset from the start of the function to the
14045 traceback table itself.
14047 System V.4 Powerpc's (and the embedded ABI derived from it) use a
14048 different traceback table. */
14049 if (DEFAULT_ABI == ABI_AIX && ! flag_inhibit_size_directive
14050 && rs6000_traceback != traceback_none)
14052 const char *fname = NULL;
14053 const char *language_string = lang_hooks.name;
14054 int fixed_parms = 0, float_parms = 0, parm_info = 0;
14056 int optional_tbtab;
14058 if (rs6000_traceback == traceback_full)
14059 optional_tbtab = 1;
14060 else if (rs6000_traceback == traceback_part)
14061 optional_tbtab = 0;
14063 optional_tbtab = !optimize_size && !TARGET_ELF;
14065 if (optional_tbtab)
14067 fname = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
14068 while (*fname == '.') /* V.4 encodes . in the name */
14071 /* Need label immediately before tbtab, so we can compute
14072 its offset from the function start. */
14073 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
14074 ASM_OUTPUT_LABEL (file, fname);
14077 /* The .tbtab pseudo-op can only be used for the first eight
14078 expressions, since it can't handle the possibly variable
14079 length fields that follow. However, if you omit the optional
14080 fields, the assembler outputs zeros for all optional fields
14081 anyways, giving each variable length field is minimum length
14082 (as defined in sys/debug.h). Thus we can not use the .tbtab
14083 pseudo-op at all. */
14085 /* An all-zero word flags the start of the tbtab, for debuggers
14086 that have to find it by searching forward from the entry
14087 point or from the current pc. */
14088 fputs ("\t.long 0\n", file);
14090 /* Tbtab format type. Use format type 0. */
14091 fputs ("\t.byte 0,", file);
14093 /* Language type. Unfortunately, there does not seem to be any
14094 official way to discover the language being compiled, so we
14095 use language_string.
14096 C is 0. Fortran is 1. Pascal is 2. Ada is 3. C++ is 9.
14097 Java is 13. Objective-C is 14. */
14098 if (! strcmp (language_string, "GNU C"))
14100 else if (! strcmp (language_string, "GNU F77")
14101 || ! strcmp (language_string, "GNU F95"))
14103 else if (! strcmp (language_string, "GNU Pascal"))
14105 else if (! strcmp (language_string, "GNU Ada"))
14107 else if (! strcmp (language_string, "GNU C++"))
14109 else if (! strcmp (language_string, "GNU Java"))
14111 else if (! strcmp (language_string, "GNU Objective-C"))
14115 fprintf (file, "%d,", i);
14117 /* 8 single bit fields: global linkage (not set for C extern linkage,
14118 apparently a PL/I convention?), out-of-line epilogue/prologue, offset
14119 from start of procedure stored in tbtab, internal function, function
14120 has controlled storage, function has no toc, function uses fp,
14121 function logs/aborts fp operations. */
14122 /* Assume that fp operations are used if any fp reg must be saved. */
14123 fprintf (file, "%d,",
14124 (optional_tbtab << 5) | ((info->first_fp_reg_save != 64) << 1));
14126 /* 6 bitfields: function is interrupt handler, name present in
14127 proc table, function calls alloca, on condition directives
14128 (controls stack walks, 3 bits), saves condition reg, saves
14130 /* The `function calls alloca' bit seems to be set whenever reg 31 is
14131 set up as a frame pointer, even when there is no alloca call. */
14132 fprintf (file, "%d,",
14133 ((optional_tbtab << 6)
14134 | ((optional_tbtab & frame_pointer_needed) << 5)
14135 | (info->cr_save_p << 1)
14136 | (info->lr_save_p)));
14138 /* 3 bitfields: saves backchain, fixup code, number of fpr saved
14140 fprintf (file, "%d,",
14141 (info->push_p << 7) | (64 - info->first_fp_reg_save));
14143 /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits). */
14144 fprintf (file, "%d,", (32 - first_reg_to_save ()));
14146 if (optional_tbtab)
14148 /* Compute the parameter info from the function decl argument
14151 int next_parm_info_bit = 31;
14153 for (decl = DECL_ARGUMENTS (current_function_decl);
14154 decl; decl = TREE_CHAIN (decl))
14156 rtx parameter = DECL_INCOMING_RTL (decl);
14157 enum machine_mode mode = GET_MODE (parameter);
14159 if (GET_CODE (parameter) == REG)
14161 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
14167 if (mode == SFmode)
14169 else if (mode == DFmode || mode == TFmode)
14174 /* If only one bit will fit, don't or in this entry. */
14175 if (next_parm_info_bit > 0)
14176 parm_info |= (bits << (next_parm_info_bit - 1));
14177 next_parm_info_bit -= 2;
14181 fixed_parms += ((GET_MODE_SIZE (mode)
14182 + (UNITS_PER_WORD - 1))
14184 next_parm_info_bit -= 1;
14190 /* Number of fixed point parameters. */
14191 /* This is actually the number of words of fixed point parameters; thus
14192 an 8 byte struct counts as 2; and thus the maximum value is 8. */
14193 fprintf (file, "%d,", fixed_parms);
14195 /* 2 bitfields: number of floating point parameters (7 bits), parameters
14197 /* This is actually the number of fp registers that hold parameters;
14198 and thus the maximum value is 13. */
14199 /* Set parameters on stack bit if parameters are not in their original
14200 registers, regardless of whether they are on the stack? Xlc
14201 seems to set the bit when not optimizing. */
14202 fprintf (file, "%d\n", ((float_parms << 1) | (! optimize)));
14204 if (! optional_tbtab)
14207 /* Optional fields follow. Some are variable length. */
14209 /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
14210 11 double float. */
14211 /* There is an entry for each parameter in a register, in the order that
14212 they occur in the parameter list. Any intervening arguments on the
14213 stack are ignored. If the list overflows a long (max possible length
14214 34 bits) then completely leave off all elements that don't fit. */
14215 /* Only emit this long if there was at least one parameter. */
14216 if (fixed_parms || float_parms)
14217 fprintf (file, "\t.long %d\n", parm_info);
14219 /* Offset from start of code to tb table. */
14220 fputs ("\t.long ", file);
14221 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
14223 RS6000_OUTPUT_BASENAME (file, fname);
14225 assemble_name (file, fname);
14227 fputs ("-.", file);
14229 RS6000_OUTPUT_BASENAME (file, fname);
14231 assemble_name (file, fname);
14235 /* Interrupt handler mask. */
14236 /* Omit this long, since we never set the interrupt handler bit
14239 /* Number of CTL (controlled storage) anchors. */
14240 /* Omit this long, since the has_ctl bit is never set above. */
14242 /* Displacement into stack of each CTL anchor. */
14243 /* Omit this list of longs, because there are no CTL anchors. */
14245 /* Length of function name. */
14248 fprintf (file, "\t.short %d\n", (int) strlen (fname));
14250 /* Function name. */
14251 assemble_string (fname, strlen (fname));
14253 /* Register for alloca automatic storage; this is always reg 31.
14254 Only emit this if the alloca bit was set above. */
14255 if (frame_pointer_needed)
14256 fputs ("\t.byte 31\n", file);
14258 fputs ("\t.align 2\n", file);
14262 /* A C compound statement that outputs the assembler code for a thunk
14263 function, used to implement C++ virtual function calls with
14264 multiple inheritance. The thunk acts as a wrapper around a virtual
14265 function, adjusting the implicit object parameter before handing
14266 control off to the real function.
14268 First, emit code to add the integer DELTA to the location that
14269 contains the incoming first argument. Assume that this argument
14270 contains a pointer, and is the one used to pass the `this' pointer
14271 in C++. This is the incoming argument *before* the function
14272 prologue, e.g. `%o0' on a sparc. The addition must preserve the
14273 values of all other incoming arguments.
14275 After the addition, emit code to jump to FUNCTION, which is a
14276 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does
14277 not touch the return address. Hence returning from FUNCTION will
14278 return to whoever called the current `thunk'.
14280 The effect must be as if FUNCTION had been called directly with the
14281 adjusted first argument. This macro is responsible for emitting
14282 all of the code for a thunk function; output_function_prologue()
14283 and output_function_epilogue() are not invoked.
14285 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already
14286 been extracted from it.) It might possibly be useful on some
14287 targets, but probably not.
14289 If you do not define this macro, the target-independent code in the
14290 C++ frontend will generate a less efficient heavyweight thunk that
14291 calls FUNCTION instead of jumping to it. The generic approach does
14292 not support varargs. */
14295 rs6000_output_mi_thunk (FILE *file, tree thunk_fndecl ATTRIBUTE_UNUSED,
14296 HOST_WIDE_INT delta, HOST_WIDE_INT vcall_offset,
14299 rtx this, insn, funexp;
14301 reload_completed = 1;
14302 epilogue_completed = 1;
14303 no_new_pseudos = 1;
14304 reset_block_changes ();
14306 /* Mark the end of the (empty) prologue. */
14307 emit_note (NOTE_INSN_PROLOGUE_END);
14309 /* Find the "this" pointer. If the function returns a structure,
14310 the structure return pointer is in r3. */
14311 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
14312 this = gen_rtx_REG (Pmode, 4);
14314 this = gen_rtx_REG (Pmode, 3);
14316 /* Apply the constant offset, if required. */
14319 rtx delta_rtx = GEN_INT (delta);
14320 emit_insn (TARGET_32BIT
14321 ? gen_addsi3 (this, this, delta_rtx)
14322 : gen_adddi3 (this, this, delta_rtx));
14325 /* Apply the offset from the vtable, if required. */
14328 rtx vcall_offset_rtx = GEN_INT (vcall_offset);
14329 rtx tmp = gen_rtx_REG (Pmode, 12);
14331 emit_move_insn (tmp, gen_rtx_MEM (Pmode, this));
14332 if (((unsigned HOST_WIDE_INT) vcall_offset) + 0x8000 >= 0x10000)
14334 emit_insn (TARGET_32BIT
14335 ? gen_addsi3 (tmp, tmp, vcall_offset_rtx)
14336 : gen_adddi3 (tmp, tmp, vcall_offset_rtx));
14337 emit_move_insn (tmp, gen_rtx_MEM (Pmode, tmp));
14341 rtx loc = gen_rtx_PLUS (Pmode, tmp, vcall_offset_rtx);
14343 emit_move_insn (tmp, gen_rtx_MEM (Pmode, loc));
14345 emit_insn (TARGET_32BIT
14346 ? gen_addsi3 (this, this, tmp)
14347 : gen_adddi3 (this, this, tmp));
14350 /* Generate a tail call to the target function. */
14351 if (!TREE_USED (function))
14353 assemble_external (function);
14354 TREE_USED (function) = 1;
14356 funexp = XEXP (DECL_RTL (function), 0);
14357 funexp = gen_rtx_MEM (FUNCTION_MODE, funexp);
14360 if (MACHOPIC_INDIRECT)
14361 funexp = machopic_indirect_call_target (funexp);
14364 /* gen_sibcall expects reload to convert scratch pseudo to LR so we must
14365 generate sibcall RTL explicitly to avoid constraint abort. */
14366 insn = emit_call_insn (
14367 gen_rtx_PARALLEL (VOIDmode,
14369 gen_rtx_CALL (VOIDmode,
14370 funexp, const0_rtx),
14371 gen_rtx_USE (VOIDmode, const0_rtx),
14372 gen_rtx_USE (VOIDmode,
14373 gen_rtx_REG (SImode,
14374 LINK_REGISTER_REGNUM)),
14375 gen_rtx_RETURN (VOIDmode))));
14376 SIBLING_CALL_P (insn) = 1;
14379 /* Run just enough of rest_of_compilation to get the insns emitted.
14380 There's not really enough bulk here to make other passes such as
14381 instruction scheduling worth while. Note that use_thunk calls
14382 assemble_start_function and assemble_end_function. */
14383 insn = get_insns ();
14384 insn_locators_initialize ();
14385 shorten_branches (insn);
14386 final_start_function (insn, file, 1);
14387 final (insn, file, 1, 0);
14388 final_end_function ();
14390 reload_completed = 0;
14391 epilogue_completed = 0;
14392 no_new_pseudos = 0;
14395 /* A quick summary of the various types of 'constant-pool tables'
14398 Target Flags Name One table per
14399 AIX (none) AIX TOC object file
14400 AIX -mfull-toc AIX TOC object file
14401 AIX -mminimal-toc AIX minimal TOC translation unit
14402 SVR4/EABI (none) SVR4 SDATA object file
14403 SVR4/EABI -fpic SVR4 pic object file
14404 SVR4/EABI -fPIC SVR4 PIC translation unit
14405 SVR4/EABI -mrelocatable EABI TOC function
14406 SVR4/EABI -maix AIX TOC object file
14407 SVR4/EABI -maix -mminimal-toc
14408 AIX minimal TOC translation unit
14410 Name Reg. Set by entries contains:
14411 made by addrs? fp? sum?
14413 AIX TOC 2 crt0 as Y option option
14414 AIX minimal TOC 30 prolog gcc Y Y option
14415 SVR4 SDATA 13 crt0 gcc N Y N
14416 SVR4 pic 30 prolog ld Y not yet N
14417 SVR4 PIC 30 prolog gcc Y option option
14418 EABI TOC 30 prolog gcc Y option option
14422 /* Hash functions for the hash table. */
14425 rs6000_hash_constant (rtx k)
14427 enum rtx_code code = GET_CODE (k);
14428 enum machine_mode mode = GET_MODE (k);
14429 unsigned result = (code << 3) ^ mode;
14430 const char *format;
14433 format = GET_RTX_FORMAT (code);
14434 flen = strlen (format);
14440 return result * 1231 + (unsigned) INSN_UID (XEXP (k, 0));
14443 if (mode != VOIDmode)
14444 return real_hash (CONST_DOUBLE_REAL_VALUE (k)) * result;
14456 for (; fidx < flen; fidx++)
14457 switch (format[fidx])
14462 const char *str = XSTR (k, fidx);
14463 len = strlen (str);
14464 result = result * 613 + len;
14465 for (i = 0; i < len; i++)
14466 result = result * 613 + (unsigned) str[i];
14471 result = result * 1231 + rs6000_hash_constant (XEXP (k, fidx));
14475 result = result * 613 + (unsigned) XINT (k, fidx);
14478 if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT))
14479 result = result * 613 + (unsigned) XWINT (k, fidx);
14483 for (i = 0; i < sizeof(HOST_WIDE_INT)/sizeof(unsigned); i++)
14484 result = result * 613 + (unsigned) (XWINT (k, fidx)
14498 toc_hash_function (const void *hash_entry)
14500 const struct toc_hash_struct *thc =
14501 (const struct toc_hash_struct *) hash_entry;
14502 return rs6000_hash_constant (thc->key) ^ thc->key_mode;
14505 /* Compare H1 and H2 for equivalence. */
14508 toc_hash_eq (const void *h1, const void *h2)
14510 rtx r1 = ((const struct toc_hash_struct *) h1)->key;
14511 rtx r2 = ((const struct toc_hash_struct *) h2)->key;
14513 if (((const struct toc_hash_struct *) h1)->key_mode
14514 != ((const struct toc_hash_struct *) h2)->key_mode)
14517 return rtx_equal_p (r1, r2);
14520 /* These are the names given by the C++ front-end to vtables, and
14521 vtable-like objects. Ideally, this logic should not be here;
14522 instead, there should be some programmatic way of inquiring as
14523 to whether or not an object is a vtable. */
14525 #define VTABLE_NAME_P(NAME) \
14526 (strncmp ("_vt.", name, strlen("_vt.")) == 0 \
14527 || strncmp ("_ZTV", name, strlen ("_ZTV")) == 0 \
14528 || strncmp ("_ZTT", name, strlen ("_ZTT")) == 0 \
14529 || strncmp ("_ZTI", name, strlen ("_ZTI")) == 0 \
14530 || strncmp ("_ZTC", name, strlen ("_ZTC")) == 0)
14533 rs6000_output_symbol_ref (FILE *file, rtx x)
14535 /* Currently C++ toc references to vtables can be emitted before it
14536 is decided whether the vtable is public or private. If this is
14537 the case, then the linker will eventually complain that there is
14538 a reference to an unknown section. Thus, for vtables only,
14539 we emit the TOC reference to reference the symbol and not the
14541 const char *name = XSTR (x, 0);
14543 if (VTABLE_NAME_P (name))
14545 RS6000_OUTPUT_BASENAME (file, name);
14548 assemble_name (file, name);
14551 /* Output a TOC entry. We derive the entry name from what is being
14555 output_toc (FILE *file, rtx x, int labelno, enum machine_mode mode)
14558 const char *name = buf;
14559 const char *real_name;
14566 /* When the linker won't eliminate them, don't output duplicate
14567 TOC entries (this happens on AIX if there is any kind of TOC,
14568 and on SVR4 under -fPIC or -mrelocatable). Don't do this for
14570 if (TARGET_TOC && GET_CODE (x) != LABEL_REF)
14572 struct toc_hash_struct *h;
14575 /* Create toc_hash_table. This can't be done at OVERRIDE_OPTIONS
14576 time because GGC is not initialized at that point. */
14577 if (toc_hash_table == NULL)
14578 toc_hash_table = htab_create_ggc (1021, toc_hash_function,
14579 toc_hash_eq, NULL);
14581 h = ggc_alloc (sizeof (*h));
14583 h->key_mode = mode;
14584 h->labelno = labelno;
14586 found = htab_find_slot (toc_hash_table, h, 1);
14587 if (*found == NULL)
14589 else /* This is indeed a duplicate.
14590 Set this label equal to that label. */
14592 fputs ("\t.set ", file);
14593 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
14594 fprintf (file, "%d,", labelno);
14595 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
14596 fprintf (file, "%d\n", ((*(const struct toc_hash_struct **)
14602 /* If we're going to put a double constant in the TOC, make sure it's
14603 aligned properly when strict alignment is on. */
14604 if (GET_CODE (x) == CONST_DOUBLE
14605 && STRICT_ALIGNMENT
14606 && GET_MODE_BITSIZE (mode) >= 64
14607 && ! (TARGET_NO_FP_IN_TOC && ! TARGET_MINIMAL_TOC)) {
14608 ASM_OUTPUT_ALIGN (file, 3);
14611 (*targetm.asm_out.internal_label) (file, "LC", labelno);
14613 /* Handle FP constants specially. Note that if we have a minimal
14614 TOC, things we put here aren't actually in the TOC, so we can allow
14616 if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == TFmode)
14618 REAL_VALUE_TYPE rv;
14621 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
14622 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);
14626 if (TARGET_MINIMAL_TOC)
14627 fputs (DOUBLE_INT_ASM_OP, file);
14629 fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
14630 k[0] & 0xffffffff, k[1] & 0xffffffff,
14631 k[2] & 0xffffffff, k[3] & 0xffffffff);
14632 fprintf (file, "0x%lx%08lx,0x%lx%08lx\n",
14633 k[0] & 0xffffffff, k[1] & 0xffffffff,
14634 k[2] & 0xffffffff, k[3] & 0xffffffff);
14639 if (TARGET_MINIMAL_TOC)
14640 fputs ("\t.long ", file);
14642 fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
14643 k[0] & 0xffffffff, k[1] & 0xffffffff,
14644 k[2] & 0xffffffff, k[3] & 0xffffffff);
14645 fprintf (file, "0x%lx,0x%lx,0x%lx,0x%lx\n",
14646 k[0] & 0xffffffff, k[1] & 0xffffffff,
14647 k[2] & 0xffffffff, k[3] & 0xffffffff);
14651 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
14653 REAL_VALUE_TYPE rv;
14656 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
14657 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
14661 if (TARGET_MINIMAL_TOC)
14662 fputs (DOUBLE_INT_ASM_OP, file);
14664 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
14665 k[0] & 0xffffffff, k[1] & 0xffffffff);
14666 fprintf (file, "0x%lx%08lx\n",
14667 k[0] & 0xffffffff, k[1] & 0xffffffff);
14672 if (TARGET_MINIMAL_TOC)
14673 fputs ("\t.long ", file);
14675 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
14676 k[0] & 0xffffffff, k[1] & 0xffffffff);
14677 fprintf (file, "0x%lx,0x%lx\n",
14678 k[0] & 0xffffffff, k[1] & 0xffffffff);
14682 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
14684 REAL_VALUE_TYPE rv;
14687 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
14688 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
14692 if (TARGET_MINIMAL_TOC)
14693 fputs (DOUBLE_INT_ASM_OP, file);
14695 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
14696 fprintf (file, "0x%lx00000000\n", l & 0xffffffff);
14701 if (TARGET_MINIMAL_TOC)
14702 fputs ("\t.long ", file);
14704 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
14705 fprintf (file, "0x%lx\n", l & 0xffffffff);
14709 else if (GET_MODE (x) == VOIDmode
14710 && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
14712 unsigned HOST_WIDE_INT low;
14713 HOST_WIDE_INT high;
14715 if (GET_CODE (x) == CONST_DOUBLE)
14717 low = CONST_DOUBLE_LOW (x);
14718 high = CONST_DOUBLE_HIGH (x);
14721 #if HOST_BITS_PER_WIDE_INT == 32
14724 high = (low & 0x80000000) ? ~0 : 0;
14728 low = INTVAL (x) & 0xffffffff;
14729 high = (HOST_WIDE_INT) INTVAL (x) >> 32;
14733 /* TOC entries are always Pmode-sized, but since this
14734 is a bigendian machine then if we're putting smaller
14735 integer constants in the TOC we have to pad them.
14736 (This is still a win over putting the constants in
14737 a separate constant pool, because then we'd have
14738 to have both a TOC entry _and_ the actual constant.)
14740 For a 32-bit target, CONST_INT values are loaded and shifted
14741 entirely within `low' and can be stored in one TOC entry. */
14743 if (TARGET_64BIT && POINTER_SIZE < GET_MODE_BITSIZE (mode))
14744 abort ();/* It would be easy to make this work, but it doesn't now. */
14746 if (POINTER_SIZE > GET_MODE_BITSIZE (mode))
14748 #if HOST_BITS_PER_WIDE_INT == 32
14749 lshift_double (low, high, POINTER_SIZE - GET_MODE_BITSIZE (mode),
14750 POINTER_SIZE, &low, &high, 0);
14753 low <<= POINTER_SIZE - GET_MODE_BITSIZE (mode);
14754 high = (HOST_WIDE_INT) low >> 32;
14761 if (TARGET_MINIMAL_TOC)
14762 fputs (DOUBLE_INT_ASM_OP, file);
14764 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
14765 (long) high & 0xffffffff, (long) low & 0xffffffff);
14766 fprintf (file, "0x%lx%08lx\n",
14767 (long) high & 0xffffffff, (long) low & 0xffffffff);
14772 if (POINTER_SIZE < GET_MODE_BITSIZE (mode))
14774 if (TARGET_MINIMAL_TOC)
14775 fputs ("\t.long ", file);
14777 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
14778 (long) high & 0xffffffff, (long) low & 0xffffffff);
14779 fprintf (file, "0x%lx,0x%lx\n",
14780 (long) high & 0xffffffff, (long) low & 0xffffffff);
14784 if (TARGET_MINIMAL_TOC)
14785 fputs ("\t.long ", file);
14787 fprintf (file, "\t.tc IS_%lx[TC],", (long) low & 0xffffffff);
14788 fprintf (file, "0x%lx\n", (long) low & 0xffffffff);
14794 if (GET_CODE (x) == CONST)
14796 if (GET_CODE (XEXP (x, 0)) != PLUS)
14799 base = XEXP (XEXP (x, 0), 0);
14800 offset = INTVAL (XEXP (XEXP (x, 0), 1));
14803 if (GET_CODE (base) == SYMBOL_REF)
14804 name = XSTR (base, 0);
14805 else if (GET_CODE (base) == LABEL_REF)
14806 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (XEXP (base, 0)));
14807 else if (GET_CODE (base) == CODE_LABEL)
14808 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (base));
14812 real_name = (*targetm.strip_name_encoding) (name);
14813 if (TARGET_MINIMAL_TOC)
14814 fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
14817 fprintf (file, "\t.tc %s", real_name);
14820 fprintf (file, ".N%d", - offset);
14822 fprintf (file, ".P%d", offset);
14824 fputs ("[TC],", file);
14827 /* Currently C++ toc references to vtables can be emitted before it
14828 is decided whether the vtable is public or private. If this is
14829 the case, then the linker will eventually complain that there is
14830 a TOC reference to an unknown section. Thus, for vtables only,
14831 we emit the TOC reference to reference the symbol and not the
14833 if (VTABLE_NAME_P (name))
14835 RS6000_OUTPUT_BASENAME (file, name);
14837 fprintf (file, "%d", offset);
14838 else if (offset > 0)
14839 fprintf (file, "+%d", offset);
14842 output_addr_const (file, x);
14846 /* Output an assembler pseudo-op to write an ASCII string of N characters
14847 starting at P to FILE.
14849 On the RS/6000, we have to do this using the .byte operation and
14850 write out special characters outside the quoted string.
14851 Also, the assembler is broken; very long strings are truncated,
14852 so we must artificially break them up early. */
14855 output_ascii (FILE *file, const char *p, int n)
14858 int i, count_string;
14859 const char *for_string = "\t.byte \"";
14860 const char *for_decimal = "\t.byte ";
14861 const char *to_close = NULL;
14864 for (i = 0; i < n; i++)
14867 if (c >= ' ' && c < 0177)
14870 fputs (for_string, file);
14873 /* Write two quotes to get one. */
14881 for_decimal = "\"\n\t.byte ";
14885 if (count_string >= 512)
14887 fputs (to_close, file);
14889 for_string = "\t.byte \"";
14890 for_decimal = "\t.byte ";
14898 fputs (for_decimal, file);
14899 fprintf (file, "%d", c);
14901 for_string = "\n\t.byte \"";
14902 for_decimal = ", ";
14908 /* Now close the string if we have written one. Then end the line. */
14910 fputs (to_close, file);
14913 /* Generate a unique section name for FILENAME for a section type
14914 represented by SECTION_DESC. Output goes into BUF.
14916 SECTION_DESC can be any string, as long as it is different for each
14917 possible section type.
14919 We name the section in the same manner as xlc. The name begins with an
14920 underscore followed by the filename (after stripping any leading directory
14921 names) with the last period replaced by the string SECTION_DESC. If
14922 FILENAME does not contain a period, SECTION_DESC is appended to the end of
14926 rs6000_gen_section_name (char **buf, const char *filename,
14927 const char *section_desc)
14929 const char *q, *after_last_slash, *last_period = 0;
14933 after_last_slash = filename;
14934 for (q = filename; *q; q++)
14937 after_last_slash = q + 1;
14938 else if (*q == '.')
14942 len = strlen (after_last_slash) + strlen (section_desc) + 2;
14943 *buf = (char *) xmalloc (len);
14948 for (q = after_last_slash; *q; q++)
14950 if (q == last_period)
14952 strcpy (p, section_desc);
14953 p += strlen (section_desc);
14957 else if (ISALNUM (*q))
14961 if (last_period == 0)
14962 strcpy (p, section_desc);
14967 /* Emit profile function. */
14970 output_profile_hook (int labelno ATTRIBUTE_UNUSED)
14972 if (TARGET_PROFILE_KERNEL)
14975 if (DEFAULT_ABI == ABI_AIX)
14977 #ifndef NO_PROFILE_COUNTERS
14978 # define NO_PROFILE_COUNTERS 0
14980 if (NO_PROFILE_COUNTERS)
14981 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 0);
14985 const char *label_name;
14988 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
14989 label_name = (*targetm.strip_name_encoding) (ggc_strdup (buf));
14990 fun = gen_rtx_SYMBOL_REF (Pmode, label_name);
14992 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 1,
14996 else if (DEFAULT_ABI == ABI_DARWIN)
14998 const char *mcount_name = RS6000_MCOUNT;
14999 int caller_addr_regno = LINK_REGISTER_REGNUM;
15001 /* Be conservative and always set this, at least for now. */
15002 current_function_uses_pic_offset_table = 1;
15005 /* For PIC code, set up a stub and collect the caller's address
15006 from r0, which is where the prologue puts it. */
15007 if (MACHOPIC_INDIRECT
15008 && current_function_uses_pic_offset_table)
15009 caller_addr_regno = 0;
15011 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, mcount_name),
15013 gen_rtx_REG (Pmode, caller_addr_regno), Pmode);
15017 /* Write function profiler code. */
15020 output_function_profiler (FILE *file, int labelno)
15025 switch (DEFAULT_ABI)
15034 warning ("no profiling of 64-bit code for this ABI");
15037 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
15038 fprintf (file, "\tmflr %s\n", reg_names[0]);
15041 fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file);
15042 asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
15043 reg_names[0], save_lr, reg_names[1]);
15044 asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
15045 asm_fprintf (file, "\t{l|lwz} %s,", reg_names[0]);
15046 assemble_name (file, buf);
15047 asm_fprintf (file, "@got(%s)\n", reg_names[12]);
15049 else if (flag_pic > 1)
15051 asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
15052 reg_names[0], save_lr, reg_names[1]);
15053 /* Now, we need to get the address of the label. */
15054 fputs ("\tbl 1f\n\t.long ", file);
15055 assemble_name (file, buf);
15056 fputs ("-.\n1:", file);
15057 asm_fprintf (file, "\tmflr %s\n", reg_names[11]);
15058 asm_fprintf (file, "\t{l|lwz} %s,0(%s)\n",
15059 reg_names[0], reg_names[11]);
15060 asm_fprintf (file, "\t{cax|add} %s,%s,%s\n",
15061 reg_names[0], reg_names[0], reg_names[11]);
15065 asm_fprintf (file, "\t{liu|lis} %s,", reg_names[12]);
15066 assemble_name (file, buf);
15067 fputs ("@ha\n", file);
15068 asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
15069 reg_names[0], save_lr, reg_names[1]);
15070 asm_fprintf (file, "\t{cal|la} %s,", reg_names[0]);
15071 assemble_name (file, buf);
15072 asm_fprintf (file, "@l(%s)\n", reg_names[12]);
15075 /* ABI_V4 saves the static chain reg with ASM_OUTPUT_REG_PUSH. */
15076 fprintf (file, "\tbl %s%s\n",
15077 RS6000_MCOUNT, flag_pic ? "@plt" : "");
15082 if (!TARGET_PROFILE_KERNEL)
15084 /* Don't do anything, done in output_profile_hook (). */
15091 asm_fprintf (file, "\tmflr %s\n", reg_names[0]);
15092 asm_fprintf (file, "\tstd %s,16(%s)\n", reg_names[0], reg_names[1]);
15094 if (cfun->static_chain_decl != NULL)
15096 asm_fprintf (file, "\tstd %s,24(%s)\n",
15097 reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
15098 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
15099 asm_fprintf (file, "\tld %s,24(%s)\n",
15100 reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
15103 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
15110 /* Power4 load update and store update instructions are cracked into a
15111 load or store and an integer insn which are executed in the same cycle.
15112 Branches have their own dispatch slot which does not count against the
15113 GCC issue rate, but it changes the program flow so there are no other
15114 instructions to issue in this cycle. */
15117 rs6000_variable_issue (FILE *stream ATTRIBUTE_UNUSED,
15118 int verbose ATTRIBUTE_UNUSED,
15119 rtx insn, int more)
15121 if (GET_CODE (PATTERN (insn)) == USE
15122 || GET_CODE (PATTERN (insn)) == CLOBBER)
15125 if (rs6000_sched_groups)
15127 if (is_microcoded_insn (insn))
15129 else if (is_cracked_insn (insn))
15130 return more > 2 ? more - 2 : 0;
15136 /* Adjust the cost of a scheduling dependency. Return the new cost of
15137 a dependency LINK or INSN on DEP_INSN. COST is the current cost. */
15140 rs6000_adjust_cost (rtx insn, rtx link, rtx dep_insn ATTRIBUTE_UNUSED,
15143 if (! recog_memoized (insn))
15146 if (REG_NOTE_KIND (link) != 0)
15149 if (REG_NOTE_KIND (link) == 0)
15151 /* Data dependency; DEP_INSN writes a register that INSN reads
15152 some cycles later. */
15153 switch (get_attr_type (insn))
15156 /* Tell the first scheduling pass about the latency between
15157 a mtctr and bctr (and mtlr and br/blr). The first
15158 scheduling pass will not know about this latency since
15159 the mtctr instruction, which has the latency associated
15160 to it, will be generated by reload. */
15161 return TARGET_POWER ? 5 : 4;
15163 /* Leave some extra cycles between a compare and its
15164 dependent branch, to inhibit expensive mispredicts. */
15165 if ((rs6000_cpu_attr == CPU_PPC603
15166 || rs6000_cpu_attr == CPU_PPC604
15167 || rs6000_cpu_attr == CPU_PPC604E
15168 || rs6000_cpu_attr == CPU_PPC620
15169 || rs6000_cpu_attr == CPU_PPC630
15170 || rs6000_cpu_attr == CPU_PPC750
15171 || rs6000_cpu_attr == CPU_PPC7400
15172 || rs6000_cpu_attr == CPU_PPC7450
15173 || rs6000_cpu_attr == CPU_POWER4
15174 || rs6000_cpu_attr == CPU_POWER5)
15175 && recog_memoized (dep_insn)
15176 && (INSN_CODE (dep_insn) >= 0)
15177 && (get_attr_type (dep_insn) == TYPE_CMP
15178 || get_attr_type (dep_insn) == TYPE_COMPARE
15179 || get_attr_type (dep_insn) == TYPE_DELAYED_COMPARE
15180 || get_attr_type (dep_insn) == TYPE_IMUL_COMPARE
15181 || get_attr_type (dep_insn) == TYPE_LMUL_COMPARE
15182 || get_attr_type (dep_insn) == TYPE_FPCOMPARE
15183 || get_attr_type (dep_insn) == TYPE_CR_LOGICAL
15184 || get_attr_type (dep_insn) == TYPE_DELAYED_CR))
15189 /* Fall out to return default cost. */
15195 /* The function returns a true if INSN is microcoded.
15196 Return false otherwise. */
15199 is_microcoded_insn (rtx insn)
15201 if (!insn || !INSN_P (insn)
15202 || GET_CODE (PATTERN (insn)) == USE
15203 || GET_CODE (PATTERN (insn)) == CLOBBER)
15206 if (rs6000_sched_groups)
15208 enum attr_type type = get_attr_type (insn);
15209 if (type == TYPE_LOAD_EXT_U
15210 || type == TYPE_LOAD_EXT_UX
15211 || type == TYPE_LOAD_UX
15212 || type == TYPE_STORE_UX
15213 || type == TYPE_MFCR)
15220 /* The function returns a nonzero value if INSN can be scheduled only
15221 as the first insn in a dispatch group ("dispatch-slot restricted").
15222 In this case, the returned value indicates how many dispatch slots
15223 the insn occupies (at the beginning of the group).
15224 Return 0 otherwise. */
15227 is_dispatch_slot_restricted (rtx insn)
15229 enum attr_type type;
15231 if (!rs6000_sched_groups)
15235 || insn == NULL_RTX
15236 || GET_CODE (insn) == NOTE
15237 || GET_CODE (PATTERN (insn)) == USE
15238 || GET_CODE (PATTERN (insn)) == CLOBBER)
15241 type = get_attr_type (insn);
15248 case TYPE_DELAYED_CR:
15249 case TYPE_CR_LOGICAL:
15257 if (rs6000_cpu == PROCESSOR_POWER5
15258 && is_cracked_insn (insn))
15264 /* The function returns true if INSN is cracked into 2 instructions
15265 by the processor (and therefore occupies 2 issue slots). */
15268 is_cracked_insn (rtx insn)
15270 if (!insn || !INSN_P (insn)
15271 || GET_CODE (PATTERN (insn)) == USE
15272 || GET_CODE (PATTERN (insn)) == CLOBBER)
15275 if (rs6000_sched_groups)
15277 enum attr_type type = get_attr_type (insn);
15278 if (type == TYPE_LOAD_U || type == TYPE_STORE_U
15279 || type == TYPE_FPLOAD_U || type == TYPE_FPSTORE_U
15280 || type == TYPE_FPLOAD_UX || type == TYPE_FPSTORE_UX
15281 || type == TYPE_LOAD_EXT || type == TYPE_DELAYED_CR
15282 || type == TYPE_COMPARE || type == TYPE_DELAYED_COMPARE
15283 || type == TYPE_IMUL_COMPARE || type == TYPE_LMUL_COMPARE
15284 || type == TYPE_IDIV || type == TYPE_LDIV
15285 || type == TYPE_INSERT_WORD)
15292 /* The function returns true if INSN can be issued only from
15293 the branch slot. */
15296 is_branch_slot_insn (rtx insn)
15298 if (!insn || !INSN_P (insn)
15299 || GET_CODE (PATTERN (insn)) == USE
15300 || GET_CODE (PATTERN (insn)) == CLOBBER)
15303 if (rs6000_sched_groups)
15305 enum attr_type type = get_attr_type (insn);
15306 if (type == TYPE_BRANCH || type == TYPE_JMPREG)
15314 /* A C statement (sans semicolon) to update the integer scheduling
15315 priority INSN_PRIORITY (INSN). Increase the priority to execute the
15316 INSN earlier, reduce the priority to execute INSN later. Do not
15317 define this macro if you do not need to adjust the scheduling
15318 priorities of insns. */
15321 rs6000_adjust_priority (rtx insn ATTRIBUTE_UNUSED, int priority)
15323 /* On machines (like the 750) which have asymmetric integer units,
15324 where one integer unit can do multiply and divides and the other
15325 can't, reduce the priority of multiply/divide so it is scheduled
15326 before other integer operations. */
15329 if (! INSN_P (insn))
15332 if (GET_CODE (PATTERN (insn)) == USE)
15335 switch (rs6000_cpu_attr) {
15337 switch (get_attr_type (insn))
15344 fprintf (stderr, "priority was %#x (%d) before adjustment\n",
15345 priority, priority);
15346 if (priority >= 0 && priority < 0x01000000)
15353 if (is_dispatch_slot_restricted (insn)
15354 && reload_completed
15355 && current_sched_info->sched_max_insns_priority
15356 && rs6000_sched_restricted_insns_priority)
15359 /* Prioritize insns that can be dispatched only in the first dispatch slot. */
15360 if (rs6000_sched_restricted_insns_priority == 1)
15361 /* Attach highest priority to insn. This means that in
15362 haifa-sched.c:ready_sort(), dispatch-slot restriction considerations
15363 precede 'priority' (critical path) considerations. */
15364 return current_sched_info->sched_max_insns_priority;
15365 else if (rs6000_sched_restricted_insns_priority == 2)
15366 /* Increase priority of insn by a minimal amount. This means that in
15367 haifa-sched.c:ready_sort(), only 'priority' (critical path) considerations
15368 precede dispatch-slot restriction considerations. */
15369 return (priority + 1);
15375 /* Return how many instructions the machine can issue per cycle. */
15378 rs6000_issue_rate (void)
15380 /* Use issue rate of 1 for first scheduling pass to decrease degradation. */
15381 if (!reload_completed)
15384 switch (rs6000_cpu_attr) {
15385 case CPU_RIOS1: /* ? */
15387 case CPU_PPC601: /* ? */
15410 /* Return how many instructions to look ahead for better insn
15414 rs6000_use_sched_lookahead (void)
15416 if (rs6000_cpu_attr == CPU_PPC8540)
15421 /* Determine is PAT refers to memory. */
15424 is_mem_ref (rtx pat)
15430 if (GET_CODE (pat) == MEM)
15433 /* Recursively process the pattern. */
15434 fmt = GET_RTX_FORMAT (GET_CODE (pat));
15436 for (i = GET_RTX_LENGTH (GET_CODE (pat)) - 1; i >= 0 && !ret; i--)
15439 ret |= is_mem_ref (XEXP (pat, i));
15440 else if (fmt[i] == 'E')
15441 for (j = XVECLEN (pat, i) - 1; j >= 0; j--)
15442 ret |= is_mem_ref (XVECEXP (pat, i, j));
15448 /* Determine if PAT is a PATTERN of a load insn. */
15451 is_load_insn1 (rtx pat)
15453 if (!pat || pat == NULL_RTX)
15456 if (GET_CODE (pat) == SET)
15457 return is_mem_ref (SET_SRC (pat));
15459 if (GET_CODE (pat) == PARALLEL)
15463 for (i = 0; i < XVECLEN (pat, 0); i++)
15464 if (is_load_insn1 (XVECEXP (pat, 0, i)))
15471 /* Determine if INSN loads from memory. */
15474 is_load_insn (rtx insn)
15476 if (!insn || !INSN_P (insn))
15479 if (GET_CODE (insn) == CALL_INSN)
15482 return is_load_insn1 (PATTERN (insn));
15485 /* Determine if PAT is a PATTERN of a store insn. */
15488 is_store_insn1 (rtx pat)
15490 if (!pat || pat == NULL_RTX)
15493 if (GET_CODE (pat) == SET)
15494 return is_mem_ref (SET_DEST (pat));
15496 if (GET_CODE (pat) == PARALLEL)
15500 for (i = 0; i < XVECLEN (pat, 0); i++)
15501 if (is_store_insn1 (XVECEXP (pat, 0, i)))
15508 /* Determine if INSN stores to memory. */
15511 is_store_insn (rtx insn)
15513 if (!insn || !INSN_P (insn))
15516 return is_store_insn1 (PATTERN (insn));
15519 /* Returns whether the dependence between INSN and NEXT is considered
15520 costly by the given target. */
15523 rs6000_is_costly_dependence (rtx insn, rtx next, rtx link, int cost, int distance)
15525 /* If the flag is not enbled - no dependence is considered costly;
15526 allow all dependent insns in the same group.
15527 This is the most aggressive option. */
15528 if (rs6000_sched_costly_dep == no_dep_costly)
15531 /* If the flag is set to 1 - a dependence is always considered costly;
15532 do not allow dependent instructions in the same group.
15533 This is the most conservative option. */
15534 if (rs6000_sched_costly_dep == all_deps_costly)
15537 if (rs6000_sched_costly_dep == store_to_load_dep_costly
15538 && is_load_insn (next)
15539 && is_store_insn (insn))
15540 /* Prevent load after store in the same group. */
15543 if (rs6000_sched_costly_dep == true_store_to_load_dep_costly
15544 && is_load_insn (next)
15545 && is_store_insn (insn)
15546 && (!link || (int) REG_NOTE_KIND (link) == 0))
15547 /* Prevent load after store in the same group if it is a true dependence. */
15550 /* The flag is set to X; dependences with latency >= X are considered costly,
15551 and will not be scheduled in the same group. */
15552 if (rs6000_sched_costly_dep <= max_dep_latency
15553 && ((cost - distance) >= (int)rs6000_sched_costly_dep))
15559 /* Return the next insn after INSN that is found before TAIL is reached,
15560 skipping any "non-active" insns - insns that will not actually occupy
15561 an issue slot. Return NULL_RTX if such an insn is not found. */
15564 get_next_active_insn (rtx insn, rtx tail)
15568 if (!insn || insn == tail)
15571 next_insn = NEXT_INSN (insn);
15574 && next_insn != tail
15575 && (GET_CODE(next_insn) == NOTE
15576 || GET_CODE (PATTERN (next_insn)) == USE
15577 || GET_CODE (PATTERN (next_insn)) == CLOBBER))
15579 next_insn = NEXT_INSN (next_insn);
15582 if (!next_insn || next_insn == tail)
15588 /* Return whether the presence of INSN causes a dispatch group termination
15589 of group WHICH_GROUP.
15591 If WHICH_GROUP == current_group, this function will return true if INSN
15592 causes the termination of the current group (i.e, the dispatch group to
15593 which INSN belongs). This means that INSN will be the last insn in the
15594 group it belongs to.
15596 If WHICH_GROUP == previous_group, this function will return true if INSN
15597 causes the termination of the previous group (i.e, the dispatch group that
15598 precedes the group to which INSN belongs). This means that INSN will be
15599 the first insn in the group it belongs to). */
15602 insn_terminates_group_p (rtx insn, enum group_termination which_group)
15604 enum attr_type type;
15609 type = get_attr_type (insn);
15611 if (is_microcoded_insn (insn))
15614 if (which_group == current_group)
15616 if (is_branch_slot_insn (insn))
15620 else if (which_group == previous_group)
15622 if (is_dispatch_slot_restricted (insn))
15630 /* Return true if it is recommended to keep NEXT_INSN "far" (in a separate
15631 dispatch group) from the insns in GROUP_INSNS. Return false otherwise. */
15634 is_costly_group (rtx *group_insns, rtx next_insn)
15639 int issue_rate = rs6000_issue_rate ();
15641 for (i = 0; i < issue_rate; i++)
15643 rtx insn = group_insns[i];
15646 for (link = INSN_DEPEND (insn); link != 0; link = XEXP (link, 1))
15648 rtx next = XEXP (link, 0);
15649 if (next == next_insn)
15651 cost = insn_cost (insn, link, next_insn);
15652 if (rs6000_is_costly_dependence (insn, next_insn, link, cost, 0))
15661 /* Utility of the function redefine_groups.
15662 Check if it is too costly to schedule NEXT_INSN together with GROUP_INSNS
15663 in the same dispatch group. If so, insert nops before NEXT_INSN, in order
15664 to keep it "far" (in a separate group) from GROUP_INSNS, following
15665 one of the following schemes, depending on the value of the flag
15666 -minsert_sched_nops = X:
15667 (1) X == sched_finish_regroup_exact: insert exactly as many nops as needed
15668 in order to force NEXT_INSN into a separate group.
15669 (2) X < sched_finish_regroup_exact: insert exactly X nops.
15670 GROUP_END, CAN_ISSUE_MORE and GROUP_COUNT record the state after nop
15671 insertion (has a group just ended, how many vacant issue slots remain in the
15672 last group, and how many dispatch groups were encountered so far). */
15675 force_new_group (int sched_verbose, FILE *dump, rtx *group_insns, rtx next_insn,
15676 bool *group_end, int can_issue_more, int *group_count)
15680 int issue_rate = rs6000_issue_rate ();
15681 bool end = *group_end;
15684 if (next_insn == NULL_RTX)
15685 return can_issue_more;
15687 if (rs6000_sched_insert_nops > sched_finish_regroup_exact)
15688 return can_issue_more;
15690 force = is_costly_group (group_insns, next_insn);
15692 return can_issue_more;
15694 if (sched_verbose > 6)
15695 fprintf (dump,"force: group count = %d, can_issue_more = %d\n",
15696 *group_count ,can_issue_more);
15698 if (rs6000_sched_insert_nops == sched_finish_regroup_exact)
15701 can_issue_more = 0;
15703 /* Since only a branch can be issued in the last issue_slot, it is
15704 sufficient to insert 'can_issue_more - 1' nops if next_insn is not
15705 a branch. If next_insn is a branch, we insert 'can_issue_more' nops;
15706 in this case the last nop will start a new group and the branch will be
15707 forced to the new group. */
15708 if (can_issue_more && !is_branch_slot_insn (next_insn))
15711 while (can_issue_more > 0)
15714 emit_insn_before (nop, next_insn);
15722 if (rs6000_sched_insert_nops < sched_finish_regroup_exact)
15724 int n_nops = rs6000_sched_insert_nops;
15726 /* Nops can't be issued from the branch slot, so the effective
15727 issue_rate for nops is 'issue_rate - 1'. */
15728 if (can_issue_more == 0)
15729 can_issue_more = issue_rate;
15731 if (can_issue_more == 0)
15733 can_issue_more = issue_rate - 1;
15736 for (i = 0; i < issue_rate; i++)
15738 group_insns[i] = 0;
15745 emit_insn_before (nop, next_insn);
15746 if (can_issue_more == issue_rate - 1) /* new group begins */
15749 if (can_issue_more == 0)
15751 can_issue_more = issue_rate - 1;
15754 for (i = 0; i < issue_rate; i++)
15756 group_insns[i] = 0;
15762 /* Scale back relative to 'issue_rate' (instead of 'issue_rate - 1'). */
15765 *group_end = /* Is next_insn going to start a new group? */
15767 || (can_issue_more == 1 && !is_branch_slot_insn (next_insn))
15768 || (can_issue_more <= 2 && is_cracked_insn (next_insn))
15769 || (can_issue_more < issue_rate &&
15770 insn_terminates_group_p (next_insn, previous_group)));
15771 if (*group_end && end)
15774 if (sched_verbose > 6)
15775 fprintf (dump, "done force: group count = %d, can_issue_more = %d\n",
15776 *group_count, can_issue_more);
15777 return can_issue_more;
15780 return can_issue_more;
15783 /* This function tries to synch the dispatch groups that the compiler "sees"
15784 with the dispatch groups that the processor dispatcher is expected to
15785 form in practice. It tries to achieve this synchronization by forcing the
15786 estimated processor grouping on the compiler (as opposed to the function
15787 'pad_goups' which tries to force the scheduler's grouping on the processor).
15789 The function scans the insn sequence between PREV_HEAD_INSN and TAIL and
15790 examines the (estimated) dispatch groups that will be formed by the processor
15791 dispatcher. It marks these group boundaries to reflect the estimated
15792 processor grouping, overriding the grouping that the scheduler had marked.
15793 Depending on the value of the flag '-minsert-sched-nops' this function can
15794 force certain insns into separate groups or force a certain distance between
15795 them by inserting nops, for example, if there exists a "costly dependence"
15798 The function estimates the group boundaries that the processor will form as
15799 folllows: It keeps track of how many vacant issue slots are available after
15800 each insn. A subsequent insn will start a new group if one of the following
15802 - no more vacant issue slots remain in the current dispatch group.
15803 - only the last issue slot, which is the branch slot, is vacant, but the next
15804 insn is not a branch.
15805 - only the last 2 or less issue slots, including the branch slot, are vacant,
15806 which means that a cracked insn (which occupies two issue slots) can't be
15807 issued in this group.
15808 - less than 'issue_rate' slots are vacant, and the next insn always needs to
15809 start a new group. */
15812 redefine_groups (FILE *dump, int sched_verbose, rtx prev_head_insn, rtx tail)
15814 rtx insn, next_insn;
15816 int can_issue_more;
15819 int group_count = 0;
15823 issue_rate = rs6000_issue_rate ();
15824 group_insns = alloca (issue_rate * sizeof (rtx));
15825 for (i = 0; i < issue_rate; i++)
15827 group_insns[i] = 0;
15829 can_issue_more = issue_rate;
15831 insn = get_next_active_insn (prev_head_insn, tail);
15834 while (insn != NULL_RTX)
15836 slot = (issue_rate - can_issue_more);
15837 group_insns[slot] = insn;
15839 rs6000_variable_issue (dump, sched_verbose, insn, can_issue_more);
15840 if (insn_terminates_group_p (insn, current_group))
15841 can_issue_more = 0;
15843 next_insn = get_next_active_insn (insn, tail);
15844 if (next_insn == NULL_RTX)
15845 return group_count + 1;
15847 group_end = /* Is next_insn going to start a new group? */
15848 (can_issue_more == 0
15849 || (can_issue_more == 1 && !is_branch_slot_insn (next_insn))
15850 || (can_issue_more <= 2 && is_cracked_insn (next_insn))
15851 || (can_issue_more < issue_rate &&
15852 insn_terminates_group_p (next_insn, previous_group)));
15854 can_issue_more = force_new_group (sched_verbose, dump, group_insns,
15855 next_insn, &group_end, can_issue_more, &group_count);
15860 can_issue_more = 0;
15861 for (i = 0; i < issue_rate; i++)
15863 group_insns[i] = 0;
15867 if (GET_MODE (next_insn) == TImode && can_issue_more)
15868 PUT_MODE(next_insn, VOIDmode);
15869 else if (!can_issue_more && GET_MODE (next_insn) != TImode)
15870 PUT_MODE (next_insn, TImode);
15873 if (can_issue_more == 0)
15874 can_issue_more = issue_rate;
15877 return group_count;
15880 /* Scan the insn sequence between PREV_HEAD_INSN and TAIL and examine the
15881 dispatch group boundaries that the scheduler had marked. Pad with nops
15882 any dispatch groups which have vacant issue slots, in order to force the
15883 scheduler's grouping on the processor dispatcher. The function
15884 returns the number of dispatch groups found. */
15887 pad_groups (FILE *dump, int sched_verbose, rtx prev_head_insn, rtx tail)
15889 rtx insn, next_insn;
15892 int can_issue_more;
15894 int group_count = 0;
15896 /* Initialize issue_rate. */
15897 issue_rate = rs6000_issue_rate ();
15898 can_issue_more = issue_rate;
15900 insn = get_next_active_insn (prev_head_insn, tail);
15901 next_insn = get_next_active_insn (insn, tail);
15903 while (insn != NULL_RTX)
15906 rs6000_variable_issue (dump, sched_verbose, insn, can_issue_more);
15908 group_end = (next_insn == NULL_RTX || GET_MODE (next_insn) == TImode);
15910 if (next_insn == NULL_RTX)
15915 /* If the scheduler had marked group termination at this location
15916 (between insn and next_indn), and neither insn nor next_insn will
15917 force group termination, pad the group with nops to force group
15920 && (rs6000_sched_insert_nops == sched_finish_pad_groups)
15921 && !insn_terminates_group_p (insn, current_group)
15922 && !insn_terminates_group_p (next_insn, previous_group))
15924 if (!is_branch_slot_insn(next_insn))
15927 while (can_issue_more)
15930 emit_insn_before (nop, next_insn);
15935 can_issue_more = issue_rate;
15940 next_insn = get_next_active_insn (insn, tail);
15943 return group_count;
15946 /* The following function is called at the end of scheduling BB.
15947 After reload, it inserts nops at insn group bundling. */
15950 rs6000_sched_finish (FILE *dump, int sched_verbose)
15955 fprintf (dump, "=== Finishing schedule.\n");
15957 if (reload_completed && rs6000_sched_groups)
15959 if (rs6000_sched_insert_nops == sched_finish_none)
15962 if (rs6000_sched_insert_nops == sched_finish_pad_groups)
15963 n_groups = pad_groups (dump, sched_verbose,
15964 current_sched_info->prev_head,
15965 current_sched_info->next_tail);
15967 n_groups = redefine_groups (dump, sched_verbose,
15968 current_sched_info->prev_head,
15969 current_sched_info->next_tail);
15971 if (sched_verbose >= 6)
15973 fprintf (dump, "ngroups = %d\n", n_groups);
15974 print_rtl (dump, current_sched_info->prev_head);
15975 fprintf (dump, "Done finish_sched\n");
15980 /* Length in units of the trampoline for entering a nested function. */
15983 rs6000_trampoline_size (void)
15987 switch (DEFAULT_ABI)
15993 ret = (TARGET_32BIT) ? 12 : 24;
15998 ret = (TARGET_32BIT) ? 40 : 48;
16005 /* Emit RTL insns to initialize the variable parts of a trampoline.
16006 FNADDR is an RTX for the address of the function's pure code.
16007 CXT is an RTX for the static chain value for the function. */
16010 rs6000_initialize_trampoline (rtx addr, rtx fnaddr, rtx cxt)
16012 enum machine_mode pmode = Pmode;
16013 int regsize = (TARGET_32BIT) ? 4 : 8;
16014 rtx ctx_reg = force_reg (pmode, cxt);
16016 switch (DEFAULT_ABI)
16021 /* Macros to shorten the code expansions below. */
16022 #define MEM_DEREF(addr) gen_rtx_MEM (pmode, memory_address (pmode, addr))
16023 #define MEM_PLUS(addr,offset) \
16024 gen_rtx_MEM (pmode, memory_address (pmode, plus_constant (addr, offset)))
16026 /* Under AIX, just build the 3 word function descriptor */
16029 rtx fn_reg = gen_reg_rtx (pmode);
16030 rtx toc_reg = gen_reg_rtx (pmode);
16031 emit_move_insn (fn_reg, MEM_DEREF (fnaddr));
16032 emit_move_insn (toc_reg, MEM_PLUS (fnaddr, regsize));
16033 emit_move_insn (MEM_DEREF (addr), fn_reg);
16034 emit_move_insn (MEM_PLUS (addr, regsize), toc_reg);
16035 emit_move_insn (MEM_PLUS (addr, 2*regsize), ctx_reg);
16039 /* Under V.4/eabi/darwin, __trampoline_setup does the real work. */
16042 emit_library_call (gen_rtx_SYMBOL_REF (SImode, "__trampoline_setup"),
16043 FALSE, VOIDmode, 4,
16045 GEN_INT (rs6000_trampoline_size ()), SImode,
16055 /* Table of valid machine attributes. */
16057 const struct attribute_spec rs6000_attribute_table[] =
16059 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
16060 { "altivec", 1, 1, false, true, false, rs6000_handle_altivec_attribute },
16061 { "longcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
16062 { "shortcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
16063 { NULL, 0, 0, false, false, false, NULL }
16066 /* Handle the "altivec" attribute. The attribute may have
16067 arguments as follows:
16069 __attribute__((altivec(vector__)))
16070 __attribute__((altivec(pixel__))) (always followed by 'unsigned short')
16071 __attribute__((altivec(bool__))) (always followed by 'unsigned')
16073 and may appear more than once (e.g., 'vector bool char') in a
16074 given declaration. */
16077 rs6000_handle_altivec_attribute (tree *node, tree name, tree args,
16078 int flags ATTRIBUTE_UNUSED,
16079 bool *no_add_attrs)
16081 tree type = *node, result = NULL_TREE;
16082 enum machine_mode mode;
16085 = ((args && TREE_CODE (args) == TREE_LIST && TREE_VALUE (args)
16086 && TREE_CODE (TREE_VALUE (args)) == IDENTIFIER_NODE)
16087 ? *IDENTIFIER_POINTER (TREE_VALUE (args))
16090 while (POINTER_TYPE_P (type)
16091 || TREE_CODE (type) == FUNCTION_TYPE
16092 || TREE_CODE (type) == METHOD_TYPE
16093 || TREE_CODE (type) == ARRAY_TYPE)
16094 type = TREE_TYPE (type);
16096 mode = TYPE_MODE (type);
16098 if (rs6000_warn_altivec_long
16099 && (type == long_unsigned_type_node || type == long_integer_type_node))
16100 warning ("use of 'long' in AltiVec types is deprecated; use 'int'");
16102 switch (altivec_type)
16105 unsigned_p = TYPE_UNSIGNED (type);
16109 result = (unsigned_p ? unsigned_V4SI_type_node : V4SI_type_node);
16112 result = (unsigned_p ? unsigned_V8HI_type_node : V8HI_type_node);
16115 result = (unsigned_p ? unsigned_V16QI_type_node : V16QI_type_node);
16117 case SFmode: result = V4SF_type_node; break;
16118 /* If the user says 'vector int bool', we may be handed the 'bool'
16119 attribute _before_ the 'vector' attribute, and so select the proper
16120 type in the 'b' case below. */
16121 case V4SImode: case V8HImode: case V16QImode: result = type;
16128 case SImode: case V4SImode: result = bool_V4SI_type_node; break;
16129 case HImode: case V8HImode: result = bool_V8HI_type_node; break;
16130 case QImode: case V16QImode: result = bool_V16QI_type_node;
16137 case V8HImode: result = pixel_V8HI_type_node;
16143 if (result && result != type && TYPE_READONLY (type))
16144 result = build_qualified_type (result, TYPE_QUAL_CONST);
16146 *no_add_attrs = true; /* No need to hang on to the attribute. */
16149 warning ("`%s' attribute ignored", IDENTIFIER_POINTER (name));
16151 *node = reconstruct_complex_type (*node, result);
16156 /* AltiVec defines four built-in scalar types that serve as vector
16157 elements; we must teach the compiler how to mangle them. */
16159 static const char *
16160 rs6000_mangle_fundamental_type (tree type)
16162 if (type == bool_char_type_node) return "U6__boolc";
16163 if (type == bool_short_type_node) return "U6__bools";
16164 if (type == pixel_type_node) return "u7__pixel";
16165 if (type == bool_int_type_node) return "U6__booli";
16167 /* For all other types, use normal C++ mangling. */
16171 /* Handle a "longcall" or "shortcall" attribute; arguments as in
16172 struct attribute_spec.handler. */
16175 rs6000_handle_longcall_attribute (tree *node, tree name,
16176 tree args ATTRIBUTE_UNUSED,
16177 int flags ATTRIBUTE_UNUSED,
16178 bool *no_add_attrs)
16180 if (TREE_CODE (*node) != FUNCTION_TYPE
16181 && TREE_CODE (*node) != FIELD_DECL
16182 && TREE_CODE (*node) != TYPE_DECL)
16184 warning ("`%s' attribute only applies to functions",
16185 IDENTIFIER_POINTER (name));
16186 *no_add_attrs = true;
16192 /* Set longcall attributes on all functions declared when
16193 rs6000_default_long_calls is true. */
16195 rs6000_set_default_type_attributes (tree type)
16197 if (rs6000_default_long_calls
16198 && (TREE_CODE (type) == FUNCTION_TYPE
16199 || TREE_CODE (type) == METHOD_TYPE))
16200 TYPE_ATTRIBUTES (type) = tree_cons (get_identifier ("longcall"),
16202 TYPE_ATTRIBUTES (type));
16205 /* Return a reference suitable for calling a function with the
16206 longcall attribute. */
16209 rs6000_longcall_ref (rtx call_ref)
16211 const char *call_name;
16214 if (GET_CODE (call_ref) != SYMBOL_REF)
16217 /* System V adds '.' to the internal name, so skip them. */
16218 call_name = XSTR (call_ref, 0);
16219 if (*call_name == '.')
16221 while (*call_name == '.')
16224 node = get_identifier (call_name);
16225 call_ref = gen_rtx_SYMBOL_REF (VOIDmode, IDENTIFIER_POINTER (node));
16228 return force_reg (Pmode, call_ref);
16231 #ifdef USING_ELFOS_H
16233 /* A C statement or statements to switch to the appropriate section
16234 for output of RTX in mode MODE. You can assume that RTX is some
16235 kind of constant in RTL. The argument MODE is redundant except in
16236 the case of a `const_int' rtx. Select the section by calling
16237 `text_section' or one of the alternatives for other sections.
16239 Do not define this macro if you put all constants in the read-only
16243 rs6000_elf_select_rtx_section (enum machine_mode mode, rtx x,
16244 unsigned HOST_WIDE_INT align)
16246 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
16249 default_elf_select_rtx_section (mode, x, align);
16252 /* A C statement or statements to switch to the appropriate
16253 section for output of DECL. DECL is either a `VAR_DECL' node
16254 or a constant of some sort. RELOC indicates whether forming
16255 the initial value of DECL requires link-time relocations. */
16258 rs6000_elf_select_section (tree decl, int reloc,
16259 unsigned HOST_WIDE_INT align)
16261 /* Pretend that we're always building for a shared library when
16262 ABI_AIX, because otherwise we end up with dynamic relocations
16263 in read-only sections. This happens for function pointers,
16264 references to vtables in typeinfo, and probably other cases. */
16265 default_elf_select_section_1 (decl, reloc, align,
16266 flag_pic || DEFAULT_ABI == ABI_AIX);
16269 /* A C statement to build up a unique section name, expressed as a
16270 STRING_CST node, and assign it to DECL_SECTION_NAME (decl).
16271 RELOC indicates whether the initial value of EXP requires
16272 link-time relocations. If you do not define this macro, GCC will use
16273 the symbol name prefixed by `.' as the section name. Note - this
16274 macro can now be called for uninitialized data items as well as
16275 initialized data and functions. */
16278 rs6000_elf_unique_section (tree decl, int reloc)
16280 /* As above, pretend that we're always building for a shared library
16281 when ABI_AIX, to avoid dynamic relocations in read-only sections. */
16282 default_unique_section_1 (decl, reloc,
16283 flag_pic || DEFAULT_ABI == ABI_AIX);
16286 /* For a SYMBOL_REF, set generic flags and then perform some
16287 target-specific processing.
16289 When the AIX ABI is requested on a non-AIX system, replace the
16290 function name with the real name (with a leading .) rather than the
16291 function descriptor name. This saves a lot of overriding code to
16292 read the prefixes. */
16295 rs6000_elf_encode_section_info (tree decl, rtx rtl, int first)
16297 default_encode_section_info (decl, rtl, first);
16300 && TREE_CODE (decl) == FUNCTION_DECL
16302 && DEFAULT_ABI == ABI_AIX)
16304 rtx sym_ref = XEXP (rtl, 0);
16305 size_t len = strlen (XSTR (sym_ref, 0));
16306 char *str = alloca (len + 2);
16308 memcpy (str + 1, XSTR (sym_ref, 0), len + 1);
16309 XSTR (sym_ref, 0) = ggc_alloc_string (str, len + 1);
16314 rs6000_elf_in_small_data_p (tree decl)
16316 if (rs6000_sdata == SDATA_NONE)
16319 if (TREE_CODE (decl) == VAR_DECL && DECL_SECTION_NAME (decl))
16321 const char *section = TREE_STRING_POINTER (DECL_SECTION_NAME (decl));
16322 if (strcmp (section, ".sdata") == 0
16323 || strcmp (section, ".sdata2") == 0
16324 || strcmp (section, ".sbss") == 0
16325 || strcmp (section, ".sbss2") == 0
16326 || strcmp (section, ".PPC.EMB.sdata0") == 0
16327 || strcmp (section, ".PPC.EMB.sbss0") == 0)
16332 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
16335 && (unsigned HOST_WIDE_INT) size <= g_switch_value
16336 /* If it's not public, and we're not going to reference it there,
16337 there's no need to put it in the small data section. */
16338 && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)))
16345 #endif /* USING_ELFOS_H */
16348 /* Return a REG that occurs in ADDR with coefficient 1.
16349 ADDR can be effectively incremented by incrementing REG.
16351 r0 is special and we must not select it as an address
16352 register by this routine since our caller will try to
16353 increment the returned register via an "la" instruction. */
16356 find_addr_reg (rtx addr)
16358 while (GET_CODE (addr) == PLUS)
16360 if (GET_CODE (XEXP (addr, 0)) == REG
16361 && REGNO (XEXP (addr, 0)) != 0)
16362 addr = XEXP (addr, 0);
16363 else if (GET_CODE (XEXP (addr, 1)) == REG
16364 && REGNO (XEXP (addr, 1)) != 0)
16365 addr = XEXP (addr, 1);
16366 else if (CONSTANT_P (XEXP (addr, 0)))
16367 addr = XEXP (addr, 1);
16368 else if (CONSTANT_P (XEXP (addr, 1)))
16369 addr = XEXP (addr, 0);
16373 if (GET_CODE (addr) == REG && REGNO (addr) != 0)
16379 rs6000_fatal_bad_address (rtx op)
16381 fatal_insn ("bad address", op);
16387 /* Returns 1 if OP is either a symbol reference or a sum of a symbol
16388 reference and a constant. */
16391 symbolic_operand (rtx op)
16393 switch (GET_CODE (op))
16400 return (GET_CODE (op) == SYMBOL_REF ||
16401 (GET_CODE (XEXP (op, 0)) == SYMBOL_REF
16402 || GET_CODE (XEXP (op, 0)) == LABEL_REF)
16403 && GET_CODE (XEXP (op, 1)) == CONST_INT);
16412 static tree branch_island_list = 0;
16414 /* Remember to generate a branch island for far calls to the given
16418 add_compiler_branch_island (tree label_name, tree function_name, int line_number)
16420 tree branch_island = build_tree_list (function_name, label_name);
16421 TREE_TYPE (branch_island) = build_int_cst (NULL_TREE, line_number, 0);
16422 TREE_CHAIN (branch_island) = branch_island_list;
16423 branch_island_list = branch_island;
16426 #define BRANCH_ISLAND_LABEL_NAME(BRANCH_ISLAND) TREE_VALUE (BRANCH_ISLAND)
16427 #define BRANCH_ISLAND_FUNCTION_NAME(BRANCH_ISLAND) TREE_PURPOSE (BRANCH_ISLAND)
16428 #define BRANCH_ISLAND_LINE_NUMBER(BRANCH_ISLAND) \
16429 TREE_INT_CST_LOW (TREE_TYPE (BRANCH_ISLAND))
16431 /* Generate far-jump branch islands for everything on the
16432 branch_island_list. Invoked immediately after the last instruction
16433 of the epilogue has been emitted; the branch-islands must be
16434 appended to, and contiguous with, the function body. Mach-O stubs
16435 are generated in machopic_output_stub(). */
16438 macho_branch_islands (void)
16441 tree branch_island;
16443 for (branch_island = branch_island_list;
16445 branch_island = TREE_CHAIN (branch_island))
16447 const char *label =
16448 IDENTIFIER_POINTER (BRANCH_ISLAND_LABEL_NAME (branch_island));
16450 IDENTIFIER_POINTER (BRANCH_ISLAND_FUNCTION_NAME (branch_island));
16451 char name_buf[512];
16452 /* Cheap copy of the details from the Darwin ASM_OUTPUT_LABELREF(). */
16453 if (name[0] == '*' || name[0] == '&')
16454 strcpy (name_buf, name+1);
16458 strcpy (name_buf+1, name);
16460 strcpy (tmp_buf, "\n");
16461 strcat (tmp_buf, label);
16462 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
16463 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
16464 fprintf (asm_out_file, "\t.stabd 68,0," HOST_WIDE_INT_PRINT_UNSIGNED "\n",
16465 BRANCH_ISLAND_LINE_NUMBER(branch_island));
16466 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
16469 strcat (tmp_buf, ":\n\tmflr r0\n\tbcl 20,31,");
16470 strcat (tmp_buf, label);
16471 strcat (tmp_buf, "_pic\n");
16472 strcat (tmp_buf, label);
16473 strcat (tmp_buf, "_pic:\n\tmflr r11\n");
16475 strcat (tmp_buf, "\taddis r11,r11,ha16(");
16476 strcat (tmp_buf, name_buf);
16477 strcat (tmp_buf, " - ");
16478 strcat (tmp_buf, label);
16479 strcat (tmp_buf, "_pic)\n");
16481 strcat (tmp_buf, "\tmtlr r0\n");
16483 strcat (tmp_buf, "\taddi r12,r11,lo16(");
16484 strcat (tmp_buf, name_buf);
16485 strcat (tmp_buf, " - ");
16486 strcat (tmp_buf, label);
16487 strcat (tmp_buf, "_pic)\n");
16489 strcat (tmp_buf, "\tmtctr r12\n\tbctr\n");
16493 strcat (tmp_buf, ":\nlis r12,hi16(");
16494 strcat (tmp_buf, name_buf);
16495 strcat (tmp_buf, ")\n\tori r12,r12,lo16(");
16496 strcat (tmp_buf, name_buf);
16497 strcat (tmp_buf, ")\n\tmtctr r12\n\tbctr");
16499 output_asm_insn (tmp_buf, 0);
16500 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
16501 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
16502 fprintf(asm_out_file, "\t.stabd 68,0," HOST_WIDE_INT_PRINT_UNSIGNED "\n",
16503 BRANCH_ISLAND_LINE_NUMBER (branch_island));
16504 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
16507 branch_island_list = 0;
16510 /* NO_PREVIOUS_DEF checks in the link list whether the function name is
16511 already there or not. */
16514 no_previous_def (tree function_name)
16516 tree branch_island;
16517 for (branch_island = branch_island_list;
16519 branch_island = TREE_CHAIN (branch_island))
16520 if (function_name == BRANCH_ISLAND_FUNCTION_NAME (branch_island))
16525 /* GET_PREV_LABEL gets the label name from the previous definition of
16529 get_prev_label (tree function_name)
16531 tree branch_island;
16532 for (branch_island = branch_island_list;
16534 branch_island = TREE_CHAIN (branch_island))
16535 if (function_name == BRANCH_ISLAND_FUNCTION_NAME (branch_island))
16536 return BRANCH_ISLAND_LABEL_NAME (branch_island);
16540 /* INSN is either a function call or a millicode call. It may have an
16541 unconditional jump in its delay slot.
16543 CALL_DEST is the routine we are calling. */
16546 output_call (rtx insn, rtx *operands, int dest_operand_number, int cookie_operand_number)
16548 static char buf[256];
16549 if (GET_CODE (operands[dest_operand_number]) == SYMBOL_REF
16550 && (INTVAL (operands[cookie_operand_number]) & CALL_LONG))
16553 tree funname = get_identifier (XSTR (operands[dest_operand_number], 0));
16555 if (no_previous_def (funname))
16557 int line_number = 0;
16558 rtx label_rtx = gen_label_rtx ();
16559 char *label_buf, temp_buf[256];
16560 ASM_GENERATE_INTERNAL_LABEL (temp_buf, "L",
16561 CODE_LABEL_NUMBER (label_rtx));
16562 label_buf = temp_buf[0] == '*' ? temp_buf + 1 : temp_buf;
16563 labelname = get_identifier (label_buf);
16564 for (; insn && GET_CODE (insn) != NOTE; insn = PREV_INSN (insn));
16566 line_number = NOTE_LINE_NUMBER (insn);
16567 add_compiler_branch_island (labelname, funname, line_number);
16570 labelname = get_prev_label (funname);
16572 /* "jbsr foo, L42" is Mach-O for "Link as 'bl foo' if a 'bl'
16573 instruction will reach 'foo', otherwise link as 'bl L42'".
16574 "L42" should be a 'branch island', that will do a far jump to
16575 'foo'. Branch islands are generated in
16576 macho_branch_islands(). */
16577 sprintf (buf, "jbsr %%z%d,%.246s",
16578 dest_operand_number, IDENTIFIER_POINTER (labelname));
16581 sprintf (buf, "bl %%z%d", dest_operand_number);
16585 #endif /* TARGET_MACHO */
16587 /* Generate PIC and indirect symbol stubs. */
16590 machopic_output_stub (FILE *file, const char *symb, const char *stub)
16592 unsigned int length;
16593 char *symbol_name, *lazy_ptr_name;
16594 char *local_label_0;
16595 static int label = 0;
16597 /* Lose our funky encoding stuff so it doesn't contaminate the stub. */
16598 symb = (*targetm.strip_name_encoding) (symb);
16601 length = strlen (symb);
16602 symbol_name = alloca (length + 32);
16603 GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name, symb, length);
16605 lazy_ptr_name = alloca (length + 32);
16606 GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name, symb, length);
16609 machopic_picsymbol_stub1_section ();
16611 machopic_symbol_stub1_section ();
16615 fprintf (file, "\t.align 5\n");
16617 fprintf (file, "%s:\n", stub);
16618 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
16621 local_label_0 = alloca (sizeof("\"L0000000000$spb\""));
16622 sprintf (local_label_0, "\"L%011d$spb\"", label);
16624 fprintf (file, "\tmflr r0\n");
16625 fprintf (file, "\tbcl 20,31,%s\n", local_label_0);
16626 fprintf (file, "%s:\n\tmflr r11\n", local_label_0);
16627 fprintf (file, "\taddis r11,r11,ha16(%s-%s)\n",
16628 lazy_ptr_name, local_label_0);
16629 fprintf (file, "\tmtlr r0\n");
16630 fprintf (file, "\tlwzu r12,lo16(%s-%s)(r11)\n",
16631 lazy_ptr_name, local_label_0);
16632 fprintf (file, "\tmtctr r12\n");
16633 fprintf (file, "\tbctr\n");
16637 fprintf (file, "\t.align 4\n");
16639 fprintf (file, "%s:\n", stub);
16640 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
16642 fprintf (file, "\tlis r11,ha16(%s)\n", lazy_ptr_name);
16643 fprintf (file, "\tlwzu r12,lo16(%s)(r11)\n", lazy_ptr_name);
16644 fprintf (file, "\tmtctr r12\n");
16645 fprintf (file, "\tbctr\n");
16648 machopic_lazy_symbol_ptr_section ();
16649 fprintf (file, "%s:\n", lazy_ptr_name);
16650 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
16651 fprintf (file, "%sdyld_stub_binding_helper\n",
16652 (TARGET_64BIT ? DOUBLE_INT_ASM_OP : "\t.long\t"));
16655 /* Legitimize PIC addresses. If the address is already
16656 position-independent, we return ORIG. Newly generated
16657 position-independent addresses go into a reg. This is REG if non
16658 zero, otherwise we allocate register(s) as necessary. */
16660 #define SMALL_INT(X) ((unsigned) (INTVAL(X) + 0x8000) < 0x10000)
16663 rs6000_machopic_legitimize_pic_address (rtx orig, enum machine_mode mode,
16668 if (reg == NULL && ! reload_in_progress && ! reload_completed)
16669 reg = gen_reg_rtx (Pmode);
16671 if (GET_CODE (orig) == CONST)
16673 if (GET_CODE (XEXP (orig, 0)) == PLUS
16674 && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
16677 if (GET_CODE (XEXP (orig, 0)) == PLUS)
16679 /* Use a different reg for the intermediate value, as
16680 it will be marked UNCHANGING. */
16681 rtx reg_temp = no_new_pseudos ? reg : gen_reg_rtx (Pmode);
16684 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 0),
16687 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 1),
16693 if (GET_CODE (offset) == CONST_INT)
16695 if (SMALL_INT (offset))
16696 return plus_constant (base, INTVAL (offset));
16697 else if (! reload_in_progress && ! reload_completed)
16698 offset = force_reg (Pmode, offset);
16701 rtx mem = force_const_mem (Pmode, orig);
16702 return machopic_legitimize_pic_address (mem, Pmode, reg);
16705 return gen_rtx_PLUS (Pmode, base, offset);
16708 /* Fall back on generic machopic code. */
16709 return machopic_legitimize_pic_address (orig, mode, reg);
16712 /* This is just a placeholder to make linking work without having to
16713 add this to the generic Darwin EXTRA_SECTIONS. If -mcall-aix is
16714 ever needed for Darwin (not too likely!) this would have to get a
16715 real definition. */
16722 #endif /* TARGET_MACHO */
16725 static unsigned int
16726 rs6000_elf_section_type_flags (tree decl, const char *name, int reloc)
16728 return default_section_type_flags_1 (decl, name, reloc,
16729 flag_pic || DEFAULT_ABI == ABI_AIX);
16732 /* Record an element in the table of global constructors. SYMBOL is
16733 a SYMBOL_REF of the function to be called; PRIORITY is a number
16734 between 0 and MAX_INIT_PRIORITY.
16736 This differs from default_named_section_asm_out_constructor in
16737 that we have special handling for -mrelocatable. */
16740 rs6000_elf_asm_out_constructor (rtx symbol, int priority)
16742 const char *section = ".ctors";
16745 if (priority != DEFAULT_INIT_PRIORITY)
16747 sprintf (buf, ".ctors.%.5u",
16748 /* Invert the numbering so the linker puts us in the proper
16749 order; constructors are run from right to left, and the
16750 linker sorts in increasing order. */
16751 MAX_INIT_PRIORITY - priority);
16755 named_section_flags (section, SECTION_WRITE);
16756 assemble_align (POINTER_SIZE);
16758 if (TARGET_RELOCATABLE)
16760 fputs ("\t.long (", asm_out_file);
16761 output_addr_const (asm_out_file, symbol);
16762 fputs (")@fixup\n", asm_out_file);
16765 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
16769 rs6000_elf_asm_out_destructor (rtx symbol, int priority)
16771 const char *section = ".dtors";
16774 if (priority != DEFAULT_INIT_PRIORITY)
16776 sprintf (buf, ".dtors.%.5u",
16777 /* Invert the numbering so the linker puts us in the proper
16778 order; constructors are run from right to left, and the
16779 linker sorts in increasing order. */
16780 MAX_INIT_PRIORITY - priority);
16784 named_section_flags (section, SECTION_WRITE);
16785 assemble_align (POINTER_SIZE);
16787 if (TARGET_RELOCATABLE)
16789 fputs ("\t.long (", asm_out_file);
16790 output_addr_const (asm_out_file, symbol);
16791 fputs (")@fixup\n", asm_out_file);
16794 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
16798 rs6000_elf_declare_function_name (FILE *file, const char *name, tree decl)
16802 fputs ("\t.section\t\".opd\",\"aw\"\n\t.align 3\n", file);
16803 ASM_OUTPUT_LABEL (file, name);
16804 fputs (DOUBLE_INT_ASM_OP, file);
16806 assemble_name (file, name);
16807 fputs (",.TOC.@tocbase,0\n\t.previous\n\t.size\t", file);
16808 assemble_name (file, name);
16809 fputs (",24\n\t.type\t.", file);
16810 assemble_name (file, name);
16811 fputs (",@function\n", file);
16812 if (TREE_PUBLIC (decl) && ! DECL_WEAK (decl))
16814 fputs ("\t.globl\t.", file);
16815 assemble_name (file, name);
16818 ASM_DECLARE_RESULT (file, DECL_RESULT (decl));
16820 ASM_OUTPUT_LABEL (file, name);
16824 if (TARGET_RELOCATABLE
16825 && (get_pool_size () != 0 || current_function_profile)
16830 (*targetm.asm_out.internal_label) (file, "LCL", rs6000_pic_labelno);
16832 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
16833 fprintf (file, "\t.long ");
16834 assemble_name (file, buf);
16836 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
16837 assemble_name (file, buf);
16841 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
16842 ASM_DECLARE_RESULT (file, DECL_RESULT (decl));
16844 if (DEFAULT_ABI == ABI_AIX)
16846 const char *desc_name, *orig_name;
16848 orig_name = (*targetm.strip_name_encoding) (name);
16849 desc_name = orig_name;
16850 while (*desc_name == '.')
16853 if (TREE_PUBLIC (decl))
16854 fprintf (file, "\t.globl %s\n", desc_name);
16856 fprintf (file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
16857 fprintf (file, "%s:\n", desc_name);
16858 fprintf (file, "\t.long %s\n", orig_name);
16859 fputs ("\t.long _GLOBAL_OFFSET_TABLE_\n", file);
16860 if (DEFAULT_ABI == ABI_AIX)
16861 fputs ("\t.long 0\n", file);
16862 fprintf (file, "\t.previous\n");
16864 ASM_OUTPUT_LABEL (file, name);
16870 rs6000_xcoff_asm_globalize_label (FILE *stream, const char *name)
16872 fputs (GLOBAL_ASM_OP, stream);
16873 RS6000_OUTPUT_BASENAME (stream, name);
16874 putc ('\n', stream);
16878 rs6000_xcoff_asm_named_section (const char *name, unsigned int flags)
16881 static const char * const suffix[3] = { "PR", "RO", "RW" };
16883 if (flags & SECTION_CODE)
16885 else if (flags & SECTION_WRITE)
16890 fprintf (asm_out_file, "\t.csect %s%s[%s],%u\n",
16891 (flags & SECTION_CODE) ? "." : "",
16892 name, suffix[smclass], flags & SECTION_ENTSIZE);
16896 rs6000_xcoff_select_section (tree decl, int reloc,
16897 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
16899 if (decl_readonly_section_1 (decl, reloc, 1))
16901 if (TREE_PUBLIC (decl))
16902 read_only_data_section ();
16904 read_only_private_data_section ();
16908 if (TREE_PUBLIC (decl))
16911 private_data_section ();
16916 rs6000_xcoff_unique_section (tree decl, int reloc ATTRIBUTE_UNUSED)
16920 /* Use select_section for private and uninitialized data. */
16921 if (!TREE_PUBLIC (decl)
16922 || DECL_COMMON (decl)
16923 || DECL_INITIAL (decl) == NULL_TREE
16924 || DECL_INITIAL (decl) == error_mark_node
16925 || (flag_zero_initialized_in_bss
16926 && initializer_zerop (DECL_INITIAL (decl))))
16929 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
16930 name = (*targetm.strip_name_encoding) (name);
16931 DECL_SECTION_NAME (decl) = build_string (strlen (name), name);
16934 /* Select section for constant in constant pool.
16936 On RS/6000, all constants are in the private read-only data area.
16937 However, if this is being placed in the TOC it must be output as a
16941 rs6000_xcoff_select_rtx_section (enum machine_mode mode, rtx x,
16942 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
16944 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
16947 read_only_private_data_section ();
16950 /* Remove any trailing [DS] or the like from the symbol name. */
16952 static const char *
16953 rs6000_xcoff_strip_name_encoding (const char *name)
16958 len = strlen (name);
16959 if (name[len - 1] == ']')
16960 return ggc_alloc_string (name, len - 4);
16965 /* Section attributes. AIX is always PIC. */
16967 static unsigned int
16968 rs6000_xcoff_section_type_flags (tree decl, const char *name, int reloc)
16970 unsigned int align;
16971 unsigned int flags = default_section_type_flags_1 (decl, name, reloc, 1);
16973 /* Align to at least UNIT size. */
16974 if (flags & SECTION_CODE)
16975 align = MIN_UNITS_PER_WORD;
16977 /* Increase alignment of large objects if not already stricter. */
16978 align = MAX ((DECL_ALIGN (decl) / BITS_PER_UNIT),
16979 int_size_in_bytes (TREE_TYPE (decl)) > MIN_UNITS_PER_WORD
16980 ? UNITS_PER_FP_WORD : MIN_UNITS_PER_WORD);
16982 return flags | (exact_log2 (align) & SECTION_ENTSIZE);
16985 /* Output at beginning of assembler file.
16987 Initialize the section names for the RS/6000 at this point.
16989 Specify filename, including full path, to assembler.
16991 We want to go into the TOC section so at least one .toc will be emitted.
16992 Also, in order to output proper .bs/.es pairs, we need at least one static
16993 [RW] section emitted.
16995 Finally, declare mcount when profiling to make the assembler happy. */
16998 rs6000_xcoff_file_start (void)
17000 rs6000_gen_section_name (&xcoff_bss_section_name,
17001 main_input_filename, ".bss_");
17002 rs6000_gen_section_name (&xcoff_private_data_section_name,
17003 main_input_filename, ".rw_");
17004 rs6000_gen_section_name (&xcoff_read_only_section_name,
17005 main_input_filename, ".ro_");
17007 fputs ("\t.file\t", asm_out_file);
17008 output_quoted_string (asm_out_file, main_input_filename);
17009 fputc ('\n', asm_out_file);
17011 if (write_symbols != NO_DEBUG)
17012 private_data_section ();
17015 fprintf (asm_out_file, "\t.extern %s\n", RS6000_MCOUNT);
17016 rs6000_file_start ();
17019 /* Output at end of assembler file.
17020 On the RS/6000, referencing data should automatically pull in text. */
17023 rs6000_xcoff_file_end (void)
17026 fputs ("_section_.text:\n", asm_out_file);
17028 fputs (TARGET_32BIT
17029 ? "\t.long _section_.text\n" : "\t.llong _section_.text\n",
17032 #endif /* TARGET_XCOFF */
17035 /* Cross-module name binding. Darwin does not support overriding
17036 functions at dynamic-link time. */
17039 rs6000_binds_local_p (tree decl)
17041 return default_binds_local_p_1 (decl, 0);
17045 /* Compute a (partial) cost for rtx X. Return true if the complete
17046 cost has been computed, and false if subexpressions should be
17047 scanned. In either case, *TOTAL contains the cost result. */
17050 rs6000_rtx_costs (rtx x, int code, int outer_code ATTRIBUTE_UNUSED,
17053 enum machine_mode mode = GET_MODE (x);
17057 /* On the RS/6000, if it is valid in the insn, it is free. */
17059 if (((outer_code == SET
17060 || outer_code == PLUS
17061 || outer_code == MINUS)
17062 && (CONST_OK_FOR_LETTER_P (INTVAL (x), 'I')
17063 || CONST_OK_FOR_LETTER_P (INTVAL (x), 'L')))
17064 || ((outer_code == IOR || outer_code == XOR)
17065 && (CONST_OK_FOR_LETTER_P (INTVAL (x), 'K')
17066 || CONST_OK_FOR_LETTER_P (INTVAL (x), 'L')))
17067 || ((outer_code == DIV || outer_code == UDIV
17068 || outer_code == MOD || outer_code == UMOD)
17069 && exact_log2 (INTVAL (x)) >= 0)
17070 || (outer_code == AND
17071 && (CONST_OK_FOR_LETTER_P (INTVAL (x), 'K')
17072 || CONST_OK_FOR_LETTER_P (INTVAL (x), 'L')
17073 || mask_operand (x, VOIDmode)))
17074 || outer_code == ASHIFT
17075 || outer_code == ASHIFTRT
17076 || outer_code == LSHIFTRT
17077 || outer_code == ROTATE
17078 || outer_code == ROTATERT
17079 || outer_code == ZERO_EXTRACT
17080 || (outer_code == MULT
17081 && CONST_OK_FOR_LETTER_P (INTVAL (x), 'I'))
17082 || (outer_code == COMPARE
17083 && (CONST_OK_FOR_LETTER_P (INTVAL (x), 'I')
17084 || CONST_OK_FOR_LETTER_P (INTVAL (x), 'K'))))
17089 else if ((outer_code == PLUS
17090 && reg_or_add_cint64_operand (x, VOIDmode))
17091 || (outer_code == MINUS
17092 && reg_or_sub_cint64_operand (x, VOIDmode))
17093 || ((outer_code == SET
17094 || outer_code == IOR
17095 || outer_code == XOR)
17097 & ~ (unsigned HOST_WIDE_INT) 0xffffffff) == 0))
17099 *total = COSTS_N_INSNS (1);
17106 && ((outer_code == AND
17107 && (CONST_OK_FOR_LETTER_P (INTVAL (x), 'K')
17108 || CONST_OK_FOR_LETTER_P (INTVAL (x), 'L')
17109 || mask64_operand (x, DImode)))
17110 || ((outer_code == IOR || outer_code == XOR)
17111 && CONST_DOUBLE_HIGH (x) == 0
17112 && (CONST_DOUBLE_LOW (x)
17113 & ~ (unsigned HOST_WIDE_INT) 0xffff) == 0)))
17118 else if (mode == DImode
17119 && (outer_code == SET
17120 || outer_code == IOR
17121 || outer_code == XOR)
17122 && CONST_DOUBLE_HIGH (x) == 0)
17124 *total = COSTS_N_INSNS (1);
17133 /* When optimizing for size, MEM should be slightly more expensive
17134 than generating address, e.g., (plus (reg) (const)).
17135 L1 cache latecy is about two instructions. */
17136 *total = optimize_size ? COSTS_N_INSNS (1) + 1 : COSTS_N_INSNS (2);
17144 if (mode == DFmode)
17146 if (GET_CODE (XEXP (x, 0)) == MULT)
17148 /* FNMA accounted in outer NEG. */
17149 if (outer_code == NEG)
17150 *total = rs6000_cost->dmul - rs6000_cost->fp;
17152 *total = rs6000_cost->dmul;
17155 *total = rs6000_cost->fp;
17157 else if (mode == SFmode)
17159 /* FNMA accounted in outer NEG. */
17160 if (outer_code == NEG && GET_CODE (XEXP (x, 0)) == MULT)
17163 *total = rs6000_cost->fp;
17165 else if (GET_CODE (XEXP (x, 0)) == MULT)
17167 /* The rs6000 doesn't have shift-and-add instructions. */
17168 rs6000_rtx_costs (XEXP (x, 0), MULT, PLUS, total);
17169 *total += COSTS_N_INSNS (1);
17172 *total = COSTS_N_INSNS (1);
17176 if (mode == DFmode)
17178 if (GET_CODE (XEXP (x, 0)) == MULT)
17180 /* FNMA accounted in outer NEG. */
17181 if (outer_code == NEG)
17184 *total = rs6000_cost->dmul;
17187 *total = rs6000_cost->fp;
17189 else if (mode == SFmode)
17191 /* FNMA accounted in outer NEG. */
17192 if (outer_code == NEG && GET_CODE (XEXP (x, 0)) == MULT)
17195 *total = rs6000_cost->fp;
17197 else if (GET_CODE (XEXP (x, 0)) == MULT)
17199 /* The rs6000 doesn't have shift-and-sub instructions. */
17200 rs6000_rtx_costs (XEXP (x, 0), MULT, MINUS, total);
17201 *total += COSTS_N_INSNS (1);
17204 *total = COSTS_N_INSNS (1);
17208 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
17210 if (INTVAL (XEXP (x, 1)) >= -256
17211 && INTVAL (XEXP (x, 1)) <= 255)
17212 *total = rs6000_cost->mulsi_const9;
17214 *total = rs6000_cost->mulsi_const;
17216 /* FMA accounted in outer PLUS/MINUS. */
17217 else if ((mode == DFmode || mode == SFmode)
17218 && (outer_code == PLUS || outer_code == MINUS))
17220 else if (mode == DFmode)
17221 *total = rs6000_cost->dmul;
17222 else if (mode == SFmode)
17223 *total = rs6000_cost->fp;
17224 else if (mode == DImode)
17225 *total = rs6000_cost->muldi;
17227 *total = rs6000_cost->mulsi;
17232 if (FLOAT_MODE_P (mode))
17234 *total = mode == DFmode ? rs6000_cost->ddiv
17235 : rs6000_cost->sdiv;
17242 if (GET_CODE (XEXP (x, 1)) == CONST_INT
17243 && exact_log2 (INTVAL (XEXP (x, 1))) >= 0)
17245 if (code == DIV || code == MOD)
17247 *total = COSTS_N_INSNS (2);
17250 *total = COSTS_N_INSNS (1);
17254 if (GET_MODE (XEXP (x, 1)) == DImode)
17255 *total = rs6000_cost->divdi;
17257 *total = rs6000_cost->divsi;
17259 /* Add in shift and subtract for MOD. */
17260 if (code == MOD || code == UMOD)
17261 *total += COSTS_N_INSNS (2);
17265 *total = COSTS_N_INSNS (4);
17269 if (outer_code == AND || outer_code == IOR || outer_code == XOR)
17280 *total = COSTS_N_INSNS (1);
17288 /* Handle mul_highpart. */
17289 if (outer_code == TRUNCATE
17290 && GET_CODE (XEXP (x, 0)) == MULT)
17292 if (mode == DImode)
17293 *total = rs6000_cost->muldi;
17295 *total = rs6000_cost->mulsi;
17298 else if (outer_code == AND)
17301 *total = COSTS_N_INSNS (1);
17306 if (GET_CODE (XEXP (x, 0)) == MEM)
17309 *total = COSTS_N_INSNS (1);
17315 if (!FLOAT_MODE_P (mode))
17317 *total = COSTS_N_INSNS (1);
17323 case UNSIGNED_FLOAT:
17327 case FLOAT_TRUNCATE:
17328 *total = rs6000_cost->fp;
17332 switch (XINT (x, 1))
17335 *total = rs6000_cost->fp;
17347 *total = COSTS_N_INSNS (1);
17350 else if (FLOAT_MODE_P (mode)
17351 && TARGET_PPC_GFXOPT && TARGET_HARD_FLOAT && TARGET_FPRS)
17353 *total = rs6000_cost->fp;
17366 /* A C expression returning the cost of moving data from a register of class
17367 CLASS1 to one of CLASS2. */
17370 rs6000_register_move_cost (enum machine_mode mode,
17371 enum reg_class from, enum reg_class to)
17373 /* Moves from/to GENERAL_REGS. */
17374 if (reg_classes_intersect_p (to, GENERAL_REGS)
17375 || reg_classes_intersect_p (from, GENERAL_REGS))
17377 if (! reg_classes_intersect_p (to, GENERAL_REGS))
17380 if (from == FLOAT_REGS || from == ALTIVEC_REGS)
17381 return (rs6000_memory_move_cost (mode, from, 0)
17382 + rs6000_memory_move_cost (mode, GENERAL_REGS, 0));
17384 /* It's more expensive to move CR_REGS than CR0_REGS because of the shift.... */
17385 else if (from == CR_REGS)
17389 /* A move will cost one instruction per GPR moved. */
17390 return 2 * HARD_REGNO_NREGS (0, mode);
17393 /* Moving between two similar registers is just one instruction. */
17394 else if (reg_classes_intersect_p (to, from))
17395 return mode == TFmode ? 4 : 2;
17397 /* Everything else has to go through GENERAL_REGS. */
17399 return (rs6000_register_move_cost (mode, GENERAL_REGS, to)
17400 + rs6000_register_move_cost (mode, from, GENERAL_REGS));
17403 /* A C expressions returning the cost of moving data of MODE from a register to
17407 rs6000_memory_move_cost (enum machine_mode mode, enum reg_class class,
17408 int in ATTRIBUTE_UNUSED)
17410 if (reg_classes_intersect_p (class, GENERAL_REGS))
17411 return 4 * HARD_REGNO_NREGS (0, mode);
17412 else if (reg_classes_intersect_p (class, FLOAT_REGS))
17413 return 4 * HARD_REGNO_NREGS (32, mode);
17414 else if (reg_classes_intersect_p (class, ALTIVEC_REGS))
17415 return 4 * HARD_REGNO_NREGS (FIRST_ALTIVEC_REGNO, mode);
17417 return 4 + rs6000_register_move_cost (mode, class, GENERAL_REGS);
17420 /* Return an RTX representing where to find the function value of a
17421 function returning MODE. */
17423 rs6000_complex_function_value (enum machine_mode mode)
17425 unsigned int regno;
17427 enum machine_mode inner = GET_MODE_INNER (mode);
17428 unsigned int inner_bytes = GET_MODE_SIZE (inner);
17430 if (FLOAT_MODE_P (mode) && TARGET_HARD_FLOAT && TARGET_FPRS)
17431 regno = FP_ARG_RETURN;
17434 regno = GP_ARG_RETURN;
17436 /* 32-bit is OK since it'll go in r3/r4. */
17437 if (TARGET_32BIT && inner_bytes >= 4)
17438 return gen_rtx_REG (mode, regno);
17441 if (inner_bytes >= 8)
17442 return gen_rtx_REG (mode, regno);
17444 r1 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno),
17446 r2 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno + 1),
17447 GEN_INT (inner_bytes));
17448 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
17451 /* Define how to find the value returned by a function.
17452 VALTYPE is the data type of the value (as a tree).
17453 If the precise function being called is known, FUNC is its FUNCTION_DECL;
17454 otherwise, FUNC is 0.
17456 On the SPE, both FPs and vectors are returned in r3.
17458 On RS/6000 an integer value is in r3 and a floating-point value is in
17459 fp1, unless -msoft-float. */
17462 rs6000_function_value (tree valtype, tree func ATTRIBUTE_UNUSED)
17464 enum machine_mode mode;
17465 unsigned int regno;
17467 if (TARGET_32BIT && TARGET_POWERPC64 && TYPE_MODE (valtype) == DImode)
17469 /* Long long return value need be split in -mpowerpc64, 32bit ABI. */
17470 return gen_rtx_PARALLEL (DImode,
17472 gen_rtx_EXPR_LIST (VOIDmode,
17473 gen_rtx_REG (SImode, GP_ARG_RETURN),
17475 gen_rtx_EXPR_LIST (VOIDmode,
17476 gen_rtx_REG (SImode,
17477 GP_ARG_RETURN + 1),
17481 if ((INTEGRAL_TYPE_P (valtype)
17482 && TYPE_PRECISION (valtype) < BITS_PER_WORD)
17483 || POINTER_TYPE_P (valtype))
17484 mode = TARGET_32BIT ? SImode : DImode;
17486 mode = TYPE_MODE (valtype);
17488 if (SCALAR_FLOAT_TYPE_P (valtype) && TARGET_HARD_FLOAT && TARGET_FPRS)
17489 regno = FP_ARG_RETURN;
17490 else if (TREE_CODE (valtype) == COMPLEX_TYPE
17491 && targetm.calls.split_complex_arg)
17492 return rs6000_complex_function_value (mode);
17493 else if (TREE_CODE (valtype) == VECTOR_TYPE
17494 && TARGET_ALTIVEC && TARGET_ALTIVEC_ABI
17495 && ALTIVEC_VECTOR_MODE(mode))
17496 regno = ALTIVEC_ARG_RETURN;
17498 regno = GP_ARG_RETURN;
17500 return gen_rtx_REG (mode, regno);
17503 /* Define how to find the value returned by a library function
17504 assuming the value has mode MODE. */
17506 rs6000_libcall_value (enum machine_mode mode)
17508 unsigned int regno;
17510 if (GET_MODE_CLASS (mode) == MODE_FLOAT
17511 && TARGET_HARD_FLOAT && TARGET_FPRS)
17512 regno = FP_ARG_RETURN;
17513 else if (ALTIVEC_VECTOR_MODE (mode)
17514 && TARGET_ALTIVEC && TARGET_ALTIVEC_ABI)
17515 regno = ALTIVEC_ARG_RETURN;
17516 else if (COMPLEX_MODE_P (mode) && targetm.calls.split_complex_arg)
17517 return rs6000_complex_function_value (mode);
17519 regno = GP_ARG_RETURN;
17521 return gen_rtx_REG (mode, regno);
17524 /* Define the offset between two registers, FROM to be eliminated and its
17525 replacement TO, at the start of a routine. */
17527 rs6000_initial_elimination_offset (int from, int to)
17529 rs6000_stack_t *info = rs6000_stack_info ();
17530 HOST_WIDE_INT offset;
17532 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
17533 offset = info->push_p ? 0 : -info->total_size;
17534 else if (from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
17535 offset = info->total_size;
17536 else if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
17537 offset = info->push_p ? info->total_size : 0;
17538 else if (from == RS6000_PIC_OFFSET_TABLE_REGNUM)
17546 /* Return true if TYPE is of type __ev64_opaque__. */
17549 is_ev64_opaque_type (tree type)
17552 && (type == opaque_V2SI_type_node
17553 || type == opaque_V2SF_type_node
17554 || type == opaque_p_V2SI_type_node));
17558 rs6000_dwarf_register_span (rtx reg)
17562 if (!TARGET_SPE || !SPE_VECTOR_MODE (GET_MODE (reg)))
17565 regno = REGNO (reg);
17567 /* The duality of the SPE register size wreaks all kinds of havoc.
17568 This is a way of distinguishing r0 in 32-bits from r0 in
17571 gen_rtx_PARALLEL (VOIDmode,
17574 gen_rtx_REG (SImode, regno + 1200),
17575 gen_rtx_REG (SImode, regno))
17577 gen_rtx_REG (SImode, regno),
17578 gen_rtx_REG (SImode, regno + 1200)));
17581 /* Map internal gcc register numbers to DWARF2 register numbers. */
17584 rs6000_dbx_register_number (unsigned int regno)
17586 if (regno <= 63 || write_symbols != DWARF2_DEBUG)
17588 if (regno == MQ_REGNO)
17590 if (regno == LINK_REGISTER_REGNUM)
17592 if (regno == COUNT_REGISTER_REGNUM)
17594 if (CR_REGNO_P (regno))
17595 return regno - CR0_REGNO + 86;
17596 if (regno == XER_REGNO)
17598 if (ALTIVEC_REGNO_P (regno))
17599 return regno - FIRST_ALTIVEC_REGNO + 1124;
17600 if (regno == VRSAVE_REGNO)
17602 if (regno == VSCR_REGNO)
17604 if (regno == SPE_ACC_REGNO)
17606 if (regno == SPEFSCR_REGNO)
17608 /* SPE high reg number. We get these values of regno from
17609 rs6000_dwarf_register_span. */
17610 if (regno >= 1200 && regno < 1232)
17616 /* target hook eh_return_filter_mode */
17617 static enum machine_mode
17618 rs6000_eh_return_filter_mode (void)
17620 return TARGET_32BIT ? SImode : word_mode;
17623 #include "gt-rs6000.h"