1 /* Subroutines used for code generation on IBM RS/6000.
2 Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
4 Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it
9 under the terms of the GNU General Public License as published
10 by the Free Software Foundation; either version 2, or (at your
11 option) any later version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT
14 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
15 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
16 License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the
20 Free Software Foundation, 59 Temple Place - Suite 330, Boston,
21 MA 02111-1307, USA. */
25 #include "coretypes.h"
29 #include "hard-reg-set.h"
31 #include "insn-config.h"
32 #include "conditions.h"
33 #include "insn-attr.h"
43 #include "basic-block.h"
44 #include "integrate.h"
50 #include "target-def.h"
51 #include "langhooks.h"
53 #include "cfglayout.h"
54 #include "sched-int.h"
56 #include "xcoffout.h" /* get declarations of xcoff_*_section_name */
59 #ifndef TARGET_NO_PROTOTYPE
60 #define TARGET_NO_PROTOTYPE 0
63 #define EASY_VECTOR_15(n) ((n) >= -16 && (n) <= 15)
64 #define EASY_VECTOR_15_ADD_SELF(n) ((n) >= 0x10 && (n) <= 0x1e \
67 #define min(A,B) ((A) < (B) ? (A) : (B))
68 #define max(A,B) ((A) > (B) ? (A) : (B))
70 /* Structure used to define the rs6000 stack */
71 typedef struct rs6000_stack {
72 int first_gp_reg_save; /* first callee saved GP register used */
73 int first_fp_reg_save; /* first callee saved FP register used */
74 int first_altivec_reg_save; /* first callee saved AltiVec register used */
75 int lr_save_p; /* true if the link reg needs to be saved */
76 int cr_save_p; /* true if the CR reg needs to be saved */
77 unsigned int vrsave_mask; /* mask of vec registers to save */
78 int toc_save_p; /* true if the TOC needs to be saved */
79 int push_p; /* true if we need to allocate stack space */
80 int calls_p; /* true if the function makes any calls */
81 enum rs6000_abi abi; /* which ABI to use */
82 int gp_save_offset; /* offset to save GP regs from initial SP */
83 int fp_save_offset; /* offset to save FP regs from initial SP */
84 int altivec_save_offset; /* offset to save AltiVec regs from initial SP */
85 int lr_save_offset; /* offset to save LR from initial SP */
86 int cr_save_offset; /* offset to save CR from initial SP */
87 int vrsave_save_offset; /* offset to save VRSAVE from initial SP */
88 int spe_gp_save_offset; /* offset to save spe 64-bit gprs */
89 int toc_save_offset; /* offset to save the TOC pointer */
90 int varargs_save_offset; /* offset to save the varargs registers */
91 int ehrd_offset; /* offset to EH return data */
92 int reg_size; /* register size (4 or 8) */
93 int varargs_size; /* size to hold V.4 args passed in regs */
94 HOST_WIDE_INT vars_size; /* variable save area size */
95 int parm_size; /* outgoing parameter size */
96 int save_size; /* save area size */
97 int fixed_size; /* fixed size of stack frame */
98 int gp_size; /* size of saved GP registers */
99 int fp_size; /* size of saved FP registers */
100 int altivec_size; /* size of saved AltiVec registers */
101 int cr_size; /* size to hold CR if not in save_size */
102 int lr_size; /* size to hold LR if not in save_size */
103 int vrsave_size; /* size to hold VRSAVE if not in save_size */
104 int altivec_padding_size; /* size of altivec alignment padding if
106 int spe_gp_size; /* size of 64-bit GPR save size for SPE */
107 int spe_padding_size;
108 int toc_size; /* size to hold TOC if not in save_size */
109 HOST_WIDE_INT total_size; /* total bytes allocated for stack */
110 int spe_64bit_regs_used;
113 /* Target cpu type */
115 enum processor_type rs6000_cpu;
116 struct rs6000_cpu_select rs6000_select[3] =
118 /* switch name, tune arch */
119 { (const char *)0, "--with-cpu=", 1, 1 },
120 { (const char *)0, "-mcpu=", 1, 1 },
121 { (const char *)0, "-mtune=", 1, 0 },
124 /* Always emit branch hint bits. */
125 static GTY(()) bool rs6000_always_hint;
127 /* Schedule instructions for group formation. */
128 static GTY(()) bool rs6000_sched_groups;
130 /* Support adjust_priority scheduler hook
131 and -mprioritize-restricted-insns= option. */
132 const char *rs6000_sched_restricted_insns_priority_str;
133 int rs6000_sched_restricted_insns_priority;
135 /* Support for -msched-costly-dep option. */
136 const char *rs6000_sched_costly_dep_str;
137 enum rs6000_dependence_cost rs6000_sched_costly_dep;
139 /* Support for -minsert-sched-nops option. */
140 const char *rs6000_sched_insert_nops_str;
141 enum rs6000_nop_insertion rs6000_sched_insert_nops;
143 /* Size of long double */
144 const char *rs6000_long_double_size_string;
145 int rs6000_long_double_type_size;
147 /* Whether -mabi=altivec has appeared */
148 int rs6000_altivec_abi;
150 /* Whether VRSAVE instructions should be generated. */
151 int rs6000_altivec_vrsave;
153 /* String from -mvrsave= option. */
154 const char *rs6000_altivec_vrsave_string;
156 /* Nonzero if we want SPE ABI extensions. */
159 /* Whether isel instructions should be generated. */
162 /* Whether SPE simd instructions should be generated. */
165 /* Nonzero if floating point operations are done in the GPRs. */
166 int rs6000_float_gprs = 0;
168 /* String from -mfloat-gprs=. */
169 const char *rs6000_float_gprs_string;
171 /* String from -misel=. */
172 const char *rs6000_isel_string;
174 /* String from -mspe=. */
175 const char *rs6000_spe_string;
177 /* Set to nonzero once AIX common-mode calls have been defined. */
178 static GTY(()) int common_mode_defined;
180 /* Save information from a "cmpxx" operation until the branch or scc is
182 rtx rs6000_compare_op0, rs6000_compare_op1;
183 int rs6000_compare_fp_p;
185 /* Label number of label created for -mrelocatable, to call to so we can
186 get the address of the GOT section */
187 int rs6000_pic_labelno;
190 /* Which abi to adhere to */
191 const char *rs6000_abi_name;
193 /* Semantics of the small data area */
194 enum rs6000_sdata_type rs6000_sdata = SDATA_DATA;
196 /* Which small data model to use */
197 const char *rs6000_sdata_name = (char *)0;
199 /* Counter for labels which are to be placed in .fixup. */
200 int fixuplabelno = 0;
203 /* Bit size of immediate TLS offsets and string from which it is decoded. */
204 int rs6000_tls_size = 32;
205 const char *rs6000_tls_size_string;
207 /* ABI enumeration available for subtarget to use. */
208 enum rs6000_abi rs6000_current_abi;
210 /* ABI string from -mabi= option. */
211 const char *rs6000_abi_string;
214 const char *rs6000_debug_name;
215 int rs6000_debug_stack; /* debug stack applications */
216 int rs6000_debug_arg; /* debug argument handling */
218 /* Value is TRUE if register/mode pair is accepatable. */
219 bool rs6000_hard_regno_mode_ok_p[NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
222 static GTY(()) tree opaque_V2SI_type_node;
223 static GTY(()) tree opaque_V2SF_type_node;
224 static GTY(()) tree opaque_p_V2SI_type_node;
225 static GTY(()) tree V16QI_type_node;
226 static GTY(()) tree V2SI_type_node;
227 static GTY(()) tree V2SF_type_node;
228 static GTY(()) tree V4HI_type_node;
229 static GTY(()) tree V4SI_type_node;
230 static GTY(()) tree V4SF_type_node;
231 static GTY(()) tree V8HI_type_node;
232 static GTY(()) tree unsigned_V16QI_type_node;
233 static GTY(()) tree unsigned_V8HI_type_node;
234 static GTY(()) tree unsigned_V4SI_type_node;
235 static GTY(()) tree bool_char_type_node; /* __bool char */
236 static GTY(()) tree bool_short_type_node; /* __bool short */
237 static GTY(()) tree bool_int_type_node; /* __bool int */
238 static GTY(()) tree pixel_type_node; /* __pixel */
239 static GTY(()) tree bool_V16QI_type_node; /* __vector __bool char */
240 static GTY(()) tree bool_V8HI_type_node; /* __vector __bool short */
241 static GTY(()) tree bool_V4SI_type_node; /* __vector __bool int */
242 static GTY(()) tree pixel_V8HI_type_node; /* __vector __pixel */
244 int rs6000_warn_altivec_long = 1; /* On by default. */
245 const char *rs6000_warn_altivec_long_switch;
247 const char *rs6000_traceback_name;
249 traceback_default = 0,
255 /* Flag to say the TOC is initialized */
257 char toc_label_name[10];
259 /* Alias set for saves and restores from the rs6000 stack. */
260 static GTY(()) int rs6000_sr_alias_set;
262 /* Call distance, overridden by -mlongcall and #pragma longcall(1).
263 The only place that looks at this is rs6000_set_default_type_attributes;
264 everywhere else should rely on the presence or absence of a longcall
265 attribute on the function declaration. */
266 int rs6000_default_long_calls;
267 const char *rs6000_longcall_switch;
269 /* Control alignment for fields within structures. */
270 /* String from -malign-XXXXX. */
271 const char *rs6000_alignment_string;
272 int rs6000_alignment_flags;
274 struct builtin_description
276 /* mask is not const because we're going to alter it below. This
277 nonsense will go away when we rewrite the -march infrastructure
278 to give us more target flag bits. */
280 const enum insn_code icode;
281 const char *const name;
282 const enum rs6000_builtins code;
285 static bool rs6000_function_ok_for_sibcall (tree, tree);
286 static int num_insns_constant_wide (HOST_WIDE_INT);
287 static void validate_condition_mode (enum rtx_code, enum machine_mode);
288 static rtx rs6000_generate_compare (enum rtx_code);
289 static void rs6000_maybe_dead (rtx);
290 static void rs6000_emit_stack_tie (void);
291 static void rs6000_frame_related (rtx, rtx, HOST_WIDE_INT, rtx, rtx);
292 static rtx spe_synthesize_frame_save (rtx);
293 static bool spe_func_has_64bit_regs_p (void);
294 static void emit_frame_save (rtx, rtx, enum machine_mode, unsigned int,
296 static rtx gen_frame_mem_offset (enum machine_mode, rtx, int);
297 static void rs6000_emit_allocate_stack (HOST_WIDE_INT, int);
298 static unsigned rs6000_hash_constant (rtx);
299 static unsigned toc_hash_function (const void *);
300 static int toc_hash_eq (const void *, const void *);
301 static int constant_pool_expr_1 (rtx, int *, int *);
302 static bool constant_pool_expr_p (rtx);
303 static bool toc_relative_expr_p (rtx);
304 static bool legitimate_small_data_p (enum machine_mode, rtx);
305 static bool legitimate_offset_address_p (enum machine_mode, rtx, int);
306 static bool legitimate_indexed_address_p (rtx, int);
307 static bool legitimate_indirect_address_p (rtx, int);
308 static bool macho_lo_sum_memory_operand (rtx x, enum machine_mode mode);
309 static bool legitimate_lo_sum_address_p (enum machine_mode, rtx, int);
310 static struct machine_function * rs6000_init_machine_status (void);
311 static bool rs6000_assemble_integer (rtx, unsigned int, int);
312 #ifdef HAVE_GAS_HIDDEN
313 static void rs6000_assemble_visibility (tree, int);
315 static int rs6000_ra_ever_killed (void);
316 static tree rs6000_handle_longcall_attribute (tree *, tree, tree, int, bool *);
317 static tree rs6000_handle_altivec_attribute (tree *, tree, tree, int, bool *);
318 static const char *rs6000_mangle_fundamental_type (tree);
319 extern const struct attribute_spec rs6000_attribute_table[];
320 static void rs6000_set_default_type_attributes (tree);
321 static void rs6000_output_function_prologue (FILE *, HOST_WIDE_INT);
322 static void rs6000_output_function_epilogue (FILE *, HOST_WIDE_INT);
323 static void rs6000_output_mi_thunk (FILE *, tree, HOST_WIDE_INT, HOST_WIDE_INT,
325 static rtx rs6000_emit_set_long_const (rtx, HOST_WIDE_INT, HOST_WIDE_INT);
326 static bool rs6000_return_in_memory (tree, tree);
327 static void rs6000_file_start (void);
329 static unsigned int rs6000_elf_section_type_flags (tree, const char *, int);
330 static void rs6000_elf_asm_out_constructor (rtx, int);
331 static void rs6000_elf_asm_out_destructor (rtx, int);
332 static void rs6000_elf_select_section (tree, int, unsigned HOST_WIDE_INT);
333 static void rs6000_elf_unique_section (tree, int);
334 static void rs6000_elf_select_rtx_section (enum machine_mode, rtx,
335 unsigned HOST_WIDE_INT);
336 static void rs6000_elf_encode_section_info (tree, rtx, int)
338 static bool rs6000_elf_in_small_data_p (tree);
341 static void rs6000_xcoff_asm_globalize_label (FILE *, const char *);
342 static void rs6000_xcoff_asm_named_section (const char *, unsigned int);
343 static void rs6000_xcoff_select_section (tree, int, unsigned HOST_WIDE_INT);
344 static void rs6000_xcoff_unique_section (tree, int);
345 static void rs6000_xcoff_select_rtx_section (enum machine_mode, rtx,
346 unsigned HOST_WIDE_INT);
347 static const char * rs6000_xcoff_strip_name_encoding (const char *);
348 static unsigned int rs6000_xcoff_section_type_flags (tree, const char *, int);
349 static void rs6000_xcoff_file_start (void);
350 static void rs6000_xcoff_file_end (void);
353 static bool rs6000_binds_local_p (tree);
355 static int rs6000_use_dfa_pipeline_interface (void);
356 static int rs6000_variable_issue (FILE *, int, rtx, int);
357 static bool rs6000_rtx_costs (rtx, int, int, int *);
358 static int rs6000_adjust_cost (rtx, rtx, rtx, int);
359 static bool is_microcoded_insn (rtx);
360 static int is_dispatch_slot_restricted (rtx);
361 static bool is_cracked_insn (rtx);
362 static bool is_branch_slot_insn (rtx);
363 static int rs6000_adjust_priority (rtx, int);
364 static int rs6000_issue_rate (void);
365 static bool rs6000_is_costly_dependence (rtx, rtx, rtx, int, int);
366 static rtx get_next_active_insn (rtx, rtx);
367 static bool insn_terminates_group_p (rtx , enum group_termination);
368 static bool is_costly_group (rtx *, rtx);
369 static int force_new_group (int, FILE *, rtx *, rtx, bool *, int, int *);
370 static int redefine_groups (FILE *, int, rtx, rtx);
371 static int pad_groups (FILE *, int, rtx, rtx);
372 static void rs6000_sched_finish (FILE *, int);
373 static int rs6000_use_sched_lookahead (void);
375 static void rs6000_init_builtins (void);
376 static rtx rs6000_expand_unop_builtin (enum insn_code, tree, rtx);
377 static rtx rs6000_expand_binop_builtin (enum insn_code, tree, rtx);
378 static rtx rs6000_expand_ternop_builtin (enum insn_code, tree, rtx);
379 static rtx rs6000_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
380 static void altivec_init_builtins (void);
381 static void rs6000_common_init_builtins (void);
382 static void rs6000_init_libfuncs (void);
384 static void enable_mask_for_builtins (struct builtin_description *, int,
385 enum rs6000_builtins,
386 enum rs6000_builtins);
387 static tree build_opaque_vector_type (tree, int);
388 static void spe_init_builtins (void);
389 static rtx spe_expand_builtin (tree, rtx, bool *);
390 static rtx spe_expand_predicate_builtin (enum insn_code, tree, rtx);
391 static rtx spe_expand_evsel_builtin (enum insn_code, tree, rtx);
392 static int rs6000_emit_int_cmove (rtx, rtx, rtx, rtx);
393 static rs6000_stack_t *rs6000_stack_info (void);
394 static void debug_stack_info (rs6000_stack_t *);
396 static rtx altivec_expand_builtin (tree, rtx, bool *);
397 static rtx altivec_expand_ld_builtin (tree, rtx, bool *);
398 static rtx altivec_expand_st_builtin (tree, rtx, bool *);
399 static rtx altivec_expand_dst_builtin (tree, rtx, bool *);
400 static rtx altivec_expand_abs_builtin (enum insn_code, tree, rtx);
401 static rtx altivec_expand_predicate_builtin (enum insn_code,
402 const char *, tree, rtx);
403 static rtx altivec_expand_lv_builtin (enum insn_code, tree, rtx);
404 static rtx altivec_expand_stv_builtin (enum insn_code, tree);
405 static void rs6000_parse_abi_options (void);
406 static void rs6000_parse_alignment_option (void);
407 static void rs6000_parse_tls_size_option (void);
408 static void rs6000_parse_yes_no_option (const char *, const char *, int *);
409 static int first_altivec_reg_to_save (void);
410 static unsigned int compute_vrsave_mask (void);
411 static void is_altivec_return_reg (rtx, void *);
412 static rtx generate_set_vrsave (rtx, rs6000_stack_t *, int);
413 int easy_vector_constant (rtx, enum machine_mode);
414 static int easy_vector_same (rtx, enum machine_mode);
415 static int easy_vector_splat_const (int, enum machine_mode);
416 static bool is_ev64_opaque_type (tree);
417 static rtx rs6000_dwarf_register_span (rtx);
418 static rtx rs6000_legitimize_tls_address (rtx, enum tls_model);
419 static rtx rs6000_tls_get_addr (void);
420 static rtx rs6000_got_sym (void);
421 static inline int rs6000_tls_symbol_ref_1 (rtx *, void *);
422 static const char *rs6000_get_some_local_dynamic_name (void);
423 static int rs6000_get_some_local_dynamic_name_1 (rtx *, void *);
424 static rtx rs6000_complex_function_value (enum machine_mode);
425 static rtx rs6000_spe_function_arg (CUMULATIVE_ARGS *,
426 enum machine_mode, tree);
427 static rtx rs6000_mixed_function_arg (CUMULATIVE_ARGS *,
428 enum machine_mode, tree, int);
429 static void rs6000_move_block_from_reg (int regno, rtx x, int nregs);
430 static void setup_incoming_varargs (CUMULATIVE_ARGS *,
431 enum machine_mode, tree,
434 static void macho_branch_islands (void);
435 static void add_compiler_branch_island (tree, tree, int);
436 static int no_previous_def (tree function_name);
437 static tree get_prev_label (tree function_name);
440 static tree rs6000_build_builtin_va_list (void);
442 /* Hash table stuff for keeping track of TOC entries. */
444 struct toc_hash_struct GTY(())
446 /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
447 ASM_OUTPUT_SPECIAL_POOL_ENTRY_P. */
449 enum machine_mode key_mode;
453 static GTY ((param_is (struct toc_hash_struct))) htab_t toc_hash_table;
455 /* Default register names. */
456 char rs6000_reg_names[][8] =
458 "0", "1", "2", "3", "4", "5", "6", "7",
459 "8", "9", "10", "11", "12", "13", "14", "15",
460 "16", "17", "18", "19", "20", "21", "22", "23",
461 "24", "25", "26", "27", "28", "29", "30", "31",
462 "0", "1", "2", "3", "4", "5", "6", "7",
463 "8", "9", "10", "11", "12", "13", "14", "15",
464 "16", "17", "18", "19", "20", "21", "22", "23",
465 "24", "25", "26", "27", "28", "29", "30", "31",
466 "mq", "lr", "ctr","ap",
467 "0", "1", "2", "3", "4", "5", "6", "7",
469 /* AltiVec registers. */
470 "0", "1", "2", "3", "4", "5", "6", "7",
471 "8", "9", "10", "11", "12", "13", "14", "15",
472 "16", "17", "18", "19", "20", "21", "22", "23",
473 "24", "25", "26", "27", "28", "29", "30", "31",
479 #ifdef TARGET_REGNAMES
480 static const char alt_reg_names[][8] =
482 "%r0", "%r1", "%r2", "%r3", "%r4", "%r5", "%r6", "%r7",
483 "%r8", "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
484 "%r16", "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
485 "%r24", "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
486 "%f0", "%f1", "%f2", "%f3", "%f4", "%f5", "%f6", "%f7",
487 "%f8", "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
488 "%f16", "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
489 "%f24", "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
490 "mq", "lr", "ctr", "ap",
491 "%cr0", "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
493 /* AltiVec registers. */
494 "%v0", "%v1", "%v2", "%v3", "%v4", "%v5", "%v6", "%v7",
495 "%v8", "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
496 "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
497 "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
504 #ifndef MASK_STRICT_ALIGN
505 #define MASK_STRICT_ALIGN 0
507 #ifndef TARGET_PROFILE_KERNEL
508 #define TARGET_PROFILE_KERNEL 0
511 /* The VRSAVE bitmask puts bit %v0 as the most significant bit. */
512 #define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))
514 /* Return 1 for a symbol ref for a thread-local storage symbol. */
515 #define RS6000_SYMBOL_REF_TLS_P(RTX) \
516 (GET_CODE (RTX) == SYMBOL_REF && SYMBOL_REF_TLS_MODEL (RTX) != 0)
518 /* Initialize the GCC target structure. */
519 #undef TARGET_ATTRIBUTE_TABLE
520 #define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
521 #undef TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
522 #define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES rs6000_set_default_type_attributes
524 #undef TARGET_ASM_ALIGNED_DI_OP
525 #define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP
527 /* Default unaligned ops are only provided for ELF. Find the ops needed
528 for non-ELF systems. */
529 #ifndef OBJECT_FORMAT_ELF
531 /* For XCOFF. rs6000_assemble_integer will handle unaligned DIs on
533 #undef TARGET_ASM_UNALIGNED_HI_OP
534 #define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
535 #undef TARGET_ASM_UNALIGNED_SI_OP
536 #define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
537 #undef TARGET_ASM_UNALIGNED_DI_OP
538 #define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
541 #undef TARGET_ASM_UNALIGNED_HI_OP
542 #define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
543 #undef TARGET_ASM_UNALIGNED_SI_OP
544 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
548 /* This hook deals with fixups for relocatable code and DI-mode objects
550 #undef TARGET_ASM_INTEGER
551 #define TARGET_ASM_INTEGER rs6000_assemble_integer
553 #ifdef HAVE_GAS_HIDDEN
554 #undef TARGET_ASM_ASSEMBLE_VISIBILITY
555 #define TARGET_ASM_ASSEMBLE_VISIBILITY rs6000_assemble_visibility
558 #undef TARGET_HAVE_TLS
559 #define TARGET_HAVE_TLS HAVE_AS_TLS
561 #undef TARGET_CANNOT_FORCE_CONST_MEM
562 #define TARGET_CANNOT_FORCE_CONST_MEM rs6000_tls_referenced_p
564 #undef TARGET_ASM_FUNCTION_PROLOGUE
565 #define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
566 #undef TARGET_ASM_FUNCTION_EPILOGUE
567 #define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue
569 #undef TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE
570 #define TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE rs6000_use_dfa_pipeline_interface
571 #undef TARGET_SCHED_VARIABLE_ISSUE
572 #define TARGET_SCHED_VARIABLE_ISSUE rs6000_variable_issue
574 #undef TARGET_SCHED_ISSUE_RATE
575 #define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
576 #undef TARGET_SCHED_ADJUST_COST
577 #define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
578 #undef TARGET_SCHED_ADJUST_PRIORITY
579 #define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority
580 #undef TARGET_SCHED_IS_COSTLY_DEPENDENCE
581 #define TARGET_SCHED_IS_COSTLY_DEPENDENCE rs6000_is_costly_dependence
582 #undef TARGET_SCHED_FINISH
583 #define TARGET_SCHED_FINISH rs6000_sched_finish
585 #undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
586 #define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD rs6000_use_sched_lookahead
588 #undef TARGET_INIT_BUILTINS
589 #define TARGET_INIT_BUILTINS rs6000_init_builtins
591 #undef TARGET_EXPAND_BUILTIN
592 #define TARGET_EXPAND_BUILTIN rs6000_expand_builtin
594 #undef TARGET_MANGLE_FUNDAMENTAL_TYPE
595 #define TARGET_MANGLE_FUNDAMENTAL_TYPE rs6000_mangle_fundamental_type
597 #undef TARGET_INIT_LIBFUNCS
598 #define TARGET_INIT_LIBFUNCS rs6000_init_libfuncs
601 #undef TARGET_BINDS_LOCAL_P
602 #define TARGET_BINDS_LOCAL_P rs6000_binds_local_p
605 #undef TARGET_ASM_OUTPUT_MI_THUNK
606 #define TARGET_ASM_OUTPUT_MI_THUNK rs6000_output_mi_thunk
608 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
609 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_tree_hwi_hwi_tree_true
611 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
612 #define TARGET_FUNCTION_OK_FOR_SIBCALL rs6000_function_ok_for_sibcall
614 #undef TARGET_RTX_COSTS
615 #define TARGET_RTX_COSTS rs6000_rtx_costs
616 #undef TARGET_ADDRESS_COST
617 #define TARGET_ADDRESS_COST hook_int_rtx_0
619 #undef TARGET_VECTOR_OPAQUE_P
620 #define TARGET_VECTOR_OPAQUE_P is_ev64_opaque_type
622 #undef TARGET_DWARF_REGISTER_SPAN
623 #define TARGET_DWARF_REGISTER_SPAN rs6000_dwarf_register_span
625 /* On rs6000, function arguments are promoted, as are function return
627 #undef TARGET_PROMOTE_FUNCTION_ARGS
628 #define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_tree_true
629 #undef TARGET_PROMOTE_FUNCTION_RETURN
630 #define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_tree_true
632 #undef TARGET_RETURN_IN_MEMORY
633 #define TARGET_RETURN_IN_MEMORY rs6000_return_in_memory
635 #undef TARGET_SETUP_INCOMING_VARARGS
636 #define TARGET_SETUP_INCOMING_VARARGS setup_incoming_varargs
638 /* Always strict argument naming on rs6000. */
639 #undef TARGET_STRICT_ARGUMENT_NAMING
640 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
641 #undef TARGET_PRETEND_OUTGOING_VARARGS_NAMED
642 #define TARGET_PRETEND_OUTGOING_VARARGS_NAMED hook_bool_CUMULATIVE_ARGS_true
643 #undef TARGET_SPLIT_COMPLEX_ARG
644 #define TARGET_SPLIT_COMPLEX_ARG hook_bool_tree_true
646 #undef TARGET_BUILD_BUILTIN_VA_LIST
647 #define TARGET_BUILD_BUILTIN_VA_LIST rs6000_build_builtin_va_list
649 struct gcc_target targetm = TARGET_INITIALIZER;
652 /* Value is 1 if hard register REGNO can hold a value of machine-mode
655 rs6000_hard_regno_mode_ok (int regno, enum machine_mode mode)
657 /* The GPRs can hold any mode, but values bigger than one register
658 cannot go past R31. */
659 if (INT_REGNO_P (regno))
660 return INT_REGNO_P (regno + HARD_REGNO_NREGS (regno, mode) - 1);
662 /* The float registers can only hold floating modes and DImode. */
663 if (FP_REGNO_P (regno))
665 (GET_MODE_CLASS (mode) == MODE_FLOAT
666 && FP_REGNO_P (regno + HARD_REGNO_NREGS (regno, mode) - 1))
667 || (GET_MODE_CLASS (mode) == MODE_INT
668 && GET_MODE_SIZE (mode) == UNITS_PER_FP_WORD);
670 /* The CR register can only hold CC modes. */
671 if (CR_REGNO_P (regno))
672 return GET_MODE_CLASS (mode) == MODE_CC;
674 if (XER_REGNO_P (regno))
675 return mode == PSImode;
677 /* AltiVec only in AldyVec registers. */
678 if (ALTIVEC_REGNO_P (regno))
679 return ALTIVEC_VECTOR_MODE (mode);
681 /* ...but GPRs can hold SIMD data on the SPE in one register. */
682 if (SPE_SIMD_REGNO_P (regno) && TARGET_SPE && SPE_VECTOR_MODE (mode))
685 /* We cannot put TImode anywhere except general register and it must be
686 able to fit within the register set. */
688 return GET_MODE_SIZE (mode) <= UNITS_PER_WORD;
691 /* Initialize rs6000_hard_regno_mode_ok_p table. */
693 rs6000_init_hard_regno_mode_ok (void)
697 for (r = 0; r < FIRST_PSEUDO_REGISTER; ++r)
698 for (m = 0; m < NUM_MACHINE_MODES; ++m)
699 if (rs6000_hard_regno_mode_ok (r, m))
700 rs6000_hard_regno_mode_ok_p[m][r] = true;
703 /* Override command line options. Mostly we process the processor
704 type and sometimes adjust other TARGET_ options. */
707 rs6000_override_options (const char *default_cpu)
710 struct rs6000_cpu_select *ptr;
713 /* Simplifications for entries below. */
716 POWERPC_BASE_MASK = MASK_POWERPC | MASK_NEW_MNEMONICS,
717 POWERPC_7400_MASK = POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_ALTIVEC
720 /* This table occasionally claims that a processor does not support
721 a particular feature even though it does, but the feature is slower
722 than the alternative. Thus, it shouldn't be relied on as a
723 complete description of the processor's support.
725 Please keep this list in order, and don't forget to update the
726 documentation in invoke.texi when adding a new processor or
730 const char *const name; /* Canonical processor name. */
731 const enum processor_type processor; /* Processor type enum value. */
732 const int target_enable; /* Target flags to enable. */
733 } const processor_target_table[]
734 = {{"401", PROCESSOR_PPC403, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
735 {"403", PROCESSOR_PPC403,
736 POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_STRICT_ALIGN},
737 {"405", PROCESSOR_PPC405, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
738 {"405fp", PROCESSOR_PPC405, POWERPC_BASE_MASK},
739 {"440", PROCESSOR_PPC440, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
740 {"440fp", PROCESSOR_PPC440, POWERPC_BASE_MASK},
741 {"505", PROCESSOR_MPCCORE, POWERPC_BASE_MASK},
742 {"601", PROCESSOR_PPC601,
743 MASK_POWER | POWERPC_BASE_MASK | MASK_MULTIPLE | MASK_STRING},
744 {"602", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
745 {"603", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
746 {"603e", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
747 {"604", PROCESSOR_PPC604, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
748 {"604e", PROCESSOR_PPC604e, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
749 {"620", PROCESSOR_PPC620,
750 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
751 {"630", PROCESSOR_PPC630,
752 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
753 {"740", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
754 {"7400", PROCESSOR_PPC7400, POWERPC_7400_MASK},
755 {"7450", PROCESSOR_PPC7450, POWERPC_7400_MASK},
756 {"750", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
757 {"801", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
758 {"821", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
759 {"823", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
760 {"8540", PROCESSOR_PPC8540, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
761 {"860", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
762 {"970", PROCESSOR_POWER4,
763 POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
764 {"common", PROCESSOR_COMMON, MASK_NEW_MNEMONICS},
765 {"ec603e", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
766 {"G3", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
767 {"G4", PROCESSOR_PPC7450, POWERPC_7400_MASK},
768 {"G5", PROCESSOR_POWER4,
769 POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
770 {"power", PROCESSOR_POWER, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
771 {"power2", PROCESSOR_POWER,
772 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING},
773 {"power3", PROCESSOR_PPC630,
774 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
775 {"power4", PROCESSOR_POWER4,
776 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_MFCRF | MASK_POWERPC64},
777 {"power5", PROCESSOR_POWER5,
778 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_MFCRF | MASK_POWERPC64},
779 {"powerpc", PROCESSOR_POWERPC, POWERPC_BASE_MASK},
780 {"powerpc64", PROCESSOR_POWERPC64,
781 POWERPC_BASE_MASK | MASK_POWERPC64},
782 {"rios", PROCESSOR_RIOS1, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
783 {"rios1", PROCESSOR_RIOS1, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
784 {"rios2", PROCESSOR_RIOS2,
785 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING},
786 {"rsc", PROCESSOR_PPC601, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
787 {"rsc1", PROCESSOR_PPC601, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
788 {"rs64a", PROCESSOR_RS64A, POWERPC_BASE_MASK | MASK_POWERPC64},
791 const size_t ptt_size = ARRAY_SIZE (processor_target_table);
793 /* Some OSs don't support saving the high part of 64-bit registers on
794 context switch. Other OSs don't support saving Altivec registers.
795 On those OSs, we don't touch the MASK_POWERPC64 or MASK_ALTIVEC
796 settings; if the user wants either, the user must explicitly specify
797 them and we won't interfere with the user's specification. */
800 POWER_MASKS = MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING,
801 POWERPC_MASKS = (POWERPC_BASE_MASK | MASK_PPC_GPOPT
802 | MASK_PPC_GFXOPT | MASK_POWERPC64 | MASK_ALTIVEC
806 rs6000_init_hard_regno_mode_ok ();
808 set_masks = POWER_MASKS | POWERPC_MASKS | MASK_SOFT_FLOAT;
809 #ifdef OS_MISSING_POWERPC64
810 if (OS_MISSING_POWERPC64)
811 set_masks &= ~MASK_POWERPC64;
813 #ifdef OS_MISSING_ALTIVEC
814 if (OS_MISSING_ALTIVEC)
815 set_masks &= ~MASK_ALTIVEC;
818 /* Don't override these by the processor default if given explicitly. */
819 set_masks &= ~(target_flags_explicit
820 & (MASK_MULTIPLE | MASK_STRING | MASK_SOFT_FLOAT));
822 /* Identify the processor type. */
823 rs6000_select[0].string = default_cpu;
824 rs6000_cpu = TARGET_POWERPC64 ? PROCESSOR_DEFAULT64 : PROCESSOR_DEFAULT;
826 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
828 ptr = &rs6000_select[i];
829 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
831 for (j = 0; j < ptt_size; j++)
832 if (! strcmp (ptr->string, processor_target_table[j].name))
835 rs6000_cpu = processor_target_table[j].processor;
839 target_flags &= ~set_masks;
840 target_flags |= (processor_target_table[j].target_enable
847 error ("bad value (%s) for %s switch", ptr->string, ptr->name);
854 /* If we are optimizing big endian systems for space, use the load/store
855 multiple and string instructions. */
856 if (BYTES_BIG_ENDIAN && optimize_size)
857 target_flags |= ~target_flags_explicit & (MASK_MULTIPLE | MASK_STRING);
859 /* Don't allow -mmultiple or -mstring on little endian systems
860 unless the cpu is a 750, because the hardware doesn't support the
861 instructions used in little endian mode, and causes an alignment
862 trap. The 750 does not cause an alignment trap (except when the
863 target is unaligned). */
865 if (!BYTES_BIG_ENDIAN && rs6000_cpu != PROCESSOR_PPC750)
869 target_flags &= ~MASK_MULTIPLE;
870 if ((target_flags_explicit & MASK_MULTIPLE) != 0)
871 warning ("-mmultiple is not supported on little endian systems");
876 target_flags &= ~MASK_STRING;
877 if ((target_flags_explicit & MASK_STRING) != 0)
878 warning ("-mstring is not supported on little endian systems");
882 /* Set debug flags */
883 if (rs6000_debug_name)
885 if (! strcmp (rs6000_debug_name, "all"))
886 rs6000_debug_stack = rs6000_debug_arg = 1;
887 else if (! strcmp (rs6000_debug_name, "stack"))
888 rs6000_debug_stack = 1;
889 else if (! strcmp (rs6000_debug_name, "arg"))
890 rs6000_debug_arg = 1;
892 error ("unknown -mdebug-%s switch", rs6000_debug_name);
895 if (rs6000_traceback_name)
897 if (! strncmp (rs6000_traceback_name, "full", 4))
898 rs6000_traceback = traceback_full;
899 else if (! strncmp (rs6000_traceback_name, "part", 4))
900 rs6000_traceback = traceback_part;
901 else if (! strncmp (rs6000_traceback_name, "no", 2))
902 rs6000_traceback = traceback_none;
904 error ("unknown -mtraceback arg `%s'; expecting `full', `partial' or `none'",
905 rs6000_traceback_name);
908 /* Set size of long double */
909 rs6000_long_double_type_size = 64;
910 if (rs6000_long_double_size_string)
913 int size = strtol (rs6000_long_double_size_string, &tail, 10);
914 if (*tail != '\0' || (size != 64 && size != 128))
915 error ("Unknown switch -mlong-double-%s",
916 rs6000_long_double_size_string);
918 rs6000_long_double_type_size = size;
921 /* Set Altivec ABI as default for powerpc64 linux. */
922 if (TARGET_ELF && TARGET_64BIT)
924 rs6000_altivec_abi = 1;
925 rs6000_altivec_vrsave = 1;
928 /* Handle -mabi= options. */
929 rs6000_parse_abi_options ();
931 /* Handle -malign-XXXXX option. */
932 rs6000_parse_alignment_option ();
934 /* Handle generic -mFOO=YES/NO options. */
935 rs6000_parse_yes_no_option ("vrsave", rs6000_altivec_vrsave_string,
936 &rs6000_altivec_vrsave);
937 rs6000_parse_yes_no_option ("isel", rs6000_isel_string,
939 rs6000_parse_yes_no_option ("spe", rs6000_spe_string, &rs6000_spe);
940 rs6000_parse_yes_no_option ("float-gprs", rs6000_float_gprs_string,
943 /* Handle -mtls-size option. */
944 rs6000_parse_tls_size_option ();
946 #ifdef SUBTARGET_OVERRIDE_OPTIONS
947 SUBTARGET_OVERRIDE_OPTIONS;
949 #ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
950 SUBSUBTARGET_OVERRIDE_OPTIONS;
956 error ("AltiVec and E500 instructions cannot coexist");
958 /* The e500 does not have string instructions, and we set
959 MASK_STRING above when optimizing for size. */
960 if ((target_flags & MASK_STRING) != 0)
961 target_flags = target_flags & ~MASK_STRING;
963 /* No SPE means 64-bit long doubles, even if an E500. */
964 if (rs6000_spe_string != 0
965 && !strcmp (rs6000_spe_string, "no"))
966 rs6000_long_double_type_size = 64;
968 else if (rs6000_select[1].string != NULL)
970 /* For the powerpc-eabispe configuration, we set all these by
971 default, so let's unset them if we manually set another
972 CPU that is not the E500. */
973 if (rs6000_abi_string == 0)
975 if (rs6000_spe_string == 0)
977 if (rs6000_float_gprs_string == 0)
978 rs6000_float_gprs = 0;
979 if (rs6000_isel_string == 0)
981 if (rs6000_long_double_size_string == 0)
982 rs6000_long_double_type_size = 64;
985 rs6000_always_hint = (rs6000_cpu != PROCESSOR_POWER4
986 && rs6000_cpu != PROCESSOR_POWER5);
987 rs6000_sched_groups = (rs6000_cpu == PROCESSOR_POWER4
988 || rs6000_cpu == PROCESSOR_POWER5);
990 /* Handle -m(no-)longcall option. This is a bit of a cheap hack,
991 using TARGET_OPTIONS to handle a toggle switch, but we're out of
992 bits in target_flags so TARGET_SWITCHES cannot be used.
993 Assumption here is that rs6000_longcall_switch points into the
994 text of the complete option, rather than being a copy, so we can
995 scan back for the presence or absence of the no- modifier. */
996 if (rs6000_longcall_switch)
998 const char *base = rs6000_longcall_switch;
999 while (base[-1] != 'm') base--;
1001 if (*rs6000_longcall_switch != '\0')
1002 error ("invalid option `%s'", base);
1003 rs6000_default_long_calls = (base[0] != 'n');
1006 /* Handle -m(no-)warn-altivec-long similarly. */
1007 if (rs6000_warn_altivec_long_switch)
1009 const char *base = rs6000_warn_altivec_long_switch;
1010 while (base[-1] != 'm') base--;
1012 if (*rs6000_warn_altivec_long_switch != '\0')
1013 error ("invalid option `%s'", base);
1014 rs6000_warn_altivec_long = (base[0] != 'n');
1017 /* Handle -mprioritize-restricted-insns option. */
1018 rs6000_sched_restricted_insns_priority
1019 = (rs6000_sched_groups ? 1 : 0);
1020 if (rs6000_sched_restricted_insns_priority_str)
1021 rs6000_sched_restricted_insns_priority =
1022 atoi (rs6000_sched_restricted_insns_priority_str);
1024 /* Handle -msched-costly-dep option. */
1025 rs6000_sched_costly_dep
1026 = (rs6000_sched_groups ? store_to_load_dep_costly : no_dep_costly);
1027 if (rs6000_sched_costly_dep_str)
1029 if (! strcmp (rs6000_sched_costly_dep_str, "no"))
1030 rs6000_sched_costly_dep = no_dep_costly;
1031 else if (! strcmp (rs6000_sched_costly_dep_str, "all"))
1032 rs6000_sched_costly_dep = all_deps_costly;
1033 else if (! strcmp (rs6000_sched_costly_dep_str, "true_store_to_load"))
1034 rs6000_sched_costly_dep = true_store_to_load_dep_costly;
1035 else if (! strcmp (rs6000_sched_costly_dep_str, "store_to_load"))
1036 rs6000_sched_costly_dep = store_to_load_dep_costly;
1038 rs6000_sched_costly_dep = atoi (rs6000_sched_costly_dep_str);
1041 /* Handle -minsert-sched-nops option. */
1042 rs6000_sched_insert_nops
1043 = (rs6000_sched_groups ? sched_finish_regroup_exact : sched_finish_none);
1044 if (rs6000_sched_insert_nops_str)
1046 if (! strcmp (rs6000_sched_insert_nops_str, "no"))
1047 rs6000_sched_insert_nops = sched_finish_none;
1048 else if (! strcmp (rs6000_sched_insert_nops_str, "pad"))
1049 rs6000_sched_insert_nops = sched_finish_pad_groups;
1050 else if (! strcmp (rs6000_sched_insert_nops_str, "regroup_exact"))
1051 rs6000_sched_insert_nops = sched_finish_regroup_exact;
1053 rs6000_sched_insert_nops = atoi (rs6000_sched_insert_nops_str);
1056 #ifdef TARGET_REGNAMES
1057 /* If the user desires alternate register names, copy in the
1058 alternate names now. */
1059 if (TARGET_REGNAMES)
1060 memcpy (rs6000_reg_names, alt_reg_names, sizeof (rs6000_reg_names));
1063 /* Set TARGET_AIX_STRUCT_RET last, after the ABI is determined.
1064 If -maix-struct-return or -msvr4-struct-return was explicitly
1065 used, don't override with the ABI default. */
1066 if ((target_flags_explicit & MASK_AIX_STRUCT_RET) == 0)
1068 if (DEFAULT_ABI == ABI_V4 && !DRAFT_V4_STRUCT_RET)
1069 target_flags = (target_flags & ~MASK_AIX_STRUCT_RET);
1071 target_flags |= MASK_AIX_STRUCT_RET;
1074 if (TARGET_LONG_DOUBLE_128
1075 && (DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_DARWIN))
1076 REAL_MODE_FORMAT (TFmode) = &ibm_extended_format;
1078 /* Allocate an alias set for register saves & restores from stack. */
1079 rs6000_sr_alias_set = new_alias_set ();
1082 ASM_GENERATE_INTERNAL_LABEL (toc_label_name, "LCTOC", 1);
1084 /* We can only guarantee the availability of DI pseudo-ops when
1085 assembling for 64-bit targets. */
1088 targetm.asm_out.aligned_op.di = NULL;
1089 targetm.asm_out.unaligned_op.di = NULL;
1092 /* Set maximum branch target alignment at two instructions, eight bytes. */
1093 align_jumps_max_skip = 8;
1094 align_loops_max_skip = 8;
1096 /* Arrange to save and restore machine status around nested functions. */
1097 init_machine_status = rs6000_init_machine_status;
1099 /* We should always be splitting complex arguments, but we can't break
1100 Linux and Darwin ABIs at the moment. For now, only AIX is fixed. */
1101 if (DEFAULT_ABI != ABI_AIX)
1102 targetm.calls.split_complex_arg = NULL;
1105 /* Handle generic options of the form -mfoo=yes/no.
1106 NAME is the option name.
1107 VALUE is the option value.
1108 FLAG is the pointer to the flag where to store a 1 or 0, depending on
1109 whether the option value is 'yes' or 'no' respectively. */
1111 rs6000_parse_yes_no_option (const char *name, const char *value, int *flag)
1115 else if (!strcmp (value, "yes"))
1117 else if (!strcmp (value, "no"))
1120 error ("unknown -m%s= option specified: '%s'", name, value);
1123 /* Handle -mabi= options. */
1125 rs6000_parse_abi_options (void)
1127 if (rs6000_abi_string == 0)
1129 else if (! strcmp (rs6000_abi_string, "altivec"))
1131 rs6000_altivec_abi = 1;
1134 else if (! strcmp (rs6000_abi_string, "no-altivec"))
1135 rs6000_altivec_abi = 0;
1136 else if (! strcmp (rs6000_abi_string, "spe"))
1139 rs6000_altivec_abi = 0;
1140 if (!TARGET_SPE_ABI)
1141 error ("not configured for ABI: '%s'", rs6000_abi_string);
1144 else if (! strcmp (rs6000_abi_string, "no-spe"))
1147 error ("unknown ABI specified: '%s'", rs6000_abi_string);
1150 /* Handle -malign-XXXXXX options. */
1152 rs6000_parse_alignment_option (void)
1154 if (rs6000_alignment_string == 0)
1156 else if (! strcmp (rs6000_alignment_string, "power"))
1157 rs6000_alignment_flags = MASK_ALIGN_POWER;
1158 else if (! strcmp (rs6000_alignment_string, "natural"))
1159 rs6000_alignment_flags = MASK_ALIGN_NATURAL;
1161 error ("unknown -malign-XXXXX option specified: '%s'",
1162 rs6000_alignment_string);
1165 /* Validate and record the size specified with the -mtls-size option. */
1168 rs6000_parse_tls_size_option (void)
1170 if (rs6000_tls_size_string == 0)
1172 else if (strcmp (rs6000_tls_size_string, "16") == 0)
1173 rs6000_tls_size = 16;
1174 else if (strcmp (rs6000_tls_size_string, "32") == 0)
1175 rs6000_tls_size = 32;
1176 else if (strcmp (rs6000_tls_size_string, "64") == 0)
1177 rs6000_tls_size = 64;
1179 error ("bad value `%s' for -mtls-size switch", rs6000_tls_size_string);
1183 optimization_options (int level ATTRIBUTE_UNUSED, int size ATTRIBUTE_UNUSED)
1187 /* Do anything needed at the start of the asm file. */
1190 rs6000_file_start (void)
1194 const char *start = buffer;
1195 struct rs6000_cpu_select *ptr;
1196 const char *default_cpu = TARGET_CPU_DEFAULT;
1197 FILE *file = asm_out_file;
1199 default_file_start ();
1201 #ifdef TARGET_BI_ARCH
1202 if ((TARGET_DEFAULT ^ target_flags) & MASK_64BIT)
1206 if (flag_verbose_asm)
1208 sprintf (buffer, "\n%s rs6000/powerpc options:", ASM_COMMENT_START);
1209 rs6000_select[0].string = default_cpu;
1211 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
1213 ptr = &rs6000_select[i];
1214 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
1216 fprintf (file, "%s %s%s", start, ptr->name, ptr->string);
1221 #ifdef USING_ELFOS_H
1222 switch (rs6000_sdata)
1224 case SDATA_NONE: fprintf (file, "%s -msdata=none", start); start = ""; break;
1225 case SDATA_DATA: fprintf (file, "%s -msdata=data", start); start = ""; break;
1226 case SDATA_SYSV: fprintf (file, "%s -msdata=sysv", start); start = ""; break;
1227 case SDATA_EABI: fprintf (file, "%s -msdata=eabi", start); start = ""; break;
1230 if (rs6000_sdata && g_switch_value)
1232 fprintf (file, "%s -G " HOST_WIDE_INT_PRINT_UNSIGNED, start,
1243 /* Return nonzero if this function is known to have a null epilogue. */
1246 direct_return (void)
1248 if (reload_completed)
1250 rs6000_stack_t *info = rs6000_stack_info ();
1252 if (info->first_gp_reg_save == 32
1253 && info->first_fp_reg_save == 64
1254 && info->first_altivec_reg_save == LAST_ALTIVEC_REGNO + 1
1255 && ! info->lr_save_p
1256 && ! info->cr_save_p
1257 && info->vrsave_mask == 0
1265 /* Returns 1 always. */
1268 any_operand (rtx op ATTRIBUTE_UNUSED,
1269 enum machine_mode mode ATTRIBUTE_UNUSED)
1274 /* Returns 1 if op is the count register. */
1276 count_register_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1278 if (GET_CODE (op) != REG)
1281 if (REGNO (op) == COUNT_REGISTER_REGNUM)
1284 if (REGNO (op) > FIRST_PSEUDO_REGISTER)
1290 /* Returns 1 if op is an altivec register. */
1292 altivec_register_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1295 return (register_operand (op, mode)
1296 && (GET_CODE (op) != REG
1297 || REGNO (op) > FIRST_PSEUDO_REGISTER
1298 || ALTIVEC_REGNO_P (REGNO (op))));
1302 xer_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1304 if (GET_CODE (op) != REG)
1307 if (XER_REGNO_P (REGNO (op)))
1313 /* Return 1 if OP is a signed 8-bit constant. Int multiplication
1314 by such constants completes more quickly. */
1317 s8bit_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1319 return ( GET_CODE (op) == CONST_INT
1320 && (INTVAL (op) >= -128 && INTVAL (op) <= 127));
1323 /* Return 1 if OP is a constant that can fit in a D field. */
1326 short_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1328 return (GET_CODE (op) == CONST_INT
1329 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'I'));
1332 /* Similar for an unsigned D field. */
1335 u_short_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1337 return (GET_CODE (op) == CONST_INT
1338 && CONST_OK_FOR_LETTER_P (INTVAL (op) & GET_MODE_MASK (mode), 'K'));
1341 /* Return 1 if OP is a CONST_INT that cannot fit in a signed D field. */
1344 non_short_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1346 return (GET_CODE (op) == CONST_INT
1347 && (unsigned HOST_WIDE_INT) (INTVAL (op) + 0x8000) >= 0x10000);
1350 /* Returns 1 if OP is a CONST_INT that is a positive value
1351 and an exact power of 2. */
1354 exact_log2_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1356 return (GET_CODE (op) == CONST_INT
1358 && exact_log2 (INTVAL (op)) >= 0);
1361 /* Returns 1 if OP is a register that is not special (i.e., not MQ,
1365 gpc_reg_operand (rtx op, enum machine_mode mode)
1367 return (register_operand (op, mode)
1368 && (GET_CODE (op) != REG
1369 || (REGNO (op) >= ARG_POINTER_REGNUM
1370 && !XER_REGNO_P (REGNO (op)))
1371 || REGNO (op) < MQ_REGNO));
1374 /* Returns 1 if OP is either a pseudo-register or a register denoting a
1378 cc_reg_operand (rtx op, enum machine_mode mode)
1380 return (register_operand (op, mode)
1381 && (GET_CODE (op) != REG
1382 || REGNO (op) >= FIRST_PSEUDO_REGISTER
1383 || CR_REGNO_P (REGNO (op))));
1386 /* Returns 1 if OP is either a pseudo-register or a register denoting a
1387 CR field that isn't CR0. */
1390 cc_reg_not_cr0_operand (rtx op, enum machine_mode mode)
1392 return (register_operand (op, mode)
1393 && (GET_CODE (op) != REG
1394 || REGNO (op) >= FIRST_PSEUDO_REGISTER
1395 || CR_REGNO_NOT_CR0_P (REGNO (op))));
1398 /* Returns 1 if OP is either a constant integer valid for a D-field or
1399 a non-special register. If a register, it must be in the proper
1400 mode unless MODE is VOIDmode. */
1403 reg_or_short_operand (rtx op, enum machine_mode mode)
1405 return short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
1408 /* Similar, except check if the negation of the constant would be
1409 valid for a D-field. */
1412 reg_or_neg_short_operand (rtx op, enum machine_mode mode)
1414 if (GET_CODE (op) == CONST_INT)
1415 return CONST_OK_FOR_LETTER_P (INTVAL (op), 'P');
1417 return gpc_reg_operand (op, mode);
1420 /* Returns 1 if OP is either a constant integer valid for a DS-field or
1421 a non-special register. If a register, it must be in the proper
1422 mode unless MODE is VOIDmode. */
1425 reg_or_aligned_short_operand (rtx op, enum machine_mode mode)
1427 if (gpc_reg_operand (op, mode))
1429 else if (short_cint_operand (op, mode) && !(INTVAL (op) & 3))
1436 /* Return 1 if the operand is either a register or an integer whose
1437 high-order 16 bits are zero. */
1440 reg_or_u_short_operand (rtx op, enum machine_mode mode)
1442 return u_short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
1445 /* Return 1 is the operand is either a non-special register or ANY
1446 constant integer. */
1449 reg_or_cint_operand (rtx op, enum machine_mode mode)
1451 return (GET_CODE (op) == CONST_INT || gpc_reg_operand (op, mode));
1454 /* Return 1 is the operand is either a non-special register or ANY
1455 32-bit signed constant integer. */
1458 reg_or_arith_cint_operand (rtx op, enum machine_mode mode)
1460 return (gpc_reg_operand (op, mode)
1461 || (GET_CODE (op) == CONST_INT
1462 #if HOST_BITS_PER_WIDE_INT != 32
1463 && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80000000)
1464 < (unsigned HOST_WIDE_INT) 0x100000000ll)
1469 /* Return 1 is the operand is either a non-special register or a 32-bit
1470 signed constant integer valid for 64-bit addition. */
1473 reg_or_add_cint64_operand (rtx op, enum machine_mode mode)
1475 return (gpc_reg_operand (op, mode)
1476 || (GET_CODE (op) == CONST_INT
1477 #if HOST_BITS_PER_WIDE_INT == 32
1478 && INTVAL (op) < 0x7fff8000
1480 && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80008000)
1486 /* Return 1 is the operand is either a non-special register or a 32-bit
1487 signed constant integer valid for 64-bit subtraction. */
1490 reg_or_sub_cint64_operand (rtx op, enum machine_mode mode)
1492 return (gpc_reg_operand (op, mode)
1493 || (GET_CODE (op) == CONST_INT
1494 #if HOST_BITS_PER_WIDE_INT == 32
1495 && (- INTVAL (op)) < 0x7fff8000
1497 && ((unsigned HOST_WIDE_INT) ((- INTVAL (op)) + 0x80008000)
1503 /* Return 1 is the operand is either a non-special register or ANY
1504 32-bit unsigned constant integer. */
1507 reg_or_logical_cint_operand (rtx op, enum machine_mode mode)
1509 if (GET_CODE (op) == CONST_INT)
1511 if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
1513 if (GET_MODE_BITSIZE (mode) <= 32)
1516 if (INTVAL (op) < 0)
1520 return ((INTVAL (op) & GET_MODE_MASK (mode)
1521 & (~ (unsigned HOST_WIDE_INT) 0xffffffff)) == 0);
1523 else if (GET_CODE (op) == CONST_DOUBLE)
1525 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
1529 return CONST_DOUBLE_HIGH (op) == 0;
1532 return gpc_reg_operand (op, mode);
1535 /* Return 1 if the operand is an operand that can be loaded via the GOT. */
1538 got_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1540 return (GET_CODE (op) == SYMBOL_REF
1541 || GET_CODE (op) == CONST
1542 || GET_CODE (op) == LABEL_REF);
1545 /* Return 1 if the operand is a simple references that can be loaded via
1546 the GOT (labels involving addition aren't allowed). */
1549 got_no_const_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1551 return (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF);
1554 /* Return the number of instructions it takes to form a constant in an
1555 integer register. */
1558 num_insns_constant_wide (HOST_WIDE_INT value)
1560 /* signed constant loadable with {cal|addi} */
1561 if (CONST_OK_FOR_LETTER_P (value, 'I'))
1564 /* constant loadable with {cau|addis} */
1565 else if (CONST_OK_FOR_LETTER_P (value, 'L'))
1568 #if HOST_BITS_PER_WIDE_INT == 64
1569 else if (TARGET_POWERPC64)
1571 HOST_WIDE_INT low = ((value & 0xffffffff) ^ 0x80000000) - 0x80000000;
1572 HOST_WIDE_INT high = value >> 31;
1574 if (high == 0 || high == -1)
1580 return num_insns_constant_wide (high) + 1;
1582 return (num_insns_constant_wide (high)
1583 + num_insns_constant_wide (low) + 1);
1592 num_insns_constant (rtx op, enum machine_mode mode)
1594 if (GET_CODE (op) == CONST_INT)
1596 #if HOST_BITS_PER_WIDE_INT == 64
1597 if ((INTVAL (op) >> 31) != 0 && (INTVAL (op) >> 31) != -1
1598 && mask64_operand (op, mode))
1602 return num_insns_constant_wide (INTVAL (op));
1605 else if (GET_CODE (op) == CONST_DOUBLE && mode == SFmode)
1610 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1611 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
1612 return num_insns_constant_wide ((HOST_WIDE_INT) l);
1615 else if (GET_CODE (op) == CONST_DOUBLE)
1621 int endian = (WORDS_BIG_ENDIAN == 0);
1623 if (mode == VOIDmode || mode == DImode)
1625 high = CONST_DOUBLE_HIGH (op);
1626 low = CONST_DOUBLE_LOW (op);
1630 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1631 REAL_VALUE_TO_TARGET_DOUBLE (rv, l);
1633 low = l[1 - endian];
1637 return (num_insns_constant_wide (low)
1638 + num_insns_constant_wide (high));
1642 if (high == 0 && low >= 0)
1643 return num_insns_constant_wide (low);
1645 else if (high == -1 && low < 0)
1646 return num_insns_constant_wide (low);
1648 else if (mask64_operand (op, mode))
1652 return num_insns_constant_wide (high) + 1;
1655 return (num_insns_constant_wide (high)
1656 + num_insns_constant_wide (low) + 1);
1664 /* Return 1 if the operand is a CONST_DOUBLE and it can be put into a
1665 register with one instruction per word. We only do this if we can
1666 safely read CONST_DOUBLE_{LOW,HIGH}. */
1669 easy_fp_constant (rtx op, enum machine_mode mode)
1671 if (GET_CODE (op) != CONST_DOUBLE
1672 || GET_MODE (op) != mode
1673 || (GET_MODE_CLASS (mode) != MODE_FLOAT && mode != DImode))
1676 /* Consider all constants with -msoft-float to be easy. */
1677 if ((TARGET_SOFT_FLOAT || !TARGET_FPRS)
1681 /* If we are using V.4 style PIC, consider all constants to be hard. */
1682 if (flag_pic && DEFAULT_ABI == ABI_V4)
1685 #ifdef TARGET_RELOCATABLE
1686 /* Similarly if we are using -mrelocatable, consider all constants
1688 if (TARGET_RELOCATABLE)
1697 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1698 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);
1700 return (num_insns_constant_wide ((HOST_WIDE_INT) k[0]) == 1
1701 && num_insns_constant_wide ((HOST_WIDE_INT) k[1]) == 1
1702 && num_insns_constant_wide ((HOST_WIDE_INT) k[2]) == 1
1703 && num_insns_constant_wide ((HOST_WIDE_INT) k[3]) == 1);
1706 else if (mode == DFmode)
1711 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1712 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
1714 return (num_insns_constant_wide ((HOST_WIDE_INT) k[0]) == 1
1715 && num_insns_constant_wide ((HOST_WIDE_INT) k[1]) == 1);
1718 else if (mode == SFmode)
1723 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1724 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
1726 return num_insns_constant_wide (l) == 1;
1729 else if (mode == DImode)
1730 return ((TARGET_POWERPC64
1731 && GET_CODE (op) == CONST_DOUBLE && CONST_DOUBLE_LOW (op) == 0)
1732 || (num_insns_constant (op, DImode) <= 2));
1734 else if (mode == SImode)
1740 /* Returns the constant for the splat instruction, if exists. */
1743 easy_vector_splat_const (int cst, enum machine_mode mode)
1748 if (EASY_VECTOR_15 (cst)
1749 || EASY_VECTOR_15_ADD_SELF (cst))
1751 if ((cst & 0xffff) != ((cst >> 16) & 0xffff))
1755 if (EASY_VECTOR_15 (cst)
1756 || EASY_VECTOR_15_ADD_SELF (cst))
1758 if ((cst & 0xff) != ((cst >> 8) & 0xff))
1762 if (EASY_VECTOR_15 (cst)
1763 || EASY_VECTOR_15_ADD_SELF (cst))
1772 /* Return nonzero if all elements of a vector have the same value. */
1775 easy_vector_same (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1779 units = CONST_VECTOR_NUNITS (op);
1781 cst = INTVAL (CONST_VECTOR_ELT (op, 0));
1782 for (i = 1; i < units; ++i)
1783 if (INTVAL (CONST_VECTOR_ELT (op, i)) != cst)
1785 if (i == units && easy_vector_splat_const (cst, mode))
1790 /* Return 1 if the operand is a CONST_INT and can be put into a
1791 register without using memory. */
1794 easy_vector_constant (rtx op, enum machine_mode mode)
1798 if (GET_CODE (op) != CONST_VECTOR
1803 if (zero_constant (op, mode)
1804 && ((TARGET_ALTIVEC && ALTIVEC_VECTOR_MODE (mode))
1805 || (TARGET_SPE && SPE_VECTOR_MODE (mode))))
1808 if (GET_MODE_CLASS (mode) != MODE_VECTOR_INT)
1811 if (TARGET_SPE && mode == V1DImode)
1814 cst = INTVAL (CONST_VECTOR_ELT (op, 0));
1815 cst2 = INTVAL (CONST_VECTOR_ELT (op, 1));
1817 /* Limit SPE vectors to 15 bits signed. These we can generate with:
1819 evmergelo r0, r0, r0
1822 I don't know how efficient it would be to allow bigger constants,
1823 considering we'll have an extra 'ori' for every 'li'. I doubt 5
1824 instructions is better than a 64-bit memory load, but I don't
1825 have the e500 timing specs. */
1826 if (TARGET_SPE && mode == V2SImode
1827 && cst >= -0x7fff && cst <= 0x7fff
1828 && cst2 >= -0x7fff && cst2 <= 0x7fff)
1832 && easy_vector_same (op, mode))
1834 cst = easy_vector_splat_const (cst, mode);
1835 if (EASY_VECTOR_15_ADD_SELF (cst)
1836 || EASY_VECTOR_15 (cst))
1842 /* Same as easy_vector_constant but only for EASY_VECTOR_15_ADD_SELF. */
1845 easy_vector_constant_add_self (rtx op, enum machine_mode mode)
1849 && GET_CODE (op) == CONST_VECTOR
1850 && easy_vector_same (op, mode))
1852 cst = easy_vector_splat_const (INTVAL (CONST_VECTOR_ELT (op, 0)), mode);
1853 if (EASY_VECTOR_15_ADD_SELF (cst))
1859 /* Generate easy_vector_constant out of a easy_vector_constant_add_self. */
1862 gen_easy_vector_constant_add_self (rtx op)
1866 units = GET_MODE_NUNITS (GET_MODE (op));
1867 v = rtvec_alloc (units);
1869 for (i = 0; i < units; i++)
1871 GEN_INT (INTVAL (CONST_VECTOR_ELT (op, i)) >> 1);
1872 return gen_rtx_raw_CONST_VECTOR (GET_MODE (op), v);
1876 output_vec_const_move (rtx *operands)
1879 enum machine_mode mode;
1885 cst = INTVAL (CONST_VECTOR_ELT (vec, 0));
1886 cst2 = INTVAL (CONST_VECTOR_ELT (vec, 1));
1887 mode = GET_MODE (dest);
1891 if (zero_constant (vec, mode))
1892 return "vxor %0,%0,%0";
1893 else if (easy_vector_constant (vec, mode))
1895 operands[1] = GEN_INT (cst);
1899 if (EASY_VECTOR_15 (cst))
1901 operands[1] = GEN_INT (cst);
1902 return "vspltisw %0,%1";
1904 else if (EASY_VECTOR_15_ADD_SELF (cst))
1908 if (EASY_VECTOR_15 (cst))
1910 operands[1] = GEN_INT (cst);
1911 return "vspltish %0,%1";
1913 else if (EASY_VECTOR_15_ADD_SELF (cst))
1917 if (EASY_VECTOR_15 (cst))
1919 operands[1] = GEN_INT (cst);
1920 return "vspltisb %0,%1";
1922 else if (EASY_VECTOR_15_ADD_SELF (cst))
1934 /* Vector constant 0 is handled as a splitter of V2SI, and in the
1935 pattern of V1DI, V4HI, and V2SF.
1937 FIXME: We should probably return # and add post reload
1938 splitters for these, but this way is so easy ;-).
1940 operands[1] = GEN_INT (cst);
1941 operands[2] = GEN_INT (cst2);
1943 return "li %0,%1\n\tevmergelo %0,%0,%0";
1945 return "li %0,%1\n\tevmergelo %0,%0,%0\n\tli %0,%2";
1951 /* Return 1 if the operand is the constant 0. This works for scalars
1952 as well as vectors. */
1954 zero_constant (rtx op, enum machine_mode mode)
1956 return op == CONST0_RTX (mode);
1959 /* Return 1 if the operand is 0.0. */
1961 zero_fp_constant (rtx op, enum machine_mode mode)
1963 return GET_MODE_CLASS (mode) == MODE_FLOAT && op == CONST0_RTX (mode);
1966 /* Return 1 if the operand is in volatile memory. Note that during
1967 the RTL generation phase, memory_operand does not return TRUE for
1968 volatile memory references. So this function allows us to
1969 recognize volatile references where its safe. */
1972 volatile_mem_operand (rtx op, enum machine_mode mode)
1974 if (GET_CODE (op) != MEM)
1977 if (!MEM_VOLATILE_P (op))
1980 if (mode != GET_MODE (op))
1983 if (reload_completed)
1984 return memory_operand (op, mode);
1986 if (reload_in_progress)
1987 return strict_memory_address_p (mode, XEXP (op, 0));
1989 return memory_address_p (mode, XEXP (op, 0));
1992 /* Return 1 if the operand is an offsettable memory operand. */
1995 offsettable_mem_operand (rtx op, enum machine_mode mode)
1997 return ((GET_CODE (op) == MEM)
1998 && offsettable_address_p (reload_completed || reload_in_progress,
1999 mode, XEXP (op, 0)));
2002 /* Return 1 if the operand is either an easy FP constant (see above) or
2006 mem_or_easy_const_operand (rtx op, enum machine_mode mode)
2008 return memory_operand (op, mode) || easy_fp_constant (op, mode);
2011 /* Return 1 if the operand is either a non-special register or an item
2012 that can be used as the operand of a `mode' add insn. */
2015 add_operand (rtx op, enum machine_mode mode)
2017 if (GET_CODE (op) == CONST_INT)
2018 return (CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
2019 || CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
2021 return gpc_reg_operand (op, mode);
2024 /* Return 1 if OP is a constant but not a valid add_operand. */
2027 non_add_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2029 return (GET_CODE (op) == CONST_INT
2030 && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
2031 && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
2034 /* Return 1 if the operand is a non-special register or a constant that
2035 can be used as the operand of an OR or XOR insn on the RS/6000. */
2038 logical_operand (rtx op, enum machine_mode mode)
2040 HOST_WIDE_INT opl, oph;
2042 if (gpc_reg_operand (op, mode))
2045 if (GET_CODE (op) == CONST_INT)
2047 opl = INTVAL (op) & GET_MODE_MASK (mode);
2049 #if HOST_BITS_PER_WIDE_INT <= 32
2050 if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT && opl < 0)
2054 else if (GET_CODE (op) == CONST_DOUBLE)
2056 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
2059 opl = CONST_DOUBLE_LOW (op);
2060 oph = CONST_DOUBLE_HIGH (op);
2067 return ((opl & ~ (unsigned HOST_WIDE_INT) 0xffff) == 0
2068 || (opl & ~ (unsigned HOST_WIDE_INT) 0xffff0000) == 0);
2071 /* Return 1 if C is a constant that is not a logical operand (as
2072 above), but could be split into one. */
2075 non_logical_cint_operand (rtx op, enum machine_mode mode)
2077 return ((GET_CODE (op) == CONST_INT || GET_CODE (op) == CONST_DOUBLE)
2078 && ! logical_operand (op, mode)
2079 && reg_or_logical_cint_operand (op, mode));
2082 /* Return 1 if C is a constant that can be encoded in a 32-bit mask on the
2083 RS/6000. It is if there are no more than two 1->0 or 0->1 transitions.
2084 Reject all ones and all zeros, since these should have been optimized
2085 away and confuse the making of MB and ME. */
2088 mask_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2090 HOST_WIDE_INT c, lsb;
2092 if (GET_CODE (op) != CONST_INT)
2097 /* Fail in 64-bit mode if the mask wraps around because the upper
2098 32-bits of the mask will all be 1s, contrary to GCC's internal view. */
2099 if (TARGET_POWERPC64 && (c & 0x80000001) == 0x80000001)
2102 /* We don't change the number of transitions by inverting,
2103 so make sure we start with the LS bit zero. */
2107 /* Reject all zeros or all ones. */
2111 /* Find the first transition. */
2114 /* Invert to look for a second transition. */
2117 /* Erase first transition. */
2120 /* Find the second transition (if any). */
2123 /* Match if all the bits above are 1's (or c is zero). */
2127 /* Return 1 for the PowerPC64 rlwinm corner case. */
2130 mask_operand_wrap (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2132 HOST_WIDE_INT c, lsb;
2134 if (GET_CODE (op) != CONST_INT)
2139 if ((c & 0x80000001) != 0x80000001)
2153 /* Return 1 if the operand is a constant that is a PowerPC64 mask.
2154 It is if there are no more than one 1->0 or 0->1 transitions.
2155 Reject all zeros, since zero should have been optimized away and
2156 confuses the making of MB and ME. */
2159 mask64_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2161 if (GET_CODE (op) == CONST_INT)
2163 HOST_WIDE_INT c, lsb;
2167 /* Reject all zeros. */
2171 /* We don't change the number of transitions by inverting,
2172 so make sure we start with the LS bit zero. */
2176 /* Find the transition, and check that all bits above are 1's. */
2179 /* Match if all the bits above are 1's (or c is zero). */
2185 /* Like mask64_operand, but allow up to three transitions. This
2186 predicate is used by insn patterns that generate two rldicl or
2187 rldicr machine insns. */
2190 mask64_2_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2192 if (GET_CODE (op) == CONST_INT)
2194 HOST_WIDE_INT c, lsb;
2198 /* Disallow all zeros. */
2202 /* We don't change the number of transitions by inverting,
2203 so make sure we start with the LS bit zero. */
2207 /* Find the first transition. */
2210 /* Invert to look for a second transition. */
2213 /* Erase first transition. */
2216 /* Find the second transition. */
2219 /* Invert to look for a third transition. */
2222 /* Erase second transition. */
2225 /* Find the third transition (if any). */
2228 /* Match if all the bits above are 1's (or c is zero). */
2234 /* Generates shifts and masks for a pair of rldicl or rldicr insns to
2235 implement ANDing by the mask IN. */
2237 build_mask64_2_operands (rtx in, rtx *out)
2239 #if HOST_BITS_PER_WIDE_INT >= 64
2240 unsigned HOST_WIDE_INT c, lsb, m1, m2;
2243 if (GET_CODE (in) != CONST_INT)
2249 /* Assume c initially something like 0x00fff000000fffff. The idea
2250 is to rotate the word so that the middle ^^^^^^ group of zeros
2251 is at the MS end and can be cleared with an rldicl mask. We then
2252 rotate back and clear off the MS ^^ group of zeros with a
2254 c = ~c; /* c == 0xff000ffffff00000 */
2255 lsb = c & -c; /* lsb == 0x0000000000100000 */
2256 m1 = -lsb; /* m1 == 0xfffffffffff00000 */
2257 c = ~c; /* c == 0x00fff000000fffff */
2258 c &= -lsb; /* c == 0x00fff00000000000 */
2259 lsb = c & -c; /* lsb == 0x0000100000000000 */
2260 c = ~c; /* c == 0xff000fffffffffff */
2261 c &= -lsb; /* c == 0xff00000000000000 */
2263 while ((lsb >>= 1) != 0)
2264 shift++; /* shift == 44 on exit from loop */
2265 m1 <<= 64 - shift; /* m1 == 0xffffff0000000000 */
2266 m1 = ~m1; /* m1 == 0x000000ffffffffff */
2267 m2 = ~c; /* m2 == 0x00ffffffffffffff */
2271 /* Assume c initially something like 0xff000f0000000000. The idea
2272 is to rotate the word so that the ^^^ middle group of zeros
2273 is at the LS end and can be cleared with an rldicr mask. We then
2274 rotate back and clear off the LS group of ^^^^^^^^^^ zeros with
2276 lsb = c & -c; /* lsb == 0x0000010000000000 */
2277 m2 = -lsb; /* m2 == 0xffffff0000000000 */
2278 c = ~c; /* c == 0x00fff0ffffffffff */
2279 c &= -lsb; /* c == 0x00fff00000000000 */
2280 lsb = c & -c; /* lsb == 0x0000100000000000 */
2281 c = ~c; /* c == 0xff000fffffffffff */
2282 c &= -lsb; /* c == 0xff00000000000000 */
2284 while ((lsb >>= 1) != 0)
2285 shift++; /* shift == 44 on exit from loop */
2286 m1 = ~c; /* m1 == 0x00ffffffffffffff */
2287 m1 >>= shift; /* m1 == 0x0000000000000fff */
2288 m1 = ~m1; /* m1 == 0xfffffffffffff000 */
2291 /* Note that when we only have two 0->1 and 1->0 transitions, one of the
2292 masks will be all 1's. We are guaranteed more than one transition. */
2293 out[0] = GEN_INT (64 - shift);
2294 out[1] = GEN_INT (m1);
2295 out[2] = GEN_INT (shift);
2296 out[3] = GEN_INT (m2);
2304 /* Return 1 if the operand is either a non-special register or a constant
2305 that can be used as the operand of a PowerPC64 logical AND insn. */
2308 and64_operand (rtx op, enum machine_mode mode)
2310 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
2311 return (gpc_reg_operand (op, mode) || mask64_operand (op, mode));
2313 return (logical_operand (op, mode) || mask64_operand (op, mode));
2316 /* Like the above, but also match constants that can be implemented
2317 with two rldicl or rldicr insns. */
2320 and64_2_operand (rtx op, enum machine_mode mode)
2322 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
2323 return gpc_reg_operand (op, mode) || mask64_2_operand (op, mode);
2325 return logical_operand (op, mode) || mask64_2_operand (op, mode);
2328 /* Return 1 if the operand is either a non-special register or a
2329 constant that can be used as the operand of an RS/6000 logical AND insn. */
2332 and_operand (rtx op, enum machine_mode mode)
2334 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
2335 return (gpc_reg_operand (op, mode) || mask_operand (op, mode));
2337 return (logical_operand (op, mode) || mask_operand (op, mode));
2340 /* Return 1 if the operand is a general register or memory operand. */
2343 reg_or_mem_operand (rtx op, enum machine_mode mode)
2345 return (gpc_reg_operand (op, mode)
2346 || memory_operand (op, mode)
2347 || macho_lo_sum_memory_operand (op, mode)
2348 || volatile_mem_operand (op, mode));
2351 /* Return 1 if the operand is a general register or memory operand without
2352 pre_inc or pre_dec which produces invalid form of PowerPC lwa
2356 lwa_operand (rtx op, enum machine_mode mode)
2360 if (reload_completed && GET_CODE (inner) == SUBREG)
2361 inner = SUBREG_REG (inner);
2363 return gpc_reg_operand (inner, mode)
2364 || (memory_operand (inner, mode)
2365 && GET_CODE (XEXP (inner, 0)) != PRE_INC
2366 && GET_CODE (XEXP (inner, 0)) != PRE_DEC
2367 && (GET_CODE (XEXP (inner, 0)) != PLUS
2368 || GET_CODE (XEXP (XEXP (inner, 0), 1)) != CONST_INT
2369 || INTVAL (XEXP (XEXP (inner, 0), 1)) % 4 == 0));
2372 /* Return 1 if the operand, used inside a MEM, is a SYMBOL_REF. */
2375 symbol_ref_operand (rtx op, enum machine_mode mode)
2377 if (mode != VOIDmode && GET_MODE (op) != mode)
2380 return (GET_CODE (op) == SYMBOL_REF
2381 && (DEFAULT_ABI != ABI_AIX || SYMBOL_REF_FUNCTION_P (op)));
2384 /* Return 1 if the operand, used inside a MEM, is a valid first argument
2385 to CALL. This is a SYMBOL_REF, a pseudo-register, LR or CTR. */
2388 call_operand (rtx op, enum machine_mode mode)
2390 if (mode != VOIDmode && GET_MODE (op) != mode)
2393 return (GET_CODE (op) == SYMBOL_REF
2394 || (GET_CODE (op) == REG
2395 && (REGNO (op) == LINK_REGISTER_REGNUM
2396 || REGNO (op) == COUNT_REGISTER_REGNUM
2397 || REGNO (op) >= FIRST_PSEUDO_REGISTER)));
2400 /* Return 1 if the operand is a SYMBOL_REF for a function known to be in
2404 current_file_function_operand (rtx op,
2405 enum machine_mode mode ATTRIBUTE_UNUSED)
2407 return (GET_CODE (op) == SYMBOL_REF
2408 && (DEFAULT_ABI != ABI_AIX || SYMBOL_REF_FUNCTION_P (op))
2409 && (SYMBOL_REF_LOCAL_P (op)
2410 || (op == XEXP (DECL_RTL (current_function_decl), 0))));
2413 /* Return 1 if this operand is a valid input for a move insn. */
2416 input_operand (rtx op, enum machine_mode mode)
2418 /* Memory is always valid. */
2419 if (memory_operand (op, mode))
2422 /* Only a tiny bit of handling for CONSTANT_P_RTX is necessary. */
2423 if (GET_CODE (op) == CONSTANT_P_RTX)
2426 /* For floating-point, easy constants are valid. */
2427 if (GET_MODE_CLASS (mode) == MODE_FLOAT
2429 && easy_fp_constant (op, mode))
2432 /* Allow any integer constant. */
2433 if (GET_MODE_CLASS (mode) == MODE_INT
2434 && (GET_CODE (op) == CONST_INT
2435 || GET_CODE (op) == CONST_DOUBLE))
2438 /* Allow easy vector constants. */
2439 if (GET_CODE (op) == CONST_VECTOR
2440 && easy_vector_constant (op, mode))
2443 /* For floating-point or multi-word mode, the only remaining valid type
2445 if (GET_MODE_CLASS (mode) == MODE_FLOAT
2446 || GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2447 return register_operand (op, mode);
2449 /* The only cases left are integral modes one word or smaller (we
2450 do not get called for MODE_CC values). These can be in any
2452 if (register_operand (op, mode))
2455 /* A SYMBOL_REF referring to the TOC is valid. */
2456 if (legitimate_constant_pool_address_p (op))
2459 /* A constant pool expression (relative to the TOC) is valid */
2460 if (toc_relative_expr_p (op))
2463 /* V.4 allows SYMBOL_REFs and CONSTs that are in the small data region
2465 if (DEFAULT_ABI == ABI_V4
2466 && (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == CONST)
2467 && small_data_operand (op, Pmode))
2474 /* Darwin, AIX increases natural record alignment to doubleword if the first
2475 field is an FP double while the FP fields remain word aligned. */
2478 rs6000_special_round_type_align (tree type, int computed, int specified)
2480 tree field = TYPE_FIELDS (type);
2482 /* Skip all the static variables only if ABI is greater than
2484 while (field != NULL && TREE_CODE (field) == VAR_DECL)
2485 field = TREE_CHAIN (field);
2487 if (field == NULL || field == type || DECL_MODE (field) != DFmode)
2488 return MAX (computed, specified);
2490 return MAX (MAX (computed, specified), 64);
2493 /* Return 1 for an operand in small memory on V.4/eabi. */
2496 small_data_operand (rtx op ATTRIBUTE_UNUSED,
2497 enum machine_mode mode ATTRIBUTE_UNUSED)
2502 if (rs6000_sdata == SDATA_NONE || rs6000_sdata == SDATA_DATA)
2505 if (DEFAULT_ABI != ABI_V4)
2508 if (GET_CODE (op) == SYMBOL_REF)
2511 else if (GET_CODE (op) != CONST
2512 || GET_CODE (XEXP (op, 0)) != PLUS
2513 || GET_CODE (XEXP (XEXP (op, 0), 0)) != SYMBOL_REF
2514 || GET_CODE (XEXP (XEXP (op, 0), 1)) != CONST_INT)
2519 rtx sum = XEXP (op, 0);
2520 HOST_WIDE_INT summand;
2522 /* We have to be careful here, because it is the referenced address
2523 that must be 32k from _SDA_BASE_, not just the symbol. */
2524 summand = INTVAL (XEXP (sum, 1));
2525 if (summand < 0 || (unsigned HOST_WIDE_INT) summand > g_switch_value)
2528 sym_ref = XEXP (sum, 0);
2531 return SYMBOL_REF_SMALL_P (sym_ref);
2537 /* Return true, if operand is a memory operand and has a
2538 displacement divisible by 4. */
2541 word_offset_memref_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2546 if (!memory_operand (op, mode))
2549 addr = XEXP (op, 0);
2550 if (GET_CODE (addr) == PLUS
2551 && GET_CODE (XEXP (addr, 0)) == REG
2552 && GET_CODE (XEXP (addr, 1)) == CONST_INT)
2553 off = INTVAL (XEXP (addr, 1));
2555 return (off % 4) == 0;
2558 /* Return true if either operand is a general purpose register. */
2561 gpr_or_gpr_p (rtx op0, rtx op1)
2563 return ((REG_P (op0) && INT_REGNO_P (REGNO (op0)))
2564 || (REG_P (op1) && INT_REGNO_P (REGNO (op1))));
2568 /* Subroutines of rs6000_legitimize_address and rs6000_legitimate_address. */
2571 constant_pool_expr_1 (rtx op, int *have_sym, int *have_toc)
2573 switch (GET_CODE(op))
2576 if (RS6000_SYMBOL_REF_TLS_P (op))
2578 else if (CONSTANT_POOL_ADDRESS_P (op))
2580 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (op), Pmode))
2588 else if (! strcmp (XSTR (op, 0), toc_label_name))
2597 return (constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc)
2598 && constant_pool_expr_1 (XEXP (op, 1), have_sym, have_toc));
2600 return constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc);
2609 constant_pool_expr_p (rtx op)
2613 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_sym;
2617 toc_relative_expr_p (rtx op)
2621 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_toc;
2624 /* SPE offset addressing is limited to 5-bits worth of double words. */
2625 #define SPE_CONST_OFFSET_OK(x) (((x) & ~0xf8) == 0)
2628 legitimate_constant_pool_address_p (rtx x)
2631 && GET_CODE (x) == PLUS
2632 && GET_CODE (XEXP (x, 0)) == REG
2633 && (TARGET_MINIMAL_TOC || REGNO (XEXP (x, 0)) == TOC_REGISTER)
2634 && constant_pool_expr_p (XEXP (x, 1)));
2638 legitimate_small_data_p (enum machine_mode mode, rtx x)
2640 return (DEFAULT_ABI == ABI_V4
2641 && !flag_pic && !TARGET_TOC
2642 && (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST)
2643 && small_data_operand (x, mode));
2647 legitimate_offset_address_p (enum machine_mode mode, rtx x, int strict)
2649 unsigned HOST_WIDE_INT offset, extra;
2651 if (GET_CODE (x) != PLUS)
2653 if (GET_CODE (XEXP (x, 0)) != REG)
2655 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
2657 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
2660 offset = INTVAL (XEXP (x, 1));
2668 /* AltiVec vector modes. Only reg+reg addressing is valid here,
2669 which leaves the only valid constant offset of zero, which by
2670 canonicalization rules is also invalid. */
2677 /* SPE vector modes. */
2678 return SPE_CONST_OFFSET_OK (offset);
2682 if (mode == DFmode || !TARGET_POWERPC64)
2684 else if (offset & 3)
2690 if (mode == TFmode || !TARGET_POWERPC64)
2692 else if (offset & 3)
2703 return (offset < 0x10000) && (offset + extra < 0x10000);
2707 legitimate_indexed_address_p (rtx x, int strict)
2711 if (GET_CODE (x) != PLUS)
2716 if (!REG_P (op0) || !REG_P (op1))
2719 return ((INT_REG_OK_FOR_BASE_P (op0, strict)
2720 && INT_REG_OK_FOR_INDEX_P (op1, strict))
2721 || (INT_REG_OK_FOR_BASE_P (op1, strict)
2722 && INT_REG_OK_FOR_INDEX_P (op0, strict)));
2726 legitimate_indirect_address_p (rtx x, int strict)
2728 return GET_CODE (x) == REG && INT_REG_OK_FOR_BASE_P (x, strict);
2732 macho_lo_sum_memory_operand (rtx x, enum machine_mode mode)
2734 if (!TARGET_MACHO || !flag_pic
2735 || mode != SImode || GET_CODE(x) != MEM)
2739 if (GET_CODE (x) != LO_SUM)
2741 if (GET_CODE (XEXP (x, 0)) != REG)
2743 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), 0))
2747 return CONSTANT_P (x);
2751 legitimate_lo_sum_address_p (enum machine_mode mode, rtx x, int strict)
2753 if (GET_CODE (x) != LO_SUM)
2755 if (GET_CODE (XEXP (x, 0)) != REG)
2757 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
2761 if (TARGET_ELF || TARGET_MACHO)
2763 if (DEFAULT_ABI != ABI_AIX && DEFAULT_ABI != ABI_DARWIN && flag_pic)
2767 if (GET_MODE_NUNITS (mode) != 1)
2769 if (GET_MODE_BITSIZE (mode) > 32
2770 && !(TARGET_HARD_FLOAT && TARGET_FPRS && mode == DFmode))
2773 return CONSTANT_P (x);
2780 /* Try machine-dependent ways of modifying an illegitimate address
2781 to be legitimate. If we find one, return the new, valid address.
2782 This is used from only one place: `memory_address' in explow.c.
2784 OLDX is the address as it was before break_out_memory_refs was
2785 called. In some cases it is useful to look at this to decide what
2788 MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.
2790 It is always safe for this function to do nothing. It exists to
2791 recognize opportunities to optimize the output.
2793 On RS/6000, first check for the sum of a register with a constant
2794 integer that is out of range. If so, generate code to add the
2795 constant with the low-order 16 bits masked to the register and force
2796 this result into another register (this can be done with `cau').
2797 Then generate an address of REG+(CONST&0xffff), allowing for the
2798 possibility of bit 16 being a one.
2800 Then check for the sum of a register and something not constant, try to
2801 load the other things into a register and return the sum. */
2804 rs6000_legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
2805 enum machine_mode mode)
2807 if (GET_CODE (x) == SYMBOL_REF)
2809 enum tls_model model = SYMBOL_REF_TLS_MODEL (x);
2811 return rs6000_legitimize_tls_address (x, model);
2814 if (GET_CODE (x) == PLUS
2815 && GET_CODE (XEXP (x, 0)) == REG
2816 && GET_CODE (XEXP (x, 1)) == CONST_INT
2817 && (unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1)) + 0x8000) >= 0x10000)
2819 HOST_WIDE_INT high_int, low_int;
2821 low_int = ((INTVAL (XEXP (x, 1)) & 0xffff) ^ 0x8000) - 0x8000;
2822 high_int = INTVAL (XEXP (x, 1)) - low_int;
2823 sum = force_operand (gen_rtx_PLUS (Pmode, XEXP (x, 0),
2824 GEN_INT (high_int)), 0);
2825 return gen_rtx_PLUS (Pmode, sum, GEN_INT (low_int));
2827 else if (GET_CODE (x) == PLUS
2828 && GET_CODE (XEXP (x, 0)) == REG
2829 && GET_CODE (XEXP (x, 1)) != CONST_INT
2830 && GET_MODE_NUNITS (mode) == 1
2831 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
2833 || (mode != DFmode && mode != TFmode))
2834 && (TARGET_POWERPC64 || mode != DImode)
2837 return gen_rtx_PLUS (Pmode, XEXP (x, 0),
2838 force_reg (Pmode, force_operand (XEXP (x, 1), 0)));
2840 else if (ALTIVEC_VECTOR_MODE (mode))
2844 /* Make sure both operands are registers. */
2845 if (GET_CODE (x) == PLUS)
2846 return gen_rtx_PLUS (Pmode, force_reg (Pmode, XEXP (x, 0)),
2847 force_reg (Pmode, XEXP (x, 1)));
2849 reg = force_reg (Pmode, x);
2852 else if (SPE_VECTOR_MODE (mode))
2854 /* We accept [reg + reg] and [reg + OFFSET]. */
2856 if (GET_CODE (x) == PLUS)
2858 rtx op1 = XEXP (x, 0);
2859 rtx op2 = XEXP (x, 1);
2861 op1 = force_reg (Pmode, op1);
2863 if (GET_CODE (op2) != REG
2864 && (GET_CODE (op2) != CONST_INT
2865 || !SPE_CONST_OFFSET_OK (INTVAL (op2))))
2866 op2 = force_reg (Pmode, op2);
2868 return gen_rtx_PLUS (Pmode, op1, op2);
2871 return force_reg (Pmode, x);
2877 && GET_CODE (x) != CONST_INT
2878 && GET_CODE (x) != CONST_DOUBLE
2880 && GET_MODE_NUNITS (mode) == 1
2881 && (GET_MODE_BITSIZE (mode) <= 32
2882 || ((TARGET_HARD_FLOAT && TARGET_FPRS) && mode == DFmode)))
2884 rtx reg = gen_reg_rtx (Pmode);
2885 emit_insn (gen_elf_high (reg, x));
2886 return gen_rtx_LO_SUM (Pmode, reg, x);
2888 else if (TARGET_MACHO && TARGET_32BIT && TARGET_NO_TOC
2891 && ! MACHO_DYNAMIC_NO_PIC_P
2893 && GET_CODE (x) != CONST_INT
2894 && GET_CODE (x) != CONST_DOUBLE
2896 && ((TARGET_HARD_FLOAT && TARGET_FPRS) || mode != DFmode)
2900 rtx reg = gen_reg_rtx (Pmode);
2901 emit_insn (gen_macho_high (reg, x));
2902 return gen_rtx_LO_SUM (Pmode, reg, x);
2905 && constant_pool_expr_p (x)
2906 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), Pmode))
2908 return create_TOC_reference (x);
2914 /* This is called from dwarf2out.c via ASM_OUTPUT_DWARF_DTPREL.
2915 We need to emit DTP-relative relocations. */
2918 rs6000_output_dwarf_dtprel (FILE *file, int size, rtx x)
2923 fputs ("\t.long\t", file);
2926 fputs (DOUBLE_INT_ASM_OP, file);
2931 output_addr_const (file, x);
2932 fputs ("@dtprel+0x8000", file);
2935 /* Construct the SYMBOL_REF for the tls_get_addr function. */
2937 static GTY(()) rtx rs6000_tls_symbol;
2939 rs6000_tls_get_addr (void)
2941 if (!rs6000_tls_symbol)
2942 rs6000_tls_symbol = init_one_libfunc ("__tls_get_addr");
2944 return rs6000_tls_symbol;
2947 /* Construct the SYMBOL_REF for TLS GOT references. */
2949 static GTY(()) rtx rs6000_got_symbol;
2951 rs6000_got_sym (void)
2953 if (!rs6000_got_symbol)
2955 rs6000_got_symbol = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
2956 SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_LOCAL;
2957 SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_EXTERNAL;
2960 return rs6000_got_symbol;
2963 /* ADDR contains a thread-local SYMBOL_REF. Generate code to compute
2964 this (thread-local) address. */
2967 rs6000_legitimize_tls_address (rtx addr, enum tls_model model)
2971 dest = gen_reg_rtx (Pmode);
2972 if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 16)
2978 tlsreg = gen_rtx_REG (Pmode, 13);
2979 insn = gen_tls_tprel_64 (dest, tlsreg, addr);
2983 tlsreg = gen_rtx_REG (Pmode, 2);
2984 insn = gen_tls_tprel_32 (dest, tlsreg, addr);
2988 else if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 32)
2992 tmp = gen_reg_rtx (Pmode);
2995 tlsreg = gen_rtx_REG (Pmode, 13);
2996 insn = gen_tls_tprel_ha_64 (tmp, tlsreg, addr);
3000 tlsreg = gen_rtx_REG (Pmode, 2);
3001 insn = gen_tls_tprel_ha_32 (tmp, tlsreg, addr);
3005 insn = gen_tls_tprel_lo_64 (dest, tmp, addr);
3007 insn = gen_tls_tprel_lo_32 (dest, tmp, addr);
3012 rtx r3, got, tga, tmp1, tmp2, eqv;
3015 got = gen_rtx_REG (Pmode, TOC_REGISTER);
3019 got = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
3022 rtx gsym = rs6000_got_sym ();
3023 got = gen_reg_rtx (Pmode);
3025 rs6000_emit_move (got, gsym, Pmode);
3029 static int tls_got_labelno = 0;
3030 rtx tempLR, lab, tmp3, mem;
3033 ASM_GENERATE_INTERNAL_LABEL (buf, "LTLS", tls_got_labelno++);
3034 lab = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
3035 tempLR = gen_reg_rtx (Pmode);
3036 tmp1 = gen_reg_rtx (Pmode);
3037 tmp2 = gen_reg_rtx (Pmode);
3038 tmp3 = gen_reg_rtx (Pmode);
3039 mem = gen_rtx_MEM (Pmode, tmp1);
3040 RTX_UNCHANGING_P (mem) = 1;
3042 first = emit_insn (gen_load_toc_v4_PIC_1b (tempLR, lab,
3044 emit_move_insn (tmp1, tempLR);
3045 emit_move_insn (tmp2, mem);
3046 emit_insn (gen_addsi3 (tmp3, tmp1, tmp2));
3047 last = emit_move_insn (got, tmp3);
3048 REG_NOTES (last) = gen_rtx_EXPR_LIST (REG_EQUAL, gsym,
3050 REG_NOTES (first) = gen_rtx_INSN_LIST (REG_LIBCALL, last,
3052 REG_NOTES (last) = gen_rtx_INSN_LIST (REG_RETVAL, first,
3058 if (model == TLS_MODEL_GLOBAL_DYNAMIC)
3060 r3 = gen_rtx_REG (Pmode, 3);
3062 insn = gen_tls_gd_64 (r3, got, addr);
3064 insn = gen_tls_gd_32 (r3, got, addr);
3067 tga = gen_rtx_MEM (Pmode, rs6000_tls_get_addr ());
3068 insn = gen_call_value (r3, tga, const0_rtx, const0_rtx);
3069 insn = emit_call_insn (insn);
3070 CONST_OR_PURE_CALL_P (insn) = 1;
3071 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
3072 insn = get_insns ();
3074 emit_libcall_block (insn, dest, r3, addr);
3076 else if (model == TLS_MODEL_LOCAL_DYNAMIC)
3078 r3 = gen_rtx_REG (Pmode, 3);
3080 insn = gen_tls_ld_64 (r3, got);
3082 insn = gen_tls_ld_32 (r3, got);
3085 tga = gen_rtx_MEM (Pmode, rs6000_tls_get_addr ());
3086 insn = gen_call_value (r3, tga, const0_rtx, const0_rtx);
3087 insn = emit_call_insn (insn);
3088 CONST_OR_PURE_CALL_P (insn) = 1;
3089 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
3090 insn = get_insns ();
3092 tmp1 = gen_reg_rtx (Pmode);
3093 eqv = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx),
3095 emit_libcall_block (insn, tmp1, r3, eqv);
3096 if (rs6000_tls_size == 16)
3099 insn = gen_tls_dtprel_64 (dest, tmp1, addr);
3101 insn = gen_tls_dtprel_32 (dest, tmp1, addr);
3103 else if (rs6000_tls_size == 32)
3105 tmp2 = gen_reg_rtx (Pmode);
3107 insn = gen_tls_dtprel_ha_64 (tmp2, tmp1, addr);
3109 insn = gen_tls_dtprel_ha_32 (tmp2, tmp1, addr);
3112 insn = gen_tls_dtprel_lo_64 (dest, tmp2, addr);
3114 insn = gen_tls_dtprel_lo_32 (dest, tmp2, addr);
3118 tmp2 = gen_reg_rtx (Pmode);
3120 insn = gen_tls_got_dtprel_64 (tmp2, got, addr);
3122 insn = gen_tls_got_dtprel_32 (tmp2, got, addr);
3124 insn = gen_rtx_SET (Pmode, dest,
3125 gen_rtx_PLUS (Pmode, tmp2, tmp1));
3131 /* IE, or 64 bit offset LE. */
3132 tmp2 = gen_reg_rtx (Pmode);
3134 insn = gen_tls_got_tprel_64 (tmp2, got, addr);
3136 insn = gen_tls_got_tprel_32 (tmp2, got, addr);
3139 insn = gen_tls_tls_64 (dest, tmp2, addr);
3141 insn = gen_tls_tls_32 (dest, tmp2, addr);
3149 /* Return 1 if X is a SYMBOL_REF for a TLS symbol. This is used in
3150 instruction definitions. */
3153 rs6000_tls_symbol_ref (rtx x, enum machine_mode mode ATTRIBUTE_UNUSED)
3155 return RS6000_SYMBOL_REF_TLS_P (x);
3158 /* Return 1 if X contains a thread-local symbol. */
3161 rs6000_tls_referenced_p (rtx x)
3163 if (! TARGET_HAVE_TLS)
3166 return for_each_rtx (&x, &rs6000_tls_symbol_ref_1, 0);
3169 /* Return 1 if *X is a thread-local symbol. This is the same as
3170 rs6000_tls_symbol_ref except for the type of the unused argument. */
3173 rs6000_tls_symbol_ref_1 (rtx *x, void *data ATTRIBUTE_UNUSED)
3175 return RS6000_SYMBOL_REF_TLS_P (*x);
3178 /* The convention appears to be to define this wherever it is used.
3179 With legitimize_reload_address now defined here, REG_MODE_OK_FOR_BASE_P
3180 is now used here. */
3181 #ifndef REG_MODE_OK_FOR_BASE_P
3182 #define REG_MODE_OK_FOR_BASE_P(REGNO, MODE) REG_OK_FOR_BASE_P (REGNO)
3185 /* Our implementation of LEGITIMIZE_RELOAD_ADDRESS. Returns a value to
3186 replace the input X, or the original X if no replacement is called for.
3187 The output parameter *WIN is 1 if the calling macro should goto WIN,
3190 For RS/6000, we wish to handle large displacements off a base
3191 register by splitting the addend across an addiu/addis and the mem insn.
3192 This cuts number of extra insns needed from 3 to 1.
3194 On Darwin, we use this to generate code for floating point constants.
3195 A movsf_low is generated so we wind up with 2 instructions rather than 3.
3196 The Darwin code is inside #if TARGET_MACHO because only then is
3197 machopic_function_base_name() defined. */
3199 rs6000_legitimize_reload_address (rtx x, enum machine_mode mode,
3200 int opnum, int type, int ind_levels ATTRIBUTE_UNUSED, int *win)
3202 /* We must recognize output that we have already generated ourselves. */
3203 if (GET_CODE (x) == PLUS
3204 && GET_CODE (XEXP (x, 0)) == PLUS
3205 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
3206 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3207 && GET_CODE (XEXP (x, 1)) == CONST_INT)
3209 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
3210 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
3211 opnum, (enum reload_type)type);
3217 if (DEFAULT_ABI == ABI_DARWIN && flag_pic
3218 && GET_CODE (x) == LO_SUM
3219 && GET_CODE (XEXP (x, 0)) == PLUS
3220 && XEXP (XEXP (x, 0), 0) == pic_offset_table_rtx
3221 && GET_CODE (XEXP (XEXP (x, 0), 1)) == HIGH
3222 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 1), 0)) == CONST
3223 && XEXP (XEXP (XEXP (x, 0), 1), 0) == XEXP (x, 1)
3224 && GET_CODE (XEXP (XEXP (x, 1), 0)) == MINUS
3225 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == SYMBOL_REF
3226 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == SYMBOL_REF)
3228 /* Result of previous invocation of this function on Darwin
3229 floating point constant. */
3230 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
3231 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
3232 opnum, (enum reload_type)type);
3237 if (GET_CODE (x) == PLUS
3238 && GET_CODE (XEXP (x, 0)) == REG
3239 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
3240 && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
3241 && GET_CODE (XEXP (x, 1)) == CONST_INT
3242 && !SPE_VECTOR_MODE (mode)
3243 && !ALTIVEC_VECTOR_MODE (mode))
3245 HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
3246 HOST_WIDE_INT low = ((val & 0xffff) ^ 0x8000) - 0x8000;
3248 = (((val - low) & 0xffffffff) ^ 0x80000000) - 0x80000000;
3250 /* Check for 32-bit overflow. */
3251 if (high + low != val)
3257 /* Reload the high part into a base reg; leave the low part
3258 in the mem directly. */
3260 x = gen_rtx_PLUS (GET_MODE (x),
3261 gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0),
3265 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
3266 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
3267 opnum, (enum reload_type)type);
3272 if (GET_CODE (x) == SYMBOL_REF
3273 && DEFAULT_ABI == ABI_DARWIN
3274 && !ALTIVEC_VECTOR_MODE (mode)
3275 && (flag_pic || MACHO_DYNAMIC_NO_PIC_P)
3276 /* Don't do this for TFmode, since the result isn't offsettable. */
3281 rtx offset = gen_rtx_CONST (Pmode,
3282 gen_rtx_MINUS (Pmode, x,
3283 gen_rtx_SYMBOL_REF (Pmode,
3284 machopic_function_base_name ())));
3285 x = gen_rtx_LO_SUM (GET_MODE (x),
3286 gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
3287 gen_rtx_HIGH (Pmode, offset)), offset);
3290 x = gen_rtx_LO_SUM (GET_MODE (x),
3291 gen_rtx_HIGH (Pmode, x), x);
3293 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
3294 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
3295 opnum, (enum reload_type)type);
3301 && constant_pool_expr_p (x)
3302 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), mode))
3304 (x) = create_TOC_reference (x);
3312 /* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
3313 that is a valid memory address for an instruction.
3314 The MODE argument is the machine mode for the MEM expression
3315 that wants to use this address.
3317 On the RS/6000, there are four valid address: a SYMBOL_REF that
3318 refers to a constant pool entry of an address (or the sum of it
3319 plus a constant), a short (16-bit signed) constant plus a register,
3320 the sum of two registers, or a register indirect, possibly with an
3321 auto-increment. For DFmode and DImode with a constant plus register,
3322 we must ensure that both words are addressable or PowerPC64 with offset
3325 For modes spanning multiple registers (DFmode in 32-bit GPRs,
3326 32-bit DImode, TImode), indexed addressing cannot be used because
3327 adjacent memory cells are accessed by adding word-sized offsets
3328 during assembly output. */
3330 rs6000_legitimate_address (enum machine_mode mode, rtx x, int reg_ok_strict)
3332 if (RS6000_SYMBOL_REF_TLS_P (x))
3334 if (legitimate_indirect_address_p (x, reg_ok_strict))
3336 if ((GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
3337 && !ALTIVEC_VECTOR_MODE (mode)
3338 && !SPE_VECTOR_MODE (mode)
3340 && legitimate_indirect_address_p (XEXP (x, 0), reg_ok_strict))
3342 if (legitimate_small_data_p (mode, x))
3344 if (legitimate_constant_pool_address_p (x))
3346 /* If not REG_OK_STRICT (before reload) let pass any stack offset. */
3348 && GET_CODE (x) == PLUS
3349 && GET_CODE (XEXP (x, 0)) == REG
3350 && (XEXP (x, 0) == virtual_stack_vars_rtx
3351 || XEXP (x, 0) == arg_pointer_rtx)
3352 && GET_CODE (XEXP (x, 1)) == CONST_INT)
3354 if (legitimate_offset_address_p (mode, x, reg_ok_strict))
3357 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
3359 || (mode != DFmode && mode != TFmode))
3360 && (TARGET_POWERPC64 || mode != DImode)
3361 && legitimate_indexed_address_p (x, reg_ok_strict))
3363 if (legitimate_lo_sum_address_p (mode, x, reg_ok_strict))
3368 /* Go to LABEL if ADDR (a legitimate address expression)
3369 has an effect that depends on the machine mode it is used for.
3371 On the RS/6000 this is true of all integral offsets (since AltiVec
3372 modes don't allow them) or is a pre-increment or decrement.
3374 ??? Except that due to conceptual problems in offsettable_address_p
3375 we can't really report the problems of integral offsets. So leave
3376 this assuming that the adjustable offset must be valid for the
3377 sub-words of a TFmode operand, which is what we had before. */
3380 rs6000_mode_dependent_address (rtx addr)
3382 switch (GET_CODE (addr))
3385 if (GET_CODE (XEXP (addr, 1)) == CONST_INT)
3387 unsigned HOST_WIDE_INT val = INTVAL (XEXP (addr, 1));
3388 return val + 12 + 0x8000 >= 0x10000;
3397 return TARGET_UPDATE;
3406 /* Return number of consecutive hard regs needed starting at reg REGNO
3407 to hold something of mode MODE.
3408 This is ordinarily the length in words of a value of mode MODE
3409 but can be less for certain modes in special long registers.
3411 For the SPE, GPRs are 64 bits but only 32 bits are visible in
3412 scalar instructions. The upper 32 bits are only available to the
3415 POWER and PowerPC GPRs hold 32 bits worth;
3416 PowerPC64 GPRs and FPRs point register holds 64 bits worth. */
3419 rs6000_hard_regno_nregs (int regno, enum machine_mode mode)
3421 if (FP_REGNO_P (regno))
3422 return (GET_MODE_SIZE (mode) + UNITS_PER_FP_WORD - 1) / UNITS_PER_FP_WORD;
3424 if (SPE_SIMD_REGNO_P (regno) && TARGET_SPE && SPE_VECTOR_MODE (mode))
3425 return (GET_MODE_SIZE (mode) + UNITS_PER_SPE_WORD - 1) / UNITS_PER_SPE_WORD;
3427 if (ALTIVEC_REGNO_P (regno))
3429 (GET_MODE_SIZE (mode) + UNITS_PER_ALTIVEC_WORD - 1) / UNITS_PER_ALTIVEC_WORD;
3431 return (GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
3434 /* Change register usage conditional on target flags. */
3436 rs6000_conditional_register_usage (void)
3440 /* Set MQ register fixed (already call_used) if not POWER
3441 architecture (RIOS1, RIOS2, RSC, and PPC601) so that it will not
3446 /* 64-bit AIX reserves GPR13 for thread-private data. */
3448 fixed_regs[13] = call_used_regs[13]
3449 = call_really_used_regs[13] = 1;
3451 /* Conditionally disable FPRs. */
3452 if (TARGET_SOFT_FLOAT || !TARGET_FPRS)
3453 for (i = 32; i < 64; i++)
3454 fixed_regs[i] = call_used_regs[i]
3455 = call_really_used_regs[i] = 1;
3457 if (DEFAULT_ABI == ABI_V4
3458 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
3460 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
3462 if (DEFAULT_ABI == ABI_V4
3463 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
3465 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
3466 = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
3467 = call_really_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
3469 if (DEFAULT_ABI == ABI_DARWIN
3470 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
3471 global_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
3472 = fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
3473 = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
3474 = call_really_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
3477 global_regs[VSCR_REGNO] = 1;
3481 global_regs[SPEFSCR_REGNO] = 1;
3482 fixed_regs[FIXED_SCRATCH]
3483 = call_used_regs[FIXED_SCRATCH]
3484 = call_really_used_regs[FIXED_SCRATCH] = 1;
3487 if (! TARGET_ALTIVEC)
3489 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
3490 fixed_regs[i] = call_used_regs[i] = call_really_used_regs[i] = 1;
3491 call_really_used_regs[VRSAVE_REGNO] = 1;
3494 if (TARGET_ALTIVEC_ABI)
3495 for (i = FIRST_ALTIVEC_REGNO; i < FIRST_ALTIVEC_REGNO + 20; ++i)
3496 call_used_regs[i] = call_really_used_regs[i] = 1;
3499 /* Try to output insns to set TARGET equal to the constant C if it can
3500 be done in less than N insns. Do all computations in MODE.
3501 Returns the place where the output has been placed if it can be
3502 done and the insns have been emitted. If it would take more than N
3503 insns, zero is returned and no insns and emitted. */
3506 rs6000_emit_set_const (rtx dest, enum machine_mode mode,
3507 rtx source, int n ATTRIBUTE_UNUSED)
3509 rtx result, insn, set;
3510 HOST_WIDE_INT c0, c1;
3512 if (mode == QImode || mode == HImode)
3515 dest = gen_reg_rtx (mode);
3516 emit_insn (gen_rtx_SET (VOIDmode, dest, source));
3519 else if (mode == SImode)
3521 result = no_new_pseudos ? dest : gen_reg_rtx (SImode);
3523 emit_insn (gen_rtx_SET (VOIDmode, result,
3524 GEN_INT (INTVAL (source)
3525 & (~ (HOST_WIDE_INT) 0xffff))));
3526 emit_insn (gen_rtx_SET (VOIDmode, dest,
3527 gen_rtx_IOR (SImode, result,
3528 GEN_INT (INTVAL (source) & 0xffff))));
3531 else if (mode == DImode)
3533 if (GET_CODE (source) == CONST_INT)
3535 c0 = INTVAL (source);
3538 else if (GET_CODE (source) == CONST_DOUBLE)
3540 #if HOST_BITS_PER_WIDE_INT >= 64
3541 c0 = CONST_DOUBLE_LOW (source);
3544 c0 = CONST_DOUBLE_LOW (source);
3545 c1 = CONST_DOUBLE_HIGH (source);
3551 result = rs6000_emit_set_long_const (dest, c0, c1);
3556 insn = get_last_insn ();
3557 set = single_set (insn);
3558 if (! CONSTANT_P (SET_SRC (set)))
3559 set_unique_reg_note (insn, REG_EQUAL, source);
3564 /* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
3565 fall back to a straight forward decomposition. We do this to avoid
3566 exponential run times encountered when looking for longer sequences
3567 with rs6000_emit_set_const. */
3569 rs6000_emit_set_long_const (rtx dest, HOST_WIDE_INT c1, HOST_WIDE_INT c2)
3571 if (!TARGET_POWERPC64)
3573 rtx operand1, operand2;
3575 operand1 = operand_subword_force (dest, WORDS_BIG_ENDIAN == 0,
3577 operand2 = operand_subword_force (dest, WORDS_BIG_ENDIAN != 0,
3579 emit_move_insn (operand1, GEN_INT (c1));
3580 emit_move_insn (operand2, GEN_INT (c2));
3584 HOST_WIDE_INT ud1, ud2, ud3, ud4;
3587 ud2 = (c1 & 0xffff0000) >> 16;
3588 #if HOST_BITS_PER_WIDE_INT >= 64
3592 ud4 = (c2 & 0xffff0000) >> 16;
3594 if ((ud4 == 0xffff && ud3 == 0xffff && ud2 == 0xffff && (ud1 & 0x8000))
3595 || (ud4 == 0 && ud3 == 0 && ud2 == 0 && ! (ud1 & 0x8000)))
3598 emit_move_insn (dest, GEN_INT (((ud1 ^ 0x8000) - 0x8000)));
3600 emit_move_insn (dest, GEN_INT (ud1));
3603 else if ((ud4 == 0xffff && ud3 == 0xffff && (ud2 & 0x8000))
3604 || (ud4 == 0 && ud3 == 0 && ! (ud2 & 0x8000)))
3607 emit_move_insn (dest, GEN_INT (((ud2 << 16) ^ 0x80000000)
3610 emit_move_insn (dest, GEN_INT (ud2 << 16));
3612 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
3614 else if ((ud4 == 0xffff && (ud3 & 0x8000))
3615 || (ud4 == 0 && ! (ud3 & 0x8000)))
3618 emit_move_insn (dest, GEN_INT (((ud3 << 16) ^ 0x80000000)
3621 emit_move_insn (dest, GEN_INT (ud3 << 16));
3624 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud2)));
3625 emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (16)));
3627 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
3632 emit_move_insn (dest, GEN_INT (((ud4 << 16) ^ 0x80000000)
3635 emit_move_insn (dest, GEN_INT (ud4 << 16));
3638 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud3)));
3640 emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (32)));
3642 emit_move_insn (dest, gen_rtx_IOR (DImode, dest,
3643 GEN_INT (ud2 << 16)));
3645 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
3651 /* Emit a move from SOURCE to DEST in mode MODE. */
3653 rs6000_emit_move (rtx dest, rtx source, enum machine_mode mode)
3657 operands[1] = source;
3659 /* Sanity checks. Check that we get CONST_DOUBLE only when we should. */
3660 if (GET_CODE (operands[1]) == CONST_DOUBLE
3661 && ! FLOAT_MODE_P (mode)
3662 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3664 /* FIXME. This should never happen. */
3665 /* Since it seems that it does, do the safe thing and convert
3667 operands[1] = gen_int_mode (CONST_DOUBLE_LOW (operands[1]), mode);
3669 if (GET_CODE (operands[1]) == CONST_DOUBLE
3670 && ! FLOAT_MODE_P (mode)
3671 && ((CONST_DOUBLE_HIGH (operands[1]) == 0
3672 && CONST_DOUBLE_LOW (operands[1]) >= 0)
3673 || (CONST_DOUBLE_HIGH (operands[1]) == -1
3674 && CONST_DOUBLE_LOW (operands[1]) < 0)))
3677 /* Check if GCC is setting up a block move that will end up using FP
3678 registers as temporaries. We must make sure this is acceptable. */
3679 if (GET_CODE (operands[0]) == MEM
3680 && GET_CODE (operands[1]) == MEM
3682 && (SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[0]))
3683 || SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[1])))
3684 && ! (SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[0]) > 32
3685 ? 32 : MEM_ALIGN (operands[0])))
3686 || SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[1]) > 32
3688 : MEM_ALIGN (operands[1]))))
3689 && ! MEM_VOLATILE_P (operands [0])
3690 && ! MEM_VOLATILE_P (operands [1]))
3692 emit_move_insn (adjust_address (operands[0], SImode, 0),
3693 adjust_address (operands[1], SImode, 0));
3694 emit_move_insn (adjust_address (operands[0], SImode, 4),
3695 adjust_address (operands[1], SImode, 4));
3699 if (!no_new_pseudos)
3701 if (GET_CODE (operands[1]) == MEM && optimize > 0
3702 && (mode == QImode || mode == HImode || mode == SImode)
3703 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode))
3705 rtx reg = gen_reg_rtx (word_mode);
3707 emit_insn (gen_rtx_SET (word_mode, reg,
3708 gen_rtx_ZERO_EXTEND (word_mode,
3710 operands[1] = gen_lowpart (mode, reg);
3712 if (GET_CODE (operands[0]) != REG)
3713 operands[1] = force_reg (mode, operands[1]);
3716 if (mode == SFmode && ! TARGET_POWERPC
3717 && TARGET_HARD_FLOAT && TARGET_FPRS
3718 && GET_CODE (operands[0]) == MEM)
3722 if (reload_in_progress || reload_completed)
3723 regnum = true_regnum (operands[1]);
3724 else if (GET_CODE (operands[1]) == REG)
3725 regnum = REGNO (operands[1]);
3729 /* If operands[1] is a register, on POWER it may have
3730 double-precision data in it, so truncate it to single
3732 if (FP_REGNO_P (regnum) || regnum >= FIRST_PSEUDO_REGISTER)
3735 newreg = (no_new_pseudos ? operands[1] : gen_reg_rtx (mode));
3736 emit_insn (gen_aux_truncdfsf2 (newreg, operands[1]));
3737 operands[1] = newreg;
3741 /* Recognize the case where operand[1] is a reference to thread-local
3742 data and load its address to a register. */
3743 if (GET_CODE (operands[1]) == SYMBOL_REF)
3745 enum tls_model model = SYMBOL_REF_TLS_MODEL (operands[1]);
3747 operands[1] = rs6000_legitimize_tls_address (operands[1], model);
3750 /* Handle the case where reload calls us with an invalid address. */
3751 if (reload_in_progress && mode == Pmode
3752 && (! general_operand (operands[1], mode)
3753 || ! nonimmediate_operand (operands[0], mode)))
3756 /* Handle the case of CONSTANT_P_RTX. */
3757 if (GET_CODE (operands[1]) == CONSTANT_P_RTX)
3760 /* 128-bit constant floating-point values on Darwin should really be
3761 loaded as two parts. */
3762 if ((DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_DARWIN)
3763 && TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_LONG_DOUBLE_128
3764 && mode == TFmode && GET_CODE (operands[1]) == CONST_DOUBLE)
3766 /* DImode is used, not DFmode, because simplify_gen_subreg doesn't
3767 know how to get a DFmode SUBREG of a TFmode. */
3768 rs6000_emit_move (simplify_gen_subreg (DImode, operands[0], mode, 0),
3769 simplify_gen_subreg (DImode, operands[1], mode, 0),
3771 rs6000_emit_move (simplify_gen_subreg (DImode, operands[0], mode,
3772 GET_MODE_SIZE (DImode)),
3773 simplify_gen_subreg (DImode, operands[1], mode,
3774 GET_MODE_SIZE (DImode)),
3779 /* FIXME: In the long term, this switch statement should go away
3780 and be replaced by a sequence of tests based on things like
3786 if (CONSTANT_P (operands[1])
3787 && GET_CODE (operands[1]) != CONST_INT)
3788 operands[1] = force_const_mem (mode, operands[1]);
3794 if (CONSTANT_P (operands[1])
3795 && ! easy_fp_constant (operands[1], mode))
3796 operands[1] = force_const_mem (mode, operands[1]);
3807 if (CONSTANT_P (operands[1])
3808 && !easy_vector_constant (operands[1], mode))
3809 operands[1] = force_const_mem (mode, operands[1]);
3814 /* Use default pattern for address of ELF small data */
3817 && DEFAULT_ABI == ABI_V4
3818 && (GET_CODE (operands[1]) == SYMBOL_REF
3819 || GET_CODE (operands[1]) == CONST)
3820 && small_data_operand (operands[1], mode))
3822 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
3826 if (DEFAULT_ABI == ABI_V4
3827 && mode == Pmode && mode == SImode
3828 && flag_pic == 1 && got_operand (operands[1], mode))
3830 emit_insn (gen_movsi_got (operands[0], operands[1]));
3834 if ((TARGET_ELF || DEFAULT_ABI == ABI_DARWIN)
3838 && CONSTANT_P (operands[1])
3839 && GET_CODE (operands[1]) != HIGH
3840 && GET_CODE (operands[1]) != CONST_INT)
3842 rtx target = (no_new_pseudos ? operands[0] : gen_reg_rtx (mode));
3844 /* If this is a function address on -mcall-aixdesc,
3845 convert it to the address of the descriptor. */
3846 if (DEFAULT_ABI == ABI_AIX
3847 && GET_CODE (operands[1]) == SYMBOL_REF
3848 && XSTR (operands[1], 0)[0] == '.')
3850 const char *name = XSTR (operands[1], 0);
3852 while (*name == '.')
3854 new_ref = gen_rtx_SYMBOL_REF (Pmode, name);
3855 CONSTANT_POOL_ADDRESS_P (new_ref)
3856 = CONSTANT_POOL_ADDRESS_P (operands[1]);
3857 SYMBOL_REF_FLAGS (new_ref) = SYMBOL_REF_FLAGS (operands[1]);
3858 SYMBOL_REF_USED (new_ref) = SYMBOL_REF_USED (operands[1]);
3859 SYMBOL_REF_DECL (new_ref) = SYMBOL_REF_DECL (operands[1]);
3860 operands[1] = new_ref;
3863 if (DEFAULT_ABI == ABI_DARWIN)
3866 if (MACHO_DYNAMIC_NO_PIC_P)
3868 /* Take care of any required data indirection. */
3869 operands[1] = rs6000_machopic_legitimize_pic_address (
3870 operands[1], mode, operands[0]);
3871 if (operands[0] != operands[1])
3872 emit_insn (gen_rtx_SET (VOIDmode,
3873 operands[0], operands[1]));
3877 emit_insn (gen_macho_high (target, operands[1]));
3878 emit_insn (gen_macho_low (operands[0], target, operands[1]));
3882 emit_insn (gen_elf_high (target, operands[1]));
3883 emit_insn (gen_elf_low (operands[0], target, operands[1]));
3887 /* If this is a SYMBOL_REF that refers to a constant pool entry,
3888 and we have put it in the TOC, we just need to make a TOC-relative
3891 && GET_CODE (operands[1]) == SYMBOL_REF
3892 && constant_pool_expr_p (operands[1])
3893 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands[1]),
3894 get_pool_mode (operands[1])))
3896 operands[1] = create_TOC_reference (operands[1]);
3898 else if (mode == Pmode
3899 && CONSTANT_P (operands[1])
3900 && ((GET_CODE (operands[1]) != CONST_INT
3901 && ! easy_fp_constant (operands[1], mode))
3902 || (GET_CODE (operands[1]) == CONST_INT
3903 && num_insns_constant (operands[1], mode) > 2)
3904 || (GET_CODE (operands[0]) == REG
3905 && FP_REGNO_P (REGNO (operands[0]))))
3906 && GET_CODE (operands[1]) != HIGH
3907 && ! legitimate_constant_pool_address_p (operands[1])
3908 && ! toc_relative_expr_p (operands[1]))
3910 /* Emit a USE operation so that the constant isn't deleted if
3911 expensive optimizations are turned on because nobody
3912 references it. This should only be done for operands that
3913 contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
3914 This should not be done for operands that contain LABEL_REFs.
3915 For now, we just handle the obvious case. */
3916 if (GET_CODE (operands[1]) != LABEL_REF)
3917 emit_insn (gen_rtx_USE (VOIDmode, operands[1]));
3920 /* Darwin uses a special PIC legitimizer. */
3921 if (DEFAULT_ABI == ABI_DARWIN && MACHOPIC_INDIRECT)
3924 rs6000_machopic_legitimize_pic_address (operands[1], mode,
3926 if (operands[0] != operands[1])
3927 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
3932 /* If we are to limit the number of things we put in the TOC and
3933 this is a symbol plus a constant we can add in one insn,
3934 just put the symbol in the TOC and add the constant. Don't do
3935 this if reload is in progress. */
3936 if (GET_CODE (operands[1]) == CONST
3937 && TARGET_NO_SUM_IN_TOC && ! reload_in_progress
3938 && GET_CODE (XEXP (operands[1], 0)) == PLUS
3939 && add_operand (XEXP (XEXP (operands[1], 0), 1), mode)
3940 && (GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == LABEL_REF
3941 || GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF)
3942 && ! side_effects_p (operands[0]))
3945 force_const_mem (mode, XEXP (XEXP (operands[1], 0), 0));
3946 rtx other = XEXP (XEXP (operands[1], 0), 1);
3948 sym = force_reg (mode, sym);
3950 emit_insn (gen_addsi3 (operands[0], sym, other));
3952 emit_insn (gen_adddi3 (operands[0], sym, other));
3956 operands[1] = force_const_mem (mode, operands[1]);
3959 && constant_pool_expr_p (XEXP (operands[1], 0))
3960 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
3961 get_pool_constant (XEXP (operands[1], 0)),
3962 get_pool_mode (XEXP (operands[1], 0))))
3965 = gen_rtx_MEM (mode,
3966 create_TOC_reference (XEXP (operands[1], 0)));
3967 set_mem_alias_set (operands[1], get_TOC_alias_set ());
3968 RTX_UNCHANGING_P (operands[1]) = 1;
3974 if (GET_CODE (operands[0]) == MEM
3975 && GET_CODE (XEXP (operands[0], 0)) != REG
3976 && ! reload_in_progress)
3978 = replace_equiv_address (operands[0],
3979 copy_addr_to_reg (XEXP (operands[0], 0)));
3981 if (GET_CODE (operands[1]) == MEM
3982 && GET_CODE (XEXP (operands[1], 0)) != REG
3983 && ! reload_in_progress)
3985 = replace_equiv_address (operands[1],
3986 copy_addr_to_reg (XEXP (operands[1], 0)));
3989 emit_insn (gen_rtx_PARALLEL (VOIDmode,
3991 gen_rtx_SET (VOIDmode,
3992 operands[0], operands[1]),
3993 gen_rtx_CLOBBER (VOIDmode,
3994 gen_rtx_SCRATCH (SImode)))));
4003 /* Above, we may have called force_const_mem which may have returned
4004 an invalid address. If we can, fix this up; otherwise, reload will
4005 have to deal with it. */
4006 if (GET_CODE (operands[1]) == MEM && ! reload_in_progress)
4007 operands[1] = validize_mem (operands[1]);
4010 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
4013 /* Nonzero if we can use a floating-point register to pass this arg. */
4014 #define USE_FP_FOR_ARG_P(CUM,MODE,TYPE) \
4015 (GET_MODE_CLASS (MODE) == MODE_FLOAT \
4016 && (CUM)->fregno <= FP_ARG_MAX_REG \
4017 && TARGET_HARD_FLOAT && TARGET_FPRS)
4019 /* Nonzero if we can use an AltiVec register to pass this arg. */
4020 #define USE_ALTIVEC_FOR_ARG_P(CUM,MODE,TYPE,NAMED) \
4021 (ALTIVEC_VECTOR_MODE (MODE) \
4022 && (CUM)->vregno <= ALTIVEC_ARG_MAX_REG \
4023 && TARGET_ALTIVEC_ABI \
4026 /* Return a nonzero value to say to return the function value in
4027 memory, just as large structures are always returned. TYPE will be
4028 the data type of the value, and FNTYPE will be the type of the
4029 function doing the returning, or @code{NULL} for libcalls.
4031 The AIX ABI for the RS/6000 specifies that all structures are
4032 returned in memory. The Darwin ABI does the same. The SVR4 ABI
4033 specifies that structures <= 8 bytes are returned in r3/r4, but a
4034 draft put them in memory, and GCC used to implement the draft
4035 instead of the final standard. Therefore, TARGET_AIX_STRUCT_RET
4036 controls this instead of DEFAULT_ABI; V.4 targets needing backward
4037 compatibility can change DRAFT_V4_STRUCT_RET to override the
4038 default, and -m switches get the final word. See
4039 rs6000_override_options for more details.
4041 The PPC32 SVR4 ABI uses IEEE double extended for long double, if 128-bit
4042 long double support is enabled. These values are returned in memory.
4044 int_size_in_bytes returns -1 for variable size objects, which go in
4045 memory always. The cast to unsigned makes -1 > 8. */
4048 rs6000_return_in_memory (tree type, tree fntype ATTRIBUTE_UNUSED)
4050 if (AGGREGATE_TYPE_P (type)
4051 && (TARGET_AIX_STRUCT_RET
4052 || (unsigned HOST_WIDE_INT) int_size_in_bytes (type) > 8))
4054 if (DEFAULT_ABI == ABI_V4 && TYPE_MODE (type) == TFmode)
4059 /* Initialize a variable CUM of type CUMULATIVE_ARGS
4060 for a call to a function whose data type is FNTYPE.
4061 For a library call, FNTYPE is 0.
4063 For incoming args we set the number of arguments in the prototype large
4064 so we never return a PARALLEL. */
4067 init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype,
4068 rtx libname ATTRIBUTE_UNUSED, int incoming,
4069 int libcall, int n_named_args)
4071 static CUMULATIVE_ARGS zero_cumulative;
4073 *cum = zero_cumulative;
4075 cum->fregno = FP_ARG_MIN_REG;
4076 cum->vregno = ALTIVEC_ARG_MIN_REG;
4077 cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
4078 cum->call_cookie = ((DEFAULT_ABI == ABI_V4 && libcall)
4079 ? CALL_LIBCALL : CALL_NORMAL);
4080 cum->sysv_gregno = GP_ARG_MIN_REG;
4081 cum->stdarg = fntype
4082 && (TYPE_ARG_TYPES (fntype) != 0
4083 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
4084 != void_type_node));
4086 cum->nargs_prototype = 0;
4087 if (incoming || cum->prototype)
4088 cum->nargs_prototype = n_named_args;
4090 /* Check for a longcall attribute. */
4092 && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype))
4093 && !lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype)))
4094 cum->call_cookie = CALL_LONG;
4096 if (TARGET_DEBUG_ARG)
4098 fprintf (stderr, "\ninit_cumulative_args:");
4101 tree ret_type = TREE_TYPE (fntype);
4102 fprintf (stderr, " ret code = %s,",
4103 tree_code_name[ (int)TREE_CODE (ret_type) ]);
4106 if (cum->call_cookie & CALL_LONG)
4107 fprintf (stderr, " longcall,");
4109 fprintf (stderr, " proto = %d, nargs = %d\n",
4110 cum->prototype, cum->nargs_prototype);
4115 && TARGET_ALTIVEC_ABI
4116 && ALTIVEC_VECTOR_MODE (TYPE_MODE (TREE_TYPE (fntype))))
4118 error ("Cannot return value in vector register because"
4119 " altivec instructions are disabled, use -maltivec"
4120 " to enable them.");
4124 /* If defined, a C expression which determines whether, and in which
4125 direction, to pad out an argument with extra space. The value
4126 should be of type `enum direction': either `upward' to pad above
4127 the argument, `downward' to pad below, or `none' to inhibit
4130 For the AIX ABI structs are always stored left shifted in their
4134 function_arg_padding (enum machine_mode mode, tree type)
4136 #ifndef AGGREGATE_PADDING_FIXED
4137 #define AGGREGATE_PADDING_FIXED 0
4139 #ifndef AGGREGATES_PAD_UPWARD_ALWAYS
4140 #define AGGREGATES_PAD_UPWARD_ALWAYS 0
4143 if (!AGGREGATE_PADDING_FIXED)
4145 /* GCC used to pass structures of the same size as integer types as
4146 if they were in fact integers, ignoring FUNCTION_ARG_PADDING.
4147 ie. Structures of size 1 or 2 (or 4 when TARGET_64BIT) were
4148 passed padded downward, except that -mstrict-align further
4149 muddied the water in that multi-component structures of 2 and 4
4150 bytes in size were passed padded upward.
4152 The following arranges for best compatibility with previous
4153 versions of gcc, but removes the -mstrict-align dependency. */
4154 if (BYTES_BIG_ENDIAN)
4156 HOST_WIDE_INT size = 0;
4158 if (mode == BLKmode)
4160 if (type && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
4161 size = int_size_in_bytes (type);
4164 size = GET_MODE_SIZE (mode);
4166 if (size == 1 || size == 2 || size == 4)
4172 if (AGGREGATES_PAD_UPWARD_ALWAYS)
4174 if (type != 0 && AGGREGATE_TYPE_P (type))
4178 /* Fall back to the default. */
4179 return DEFAULT_FUNCTION_ARG_PADDING (mode, type);
4182 /* If defined, a C expression that gives the alignment boundary, in bits,
4183 of an argument with the specified mode and type. If it is not defined,
4184 PARM_BOUNDARY is used for all arguments.
4186 V.4 wants long longs to be double word aligned. */
4189 function_arg_boundary (enum machine_mode mode, tree type ATTRIBUTE_UNUSED)
4191 if (DEFAULT_ABI == ABI_V4 && (mode == DImode || mode == DFmode))
4193 else if (SPE_VECTOR_MODE (mode))
4195 else if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
4198 return PARM_BOUNDARY;
4201 /* Compute the size (in words) of a function argument. */
4203 static unsigned long
4204 rs6000_arg_size (enum machine_mode mode, tree type)
4208 if (mode != BLKmode)
4209 size = GET_MODE_SIZE (mode);
4211 size = int_size_in_bytes (type);
4214 return (size + 3) >> 2;
4216 return (size + 7) >> 3;
4219 /* Update the data in CUM to advance over an argument
4220 of mode MODE and data type TYPE.
4221 (TYPE is null for libcalls where that information may not be available.) */
4224 function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
4225 tree type, int named)
4227 cum->nargs_prototype--;
4229 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
4231 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named))
4234 if (!TARGET_ALTIVEC)
4235 error ("Cannot pass argument in vector register because"
4236 " altivec instructions are disabled, use -maltivec"
4237 " to enable them.");
4239 /* PowerPC64 Linux and AIX allocates GPRs for a vector argument
4240 even if it is going to be passed in a vector register.
4241 Darwin does the same for variable-argument functions. */
4242 if ((DEFAULT_ABI == ABI_AIX && TARGET_64BIT)
4243 || (cum->stdarg && DEFAULT_ABI != ABI_V4))
4247 /* Vector parameters must be 16-byte aligned. This places
4248 them at 2 mod 4 in terms of words in 32-bit mode, since
4249 the parameter save area starts at offset 24 from the
4250 stack. In 64-bit mode, they just have to start on an
4251 even word, since the parameter save area is 16-byte
4252 aligned. Space for GPRs is reserved even if the argument
4253 will be passed in memory. */
4255 align = ((6 - (cum->words & 3)) & 3);
4257 align = cum->words & 1;
4258 cum->words += align + rs6000_arg_size (mode, type);
4260 if (TARGET_DEBUG_ARG)
4262 fprintf (stderr, "function_adv: words = %2d, align=%d, ",
4264 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s\n",
4265 cum->nargs_prototype, cum->prototype,
4266 GET_MODE_NAME (mode));
4270 else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode)
4272 && cum->sysv_gregno <= GP_ARG_MAX_REG)
4274 else if (DEFAULT_ABI == ABI_V4)
4276 if (TARGET_HARD_FLOAT && TARGET_FPRS
4277 && (mode == SFmode || mode == DFmode))
4279 if (cum->fregno <= FP_ARG_V4_MAX_REG)
4284 cum->words += cum->words & 1;
4285 cum->words += rs6000_arg_size (mode, type);
4291 int gregno = cum->sysv_gregno;
4293 /* Aggregates and IEEE quad get passed by reference. */
4294 if ((type && AGGREGATE_TYPE_P (type))
4298 n_words = rs6000_arg_size (mode, type);
4300 /* Long long and SPE vectors are put in odd registers. */
4301 if (n_words == 2 && (gregno & 1) == 0)
4304 /* Long long and SPE vectors are not split between registers
4306 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
4308 /* Long long is aligned on the stack. */
4310 cum->words += cum->words & 1;
4311 cum->words += n_words;
4314 /* Note: continuing to accumulate gregno past when we've started
4315 spilling to the stack indicates the fact that we've started
4316 spilling to the stack to expand_builtin_saveregs. */
4317 cum->sysv_gregno = gregno + n_words;
4320 if (TARGET_DEBUG_ARG)
4322 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
4323 cum->words, cum->fregno);
4324 fprintf (stderr, "gregno = %2d, nargs = %4d, proto = %d, ",
4325 cum->sysv_gregno, cum->nargs_prototype, cum->prototype);
4326 fprintf (stderr, "mode = %4s, named = %d\n",
4327 GET_MODE_NAME (mode), named);
4332 int align = (TARGET_32BIT && (cum->words & 1) != 0
4333 && function_arg_boundary (mode, type) == 64) ? 1 : 0;
4335 cum->words += align + rs6000_arg_size (mode, type);
4337 if (GET_MODE_CLASS (mode) == MODE_FLOAT
4338 && TARGET_HARD_FLOAT && TARGET_FPRS)
4339 cum->fregno += (GET_MODE_SIZE (mode) + 7) >> 3;
4341 if (TARGET_DEBUG_ARG)
4343 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
4344 cum->words, cum->fregno);
4345 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s, ",
4346 cum->nargs_prototype, cum->prototype, GET_MODE_NAME (mode));
4347 fprintf (stderr, "named = %d, align = %d\n", named, align);
4352 /* Determine where to put a SIMD argument on the SPE. */
4355 rs6000_spe_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
4360 int gregno = cum->sysv_gregno;
4361 int n_words = rs6000_arg_size (mode, type);
4363 /* SPE vectors are put in odd registers. */
4364 if (n_words == 2 && (gregno & 1) == 0)
4367 if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
4370 enum machine_mode m = SImode;
4372 r1 = gen_rtx_REG (m, gregno);
4373 r1 = gen_rtx_EXPR_LIST (m, r1, const0_rtx);
4374 r2 = gen_rtx_REG (m, gregno + 1);
4375 r2 = gen_rtx_EXPR_LIST (m, r2, GEN_INT (4));
4376 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
4383 if (cum->sysv_gregno <= GP_ARG_MAX_REG)
4384 return gen_rtx_REG (mode, cum->sysv_gregno);
4390 /* Determine where to place an argument in 64-bit mode with 32-bit ABI. */
4393 rs6000_mixed_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
4394 tree type, int align_words)
4398 /* -mpowerpc64 with 32bit ABI splits up a DFmode argument
4399 in vararg list into zero, one or two GPRs */
4400 if (align_words >= GP_ARG_NUM_REG)
4401 return gen_rtx_PARALLEL (DFmode,
4403 gen_rtx_EXPR_LIST (VOIDmode,
4404 NULL_RTX, const0_rtx),
4405 gen_rtx_EXPR_LIST (VOIDmode,
4409 else if (align_words + rs6000_arg_size (mode, type)
4411 /* If this is partially on the stack, then we only
4412 include the portion actually in registers here. */
4413 return gen_rtx_PARALLEL (DFmode,
4415 gen_rtx_EXPR_LIST (VOIDmode,
4416 gen_rtx_REG (SImode,
4420 gen_rtx_EXPR_LIST (VOIDmode,
4425 /* split a DFmode arg into two GPRs */
4426 return gen_rtx_PARALLEL (DFmode,
4428 gen_rtx_EXPR_LIST (VOIDmode,
4429 gen_rtx_REG (SImode,
4433 gen_rtx_EXPR_LIST (VOIDmode,
4434 gen_rtx_REG (SImode,
4438 gen_rtx_EXPR_LIST (VOIDmode,
4439 gen_rtx_REG (mode, cum->fregno),
4442 /* -mpowerpc64 with 32bit ABI splits up a DImode argument into one
4444 else if (mode == DImode)
4446 if (align_words < GP_ARG_NUM_REG - 1)
4447 return gen_rtx_PARALLEL (DImode,
4449 gen_rtx_EXPR_LIST (VOIDmode,
4450 gen_rtx_REG (SImode,
4454 gen_rtx_EXPR_LIST (VOIDmode,
4455 gen_rtx_REG (SImode,
4459 else if (align_words == GP_ARG_NUM_REG - 1)
4460 return gen_rtx_PARALLEL (DImode,
4462 gen_rtx_EXPR_LIST (VOIDmode,
4463 NULL_RTX, const0_rtx),
4464 gen_rtx_EXPR_LIST (VOIDmode,
4465 gen_rtx_REG (SImode,
4470 else if (ALTIVEC_VECTOR_MODE (mode) && align_words == GP_ARG_NUM_REG - 2)
4472 /* Varargs vector regs must be saved in R9-R10. */
4473 return gen_rtx_PARALLEL (mode,
4475 gen_rtx_EXPR_LIST (VOIDmode,
4476 NULL_RTX, const0_rtx),
4477 gen_rtx_EXPR_LIST (VOIDmode,
4478 gen_rtx_REG (SImode,
4482 gen_rtx_EXPR_LIST (VOIDmode,
4483 gen_rtx_REG (SImode,
4488 else if ((mode == BLKmode || ALTIVEC_VECTOR_MODE (mode))
4489 && align_words <= (GP_ARG_NUM_REG - 1))
4491 /* AltiVec vector regs are saved in R5-R8. */
4493 int size = int_size_in_bytes (type);
4494 int no_units = ((size - 1) / 4) + 1;
4495 int max_no_words = GP_ARG_NUM_REG - align_words;
4496 int rtlvec_len = no_units < max_no_words ? no_units : max_no_words;
4497 rtx *rtlvec = (rtx *) alloca (rtlvec_len * sizeof (rtx));
4499 memset ((char *) rtlvec, 0, rtlvec_len * sizeof (rtx));
4501 for (k=0; k < rtlvec_len; k++)
4502 rtlvec[k] = gen_rtx_EXPR_LIST (VOIDmode,
4503 gen_rtx_REG (SImode,
4506 k == 0 ? const0_rtx : GEN_INT (k*4));
4508 return gen_rtx_PARALLEL (mode, gen_rtvec_v (k, rtlvec));
4513 /* Determine where to put an argument to a function.
4514 Value is zero to push the argument on the stack,
4515 or a hard register in which to store the argument.
4517 MODE is the argument's machine mode.
4518 TYPE is the data type of the argument (as a tree).
4519 This is null for libcalls where that information may
4521 CUM is a variable of type CUMULATIVE_ARGS which gives info about
4522 the preceding args and about the function being called.
4523 NAMED is nonzero if this argument is a named parameter
4524 (otherwise it is an extra parameter matching an ellipsis).
4526 On RS/6000 the first eight words of non-FP are normally in registers
4527 and the rest are pushed. Under AIX, the first 13 FP args are in registers.
4528 Under V.4, the first 8 FP args are in registers.
4530 If this is floating-point and no prototype is specified, we use
4531 both an FP and integer register (or possibly FP reg and stack). Library
4532 functions (when CALL_LIBCALL is set) always have the proper types for args,
4533 so we can pass the FP value just in one register. emit_library_function
4534 doesn't support PARALLEL anyway. */
4537 function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
4538 tree type, int named)
4540 enum rs6000_abi abi = DEFAULT_ABI;
4542 /* Return a marker to indicate whether CR1 needs to set or clear the
4543 bit that V.4 uses to say fp args were passed in registers.
4544 Assume that we don't need the marker for software floating point,
4545 or compiler generated library calls. */
4546 if (mode == VOIDmode)
4549 && cum->nargs_prototype < 0
4550 && (cum->call_cookie & CALL_LIBCALL) == 0
4551 && (cum->prototype || TARGET_NO_PROTOTYPE))
4553 /* For the SPE, we need to crxor CR6 always. */
4555 return GEN_INT (cum->call_cookie | CALL_V4_SET_FP_ARGS);
4556 else if (TARGET_HARD_FLOAT && TARGET_FPRS)
4557 return GEN_INT (cum->call_cookie
4558 | ((cum->fregno == FP_ARG_MIN_REG)
4559 ? CALL_V4_SET_FP_ARGS
4560 : CALL_V4_CLEAR_FP_ARGS));
4563 return GEN_INT (cum->call_cookie);
4566 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named))
4567 if (TARGET_64BIT && ! cum->prototype)
4569 /* Vector parameters get passed in vector register
4570 and also in GPRs or memory, in absence of prototype. */
4573 align_words = (cum->words + 1) & ~1;
4575 if (align_words >= GP_ARG_NUM_REG)
4581 slot = gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
4583 return gen_rtx_PARALLEL (mode,
4585 gen_rtx_EXPR_LIST (VOIDmode,
4587 gen_rtx_EXPR_LIST (VOIDmode,
4588 gen_rtx_REG (mode, cum->vregno),
4592 return gen_rtx_REG (mode, cum->vregno);
4593 else if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
4595 if (named || abi == ABI_V4)
4599 /* Vector parameters to varargs functions under AIX or Darwin
4600 get passed in memory and possibly also in GPRs. */
4601 int align, align_words;
4602 enum machine_mode part_mode = mode;
4604 /* Vector parameters must be 16-byte aligned. This places them at
4605 2 mod 4 in terms of words in 32-bit mode, since the parameter
4606 save area starts at offset 24 from the stack. In 64-bit mode,
4607 they just have to start on an even word, since the parameter
4608 save area is 16-byte aligned. */
4610 align = ((6 - (cum->words & 3)) & 3);
4612 align = cum->words & 1;
4613 align_words = cum->words + align;
4615 /* Out of registers? Memory, then. */
4616 if (align_words >= GP_ARG_NUM_REG)
4619 /* The vector value goes in GPRs. Only the part of the
4620 value in GPRs is reported here. */
4621 if (align_words + CLASS_MAX_NREGS (mode, GENERAL_REGS)
4623 /* Fortunately, there are only two possibilities, the value
4624 is either wholly in GPRs or half in GPRs and half not. */
4628 && (TARGET_POWERPC64 || (align_words == GP_ARG_NUM_REG - 2)))
4629 return rs6000_mixed_function_arg (cum, part_mode, type, align_words);
4631 return gen_rtx_REG (part_mode, GP_ARG_MIN_REG + align_words);
4634 else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode))
4635 return rs6000_spe_function_arg (cum, mode, type);
4636 else if (abi == ABI_V4)
4638 if (TARGET_HARD_FLOAT && TARGET_FPRS
4639 && (mode == SFmode || mode == DFmode))
4641 if (cum->fregno <= FP_ARG_V4_MAX_REG)
4642 return gen_rtx_REG (mode, cum->fregno);
4649 int gregno = cum->sysv_gregno;
4651 /* Aggregates and IEEE quad get passed by reference. */
4652 if ((type && AGGREGATE_TYPE_P (type))
4656 n_words = rs6000_arg_size (mode, type);
4658 /* Long long and SPE vectors are put in odd registers. */
4659 if (n_words == 2 && (gregno & 1) == 0)
4662 /* Long long does not split between registers and stack. */
4663 if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
4664 return gen_rtx_REG (mode, gregno);
4671 int align = (TARGET_32BIT && (cum->words & 1) != 0
4672 && function_arg_boundary (mode, type) == 64) ? 1 : 0;
4673 int align_words = cum->words + align;
4675 if (type && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4678 if (TARGET_32BIT && TARGET_POWERPC64
4679 && (mode == DImode || mode == BLKmode))
4680 return rs6000_mixed_function_arg (cum, mode, type, align_words);
4682 if (USE_FP_FOR_ARG_P (cum, mode, type))
4687 enum machine_mode fmode = mode;
4689 unsigned long n_fpreg = (GET_MODE_SIZE (mode) + 7) >> 3;
4691 if (cum->fregno + n_fpreg > FP_ARG_MAX_REG + 1)
4693 /* Long double split over regs and memory. */
4694 if (fmode == TFmode)
4697 /* Currently, we only ever need one reg here because complex
4698 doubles are split. */
4699 if (cum->fregno != FP_ARG_MAX_REG - 1)
4702 fpr[1] = gen_rtx_REG (fmode, cum->fregno);
4704 /* Do we also need to pass this arg in the parameter save
4707 && (cum->nargs_prototype <= 0
4708 || (DEFAULT_ABI == ABI_AIX
4710 && align_words >= GP_ARG_NUM_REG)));
4712 if (!needs_psave && mode == fmode)
4715 if (TARGET_32BIT && TARGET_POWERPC64
4716 && mode == DFmode && cum->stdarg)
4717 return rs6000_mixed_function_arg (cum, mode, type, align_words);
4719 /* Describe where this piece goes. */
4721 *r = gen_rtx_EXPR_LIST (VOIDmode, *r, const0_rtx);
4726 /* Now describe the part that goes in gprs or the stack.
4727 This piece must come first, before the fprs. */
4729 if (align_words < GP_ARG_NUM_REG)
4731 unsigned long n_words = rs6000_arg_size (mode, type);
4732 enum machine_mode rmode = mode;
4734 if (align_words + n_words > GP_ARG_NUM_REG)
4735 /* If this is partially on the stack, then we only
4736 include the portion actually in registers here.
4737 We know this can only be one register because
4738 complex doubles are splt. */
4740 reg = gen_rtx_REG (rmode, GP_ARG_MIN_REG + align_words);
4742 *--r = gen_rtx_EXPR_LIST (VOIDmode, reg, const0_rtx);
4746 return gen_rtx_PARALLEL (mode, gen_rtvec_v (n, r));
4748 else if (align_words < GP_ARG_NUM_REG)
4749 return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
4755 /* For an arg passed partly in registers and partly in memory,
4756 this is the number of registers used.
4757 For args passed entirely in registers or entirely in memory, zero. */
4760 function_arg_partial_nregs (CUMULATIVE_ARGS *cum, enum machine_mode mode,
4761 tree type, int named)
4765 if (DEFAULT_ABI == ABI_V4)
4768 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named)
4769 && cum->nargs_prototype >= 0)
4772 if (USE_FP_FOR_ARG_P (cum, mode, type))
4774 if (cum->fregno + ((GET_MODE_SIZE (mode) + 7) >> 3) > FP_ARG_MAX_REG + 1)
4775 ret = FP_ARG_MAX_REG - cum->fregno;
4776 else if (cum->nargs_prototype >= 0)
4780 if (cum->words < GP_ARG_NUM_REG
4781 && GP_ARG_NUM_REG < cum->words + rs6000_arg_size (mode, type))
4782 ret = GP_ARG_NUM_REG - cum->words;
4784 if (ret != 0 && TARGET_DEBUG_ARG)
4785 fprintf (stderr, "function_arg_partial_nregs: %d\n", ret);
4790 /* A C expression that indicates when an argument must be passed by
4791 reference. If nonzero for an argument, a copy of that argument is
4792 made in memory and a pointer to the argument is passed instead of
4793 the argument itself. The pointer is passed in whatever way is
4794 appropriate for passing a pointer to that type.
4796 Under V.4, structures and unions are passed by reference.
4798 As an extension to all ABIs, variable sized types are passed by
4802 function_arg_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
4803 enum machine_mode mode ATTRIBUTE_UNUSED,
4804 tree type, int named ATTRIBUTE_UNUSED)
4806 if (DEFAULT_ABI == ABI_V4
4807 && ((type && AGGREGATE_TYPE_P (type))
4810 if (TARGET_DEBUG_ARG)
4811 fprintf (stderr, "function_arg_pass_by_reference: aggregate\n");
4815 return type && int_size_in_bytes (type) < 0;
4819 rs6000_move_block_from_reg (int regno, rtx x, int nregs)
4822 enum machine_mode reg_mode = TARGET_32BIT ? SImode : DImode;
4827 for (i = 0; i < nregs; i++)
4829 rtx tem = adjust_address_nv (x, reg_mode, i*GET_MODE_SIZE(reg_mode));
4830 if (reload_completed)
4832 if (! strict_memory_address_p (reg_mode, XEXP (tem, 0)))
4835 tem = simplify_gen_subreg (reg_mode, x, BLKmode,
4836 i * GET_MODE_SIZE(reg_mode));
4839 tem = replace_equiv_address (tem, XEXP (tem, 0));
4841 if (tem == NULL_RTX)
4844 emit_move_insn (tem, gen_rtx_REG (reg_mode, regno + i));
4849 /* Perform any needed actions needed for a function that is receiving a
4850 variable number of arguments.
4854 MODE and TYPE are the mode and type of the current parameter.
4856 PRETEND_SIZE is a variable that should be set to the amount of stack
4857 that must be pushed by the prolog to pretend that our caller pushed
4860 Normally, this macro will push all remaining incoming registers on the
4861 stack and set PRETEND_SIZE to the length of the registers pushed. */
4864 setup_incoming_varargs (CUMULATIVE_ARGS *cum, enum machine_mode mode,
4865 tree type, int *pretend_size ATTRIBUTE_UNUSED, int no_rtl)
4867 CUMULATIVE_ARGS next_cum;
4868 int reg_size = TARGET_32BIT ? 4 : 8;
4869 rtx save_area = NULL_RTX, mem;
4870 int first_reg_offset, set;
4872 /* Skip the last named argument. */
4874 function_arg_advance (&next_cum, mode, type, 1);
4876 if (DEFAULT_ABI == ABI_V4)
4878 /* Indicate to allocate space on the stack for varargs save area. */
4879 cfun->machine->sysv_varargs_p = 1;
4881 save_area = plus_constant (virtual_stack_vars_rtx,
4882 - RS6000_VARARGS_SIZE);
4884 first_reg_offset = next_cum.sysv_gregno - GP_ARG_MIN_REG;
4888 first_reg_offset = next_cum.words;
4889 save_area = virtual_incoming_args_rtx;
4890 cfun->machine->sysv_varargs_p = 0;
4892 if (MUST_PASS_IN_STACK (mode, type))
4893 first_reg_offset += rs6000_arg_size (TYPE_MODE (type), type);
4896 set = get_varargs_alias_set ();
4897 if (! no_rtl && first_reg_offset < GP_ARG_NUM_REG)
4899 mem = gen_rtx_MEM (BLKmode,
4900 plus_constant (save_area,
4901 first_reg_offset * reg_size)),
4902 set_mem_alias_set (mem, set);
4903 set_mem_align (mem, BITS_PER_WORD);
4905 rs6000_move_block_from_reg (GP_ARG_MIN_REG + first_reg_offset, mem,
4906 GP_ARG_NUM_REG - first_reg_offset);
4909 /* Save FP registers if needed. */
4910 if (DEFAULT_ABI == ABI_V4
4911 && TARGET_HARD_FLOAT && TARGET_FPRS
4913 && next_cum.fregno <= FP_ARG_V4_MAX_REG)
4915 int fregno = next_cum.fregno;
4916 rtx cr1 = gen_rtx_REG (CCmode, CR1_REGNO);
4917 rtx lab = gen_label_rtx ();
4918 int off = (GP_ARG_NUM_REG * reg_size) + ((fregno - FP_ARG_MIN_REG) * 8);
4920 emit_jump_insn (gen_rtx_SET (VOIDmode,
4922 gen_rtx_IF_THEN_ELSE (VOIDmode,
4923 gen_rtx_NE (VOIDmode, cr1,
4925 gen_rtx_LABEL_REF (VOIDmode, lab),
4928 while (fregno <= FP_ARG_V4_MAX_REG)
4930 mem = gen_rtx_MEM (DFmode, plus_constant (save_area, off));
4931 set_mem_alias_set (mem, set);
4932 emit_move_insn (mem, gen_rtx_REG (DFmode, fregno));
4941 /* Create the va_list data type. */
4944 rs6000_build_builtin_va_list (void)
4946 tree f_gpr, f_fpr, f_res, f_ovf, f_sav, record, type_decl;
4948 /* For AIX, prefer 'char *' because that's what the system
4949 header files like. */
4950 if (DEFAULT_ABI != ABI_V4)
4951 return build_pointer_type (char_type_node);
4953 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
4954 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
4956 f_gpr = build_decl (FIELD_DECL, get_identifier ("gpr"),
4957 unsigned_char_type_node);
4958 f_fpr = build_decl (FIELD_DECL, get_identifier ("fpr"),
4959 unsigned_char_type_node);
4960 /* Give the two bytes of padding a name, so that -Wpadded won't warn on
4962 f_res = build_decl (FIELD_DECL, get_identifier ("reserved"),
4963 short_unsigned_type_node);
4964 f_ovf = build_decl (FIELD_DECL, get_identifier ("overflow_arg_area"),
4966 f_sav = build_decl (FIELD_DECL, get_identifier ("reg_save_area"),
4969 DECL_FIELD_CONTEXT (f_gpr) = record;
4970 DECL_FIELD_CONTEXT (f_fpr) = record;
4971 DECL_FIELD_CONTEXT (f_res) = record;
4972 DECL_FIELD_CONTEXT (f_ovf) = record;
4973 DECL_FIELD_CONTEXT (f_sav) = record;
4975 TREE_CHAIN (record) = type_decl;
4976 TYPE_NAME (record) = type_decl;
4977 TYPE_FIELDS (record) = f_gpr;
4978 TREE_CHAIN (f_gpr) = f_fpr;
4979 TREE_CHAIN (f_fpr) = f_res;
4980 TREE_CHAIN (f_res) = f_ovf;
4981 TREE_CHAIN (f_ovf) = f_sav;
4983 layout_type (record);
4985 /* The correct type is an array type of one element. */
4986 return build_array_type (record, build_index_type (size_zero_node));
4989 /* Implement va_start. */
4992 rs6000_va_start (tree valist, rtx nextarg)
4994 HOST_WIDE_INT words, n_gpr, n_fpr;
4995 tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
4996 tree gpr, fpr, ovf, sav, t;
4998 /* Only SVR4 needs something special. */
4999 if (DEFAULT_ABI != ABI_V4)
5001 std_expand_builtin_va_start (valist, nextarg);
5005 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
5006 f_fpr = TREE_CHAIN (f_gpr);
5007 f_res = TREE_CHAIN (f_fpr);
5008 f_ovf = TREE_CHAIN (f_res);
5009 f_sav = TREE_CHAIN (f_ovf);
5011 valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
5012 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
5013 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
5014 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
5015 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);
5017 /* Count number of gp and fp argument registers used. */
5018 words = current_function_args_info.words;
5019 n_gpr = current_function_args_info.sysv_gregno - GP_ARG_MIN_REG;
5020 n_fpr = current_function_args_info.fregno - FP_ARG_MIN_REG;
5022 if (TARGET_DEBUG_ARG)
5023 fprintf (stderr, "va_start: words = "HOST_WIDE_INT_PRINT_DEC", n_gpr = "
5024 HOST_WIDE_INT_PRINT_DEC", n_fpr = "HOST_WIDE_INT_PRINT_DEC"\n",
5025 words, n_gpr, n_fpr);
5027 t = build (MODIFY_EXPR, TREE_TYPE (gpr), gpr, build_int_2 (n_gpr, 0));
5028 TREE_SIDE_EFFECTS (t) = 1;
5029 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5031 t = build (MODIFY_EXPR, TREE_TYPE (fpr), fpr, build_int_2 (n_fpr, 0));
5032 TREE_SIDE_EFFECTS (t) = 1;
5033 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5035 /* Find the overflow area. */
5036 t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
5038 t = build (PLUS_EXPR, TREE_TYPE (ovf), t,
5039 build_int_2 (words * UNITS_PER_WORD, 0));
5040 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
5041 TREE_SIDE_EFFECTS (t) = 1;
5042 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5044 /* Find the register save area. */
5045 t = make_tree (TREE_TYPE (sav), virtual_stack_vars_rtx);
5046 t = build (PLUS_EXPR, TREE_TYPE (sav), t,
5047 build_int_2 (-RS6000_VARARGS_SIZE, -1));
5048 t = build (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
5049 TREE_SIDE_EFFECTS (t) = 1;
5050 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5053 /* Implement va_arg. */
5056 rs6000_va_arg (tree valist, tree type)
5058 tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
5059 tree gpr, fpr, ovf, sav, reg, t, u;
5060 int indirect_p, size, rsize, n_reg, sav_ofs, sav_scale;
5061 rtx lab_false, lab_over, addr_rtx, r;
5063 if (DEFAULT_ABI != ABI_V4)
5065 /* Variable sized types are passed by reference. */
5066 if (int_size_in_bytes (type) < 0)
5068 u = build_pointer_type (type);
5070 /* Args grow upward. */
5071 t = build (POSTINCREMENT_EXPR, TREE_TYPE (valist), valist,
5072 build_int_2 (POINTER_SIZE / BITS_PER_UNIT, 0));
5073 TREE_SIDE_EFFECTS (t) = 1;
5075 t = build1 (NOP_EXPR, build_pointer_type (u), t);
5076 TREE_SIDE_EFFECTS (t) = 1;
5078 t = build1 (INDIRECT_REF, u, t);
5079 TREE_SIDE_EFFECTS (t) = 1;
5081 return expand_expr (t, NULL_RTX, VOIDmode, EXPAND_NORMAL);
5083 if (targetm.calls.split_complex_arg
5084 && TREE_CODE (type) == COMPLEX_TYPE)
5086 tree elem_type = TREE_TYPE (type);
5087 enum machine_mode elem_mode = TYPE_MODE (elem_type);
5088 int elem_size = GET_MODE_SIZE (elem_mode);
5090 if (elem_size < UNITS_PER_WORD)
5092 rtx real_part, imag_part, dest_real, rr;
5094 real_part = rs6000_va_arg (valist, elem_type);
5095 imag_part = rs6000_va_arg (valist, elem_type);
5097 /* We're not returning the value here, but the address.
5098 real_part and imag_part are not contiguous, and we know
5099 there is space available to pack real_part next to
5100 imag_part. float _Complex is not promoted to
5101 double _Complex by the default promotion rules that
5102 promote float to double. */
5103 if (2 * elem_size > UNITS_PER_WORD)
5106 real_part = gen_rtx_MEM (elem_mode, real_part);
5107 imag_part = gen_rtx_MEM (elem_mode, imag_part);
5109 dest_real = adjust_address (imag_part, elem_mode, -elem_size);
5110 rr = gen_reg_rtx (elem_mode);
5111 emit_move_insn (rr, real_part);
5112 emit_move_insn (dest_real, rr);
5114 return XEXP (dest_real, 0);
5118 return std_expand_builtin_va_arg (valist, type);
5121 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
5122 f_fpr = TREE_CHAIN (f_gpr);
5123 f_res = TREE_CHAIN (f_fpr);
5124 f_ovf = TREE_CHAIN (f_res);
5125 f_sav = TREE_CHAIN (f_ovf);
5127 valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
5128 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
5129 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
5130 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
5131 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);
5133 size = int_size_in_bytes (type);
5134 rsize = (size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
5136 if (AGGREGATE_TYPE_P (type) || TYPE_MODE (type) == TFmode)
5138 /* Aggregates and long doubles are passed by reference. */
5144 size = UNITS_PER_WORD;
5147 else if (FLOAT_TYPE_P (type) && TARGET_HARD_FLOAT && TARGET_FPRS)
5149 /* FP args go in FP registers, if present. */
5158 /* Otherwise into GP registers. */
5166 /* Pull the value out of the saved registers.... */
5168 lab_false = gen_label_rtx ();
5169 lab_over = gen_label_rtx ();
5170 addr_rtx = gen_reg_rtx (Pmode);
5172 /* AltiVec vectors never go in registers. */
5173 if (!TARGET_ALTIVEC || TREE_CODE (type) != VECTOR_TYPE)
5175 TREE_THIS_VOLATILE (reg) = 1;
5176 emit_cmp_and_jump_insns
5177 (expand_expr (reg, NULL_RTX, QImode, EXPAND_NORMAL),
5178 GEN_INT (8 - n_reg + 1), GE, const1_rtx, QImode, 1,
5181 /* Long long is aligned in the registers. */
5184 u = build (BIT_AND_EXPR, TREE_TYPE (reg), reg,
5185 build_int_2 (n_reg - 1, 0));
5186 u = build (PLUS_EXPR, TREE_TYPE (reg), reg, u);
5187 u = build (MODIFY_EXPR, TREE_TYPE (reg), reg, u);
5188 TREE_SIDE_EFFECTS (u) = 1;
5189 expand_expr (u, const0_rtx, VOIDmode, EXPAND_NORMAL);
5193 t = build (PLUS_EXPR, ptr_type_node, sav, build_int_2 (sav_ofs, 0));
5197 u = build (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg,
5198 build_int_2 (n_reg, 0));
5199 TREE_SIDE_EFFECTS (u) = 1;
5201 u = build1 (CONVERT_EXPR, integer_type_node, u);
5202 TREE_SIDE_EFFECTS (u) = 1;
5204 u = build (MULT_EXPR, integer_type_node, u, build_int_2 (sav_scale, 0));
5205 TREE_SIDE_EFFECTS (u) = 1;
5207 t = build (PLUS_EXPR, ptr_type_node, t, u);
5208 TREE_SIDE_EFFECTS (t) = 1;
5210 r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
5212 emit_move_insn (addr_rtx, r);
5214 emit_jump_insn (gen_jump (lab_over));
5218 emit_label (lab_false);
5220 /* ... otherwise out of the overflow area. */
5222 /* Make sure we don't find reg 7 for the next int arg.
5224 All AltiVec vectors go in the overflow area. So in the AltiVec
5225 case we need to get the vectors from the overflow area, but
5226 remember where the GPRs and FPRs are. */
5227 if (n_reg > 1 && (TREE_CODE (type) != VECTOR_TYPE
5228 || !TARGET_ALTIVEC))
5230 t = build (MODIFY_EXPR, TREE_TYPE (reg), reg, build_int_2 (8, 0));
5231 TREE_SIDE_EFFECTS (t) = 1;
5232 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5235 /* Care for on-stack alignment if needed. */
5242 /* AltiVec vectors are 16 byte aligned. */
5243 if (TARGET_ALTIVEC && TREE_CODE (type) == VECTOR_TYPE)
5248 t = build (PLUS_EXPR, TREE_TYPE (ovf), ovf, build_int_2 (align, 0));
5249 t = build (BIT_AND_EXPR, TREE_TYPE (t), t, build_int_2 (-align-1, -1));
5253 r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
5255 emit_move_insn (addr_rtx, r);
5257 t = build (PLUS_EXPR, TREE_TYPE (t), t, build_int_2 (size, 0));
5258 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
5259 TREE_SIDE_EFFECTS (t) = 1;
5260 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5262 emit_label (lab_over);
5266 r = gen_rtx_MEM (Pmode, addr_rtx);
5267 set_mem_alias_set (r, get_varargs_alias_set ());
5268 emit_move_insn (addr_rtx, r);
5276 #define def_builtin(MASK, NAME, TYPE, CODE) \
5278 if ((MASK) & target_flags) \
5279 builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, \
5283 /* Simple ternary operations: VECd = foo (VECa, VECb, VECc). */
5285 static const struct builtin_description bdesc_3arg[] =
5287 { MASK_ALTIVEC, CODE_FOR_altivec_vmaddfp, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP },
5288 { MASK_ALTIVEC, CODE_FOR_altivec_vmhaddshs, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS },
5289 { MASK_ALTIVEC, CODE_FOR_altivec_vmhraddshs, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS },
5290 { MASK_ALTIVEC, CODE_FOR_altivec_vmladduhm, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM},
5291 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumubm, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM },
5292 { MASK_ALTIVEC, CODE_FOR_altivec_vmsummbm, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM },
5293 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhm, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM },
5294 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshm, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM },
5295 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhs, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS },
5296 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshs, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS },
5297 { MASK_ALTIVEC, CODE_FOR_altivec_vnmsubfp, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP },
5298 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4sf, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF },
5299 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4si, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI },
5300 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_8hi, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI },
5301 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_16qi, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI },
5302 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4sf, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF },
5303 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4si, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI },
5304 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_8hi, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI },
5305 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_16qi, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI },
5306 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_16qi, "__builtin_altivec_vsldoi_16qi", ALTIVEC_BUILTIN_VSLDOI_16QI },
5307 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_8hi, "__builtin_altivec_vsldoi_8hi", ALTIVEC_BUILTIN_VSLDOI_8HI },
5308 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4si, "__builtin_altivec_vsldoi_4si", ALTIVEC_BUILTIN_VSLDOI_4SI },
5309 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4sf, "__builtin_altivec_vsldoi_4sf", ALTIVEC_BUILTIN_VSLDOI_4SF },
5312 /* DST operations: void foo (void *, const int, const char). */
5314 static const struct builtin_description bdesc_dst[] =
5316 { MASK_ALTIVEC, CODE_FOR_altivec_dst, "__builtin_altivec_dst", ALTIVEC_BUILTIN_DST },
5317 { MASK_ALTIVEC, CODE_FOR_altivec_dstt, "__builtin_altivec_dstt", ALTIVEC_BUILTIN_DSTT },
5318 { MASK_ALTIVEC, CODE_FOR_altivec_dstst, "__builtin_altivec_dstst", ALTIVEC_BUILTIN_DSTST },
5319 { MASK_ALTIVEC, CODE_FOR_altivec_dststt, "__builtin_altivec_dststt", ALTIVEC_BUILTIN_DSTSTT }
5322 /* Simple binary operations: VECc = foo (VECa, VECb). */
5324 static struct builtin_description bdesc_2arg[] =
5326 { MASK_ALTIVEC, CODE_FOR_addv16qi3, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM },
5327 { MASK_ALTIVEC, CODE_FOR_addv8hi3, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM },
5328 { MASK_ALTIVEC, CODE_FOR_addv4si3, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM },
5329 { MASK_ALTIVEC, CODE_FOR_addv4sf3, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP },
5330 { MASK_ALTIVEC, CODE_FOR_altivec_vaddcuw, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW },
5331 { MASK_ALTIVEC, CODE_FOR_altivec_vaddubs, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS },
5332 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsbs, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS },
5333 { MASK_ALTIVEC, CODE_FOR_altivec_vadduhs, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS },
5334 { MASK_ALTIVEC, CODE_FOR_altivec_vaddshs, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS },
5335 { MASK_ALTIVEC, CODE_FOR_altivec_vadduws, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS },
5336 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsws, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS },
5337 { MASK_ALTIVEC, CODE_FOR_andv4si3, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND },
5338 { MASK_ALTIVEC, CODE_FOR_altivec_vandc, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC },
5339 { MASK_ALTIVEC, CODE_FOR_altivec_vavgub, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB },
5340 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsb, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB },
5341 { MASK_ALTIVEC, CODE_FOR_altivec_vavguh, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH },
5342 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsh, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH },
5343 { MASK_ALTIVEC, CODE_FOR_altivec_vavguw, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW },
5344 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsw, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW },
5345 { MASK_ALTIVEC, CODE_FOR_altivec_vcfux, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX },
5346 { MASK_ALTIVEC, CODE_FOR_altivec_vcfsx, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX },
5347 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpbfp, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP },
5348 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequb, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB },
5349 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequh, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH },
5350 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequw, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW },
5351 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpeqfp, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP },
5352 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgefp, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP },
5353 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtub, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB },
5354 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsb, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB },
5355 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuh, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH },
5356 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsh, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH },
5357 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuw, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW },
5358 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsw, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW },
5359 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtfp, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP },
5360 { MASK_ALTIVEC, CODE_FOR_altivec_vctsxs, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS },
5361 { MASK_ALTIVEC, CODE_FOR_altivec_vctuxs, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS },
5362 { MASK_ALTIVEC, CODE_FOR_umaxv16qi3, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB },
5363 { MASK_ALTIVEC, CODE_FOR_smaxv16qi3, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB },
5364 { MASK_ALTIVEC, CODE_FOR_umaxv8hi3, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH },
5365 { MASK_ALTIVEC, CODE_FOR_smaxv8hi3, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH },
5366 { MASK_ALTIVEC, CODE_FOR_umaxv4si3, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW },
5367 { MASK_ALTIVEC, CODE_FOR_smaxv4si3, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW },
5368 { MASK_ALTIVEC, CODE_FOR_smaxv4sf3, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP },
5369 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghb, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB },
5370 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghh, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH },
5371 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghw, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW },
5372 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglb, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB },
5373 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglh, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH },
5374 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglw, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW },
5375 { MASK_ALTIVEC, CODE_FOR_uminv16qi3, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB },
5376 { MASK_ALTIVEC, CODE_FOR_sminv16qi3, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB },
5377 { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH },
5378 { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH },
5379 { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW },
5380 { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW },
5381 { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP },
5382 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleub, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB },
5383 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesb, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB },
5384 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleuh, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH },
5385 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesh, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH },
5386 { MASK_ALTIVEC, CODE_FOR_altivec_vmuloub, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB },
5387 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosb, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB },
5388 { MASK_ALTIVEC, CODE_FOR_altivec_vmulouh, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH },
5389 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosh, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH },
5390 { MASK_ALTIVEC, CODE_FOR_altivec_vnor, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR },
5391 { MASK_ALTIVEC, CODE_FOR_iorv4si3, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR },
5392 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhum, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM },
5393 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwum, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM },
5394 { MASK_ALTIVEC, CODE_FOR_altivec_vpkpx, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX },
5395 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhss, "__builtin_altivec_vpkuhss", ALTIVEC_BUILTIN_VPKUHSS },
5396 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshss, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS },
5397 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwss, "__builtin_altivec_vpkuwss", ALTIVEC_BUILTIN_VPKUWSS },
5398 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswss, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS },
5399 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhus, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS },
5400 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshus, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS },
5401 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwus, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS },
5402 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswus, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS },
5403 { MASK_ALTIVEC, CODE_FOR_altivec_vrlb, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB },
5404 { MASK_ALTIVEC, CODE_FOR_altivec_vrlh, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH },
5405 { MASK_ALTIVEC, CODE_FOR_altivec_vrlw, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW },
5406 { MASK_ALTIVEC, CODE_FOR_altivec_vslb, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB },
5407 { MASK_ALTIVEC, CODE_FOR_altivec_vslh, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH },
5408 { MASK_ALTIVEC, CODE_FOR_altivec_vslw, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW },
5409 { MASK_ALTIVEC, CODE_FOR_altivec_vsl, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL },
5410 { MASK_ALTIVEC, CODE_FOR_altivec_vslo, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO },
5411 { MASK_ALTIVEC, CODE_FOR_altivec_vspltb, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB },
5412 { MASK_ALTIVEC, CODE_FOR_altivec_vsplth, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH },
5413 { MASK_ALTIVEC, CODE_FOR_altivec_vspltw, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW },
5414 { MASK_ALTIVEC, CODE_FOR_altivec_vsrb, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB },
5415 { MASK_ALTIVEC, CODE_FOR_altivec_vsrh, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH },
5416 { MASK_ALTIVEC, CODE_FOR_altivec_vsrw, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW },
5417 { MASK_ALTIVEC, CODE_FOR_altivec_vsrab, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB },
5418 { MASK_ALTIVEC, CODE_FOR_altivec_vsrah, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH },
5419 { MASK_ALTIVEC, CODE_FOR_altivec_vsraw, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW },
5420 { MASK_ALTIVEC, CODE_FOR_altivec_vsr, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR },
5421 { MASK_ALTIVEC, CODE_FOR_altivec_vsro, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO },
5422 { MASK_ALTIVEC, CODE_FOR_subv16qi3, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM },
5423 { MASK_ALTIVEC, CODE_FOR_subv8hi3, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM },
5424 { MASK_ALTIVEC, CODE_FOR_subv4si3, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM },
5425 { MASK_ALTIVEC, CODE_FOR_subv4sf3, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP },
5426 { MASK_ALTIVEC, CODE_FOR_altivec_vsubcuw, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW },
5427 { MASK_ALTIVEC, CODE_FOR_altivec_vsububs, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS },
5428 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsbs, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS },
5429 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuhs, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS },
5430 { MASK_ALTIVEC, CODE_FOR_altivec_vsubshs, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS },
5431 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuws, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS },
5432 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsws, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS },
5433 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4ubs, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS },
5434 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4sbs, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS },
5435 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4shs, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS },
5436 { MASK_ALTIVEC, CODE_FOR_altivec_vsum2sws, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS },
5437 { MASK_ALTIVEC, CODE_FOR_altivec_vsumsws, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS },
5438 { MASK_ALTIVEC, CODE_FOR_xorv4si3, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR },
5440 /* Place holder, leave as first spe builtin. */
5441 { 0, CODE_FOR_spe_evaddw, "__builtin_spe_evaddw", SPE_BUILTIN_EVADDW },
5442 { 0, CODE_FOR_spe_evand, "__builtin_spe_evand", SPE_BUILTIN_EVAND },
5443 { 0, CODE_FOR_spe_evandc, "__builtin_spe_evandc", SPE_BUILTIN_EVANDC },
5444 { 0, CODE_FOR_spe_evdivws, "__builtin_spe_evdivws", SPE_BUILTIN_EVDIVWS },
5445 { 0, CODE_FOR_spe_evdivwu, "__builtin_spe_evdivwu", SPE_BUILTIN_EVDIVWU },
5446 { 0, CODE_FOR_spe_eveqv, "__builtin_spe_eveqv", SPE_BUILTIN_EVEQV },
5447 { 0, CODE_FOR_spe_evfsadd, "__builtin_spe_evfsadd", SPE_BUILTIN_EVFSADD },
5448 { 0, CODE_FOR_spe_evfsdiv, "__builtin_spe_evfsdiv", SPE_BUILTIN_EVFSDIV },
5449 { 0, CODE_FOR_spe_evfsmul, "__builtin_spe_evfsmul", SPE_BUILTIN_EVFSMUL },
5450 { 0, CODE_FOR_spe_evfssub, "__builtin_spe_evfssub", SPE_BUILTIN_EVFSSUB },
5451 { 0, CODE_FOR_spe_evmergehi, "__builtin_spe_evmergehi", SPE_BUILTIN_EVMERGEHI },
5452 { 0, CODE_FOR_spe_evmergehilo, "__builtin_spe_evmergehilo", SPE_BUILTIN_EVMERGEHILO },
5453 { 0, CODE_FOR_spe_evmergelo, "__builtin_spe_evmergelo", SPE_BUILTIN_EVMERGELO },
5454 { 0, CODE_FOR_spe_evmergelohi, "__builtin_spe_evmergelohi", SPE_BUILTIN_EVMERGELOHI },
5455 { 0, CODE_FOR_spe_evmhegsmfaa, "__builtin_spe_evmhegsmfaa", SPE_BUILTIN_EVMHEGSMFAA },
5456 { 0, CODE_FOR_spe_evmhegsmfan, "__builtin_spe_evmhegsmfan", SPE_BUILTIN_EVMHEGSMFAN },
5457 { 0, CODE_FOR_spe_evmhegsmiaa, "__builtin_spe_evmhegsmiaa", SPE_BUILTIN_EVMHEGSMIAA },
5458 { 0, CODE_FOR_spe_evmhegsmian, "__builtin_spe_evmhegsmian", SPE_BUILTIN_EVMHEGSMIAN },
5459 { 0, CODE_FOR_spe_evmhegumiaa, "__builtin_spe_evmhegumiaa", SPE_BUILTIN_EVMHEGUMIAA },
5460 { 0, CODE_FOR_spe_evmhegumian, "__builtin_spe_evmhegumian", SPE_BUILTIN_EVMHEGUMIAN },
5461 { 0, CODE_FOR_spe_evmhesmf, "__builtin_spe_evmhesmf", SPE_BUILTIN_EVMHESMF },
5462 { 0, CODE_FOR_spe_evmhesmfa, "__builtin_spe_evmhesmfa", SPE_BUILTIN_EVMHESMFA },
5463 { 0, CODE_FOR_spe_evmhesmfaaw, "__builtin_spe_evmhesmfaaw", SPE_BUILTIN_EVMHESMFAAW },
5464 { 0, CODE_FOR_spe_evmhesmfanw, "__builtin_spe_evmhesmfanw", SPE_BUILTIN_EVMHESMFANW },
5465 { 0, CODE_FOR_spe_evmhesmi, "__builtin_spe_evmhesmi", SPE_BUILTIN_EVMHESMI },
5466 { 0, CODE_FOR_spe_evmhesmia, "__builtin_spe_evmhesmia", SPE_BUILTIN_EVMHESMIA },
5467 { 0, CODE_FOR_spe_evmhesmiaaw, "__builtin_spe_evmhesmiaaw", SPE_BUILTIN_EVMHESMIAAW },
5468 { 0, CODE_FOR_spe_evmhesmianw, "__builtin_spe_evmhesmianw", SPE_BUILTIN_EVMHESMIANW },
5469 { 0, CODE_FOR_spe_evmhessf, "__builtin_spe_evmhessf", SPE_BUILTIN_EVMHESSF },
5470 { 0, CODE_FOR_spe_evmhessfa, "__builtin_spe_evmhessfa", SPE_BUILTIN_EVMHESSFA },
5471 { 0, CODE_FOR_spe_evmhessfaaw, "__builtin_spe_evmhessfaaw", SPE_BUILTIN_EVMHESSFAAW },
5472 { 0, CODE_FOR_spe_evmhessfanw, "__builtin_spe_evmhessfanw", SPE_BUILTIN_EVMHESSFANW },
5473 { 0, CODE_FOR_spe_evmhessiaaw, "__builtin_spe_evmhessiaaw", SPE_BUILTIN_EVMHESSIAAW },
5474 { 0, CODE_FOR_spe_evmhessianw, "__builtin_spe_evmhessianw", SPE_BUILTIN_EVMHESSIANW },
5475 { 0, CODE_FOR_spe_evmheumi, "__builtin_spe_evmheumi", SPE_BUILTIN_EVMHEUMI },
5476 { 0, CODE_FOR_spe_evmheumia, "__builtin_spe_evmheumia", SPE_BUILTIN_EVMHEUMIA },
5477 { 0, CODE_FOR_spe_evmheumiaaw, "__builtin_spe_evmheumiaaw", SPE_BUILTIN_EVMHEUMIAAW },
5478 { 0, CODE_FOR_spe_evmheumianw, "__builtin_spe_evmheumianw", SPE_BUILTIN_EVMHEUMIANW },
5479 { 0, CODE_FOR_spe_evmheusiaaw, "__builtin_spe_evmheusiaaw", SPE_BUILTIN_EVMHEUSIAAW },
5480 { 0, CODE_FOR_spe_evmheusianw, "__builtin_spe_evmheusianw", SPE_BUILTIN_EVMHEUSIANW },
5481 { 0, CODE_FOR_spe_evmhogsmfaa, "__builtin_spe_evmhogsmfaa", SPE_BUILTIN_EVMHOGSMFAA },
5482 { 0, CODE_FOR_spe_evmhogsmfan, "__builtin_spe_evmhogsmfan", SPE_BUILTIN_EVMHOGSMFAN },
5483 { 0, CODE_FOR_spe_evmhogsmiaa, "__builtin_spe_evmhogsmiaa", SPE_BUILTIN_EVMHOGSMIAA },
5484 { 0, CODE_FOR_spe_evmhogsmian, "__builtin_spe_evmhogsmian", SPE_BUILTIN_EVMHOGSMIAN },
5485 { 0, CODE_FOR_spe_evmhogumiaa, "__builtin_spe_evmhogumiaa", SPE_BUILTIN_EVMHOGUMIAA },
5486 { 0, CODE_FOR_spe_evmhogumian, "__builtin_spe_evmhogumian", SPE_BUILTIN_EVMHOGUMIAN },
5487 { 0, CODE_FOR_spe_evmhosmf, "__builtin_spe_evmhosmf", SPE_BUILTIN_EVMHOSMF },
5488 { 0, CODE_FOR_spe_evmhosmfa, "__builtin_spe_evmhosmfa", SPE_BUILTIN_EVMHOSMFA },
5489 { 0, CODE_FOR_spe_evmhosmfaaw, "__builtin_spe_evmhosmfaaw", SPE_BUILTIN_EVMHOSMFAAW },
5490 { 0, CODE_FOR_spe_evmhosmfanw, "__builtin_spe_evmhosmfanw", SPE_BUILTIN_EVMHOSMFANW },
5491 { 0, CODE_FOR_spe_evmhosmi, "__builtin_spe_evmhosmi", SPE_BUILTIN_EVMHOSMI },
5492 { 0, CODE_FOR_spe_evmhosmia, "__builtin_spe_evmhosmia", SPE_BUILTIN_EVMHOSMIA },
5493 { 0, CODE_FOR_spe_evmhosmiaaw, "__builtin_spe_evmhosmiaaw", SPE_BUILTIN_EVMHOSMIAAW },
5494 { 0, CODE_FOR_spe_evmhosmianw, "__builtin_spe_evmhosmianw", SPE_BUILTIN_EVMHOSMIANW },
5495 { 0, CODE_FOR_spe_evmhossf, "__builtin_spe_evmhossf", SPE_BUILTIN_EVMHOSSF },
5496 { 0, CODE_FOR_spe_evmhossfa, "__builtin_spe_evmhossfa", SPE_BUILTIN_EVMHOSSFA },
5497 { 0, CODE_FOR_spe_evmhossfaaw, "__builtin_spe_evmhossfaaw", SPE_BUILTIN_EVMHOSSFAAW },
5498 { 0, CODE_FOR_spe_evmhossfanw, "__builtin_spe_evmhossfanw", SPE_BUILTIN_EVMHOSSFANW },
5499 { 0, CODE_FOR_spe_evmhossiaaw, "__builtin_spe_evmhossiaaw", SPE_BUILTIN_EVMHOSSIAAW },
5500 { 0, CODE_FOR_spe_evmhossianw, "__builtin_spe_evmhossianw", SPE_BUILTIN_EVMHOSSIANW },
5501 { 0, CODE_FOR_spe_evmhoumi, "__builtin_spe_evmhoumi", SPE_BUILTIN_EVMHOUMI },
5502 { 0, CODE_FOR_spe_evmhoumia, "__builtin_spe_evmhoumia", SPE_BUILTIN_EVMHOUMIA },
5503 { 0, CODE_FOR_spe_evmhoumiaaw, "__builtin_spe_evmhoumiaaw", SPE_BUILTIN_EVMHOUMIAAW },
5504 { 0, CODE_FOR_spe_evmhoumianw, "__builtin_spe_evmhoumianw", SPE_BUILTIN_EVMHOUMIANW },
5505 { 0, CODE_FOR_spe_evmhousiaaw, "__builtin_spe_evmhousiaaw", SPE_BUILTIN_EVMHOUSIAAW },
5506 { 0, CODE_FOR_spe_evmhousianw, "__builtin_spe_evmhousianw", SPE_BUILTIN_EVMHOUSIANW },
5507 { 0, CODE_FOR_spe_evmwhsmf, "__builtin_spe_evmwhsmf", SPE_BUILTIN_EVMWHSMF },
5508 { 0, CODE_FOR_spe_evmwhsmfa, "__builtin_spe_evmwhsmfa", SPE_BUILTIN_EVMWHSMFA },
5509 { 0, CODE_FOR_spe_evmwhsmi, "__builtin_spe_evmwhsmi", SPE_BUILTIN_EVMWHSMI },
5510 { 0, CODE_FOR_spe_evmwhsmia, "__builtin_spe_evmwhsmia", SPE_BUILTIN_EVMWHSMIA },
5511 { 0, CODE_FOR_spe_evmwhssf, "__builtin_spe_evmwhssf", SPE_BUILTIN_EVMWHSSF },
5512 { 0, CODE_FOR_spe_evmwhssfa, "__builtin_spe_evmwhssfa", SPE_BUILTIN_EVMWHSSFA },
5513 { 0, CODE_FOR_spe_evmwhumi, "__builtin_spe_evmwhumi", SPE_BUILTIN_EVMWHUMI },
5514 { 0, CODE_FOR_spe_evmwhumia, "__builtin_spe_evmwhumia", SPE_BUILTIN_EVMWHUMIA },
5515 { 0, CODE_FOR_spe_evmwlsmiaaw, "__builtin_spe_evmwlsmiaaw", SPE_BUILTIN_EVMWLSMIAAW },
5516 { 0, CODE_FOR_spe_evmwlsmianw, "__builtin_spe_evmwlsmianw", SPE_BUILTIN_EVMWLSMIANW },
5517 { 0, CODE_FOR_spe_evmwlssiaaw, "__builtin_spe_evmwlssiaaw", SPE_BUILTIN_EVMWLSSIAAW },
5518 { 0, CODE_FOR_spe_evmwlssianw, "__builtin_spe_evmwlssianw", SPE_BUILTIN_EVMWLSSIANW },
5519 { 0, CODE_FOR_spe_evmwlumi, "__builtin_spe_evmwlumi", SPE_BUILTIN_EVMWLUMI },
5520 { 0, CODE_FOR_spe_evmwlumia, "__builtin_spe_evmwlumia", SPE_BUILTIN_EVMWLUMIA },
5521 { 0, CODE_FOR_spe_evmwlumiaaw, "__builtin_spe_evmwlumiaaw", SPE_BUILTIN_EVMWLUMIAAW },
5522 { 0, CODE_FOR_spe_evmwlumianw, "__builtin_spe_evmwlumianw", SPE_BUILTIN_EVMWLUMIANW },
5523 { 0, CODE_FOR_spe_evmwlusiaaw, "__builtin_spe_evmwlusiaaw", SPE_BUILTIN_EVMWLUSIAAW },
5524 { 0, CODE_FOR_spe_evmwlusianw, "__builtin_spe_evmwlusianw", SPE_BUILTIN_EVMWLUSIANW },
5525 { 0, CODE_FOR_spe_evmwsmf, "__builtin_spe_evmwsmf", SPE_BUILTIN_EVMWSMF },
5526 { 0, CODE_FOR_spe_evmwsmfa, "__builtin_spe_evmwsmfa", SPE_BUILTIN_EVMWSMFA },
5527 { 0, CODE_FOR_spe_evmwsmfaa, "__builtin_spe_evmwsmfaa", SPE_BUILTIN_EVMWSMFAA },
5528 { 0, CODE_FOR_spe_evmwsmfan, "__builtin_spe_evmwsmfan", SPE_BUILTIN_EVMWSMFAN },
5529 { 0, CODE_FOR_spe_evmwsmi, "__builtin_spe_evmwsmi", SPE_BUILTIN_EVMWSMI },
5530 { 0, CODE_FOR_spe_evmwsmia, "__builtin_spe_evmwsmia", SPE_BUILTIN_EVMWSMIA },
5531 { 0, CODE_FOR_spe_evmwsmiaa, "__builtin_spe_evmwsmiaa", SPE_BUILTIN_EVMWSMIAA },
5532 { 0, CODE_FOR_spe_evmwsmian, "__builtin_spe_evmwsmian", SPE_BUILTIN_EVMWSMIAN },
5533 { 0, CODE_FOR_spe_evmwssf, "__builtin_spe_evmwssf", SPE_BUILTIN_EVMWSSF },
5534 { 0, CODE_FOR_spe_evmwssfa, "__builtin_spe_evmwssfa", SPE_BUILTIN_EVMWSSFA },
5535 { 0, CODE_FOR_spe_evmwssfaa, "__builtin_spe_evmwssfaa", SPE_BUILTIN_EVMWSSFAA },
5536 { 0, CODE_FOR_spe_evmwssfan, "__builtin_spe_evmwssfan", SPE_BUILTIN_EVMWSSFAN },
5537 { 0, CODE_FOR_spe_evmwumi, "__builtin_spe_evmwumi", SPE_BUILTIN_EVMWUMI },
5538 { 0, CODE_FOR_spe_evmwumia, "__builtin_spe_evmwumia", SPE_BUILTIN_EVMWUMIA },
5539 { 0, CODE_FOR_spe_evmwumiaa, "__builtin_spe_evmwumiaa", SPE_BUILTIN_EVMWUMIAA },
5540 { 0, CODE_FOR_spe_evmwumian, "__builtin_spe_evmwumian", SPE_BUILTIN_EVMWUMIAN },
5541 { 0, CODE_FOR_spe_evnand, "__builtin_spe_evnand", SPE_BUILTIN_EVNAND },
5542 { 0, CODE_FOR_spe_evnor, "__builtin_spe_evnor", SPE_BUILTIN_EVNOR },
5543 { 0, CODE_FOR_spe_evor, "__builtin_spe_evor", SPE_BUILTIN_EVOR },
5544 { 0, CODE_FOR_spe_evorc, "__builtin_spe_evorc", SPE_BUILTIN_EVORC },
5545 { 0, CODE_FOR_spe_evrlw, "__builtin_spe_evrlw", SPE_BUILTIN_EVRLW },
5546 { 0, CODE_FOR_spe_evslw, "__builtin_spe_evslw", SPE_BUILTIN_EVSLW },
5547 { 0, CODE_FOR_spe_evsrws, "__builtin_spe_evsrws", SPE_BUILTIN_EVSRWS },
5548 { 0, CODE_FOR_spe_evsrwu, "__builtin_spe_evsrwu", SPE_BUILTIN_EVSRWU },
5549 { 0, CODE_FOR_spe_evsubfw, "__builtin_spe_evsubfw", SPE_BUILTIN_EVSUBFW },
5551 /* SPE binary operations expecting a 5-bit unsigned literal. */
5552 { 0, CODE_FOR_spe_evaddiw, "__builtin_spe_evaddiw", SPE_BUILTIN_EVADDIW },
5554 { 0, CODE_FOR_spe_evrlwi, "__builtin_spe_evrlwi", SPE_BUILTIN_EVRLWI },
5555 { 0, CODE_FOR_spe_evslwi, "__builtin_spe_evslwi", SPE_BUILTIN_EVSLWI },
5556 { 0, CODE_FOR_spe_evsrwis, "__builtin_spe_evsrwis", SPE_BUILTIN_EVSRWIS },
5557 { 0, CODE_FOR_spe_evsrwiu, "__builtin_spe_evsrwiu", SPE_BUILTIN_EVSRWIU },
5558 { 0, CODE_FOR_spe_evsubifw, "__builtin_spe_evsubifw", SPE_BUILTIN_EVSUBIFW },
5559 { 0, CODE_FOR_spe_evmwhssfaa, "__builtin_spe_evmwhssfaa", SPE_BUILTIN_EVMWHSSFAA },
5560 { 0, CODE_FOR_spe_evmwhssmaa, "__builtin_spe_evmwhssmaa", SPE_BUILTIN_EVMWHSSMAA },
5561 { 0, CODE_FOR_spe_evmwhsmfaa, "__builtin_spe_evmwhsmfaa", SPE_BUILTIN_EVMWHSMFAA },
5562 { 0, CODE_FOR_spe_evmwhsmiaa, "__builtin_spe_evmwhsmiaa", SPE_BUILTIN_EVMWHSMIAA },
5563 { 0, CODE_FOR_spe_evmwhusiaa, "__builtin_spe_evmwhusiaa", SPE_BUILTIN_EVMWHUSIAA },
5564 { 0, CODE_FOR_spe_evmwhumiaa, "__builtin_spe_evmwhumiaa", SPE_BUILTIN_EVMWHUMIAA },
5565 { 0, CODE_FOR_spe_evmwhssfan, "__builtin_spe_evmwhssfan", SPE_BUILTIN_EVMWHSSFAN },
5566 { 0, CODE_FOR_spe_evmwhssian, "__builtin_spe_evmwhssian", SPE_BUILTIN_EVMWHSSIAN },
5567 { 0, CODE_FOR_spe_evmwhsmfan, "__builtin_spe_evmwhsmfan", SPE_BUILTIN_EVMWHSMFAN },
5568 { 0, CODE_FOR_spe_evmwhsmian, "__builtin_spe_evmwhsmian", SPE_BUILTIN_EVMWHSMIAN },
5569 { 0, CODE_FOR_spe_evmwhusian, "__builtin_spe_evmwhusian", SPE_BUILTIN_EVMWHUSIAN },
5570 { 0, CODE_FOR_spe_evmwhumian, "__builtin_spe_evmwhumian", SPE_BUILTIN_EVMWHUMIAN },
5571 { 0, CODE_FOR_spe_evmwhgssfaa, "__builtin_spe_evmwhgssfaa", SPE_BUILTIN_EVMWHGSSFAA },
5572 { 0, CODE_FOR_spe_evmwhgsmfaa, "__builtin_spe_evmwhgsmfaa", SPE_BUILTIN_EVMWHGSMFAA },
5573 { 0, CODE_FOR_spe_evmwhgsmiaa, "__builtin_spe_evmwhgsmiaa", SPE_BUILTIN_EVMWHGSMIAA },
5574 { 0, CODE_FOR_spe_evmwhgumiaa, "__builtin_spe_evmwhgumiaa", SPE_BUILTIN_EVMWHGUMIAA },
5575 { 0, CODE_FOR_spe_evmwhgssfan, "__builtin_spe_evmwhgssfan", SPE_BUILTIN_EVMWHGSSFAN },
5576 { 0, CODE_FOR_spe_evmwhgsmfan, "__builtin_spe_evmwhgsmfan", SPE_BUILTIN_EVMWHGSMFAN },
5577 { 0, CODE_FOR_spe_evmwhgsmian, "__builtin_spe_evmwhgsmian", SPE_BUILTIN_EVMWHGSMIAN },
5578 { 0, CODE_FOR_spe_evmwhgumian, "__builtin_spe_evmwhgumian", SPE_BUILTIN_EVMWHGUMIAN },
5579 { 0, CODE_FOR_spe_brinc, "__builtin_spe_brinc", SPE_BUILTIN_BRINC },
5581 /* Place-holder. Leave as last binary SPE builtin. */
5582 { 0, CODE_FOR_xorv2si3, "__builtin_spe_evxor", SPE_BUILTIN_EVXOR },
5585 /* AltiVec predicates. */
5587 struct builtin_description_predicates
5589 const unsigned int mask;
5590 const enum insn_code icode;
5592 const char *const name;
5593 const enum rs6000_builtins code;
5596 static const struct builtin_description_predicates bdesc_altivec_preds[] =
5598 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpbfp.", "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P },
5599 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpeqfp.", "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P },
5600 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgefp.", "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P },
5601 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgtfp.", "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P },
5602 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpequw.", "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P },
5603 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtsw.", "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P },
5604 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtuw.", "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P },
5605 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtuh.", "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P },
5606 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtsh.", "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P },
5607 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpequh.", "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P },
5608 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpequb.", "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P },
5609 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtsb.", "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P },
5610 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P }
5613 /* SPE predicates. */
5614 static struct builtin_description bdesc_spe_predicates[] =
5616 /* Place-holder. Leave as first. */
5617 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evcmpeq", SPE_BUILTIN_EVCMPEQ },
5618 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evcmpgts", SPE_BUILTIN_EVCMPGTS },
5619 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evcmpgtu", SPE_BUILTIN_EVCMPGTU },
5620 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evcmplts", SPE_BUILTIN_EVCMPLTS },
5621 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evcmpltu", SPE_BUILTIN_EVCMPLTU },
5622 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evfscmpeq", SPE_BUILTIN_EVFSCMPEQ },
5623 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evfscmpgt", SPE_BUILTIN_EVFSCMPGT },
5624 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evfscmplt", SPE_BUILTIN_EVFSCMPLT },
5625 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evfststeq", SPE_BUILTIN_EVFSTSTEQ },
5626 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evfststgt", SPE_BUILTIN_EVFSTSTGT },
5627 /* Place-holder. Leave as last. */
5628 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evfststlt", SPE_BUILTIN_EVFSTSTLT },
5631 /* SPE evsel predicates. */
5632 static struct builtin_description bdesc_spe_evsel[] =
5634 /* Place-holder. Leave as first. */
5635 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evsel_gts", SPE_BUILTIN_EVSEL_CMPGTS },
5636 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evsel_gtu", SPE_BUILTIN_EVSEL_CMPGTU },
5637 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evsel_lts", SPE_BUILTIN_EVSEL_CMPLTS },
5638 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evsel_ltu", SPE_BUILTIN_EVSEL_CMPLTU },
5639 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evsel_eq", SPE_BUILTIN_EVSEL_CMPEQ },
5640 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evsel_fsgt", SPE_BUILTIN_EVSEL_FSCMPGT },
5641 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evsel_fslt", SPE_BUILTIN_EVSEL_FSCMPLT },
5642 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evsel_fseq", SPE_BUILTIN_EVSEL_FSCMPEQ },
5643 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evsel_fststgt", SPE_BUILTIN_EVSEL_FSTSTGT },
5644 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evsel_fststlt", SPE_BUILTIN_EVSEL_FSTSTLT },
5645 /* Place-holder. Leave as last. */
5646 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evsel_fststeq", SPE_BUILTIN_EVSEL_FSTSTEQ },
5649 /* ABS* operations. */
5651 static const struct builtin_description bdesc_abs[] =
5653 { MASK_ALTIVEC, CODE_FOR_absv4si2, "__builtin_altivec_abs_v4si", ALTIVEC_BUILTIN_ABS_V4SI },
5654 { MASK_ALTIVEC, CODE_FOR_absv8hi2, "__builtin_altivec_abs_v8hi", ALTIVEC_BUILTIN_ABS_V8HI },
5655 { MASK_ALTIVEC, CODE_FOR_absv4sf2, "__builtin_altivec_abs_v4sf", ALTIVEC_BUILTIN_ABS_V4SF },
5656 { MASK_ALTIVEC, CODE_FOR_absv16qi2, "__builtin_altivec_abs_v16qi", ALTIVEC_BUILTIN_ABS_V16QI },
5657 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v4si, "__builtin_altivec_abss_v4si", ALTIVEC_BUILTIN_ABSS_V4SI },
5658 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v8hi, "__builtin_altivec_abss_v8hi", ALTIVEC_BUILTIN_ABSS_V8HI },
5659 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v16qi, "__builtin_altivec_abss_v16qi", ALTIVEC_BUILTIN_ABSS_V16QI }
5662 /* Simple unary operations: VECb = foo (unsigned literal) or VECb =
5665 static struct builtin_description bdesc_1arg[] =
5667 { MASK_ALTIVEC, CODE_FOR_altivec_vexptefp, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP },
5668 { MASK_ALTIVEC, CODE_FOR_altivec_vlogefp, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP },
5669 { MASK_ALTIVEC, CODE_FOR_altivec_vrefp, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP },
5670 { MASK_ALTIVEC, CODE_FOR_altivec_vrfim, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM },
5671 { MASK_ALTIVEC, CODE_FOR_altivec_vrfin, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN },
5672 { MASK_ALTIVEC, CODE_FOR_altivec_vrfip, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP },
5673 { MASK_ALTIVEC, CODE_FOR_ftruncv4sf2, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ },
5674 { MASK_ALTIVEC, CODE_FOR_altivec_vrsqrtefp, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP },
5675 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisb, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB },
5676 { MASK_ALTIVEC, CODE_FOR_altivec_vspltish, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH },
5677 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisw, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW },
5678 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsb, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB },
5679 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhpx, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX },
5680 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsh, "__builtin_altivec_vupkhsh", ALTIVEC_BUILTIN_VUPKHSH },
5681 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsb, "__builtin_altivec_vupklsb", ALTIVEC_BUILTIN_VUPKLSB },
5682 { MASK_ALTIVEC, CODE_FOR_altivec_vupklpx, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX },
5683 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsh, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH },
5685 /* The SPE unary builtins must start with SPE_BUILTIN_EVABS and
5686 end with SPE_BUILTIN_EVSUBFUSIAAW. */
5687 { 0, CODE_FOR_spe_evabs, "__builtin_spe_evabs", SPE_BUILTIN_EVABS },
5688 { 0, CODE_FOR_spe_evaddsmiaaw, "__builtin_spe_evaddsmiaaw", SPE_BUILTIN_EVADDSMIAAW },
5689 { 0, CODE_FOR_spe_evaddssiaaw, "__builtin_spe_evaddssiaaw", SPE_BUILTIN_EVADDSSIAAW },
5690 { 0, CODE_FOR_spe_evaddumiaaw, "__builtin_spe_evaddumiaaw", SPE_BUILTIN_EVADDUMIAAW },
5691 { 0, CODE_FOR_spe_evaddusiaaw, "__builtin_spe_evaddusiaaw", SPE_BUILTIN_EVADDUSIAAW },
5692 { 0, CODE_FOR_spe_evcntlsw, "__builtin_spe_evcntlsw", SPE_BUILTIN_EVCNTLSW },
5693 { 0, CODE_FOR_spe_evcntlzw, "__builtin_spe_evcntlzw", SPE_BUILTIN_EVCNTLZW },
5694 { 0, CODE_FOR_spe_evextsb, "__builtin_spe_evextsb", SPE_BUILTIN_EVEXTSB },
5695 { 0, CODE_FOR_spe_evextsh, "__builtin_spe_evextsh", SPE_BUILTIN_EVEXTSH },
5696 { 0, CODE_FOR_spe_evfsabs, "__builtin_spe_evfsabs", SPE_BUILTIN_EVFSABS },
5697 { 0, CODE_FOR_spe_evfscfsf, "__builtin_spe_evfscfsf", SPE_BUILTIN_EVFSCFSF },
5698 { 0, CODE_FOR_spe_evfscfsi, "__builtin_spe_evfscfsi", SPE_BUILTIN_EVFSCFSI },
5699 { 0, CODE_FOR_spe_evfscfuf, "__builtin_spe_evfscfuf", SPE_BUILTIN_EVFSCFUF },
5700 { 0, CODE_FOR_spe_evfscfui, "__builtin_spe_evfscfui", SPE_BUILTIN_EVFSCFUI },
5701 { 0, CODE_FOR_spe_evfsctsf, "__builtin_spe_evfsctsf", SPE_BUILTIN_EVFSCTSF },
5702 { 0, CODE_FOR_spe_evfsctsi, "__builtin_spe_evfsctsi", SPE_BUILTIN_EVFSCTSI },
5703 { 0, CODE_FOR_spe_evfsctsiz, "__builtin_spe_evfsctsiz", SPE_BUILTIN_EVFSCTSIZ },
5704 { 0, CODE_FOR_spe_evfsctuf, "__builtin_spe_evfsctuf", SPE_BUILTIN_EVFSCTUF },
5705 { 0, CODE_FOR_spe_evfsctui, "__builtin_spe_evfsctui", SPE_BUILTIN_EVFSCTUI },
5706 { 0, CODE_FOR_spe_evfsctuiz, "__builtin_spe_evfsctuiz", SPE_BUILTIN_EVFSCTUIZ },
5707 { 0, CODE_FOR_spe_evfsnabs, "__builtin_spe_evfsnabs", SPE_BUILTIN_EVFSNABS },
5708 { 0, CODE_FOR_spe_evfsneg, "__builtin_spe_evfsneg", SPE_BUILTIN_EVFSNEG },
5709 { 0, CODE_FOR_spe_evmra, "__builtin_spe_evmra", SPE_BUILTIN_EVMRA },
5710 { 0, CODE_FOR_spe_evneg, "__builtin_spe_evneg", SPE_BUILTIN_EVNEG },
5711 { 0, CODE_FOR_spe_evrndw, "__builtin_spe_evrndw", SPE_BUILTIN_EVRNDW },
5712 { 0, CODE_FOR_spe_evsubfsmiaaw, "__builtin_spe_evsubfsmiaaw", SPE_BUILTIN_EVSUBFSMIAAW },
5713 { 0, CODE_FOR_spe_evsubfssiaaw, "__builtin_spe_evsubfssiaaw", SPE_BUILTIN_EVSUBFSSIAAW },
5714 { 0, CODE_FOR_spe_evsubfumiaaw, "__builtin_spe_evsubfumiaaw", SPE_BUILTIN_EVSUBFUMIAAW },
5716 /* Place-holder. Leave as last unary SPE builtin. */
5717 { 0, CODE_FOR_spe_evsubfusiaaw, "__builtin_spe_evsubfusiaaw", SPE_BUILTIN_EVSUBFUSIAAW },
5721 rs6000_expand_unop_builtin (enum insn_code icode, tree arglist, rtx target)
5724 tree arg0 = TREE_VALUE (arglist);
5725 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5726 enum machine_mode tmode = insn_data[icode].operand[0].mode;
5727 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5729 if (icode == CODE_FOR_nothing)
5730 /* Builtin not supported on this processor. */
5733 /* If we got invalid arguments bail out before generating bad rtl. */
5734 if (arg0 == error_mark_node)
5737 if (icode == CODE_FOR_altivec_vspltisb
5738 || icode == CODE_FOR_altivec_vspltish
5739 || icode == CODE_FOR_altivec_vspltisw
5740 || icode == CODE_FOR_spe_evsplatfi
5741 || icode == CODE_FOR_spe_evsplati)
5743 /* Only allow 5-bit *signed* literals. */
5744 if (GET_CODE (op0) != CONST_INT
5745 || INTVAL (op0) > 0x1f
5746 || INTVAL (op0) < -0x1f)
5748 error ("argument 1 must be a 5-bit signed literal");
5754 || GET_MODE (target) != tmode
5755 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5756 target = gen_reg_rtx (tmode);
5758 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5759 op0 = copy_to_mode_reg (mode0, op0);
5761 pat = GEN_FCN (icode) (target, op0);
5770 altivec_expand_abs_builtin (enum insn_code icode, tree arglist, rtx target)
5772 rtx pat, scratch1, scratch2;
5773 tree arg0 = TREE_VALUE (arglist);
5774 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5775 enum machine_mode tmode = insn_data[icode].operand[0].mode;
5776 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5778 /* If we have invalid arguments, bail out before generating bad rtl. */
5779 if (arg0 == error_mark_node)
5783 || GET_MODE (target) != tmode
5784 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5785 target = gen_reg_rtx (tmode);
5787 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5788 op0 = copy_to_mode_reg (mode0, op0);
5790 scratch1 = gen_reg_rtx (mode0);
5791 scratch2 = gen_reg_rtx (mode0);
5793 pat = GEN_FCN (icode) (target, op0, scratch1, scratch2);
5802 rs6000_expand_binop_builtin (enum insn_code icode, tree arglist, rtx target)
5805 tree arg0 = TREE_VALUE (arglist);
5806 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5807 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5808 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5809 enum machine_mode tmode = insn_data[icode].operand[0].mode;
5810 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5811 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
5813 if (icode == CODE_FOR_nothing)
5814 /* Builtin not supported on this processor. */
5817 /* If we got invalid arguments bail out before generating bad rtl. */
5818 if (arg0 == error_mark_node || arg1 == error_mark_node)
5821 if (icode == CODE_FOR_altivec_vcfux
5822 || icode == CODE_FOR_altivec_vcfsx
5823 || icode == CODE_FOR_altivec_vctsxs
5824 || icode == CODE_FOR_altivec_vctuxs
5825 || icode == CODE_FOR_altivec_vspltb
5826 || icode == CODE_FOR_altivec_vsplth
5827 || icode == CODE_FOR_altivec_vspltw
5828 || icode == CODE_FOR_spe_evaddiw
5829 || icode == CODE_FOR_spe_evldd
5830 || icode == CODE_FOR_spe_evldh
5831 || icode == CODE_FOR_spe_evldw
5832 || icode == CODE_FOR_spe_evlhhesplat
5833 || icode == CODE_FOR_spe_evlhhossplat
5834 || icode == CODE_FOR_spe_evlhhousplat
5835 || icode == CODE_FOR_spe_evlwhe
5836 || icode == CODE_FOR_spe_evlwhos
5837 || icode == CODE_FOR_spe_evlwhou
5838 || icode == CODE_FOR_spe_evlwhsplat
5839 || icode == CODE_FOR_spe_evlwwsplat
5840 || icode == CODE_FOR_spe_evrlwi
5841 || icode == CODE_FOR_spe_evslwi
5842 || icode == CODE_FOR_spe_evsrwis
5843 || icode == CODE_FOR_spe_evsubifw
5844 || icode == CODE_FOR_spe_evsrwiu)
5846 /* Only allow 5-bit unsigned literals. */
5848 if (TREE_CODE (arg1) != INTEGER_CST
5849 || TREE_INT_CST_LOW (arg1) & ~0x1f)
5851 error ("argument 2 must be a 5-bit unsigned literal");
5857 || GET_MODE (target) != tmode
5858 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5859 target = gen_reg_rtx (tmode);
5861 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5862 op0 = copy_to_mode_reg (mode0, op0);
5863 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
5864 op1 = copy_to_mode_reg (mode1, op1);
5866 pat = GEN_FCN (icode) (target, op0, op1);
5875 altivec_expand_predicate_builtin (enum insn_code icode, const char *opcode,
5876 tree arglist, rtx target)
5879 tree cr6_form = TREE_VALUE (arglist);
5880 tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
5881 tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5882 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5883 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5884 enum machine_mode tmode = SImode;
5885 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5886 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
5889 if (TREE_CODE (cr6_form) != INTEGER_CST)
5891 error ("argument 1 of __builtin_altivec_predicate must be a constant");
5895 cr6_form_int = TREE_INT_CST_LOW (cr6_form);
5900 /* If we have invalid arguments, bail out before generating bad rtl. */
5901 if (arg0 == error_mark_node || arg1 == error_mark_node)
5905 || GET_MODE (target) != tmode
5906 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5907 target = gen_reg_rtx (tmode);
5909 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5910 op0 = copy_to_mode_reg (mode0, op0);
5911 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
5912 op1 = copy_to_mode_reg (mode1, op1);
5914 scratch = gen_reg_rtx (mode0);
5916 pat = GEN_FCN (icode) (scratch, op0, op1,
5917 gen_rtx_SYMBOL_REF (Pmode, opcode));
5922 /* The vec_any* and vec_all* predicates use the same opcodes for two
5923 different operations, but the bits in CR6 will be different
5924 depending on what information we want. So we have to play tricks
5925 with CR6 to get the right bits out.
5927 If you think this is disgusting, look at the specs for the
5928 AltiVec predicates. */
5930 switch (cr6_form_int)
5933 emit_insn (gen_cr6_test_for_zero (target));
5936 emit_insn (gen_cr6_test_for_zero_reverse (target));
5939 emit_insn (gen_cr6_test_for_lt (target));
5942 emit_insn (gen_cr6_test_for_lt_reverse (target));
5945 error ("argument 1 of __builtin_altivec_predicate is out of range");
5953 altivec_expand_lv_builtin (enum insn_code icode, tree arglist, rtx target)
5956 tree arg0 = TREE_VALUE (arglist);
5957 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5958 enum machine_mode tmode = insn_data[icode].operand[0].mode;
5959 enum machine_mode mode0 = Pmode;
5960 enum machine_mode mode1 = Pmode;
5961 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5962 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5964 if (icode == CODE_FOR_nothing)
5965 /* Builtin not supported on this processor. */
5968 /* If we got invalid arguments bail out before generating bad rtl. */
5969 if (arg0 == error_mark_node || arg1 == error_mark_node)
5973 || GET_MODE (target) != tmode
5974 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5975 target = gen_reg_rtx (tmode);
5977 op1 = copy_to_mode_reg (mode1, op1);
5979 if (op0 == const0_rtx)
5981 addr = gen_rtx_MEM (tmode, op1);
5985 op0 = copy_to_mode_reg (mode0, op0);
5986 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op0, op1));
5989 pat = GEN_FCN (icode) (target, addr);
5999 altivec_expand_stv_builtin (enum insn_code icode, tree arglist)
6001 tree arg0 = TREE_VALUE (arglist);
6002 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
6003 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6004 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6005 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6006 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
6008 enum machine_mode tmode = insn_data[icode].operand[0].mode;
6009 enum machine_mode mode1 = Pmode;
6010 enum machine_mode mode2 = Pmode;
6012 /* Invalid arguments. Bail before doing anything stoopid! */
6013 if (arg0 == error_mark_node
6014 || arg1 == error_mark_node
6015 || arg2 == error_mark_node)
6018 if (! (*insn_data[icode].operand[1].predicate) (op0, tmode))
6019 op0 = copy_to_mode_reg (tmode, op0);
6021 op2 = copy_to_mode_reg (mode2, op2);
6023 if (op1 == const0_rtx)
6025 addr = gen_rtx_MEM (tmode, op2);
6029 op1 = copy_to_mode_reg (mode1, op1);
6030 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op1, op2));
6033 pat = GEN_FCN (icode) (addr, op0);
6040 rs6000_expand_ternop_builtin (enum insn_code icode, tree arglist, rtx target)
6043 tree arg0 = TREE_VALUE (arglist);
6044 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
6045 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6046 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6047 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6048 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
6049 enum machine_mode tmode = insn_data[icode].operand[0].mode;
6050 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
6051 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
6052 enum machine_mode mode2 = insn_data[icode].operand[3].mode;
6054 if (icode == CODE_FOR_nothing)
6055 /* Builtin not supported on this processor. */
6058 /* If we got invalid arguments bail out before generating bad rtl. */
6059 if (arg0 == error_mark_node
6060 || arg1 == error_mark_node
6061 || arg2 == error_mark_node)
6064 if (icode == CODE_FOR_altivec_vsldoi_4sf
6065 || icode == CODE_FOR_altivec_vsldoi_4si
6066 || icode == CODE_FOR_altivec_vsldoi_8hi
6067 || icode == CODE_FOR_altivec_vsldoi_16qi)
6069 /* Only allow 4-bit unsigned literals. */
6071 if (TREE_CODE (arg2) != INTEGER_CST
6072 || TREE_INT_CST_LOW (arg2) & ~0xf)
6074 error ("argument 3 must be a 4-bit unsigned literal");
6080 || GET_MODE (target) != tmode
6081 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6082 target = gen_reg_rtx (tmode);
6084 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6085 op0 = copy_to_mode_reg (mode0, op0);
6086 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
6087 op1 = copy_to_mode_reg (mode1, op1);
6088 if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
6089 op2 = copy_to_mode_reg (mode2, op2);
6091 pat = GEN_FCN (icode) (target, op0, op1, op2);
6099 /* Expand the lvx builtins. */
6101 altivec_expand_ld_builtin (tree exp, rtx target, bool *expandedp)
6103 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6104 tree arglist = TREE_OPERAND (exp, 1);
6105 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
6107 enum machine_mode tmode, mode0;
6109 enum insn_code icode;
6113 case ALTIVEC_BUILTIN_LD_INTERNAL_16qi:
6114 icode = CODE_FOR_altivec_lvx_16qi;
6116 case ALTIVEC_BUILTIN_LD_INTERNAL_8hi:
6117 icode = CODE_FOR_altivec_lvx_8hi;
6119 case ALTIVEC_BUILTIN_LD_INTERNAL_4si:
6120 icode = CODE_FOR_altivec_lvx_4si;
6122 case ALTIVEC_BUILTIN_LD_INTERNAL_4sf:
6123 icode = CODE_FOR_altivec_lvx_4sf;
6132 arg0 = TREE_VALUE (arglist);
6133 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6134 tmode = insn_data[icode].operand[0].mode;
6135 mode0 = insn_data[icode].operand[1].mode;
6138 || GET_MODE (target) != tmode
6139 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6140 target = gen_reg_rtx (tmode);
6142 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6143 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
6145 pat = GEN_FCN (icode) (target, op0);
6152 /* Expand the stvx builtins. */
6154 altivec_expand_st_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
6157 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6158 tree arglist = TREE_OPERAND (exp, 1);
6159 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
6161 enum machine_mode mode0, mode1;
6163 enum insn_code icode;
6167 case ALTIVEC_BUILTIN_ST_INTERNAL_16qi:
6168 icode = CODE_FOR_altivec_stvx_16qi;
6170 case ALTIVEC_BUILTIN_ST_INTERNAL_8hi:
6171 icode = CODE_FOR_altivec_stvx_8hi;
6173 case ALTIVEC_BUILTIN_ST_INTERNAL_4si:
6174 icode = CODE_FOR_altivec_stvx_4si;
6176 case ALTIVEC_BUILTIN_ST_INTERNAL_4sf:
6177 icode = CODE_FOR_altivec_stvx_4sf;
6184 arg0 = TREE_VALUE (arglist);
6185 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
6186 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6187 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6188 mode0 = insn_data[icode].operand[0].mode;
6189 mode1 = insn_data[icode].operand[1].mode;
6191 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
6192 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
6193 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
6194 op1 = copy_to_mode_reg (mode1, op1);
6196 pat = GEN_FCN (icode) (op0, op1);
6204 /* Expand the dst builtins. */
6206 altivec_expand_dst_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
6209 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6210 tree arglist = TREE_OPERAND (exp, 1);
6211 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
6212 tree arg0, arg1, arg2;
6213 enum machine_mode mode0, mode1, mode2;
6214 rtx pat, op0, op1, op2;
6215 struct builtin_description *d;
6220 /* Handle DST variants. */
6221 d = (struct builtin_description *) bdesc_dst;
6222 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
6223 if (d->code == fcode)
6225 arg0 = TREE_VALUE (arglist);
6226 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
6227 arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6228 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6229 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6230 op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
6231 mode0 = insn_data[d->icode].operand[0].mode;
6232 mode1 = insn_data[d->icode].operand[1].mode;
6233 mode2 = insn_data[d->icode].operand[2].mode;
6235 /* Invalid arguments, bail out before generating bad rtl. */
6236 if (arg0 == error_mark_node
6237 || arg1 == error_mark_node
6238 || arg2 == error_mark_node)
6243 if (TREE_CODE (arg2) != INTEGER_CST
6244 || TREE_INT_CST_LOW (arg2) & ~0x3)
6246 error ("argument to `%s' must be a 2-bit unsigned literal", d->name);
6250 if (! (*insn_data[d->icode].operand[0].predicate) (op0, mode0))
6251 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
6252 if (! (*insn_data[d->icode].operand[1].predicate) (op1, mode1))
6253 op1 = copy_to_mode_reg (mode1, op1);
6255 pat = GEN_FCN (d->icode) (op0, op1, op2);
6265 /* Expand the builtin in EXP and store the result in TARGET. Store
6266 true in *EXPANDEDP if we found a builtin to expand. */
6268 altivec_expand_builtin (tree exp, rtx target, bool *expandedp)
6270 struct builtin_description *d;
6271 struct builtin_description_predicates *dp;
6273 enum insn_code icode;
6274 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6275 tree arglist = TREE_OPERAND (exp, 1);
6278 enum machine_mode tmode, mode0;
6279 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
6281 target = altivec_expand_ld_builtin (exp, target, expandedp);
6285 target = altivec_expand_st_builtin (exp, target, expandedp);
6289 target = altivec_expand_dst_builtin (exp, target, expandedp);
6297 case ALTIVEC_BUILTIN_STVX:
6298 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx, arglist);
6299 case ALTIVEC_BUILTIN_STVEBX:
6300 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx, arglist);
6301 case ALTIVEC_BUILTIN_STVEHX:
6302 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx, arglist);
6303 case ALTIVEC_BUILTIN_STVEWX:
6304 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvewx, arglist);
6305 case ALTIVEC_BUILTIN_STVXL:
6306 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvxl, arglist);
6308 case ALTIVEC_BUILTIN_MFVSCR:
6309 icode = CODE_FOR_altivec_mfvscr;
6310 tmode = insn_data[icode].operand[0].mode;
6313 || GET_MODE (target) != tmode
6314 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6315 target = gen_reg_rtx (tmode);
6317 pat = GEN_FCN (icode) (target);
6323 case ALTIVEC_BUILTIN_MTVSCR:
6324 icode = CODE_FOR_altivec_mtvscr;
6325 arg0 = TREE_VALUE (arglist);
6326 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6327 mode0 = insn_data[icode].operand[0].mode;
6329 /* If we got invalid arguments bail out before generating bad rtl. */
6330 if (arg0 == error_mark_node)
6333 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
6334 op0 = copy_to_mode_reg (mode0, op0);
6336 pat = GEN_FCN (icode) (op0);
6341 case ALTIVEC_BUILTIN_DSSALL:
6342 emit_insn (gen_altivec_dssall ());
6345 case ALTIVEC_BUILTIN_DSS:
6346 icode = CODE_FOR_altivec_dss;
6347 arg0 = TREE_VALUE (arglist);
6349 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6350 mode0 = insn_data[icode].operand[0].mode;
6352 /* If we got invalid arguments bail out before generating bad rtl. */
6353 if (arg0 == error_mark_node)
6356 if (TREE_CODE (arg0) != INTEGER_CST
6357 || TREE_INT_CST_LOW (arg0) & ~0x3)
6359 error ("argument to dss must be a 2-bit unsigned literal");
6363 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
6364 op0 = copy_to_mode_reg (mode0, op0);
6366 emit_insn (gen_altivec_dss (op0));
6369 case ALTIVEC_BUILTIN_COMPILETIME_ERROR:
6370 arg0 = TREE_VALUE (arglist);
6371 while (TREE_CODE (arg0) == NOP_EXPR || TREE_CODE (arg0) == ADDR_EXPR)
6372 arg0 = TREE_OPERAND (arg0, 0);
6373 error ("invalid parameter combination for `%s' AltiVec intrinsic",
6374 TREE_STRING_POINTER (arg0));
6379 /* Expand abs* operations. */
6380 d = (struct builtin_description *) bdesc_abs;
6381 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
6382 if (d->code == fcode)
6383 return altivec_expand_abs_builtin (d->icode, arglist, target);
6385 /* Expand the AltiVec predicates. */
6386 dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
6387 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
6388 if (dp->code == fcode)
6389 return altivec_expand_predicate_builtin (dp->icode, dp->opcode, arglist, target);
6391 /* LV* are funky. We initialized them differently. */
6394 case ALTIVEC_BUILTIN_LVSL:
6395 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvsl,
6397 case ALTIVEC_BUILTIN_LVSR:
6398 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvsr,
6400 case ALTIVEC_BUILTIN_LVEBX:
6401 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvebx,
6403 case ALTIVEC_BUILTIN_LVEHX:
6404 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvehx,
6406 case ALTIVEC_BUILTIN_LVEWX:
6407 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvewx,
6409 case ALTIVEC_BUILTIN_LVXL:
6410 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvxl,
6412 case ALTIVEC_BUILTIN_LVX:
6413 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvx,
6424 /* Binops that need to be initialized manually, but can be expanded
6425 automagically by rs6000_expand_binop_builtin. */
6426 static struct builtin_description bdesc_2arg_spe[] =
6428 { 0, CODE_FOR_spe_evlddx, "__builtin_spe_evlddx", SPE_BUILTIN_EVLDDX },
6429 { 0, CODE_FOR_spe_evldwx, "__builtin_spe_evldwx", SPE_BUILTIN_EVLDWX },
6430 { 0, CODE_FOR_spe_evldhx, "__builtin_spe_evldhx", SPE_BUILTIN_EVLDHX },
6431 { 0, CODE_FOR_spe_evlwhex, "__builtin_spe_evlwhex", SPE_BUILTIN_EVLWHEX },
6432 { 0, CODE_FOR_spe_evlwhoux, "__builtin_spe_evlwhoux", SPE_BUILTIN_EVLWHOUX },
6433 { 0, CODE_FOR_spe_evlwhosx, "__builtin_spe_evlwhosx", SPE_BUILTIN_EVLWHOSX },
6434 { 0, CODE_FOR_spe_evlwwsplatx, "__builtin_spe_evlwwsplatx", SPE_BUILTIN_EVLWWSPLATX },
6435 { 0, CODE_FOR_spe_evlwhsplatx, "__builtin_spe_evlwhsplatx", SPE_BUILTIN_EVLWHSPLATX },
6436 { 0, CODE_FOR_spe_evlhhesplatx, "__builtin_spe_evlhhesplatx", SPE_BUILTIN_EVLHHESPLATX },
6437 { 0, CODE_FOR_spe_evlhhousplatx, "__builtin_spe_evlhhousplatx", SPE_BUILTIN_EVLHHOUSPLATX },
6438 { 0, CODE_FOR_spe_evlhhossplatx, "__builtin_spe_evlhhossplatx", SPE_BUILTIN_EVLHHOSSPLATX },
6439 { 0, CODE_FOR_spe_evldd, "__builtin_spe_evldd", SPE_BUILTIN_EVLDD },
6440 { 0, CODE_FOR_spe_evldw, "__builtin_spe_evldw", SPE_BUILTIN_EVLDW },
6441 { 0, CODE_FOR_spe_evldh, "__builtin_spe_evldh", SPE_BUILTIN_EVLDH },
6442 { 0, CODE_FOR_spe_evlwhe, "__builtin_spe_evlwhe", SPE_BUILTIN_EVLWHE },
6443 { 0, CODE_FOR_spe_evlwhou, "__builtin_spe_evlwhou", SPE_BUILTIN_EVLWHOU },
6444 { 0, CODE_FOR_spe_evlwhos, "__builtin_spe_evlwhos", SPE_BUILTIN_EVLWHOS },
6445 { 0, CODE_FOR_spe_evlwwsplat, "__builtin_spe_evlwwsplat", SPE_BUILTIN_EVLWWSPLAT },
6446 { 0, CODE_FOR_spe_evlwhsplat, "__builtin_spe_evlwhsplat", SPE_BUILTIN_EVLWHSPLAT },
6447 { 0, CODE_FOR_spe_evlhhesplat, "__builtin_spe_evlhhesplat", SPE_BUILTIN_EVLHHESPLAT },
6448 { 0, CODE_FOR_spe_evlhhousplat, "__builtin_spe_evlhhousplat", SPE_BUILTIN_EVLHHOUSPLAT },
6449 { 0, CODE_FOR_spe_evlhhossplat, "__builtin_spe_evlhhossplat", SPE_BUILTIN_EVLHHOSSPLAT }
6452 /* Expand the builtin in EXP and store the result in TARGET. Store
6453 true in *EXPANDEDP if we found a builtin to expand.
6455 This expands the SPE builtins that are not simple unary and binary
6458 spe_expand_builtin (tree exp, rtx target, bool *expandedp)
6460 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6461 tree arglist = TREE_OPERAND (exp, 1);
6463 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
6464 enum insn_code icode;
6465 enum machine_mode tmode, mode0;
6467 struct builtin_description *d;
6472 /* Syntax check for a 5-bit unsigned immediate. */
6475 case SPE_BUILTIN_EVSTDD:
6476 case SPE_BUILTIN_EVSTDH:
6477 case SPE_BUILTIN_EVSTDW:
6478 case SPE_BUILTIN_EVSTWHE:
6479 case SPE_BUILTIN_EVSTWHO:
6480 case SPE_BUILTIN_EVSTWWE:
6481 case SPE_BUILTIN_EVSTWWO:
6482 arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6483 if (TREE_CODE (arg1) != INTEGER_CST
6484 || TREE_INT_CST_LOW (arg1) & ~0x1f)
6486 error ("argument 2 must be a 5-bit unsigned literal");
6494 /* The evsplat*i instructions are not quite generic. */
6497 case SPE_BUILTIN_EVSPLATFI:
6498 return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplatfi,
6500 case SPE_BUILTIN_EVSPLATI:
6501 return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplati,
6507 d = (struct builtin_description *) bdesc_2arg_spe;
6508 for (i = 0; i < ARRAY_SIZE (bdesc_2arg_spe); ++i, ++d)
6509 if (d->code == fcode)
6510 return rs6000_expand_binop_builtin (d->icode, arglist, target);
6512 d = (struct builtin_description *) bdesc_spe_predicates;
6513 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, ++d)
6514 if (d->code == fcode)
6515 return spe_expand_predicate_builtin (d->icode, arglist, target);
6517 d = (struct builtin_description *) bdesc_spe_evsel;
6518 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, ++d)
6519 if (d->code == fcode)
6520 return spe_expand_evsel_builtin (d->icode, arglist, target);
6524 case SPE_BUILTIN_EVSTDDX:
6525 return altivec_expand_stv_builtin (CODE_FOR_spe_evstddx, arglist);
6526 case SPE_BUILTIN_EVSTDHX:
6527 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdhx, arglist);
6528 case SPE_BUILTIN_EVSTDWX:
6529 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdwx, arglist);
6530 case SPE_BUILTIN_EVSTWHEX:
6531 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhex, arglist);
6532 case SPE_BUILTIN_EVSTWHOX:
6533 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhox, arglist);
6534 case SPE_BUILTIN_EVSTWWEX:
6535 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwex, arglist);
6536 case SPE_BUILTIN_EVSTWWOX:
6537 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwox, arglist);
6538 case SPE_BUILTIN_EVSTDD:
6539 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdd, arglist);
6540 case SPE_BUILTIN_EVSTDH:
6541 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdh, arglist);
6542 case SPE_BUILTIN_EVSTDW:
6543 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdw, arglist);
6544 case SPE_BUILTIN_EVSTWHE:
6545 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhe, arglist);
6546 case SPE_BUILTIN_EVSTWHO:
6547 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwho, arglist);
6548 case SPE_BUILTIN_EVSTWWE:
6549 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwe, arglist);
6550 case SPE_BUILTIN_EVSTWWO:
6551 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwo, arglist);
6552 case SPE_BUILTIN_MFSPEFSCR:
6553 icode = CODE_FOR_spe_mfspefscr;
6554 tmode = insn_data[icode].operand[0].mode;
6557 || GET_MODE (target) != tmode
6558 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6559 target = gen_reg_rtx (tmode);
6561 pat = GEN_FCN (icode) (target);
6566 case SPE_BUILTIN_MTSPEFSCR:
6567 icode = CODE_FOR_spe_mtspefscr;
6568 arg0 = TREE_VALUE (arglist);
6569 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6570 mode0 = insn_data[icode].operand[0].mode;
6572 if (arg0 == error_mark_node)
6575 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
6576 op0 = copy_to_mode_reg (mode0, op0);
6578 pat = GEN_FCN (icode) (op0);
6591 spe_expand_predicate_builtin (enum insn_code icode, tree arglist, rtx target)
6593 rtx pat, scratch, tmp;
6594 tree form = TREE_VALUE (arglist);
6595 tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
6596 tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6597 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6598 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6599 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
6600 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
6604 if (TREE_CODE (form) != INTEGER_CST)
6606 error ("argument 1 of __builtin_spe_predicate must be a constant");
6610 form_int = TREE_INT_CST_LOW (form);
6615 if (arg0 == error_mark_node || arg1 == error_mark_node)
6619 || GET_MODE (target) != SImode
6620 || ! (*insn_data[icode].operand[0].predicate) (target, SImode))
6621 target = gen_reg_rtx (SImode);
6623 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6624 op0 = copy_to_mode_reg (mode0, op0);
6625 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
6626 op1 = copy_to_mode_reg (mode1, op1);
6628 scratch = gen_reg_rtx (CCmode);
6630 pat = GEN_FCN (icode) (scratch, op0, op1);
6635 /* There are 4 variants for each predicate: _any_, _all_, _upper_,
6636 _lower_. We use one compare, but look in different bits of the
6637 CR for each variant.
6639 There are 2 elements in each SPE simd type (upper/lower). The CR
6640 bits are set as follows:
6642 BIT0 | BIT 1 | BIT 2 | BIT 3
6643 U | L | (U | L) | (U & L)
6645 So, for an "all" relationship, BIT 3 would be set.
6646 For an "any" relationship, BIT 2 would be set. Etc.
6648 Following traditional nomenclature, these bits map to:
6650 BIT0 | BIT 1 | BIT 2 | BIT 3
6653 Later, we will generate rtl to look in the LT/EQ/EQ/OV bits.
6658 /* All variant. OV bit. */
6660 /* We need to get to the OV bit, which is the ORDERED bit. We
6661 could generate (ordered:SI (reg:CC xx) (const_int 0)), but
6662 that's ugly and will trigger a validate_condition_mode abort.
6663 So let's just use another pattern. */
6664 emit_insn (gen_move_from_CR_ov_bit (target, scratch));
6666 /* Any variant. EQ bit. */
6670 /* Upper variant. LT bit. */
6674 /* Lower variant. GT bit. */
6679 error ("argument 1 of __builtin_spe_predicate is out of range");
6683 tmp = gen_rtx_fmt_ee (code, SImode, scratch, const0_rtx);
6684 emit_move_insn (target, tmp);
6689 /* The evsel builtins look like this:
6691 e = __builtin_spe_evsel_OP (a, b, c, d);
6695 e[upper] = a[upper] *OP* b[upper] ? c[upper] : d[upper];
6696 e[lower] = a[lower] *OP* b[lower] ? c[lower] : d[lower];
6700 spe_expand_evsel_builtin (enum insn_code icode, tree arglist, rtx target)
6703 tree arg0 = TREE_VALUE (arglist);
6704 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
6705 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6706 tree arg3 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arglist))));
6707 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6708 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6709 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
6710 rtx op3 = expand_expr (arg3, NULL_RTX, VOIDmode, 0);
6711 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
6712 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
6717 if (arg0 == error_mark_node || arg1 == error_mark_node
6718 || arg2 == error_mark_node || arg3 == error_mark_node)
6722 || GET_MODE (target) != mode0
6723 || ! (*insn_data[icode].operand[0].predicate) (target, mode0))
6724 target = gen_reg_rtx (mode0);
6726 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6727 op0 = copy_to_mode_reg (mode0, op0);
6728 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
6729 op1 = copy_to_mode_reg (mode0, op1);
6730 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
6731 op2 = copy_to_mode_reg (mode0, op2);
6732 if (! (*insn_data[icode].operand[1].predicate) (op3, mode1))
6733 op3 = copy_to_mode_reg (mode0, op3);
6735 /* Generate the compare. */
6736 scratch = gen_reg_rtx (CCmode);
6737 pat = GEN_FCN (icode) (scratch, op0, op1);
6742 if (mode0 == V2SImode)
6743 emit_insn (gen_spe_evsel (target, op2, op3, scratch));
6745 emit_insn (gen_spe_evsel_fs (target, op2, op3, scratch));
6750 /* Expand an expression EXP that calls a built-in function,
6751 with result going to TARGET if that's convenient
6752 (and in mode MODE if that's convenient).
6753 SUBTARGET may be used as the target for computing one of EXP's operands.
6754 IGNORE is nonzero if the value is to be ignored. */
6757 rs6000_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
6758 enum machine_mode mode ATTRIBUTE_UNUSED,
6759 int ignore ATTRIBUTE_UNUSED)
6761 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6762 tree arglist = TREE_OPERAND (exp, 1);
6763 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
6764 struct builtin_description *d;
6771 ret = altivec_expand_builtin (exp, target, &success);
6778 ret = spe_expand_builtin (exp, target, &success);
6784 if (TARGET_ALTIVEC || TARGET_SPE)
6786 /* Handle simple unary operations. */
6787 d = (struct builtin_description *) bdesc_1arg;
6788 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
6789 if (d->code == fcode)
6790 return rs6000_expand_unop_builtin (d->icode, arglist, target);
6792 /* Handle simple binary operations. */
6793 d = (struct builtin_description *) bdesc_2arg;
6794 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
6795 if (d->code == fcode)
6796 return rs6000_expand_binop_builtin (d->icode, arglist, target);
6798 /* Handle simple ternary operations. */
6799 d = (struct builtin_description *) bdesc_3arg;
6800 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
6801 if (d->code == fcode)
6802 return rs6000_expand_ternop_builtin (d->icode, arglist, target);
6810 build_opaque_vector_type (tree node, int nunits)
6812 node = copy_node (node);
6813 TYPE_MAIN_VARIANT (node) = node;
6814 return build_vector_type (node, nunits);
6818 rs6000_init_builtins (void)
6820 V2SI_type_node = build_vector_type (intSI_type_node, 2);
6821 V2SF_type_node = build_vector_type (float_type_node, 2);
6822 V4HI_type_node = build_vector_type (intHI_type_node, 4);
6823 V4SI_type_node = build_vector_type (intSI_type_node, 4);
6824 V4SF_type_node = build_vector_type (float_type_node, 4);
6825 V8HI_type_node = build_vector_type (intHI_type_node, 8);
6826 V16QI_type_node = build_vector_type (intQI_type_node, 16);
6828 unsigned_V16QI_type_node = build_vector_type (unsigned_intQI_type_node, 16);
6829 unsigned_V8HI_type_node = build_vector_type (unsigned_intHI_type_node, 8);
6830 unsigned_V4SI_type_node = build_vector_type (unsigned_intSI_type_node, 4);
6832 opaque_V2SF_type_node = build_opaque_vector_type (float_type_node, 2);
6833 opaque_V2SI_type_node = build_opaque_vector_type (intSI_type_node, 2);
6834 opaque_p_V2SI_type_node = build_pointer_type (opaque_V2SI_type_node);
6836 /* The 'vector bool ...' types must be kept distinct from 'vector unsigned ...'
6837 types, especially in C++ land. Similarly, 'vector pixel' is distinct from
6838 'vector unsigned short'. */
6840 bool_char_type_node = copy_node (unsigned_intQI_type_node);
6841 TYPE_MAIN_VARIANT (bool_char_type_node) = bool_char_type_node;
6842 bool_short_type_node = copy_node (unsigned_intHI_type_node);
6843 TYPE_MAIN_VARIANT (bool_short_type_node) = bool_short_type_node;
6844 bool_int_type_node = copy_node (unsigned_intSI_type_node);
6845 TYPE_MAIN_VARIANT (bool_int_type_node) = bool_int_type_node;
6846 pixel_type_node = copy_node (unsigned_intHI_type_node);
6847 TYPE_MAIN_VARIANT (pixel_type_node) = pixel_type_node;
6849 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
6850 get_identifier ("__bool char"),
6851 bool_char_type_node));
6852 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
6853 get_identifier ("__bool short"),
6854 bool_short_type_node));
6855 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
6856 get_identifier ("__bool int"),
6857 bool_int_type_node));
6858 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
6859 get_identifier ("__pixel"),
6862 bool_V16QI_type_node = build_vector_type (bool_char_type_node, 16);
6863 bool_V8HI_type_node = build_vector_type (bool_short_type_node, 8);
6864 bool_V4SI_type_node = build_vector_type (bool_int_type_node, 4);
6865 pixel_V8HI_type_node = build_vector_type (pixel_type_node, 8);
6867 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
6868 get_identifier ("__vector unsigned char"),
6869 unsigned_V16QI_type_node));
6870 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
6871 get_identifier ("__vector signed char"),
6873 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
6874 get_identifier ("__vector __bool char"),
6875 bool_V16QI_type_node));
6877 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
6878 get_identifier ("__vector unsigned short"),
6879 unsigned_V8HI_type_node));
6880 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
6881 get_identifier ("__vector signed short"),
6883 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
6884 get_identifier ("__vector __bool short"),
6885 bool_V8HI_type_node));
6887 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
6888 get_identifier ("__vector unsigned int"),
6889 unsigned_V4SI_type_node));
6890 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
6891 get_identifier ("__vector signed int"),
6893 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
6894 get_identifier ("__vector __bool int"),
6895 bool_V4SI_type_node));
6897 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
6898 get_identifier ("__vector float"),
6900 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
6901 get_identifier ("__vector __pixel"),
6902 pixel_V8HI_type_node));
6905 spe_init_builtins ();
6907 altivec_init_builtins ();
6908 if (TARGET_ALTIVEC || TARGET_SPE)
6909 rs6000_common_init_builtins ();
6912 /* Search through a set of builtins and enable the mask bits.
6913 DESC is an array of builtins.
6914 SIZE is the total number of builtins.
6915 START is the builtin enum at which to start.
6916 END is the builtin enum at which to end. */
6918 enable_mask_for_builtins (struct builtin_description *desc, int size,
6919 enum rs6000_builtins start,
6920 enum rs6000_builtins end)
6924 for (i = 0; i < size; ++i)
6925 if (desc[i].code == start)
6931 for (; i < size; ++i)
6933 /* Flip all the bits on. */
6934 desc[i].mask = target_flags;
6935 if (desc[i].code == end)
6941 spe_init_builtins (void)
6943 tree endlink = void_list_node;
6944 tree puint_type_node = build_pointer_type (unsigned_type_node);
6945 tree pushort_type_node = build_pointer_type (short_unsigned_type_node);
6946 struct builtin_description *d;
6949 tree v2si_ftype_4_v2si
6950 = build_function_type
6951 (opaque_V2SI_type_node,
6952 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6953 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6954 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6955 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6958 tree v2sf_ftype_4_v2sf
6959 = build_function_type
6960 (opaque_V2SF_type_node,
6961 tree_cons (NULL_TREE, opaque_V2SF_type_node,
6962 tree_cons (NULL_TREE, opaque_V2SF_type_node,
6963 tree_cons (NULL_TREE, opaque_V2SF_type_node,
6964 tree_cons (NULL_TREE, opaque_V2SF_type_node,
6967 tree int_ftype_int_v2si_v2si
6968 = build_function_type
6970 tree_cons (NULL_TREE, integer_type_node,
6971 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6972 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6975 tree int_ftype_int_v2sf_v2sf
6976 = build_function_type
6978 tree_cons (NULL_TREE, integer_type_node,
6979 tree_cons (NULL_TREE, opaque_V2SF_type_node,
6980 tree_cons (NULL_TREE, opaque_V2SF_type_node,
6983 tree void_ftype_v2si_puint_int
6984 = build_function_type (void_type_node,
6985 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6986 tree_cons (NULL_TREE, puint_type_node,
6987 tree_cons (NULL_TREE,
6991 tree void_ftype_v2si_puint_char
6992 = build_function_type (void_type_node,
6993 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6994 tree_cons (NULL_TREE, puint_type_node,
6995 tree_cons (NULL_TREE,
6999 tree void_ftype_v2si_pv2si_int
7000 = build_function_type (void_type_node,
7001 tree_cons (NULL_TREE, opaque_V2SI_type_node,
7002 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
7003 tree_cons (NULL_TREE,
7007 tree void_ftype_v2si_pv2si_char
7008 = build_function_type (void_type_node,
7009 tree_cons (NULL_TREE, opaque_V2SI_type_node,
7010 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
7011 tree_cons (NULL_TREE,
7016 = build_function_type (void_type_node,
7017 tree_cons (NULL_TREE, integer_type_node, endlink));
7020 = build_function_type (integer_type_node, endlink);
7022 tree v2si_ftype_pv2si_int
7023 = build_function_type (opaque_V2SI_type_node,
7024 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
7025 tree_cons (NULL_TREE, integer_type_node,
7028 tree v2si_ftype_puint_int
7029 = build_function_type (opaque_V2SI_type_node,
7030 tree_cons (NULL_TREE, puint_type_node,
7031 tree_cons (NULL_TREE, integer_type_node,
7034 tree v2si_ftype_pushort_int
7035 = build_function_type (opaque_V2SI_type_node,
7036 tree_cons (NULL_TREE, pushort_type_node,
7037 tree_cons (NULL_TREE, integer_type_node,
7040 tree v2si_ftype_signed_char
7041 = build_function_type (opaque_V2SI_type_node,
7042 tree_cons (NULL_TREE, signed_char_type_node,
7045 /* The initialization of the simple binary and unary builtins is
7046 done in rs6000_common_init_builtins, but we have to enable the
7047 mask bits here manually because we have run out of `target_flags'
7048 bits. We really need to redesign this mask business. */
7050 enable_mask_for_builtins ((struct builtin_description *) bdesc_2arg,
7051 ARRAY_SIZE (bdesc_2arg),
7054 enable_mask_for_builtins ((struct builtin_description *) bdesc_1arg,
7055 ARRAY_SIZE (bdesc_1arg),
7057 SPE_BUILTIN_EVSUBFUSIAAW);
7058 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_predicates,
7059 ARRAY_SIZE (bdesc_spe_predicates),
7060 SPE_BUILTIN_EVCMPEQ,
7061 SPE_BUILTIN_EVFSTSTLT);
7062 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_evsel,
7063 ARRAY_SIZE (bdesc_spe_evsel),
7064 SPE_BUILTIN_EVSEL_CMPGTS,
7065 SPE_BUILTIN_EVSEL_FSTSTEQ);
7067 (*lang_hooks.decls.pushdecl)
7068 (build_decl (TYPE_DECL, get_identifier ("__ev64_opaque__"),
7069 opaque_V2SI_type_node));
7071 /* Initialize irregular SPE builtins. */
7073 def_builtin (target_flags, "__builtin_spe_mtspefscr", void_ftype_int, SPE_BUILTIN_MTSPEFSCR);
7074 def_builtin (target_flags, "__builtin_spe_mfspefscr", int_ftype_void, SPE_BUILTIN_MFSPEFSCR);
7075 def_builtin (target_flags, "__builtin_spe_evstddx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDDX);
7076 def_builtin (target_flags, "__builtin_spe_evstdhx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDHX);
7077 def_builtin (target_flags, "__builtin_spe_evstdwx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDWX);
7078 def_builtin (target_flags, "__builtin_spe_evstwhex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHEX);
7079 def_builtin (target_flags, "__builtin_spe_evstwhox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHOX);
7080 def_builtin (target_flags, "__builtin_spe_evstwwex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWEX);
7081 def_builtin (target_flags, "__builtin_spe_evstwwox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWOX);
7082 def_builtin (target_flags, "__builtin_spe_evstdd", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDD);
7083 def_builtin (target_flags, "__builtin_spe_evstdh", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDH);
7084 def_builtin (target_flags, "__builtin_spe_evstdw", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDW);
7085 def_builtin (target_flags, "__builtin_spe_evstwhe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHE);
7086 def_builtin (target_flags, "__builtin_spe_evstwho", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHO);
7087 def_builtin (target_flags, "__builtin_spe_evstwwe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWE);
7088 def_builtin (target_flags, "__builtin_spe_evstwwo", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWO);
7089 def_builtin (target_flags, "__builtin_spe_evsplatfi", v2si_ftype_signed_char, SPE_BUILTIN_EVSPLATFI);
7090 def_builtin (target_flags, "__builtin_spe_evsplati", v2si_ftype_signed_char, SPE_BUILTIN_EVSPLATI);
7093 def_builtin (target_flags, "__builtin_spe_evlddx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDDX);
7094 def_builtin (target_flags, "__builtin_spe_evldwx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDWX);
7095 def_builtin (target_flags, "__builtin_spe_evldhx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDHX);
7096 def_builtin (target_flags, "__builtin_spe_evlwhex", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHEX);
7097 def_builtin (target_flags, "__builtin_spe_evlwhoux", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOUX);
7098 def_builtin (target_flags, "__builtin_spe_evlwhosx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOSX);
7099 def_builtin (target_flags, "__builtin_spe_evlwwsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLATX);
7100 def_builtin (target_flags, "__builtin_spe_evlwhsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLATX);
7101 def_builtin (target_flags, "__builtin_spe_evlhhesplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLATX);
7102 def_builtin (target_flags, "__builtin_spe_evlhhousplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLATX);
7103 def_builtin (target_flags, "__builtin_spe_evlhhossplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLATX);
7104 def_builtin (target_flags, "__builtin_spe_evldd", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDD);
7105 def_builtin (target_flags, "__builtin_spe_evldw", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDW);
7106 def_builtin (target_flags, "__builtin_spe_evldh", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDH);
7107 def_builtin (target_flags, "__builtin_spe_evlhhesplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLAT);
7108 def_builtin (target_flags, "__builtin_spe_evlhhossplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLAT);
7109 def_builtin (target_flags, "__builtin_spe_evlhhousplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLAT);
7110 def_builtin (target_flags, "__builtin_spe_evlwhe", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHE);
7111 def_builtin (target_flags, "__builtin_spe_evlwhos", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOS);
7112 def_builtin (target_flags, "__builtin_spe_evlwhou", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOU);
7113 def_builtin (target_flags, "__builtin_spe_evlwhsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLAT);
7114 def_builtin (target_flags, "__builtin_spe_evlwwsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLAT);
7117 d = (struct builtin_description *) bdesc_spe_predicates;
7118 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, d++)
7122 switch (insn_data[d->icode].operand[1].mode)
7125 type = int_ftype_int_v2si_v2si;
7128 type = int_ftype_int_v2sf_v2sf;
7134 def_builtin (d->mask, d->name, type, d->code);
7137 /* Evsel predicates. */
7138 d = (struct builtin_description *) bdesc_spe_evsel;
7139 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, d++)
7143 switch (insn_data[d->icode].operand[1].mode)
7146 type = v2si_ftype_4_v2si;
7149 type = v2sf_ftype_4_v2sf;
7155 def_builtin (d->mask, d->name, type, d->code);
7160 altivec_init_builtins (void)
7162 struct builtin_description *d;
7163 struct builtin_description_predicates *dp;
7165 tree pfloat_type_node = build_pointer_type (float_type_node);
7166 tree pint_type_node = build_pointer_type (integer_type_node);
7167 tree pshort_type_node = build_pointer_type (short_integer_type_node);
7168 tree pchar_type_node = build_pointer_type (char_type_node);
7170 tree pvoid_type_node = build_pointer_type (void_type_node);
7172 tree pcfloat_type_node = build_pointer_type (build_qualified_type (float_type_node, TYPE_QUAL_CONST));
7173 tree pcint_type_node = build_pointer_type (build_qualified_type (integer_type_node, TYPE_QUAL_CONST));
7174 tree pcshort_type_node = build_pointer_type (build_qualified_type (short_integer_type_node, TYPE_QUAL_CONST));
7175 tree pcchar_type_node = build_pointer_type (build_qualified_type (char_type_node, TYPE_QUAL_CONST));
7177 tree pcvoid_type_node = build_pointer_type (build_qualified_type (void_type_node, TYPE_QUAL_CONST));
7179 tree int_ftype_int_v4si_v4si
7180 = build_function_type_list (integer_type_node,
7181 integer_type_node, V4SI_type_node,
7182 V4SI_type_node, NULL_TREE);
7183 tree v4sf_ftype_pcfloat
7184 = build_function_type_list (V4SF_type_node, pcfloat_type_node, NULL_TREE);
7185 tree void_ftype_pfloat_v4sf
7186 = build_function_type_list (void_type_node,
7187 pfloat_type_node, V4SF_type_node, NULL_TREE);
7188 tree v4si_ftype_pcint
7189 = build_function_type_list (V4SI_type_node, pcint_type_node, NULL_TREE);
7190 tree void_ftype_pint_v4si
7191 = build_function_type_list (void_type_node,
7192 pint_type_node, V4SI_type_node, NULL_TREE);
7193 tree v8hi_ftype_pcshort
7194 = build_function_type_list (V8HI_type_node, pcshort_type_node, NULL_TREE);
7195 tree void_ftype_pshort_v8hi
7196 = build_function_type_list (void_type_node,
7197 pshort_type_node, V8HI_type_node, NULL_TREE);
7198 tree v16qi_ftype_pcchar
7199 = build_function_type_list (V16QI_type_node, pcchar_type_node, NULL_TREE);
7200 tree void_ftype_pchar_v16qi
7201 = build_function_type_list (void_type_node,
7202 pchar_type_node, V16QI_type_node, NULL_TREE);
7203 tree void_ftype_v4si
7204 = build_function_type_list (void_type_node, V4SI_type_node, NULL_TREE);
7205 tree v8hi_ftype_void
7206 = build_function_type (V8HI_type_node, void_list_node);
7207 tree void_ftype_void
7208 = build_function_type (void_type_node, void_list_node);
7210 = build_function_type_list (void_type_node, char_type_node, NULL_TREE);
7212 tree v16qi_ftype_long_pcvoid
7213 = build_function_type_list (V16QI_type_node,
7214 long_integer_type_node, pcvoid_type_node, NULL_TREE);
7215 tree v8hi_ftype_long_pcvoid
7216 = build_function_type_list (V8HI_type_node,
7217 long_integer_type_node, pcvoid_type_node, NULL_TREE);
7218 tree v4si_ftype_long_pcvoid
7219 = build_function_type_list (V4SI_type_node,
7220 long_integer_type_node, pcvoid_type_node, NULL_TREE);
7222 tree void_ftype_v4si_long_pvoid
7223 = build_function_type_list (void_type_node,
7224 V4SI_type_node, long_integer_type_node,
7225 pvoid_type_node, NULL_TREE);
7226 tree void_ftype_v16qi_long_pvoid
7227 = build_function_type_list (void_type_node,
7228 V16QI_type_node, long_integer_type_node,
7229 pvoid_type_node, NULL_TREE);
7230 tree void_ftype_v8hi_long_pvoid
7231 = build_function_type_list (void_type_node,
7232 V8HI_type_node, long_integer_type_node,
7233 pvoid_type_node, NULL_TREE);
7234 tree int_ftype_int_v8hi_v8hi
7235 = build_function_type_list (integer_type_node,
7236 integer_type_node, V8HI_type_node,
7237 V8HI_type_node, NULL_TREE);
7238 tree int_ftype_int_v16qi_v16qi
7239 = build_function_type_list (integer_type_node,
7240 integer_type_node, V16QI_type_node,
7241 V16QI_type_node, NULL_TREE);
7242 tree int_ftype_int_v4sf_v4sf
7243 = build_function_type_list (integer_type_node,
7244 integer_type_node, V4SF_type_node,
7245 V4SF_type_node, NULL_TREE);
7246 tree v4si_ftype_v4si
7247 = build_function_type_list (V4SI_type_node, V4SI_type_node, NULL_TREE);
7248 tree v8hi_ftype_v8hi
7249 = build_function_type_list (V8HI_type_node, V8HI_type_node, NULL_TREE);
7250 tree v16qi_ftype_v16qi
7251 = build_function_type_list (V16QI_type_node, V16QI_type_node, NULL_TREE);
7252 tree v4sf_ftype_v4sf
7253 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
7254 tree void_ftype_pcvoid_int_int
7255 = build_function_type_list (void_type_node,
7256 pcvoid_type_node, integer_type_node,
7257 integer_type_node, NULL_TREE);
7258 tree int_ftype_pcchar
7259 = build_function_type_list (integer_type_node,
7260 pcchar_type_node, NULL_TREE);
7262 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pcfloat,
7263 ALTIVEC_BUILTIN_LD_INTERNAL_4sf);
7264 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf,
7265 ALTIVEC_BUILTIN_ST_INTERNAL_4sf);
7266 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4si", v4si_ftype_pcint,
7267 ALTIVEC_BUILTIN_LD_INTERNAL_4si);
7268 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si,
7269 ALTIVEC_BUILTIN_ST_INTERNAL_4si);
7270 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pcshort,
7271 ALTIVEC_BUILTIN_LD_INTERNAL_8hi);
7272 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi,
7273 ALTIVEC_BUILTIN_ST_INTERNAL_8hi);
7274 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pcchar,
7275 ALTIVEC_BUILTIN_LD_INTERNAL_16qi);
7276 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi,
7277 ALTIVEC_BUILTIN_ST_INTERNAL_16qi);
7278 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mtvscr", void_ftype_v4si, ALTIVEC_BUILTIN_MTVSCR);
7279 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mfvscr", v8hi_ftype_void, ALTIVEC_BUILTIN_MFVSCR);
7280 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dssall", void_ftype_void, ALTIVEC_BUILTIN_DSSALL);
7281 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dss", void_ftype_qi, ALTIVEC_BUILTIN_DSS);
7282 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVSL);
7283 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsr", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVSR);
7284 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvebx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEBX);
7285 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvehx", v8hi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEHX);
7286 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvewx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEWX);
7287 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvxl", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVXL);
7288 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVX);
7289 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvx", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVX);
7290 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvewx", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVEWX);
7291 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvxl", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVXL);
7292 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvebx", void_ftype_v16qi_long_pvoid, ALTIVEC_BUILTIN_STVEBX);
7293 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvehx", void_ftype_v8hi_long_pvoid, ALTIVEC_BUILTIN_STVEHX);
7295 /* See altivec.h for usage of "__builtin_altivec_compiletime_error". */
7296 def_builtin (MASK_ALTIVEC, "__builtin_altivec_compiletime_error", int_ftype_pcchar,
7297 ALTIVEC_BUILTIN_COMPILETIME_ERROR);
7299 /* Add the DST variants. */
7300 d = (struct builtin_description *) bdesc_dst;
7301 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
7302 def_builtin (d->mask, d->name, void_ftype_pcvoid_int_int, d->code);
7304 /* Initialize the predicates. */
7305 dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
7306 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
7308 enum machine_mode mode1;
7311 mode1 = insn_data[dp->icode].operand[1].mode;
7316 type = int_ftype_int_v4si_v4si;
7319 type = int_ftype_int_v8hi_v8hi;
7322 type = int_ftype_int_v16qi_v16qi;
7325 type = int_ftype_int_v4sf_v4sf;
7331 def_builtin (dp->mask, dp->name, type, dp->code);
7334 /* Initialize the abs* operators. */
7335 d = (struct builtin_description *) bdesc_abs;
7336 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
7338 enum machine_mode mode0;
7341 mode0 = insn_data[d->icode].operand[0].mode;
7346 type = v4si_ftype_v4si;
7349 type = v8hi_ftype_v8hi;
7352 type = v16qi_ftype_v16qi;
7355 type = v4sf_ftype_v4sf;
7361 def_builtin (d->mask, d->name, type, d->code);
7366 rs6000_common_init_builtins (void)
7368 struct builtin_description *d;
7371 tree v4sf_ftype_v4sf_v4sf_v16qi
7372 = build_function_type_list (V4SF_type_node,
7373 V4SF_type_node, V4SF_type_node,
7374 V16QI_type_node, NULL_TREE);
7375 tree v4si_ftype_v4si_v4si_v16qi
7376 = build_function_type_list (V4SI_type_node,
7377 V4SI_type_node, V4SI_type_node,
7378 V16QI_type_node, NULL_TREE);
7379 tree v8hi_ftype_v8hi_v8hi_v16qi
7380 = build_function_type_list (V8HI_type_node,
7381 V8HI_type_node, V8HI_type_node,
7382 V16QI_type_node, NULL_TREE);
7383 tree v16qi_ftype_v16qi_v16qi_v16qi
7384 = build_function_type_list (V16QI_type_node,
7385 V16QI_type_node, V16QI_type_node,
7386 V16QI_type_node, NULL_TREE);
7388 = build_function_type_list (V4SI_type_node, integer_type_node, NULL_TREE);
7390 = build_function_type_list (V8HI_type_node, integer_type_node, NULL_TREE);
7391 tree v16qi_ftype_int
7392 = build_function_type_list (V16QI_type_node, integer_type_node, NULL_TREE);
7393 tree v8hi_ftype_v16qi
7394 = build_function_type_list (V8HI_type_node, V16QI_type_node, NULL_TREE);
7395 tree v4sf_ftype_v4sf
7396 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
7398 tree v2si_ftype_v2si_v2si
7399 = build_function_type_list (opaque_V2SI_type_node,
7400 opaque_V2SI_type_node,
7401 opaque_V2SI_type_node, NULL_TREE);
7403 tree v2sf_ftype_v2sf_v2sf
7404 = build_function_type_list (opaque_V2SF_type_node,
7405 opaque_V2SF_type_node,
7406 opaque_V2SF_type_node, NULL_TREE);
7408 tree v2si_ftype_int_int
7409 = build_function_type_list (opaque_V2SI_type_node,
7410 integer_type_node, integer_type_node,
7413 tree v2si_ftype_v2si
7414 = build_function_type_list (opaque_V2SI_type_node,
7415 opaque_V2SI_type_node, NULL_TREE);
7417 tree v2sf_ftype_v2sf
7418 = build_function_type_list (opaque_V2SF_type_node,
7419 opaque_V2SF_type_node, NULL_TREE);
7421 tree v2sf_ftype_v2si
7422 = build_function_type_list (opaque_V2SF_type_node,
7423 opaque_V2SI_type_node, NULL_TREE);
7425 tree v2si_ftype_v2sf
7426 = build_function_type_list (opaque_V2SI_type_node,
7427 opaque_V2SF_type_node, NULL_TREE);
7429 tree v2si_ftype_v2si_char
7430 = build_function_type_list (opaque_V2SI_type_node,
7431 opaque_V2SI_type_node,
7432 char_type_node, NULL_TREE);
7434 tree v2si_ftype_int_char
7435 = build_function_type_list (opaque_V2SI_type_node,
7436 integer_type_node, char_type_node, NULL_TREE);
7438 tree v2si_ftype_char
7439 = build_function_type_list (opaque_V2SI_type_node,
7440 char_type_node, NULL_TREE);
7442 tree int_ftype_int_int
7443 = build_function_type_list (integer_type_node,
7444 integer_type_node, integer_type_node,
7447 tree v4si_ftype_v4si_v4si
7448 = build_function_type_list (V4SI_type_node,
7449 V4SI_type_node, V4SI_type_node, NULL_TREE);
7450 tree v4sf_ftype_v4si_int
7451 = build_function_type_list (V4SF_type_node,
7452 V4SI_type_node, integer_type_node, NULL_TREE);
7453 tree v4si_ftype_v4sf_int
7454 = build_function_type_list (V4SI_type_node,
7455 V4SF_type_node, integer_type_node, NULL_TREE);
7456 tree v4si_ftype_v4si_int
7457 = build_function_type_list (V4SI_type_node,
7458 V4SI_type_node, integer_type_node, NULL_TREE);
7459 tree v8hi_ftype_v8hi_int
7460 = build_function_type_list (V8HI_type_node,
7461 V8HI_type_node, integer_type_node, NULL_TREE);
7462 tree v16qi_ftype_v16qi_int
7463 = build_function_type_list (V16QI_type_node,
7464 V16QI_type_node, integer_type_node, NULL_TREE);
7465 tree v16qi_ftype_v16qi_v16qi_int
7466 = build_function_type_list (V16QI_type_node,
7467 V16QI_type_node, V16QI_type_node,
7468 integer_type_node, NULL_TREE);
7469 tree v8hi_ftype_v8hi_v8hi_int
7470 = build_function_type_list (V8HI_type_node,
7471 V8HI_type_node, V8HI_type_node,
7472 integer_type_node, NULL_TREE);
7473 tree v4si_ftype_v4si_v4si_int
7474 = build_function_type_list (V4SI_type_node,
7475 V4SI_type_node, V4SI_type_node,
7476 integer_type_node, NULL_TREE);
7477 tree v4sf_ftype_v4sf_v4sf_int
7478 = build_function_type_list (V4SF_type_node,
7479 V4SF_type_node, V4SF_type_node,
7480 integer_type_node, NULL_TREE);
7481 tree v4sf_ftype_v4sf_v4sf
7482 = build_function_type_list (V4SF_type_node,
7483 V4SF_type_node, V4SF_type_node, NULL_TREE);
7484 tree v4sf_ftype_v4sf_v4sf_v4si
7485 = build_function_type_list (V4SF_type_node,
7486 V4SF_type_node, V4SF_type_node,
7487 V4SI_type_node, NULL_TREE);
7488 tree v4sf_ftype_v4sf_v4sf_v4sf
7489 = build_function_type_list (V4SF_type_node,
7490 V4SF_type_node, V4SF_type_node,
7491 V4SF_type_node, NULL_TREE);
7492 tree v4si_ftype_v4si_v4si_v4si
7493 = build_function_type_list (V4SI_type_node,
7494 V4SI_type_node, V4SI_type_node,
7495 V4SI_type_node, NULL_TREE);
7496 tree v8hi_ftype_v8hi_v8hi
7497 = build_function_type_list (V8HI_type_node,
7498 V8HI_type_node, V8HI_type_node, NULL_TREE);
7499 tree v8hi_ftype_v8hi_v8hi_v8hi
7500 = build_function_type_list (V8HI_type_node,
7501 V8HI_type_node, V8HI_type_node,
7502 V8HI_type_node, NULL_TREE);
7503 tree v4si_ftype_v8hi_v8hi_v4si
7504 = build_function_type_list (V4SI_type_node,
7505 V8HI_type_node, V8HI_type_node,
7506 V4SI_type_node, NULL_TREE);
7507 tree v4si_ftype_v16qi_v16qi_v4si
7508 = build_function_type_list (V4SI_type_node,
7509 V16QI_type_node, V16QI_type_node,
7510 V4SI_type_node, NULL_TREE);
7511 tree v16qi_ftype_v16qi_v16qi
7512 = build_function_type_list (V16QI_type_node,
7513 V16QI_type_node, V16QI_type_node, NULL_TREE);
7514 tree v4si_ftype_v4sf_v4sf
7515 = build_function_type_list (V4SI_type_node,
7516 V4SF_type_node, V4SF_type_node, NULL_TREE);
7517 tree v8hi_ftype_v16qi_v16qi
7518 = build_function_type_list (V8HI_type_node,
7519 V16QI_type_node, V16QI_type_node, NULL_TREE);
7520 tree v4si_ftype_v8hi_v8hi
7521 = build_function_type_list (V4SI_type_node,
7522 V8HI_type_node, V8HI_type_node, NULL_TREE);
7523 tree v8hi_ftype_v4si_v4si
7524 = build_function_type_list (V8HI_type_node,
7525 V4SI_type_node, V4SI_type_node, NULL_TREE);
7526 tree v16qi_ftype_v8hi_v8hi
7527 = build_function_type_list (V16QI_type_node,
7528 V8HI_type_node, V8HI_type_node, NULL_TREE);
7529 tree v4si_ftype_v16qi_v4si
7530 = build_function_type_list (V4SI_type_node,
7531 V16QI_type_node, V4SI_type_node, NULL_TREE);
7532 tree v4si_ftype_v16qi_v16qi
7533 = build_function_type_list (V4SI_type_node,
7534 V16QI_type_node, V16QI_type_node, NULL_TREE);
7535 tree v4si_ftype_v8hi_v4si
7536 = build_function_type_list (V4SI_type_node,
7537 V8HI_type_node, V4SI_type_node, NULL_TREE);
7538 tree v4si_ftype_v8hi
7539 = build_function_type_list (V4SI_type_node, V8HI_type_node, NULL_TREE);
7540 tree int_ftype_v4si_v4si
7541 = build_function_type_list (integer_type_node,
7542 V4SI_type_node, V4SI_type_node, NULL_TREE);
7543 tree int_ftype_v4sf_v4sf
7544 = build_function_type_list (integer_type_node,
7545 V4SF_type_node, V4SF_type_node, NULL_TREE);
7546 tree int_ftype_v16qi_v16qi
7547 = build_function_type_list (integer_type_node,
7548 V16QI_type_node, V16QI_type_node, NULL_TREE);
7549 tree int_ftype_v8hi_v8hi
7550 = build_function_type_list (integer_type_node,
7551 V8HI_type_node, V8HI_type_node, NULL_TREE);
7553 /* Add the simple ternary operators. */
7554 d = (struct builtin_description *) bdesc_3arg;
7555 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
7558 enum machine_mode mode0, mode1, mode2, mode3;
7561 if (d->name == 0 || d->icode == CODE_FOR_nothing)
7564 mode0 = insn_data[d->icode].operand[0].mode;
7565 mode1 = insn_data[d->icode].operand[1].mode;
7566 mode2 = insn_data[d->icode].operand[2].mode;
7567 mode3 = insn_data[d->icode].operand[3].mode;
7569 /* When all four are of the same mode. */
7570 if (mode0 == mode1 && mode1 == mode2 && mode2 == mode3)
7575 type = v4si_ftype_v4si_v4si_v4si;
7578 type = v4sf_ftype_v4sf_v4sf_v4sf;
7581 type = v8hi_ftype_v8hi_v8hi_v8hi;
7584 type = v16qi_ftype_v16qi_v16qi_v16qi;
7590 else if (mode0 == mode1 && mode1 == mode2 && mode3 == V16QImode)
7595 type = v4si_ftype_v4si_v4si_v16qi;
7598 type = v4sf_ftype_v4sf_v4sf_v16qi;
7601 type = v8hi_ftype_v8hi_v8hi_v16qi;
7604 type = v16qi_ftype_v16qi_v16qi_v16qi;
7610 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode
7611 && mode3 == V4SImode)
7612 type = v4si_ftype_v16qi_v16qi_v4si;
7613 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode
7614 && mode3 == V4SImode)
7615 type = v4si_ftype_v8hi_v8hi_v4si;
7616 else if (mode0 == V4SFmode && mode1 == V4SFmode && mode2 == V4SFmode
7617 && mode3 == V4SImode)
7618 type = v4sf_ftype_v4sf_v4sf_v4si;
7620 /* vchar, vchar, vchar, 4 bit literal. */
7621 else if (mode0 == V16QImode && mode1 == mode0 && mode2 == mode0
7623 type = v16qi_ftype_v16qi_v16qi_int;
7625 /* vshort, vshort, vshort, 4 bit literal. */
7626 else if (mode0 == V8HImode && mode1 == mode0 && mode2 == mode0
7628 type = v8hi_ftype_v8hi_v8hi_int;
7630 /* vint, vint, vint, 4 bit literal. */
7631 else if (mode0 == V4SImode && mode1 == mode0 && mode2 == mode0
7633 type = v4si_ftype_v4si_v4si_int;
7635 /* vfloat, vfloat, vfloat, 4 bit literal. */
7636 else if (mode0 == V4SFmode && mode1 == mode0 && mode2 == mode0
7638 type = v4sf_ftype_v4sf_v4sf_int;
7643 def_builtin (d->mask, d->name, type, d->code);
7646 /* Add the simple binary operators. */
7647 d = (struct builtin_description *) bdesc_2arg;
7648 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
7650 enum machine_mode mode0, mode1, mode2;
7653 if (d->name == 0 || d->icode == CODE_FOR_nothing)
7656 mode0 = insn_data[d->icode].operand[0].mode;
7657 mode1 = insn_data[d->icode].operand[1].mode;
7658 mode2 = insn_data[d->icode].operand[2].mode;
7660 /* When all three operands are of the same mode. */
7661 if (mode0 == mode1 && mode1 == mode2)
7666 type = v4sf_ftype_v4sf_v4sf;
7669 type = v4si_ftype_v4si_v4si;
7672 type = v16qi_ftype_v16qi_v16qi;
7675 type = v8hi_ftype_v8hi_v8hi;
7678 type = v2si_ftype_v2si_v2si;
7681 type = v2sf_ftype_v2sf_v2sf;
7684 type = int_ftype_int_int;
7691 /* A few other combos we really don't want to do manually. */
7693 /* vint, vfloat, vfloat. */
7694 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == V4SFmode)
7695 type = v4si_ftype_v4sf_v4sf;
7697 /* vshort, vchar, vchar. */
7698 else if (mode0 == V8HImode && mode1 == V16QImode && mode2 == V16QImode)
7699 type = v8hi_ftype_v16qi_v16qi;
7701 /* vint, vshort, vshort. */
7702 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode)
7703 type = v4si_ftype_v8hi_v8hi;
7705 /* vshort, vint, vint. */
7706 else if (mode0 == V8HImode && mode1 == V4SImode && mode2 == V4SImode)
7707 type = v8hi_ftype_v4si_v4si;
7709 /* vchar, vshort, vshort. */
7710 else if (mode0 == V16QImode && mode1 == V8HImode && mode2 == V8HImode)
7711 type = v16qi_ftype_v8hi_v8hi;
7713 /* vint, vchar, vint. */
7714 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V4SImode)
7715 type = v4si_ftype_v16qi_v4si;
7717 /* vint, vchar, vchar. */
7718 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode)
7719 type = v4si_ftype_v16qi_v16qi;
7721 /* vint, vshort, vint. */
7722 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V4SImode)
7723 type = v4si_ftype_v8hi_v4si;
7725 /* vint, vint, 5 bit literal. */
7726 else if (mode0 == V4SImode && mode1 == V4SImode && mode2 == QImode)
7727 type = v4si_ftype_v4si_int;
7729 /* vshort, vshort, 5 bit literal. */
7730 else if (mode0 == V8HImode && mode1 == V8HImode && mode2 == QImode)
7731 type = v8hi_ftype_v8hi_int;
7733 /* vchar, vchar, 5 bit literal. */
7734 else if (mode0 == V16QImode && mode1 == V16QImode && mode2 == QImode)
7735 type = v16qi_ftype_v16qi_int;
7737 /* vfloat, vint, 5 bit literal. */
7738 else if (mode0 == V4SFmode && mode1 == V4SImode && mode2 == QImode)
7739 type = v4sf_ftype_v4si_int;
7741 /* vint, vfloat, 5 bit literal. */
7742 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == QImode)
7743 type = v4si_ftype_v4sf_int;
7745 else if (mode0 == V2SImode && mode1 == SImode && mode2 == SImode)
7746 type = v2si_ftype_int_int;
7748 else if (mode0 == V2SImode && mode1 == V2SImode && mode2 == QImode)
7749 type = v2si_ftype_v2si_char;
7751 else if (mode0 == V2SImode && mode1 == SImode && mode2 == QImode)
7752 type = v2si_ftype_int_char;
7755 else if (mode0 == SImode)
7760 type = int_ftype_v4si_v4si;
7763 type = int_ftype_v4sf_v4sf;
7766 type = int_ftype_v16qi_v16qi;
7769 type = int_ftype_v8hi_v8hi;
7779 def_builtin (d->mask, d->name, type, d->code);
7782 /* Add the simple unary operators. */
7783 d = (struct builtin_description *) bdesc_1arg;
7784 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
7786 enum machine_mode mode0, mode1;
7789 if (d->name == 0 || d->icode == CODE_FOR_nothing)
7792 mode0 = insn_data[d->icode].operand[0].mode;
7793 mode1 = insn_data[d->icode].operand[1].mode;
7795 if (mode0 == V4SImode && mode1 == QImode)
7796 type = v4si_ftype_int;
7797 else if (mode0 == V8HImode && mode1 == QImode)
7798 type = v8hi_ftype_int;
7799 else if (mode0 == V16QImode && mode1 == QImode)
7800 type = v16qi_ftype_int;
7801 else if (mode0 == V4SFmode && mode1 == V4SFmode)
7802 type = v4sf_ftype_v4sf;
7803 else if (mode0 == V8HImode && mode1 == V16QImode)
7804 type = v8hi_ftype_v16qi;
7805 else if (mode0 == V4SImode && mode1 == V8HImode)
7806 type = v4si_ftype_v8hi;
7807 else if (mode0 == V2SImode && mode1 == V2SImode)
7808 type = v2si_ftype_v2si;
7809 else if (mode0 == V2SFmode && mode1 == V2SFmode)
7810 type = v2sf_ftype_v2sf;
7811 else if (mode0 == V2SFmode && mode1 == V2SImode)
7812 type = v2sf_ftype_v2si;
7813 else if (mode0 == V2SImode && mode1 == V2SFmode)
7814 type = v2si_ftype_v2sf;
7815 else if (mode0 == V2SImode && mode1 == QImode)
7816 type = v2si_ftype_char;
7820 def_builtin (d->mask, d->name, type, d->code);
7825 rs6000_init_libfuncs (void)
7827 if (!TARGET_HARD_FLOAT)
7830 if (DEFAULT_ABI != ABI_V4)
7832 if (TARGET_XCOFF && ! TARGET_POWER2 && ! TARGET_POWERPC)
7834 /* AIX library routines for float->int conversion. */
7835 set_conv_libfunc (sfix_optab, SImode, DFmode, "__itrunc");
7836 set_conv_libfunc (ufix_optab, SImode, DFmode, "__uitrunc");
7837 set_conv_libfunc (sfix_optab, SImode, TFmode, "_qitrunc");
7838 set_conv_libfunc (ufix_optab, SImode, TFmode, "_quitrunc");
7841 /* Standard AIX/Darwin/64-bit SVR4 quad floating point routines. */
7842 set_optab_libfunc (add_optab, TFmode, "_xlqadd");
7843 set_optab_libfunc (sub_optab, TFmode, "_xlqsub");
7844 set_optab_libfunc (smul_optab, TFmode, "_xlqmul");
7845 set_optab_libfunc (sdiv_optab, TFmode, "_xlqdiv");
7849 /* 32-bit SVR4 quad floating point routines. */
7851 set_optab_libfunc (add_optab, TFmode, "_q_add");
7852 set_optab_libfunc (sub_optab, TFmode, "_q_sub");
7853 set_optab_libfunc (neg_optab, TFmode, "_q_neg");
7854 set_optab_libfunc (smul_optab, TFmode, "_q_mul");
7855 set_optab_libfunc (sdiv_optab, TFmode, "_q_div");
7856 if (TARGET_PPC_GPOPT || TARGET_POWER2)
7857 set_optab_libfunc (sqrt_optab, TFmode, "_q_sqrt");
7859 set_optab_libfunc (eq_optab, TFmode, "_q_feq");
7860 set_optab_libfunc (ne_optab, TFmode, "_q_fne");
7861 set_optab_libfunc (gt_optab, TFmode, "_q_fgt");
7862 set_optab_libfunc (ge_optab, TFmode, "_q_fge");
7863 set_optab_libfunc (lt_optab, TFmode, "_q_flt");
7864 set_optab_libfunc (le_optab, TFmode, "_q_fle");
7866 set_conv_libfunc (sext_optab, TFmode, SFmode, "_q_stoq");
7867 set_conv_libfunc (sext_optab, TFmode, DFmode, "_q_dtoq");
7868 set_conv_libfunc (trunc_optab, SFmode, TFmode, "_q_qtos");
7869 set_conv_libfunc (trunc_optab, DFmode, TFmode, "_q_qtod");
7870 set_conv_libfunc (sfix_optab, SImode, TFmode, "_q_qtoi");
7871 set_conv_libfunc (ufix_optab, SImode, TFmode, "_q_qtou");
7872 set_conv_libfunc (sfloat_optab, TFmode, SImode, "_q_itoq");
7876 /* Expand a block move operation, and return 1 if successful. Return 0
7877 if we should let the compiler generate normal code.
7879 operands[0] is the destination
7880 operands[1] is the source
7881 operands[2] is the length
7882 operands[3] is the alignment */
7884 #define MAX_MOVE_REG 4
7887 expand_block_move (rtx operands[])
7889 rtx orig_dest = operands[0];
7890 rtx orig_src = operands[1];
7891 rtx bytes_rtx = operands[2];
7892 rtx align_rtx = operands[3];
7893 int constp = (GET_CODE (bytes_rtx) == CONST_INT);
7898 rtx stores[MAX_MOVE_REG];
7901 /* If this is not a fixed size move, just call memcpy */
7905 /* If this is not a fixed size alignment, abort */
7906 if (GET_CODE (align_rtx) != CONST_INT)
7908 align = INTVAL (align_rtx);
7910 /* Anything to move? */
7911 bytes = INTVAL (bytes_rtx);
7915 /* store_one_arg depends on expand_block_move to handle at least the size of
7916 reg_parm_stack_space. */
7917 if (bytes > (TARGET_POWERPC64 ? 64 : 32))
7920 for (offset = 0; bytes > 0; offset += move_bytes, bytes -= move_bytes)
7923 rtx (*movstrsi) (rtx, rtx, rtx, rtx);
7924 rtx (*mov) (rtx, rtx);
7926 enum machine_mode mode = BLKmode;
7930 && bytes > 24 /* move up to 32 bytes at a time */
7938 && ! fixed_regs[12])
7940 move_bytes = (bytes > 32) ? 32 : bytes;
7941 gen_func.movstrsi = gen_movstrsi_8reg;
7943 else if (TARGET_STRING
7944 && bytes > 16 /* move up to 24 bytes at a time */
7950 && ! fixed_regs[10])
7952 move_bytes = (bytes > 24) ? 24 : bytes;
7953 gen_func.movstrsi = gen_movstrsi_6reg;
7955 else if (TARGET_STRING
7956 && bytes > 8 /* move up to 16 bytes at a time */
7962 move_bytes = (bytes > 16) ? 16 : bytes;
7963 gen_func.movstrsi = gen_movstrsi_4reg;
7965 else if (bytes >= 8 && TARGET_POWERPC64
7966 /* 64-bit loads and stores require word-aligned
7968 && (align >= 8 || (! STRICT_ALIGNMENT && align >= 4)))
7972 gen_func.mov = gen_movdi;
7974 else if (TARGET_STRING && bytes > 4 && !TARGET_POWERPC64)
7975 { /* move up to 8 bytes at a time */
7976 move_bytes = (bytes > 8) ? 8 : bytes;
7977 gen_func.movstrsi = gen_movstrsi_2reg;
7979 else if (bytes >= 4 && (align >= 4 || ! STRICT_ALIGNMENT))
7980 { /* move 4 bytes */
7983 gen_func.mov = gen_movsi;
7985 else if (bytes == 2 && (align >= 2 || ! STRICT_ALIGNMENT))
7986 { /* move 2 bytes */
7989 gen_func.mov = gen_movhi;
7991 else if (TARGET_STRING && bytes > 1)
7992 { /* move up to 4 bytes at a time */
7993 move_bytes = (bytes > 4) ? 4 : bytes;
7994 gen_func.movstrsi = gen_movstrsi_1reg;
7996 else /* move 1 byte at a time */
8000 gen_func.mov = gen_movqi;
8003 src = adjust_address (orig_src, mode, offset);
8004 dest = adjust_address (orig_dest, mode, offset);
8006 if (mode != BLKmode)
8008 rtx tmp_reg = gen_reg_rtx (mode);
8010 emit_insn ((*gen_func.mov) (tmp_reg, src));
8011 stores[num_reg++] = (*gen_func.mov) (dest, tmp_reg);
8014 if (mode == BLKmode || num_reg >= MAX_MOVE_REG || bytes == move_bytes)
8017 for (i = 0; i < num_reg; i++)
8018 emit_insn (stores[i]);
8022 if (mode == BLKmode)
8024 /* Move the address into scratch registers. The movstrsi
8025 patterns require zero offset. */
8026 if (!REG_P (XEXP (src, 0)))
8028 rtx src_reg = copy_addr_to_reg (XEXP (src, 0));
8029 src = replace_equiv_address (src, src_reg);
8031 set_mem_size (src, GEN_INT (move_bytes));
8033 if (!REG_P (XEXP (dest, 0)))
8035 rtx dest_reg = copy_addr_to_reg (XEXP (dest, 0));
8036 dest = replace_equiv_address (dest, dest_reg);
8038 set_mem_size (dest, GEN_INT (move_bytes));
8040 emit_insn ((*gen_func.movstrsi) (dest, src,
8041 GEN_INT (move_bytes & 31),
8050 /* Return 1 if OP is a load multiple operation. It is known to be a
8051 PARALLEL and the first section will be tested. */
8054 load_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8056 int count = XVECLEN (op, 0);
8057 unsigned int dest_regno;
8061 /* Perform a quick check so we don't blow up below. */
8063 || GET_CODE (XVECEXP (op, 0, 0)) != SET
8064 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
8065 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
8068 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
8069 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
8071 for (i = 1; i < count; i++)
8073 rtx elt = XVECEXP (op, 0, i);
8075 if (GET_CODE (elt) != SET
8076 || GET_CODE (SET_DEST (elt)) != REG
8077 || GET_MODE (SET_DEST (elt)) != SImode
8078 || REGNO (SET_DEST (elt)) != dest_regno + i
8079 || GET_CODE (SET_SRC (elt)) != MEM
8080 || GET_MODE (SET_SRC (elt)) != SImode
8081 || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
8082 || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
8083 || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
8084 || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != i * 4)
8091 /* Similar, but tests for store multiple. Here, the second vector element
8092 is a CLOBBER. It will be tested later. */
8095 store_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8097 int count = XVECLEN (op, 0) - 1;
8098 unsigned int src_regno;
8102 /* Perform a quick check so we don't blow up below. */
8104 || GET_CODE (XVECEXP (op, 0, 0)) != SET
8105 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
8106 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
8109 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
8110 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
8112 for (i = 1; i < count; i++)
8114 rtx elt = XVECEXP (op, 0, i + 1);
8116 if (GET_CODE (elt) != SET
8117 || GET_CODE (SET_SRC (elt)) != REG
8118 || GET_MODE (SET_SRC (elt)) != SImode
8119 || REGNO (SET_SRC (elt)) != src_regno + i
8120 || GET_CODE (SET_DEST (elt)) != MEM
8121 || GET_MODE (SET_DEST (elt)) != SImode
8122 || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
8123 || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
8124 || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
8125 || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != i * 4)
8132 /* Return a string to perform a load_multiple operation.
8133 operands[0] is the vector.
8134 operands[1] is the source address.
8135 operands[2] is the first destination register. */
8138 rs6000_output_load_multiple (rtx operands[3])
8140 /* We have to handle the case where the pseudo used to contain the address
8141 is assigned to one of the output registers. */
8143 int words = XVECLEN (operands[0], 0);
8146 if (XVECLEN (operands[0], 0) == 1)
8147 return "{l|lwz} %2,0(%1)";
8149 for (i = 0; i < words; i++)
8150 if (refers_to_regno_p (REGNO (operands[2]) + i,
8151 REGNO (operands[2]) + i + 1, operands[1], 0))
8155 xop[0] = GEN_INT (4 * (words-1));
8156 xop[1] = operands[1];
8157 xop[2] = operands[2];
8158 output_asm_insn ("{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,%0(%1)", xop);
8163 xop[0] = GEN_INT (4 * (words-1));
8164 xop[1] = operands[1];
8165 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
8166 output_asm_insn ("{cal %1,4(%1)|addi %1,%1,4}\n\t{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,-4(%1)", xop);
8171 for (j = 0; j < words; j++)
8174 xop[0] = GEN_INT (j * 4);
8175 xop[1] = operands[1];
8176 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + j);
8177 output_asm_insn ("{l|lwz} %2,%0(%1)", xop);
8179 xop[0] = GEN_INT (i * 4);
8180 xop[1] = operands[1];
8181 output_asm_insn ("{l|lwz} %1,%0(%1)", xop);
8186 return "{lsi|lswi} %2,%1,%N0";
8189 /* Return 1 for a parallel vrsave operation. */
8192 vrsave_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8194 int count = XVECLEN (op, 0);
8195 unsigned int dest_regno, src_regno;
8199 || GET_CODE (XVECEXP (op, 0, 0)) != SET
8200 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
8201 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC_VOLATILE)
8204 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
8205 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
8207 if (dest_regno != VRSAVE_REGNO
8208 && src_regno != VRSAVE_REGNO)
8211 for (i = 1; i < count; i++)
8213 rtx elt = XVECEXP (op, 0, i);
8215 if (GET_CODE (elt) != CLOBBER
8216 && GET_CODE (elt) != SET)
8223 /* Return 1 for an PARALLEL suitable for mfcr. */
8226 mfcr_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8228 int count = XVECLEN (op, 0);
8231 /* Perform a quick check so we don't blow up below. */
8233 || GET_CODE (XVECEXP (op, 0, 0)) != SET
8234 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC
8235 || XVECLEN (SET_SRC (XVECEXP (op, 0, 0)), 0) != 2)
8238 for (i = 0; i < count; i++)
8240 rtx exp = XVECEXP (op, 0, i);
8245 src_reg = XVECEXP (SET_SRC (exp), 0, 0);
8247 if (GET_CODE (src_reg) != REG
8248 || GET_MODE (src_reg) != CCmode
8249 || ! CR_REGNO_P (REGNO (src_reg)))
8252 if (GET_CODE (exp) != SET
8253 || GET_CODE (SET_DEST (exp)) != REG
8254 || GET_MODE (SET_DEST (exp)) != SImode
8255 || ! INT_REGNO_P (REGNO (SET_DEST (exp))))
8257 unspec = SET_SRC (exp);
8258 maskval = 1 << (MAX_CR_REGNO - REGNO (src_reg));
8260 if (GET_CODE (unspec) != UNSPEC
8261 || XINT (unspec, 1) != UNSPEC_MOVESI_FROM_CR
8262 || XVECLEN (unspec, 0) != 2
8263 || XVECEXP (unspec, 0, 0) != src_reg
8264 || GET_CODE (XVECEXP (unspec, 0, 1)) != CONST_INT
8265 || INTVAL (XVECEXP (unspec, 0, 1)) != maskval)
8271 /* Return 1 for an PARALLEL suitable for mtcrf. */
8274 mtcrf_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8276 int count = XVECLEN (op, 0);
8280 /* Perform a quick check so we don't blow up below. */
8282 || GET_CODE (XVECEXP (op, 0, 0)) != SET
8283 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC
8284 || XVECLEN (SET_SRC (XVECEXP (op, 0, 0)), 0) != 2)
8286 src_reg = XVECEXP (SET_SRC (XVECEXP (op, 0, 0)), 0, 0);
8288 if (GET_CODE (src_reg) != REG
8289 || GET_MODE (src_reg) != SImode
8290 || ! INT_REGNO_P (REGNO (src_reg)))
8293 for (i = 0; i < count; i++)
8295 rtx exp = XVECEXP (op, 0, i);
8299 if (GET_CODE (exp) != SET
8300 || GET_CODE (SET_DEST (exp)) != REG
8301 || GET_MODE (SET_DEST (exp)) != CCmode
8302 || ! CR_REGNO_P (REGNO (SET_DEST (exp))))
8304 unspec = SET_SRC (exp);
8305 maskval = 1 << (MAX_CR_REGNO - REGNO (SET_DEST (exp)));
8307 if (GET_CODE (unspec) != UNSPEC
8308 || XINT (unspec, 1) != UNSPEC_MOVESI_TO_CR
8309 || XVECLEN (unspec, 0) != 2
8310 || XVECEXP (unspec, 0, 0) != src_reg
8311 || GET_CODE (XVECEXP (unspec, 0, 1)) != CONST_INT
8312 || INTVAL (XVECEXP (unspec, 0, 1)) != maskval)
8318 /* Return 1 for an PARALLEL suitable for lmw. */
8321 lmw_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8323 int count = XVECLEN (op, 0);
8324 unsigned int dest_regno;
8326 unsigned int base_regno;
8327 HOST_WIDE_INT offset;
8330 /* Perform a quick check so we don't blow up below. */
8332 || GET_CODE (XVECEXP (op, 0, 0)) != SET
8333 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
8334 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
8337 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
8338 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
8341 || count != 32 - (int) dest_regno)
8344 if (legitimate_indirect_address_p (src_addr, 0))
8347 base_regno = REGNO (src_addr);
8348 if (base_regno == 0)
8351 else if (legitimate_offset_address_p (SImode, src_addr, 0))
8353 offset = INTVAL (XEXP (src_addr, 1));
8354 base_regno = REGNO (XEXP (src_addr, 0));
8359 for (i = 0; i < count; i++)
8361 rtx elt = XVECEXP (op, 0, i);
8364 HOST_WIDE_INT newoffset;
8366 if (GET_CODE (elt) != SET
8367 || GET_CODE (SET_DEST (elt)) != REG
8368 || GET_MODE (SET_DEST (elt)) != SImode
8369 || REGNO (SET_DEST (elt)) != dest_regno + i
8370 || GET_CODE (SET_SRC (elt)) != MEM
8371 || GET_MODE (SET_SRC (elt)) != SImode)
8373 newaddr = XEXP (SET_SRC (elt), 0);
8374 if (legitimate_indirect_address_p (newaddr, 0))
8379 else if (legitimate_offset_address_p (SImode, newaddr, 0))
8381 addr_reg = XEXP (newaddr, 0);
8382 newoffset = INTVAL (XEXP (newaddr, 1));
8386 if (REGNO (addr_reg) != base_regno
8387 || newoffset != offset + 4 * i)
8394 /* Return 1 for an PARALLEL suitable for stmw. */
8397 stmw_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8399 int count = XVECLEN (op, 0);
8400 unsigned int src_regno;
8402 unsigned int base_regno;
8403 HOST_WIDE_INT offset;
8406 /* Perform a quick check so we don't blow up below. */
8408 || GET_CODE (XVECEXP (op, 0, 0)) != SET
8409 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
8410 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
8413 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
8414 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
8417 || count != 32 - (int) src_regno)
8420 if (legitimate_indirect_address_p (dest_addr, 0))
8423 base_regno = REGNO (dest_addr);
8424 if (base_regno == 0)
8427 else if (legitimate_offset_address_p (SImode, dest_addr, 0))
8429 offset = INTVAL (XEXP (dest_addr, 1));
8430 base_regno = REGNO (XEXP (dest_addr, 0));
8435 for (i = 0; i < count; i++)
8437 rtx elt = XVECEXP (op, 0, i);
8440 HOST_WIDE_INT newoffset;
8442 if (GET_CODE (elt) != SET
8443 || GET_CODE (SET_SRC (elt)) != REG
8444 || GET_MODE (SET_SRC (elt)) != SImode
8445 || REGNO (SET_SRC (elt)) != src_regno + i
8446 || GET_CODE (SET_DEST (elt)) != MEM
8447 || GET_MODE (SET_DEST (elt)) != SImode)
8449 newaddr = XEXP (SET_DEST (elt), 0);
8450 if (legitimate_indirect_address_p (newaddr, 0))
8455 else if (legitimate_offset_address_p (SImode, newaddr, 0))
8457 addr_reg = XEXP (newaddr, 0);
8458 newoffset = INTVAL (XEXP (newaddr, 1));
8462 if (REGNO (addr_reg) != base_regno
8463 || newoffset != offset + 4 * i)
8470 /* A validation routine: say whether CODE, a condition code, and MODE
8471 match. The other alternatives either don't make sense or should
8472 never be generated. */
8475 validate_condition_mode (enum rtx_code code, enum machine_mode mode)
8477 if ((GET_RTX_CLASS (code) != RTX_COMPARE
8478 && GET_RTX_CLASS (code) != RTX_COMM_COMPARE)
8479 || GET_MODE_CLASS (mode) != MODE_CC)
8482 /* These don't make sense. */
8483 if ((code == GT || code == LT || code == GE || code == LE)
8484 && mode == CCUNSmode)
8487 if ((code == GTU || code == LTU || code == GEU || code == LEU)
8488 && mode != CCUNSmode)
8491 if (mode != CCFPmode
8492 && (code == ORDERED || code == UNORDERED
8493 || code == UNEQ || code == LTGT
8494 || code == UNGT || code == UNLT
8495 || code == UNGE || code == UNLE))
8498 /* These should never be generated except for
8499 flag_finite_math_only. */
8500 if (mode == CCFPmode
8501 && ! flag_finite_math_only
8502 && (code == LE || code == GE
8503 || code == UNEQ || code == LTGT
8504 || code == UNGT || code == UNLT))
8507 /* These are invalid; the information is not there. */
8508 if (mode == CCEQmode
8509 && code != EQ && code != NE)
8513 /* Return 1 if OP is a comparison operation that is valid for a branch insn.
8514 We only check the opcode against the mode of the CC value here. */
8517 branch_comparison_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8519 enum rtx_code code = GET_CODE (op);
8520 enum machine_mode cc_mode;
8522 if (!COMPARISON_P (op))
8525 cc_mode = GET_MODE (XEXP (op, 0));
8526 if (GET_MODE_CLASS (cc_mode) != MODE_CC)
8529 validate_condition_mode (code, cc_mode);
8534 /* Return 1 if OP is a comparison operation that is valid for a branch
8535 insn and which is true if the corresponding bit in the CC register
8539 branch_positive_comparison_operator (rtx op, enum machine_mode mode)
8543 if (! branch_comparison_operator (op, mode))
8546 code = GET_CODE (op);
8547 return (code == EQ || code == LT || code == GT
8548 || code == LTU || code == GTU
8549 || code == UNORDERED);
8552 /* Return 1 if OP is a comparison operation that is valid for an scc
8553 insn: it must be a positive comparison. */
8556 scc_comparison_operator (rtx op, enum machine_mode mode)
8558 return branch_positive_comparison_operator (op, mode);
8562 trap_comparison_operator (rtx op, enum machine_mode mode)
8564 if (mode != VOIDmode && mode != GET_MODE (op))
8566 return COMPARISON_P (op);
8570 boolean_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8572 enum rtx_code code = GET_CODE (op);
8573 return (code == AND || code == IOR || code == XOR);
8577 boolean_or_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8579 enum rtx_code code = GET_CODE (op);
8580 return (code == IOR || code == XOR);
8584 min_max_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8586 enum rtx_code code = GET_CODE (op);
8587 return (code == SMIN || code == SMAX || code == UMIN || code == UMAX);
8590 /* Return 1 if ANDOP is a mask that has no bits on that are not in the
8591 mask required to convert the result of a rotate insn into a shift
8592 left insn of SHIFTOP bits. Both are known to be SImode CONST_INT. */
8595 includes_lshift_p (rtx shiftop, rtx andop)
8597 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
8599 shift_mask <<= INTVAL (shiftop);
8601 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
8604 /* Similar, but for right shift. */
8607 includes_rshift_p (rtx shiftop, rtx andop)
8609 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
8611 shift_mask >>= INTVAL (shiftop);
8613 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
8616 /* Return 1 if ANDOP is a mask suitable for use with an rldic insn
8617 to perform a left shift. It must have exactly SHIFTOP least
8618 significant 0's, then one or more 1's, then zero or more 0's. */
8621 includes_rldic_lshift_p (rtx shiftop, rtx andop)
8623 if (GET_CODE (andop) == CONST_INT)
8625 HOST_WIDE_INT c, lsb, shift_mask;
8628 if (c == 0 || c == ~0)
8632 shift_mask <<= INTVAL (shiftop);
8634 /* Find the least significant one bit. */
8637 /* It must coincide with the LSB of the shift mask. */
8638 if (-lsb != shift_mask)
8641 /* Invert to look for the next transition (if any). */
8644 /* Remove the low group of ones (originally low group of zeros). */
8647 /* Again find the lsb, and check we have all 1's above. */
8651 else if (GET_CODE (andop) == CONST_DOUBLE
8652 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
8654 HOST_WIDE_INT low, high, lsb;
8655 HOST_WIDE_INT shift_mask_low, shift_mask_high;
8657 low = CONST_DOUBLE_LOW (andop);
8658 if (HOST_BITS_PER_WIDE_INT < 64)
8659 high = CONST_DOUBLE_HIGH (andop);
8661 if ((low == 0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == 0))
8662 || (low == ~0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0)))
8665 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
8667 shift_mask_high = ~0;
8668 if (INTVAL (shiftop) > 32)
8669 shift_mask_high <<= INTVAL (shiftop) - 32;
8673 if (-lsb != shift_mask_high || INTVAL (shiftop) < 32)
8680 return high == -lsb;
8683 shift_mask_low = ~0;
8684 shift_mask_low <<= INTVAL (shiftop);
8688 if (-lsb != shift_mask_low)
8691 if (HOST_BITS_PER_WIDE_INT < 64)
8696 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
8699 return high == -lsb;
8703 return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
8709 /* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
8710 to perform a left shift. It must have SHIFTOP or more least
8711 significant 0's, with the remainder of the word 1's. */
8714 includes_rldicr_lshift_p (rtx shiftop, rtx andop)
8716 if (GET_CODE (andop) == CONST_INT)
8718 HOST_WIDE_INT c, lsb, shift_mask;
8721 shift_mask <<= INTVAL (shiftop);
8724 /* Find the least significant one bit. */
8727 /* It must be covered by the shift mask.
8728 This test also rejects c == 0. */
8729 if ((lsb & shift_mask) == 0)
8732 /* Check we have all 1's above the transition, and reject all 1's. */
8733 return c == -lsb && lsb != 1;
8735 else if (GET_CODE (andop) == CONST_DOUBLE
8736 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
8738 HOST_WIDE_INT low, lsb, shift_mask_low;
8740 low = CONST_DOUBLE_LOW (andop);
8742 if (HOST_BITS_PER_WIDE_INT < 64)
8744 HOST_WIDE_INT high, shift_mask_high;
8746 high = CONST_DOUBLE_HIGH (andop);
8750 shift_mask_high = ~0;
8751 if (INTVAL (shiftop) > 32)
8752 shift_mask_high <<= INTVAL (shiftop) - 32;
8756 if ((lsb & shift_mask_high) == 0)
8759 return high == -lsb;
8765 shift_mask_low = ~0;
8766 shift_mask_low <<= INTVAL (shiftop);
8770 if ((lsb & shift_mask_low) == 0)
8773 return low == -lsb && lsb != 1;
8779 /* Return 1 if REGNO (reg1) == REGNO (reg2) - 1 making them candidates
8780 for lfq and stfq insns iff the registers are hard registers. */
8783 registers_ok_for_quad_peep (rtx reg1, rtx reg2)
8785 /* We might have been passed a SUBREG. */
8786 if (GET_CODE (reg1) != REG || GET_CODE (reg2) != REG)
8789 /* We might have been passed non floating point registers. */
8790 if (!FP_REGNO_P (REGNO (reg1))
8791 || !FP_REGNO_P (REGNO (reg2)))
8794 return (REGNO (reg1) == REGNO (reg2) - 1);
8797 /* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
8798 addr1 and addr2 must be in consecutive memory locations
8799 (addr2 == addr1 + 8). */
8802 mems_ok_for_quad_peep (rtx mem1, rtx mem2)
8808 /* The mems cannot be volatile. */
8809 if (MEM_VOLATILE_P (mem1) || MEM_VOLATILE_P (mem2))
8812 addr1 = XEXP (mem1, 0);
8813 addr2 = XEXP (mem2, 0);
8815 /* Extract an offset (if used) from the first addr. */
8816 if (GET_CODE (addr1) == PLUS)
8818 /* If not a REG, return zero. */
8819 if (GET_CODE (XEXP (addr1, 0)) != REG)
8823 reg1 = REGNO (XEXP (addr1, 0));
8824 /* The offset must be constant! */
8825 if (GET_CODE (XEXP (addr1, 1)) != CONST_INT)
8827 offset1 = INTVAL (XEXP (addr1, 1));
8830 else if (GET_CODE (addr1) != REG)
8834 reg1 = REGNO (addr1);
8835 /* This was a simple (mem (reg)) expression. Offset is 0. */
8839 /* Make sure the second address is a (mem (plus (reg) (const_int)))
8840 or if it is (mem (reg)) then make sure that offset1 is -8 and the same
8841 register as addr1. */
8842 if (offset1 == -8 && GET_CODE (addr2) == REG && reg1 == REGNO (addr2))
8844 if (GET_CODE (addr2) != PLUS)
8847 if (GET_CODE (XEXP (addr2, 0)) != REG
8848 || GET_CODE (XEXP (addr2, 1)) != CONST_INT)
8851 if (reg1 != REGNO (XEXP (addr2, 0)))
8854 /* The offset for the second addr must be 8 more than the first addr. */
8855 if (INTVAL (XEXP (addr2, 1)) != offset1 + 8)
8858 /* All the tests passed. addr1 and addr2 are valid for lfq or stfq
8863 /* Return the register class of a scratch register needed to copy IN into
8864 or out of a register in CLASS in MODE. If it can be done directly,
8865 NO_REGS is returned. */
8868 secondary_reload_class (enum reg_class class,
8869 enum machine_mode mode ATTRIBUTE_UNUSED,
8874 if (TARGET_ELF || (DEFAULT_ABI == ABI_DARWIN
8876 && MACHOPIC_INDIRECT
8880 /* We cannot copy a symbolic operand directly into anything
8881 other than BASE_REGS for TARGET_ELF. So indicate that a
8882 register from BASE_REGS is needed as an intermediate
8885 On Darwin, pic addresses require a load from memory, which
8886 needs a base register. */
8887 if (class != BASE_REGS
8888 && (GET_CODE (in) == SYMBOL_REF
8889 || GET_CODE (in) == HIGH
8890 || GET_CODE (in) == LABEL_REF
8891 || GET_CODE (in) == CONST))
8895 if (GET_CODE (in) == REG)
8898 if (regno >= FIRST_PSEUDO_REGISTER)
8900 regno = true_regnum (in);
8901 if (regno >= FIRST_PSEUDO_REGISTER)
8905 else if (GET_CODE (in) == SUBREG)
8907 regno = true_regnum (in);
8908 if (regno >= FIRST_PSEUDO_REGISTER)
8914 /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
8916 if (class == GENERAL_REGS || class == BASE_REGS
8917 || (regno >= 0 && INT_REGNO_P (regno)))
8920 /* Constants, memory, and FP registers can go into FP registers. */
8921 if ((regno == -1 || FP_REGNO_P (regno))
8922 && (class == FLOAT_REGS || class == NON_SPECIAL_REGS))
8925 /* Memory, and AltiVec registers can go into AltiVec registers. */
8926 if ((regno == -1 || ALTIVEC_REGNO_P (regno))
8927 && class == ALTIVEC_REGS)
8930 /* We can copy among the CR registers. */
8931 if ((class == CR_REGS || class == CR0_REGS)
8932 && regno >= 0 && CR_REGNO_P (regno))
8935 /* Otherwise, we need GENERAL_REGS. */
8936 return GENERAL_REGS;
8939 /* Given a comparison operation, return the bit number in CCR to test. We
8940 know this is a valid comparison.
8942 SCC_P is 1 if this is for an scc. That means that %D will have been
8943 used instead of %C, so the bits will be in different places.
8945 Return -1 if OP isn't a valid comparison for some reason. */
8948 ccr_bit (rtx op, int scc_p)
8950 enum rtx_code code = GET_CODE (op);
8951 enum machine_mode cc_mode;
8956 if (!COMPARISON_P (op))
8961 if (GET_CODE (reg) != REG
8962 || ! CR_REGNO_P (REGNO (reg)))
8965 cc_mode = GET_MODE (reg);
8966 cc_regnum = REGNO (reg);
8967 base_bit = 4 * (cc_regnum - CR0_REGNO);
8969 validate_condition_mode (code, cc_mode);
8971 /* When generating a sCOND operation, only positive conditions are
8973 if (scc_p && code != EQ && code != GT && code != LT && code != UNORDERED
8974 && code != GTU && code != LTU)
8980 return scc_p ? base_bit + 3 : base_bit + 2;
8982 return base_bit + 2;
8983 case GT: case GTU: case UNLE:
8984 return base_bit + 1;
8985 case LT: case LTU: case UNGE:
8987 case ORDERED: case UNORDERED:
8988 return base_bit + 3;
8991 /* If scc, we will have done a cror to put the bit in the
8992 unordered position. So test that bit. For integer, this is ! LT
8993 unless this is an scc insn. */
8994 return scc_p ? base_bit + 3 : base_bit;
8997 return scc_p ? base_bit + 3 : base_bit + 1;
9004 /* Return the GOT register. */
9007 rs6000_got_register (rtx value ATTRIBUTE_UNUSED)
9009 /* The second flow pass currently (June 1999) can't update
9010 regs_ever_live without disturbing other parts of the compiler, so
9011 update it here to make the prolog/epilogue code happy. */
9012 if (no_new_pseudos && ! regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM])
9013 regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
9015 current_function_uses_pic_offset_table = 1;
9017 return pic_offset_table_rtx;
9020 /* Function to init struct machine_function.
9021 This will be called, via a pointer variable,
9022 from push_function_context. */
9024 static struct machine_function *
9025 rs6000_init_machine_status (void)
9027 return ggc_alloc_cleared (sizeof (machine_function));
9030 /* These macros test for integers and extract the low-order bits. */
9032 ((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE) \
9033 && GET_MODE (X) == VOIDmode)
9035 #define INT_LOWPART(X) \
9036 (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
9042 unsigned long val = INT_LOWPART (op);
9044 /* If the high bit is zero, the value is the first 1 bit we find
9046 if ((val & 0x80000000) == 0)
9048 if ((val & 0xffffffff) == 0)
9052 while (((val <<= 1) & 0x80000000) == 0)
9057 /* If the high bit is set and the low bit is not, or the mask is all
9058 1's, the value is zero. */
9059 if ((val & 1) == 0 || (val & 0xffffffff) == 0xffffffff)
9062 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
9065 while (((val >>= 1) & 1) != 0)
9075 unsigned long val = INT_LOWPART (op);
9077 /* If the low bit is zero, the value is the first 1 bit we find from
9081 if ((val & 0xffffffff) == 0)
9085 while (((val >>= 1) & 1) == 0)
9091 /* If the low bit is set and the high bit is not, or the mask is all
9092 1's, the value is 31. */
9093 if ((val & 0x80000000) == 0 || (val & 0xffffffff) == 0xffffffff)
9096 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
9099 while (((val <<= 1) & 0x80000000) != 0)
9105 /* Locate some local-dynamic symbol still in use by this function
9106 so that we can print its name in some tls_ld pattern. */
9109 rs6000_get_some_local_dynamic_name (void)
9113 if (cfun->machine->some_ld_name)
9114 return cfun->machine->some_ld_name;
9116 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
9118 && for_each_rtx (&PATTERN (insn),
9119 rs6000_get_some_local_dynamic_name_1, 0))
9120 return cfun->machine->some_ld_name;
9125 /* Helper function for rs6000_get_some_local_dynamic_name. */
9128 rs6000_get_some_local_dynamic_name_1 (rtx *px, void *data ATTRIBUTE_UNUSED)
9132 if (GET_CODE (x) == SYMBOL_REF)
9134 const char *str = XSTR (x, 0);
9135 if (SYMBOL_REF_TLS_MODEL (x) == TLS_MODEL_LOCAL_DYNAMIC)
9137 cfun->machine->some_ld_name = str;
9145 /* Print an operand. Recognize special options, documented below. */
9148 #define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
9149 #define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
9151 #define SMALL_DATA_RELOC "sda21"
9152 #define SMALL_DATA_REG 0
9156 print_operand (FILE *file, rtx x, int code)
9160 unsigned HOST_WIDE_INT uval;
9165 /* Write out an instruction after the call which may be replaced
9166 with glue code by the loader. This depends on the AIX version. */
9167 asm_fprintf (file, RS6000_CALL_GLUE);
9170 /* %a is output_address. */
9173 /* If X is a constant integer whose low-order 5 bits are zero,
9174 write 'l'. Otherwise, write 'r'. This is a kludge to fix a bug
9175 in the AIX assembler where "sri" with a zero shift count
9176 writes a trash instruction. */
9177 if (GET_CODE (x) == CONST_INT && (INTVAL (x) & 31) == 0)
9184 /* If constant, low-order 16 bits of constant, unsigned.
9185 Otherwise, write normally. */
9187 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 0xffff);
9189 print_operand (file, x, 0);
9193 /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
9194 for 64-bit mask direction. */
9195 putc (((INT_LOWPART(x) & 1) == 0 ? 'r' : 'l'), file);
9198 /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
9202 /* X is a CR register. Print the number of the GT bit of the CR. */
9203 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
9204 output_operand_lossage ("invalid %%E value");
9206 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 1);
9210 /* Like 'J' but get to the GT bit. */
9211 if (GET_CODE (x) != REG)
9214 /* Bit 1 is GT bit. */
9215 i = 4 * (REGNO (x) - CR0_REGNO) + 1;
9217 /* If we want bit 31, write a shift count of zero, not 32. */
9218 fprintf (file, "%d", i == 31 ? 0 : i + 1);
9222 /* X is a CR register. Print the number of the EQ bit of the CR */
9223 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
9224 output_operand_lossage ("invalid %%E value");
9226 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 2);
9230 /* X is a CR register. Print the shift count needed to move it
9231 to the high-order four bits. */
9232 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
9233 output_operand_lossage ("invalid %%f value");
9235 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO));
9239 /* Similar, but print the count for the rotate in the opposite
9241 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
9242 output_operand_lossage ("invalid %%F value");
9244 fprintf (file, "%d", 32 - 4 * (REGNO (x) - CR0_REGNO));
9248 /* X is a constant integer. If it is negative, print "m",
9249 otherwise print "z". This is to make an aze or ame insn. */
9250 if (GET_CODE (x) != CONST_INT)
9251 output_operand_lossage ("invalid %%G value");
9252 else if (INTVAL (x) >= 0)
9259 /* If constant, output low-order five bits. Otherwise, write
9262 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 31);
9264 print_operand (file, x, 0);
9268 /* If constant, output low-order six bits. Otherwise, write
9271 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 63);
9273 print_operand (file, x, 0);
9277 /* Print `i' if this is a constant, else nothing. */
9283 /* Write the bit number in CCR for jump. */
9286 output_operand_lossage ("invalid %%j code");
9288 fprintf (file, "%d", i);
9292 /* Similar, but add one for shift count in rlinm for scc and pass
9293 scc flag to `ccr_bit'. */
9296 output_operand_lossage ("invalid %%J code");
9298 /* If we want bit 31, write a shift count of zero, not 32. */
9299 fprintf (file, "%d", i == 31 ? 0 : i + 1);
9303 /* X must be a constant. Write the 1's complement of the
9306 output_operand_lossage ("invalid %%k value");
9308 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~ INT_LOWPART (x));
9312 /* X must be a symbolic constant on ELF. Write an
9313 expression suitable for an 'addi' that adds in the low 16
9315 if (GET_CODE (x) != CONST)
9317 print_operand_address (file, x);
9322 if (GET_CODE (XEXP (x, 0)) != PLUS
9323 || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
9324 && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
9325 || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
9326 output_operand_lossage ("invalid %%K value");
9327 print_operand_address (file, XEXP (XEXP (x, 0), 0));
9329 /* For GNU as, there must be a non-alphanumeric character
9330 between 'l' and the number. The '-' is added by
9331 print_operand() already. */
9332 if (INTVAL (XEXP (XEXP (x, 0), 1)) >= 0)
9334 print_operand (file, XEXP (XEXP (x, 0), 1), 0);
9338 /* %l is output_asm_label. */
9341 /* Write second word of DImode or DFmode reference. Works on register
9342 or non-indexed memory only. */
9343 if (GET_CODE (x) == REG)
9344 fprintf (file, "%s", reg_names[REGNO (x) + 1]);
9345 else if (GET_CODE (x) == MEM)
9347 /* Handle possible auto-increment. Since it is pre-increment and
9348 we have already done it, we can just use an offset of word. */
9349 if (GET_CODE (XEXP (x, 0)) == PRE_INC
9350 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
9351 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
9354 output_address (XEXP (adjust_address_nv (x, SImode,
9358 if (small_data_operand (x, GET_MODE (x)))
9359 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
9360 reg_names[SMALL_DATA_REG]);
9365 /* MB value for a mask operand. */
9366 if (! mask_operand (x, SImode))
9367 output_operand_lossage ("invalid %%m value");
9369 fprintf (file, "%d", extract_MB (x));
9373 /* ME value for a mask operand. */
9374 if (! mask_operand (x, SImode))
9375 output_operand_lossage ("invalid %%M value");
9377 fprintf (file, "%d", extract_ME (x));
9380 /* %n outputs the negative of its operand. */
9383 /* Write the number of elements in the vector times 4. */
9384 if (GET_CODE (x) != PARALLEL)
9385 output_operand_lossage ("invalid %%N value");
9387 fprintf (file, "%d", XVECLEN (x, 0) * 4);
9391 /* Similar, but subtract 1 first. */
9392 if (GET_CODE (x) != PARALLEL)
9393 output_operand_lossage ("invalid %%O value");
9395 fprintf (file, "%d", (XVECLEN (x, 0) - 1) * 4);
9399 /* X is a CONST_INT that is a power of two. Output the logarithm. */
9401 || INT_LOWPART (x) < 0
9402 || (i = exact_log2 (INT_LOWPART (x))) < 0)
9403 output_operand_lossage ("invalid %%p value");
9405 fprintf (file, "%d", i);
9409 /* The operand must be an indirect memory reference. The result
9410 is the register name. */
9411 if (GET_CODE (x) != MEM || GET_CODE (XEXP (x, 0)) != REG
9412 || REGNO (XEXP (x, 0)) >= 32)
9413 output_operand_lossage ("invalid %%P value");
9415 fprintf (file, "%s", reg_names[REGNO (XEXP (x, 0))]);
9419 /* This outputs the logical code corresponding to a boolean
9420 expression. The expression may have one or both operands
9421 negated (if one, only the first one). For condition register
9422 logical operations, it will also treat the negated
9423 CR codes as NOTs, but not handle NOTs of them. */
9425 const char *const *t = 0;
9427 enum rtx_code code = GET_CODE (x);
9428 static const char * const tbl[3][3] = {
9429 { "and", "andc", "nor" },
9430 { "or", "orc", "nand" },
9431 { "xor", "eqv", "xor" } };
9435 else if (code == IOR)
9437 else if (code == XOR)
9440 output_operand_lossage ("invalid %%q value");
9442 if (GET_CODE (XEXP (x, 0)) != NOT)
9446 if (GET_CODE (XEXP (x, 1)) == NOT)
9464 /* X is a CR register. Print the mask for `mtcrf'. */
9465 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
9466 output_operand_lossage ("invalid %%R value");
9468 fprintf (file, "%d", 128 >> (REGNO (x) - CR0_REGNO));
9472 /* Low 5 bits of 32 - value */
9474 output_operand_lossage ("invalid %%s value");
9476 fprintf (file, HOST_WIDE_INT_PRINT_DEC, (32 - INT_LOWPART (x)) & 31);
9480 /* PowerPC64 mask position. All 0's is excluded.
9481 CONST_INT 32-bit mask is considered sign-extended so any
9482 transition must occur within the CONST_INT, not on the boundary. */
9483 if (! mask64_operand (x, DImode))
9484 output_operand_lossage ("invalid %%S value");
9486 uval = INT_LOWPART (x);
9488 if (uval & 1) /* Clear Left */
9490 #if HOST_BITS_PER_WIDE_INT > 64
9491 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
9495 else /* Clear Right */
9498 #if HOST_BITS_PER_WIDE_INT > 64
9499 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
9507 fprintf (file, "%d", i);
9511 /* Like 'J' but get to the OVERFLOW/UNORDERED bit. */
9512 if (GET_CODE (x) != REG || GET_MODE (x) != CCmode)
9515 /* Bit 3 is OV bit. */
9516 i = 4 * (REGNO (x) - CR0_REGNO) + 3;
9518 /* If we want bit 31, write a shift count of zero, not 32. */
9519 fprintf (file, "%d", i == 31 ? 0 : i + 1);
9523 /* Print the symbolic name of a branch target register. */
9524 if (GET_CODE (x) != REG || (REGNO (x) != LINK_REGISTER_REGNUM
9525 && REGNO (x) != COUNT_REGISTER_REGNUM))
9526 output_operand_lossage ("invalid %%T value");
9527 else if (REGNO (x) == LINK_REGISTER_REGNUM)
9528 fputs (TARGET_NEW_MNEMONICS ? "lr" : "r", file);
9530 fputs ("ctr", file);
9534 /* High-order 16 bits of constant for use in unsigned operand. */
9536 output_operand_lossage ("invalid %%u value");
9538 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
9539 (INT_LOWPART (x) >> 16) & 0xffff);
9543 /* High-order 16 bits of constant for use in signed operand. */
9545 output_operand_lossage ("invalid %%v value");
9547 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
9548 (INT_LOWPART (x) >> 16) & 0xffff);
9552 /* Print `u' if this has an auto-increment or auto-decrement. */
9553 if (GET_CODE (x) == MEM
9554 && (GET_CODE (XEXP (x, 0)) == PRE_INC
9555 || GET_CODE (XEXP (x, 0)) == PRE_DEC))
9560 /* Print the trap code for this operand. */
9561 switch (GET_CODE (x))
9564 fputs ("eq", file); /* 4 */
9567 fputs ("ne", file); /* 24 */
9570 fputs ("lt", file); /* 16 */
9573 fputs ("le", file); /* 20 */
9576 fputs ("gt", file); /* 8 */
9579 fputs ("ge", file); /* 12 */
9582 fputs ("llt", file); /* 2 */
9585 fputs ("lle", file); /* 6 */
9588 fputs ("lgt", file); /* 1 */
9591 fputs ("lge", file); /* 5 */
9599 /* If constant, low-order 16 bits of constant, signed. Otherwise, write
9602 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
9603 ((INT_LOWPART (x) & 0xffff) ^ 0x8000) - 0x8000);
9605 print_operand (file, x, 0);
9609 /* MB value for a PowerPC64 rldic operand. */
9610 val = (GET_CODE (x) == CONST_INT
9611 ? INTVAL (x) : CONST_DOUBLE_HIGH (x));
9616 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
9617 if ((val <<= 1) < 0)
9620 #if HOST_BITS_PER_WIDE_INT == 32
9621 if (GET_CODE (x) == CONST_INT && i >= 0)
9622 i += 32; /* zero-extend high-part was all 0's */
9623 else if (GET_CODE (x) == CONST_DOUBLE && i == 32)
9625 val = CONST_DOUBLE_LOW (x);
9632 for ( ; i < 64; i++)
9633 if ((val <<= 1) < 0)
9638 fprintf (file, "%d", i + 1);
9642 if (GET_CODE (x) == MEM
9643 && legitimate_indexed_address_p (XEXP (x, 0), 0))
9648 /* Like 'L', for third word of TImode */
9649 if (GET_CODE (x) == REG)
9650 fprintf (file, "%s", reg_names[REGNO (x) + 2]);
9651 else if (GET_CODE (x) == MEM)
9653 if (GET_CODE (XEXP (x, 0)) == PRE_INC
9654 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
9655 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
9657 output_address (XEXP (adjust_address_nv (x, SImode, 8), 0));
9658 if (small_data_operand (x, GET_MODE (x)))
9659 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
9660 reg_names[SMALL_DATA_REG]);
9665 /* X is a SYMBOL_REF. Write out the name preceded by a
9666 period and without any trailing data in brackets. Used for function
9667 names. If we are configured for System V (or the embedded ABI) on
9668 the PowerPC, do not emit the period, since those systems do not use
9669 TOCs and the like. */
9670 if (GET_CODE (x) != SYMBOL_REF)
9673 if (XSTR (x, 0)[0] != '.')
9675 switch (DEFAULT_ABI)
9689 /* For macho, we need to check it see if we need a stub. */
9692 const char *name = XSTR (x, 0);
9694 if (machopic_classify_name (name) == MACHOPIC_UNDEFINED_FUNCTION)
9695 name = machopic_stub_name (name);
9697 assemble_name (file, name);
9699 else if (TARGET_AIX)
9700 RS6000_OUTPUT_BASENAME (file, XSTR (x, 0));
9702 assemble_name (file, XSTR (x, 0));
9706 /* Like 'L', for last word of TImode. */
9707 if (GET_CODE (x) == REG)
9708 fprintf (file, "%s", reg_names[REGNO (x) + 3]);
9709 else if (GET_CODE (x) == MEM)
9711 if (GET_CODE (XEXP (x, 0)) == PRE_INC
9712 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
9713 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
9715 output_address (XEXP (adjust_address_nv (x, SImode, 12), 0));
9716 if (small_data_operand (x, GET_MODE (x)))
9717 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
9718 reg_names[SMALL_DATA_REG]);
9722 /* Print AltiVec or SPE memory operand. */
9727 if (GET_CODE (x) != MEM)
9735 if (GET_CODE (tmp) == REG)
9737 fprintf (file, "0(%s)", reg_names[REGNO (tmp)]);
9740 /* Handle [reg+UIMM]. */
9741 else if (GET_CODE (tmp) == PLUS &&
9742 GET_CODE (XEXP (tmp, 1)) == CONST_INT)
9746 if (GET_CODE (XEXP (tmp, 0)) != REG)
9749 x = INTVAL (XEXP (tmp, 1));
9750 fprintf (file, "%d(%s)", x, reg_names[REGNO (XEXP (tmp, 0))]);
9754 /* Fall through. Must be [reg+reg]. */
9756 if (GET_CODE (tmp) == REG)
9757 fprintf (file, "0,%s", reg_names[REGNO (tmp)]);
9758 else if (GET_CODE (tmp) == PLUS && GET_CODE (XEXP (tmp, 1)) == REG)
9760 if (REGNO (XEXP (tmp, 0)) == 0)
9761 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 1)) ],
9762 reg_names[ REGNO (XEXP (tmp, 0)) ]);
9764 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 0)) ],
9765 reg_names[ REGNO (XEXP (tmp, 1)) ]);
9773 if (GET_CODE (x) == REG)
9774 fprintf (file, "%s", reg_names[REGNO (x)]);
9775 else if (GET_CODE (x) == MEM)
9777 /* We need to handle PRE_INC and PRE_DEC here, since we need to
9778 know the width from the mode. */
9779 if (GET_CODE (XEXP (x, 0)) == PRE_INC)
9780 fprintf (file, "%d(%s)", GET_MODE_SIZE (GET_MODE (x)),
9781 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
9782 else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
9783 fprintf (file, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x)),
9784 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
9786 output_address (XEXP (x, 0));
9789 output_addr_const (file, x);
9793 assemble_name (file, rs6000_get_some_local_dynamic_name ());
9797 output_operand_lossage ("invalid %%xn code");
9801 /* Print the address of an operand. */
9804 print_operand_address (FILE *file, rtx x)
9806 if (GET_CODE (x) == REG)
9807 fprintf (file, "0(%s)", reg_names[ REGNO (x) ]);
9808 else if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST
9809 || GET_CODE (x) == LABEL_REF)
9811 output_addr_const (file, x);
9812 if (small_data_operand (x, GET_MODE (x)))
9813 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
9814 reg_names[SMALL_DATA_REG]);
9815 else if (TARGET_TOC)
9818 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG)
9820 if (REGNO (XEXP (x, 0)) == 0)
9821 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 1)) ],
9822 reg_names[ REGNO (XEXP (x, 0)) ]);
9824 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 0)) ],
9825 reg_names[ REGNO (XEXP (x, 1)) ]);
9827 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
9828 fprintf (file, HOST_WIDE_INT_PRINT_DEC "(%s)",
9829 INTVAL (XEXP (x, 1)), reg_names[ REGNO (XEXP (x, 0)) ]);
9831 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
9832 && CONSTANT_P (XEXP (x, 1)))
9834 output_addr_const (file, XEXP (x, 1));
9835 fprintf (file, "@l(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
9839 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
9840 && CONSTANT_P (XEXP (x, 1)))
9842 fprintf (file, "lo16(");
9843 output_addr_const (file, XEXP (x, 1));
9844 fprintf (file, ")(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
9847 else if (legitimate_constant_pool_address_p (x))
9849 if (TARGET_AIX && (!TARGET_ELF || !TARGET_MINIMAL_TOC))
9851 rtx contains_minus = XEXP (x, 1);
9855 /* Find the (minus (sym) (toc)) buried in X, and temporarily
9856 turn it into (sym) for output_addr_const. */
9857 while (GET_CODE (XEXP (contains_minus, 0)) != MINUS)
9858 contains_minus = XEXP (contains_minus, 0);
9860 minus = XEXP (contains_minus, 0);
9861 symref = XEXP (minus, 0);
9862 XEXP (contains_minus, 0) = symref;
9867 name = XSTR (symref, 0);
9868 newname = alloca (strlen (name) + sizeof ("@toc"));
9869 strcpy (newname, name);
9870 strcat (newname, "@toc");
9871 XSTR (symref, 0) = newname;
9873 output_addr_const (file, XEXP (x, 1));
9875 XSTR (symref, 0) = name;
9876 XEXP (contains_minus, 0) = minus;
9879 output_addr_const (file, XEXP (x, 1));
9881 fprintf (file, "(%s)", reg_names[REGNO (XEXP (x, 0))]);
9887 /* Target hook for assembling integer objects. The PowerPC version has
9888 to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
9889 is defined. It also needs to handle DI-mode objects on 64-bit
9893 rs6000_assemble_integer (rtx x, unsigned int size, int aligned_p)
9895 #ifdef RELOCATABLE_NEEDS_FIXUP
9896 /* Special handling for SI values. */
9897 if (size == 4 && aligned_p)
9899 extern int in_toc_section (void);
9900 static int recurse = 0;
9902 /* For -mrelocatable, we mark all addresses that need to be fixed up
9903 in the .fixup section. */
9904 if (TARGET_RELOCATABLE
9905 && !in_toc_section ()
9906 && !in_text_section ()
9907 && !in_unlikely_text_section ()
9909 && GET_CODE (x) != CONST_INT
9910 && GET_CODE (x) != CONST_DOUBLE
9916 ASM_GENERATE_INTERNAL_LABEL (buf, "LCP", fixuplabelno);
9918 ASM_OUTPUT_LABEL (asm_out_file, buf);
9919 fprintf (asm_out_file, "\t.long\t(");
9920 output_addr_const (asm_out_file, x);
9921 fprintf (asm_out_file, ")@fixup\n");
9922 fprintf (asm_out_file, "\t.section\t\".fixup\",\"aw\"\n");
9923 ASM_OUTPUT_ALIGN (asm_out_file, 2);
9924 fprintf (asm_out_file, "\t.long\t");
9925 assemble_name (asm_out_file, buf);
9926 fprintf (asm_out_file, "\n\t.previous\n");
9930 /* Remove initial .'s to turn a -mcall-aixdesc function
9931 address into the address of the descriptor, not the function
9933 else if (GET_CODE (x) == SYMBOL_REF
9934 && XSTR (x, 0)[0] == '.'
9935 && DEFAULT_ABI == ABI_AIX)
9937 const char *name = XSTR (x, 0);
9938 while (*name == '.')
9941 fprintf (asm_out_file, "\t.long\t%s\n", name);
9945 #endif /* RELOCATABLE_NEEDS_FIXUP */
9946 return default_assemble_integer (x, size, aligned_p);
9949 #ifdef HAVE_GAS_HIDDEN
9950 /* Emit an assembler directive to set symbol visibility for DECL to
9954 rs6000_assemble_visibility (tree decl, int vis)
9956 /* Functions need to have their entry point symbol visibility set as
9957 well as their descriptor symbol visibility. */
9958 if (DEFAULT_ABI == ABI_AIX && TREE_CODE (decl) == FUNCTION_DECL)
9960 static const char * const visibility_types[] = {
9961 NULL, "internal", "hidden", "protected"
9964 const char *name, *type;
9966 name = ((* targetm.strip_name_encoding)
9967 (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl))));
9968 type = visibility_types[vis];
9970 fprintf (asm_out_file, "\t.%s\t%s\n", type, name);
9971 fprintf (asm_out_file, "\t.%s\t.%s\n", type, name);
9974 default_assemble_visibility (decl, vis);
9979 rs6000_reverse_condition (enum machine_mode mode, enum rtx_code code)
9981 /* Reversal of FP compares takes care -- an ordered compare
9982 becomes an unordered compare and vice versa. */
9983 if (mode == CCFPmode
9984 && (!flag_finite_math_only
9985 || code == UNLT || code == UNLE || code == UNGT || code == UNGE
9986 || code == UNEQ || code == LTGT))
9987 return reverse_condition_maybe_unordered (code);
9989 return reverse_condition (code);
9992 /* Generate a compare for CODE. Return a brand-new rtx that
9993 represents the result of the compare. */
9996 rs6000_generate_compare (enum rtx_code code)
9998 enum machine_mode comp_mode;
10001 if (rs6000_compare_fp_p)
10002 comp_mode = CCFPmode;
10003 else if (code == GTU || code == LTU
10004 || code == GEU || code == LEU)
10005 comp_mode = CCUNSmode;
10007 comp_mode = CCmode;
10009 /* First, the compare. */
10010 compare_result = gen_reg_rtx (comp_mode);
10012 /* SPE FP compare instructions on the GPRs. Yuck! */
10013 if ((TARGET_E500 && !TARGET_FPRS && TARGET_HARD_FLOAT)
10014 && rs6000_compare_fp_p)
10016 rtx cmp, or1, or2, or_result, compare_result2;
10018 /* Note: The E500 comparison instructions set the GT bit (x +
10019 1), on success. This explains the mess. */
10023 case EQ: case UNEQ: case NE: case LTGT:
10024 cmp = flag_finite_math_only
10025 ? gen_tstsfeq_gpr (compare_result, rs6000_compare_op0,
10026 rs6000_compare_op1)
10027 : gen_cmpsfeq_gpr (compare_result, rs6000_compare_op0,
10028 rs6000_compare_op1);
10030 case GT: case GTU: case UNGT: case UNGE: case GE: case GEU:
10031 cmp = flag_finite_math_only
10032 ? gen_tstsfgt_gpr (compare_result, rs6000_compare_op0,
10033 rs6000_compare_op1)
10034 : gen_cmpsfgt_gpr (compare_result, rs6000_compare_op0,
10035 rs6000_compare_op1);
10037 case LT: case LTU: case UNLT: case UNLE: case LE: case LEU:
10038 cmp = flag_finite_math_only
10039 ? gen_tstsflt_gpr (compare_result, rs6000_compare_op0,
10040 rs6000_compare_op1)
10041 : gen_cmpsflt_gpr (compare_result, rs6000_compare_op0,
10042 rs6000_compare_op1);
10048 /* Synthesize LE and GE from LT/GT || EQ. */
10049 if (code == LE || code == GE || code == LEU || code == GEU)
10055 case LE: code = LT; break;
10056 case GE: code = GT; break;
10057 case LEU: code = LT; break;
10058 case GEU: code = GT; break;
10062 or1 = gen_reg_rtx (SImode);
10063 or2 = gen_reg_rtx (SImode);
10064 or_result = gen_reg_rtx (CCEQmode);
10065 compare_result2 = gen_reg_rtx (CCFPmode);
10068 cmp = flag_finite_math_only
10069 ? gen_tstsfeq_gpr (compare_result2, rs6000_compare_op0,
10070 rs6000_compare_op1)
10071 : gen_cmpsfeq_gpr (compare_result2, rs6000_compare_op0,
10072 rs6000_compare_op1);
10075 or1 = gen_rtx_GT (SImode, compare_result, const0_rtx);
10076 or2 = gen_rtx_GT (SImode, compare_result2, const0_rtx);
10078 /* OR them together. */
10079 cmp = gen_rtx_SET (VOIDmode, or_result,
10080 gen_rtx_COMPARE (CCEQmode,
10081 gen_rtx_IOR (SImode, or1, or2),
10083 compare_result = or_result;
10088 if (code == NE || code == LTGT)
10097 emit_insn (gen_rtx_SET (VOIDmode, compare_result,
10098 gen_rtx_COMPARE (comp_mode,
10099 rs6000_compare_op0,
10100 rs6000_compare_op1)));
10102 /* Some kinds of FP comparisons need an OR operation;
10103 under flag_finite_math_only we don't bother. */
10104 if (rs6000_compare_fp_p
10105 && ! flag_finite_math_only
10106 && ! (TARGET_HARD_FLOAT && TARGET_E500 && !TARGET_FPRS)
10107 && (code == LE || code == GE
10108 || code == UNEQ || code == LTGT
10109 || code == UNGT || code == UNLT))
10111 enum rtx_code or1, or2;
10112 rtx or1_rtx, or2_rtx, compare2_rtx;
10113 rtx or_result = gen_reg_rtx (CCEQmode);
10117 case LE: or1 = LT; or2 = EQ; break;
10118 case GE: or1 = GT; or2 = EQ; break;
10119 case UNEQ: or1 = UNORDERED; or2 = EQ; break;
10120 case LTGT: or1 = LT; or2 = GT; break;
10121 case UNGT: or1 = UNORDERED; or2 = GT; break;
10122 case UNLT: or1 = UNORDERED; or2 = LT; break;
10125 validate_condition_mode (or1, comp_mode);
10126 validate_condition_mode (or2, comp_mode);
10127 or1_rtx = gen_rtx_fmt_ee (or1, SImode, compare_result, const0_rtx);
10128 or2_rtx = gen_rtx_fmt_ee (or2, SImode, compare_result, const0_rtx);
10129 compare2_rtx = gen_rtx_COMPARE (CCEQmode,
10130 gen_rtx_IOR (SImode, or1_rtx, or2_rtx),
10132 emit_insn (gen_rtx_SET (VOIDmode, or_result, compare2_rtx));
10134 compare_result = or_result;
10138 validate_condition_mode (code, GET_MODE (compare_result));
10140 return gen_rtx_fmt_ee (code, VOIDmode, compare_result, const0_rtx);
10144 /* Emit the RTL for an sCOND pattern. */
10147 rs6000_emit_sCOND (enum rtx_code code, rtx result)
10150 enum machine_mode op_mode;
10151 enum rtx_code cond_code;
10153 condition_rtx = rs6000_generate_compare (code);
10154 cond_code = GET_CODE (condition_rtx);
10156 if (TARGET_E500 && rs6000_compare_fp_p
10157 && !TARGET_FPRS && TARGET_HARD_FLOAT)
10161 PUT_MODE (condition_rtx, SImode);
10162 t = XEXP (condition_rtx, 0);
10164 if (cond_code != NE && cond_code != EQ)
10167 if (cond_code == NE)
10168 emit_insn (gen_e500_flip_gt_bit (t, t));
10170 emit_insn (gen_move_from_CR_gt_bit (result, t));
10174 if (cond_code == NE
10175 || cond_code == GE || cond_code == LE
10176 || cond_code == GEU || cond_code == LEU
10177 || cond_code == ORDERED || cond_code == UNGE || cond_code == UNLE)
10179 rtx not_result = gen_reg_rtx (CCEQmode);
10180 rtx not_op, rev_cond_rtx;
10181 enum machine_mode cc_mode;
10183 cc_mode = GET_MODE (XEXP (condition_rtx, 0));
10185 rev_cond_rtx = gen_rtx_fmt_ee (rs6000_reverse_condition (cc_mode, cond_code),
10186 SImode, XEXP (condition_rtx, 0), const0_rtx);
10187 not_op = gen_rtx_COMPARE (CCEQmode, rev_cond_rtx, const0_rtx);
10188 emit_insn (gen_rtx_SET (VOIDmode, not_result, not_op));
10189 condition_rtx = gen_rtx_EQ (VOIDmode, not_result, const0_rtx);
10192 op_mode = GET_MODE (rs6000_compare_op0);
10193 if (op_mode == VOIDmode)
10194 op_mode = GET_MODE (rs6000_compare_op1);
10196 if (TARGET_POWERPC64 && (op_mode == DImode || rs6000_compare_fp_p))
10198 PUT_MODE (condition_rtx, DImode);
10199 convert_move (result, condition_rtx, 0);
10203 PUT_MODE (condition_rtx, SImode);
10204 emit_insn (gen_rtx_SET (VOIDmode, result, condition_rtx));
10208 /* Emit a branch of kind CODE to location LOC. */
10211 rs6000_emit_cbranch (enum rtx_code code, rtx loc)
10213 rtx condition_rtx, loc_ref;
10215 condition_rtx = rs6000_generate_compare (code);
10216 loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
10217 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
10218 gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
10219 loc_ref, pc_rtx)));
10222 /* Return the string to output a conditional branch to LABEL, which is
10223 the operand number of the label, or -1 if the branch is really a
10224 conditional return.
10226 OP is the conditional expression. XEXP (OP, 0) is assumed to be a
10227 condition code register and its mode specifies what kind of
10228 comparison we made.
10230 REVERSED is nonzero if we should reverse the sense of the comparison.
10232 INSN is the insn. */
10235 output_cbranch (rtx op, const char *label, int reversed, rtx insn)
10237 static char string[64];
10238 enum rtx_code code = GET_CODE (op);
10239 rtx cc_reg = XEXP (op, 0);
10240 enum machine_mode mode = GET_MODE (cc_reg);
10241 int cc_regno = REGNO (cc_reg) - CR0_REGNO;
10242 int need_longbranch = label != NULL && get_attr_length (insn) == 8;
10243 int really_reversed = reversed ^ need_longbranch;
10249 validate_condition_mode (code, mode);
10251 /* Work out which way this really branches. We could use
10252 reverse_condition_maybe_unordered here always but this
10253 makes the resulting assembler clearer. */
10254 if (really_reversed)
10256 /* Reversal of FP compares takes care -- an ordered compare
10257 becomes an unordered compare and vice versa. */
10258 if (mode == CCFPmode)
10259 code = reverse_condition_maybe_unordered (code);
10261 code = reverse_condition (code);
10264 if ((TARGET_E500 && !TARGET_FPRS && TARGET_HARD_FLOAT) && mode == CCFPmode)
10266 /* The efscmp/tst* instructions twiddle bit 2, which maps nicely
10269 /* Opposite of GT. */
10271 else if (code == NE)
10279 /* Not all of these are actually distinct opcodes, but
10280 we distinguish them for clarity of the resulting assembler. */
10281 case NE: case LTGT:
10282 ccode = "ne"; break;
10283 case EQ: case UNEQ:
10284 ccode = "eq"; break;
10286 ccode = "ge"; break;
10287 case GT: case GTU: case UNGT:
10288 ccode = "gt"; break;
10290 ccode = "le"; break;
10291 case LT: case LTU: case UNLT:
10292 ccode = "lt"; break;
10293 case UNORDERED: ccode = "un"; break;
10294 case ORDERED: ccode = "nu"; break;
10295 case UNGE: ccode = "nl"; break;
10296 case UNLE: ccode = "ng"; break;
10301 /* Maybe we have a guess as to how likely the branch is.
10302 The old mnemonics don't have a way to specify this information. */
10304 note = find_reg_note (insn, REG_BR_PROB, NULL_RTX);
10305 if (note != NULL_RTX)
10307 /* PROB is the difference from 50%. */
10308 int prob = INTVAL (XEXP (note, 0)) - REG_BR_PROB_BASE / 2;
10310 /* Only hint for highly probable/improbable branches on newer
10311 cpus as static prediction overrides processor dynamic
10312 prediction. For older cpus we may as well always hint, but
10313 assume not taken for branches that are very close to 50% as a
10314 mispredicted taken branch is more expensive than a
10315 mispredicted not-taken branch. */
10316 if (rs6000_always_hint
10317 || abs (prob) > REG_BR_PROB_BASE / 100 * 48)
10319 if (abs (prob) > REG_BR_PROB_BASE / 20
10320 && ((prob > 0) ^ need_longbranch))
10328 s += sprintf (s, "{b%sr|b%slr%s} ", ccode, ccode, pred);
10330 s += sprintf (s, "{b%s|b%s%s} ", ccode, ccode, pred);
10332 /* We need to escape any '%' characters in the reg_names string.
10333 Assume they'd only be the first character.... */
10334 if (reg_names[cc_regno + CR0_REGNO][0] == '%')
10336 s += sprintf (s, "%s", reg_names[cc_regno + CR0_REGNO]);
10340 /* If the branch distance was too far, we may have to use an
10341 unconditional branch to go the distance. */
10342 if (need_longbranch)
10343 s += sprintf (s, ",$+8\n\tb %s", label);
10345 s += sprintf (s, ",%s", label);
10351 /* Return the string to flip the GT bit on a CR. */
10353 output_e500_flip_gt_bit (rtx dst, rtx src)
10355 static char string[64];
10358 if (GET_CODE (dst) != REG || ! CR_REGNO_P (REGNO (dst))
10359 || GET_CODE (src) != REG || ! CR_REGNO_P (REGNO (src)))
10363 a = 4 * (REGNO (dst) - CR0_REGNO) + 1;
10364 b = 4 * (REGNO (src) - CR0_REGNO) + 1;
10366 sprintf (string, "crnot %d,%d", a, b);
10370 /* Emit a conditional move: move TRUE_COND to DEST if OP of the
10371 operands of the last comparison is nonzero/true, FALSE_COND if it
10372 is zero/false. Return 0 if the hardware has no such operation. */
10375 rs6000_emit_cmove (rtx dest, rtx op, rtx true_cond, rtx false_cond)
10377 enum rtx_code code = GET_CODE (op);
10378 rtx op0 = rs6000_compare_op0;
10379 rtx op1 = rs6000_compare_op1;
10380 REAL_VALUE_TYPE c1;
10381 enum machine_mode compare_mode = GET_MODE (op0);
10382 enum machine_mode result_mode = GET_MODE (dest);
10385 /* These modes should always match. */
10386 if (GET_MODE (op1) != compare_mode
10387 /* In the isel case however, we can use a compare immediate, so
10388 op1 may be a small constant. */
10389 && (!TARGET_ISEL || !short_cint_operand (op1, VOIDmode)))
10391 if (GET_MODE (true_cond) != result_mode)
10393 if (GET_MODE (false_cond) != result_mode)
10396 /* First, work out if the hardware can do this at all, or
10397 if it's too slow.... */
10398 if (! rs6000_compare_fp_p)
10401 return rs6000_emit_int_cmove (dest, op, true_cond, false_cond);
10404 else if (TARGET_E500 && TARGET_HARD_FLOAT && !TARGET_FPRS
10405 && GET_MODE_CLASS (compare_mode) == MODE_FLOAT)
10408 /* Eliminate half of the comparisons by switching operands, this
10409 makes the remaining code simpler. */
10410 if (code == UNLT || code == UNGT || code == UNORDERED || code == NE
10411 || code == LTGT || code == LT || code == UNLE)
10413 code = reverse_condition_maybe_unordered (code);
10415 true_cond = false_cond;
10419 /* UNEQ and LTGT take four instructions for a comparison with zero,
10420 it'll probably be faster to use a branch here too. */
10421 if (code == UNEQ && HONOR_NANS (compare_mode))
10424 if (GET_CODE (op1) == CONST_DOUBLE)
10425 REAL_VALUE_FROM_CONST_DOUBLE (c1, op1);
10427 /* We're going to try to implement comparisons by performing
10428 a subtract, then comparing against zero. Unfortunately,
10429 Inf - Inf is NaN which is not zero, and so if we don't
10430 know that the operand is finite and the comparison
10431 would treat EQ different to UNORDERED, we can't do it. */
10432 if (HONOR_INFINITIES (compare_mode)
10433 && code != GT && code != UNGE
10434 && (GET_CODE (op1) != CONST_DOUBLE || real_isinf (&c1))
10435 /* Constructs of the form (a OP b ? a : b) are safe. */
10436 && ((! rtx_equal_p (op0, false_cond) && ! rtx_equal_p (op1, false_cond))
10437 || (! rtx_equal_p (op0, true_cond)
10438 && ! rtx_equal_p (op1, true_cond))))
10440 /* At this point we know we can use fsel. */
10442 /* Reduce the comparison to a comparison against zero. */
10443 temp = gen_reg_rtx (compare_mode);
10444 emit_insn (gen_rtx_SET (VOIDmode, temp,
10445 gen_rtx_MINUS (compare_mode, op0, op1)));
10447 op1 = CONST0_RTX (compare_mode);
10449 /* If we don't care about NaNs we can reduce some of the comparisons
10450 down to faster ones. */
10451 if (! HONOR_NANS (compare_mode))
10457 true_cond = false_cond;
10470 /* Now, reduce everything down to a GE. */
10477 temp = gen_reg_rtx (compare_mode);
10478 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
10483 temp = gen_reg_rtx (compare_mode);
10484 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_ABS (compare_mode, op0)));
10489 temp = gen_reg_rtx (compare_mode);
10490 emit_insn (gen_rtx_SET (VOIDmode, temp,
10491 gen_rtx_NEG (compare_mode,
10492 gen_rtx_ABS (compare_mode, op0))));
10497 /* a UNGE 0 <-> (a GE 0 || -a UNLT 0) */
10498 temp = gen_reg_rtx (result_mode);
10499 emit_insn (gen_rtx_SET (VOIDmode, temp,
10500 gen_rtx_IF_THEN_ELSE (result_mode,
10501 gen_rtx_GE (VOIDmode,
10503 true_cond, false_cond)));
10504 false_cond = true_cond;
10507 temp = gen_reg_rtx (compare_mode);
10508 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
10513 /* a GT 0 <-> (a GE 0 && -a UNLT 0) */
10514 temp = gen_reg_rtx (result_mode);
10515 emit_insn (gen_rtx_SET (VOIDmode, temp,
10516 gen_rtx_IF_THEN_ELSE (result_mode,
10517 gen_rtx_GE (VOIDmode,
10519 true_cond, false_cond)));
10520 true_cond = false_cond;
10523 temp = gen_reg_rtx (compare_mode);
10524 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
10532 emit_insn (gen_rtx_SET (VOIDmode, dest,
10533 gen_rtx_IF_THEN_ELSE (result_mode,
10534 gen_rtx_GE (VOIDmode,
10536 true_cond, false_cond)));
10540 /* Same as above, but for ints (isel). */
10543 rs6000_emit_int_cmove (rtx dest, rtx op, rtx true_cond, rtx false_cond)
10545 rtx condition_rtx, cr;
10547 /* All isel implementations thus far are 32-bits. */
10548 if (GET_MODE (rs6000_compare_op0) != SImode)
10551 /* We still have to do the compare, because isel doesn't do a
10552 compare, it just looks at the CRx bits set by a previous compare
10554 condition_rtx = rs6000_generate_compare (GET_CODE (op));
10555 cr = XEXP (condition_rtx, 0);
10557 if (GET_MODE (cr) == CCmode)
10558 emit_insn (gen_isel_signed (dest, condition_rtx,
10559 true_cond, false_cond, cr));
10561 emit_insn (gen_isel_unsigned (dest, condition_rtx,
10562 true_cond, false_cond, cr));
10568 output_isel (rtx *operands)
10570 enum rtx_code code;
10572 code = GET_CODE (operands[1]);
10573 if (code == GE || code == GEU || code == LE || code == LEU || code == NE)
10575 PUT_CODE (operands[1], reverse_condition (code));
10576 return "isel %0,%3,%2,%j1";
10579 return "isel %0,%2,%3,%j1";
10583 rs6000_emit_minmax (rtx dest, enum rtx_code code, rtx op0, rtx op1)
10585 enum machine_mode mode = GET_MODE (op0);
10589 if (code == SMAX || code == SMIN)
10594 if (code == SMAX || code == UMAX)
10595 target = emit_conditional_move (dest, c, op0, op1, mode,
10596 op0, op1, mode, 0);
10598 target = emit_conditional_move (dest, c, op0, op1, mode,
10599 op1, op0, mode, 0);
10600 if (target == NULL_RTX)
10602 if (target != dest)
10603 emit_move_insn (dest, target);
10606 /* Emit instructions to move SRC to DST. Called by splitters for
10607 multi-register moves. It will emit at most one instruction for
10608 each register that is accessed; that is, it won't emit li/lis pairs
10609 (or equivalent for 64-bit code). One of SRC or DST must be a hard
10613 rs6000_split_multireg_move (rtx dst, rtx src)
10615 /* The register number of the first register being moved. */
10617 /* The mode that is to be moved. */
10618 enum machine_mode mode;
10619 /* The mode that the move is being done in, and its size. */
10620 enum machine_mode reg_mode;
10622 /* The number of registers that will be moved. */
10625 reg = REG_P (dst) ? REGNO (dst) : REGNO (src);
10626 mode = GET_MODE (dst);
10627 nregs = HARD_REGNO_NREGS (reg, mode);
10628 if (FP_REGNO_P (reg))
10630 else if (ALTIVEC_REGNO_P (reg))
10631 reg_mode = V16QImode;
10633 reg_mode = word_mode;
10634 reg_mode_size = GET_MODE_SIZE (reg_mode);
10636 if (reg_mode_size * nregs != GET_MODE_SIZE (mode))
10639 if (REG_P (src) && REG_P (dst) && (REGNO (src) < REGNO (dst)))
10641 /* Move register range backwards, if we might have destructive
10644 for (i = nregs - 1; i >= 0; i--)
10645 emit_insn (gen_rtx_SET (VOIDmode,
10646 simplify_gen_subreg (reg_mode, dst, mode,
10647 i * reg_mode_size),
10648 simplify_gen_subreg (reg_mode, src, mode,
10649 i * reg_mode_size)));
10655 bool used_update = false;
10657 if (GET_CODE (src) == MEM && INT_REGNO_P (reg))
10661 if (GET_CODE (XEXP (src, 0)) == PRE_INC
10662 || GET_CODE (XEXP (src, 0)) == PRE_DEC)
10665 breg = XEXP (XEXP (src, 0), 0);
10666 delta_rtx = GET_CODE (XEXP (src, 0)) == PRE_INC
10667 ? GEN_INT (GET_MODE_SIZE (GET_MODE (src)))
10668 : GEN_INT (-GET_MODE_SIZE (GET_MODE (src)));
10669 emit_insn (TARGET_32BIT
10670 ? gen_addsi3 (breg, breg, delta_rtx)
10671 : gen_adddi3 (breg, breg, delta_rtx));
10672 src = gen_rtx_MEM (mode, breg);
10675 /* We have now address involving an base register only.
10676 If we use one of the registers to address memory,
10677 we have change that register last. */
10679 breg = (GET_CODE (XEXP (src, 0)) == PLUS
10680 ? XEXP (XEXP (src, 0), 0)
10686 if (REGNO (breg) >= REGNO (dst)
10687 && REGNO (breg) < REGNO (dst) + nregs)
10688 j = REGNO (breg) - REGNO (dst);
10691 if (GET_CODE (dst) == MEM && INT_REGNO_P (reg))
10695 if (GET_CODE (XEXP (dst, 0)) == PRE_INC
10696 || GET_CODE (XEXP (dst, 0)) == PRE_DEC)
10699 breg = XEXP (XEXP (dst, 0), 0);
10700 delta_rtx = GET_CODE (XEXP (dst, 0)) == PRE_INC
10701 ? GEN_INT (GET_MODE_SIZE (GET_MODE (dst)))
10702 : GEN_INT (-GET_MODE_SIZE (GET_MODE (dst)));
10704 /* We have to update the breg before doing the store.
10705 Use store with update, if available. */
10709 rtx nsrc = simplify_gen_subreg (reg_mode, src, mode, 0);
10710 emit_insn (TARGET_32BIT
10711 ? gen_movsi_update (breg, breg, delta_rtx, nsrc)
10712 : gen_movdi_update (breg, breg, delta_rtx, nsrc));
10713 used_update = true;
10716 emit_insn (TARGET_32BIT
10717 ? gen_addsi3 (breg, breg, delta_rtx)
10718 : gen_adddi3 (breg, breg, delta_rtx));
10719 dst = gen_rtx_MEM (mode, breg);
10723 for (i = 0; i < nregs; i++)
10725 /* Calculate index to next subword. */
10730 /* If compiler already emited move of first word by
10731 store with update, no need to do anything. */
10732 if (j == 0 && used_update)
10735 emit_insn (gen_rtx_SET (VOIDmode,
10736 simplify_gen_subreg (reg_mode, dst, mode,
10737 j * reg_mode_size),
10738 simplify_gen_subreg (reg_mode, src, mode,
10739 j * reg_mode_size)));
10745 /* This page contains routines that are used to determine what the
10746 function prologue and epilogue code will do and write them out. */
10748 /* Return the first fixed-point register that is required to be
10749 saved. 32 if none. */
10752 first_reg_to_save (void)
10756 /* Find lowest numbered live register. */
10757 for (first_reg = 13; first_reg <= 31; first_reg++)
10758 if (regs_ever_live[first_reg]
10759 && (! call_used_regs[first_reg]
10760 || (first_reg == RS6000_PIC_OFFSET_TABLE_REGNUM
10761 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
10762 || (DEFAULT_ABI == ABI_DARWIN && flag_pic)))))
10767 && current_function_uses_pic_offset_table
10768 && first_reg > RS6000_PIC_OFFSET_TABLE_REGNUM)
10769 return RS6000_PIC_OFFSET_TABLE_REGNUM;
10775 /* Similar, for FP regs. */
10778 first_fp_reg_to_save (void)
10782 /* Find lowest numbered live register. */
10783 for (first_reg = 14 + 32; first_reg <= 63; first_reg++)
10784 if (regs_ever_live[first_reg])
10790 /* Similar, for AltiVec regs. */
10793 first_altivec_reg_to_save (void)
10797 /* Stack frame remains as is unless we are in AltiVec ABI. */
10798 if (! TARGET_ALTIVEC_ABI)
10799 return LAST_ALTIVEC_REGNO + 1;
10801 /* Find lowest numbered live register. */
10802 for (i = FIRST_ALTIVEC_REGNO + 20; i <= LAST_ALTIVEC_REGNO; ++i)
10803 if (regs_ever_live[i])
10809 /* Return a 32-bit mask of the AltiVec registers we need to set in
10810 VRSAVE. Bit n of the return value is 1 if Vn is live. The MSB in
10811 the 32-bit word is 0. */
10813 static unsigned int
10814 compute_vrsave_mask (void)
10816 unsigned int i, mask = 0;
10818 /* First, find out if we use _any_ altivec registers. */
10819 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
10820 if (regs_ever_live[i])
10821 mask |= ALTIVEC_REG_BIT (i);
10826 /* Next, remove the argument registers from the set. These must
10827 be in the VRSAVE mask set by the caller, so we don't need to add
10828 them in again. More importantly, the mask we compute here is
10829 used to generate CLOBBERs in the set_vrsave insn, and we do not
10830 wish the argument registers to die. */
10831 for (i = cfun->args_info.vregno - 1; i >= ALTIVEC_ARG_MIN_REG; --i)
10832 mask &= ~ALTIVEC_REG_BIT (i);
10834 /* Similarly, remove the return value from the set. */
10837 diddle_return_value (is_altivec_return_reg, &yes);
10839 mask &= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN);
10846 is_altivec_return_reg (rtx reg, void *xyes)
10848 bool *yes = (bool *) xyes;
10849 if (REGNO (reg) == ALTIVEC_ARG_RETURN)
10854 /* Calculate the stack information for the current function. This is
10855 complicated by having two separate calling sequences, the AIX calling
10856 sequence and the V.4 calling sequence.
10858 AIX (and Darwin/Mac OS X) stack frames look like:
10860 SP----> +---------------------------------------+
10861 | back chain to caller | 0 0
10862 +---------------------------------------+
10863 | saved CR | 4 8 (8-11)
10864 +---------------------------------------+
10866 +---------------------------------------+
10867 | reserved for compilers | 12 24
10868 +---------------------------------------+
10869 | reserved for binders | 16 32
10870 +---------------------------------------+
10871 | saved TOC pointer | 20 40
10872 +---------------------------------------+
10873 | Parameter save area (P) | 24 48
10874 +---------------------------------------+
10875 | Alloca space (A) | 24+P etc.
10876 +---------------------------------------+
10877 | Local variable space (L) | 24+P+A
10878 +---------------------------------------+
10879 | Float/int conversion temporary (X) | 24+P+A+L
10880 +---------------------------------------+
10881 | Save area for AltiVec registers (W) | 24+P+A+L+X
10882 +---------------------------------------+
10883 | AltiVec alignment padding (Y) | 24+P+A+L+X+W
10884 +---------------------------------------+
10885 | Save area for VRSAVE register (Z) | 24+P+A+L+X+W+Y
10886 +---------------------------------------+
10887 | Save area for GP registers (G) | 24+P+A+X+L+X+W+Y+Z
10888 +---------------------------------------+
10889 | Save area for FP registers (F) | 24+P+A+X+L+X+W+Y+Z+G
10890 +---------------------------------------+
10891 old SP->| back chain to caller's caller |
10892 +---------------------------------------+
10894 The required alignment for AIX configurations is two words (i.e., 8
10898 V.4 stack frames look like:
10900 SP----> +---------------------------------------+
10901 | back chain to caller | 0
10902 +---------------------------------------+
10903 | caller's saved LR | 4
10904 +---------------------------------------+
10905 | Parameter save area (P) | 8
10906 +---------------------------------------+
10907 | Alloca space (A) | 8+P
10908 +---------------------------------------+
10909 | Varargs save area (V) | 8+P+A
10910 +---------------------------------------+
10911 | Local variable space (L) | 8+P+A+V
10912 +---------------------------------------+
10913 | Float/int conversion temporary (X) | 8+P+A+V+L
10914 +---------------------------------------+
10915 | Save area for AltiVec registers (W) | 8+P+A+V+L+X
10916 +---------------------------------------+
10917 | AltiVec alignment padding (Y) | 8+P+A+V+L+X+W
10918 +---------------------------------------+
10919 | Save area for VRSAVE register (Z) | 8+P+A+V+L+X+W+Y
10920 +---------------------------------------+
10921 | SPE: area for 64-bit GP registers |
10922 +---------------------------------------+
10923 | SPE alignment padding |
10924 +---------------------------------------+
10925 | saved CR (C) | 8+P+A+V+L+X+W+Y+Z
10926 +---------------------------------------+
10927 | Save area for GP registers (G) | 8+P+A+V+L+X+W+Y+Z+C
10928 +---------------------------------------+
10929 | Save area for FP registers (F) | 8+P+A+V+L+X+W+Y+Z+C+G
10930 +---------------------------------------+
10931 old SP->| back chain to caller's caller |
10932 +---------------------------------------+
10934 The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
10935 given. (But note below and in sysv4.h that we require only 8 and
10936 may round up the size of our stack frame anyways. The historical
10937 reason is early versions of powerpc-linux which didn't properly
10938 align the stack at program startup. A happy side-effect is that
10939 -mno-eabi libraries can be used with -meabi programs.)
10941 The EABI configuration defaults to the V.4 layout. However,
10942 the stack alignment requirements may differ. If -mno-eabi is not
10943 given, the required stack alignment is 8 bytes; if -mno-eabi is
10944 given, the required alignment is 16 bytes. (But see V.4 comment
10947 #ifndef ABI_STACK_BOUNDARY
10948 #define ABI_STACK_BOUNDARY STACK_BOUNDARY
10951 static rs6000_stack_t *
10952 rs6000_stack_info (void)
10954 static rs6000_stack_t info, zero_info;
10955 rs6000_stack_t *info_ptr = &info;
10956 int reg_size = TARGET_32BIT ? 4 : 8;
10958 HOST_WIDE_INT non_fixed_size;
10960 /* Zero all fields portably. */
10965 /* Cache value so we don't rescan instruction chain over and over. */
10966 if (cfun->machine->insn_chain_scanned_p == 0)
10968 cfun->machine->insn_chain_scanned_p = 1;
10969 info_ptr->spe_64bit_regs_used = (int) spe_func_has_64bit_regs_p ();
10973 /* Select which calling sequence. */
10974 info_ptr->abi = DEFAULT_ABI;
10976 /* Calculate which registers need to be saved & save area size. */
10977 info_ptr->first_gp_reg_save = first_reg_to_save ();
10978 /* Assume that we will have to save RS6000_PIC_OFFSET_TABLE_REGNUM,
10979 even if it currently looks like we won't. */
10980 if (((TARGET_TOC && TARGET_MINIMAL_TOC)
10981 || (flag_pic == 1 && DEFAULT_ABI == ABI_V4)
10982 || (flag_pic && DEFAULT_ABI == ABI_DARWIN))
10983 && info_ptr->first_gp_reg_save > RS6000_PIC_OFFSET_TABLE_REGNUM)
10984 info_ptr->gp_size = reg_size * (32 - RS6000_PIC_OFFSET_TABLE_REGNUM);
10986 info_ptr->gp_size = reg_size * (32 - info_ptr->first_gp_reg_save);
10988 /* For the SPE, we have an additional upper 32-bits on each GPR.
10989 Ideally we should save the entire 64-bits only when the upper
10990 half is used in SIMD instructions. Since we only record
10991 registers live (not the size they are used in), this proves
10992 difficult because we'd have to traverse the instruction chain at
10993 the right time, taking reload into account. This is a real pain,
10994 so we opt to save the GPRs in 64-bits always if but one register
10995 gets used in 64-bits. Otherwise, all the registers in the frame
10996 get saved in 32-bits.
10998 So... since when we save all GPRs (except the SP) in 64-bits, the
10999 traditional GP save area will be empty. */
11000 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
11001 info_ptr->gp_size = 0;
11003 info_ptr->first_fp_reg_save = first_fp_reg_to_save ();
11004 info_ptr->fp_size = 8 * (64 - info_ptr->first_fp_reg_save);
11006 info_ptr->first_altivec_reg_save = first_altivec_reg_to_save ();
11007 info_ptr->altivec_size = 16 * (LAST_ALTIVEC_REGNO + 1
11008 - info_ptr->first_altivec_reg_save);
11010 /* Does this function call anything? */
11011 info_ptr->calls_p = (! current_function_is_leaf
11012 || cfun->machine->ra_needs_full_frame);
11014 /* Determine if we need to save the link register. */
11015 if (rs6000_ra_ever_killed ()
11016 || (DEFAULT_ABI == ABI_AIX
11017 && current_function_profile
11018 && !TARGET_PROFILE_KERNEL)
11019 #ifdef TARGET_RELOCATABLE
11020 || (TARGET_RELOCATABLE && (get_pool_size () != 0))
11022 || (info_ptr->first_fp_reg_save != 64
11023 && !FP_SAVE_INLINE (info_ptr->first_fp_reg_save))
11024 || info_ptr->first_altivec_reg_save <= LAST_ALTIVEC_REGNO
11025 || (DEFAULT_ABI == ABI_V4 && current_function_calls_alloca)
11026 || (DEFAULT_ABI == ABI_DARWIN
11028 && current_function_uses_pic_offset_table)
11029 || info_ptr->calls_p)
11031 info_ptr->lr_save_p = 1;
11032 regs_ever_live[LINK_REGISTER_REGNUM] = 1;
11035 /* Determine if we need to save the condition code registers. */
11036 if (regs_ever_live[CR2_REGNO]
11037 || regs_ever_live[CR3_REGNO]
11038 || regs_ever_live[CR4_REGNO])
11040 info_ptr->cr_save_p = 1;
11041 if (DEFAULT_ABI == ABI_V4)
11042 info_ptr->cr_size = reg_size;
11045 /* If the current function calls __builtin_eh_return, then we need
11046 to allocate stack space for registers that will hold data for
11047 the exception handler. */
11048 if (current_function_calls_eh_return)
11051 for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM; ++i)
11054 /* SPE saves EH registers in 64-bits. */
11055 ehrd_size = i * (TARGET_SPE_ABI
11056 && info_ptr->spe_64bit_regs_used != 0
11057 ? UNITS_PER_SPE_WORD : UNITS_PER_WORD);
11062 /* Determine various sizes. */
11063 info_ptr->reg_size = reg_size;
11064 info_ptr->fixed_size = RS6000_SAVE_AREA;
11065 info_ptr->varargs_size = RS6000_VARARGS_AREA;
11066 info_ptr->vars_size = RS6000_ALIGN (get_frame_size (), 8);
11067 info_ptr->parm_size = RS6000_ALIGN (current_function_outgoing_args_size,
11068 TARGET_ALTIVEC ? 16 : 8);
11070 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
11071 info_ptr->spe_gp_size = 8 * (32 - info_ptr->first_gp_reg_save);
11073 info_ptr->spe_gp_size = 0;
11075 if (TARGET_ALTIVEC_ABI)
11076 info_ptr->vrsave_mask = compute_vrsave_mask ();
11078 info_ptr->vrsave_mask = 0;
11080 if (TARGET_ALTIVEC_VRSAVE && info_ptr->vrsave_mask)
11081 info_ptr->vrsave_size = 4;
11083 info_ptr->vrsave_size = 0;
11085 /* Calculate the offsets. */
11086 switch (DEFAULT_ABI)
11094 info_ptr->fp_save_offset = - info_ptr->fp_size;
11095 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
11097 if (TARGET_ALTIVEC_ABI)
11099 info_ptr->vrsave_save_offset
11100 = info_ptr->gp_save_offset - info_ptr->vrsave_size;
11102 /* Align stack so vector save area is on a quadword boundary. */
11103 if (info_ptr->altivec_size != 0)
11104 info_ptr->altivec_padding_size
11105 = 16 - (-info_ptr->vrsave_save_offset % 16);
11107 info_ptr->altivec_padding_size = 0;
11109 info_ptr->altivec_save_offset
11110 = info_ptr->vrsave_save_offset
11111 - info_ptr->altivec_padding_size
11112 - info_ptr->altivec_size;
11114 /* Adjust for AltiVec case. */
11115 info_ptr->ehrd_offset = info_ptr->altivec_save_offset - ehrd_size;
11118 info_ptr->ehrd_offset = info_ptr->gp_save_offset - ehrd_size;
11119 info_ptr->cr_save_offset = reg_size; /* first word when 64-bit. */
11120 info_ptr->lr_save_offset = 2*reg_size;
11124 info_ptr->fp_save_offset = - info_ptr->fp_size;
11125 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
11126 info_ptr->cr_save_offset = info_ptr->gp_save_offset - info_ptr->cr_size;
11128 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
11130 /* Align stack so SPE GPR save area is aligned on a
11131 double-word boundary. */
11132 if (info_ptr->spe_gp_size != 0)
11133 info_ptr->spe_padding_size
11134 = 8 - (-info_ptr->cr_save_offset % 8);
11136 info_ptr->spe_padding_size = 0;
11138 info_ptr->spe_gp_save_offset
11139 = info_ptr->cr_save_offset
11140 - info_ptr->spe_padding_size
11141 - info_ptr->spe_gp_size;
11143 /* Adjust for SPE case. */
11144 info_ptr->toc_save_offset
11145 = info_ptr->spe_gp_save_offset - info_ptr->toc_size;
11147 else if (TARGET_ALTIVEC_ABI)
11149 info_ptr->vrsave_save_offset
11150 = info_ptr->cr_save_offset - info_ptr->vrsave_size;
11152 /* Align stack so vector save area is on a quadword boundary. */
11153 if (info_ptr->altivec_size != 0)
11154 info_ptr->altivec_padding_size
11155 = 16 - (-info_ptr->vrsave_save_offset % 16);
11157 info_ptr->altivec_padding_size = 0;
11159 info_ptr->altivec_save_offset
11160 = info_ptr->vrsave_save_offset
11161 - info_ptr->altivec_padding_size
11162 - info_ptr->altivec_size;
11164 /* Adjust for AltiVec case. */
11165 info_ptr->toc_save_offset
11166 = info_ptr->altivec_save_offset - info_ptr->toc_size;
11169 info_ptr->toc_save_offset = info_ptr->cr_save_offset - info_ptr->toc_size;
11170 info_ptr->ehrd_offset = info_ptr->toc_save_offset - ehrd_size;
11171 info_ptr->lr_save_offset = reg_size;
11175 info_ptr->save_size = RS6000_ALIGN (info_ptr->fp_size
11176 + info_ptr->gp_size
11177 + info_ptr->altivec_size
11178 + info_ptr->altivec_padding_size
11179 + info_ptr->spe_gp_size
11180 + info_ptr->spe_padding_size
11182 + info_ptr->cr_size
11183 + info_ptr->lr_size
11184 + info_ptr->vrsave_size
11185 + info_ptr->toc_size,
11186 (TARGET_ALTIVEC_ABI || ABI_DARWIN)
11189 non_fixed_size = (info_ptr->vars_size
11190 + info_ptr->parm_size
11191 + info_ptr->save_size
11192 + info_ptr->varargs_size);
11194 info_ptr->total_size = RS6000_ALIGN (non_fixed_size + info_ptr->fixed_size,
11195 ABI_STACK_BOUNDARY / BITS_PER_UNIT);
11197 /* Determine if we need to allocate any stack frame:
11199 For AIX we need to push the stack if a frame pointer is needed
11200 (because the stack might be dynamically adjusted), if we are
11201 debugging, if we make calls, or if the sum of fp_save, gp_save,
11202 and local variables are more than the space needed to save all
11203 non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
11204 + 18*8 = 288 (GPR13 reserved).
11206 For V.4 we don't have the stack cushion that AIX uses, but assume
11207 that the debugger can handle stackless frames. */
11209 if (info_ptr->calls_p)
11210 info_ptr->push_p = 1;
11212 else if (DEFAULT_ABI == ABI_V4)
11213 info_ptr->push_p = non_fixed_size != 0;
11215 else if (frame_pointer_needed)
11216 info_ptr->push_p = 1;
11218 else if (TARGET_XCOFF && write_symbols != NO_DEBUG)
11219 info_ptr->push_p = 1;
11222 info_ptr->push_p = non_fixed_size > (TARGET_32BIT ? 220 : 288);
11224 /* Zero offsets if we're not saving those registers. */
11225 if (info_ptr->fp_size == 0)
11226 info_ptr->fp_save_offset = 0;
11228 if (info_ptr->gp_size == 0)
11229 info_ptr->gp_save_offset = 0;
11231 if (! TARGET_ALTIVEC_ABI || info_ptr->altivec_size == 0)
11232 info_ptr->altivec_save_offset = 0;
11234 if (! TARGET_ALTIVEC_ABI || info_ptr->vrsave_mask == 0)
11235 info_ptr->vrsave_save_offset = 0;
11237 if (! TARGET_SPE_ABI
11238 || info_ptr->spe_64bit_regs_used == 0
11239 || info_ptr->spe_gp_size == 0)
11240 info_ptr->spe_gp_save_offset = 0;
11242 if (! info_ptr->lr_save_p)
11243 info_ptr->lr_save_offset = 0;
11245 if (! info_ptr->cr_save_p)
11246 info_ptr->cr_save_offset = 0;
11248 if (! info_ptr->toc_save_p)
11249 info_ptr->toc_save_offset = 0;
11254 /* Return true if the current function uses any GPRs in 64-bit SIMD
11258 spe_func_has_64bit_regs_p (void)
11262 /* Functions that save and restore all the call-saved registers will
11263 need to save/restore the registers in 64-bits. */
11264 if (current_function_calls_eh_return
11265 || current_function_calls_setjmp
11266 || current_function_has_nonlocal_goto)
11269 insns = get_insns ();
11271 for (insn = NEXT_INSN (insns); insn != NULL_RTX; insn = NEXT_INSN (insn))
11277 i = PATTERN (insn);
11278 if (GET_CODE (i) == SET
11279 && SPE_VECTOR_MODE (GET_MODE (SET_SRC (i))))
11288 debug_stack_info (rs6000_stack_t *info)
11290 const char *abi_string;
11293 info = rs6000_stack_info ();
11295 fprintf (stderr, "\nStack information for function %s:\n",
11296 ((current_function_decl && DECL_NAME (current_function_decl))
11297 ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
11302 default: abi_string = "Unknown"; break;
11303 case ABI_NONE: abi_string = "NONE"; break;
11304 case ABI_AIX: abi_string = "AIX"; break;
11305 case ABI_DARWIN: abi_string = "Darwin"; break;
11306 case ABI_V4: abi_string = "V.4"; break;
11309 fprintf (stderr, "\tABI = %5s\n", abi_string);
11311 if (TARGET_ALTIVEC_ABI)
11312 fprintf (stderr, "\tALTIVEC ABI extensions enabled.\n");
11314 if (TARGET_SPE_ABI)
11315 fprintf (stderr, "\tSPE ABI extensions enabled.\n");
11317 if (info->first_gp_reg_save != 32)
11318 fprintf (stderr, "\tfirst_gp_reg_save = %5d\n", info->first_gp_reg_save);
11320 if (info->first_fp_reg_save != 64)
11321 fprintf (stderr, "\tfirst_fp_reg_save = %5d\n", info->first_fp_reg_save);
11323 if (info->first_altivec_reg_save <= LAST_ALTIVEC_REGNO)
11324 fprintf (stderr, "\tfirst_altivec_reg_save = %5d\n",
11325 info->first_altivec_reg_save);
11327 if (info->lr_save_p)
11328 fprintf (stderr, "\tlr_save_p = %5d\n", info->lr_save_p);
11330 if (info->cr_save_p)
11331 fprintf (stderr, "\tcr_save_p = %5d\n", info->cr_save_p);
11333 if (info->toc_save_p)
11334 fprintf (stderr, "\ttoc_save_p = %5d\n", info->toc_save_p);
11336 if (info->vrsave_mask)
11337 fprintf (stderr, "\tvrsave_mask = 0x%x\n", info->vrsave_mask);
11340 fprintf (stderr, "\tpush_p = %5d\n", info->push_p);
11343 fprintf (stderr, "\tcalls_p = %5d\n", info->calls_p);
11345 if (info->gp_save_offset)
11346 fprintf (stderr, "\tgp_save_offset = %5d\n", info->gp_save_offset);
11348 if (info->fp_save_offset)
11349 fprintf (stderr, "\tfp_save_offset = %5d\n", info->fp_save_offset);
11351 if (info->altivec_save_offset)
11352 fprintf (stderr, "\taltivec_save_offset = %5d\n",
11353 info->altivec_save_offset);
11355 if (info->spe_gp_save_offset)
11356 fprintf (stderr, "\tspe_gp_save_offset = %5d\n",
11357 info->spe_gp_save_offset);
11359 if (info->vrsave_save_offset)
11360 fprintf (stderr, "\tvrsave_save_offset = %5d\n",
11361 info->vrsave_save_offset);
11363 if (info->lr_save_offset)
11364 fprintf (stderr, "\tlr_save_offset = %5d\n", info->lr_save_offset);
11366 if (info->cr_save_offset)
11367 fprintf (stderr, "\tcr_save_offset = %5d\n", info->cr_save_offset);
11369 if (info->toc_save_offset)
11370 fprintf (stderr, "\ttoc_save_offset = %5d\n", info->toc_save_offset);
11372 if (info->varargs_save_offset)
11373 fprintf (stderr, "\tvarargs_save_offset = %5d\n", info->varargs_save_offset);
11375 if (info->total_size)
11376 fprintf (stderr, "\ttotal_size = "HOST_WIDE_INT_PRINT_DEC"\n",
11379 if (info->varargs_size)
11380 fprintf (stderr, "\tvarargs_size = %5d\n", info->varargs_size);
11382 if (info->vars_size)
11383 fprintf (stderr, "\tvars_size = "HOST_WIDE_INT_PRINT_DEC"\n",
11386 if (info->parm_size)
11387 fprintf (stderr, "\tparm_size = %5d\n", info->parm_size);
11389 if (info->fixed_size)
11390 fprintf (stderr, "\tfixed_size = %5d\n", info->fixed_size);
11393 fprintf (stderr, "\tgp_size = %5d\n", info->gp_size);
11395 if (info->spe_gp_size)
11396 fprintf (stderr, "\tspe_gp_size = %5d\n", info->spe_gp_size);
11399 fprintf (stderr, "\tfp_size = %5d\n", info->fp_size);
11401 if (info->altivec_size)
11402 fprintf (stderr, "\taltivec_size = %5d\n", info->altivec_size);
11404 if (info->vrsave_size)
11405 fprintf (stderr, "\tvrsave_size = %5d\n", info->vrsave_size);
11407 if (info->altivec_padding_size)
11408 fprintf (stderr, "\taltivec_padding_size= %5d\n",
11409 info->altivec_padding_size);
11411 if (info->spe_padding_size)
11412 fprintf (stderr, "\tspe_padding_size = %5d\n",
11413 info->spe_padding_size);
11416 fprintf (stderr, "\tlr_size = %5d\n", info->lr_size);
11419 fprintf (stderr, "\tcr_size = %5d\n", info->cr_size);
11421 if (info->toc_size)
11422 fprintf (stderr, "\ttoc_size = %5d\n", info->toc_size);
11424 if (info->save_size)
11425 fprintf (stderr, "\tsave_size = %5d\n", info->save_size);
11427 if (info->reg_size != 4)
11428 fprintf (stderr, "\treg_size = %5d\n", info->reg_size);
11430 fprintf (stderr, "\n");
11434 rs6000_return_addr (int count, rtx frame)
11436 /* Currently we don't optimize very well between prolog and body
11437 code and for PIC code the code can be actually quite bad, so
11438 don't try to be too clever here. */
11439 if (count != 0 || (DEFAULT_ABI != ABI_AIX && flag_pic))
11441 cfun->machine->ra_needs_full_frame = 1;
11448 plus_constant (copy_to_reg
11449 (gen_rtx_MEM (Pmode,
11450 memory_address (Pmode, frame))),
11451 RETURN_ADDRESS_OFFSET)));
11454 cfun->machine->ra_need_lr = 1;
11455 return get_hard_reg_initial_val (Pmode, LINK_REGISTER_REGNUM);
11458 /* Say whether a function is a candidate for sibcall handling or not.
11459 We do not allow indirect calls to be optimized into sibling calls.
11460 Also, we can't do it if there are any vector parameters; there's
11461 nowhere to put the VRsave code so it works; note that functions with
11462 vector parameters are required to have a prototype, so the argument
11463 type info must be available here. (The tail recursion case can work
11464 with vector parameters, but there's no way to distinguish here.) */
11466 rs6000_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
11471 if (TARGET_ALTIVEC_VRSAVE)
11473 for (type = TYPE_ARG_TYPES (TREE_TYPE (decl));
11474 type; type = TREE_CHAIN (type))
11476 if (TREE_CODE (TREE_VALUE (type)) == VECTOR_TYPE)
11480 if (DEFAULT_ABI == ABI_DARWIN
11481 || (*targetm.binds_local_p) (decl))
11483 tree attr_list = TYPE_ATTRIBUTES (TREE_TYPE (decl));
11485 if (!lookup_attribute ("longcall", attr_list)
11486 || lookup_attribute ("shortcall", attr_list))
11494 rs6000_ra_ever_killed (void)
11500 if (current_function_is_thunk)
11503 /* regs_ever_live has LR marked as used if any sibcalls are present,
11504 but this should not force saving and restoring in the
11505 pro/epilogue. Likewise, reg_set_between_p thinks a sibcall
11506 clobbers LR, so that is inappropriate. */
11508 /* Also, the prologue can generate a store into LR that
11509 doesn't really count, like this:
11512 bcl to set PIC register
11516 When we're called from the epilogue, we need to avoid counting
11517 this as a store. */
11519 push_topmost_sequence ();
11520 top = get_insns ();
11521 pop_topmost_sequence ();
11522 reg = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
11524 for (insn = NEXT_INSN (top); insn != NULL_RTX; insn = NEXT_INSN (insn))
11528 if (FIND_REG_INC_NOTE (insn, reg))
11530 else if (GET_CODE (insn) == CALL_INSN
11531 && !SIBLING_CALL_P (insn))
11533 else if (set_of (reg, insn) != NULL_RTX
11534 && !prologue_epilogue_contains (insn))
11541 /* Add a REG_MAYBE_DEAD note to the insn. */
11543 rs6000_maybe_dead (rtx insn)
11545 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD,
11550 /* Emit instructions needed to load the TOC register.
11551 This is only needed when TARGET_TOC, TARGET_MINIMAL_TOC, and there is
11552 a constant pool; or for SVR4 -fpic. */
11555 rs6000_emit_load_toc_table (int fromprolog)
11558 dest = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
11560 if (TARGET_ELF && DEFAULT_ABI == ABI_V4 && flag_pic == 1)
11562 rtx temp = (fromprolog
11563 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
11564 : gen_reg_rtx (Pmode));
11565 insn = emit_insn (gen_load_toc_v4_pic_si (temp));
11567 rs6000_maybe_dead (insn);
11568 insn = emit_move_insn (dest, temp);
11570 rs6000_maybe_dead (insn);
11572 else if (TARGET_ELF && DEFAULT_ABI != ABI_AIX && flag_pic == 2)
11575 rtx tempLR = (fromprolog
11576 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
11577 : gen_reg_rtx (Pmode));
11578 rtx temp0 = (fromprolog
11579 ? gen_rtx_REG (Pmode, 0)
11580 : gen_reg_rtx (Pmode));
11583 /* possibly create the toc section */
11584 if (! toc_initialized)
11587 function_section (current_function_decl);
11594 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
11595 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
11597 ASM_GENERATE_INTERNAL_LABEL (buf, "LCL", rs6000_pic_labelno);
11598 symL = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
11600 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1 (tempLR,
11602 rs6000_maybe_dead (emit_move_insn (dest, tempLR));
11603 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_2 (temp0, dest,
11610 static int reload_toc_labelno = 0;
11612 tocsym = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
11614 ASM_GENERATE_INTERNAL_LABEL (buf, "LCG", reload_toc_labelno++);
11615 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
11617 emit_insn (gen_load_toc_v4_PIC_1b (tempLR, symF, tocsym));
11618 emit_move_insn (dest, tempLR);
11619 emit_move_insn (temp0, gen_rtx_MEM (Pmode, dest));
11621 insn = emit_insn (gen_addsi3 (dest, temp0, dest));
11623 rs6000_maybe_dead (insn);
11625 else if (TARGET_ELF && !TARGET_AIX && flag_pic == 0 && TARGET_MINIMAL_TOC)
11627 /* This is for AIX code running in non-PIC ELF32. */
11630 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
11631 realsym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
11633 insn = emit_insn (gen_elf_high (dest, realsym));
11635 rs6000_maybe_dead (insn);
11636 insn = emit_insn (gen_elf_low (dest, dest, realsym));
11638 rs6000_maybe_dead (insn);
11640 else if (DEFAULT_ABI == ABI_AIX)
11643 insn = emit_insn (gen_load_toc_aix_si (dest));
11645 insn = emit_insn (gen_load_toc_aix_di (dest));
11647 rs6000_maybe_dead (insn);
11653 /* Emit instructions to restore the link register after determining where
11654 its value has been stored. */
11657 rs6000_emit_eh_reg_restore (rtx source, rtx scratch)
11659 rs6000_stack_t *info = rs6000_stack_info ();
11662 operands[0] = source;
11663 operands[1] = scratch;
11665 if (info->lr_save_p)
11667 rtx frame_rtx = stack_pointer_rtx;
11668 HOST_WIDE_INT sp_offset = 0;
11671 if (frame_pointer_needed
11672 || current_function_calls_alloca
11673 || info->total_size > 32767)
11675 emit_move_insn (operands[1], gen_rtx_MEM (Pmode, frame_rtx));
11676 frame_rtx = operands[1];
11678 else if (info->push_p)
11679 sp_offset = info->total_size;
11681 tmp = plus_constant (frame_rtx, info->lr_save_offset + sp_offset);
11682 tmp = gen_rtx_MEM (Pmode, tmp);
11683 emit_move_insn (tmp, operands[0]);
11686 emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM), operands[0]);
11689 static GTY(()) int set = -1;
11692 get_TOC_alias_set (void)
11695 set = new_alias_set ();
11699 /* This returns nonzero if the current function uses the TOC. This is
11700 determined by the presence of (use (unspec ... UNSPEC_TOC)), which
11701 is generated by the ABI_V4 load_toc_* patterns. */
11708 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
11711 rtx pat = PATTERN (insn);
11714 if (GET_CODE (pat) == PARALLEL)
11715 for (i = 0; i < XVECLEN (pat, 0); i++)
11717 rtx sub = XVECEXP (pat, 0, i);
11718 if (GET_CODE (sub) == USE)
11720 sub = XEXP (sub, 0);
11721 if (GET_CODE (sub) == UNSPEC
11722 && XINT (sub, 1) == UNSPEC_TOC)
11732 create_TOC_reference (rtx symbol)
11734 return gen_rtx_PLUS (Pmode,
11735 gen_rtx_REG (Pmode, TOC_REGISTER),
11736 gen_rtx_CONST (Pmode,
11737 gen_rtx_MINUS (Pmode, symbol,
11738 gen_rtx_SYMBOL_REF (Pmode, toc_label_name))));
11741 /* If _Unwind_* has been called from within the same module,
11742 toc register is not guaranteed to be saved to 40(1) on function
11743 entry. Save it there in that case. */
11746 rs6000_aix_emit_builtin_unwind_init (void)
11749 rtx stack_top = gen_reg_rtx (Pmode);
11750 rtx opcode_addr = gen_reg_rtx (Pmode);
11751 rtx opcode = gen_reg_rtx (SImode);
11752 rtx tocompare = gen_reg_rtx (SImode);
11753 rtx no_toc_save_needed = gen_label_rtx ();
11755 mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
11756 emit_move_insn (stack_top, mem);
11758 mem = gen_rtx_MEM (Pmode,
11759 gen_rtx_PLUS (Pmode, stack_top,
11760 GEN_INT (2 * GET_MODE_SIZE (Pmode))));
11761 emit_move_insn (opcode_addr, mem);
11762 emit_move_insn (opcode, gen_rtx_MEM (SImode, opcode_addr));
11763 emit_move_insn (tocompare, gen_int_mode (TARGET_32BIT ? 0x80410014
11764 : 0xE8410028, SImode));
11766 do_compare_rtx_and_jump (opcode, tocompare, EQ, 1,
11767 SImode, NULL_RTX, NULL_RTX,
11768 no_toc_save_needed);
11770 mem = gen_rtx_MEM (Pmode,
11771 gen_rtx_PLUS (Pmode, stack_top,
11772 GEN_INT (5 * GET_MODE_SIZE (Pmode))));
11773 emit_move_insn (mem, gen_rtx_REG (Pmode, 2));
11774 emit_label (no_toc_save_needed);
11777 /* This ties together stack memory (MEM with an alias set of
11778 rs6000_sr_alias_set) and the change to the stack pointer. */
11781 rs6000_emit_stack_tie (void)
11783 rtx mem = gen_rtx_MEM (BLKmode, gen_rtx_REG (Pmode, STACK_POINTER_REGNUM));
11785 set_mem_alias_set (mem, rs6000_sr_alias_set);
11786 emit_insn (gen_stack_tie (mem));
11789 /* Emit the correct code for allocating stack space, as insns.
11790 If COPY_R12, make sure a copy of the old frame is left in r12.
11791 The generated code may use hard register 0 as a temporary. */
11794 rs6000_emit_allocate_stack (HOST_WIDE_INT size, int copy_r12)
11797 rtx stack_reg = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
11798 rtx tmp_reg = gen_rtx_REG (Pmode, 0);
11799 rtx todec = GEN_INT (-size);
11801 if (current_function_limit_stack)
11803 if (REG_P (stack_limit_rtx)
11804 && REGNO (stack_limit_rtx) > 1
11805 && REGNO (stack_limit_rtx) <= 31)
11807 emit_insn (TARGET_32BIT
11808 ? gen_addsi3 (tmp_reg,
11811 : gen_adddi3 (tmp_reg,
11815 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
11818 else if (GET_CODE (stack_limit_rtx) == SYMBOL_REF
11820 && DEFAULT_ABI == ABI_V4)
11822 rtx toload = gen_rtx_CONST (VOIDmode,
11823 gen_rtx_PLUS (Pmode,
11827 emit_insn (gen_elf_high (tmp_reg, toload));
11828 emit_insn (gen_elf_low (tmp_reg, tmp_reg, toload));
11829 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
11833 warning ("stack limit expression is not supported");
11836 if (copy_r12 || ! TARGET_UPDATE)
11837 emit_move_insn (gen_rtx_REG (Pmode, 12), stack_reg);
11843 /* Need a note here so that try_split doesn't get confused. */
11844 if (get_last_insn() == NULL_RTX)
11845 emit_note (NOTE_INSN_DELETED);
11846 insn = emit_move_insn (tmp_reg, todec);
11847 try_split (PATTERN (insn), insn, 0);
11851 insn = emit_insn (TARGET_32BIT
11852 ? gen_movsi_update (stack_reg, stack_reg,
11854 : gen_movdi_update (stack_reg, stack_reg,
11855 todec, stack_reg));
11859 insn = emit_insn (TARGET_32BIT
11860 ? gen_addsi3 (stack_reg, stack_reg, todec)
11861 : gen_adddi3 (stack_reg, stack_reg, todec));
11862 emit_move_insn (gen_rtx_MEM (Pmode, stack_reg),
11863 gen_rtx_REG (Pmode, 12));
11866 RTX_FRAME_RELATED_P (insn) = 1;
11868 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
11869 gen_rtx_SET (VOIDmode, stack_reg,
11870 gen_rtx_PLUS (Pmode, stack_reg,
11875 /* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
11876 with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
11877 is not NULL. It would be nice if dwarf2out_frame_debug_expr could
11878 deduce these equivalences by itself so it wasn't necessary to hold
11879 its hand so much. */
11882 rs6000_frame_related (rtx insn, rtx reg, HOST_WIDE_INT val,
11883 rtx reg2, rtx rreg)
11887 /* copy_rtx will not make unique copies of registers, so we need to
11888 ensure we don't have unwanted sharing here. */
11890 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
11893 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
11895 real = copy_rtx (PATTERN (insn));
11897 if (reg2 != NULL_RTX)
11898 real = replace_rtx (real, reg2, rreg);
11900 real = replace_rtx (real, reg,
11901 gen_rtx_PLUS (Pmode, gen_rtx_REG (Pmode,
11902 STACK_POINTER_REGNUM),
11905 /* We expect that 'real' is either a SET or a PARALLEL containing
11906 SETs (and possibly other stuff). In a PARALLEL, all the SETs
11907 are important so they all have to be marked RTX_FRAME_RELATED_P. */
11909 if (GET_CODE (real) == SET)
11913 temp = simplify_rtx (SET_SRC (set));
11915 SET_SRC (set) = temp;
11916 temp = simplify_rtx (SET_DEST (set));
11918 SET_DEST (set) = temp;
11919 if (GET_CODE (SET_DEST (set)) == MEM)
11921 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
11923 XEXP (SET_DEST (set), 0) = temp;
11926 else if (GET_CODE (real) == PARALLEL)
11929 for (i = 0; i < XVECLEN (real, 0); i++)
11930 if (GET_CODE (XVECEXP (real, 0, i)) == SET)
11932 rtx set = XVECEXP (real, 0, i);
11934 temp = simplify_rtx (SET_SRC (set));
11936 SET_SRC (set) = temp;
11937 temp = simplify_rtx (SET_DEST (set));
11939 SET_DEST (set) = temp;
11940 if (GET_CODE (SET_DEST (set)) == MEM)
11942 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
11944 XEXP (SET_DEST (set), 0) = temp;
11946 RTX_FRAME_RELATED_P (set) = 1;
11953 real = spe_synthesize_frame_save (real);
11955 RTX_FRAME_RELATED_P (insn) = 1;
11956 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
11961 /* Given an SPE frame note, return a PARALLEL of SETs with the
11962 original note, plus a synthetic register save. */
11965 spe_synthesize_frame_save (rtx real)
11967 rtx synth, offset, reg, real2;
11969 if (GET_CODE (real) != SET
11970 || GET_MODE (SET_SRC (real)) != V2SImode)
11973 /* For the SPE, registers saved in 64-bits, get a PARALLEL for their
11974 frame related note. The parallel contains a set of the register
11975 being saved, and another set to a synthetic register (n+1200).
11976 This is so we can differentiate between 64-bit and 32-bit saves.
11977 Words cannot describe this nastiness. */
11979 if (GET_CODE (SET_DEST (real)) != MEM
11980 || GET_CODE (XEXP (SET_DEST (real), 0)) != PLUS
11981 || GET_CODE (SET_SRC (real)) != REG)
11985 (set (mem (plus (reg x) (const y)))
11988 (set (mem (plus (reg x) (const y+4)))
11992 real2 = copy_rtx (real);
11993 PUT_MODE (SET_DEST (real2), SImode);
11994 reg = SET_SRC (real2);
11995 real2 = replace_rtx (real2, reg, gen_rtx_REG (SImode, REGNO (reg)));
11996 synth = copy_rtx (real2);
11998 if (BYTES_BIG_ENDIAN)
12000 offset = XEXP (XEXP (SET_DEST (real2), 0), 1);
12001 real2 = replace_rtx (real2, offset, GEN_INT (INTVAL (offset) + 4));
12004 reg = SET_SRC (synth);
12006 synth = replace_rtx (synth, reg,
12007 gen_rtx_REG (SImode, REGNO (reg) + 1200));
12009 offset = XEXP (XEXP (SET_DEST (synth), 0), 1);
12010 synth = replace_rtx (synth, offset,
12011 GEN_INT (INTVAL (offset)
12012 + (BYTES_BIG_ENDIAN ? 0 : 4)));
12014 RTX_FRAME_RELATED_P (synth) = 1;
12015 RTX_FRAME_RELATED_P (real2) = 1;
12016 if (BYTES_BIG_ENDIAN)
12017 real = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, synth, real2));
12019 real = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, real2, synth));
12024 /* Returns an insn that has a vrsave set operation with the
12025 appropriate CLOBBERs. */
12028 generate_set_vrsave (rtx reg, rs6000_stack_t *info, int epiloguep)
12031 rtx insn, clobs[TOTAL_ALTIVEC_REGS + 1];
12032 rtx vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
12035 = gen_rtx_SET (VOIDmode,
12037 gen_rtx_UNSPEC_VOLATILE (SImode,
12038 gen_rtvec (2, reg, vrsave),
12043 /* We need to clobber the registers in the mask so the scheduler
12044 does not move sets to VRSAVE before sets of AltiVec registers.
12046 However, if the function receives nonlocal gotos, reload will set
12047 all call saved registers live. We will end up with:
12049 (set (reg 999) (mem))
12050 (parallel [ (set (reg vrsave) (unspec blah))
12051 (clobber (reg 999))])
12053 The clobber will cause the store into reg 999 to be dead, and
12054 flow will attempt to delete an epilogue insn. In this case, we
12055 need an unspec use/set of the register. */
12057 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
12058 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
12060 if (!epiloguep || call_used_regs [i])
12061 clobs[nclobs++] = gen_rtx_CLOBBER (VOIDmode,
12062 gen_rtx_REG (V4SImode, i));
12065 rtx reg = gen_rtx_REG (V4SImode, i);
12068 = gen_rtx_SET (VOIDmode,
12070 gen_rtx_UNSPEC (V4SImode,
12071 gen_rtvec (1, reg), 27));
12075 insn = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nclobs));
12077 for (i = 0; i < nclobs; ++i)
12078 XVECEXP (insn, 0, i) = clobs[i];
12083 /* Save a register into the frame, and emit RTX_FRAME_RELATED_P notes.
12084 Save REGNO into [FRAME_REG + OFFSET] in mode MODE. */
12087 emit_frame_save (rtx frame_reg, rtx frame_ptr, enum machine_mode mode,
12088 unsigned int regno, int offset, HOST_WIDE_INT total_size)
12090 rtx reg, offset_rtx, insn, mem, addr, int_rtx;
12091 rtx replacea, replaceb;
12093 int_rtx = GEN_INT (offset);
12095 /* Some cases that need register indexed addressing. */
12096 if ((TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
12098 && SPE_VECTOR_MODE (mode)
12099 && !SPE_CONST_OFFSET_OK (offset)))
12101 /* Whomever calls us must make sure r11 is available in the
12102 flow path of instructions in the prologue. */
12103 offset_rtx = gen_rtx_REG (Pmode, 11);
12104 emit_move_insn (offset_rtx, int_rtx);
12106 replacea = offset_rtx;
12107 replaceb = int_rtx;
12111 offset_rtx = int_rtx;
12112 replacea = NULL_RTX;
12113 replaceb = NULL_RTX;
12116 reg = gen_rtx_REG (mode, regno);
12117 addr = gen_rtx_PLUS (Pmode, frame_reg, offset_rtx);
12118 mem = gen_rtx_MEM (mode, addr);
12119 set_mem_alias_set (mem, rs6000_sr_alias_set);
12121 insn = emit_move_insn (mem, reg);
12123 rs6000_frame_related (insn, frame_ptr, total_size, replacea, replaceb);
12126 /* Emit an offset memory reference suitable for a frame store, while
12127 converting to a valid addressing mode. */
12130 gen_frame_mem_offset (enum machine_mode mode, rtx reg, int offset)
12132 rtx int_rtx, offset_rtx;
12134 int_rtx = GEN_INT (offset);
12136 if (TARGET_SPE_ABI && SPE_VECTOR_MODE (mode))
12138 offset_rtx = gen_rtx_REG (Pmode, FIXED_SCRATCH);
12139 emit_move_insn (offset_rtx, int_rtx);
12142 offset_rtx = int_rtx;
12144 return gen_rtx_MEM (mode, gen_rtx_PLUS (Pmode, reg, offset_rtx));
12147 /* Emit function prologue as insns. */
12150 rs6000_emit_prologue (void)
12152 rs6000_stack_t *info = rs6000_stack_info ();
12153 enum machine_mode reg_mode = Pmode;
12154 int reg_size = TARGET_32BIT ? 4 : 8;
12155 rtx sp_reg_rtx = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
12156 rtx frame_ptr_rtx = gen_rtx_REG (Pmode, 12);
12157 rtx frame_reg_rtx = sp_reg_rtx;
12158 rtx cr_save_rtx = NULL_RTX;
12160 int saving_FPRs_inline;
12161 int using_store_multiple;
12162 HOST_WIDE_INT sp_offset = 0;
12164 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
12166 reg_mode = V2SImode;
12170 using_store_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
12171 && (!TARGET_SPE_ABI
12172 || info->spe_64bit_regs_used == 0)
12173 && info->first_gp_reg_save < 31);
12174 saving_FPRs_inline = (info->first_fp_reg_save == 64
12175 || FP_SAVE_INLINE (info->first_fp_reg_save)
12176 || current_function_calls_eh_return
12177 || cfun->machine->ra_need_lr);
12179 /* For V.4, update stack before we do any saving and set back pointer. */
12181 && (DEFAULT_ABI == ABI_V4
12182 || current_function_calls_eh_return))
12184 if (info->total_size < 32767)
12185 sp_offset = info->total_size;
12187 frame_reg_rtx = frame_ptr_rtx;
12188 rs6000_emit_allocate_stack (info->total_size,
12189 (frame_reg_rtx != sp_reg_rtx
12190 && (info->cr_save_p
12192 || info->first_fp_reg_save < 64
12193 || info->first_gp_reg_save < 32
12195 if (frame_reg_rtx != sp_reg_rtx)
12196 rs6000_emit_stack_tie ();
12199 /* Save AltiVec registers if needed. */
12200 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
12204 /* There should be a non inline version of this, for when we
12205 are saving lots of vector registers. */
12206 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
12207 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
12209 rtx areg, savereg, mem;
12212 offset = info->altivec_save_offset + sp_offset
12213 + 16 * (i - info->first_altivec_reg_save);
12215 savereg = gen_rtx_REG (V4SImode, i);
12217 areg = gen_rtx_REG (Pmode, 0);
12218 emit_move_insn (areg, GEN_INT (offset));
12220 /* AltiVec addressing mode is [reg+reg]. */
12221 mem = gen_rtx_MEM (V4SImode,
12222 gen_rtx_PLUS (Pmode, frame_reg_rtx, areg));
12224 set_mem_alias_set (mem, rs6000_sr_alias_set);
12226 insn = emit_move_insn (mem, savereg);
12228 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
12229 areg, GEN_INT (offset));
12233 /* VRSAVE is a bit vector representing which AltiVec registers
12234 are used. The OS uses this to determine which vector
12235 registers to save on a context switch. We need to save
12236 VRSAVE on the stack frame, add whatever AltiVec registers we
12237 used in this function, and do the corresponding magic in the
12240 if (TARGET_ALTIVEC && TARGET_ALTIVEC_VRSAVE
12241 && info->vrsave_mask != 0)
12243 rtx reg, mem, vrsave;
12246 /* Get VRSAVE onto a GPR. */
12247 reg = gen_rtx_REG (SImode, 12);
12248 vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
12250 emit_insn (gen_get_vrsave_internal (reg));
12252 emit_insn (gen_rtx_SET (VOIDmode, reg, vrsave));
12255 offset = info->vrsave_save_offset + sp_offset;
12257 = gen_rtx_MEM (SImode,
12258 gen_rtx_PLUS (Pmode, frame_reg_rtx, GEN_INT (offset)));
12259 set_mem_alias_set (mem, rs6000_sr_alias_set);
12260 insn = emit_move_insn (mem, reg);
12262 /* Include the registers in the mask. */
12263 emit_insn (gen_iorsi3 (reg, reg, GEN_INT ((int) info->vrsave_mask)));
12265 insn = emit_insn (generate_set_vrsave (reg, info, 0));
12268 /* If we use the link register, get it into r0. */
12269 if (info->lr_save_p)
12270 emit_move_insn (gen_rtx_REG (Pmode, 0),
12271 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
12273 /* If we need to save CR, put it into r12. */
12274 if (info->cr_save_p && frame_reg_rtx != frame_ptr_rtx)
12276 cr_save_rtx = gen_rtx_REG (SImode, 12);
12277 emit_insn (gen_movesi_from_cr (cr_save_rtx));
12280 /* Do any required saving of fpr's. If only one or two to save, do
12281 it ourselves. Otherwise, call function. */
12282 if (saving_FPRs_inline)
12285 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
12286 if ((regs_ever_live[info->first_fp_reg_save+i]
12287 && ! call_used_regs[info->first_fp_reg_save+i]))
12288 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, DFmode,
12289 info->first_fp_reg_save + i,
12290 info->fp_save_offset + sp_offset + 8 * i,
12293 else if (info->first_fp_reg_save != 64)
12297 const char *alloc_rname;
12299 p = rtvec_alloc (2 + 64 - info->first_fp_reg_save);
12301 RTVEC_ELT (p, 0) = gen_rtx_CLOBBER (VOIDmode,
12302 gen_rtx_REG (Pmode,
12303 LINK_REGISTER_REGNUM));
12304 sprintf (rname, "%s%d%s", SAVE_FP_PREFIX,
12305 info->first_fp_reg_save - 32, SAVE_FP_SUFFIX);
12306 alloc_rname = ggc_strdup (rname);
12307 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
12308 gen_rtx_SYMBOL_REF (Pmode,
12310 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
12312 rtx addr, reg, mem;
12313 reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
12314 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12315 GEN_INT (info->fp_save_offset
12316 + sp_offset + 8*i));
12317 mem = gen_rtx_MEM (DFmode, addr);
12318 set_mem_alias_set (mem, rs6000_sr_alias_set);
12320 RTVEC_ELT (p, i + 2) = gen_rtx_SET (VOIDmode, mem, reg);
12322 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
12323 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
12324 NULL_RTX, NULL_RTX);
12327 /* Save GPRs. This is done as a PARALLEL if we are using
12328 the store-multiple instructions. */
12329 if (using_store_multiple)
12333 p = rtvec_alloc (32 - info->first_gp_reg_save);
12334 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
12336 rtx addr, reg, mem;
12337 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
12338 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12339 GEN_INT (info->gp_save_offset
12342 mem = gen_rtx_MEM (reg_mode, addr);
12343 set_mem_alias_set (mem, rs6000_sr_alias_set);
12345 RTVEC_ELT (p, i) = gen_rtx_SET (VOIDmode, mem, reg);
12347 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
12348 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
12349 NULL_RTX, NULL_RTX);
12354 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
12355 if ((regs_ever_live[info->first_gp_reg_save+i]
12356 && ! call_used_regs[info->first_gp_reg_save+i])
12357 || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
12358 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
12359 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
12361 rtx addr, reg, mem;
12362 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
12364 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
12366 int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
12369 if (!SPE_CONST_OFFSET_OK (offset))
12371 b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
12372 emit_move_insn (b, GEN_INT (offset));
12375 b = GEN_INT (offset);
12377 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
12378 mem = gen_rtx_MEM (V2SImode, addr);
12379 set_mem_alias_set (mem, rs6000_sr_alias_set);
12380 insn = emit_move_insn (mem, reg);
12382 if (GET_CODE (b) == CONST_INT)
12383 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
12384 NULL_RTX, NULL_RTX);
12386 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
12387 b, GEN_INT (offset));
12391 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12392 GEN_INT (info->gp_save_offset
12395 mem = gen_rtx_MEM (reg_mode, addr);
12396 set_mem_alias_set (mem, rs6000_sr_alias_set);
12398 insn = emit_move_insn (mem, reg);
12399 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
12400 NULL_RTX, NULL_RTX);
12405 /* ??? There's no need to emit actual instructions here, but it's the
12406 easiest way to get the frame unwind information emitted. */
12407 if (current_function_calls_eh_return)
12409 unsigned int i, regno;
12411 /* In AIX ABI we need to pretend we save r2 here. */
12414 rtx addr, reg, mem;
12416 reg = gen_rtx_REG (reg_mode, 2);
12417 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12418 GEN_INT (sp_offset + 5 * reg_size));
12419 mem = gen_rtx_MEM (reg_mode, addr);
12420 set_mem_alias_set (mem, rs6000_sr_alias_set);
12422 insn = emit_move_insn (mem, reg);
12423 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
12424 NULL_RTX, NULL_RTX);
12425 PATTERN (insn) = gen_blockage ();
12430 regno = EH_RETURN_DATA_REGNO (i);
12431 if (regno == INVALID_REGNUM)
12434 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, reg_mode, regno,
12435 info->ehrd_offset + sp_offset
12436 + reg_size * (int) i,
12441 /* Save lr if we used it. */
12442 if (info->lr_save_p)
12444 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12445 GEN_INT (info->lr_save_offset + sp_offset));
12446 rtx reg = gen_rtx_REG (Pmode, 0);
12447 rtx mem = gen_rtx_MEM (Pmode, addr);
12448 /* This should not be of rs6000_sr_alias_set, because of
12449 __builtin_return_address. */
12451 insn = emit_move_insn (mem, reg);
12452 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
12453 reg, gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
12456 /* Save CR if we use any that must be preserved. */
12457 if (info->cr_save_p)
12459 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12460 GEN_INT (info->cr_save_offset + sp_offset));
12461 rtx mem = gen_rtx_MEM (SImode, addr);
12463 set_mem_alias_set (mem, rs6000_sr_alias_set);
12465 /* If r12 was used to hold the original sp, copy cr into r0 now
12467 if (REGNO (frame_reg_rtx) == 12)
12469 cr_save_rtx = gen_rtx_REG (SImode, 0);
12470 emit_insn (gen_movesi_from_cr (cr_save_rtx));
12472 insn = emit_move_insn (mem, cr_save_rtx);
12474 /* Now, there's no way that dwarf2out_frame_debug_expr is going
12475 to understand '(unspec:SI [(reg:CC 68) ...] UNSPEC_MOVESI_FROM_CR)'.
12476 But that's OK. All we have to do is specify that _one_ condition
12477 code register is saved in this stack slot. The thrower's epilogue
12478 will then restore all the call-saved registers.
12479 We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux. */
12480 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
12481 cr_save_rtx, gen_rtx_REG (SImode, CR2_REGNO));
12484 /* Update stack and set back pointer unless this is V.4,
12485 for which it was done previously. */
12487 && !(DEFAULT_ABI == ABI_V4 || current_function_calls_eh_return))
12488 rs6000_emit_allocate_stack (info->total_size, FALSE);
12490 /* Set frame pointer, if needed. */
12491 if (frame_pointer_needed)
12493 insn = emit_move_insn (gen_rtx_REG (Pmode, FRAME_POINTER_REGNUM),
12495 RTX_FRAME_RELATED_P (insn) = 1;
12498 /* If we are using RS6000_PIC_OFFSET_TABLE_REGNUM, we need to set it up. */
12499 if ((TARGET_TOC && TARGET_MINIMAL_TOC && get_pool_size () != 0)
12500 || (DEFAULT_ABI == ABI_V4 && flag_pic == 1
12501 && regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM]))
12503 /* If emit_load_toc_table will use the link register, we need to save
12504 it. We use R12 for this purpose because emit_load_toc_table
12505 can use register 0. This allows us to use a plain 'blr' to return
12506 from the procedure more often. */
12507 int save_LR_around_toc_setup = (TARGET_ELF
12508 && DEFAULT_ABI != ABI_AIX
12510 && ! info->lr_save_p
12511 && EXIT_BLOCK_PTR->pred != NULL);
12512 if (save_LR_around_toc_setup)
12514 rtx lr = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
12515 rs6000_maybe_dead (emit_move_insn (frame_ptr_rtx, lr));
12516 rs6000_emit_load_toc_table (TRUE);
12517 rs6000_maybe_dead (emit_move_insn (lr, frame_ptr_rtx));
12520 rs6000_emit_load_toc_table (TRUE);
12524 if (DEFAULT_ABI == ABI_DARWIN
12525 && flag_pic && current_function_uses_pic_offset_table)
12527 rtx dest = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
12528 const char *picbase = machopic_function_base_name ();
12529 rtx src = gen_rtx_SYMBOL_REF (Pmode, picbase);
12531 rs6000_maybe_dead (emit_insn (gen_load_macho_picbase (dest, src)));
12533 rs6000_maybe_dead (
12534 emit_move_insn (gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM),
12535 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)));
12540 /* Write function prologue. */
12543 rs6000_output_function_prologue (FILE *file,
12544 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
12546 rs6000_stack_t *info = rs6000_stack_info ();
12548 if (TARGET_DEBUG_STACK)
12549 debug_stack_info (info);
12551 /* Write .extern for any function we will call to save and restore
12553 if (info->first_fp_reg_save < 64
12554 && !FP_SAVE_INLINE (info->first_fp_reg_save))
12555 fprintf (file, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
12556 SAVE_FP_PREFIX, info->first_fp_reg_save - 32, SAVE_FP_SUFFIX,
12557 RESTORE_FP_PREFIX, info->first_fp_reg_save - 32,
12558 RESTORE_FP_SUFFIX);
12560 /* Write .extern for AIX common mode routines, if needed. */
12561 if (! TARGET_POWER && ! TARGET_POWERPC && ! common_mode_defined)
12563 fputs ("\t.extern __mulh\n", file);
12564 fputs ("\t.extern __mull\n", file);
12565 fputs ("\t.extern __divss\n", file);
12566 fputs ("\t.extern __divus\n", file);
12567 fputs ("\t.extern __quoss\n", file);
12568 fputs ("\t.extern __quous\n", file);
12569 common_mode_defined = 1;
12572 if (! HAVE_prologue)
12576 /* A NOTE_INSN_DELETED is supposed to be at the start and end of
12577 the "toplevel" insn chain. */
12578 emit_note (NOTE_INSN_DELETED);
12579 rs6000_emit_prologue ();
12580 emit_note (NOTE_INSN_DELETED);
12582 /* Expand INSN_ADDRESSES so final() doesn't crash. */
12586 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
12588 INSN_ADDRESSES_NEW (insn, addr);
12593 if (TARGET_DEBUG_STACK)
12594 debug_rtx_list (get_insns (), 100);
12595 final (get_insns (), file, FALSE, FALSE);
12599 rs6000_pic_labelno++;
12602 /* Emit function epilogue as insns.
12604 At present, dwarf2out_frame_debug_expr doesn't understand
12605 register restores, so we don't bother setting RTX_FRAME_RELATED_P
12606 anywhere in the epilogue. Most of the insns below would in any case
12607 need special notes to explain where r11 is in relation to the stack. */
12610 rs6000_emit_epilogue (int sibcall)
12612 rs6000_stack_t *info;
12613 int restoring_FPRs_inline;
12614 int using_load_multiple;
12615 int using_mfcr_multiple;
12616 int use_backchain_to_restore_sp;
12618 rtx sp_reg_rtx = gen_rtx_REG (Pmode, 1);
12619 rtx frame_reg_rtx = sp_reg_rtx;
12620 enum machine_mode reg_mode = Pmode;
12621 int reg_size = TARGET_32BIT ? 4 : 8;
12624 info = rs6000_stack_info ();
12626 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
12628 reg_mode = V2SImode;
12632 using_load_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
12633 && (!TARGET_SPE_ABI
12634 || info->spe_64bit_regs_used == 0)
12635 && info->first_gp_reg_save < 31);
12636 restoring_FPRs_inline = (sibcall
12637 || current_function_calls_eh_return
12638 || info->first_fp_reg_save == 64
12639 || FP_SAVE_INLINE (info->first_fp_reg_save));
12640 use_backchain_to_restore_sp = (frame_pointer_needed
12641 || current_function_calls_alloca
12642 || info->total_size > 32767);
12643 using_mfcr_multiple = (rs6000_cpu == PROCESSOR_PPC601
12644 || rs6000_cpu == PROCESSOR_PPC603
12645 || rs6000_cpu == PROCESSOR_PPC750
12648 /* If we have a frame pointer, a call to alloca, or a large stack
12649 frame, restore the old stack pointer using the backchain. Otherwise,
12650 we know what size to update it with. */
12651 if (use_backchain_to_restore_sp)
12653 /* Under V.4, don't reset the stack pointer until after we're done
12654 loading the saved registers. */
12655 if (DEFAULT_ABI == ABI_V4)
12656 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
12658 emit_move_insn (frame_reg_rtx,
12659 gen_rtx_MEM (Pmode, sp_reg_rtx));
12662 else if (info->push_p)
12664 if (DEFAULT_ABI == ABI_V4
12665 || current_function_calls_eh_return)
12666 sp_offset = info->total_size;
12669 emit_insn (TARGET_32BIT
12670 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
12671 GEN_INT (info->total_size))
12672 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
12673 GEN_INT (info->total_size)));
12677 /* Restore AltiVec registers if needed. */
12678 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
12682 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
12683 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
12685 rtx addr, areg, mem;
12687 areg = gen_rtx_REG (Pmode, 0);
12689 (areg, GEN_INT (info->altivec_save_offset
12691 + 16 * (i - info->first_altivec_reg_save)));
12693 /* AltiVec addressing mode is [reg+reg]. */
12694 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
12695 mem = gen_rtx_MEM (V4SImode, addr);
12696 set_mem_alias_set (mem, rs6000_sr_alias_set);
12698 emit_move_insn (gen_rtx_REG (V4SImode, i), mem);
12702 /* Restore VRSAVE if needed. */
12703 if (TARGET_ALTIVEC && TARGET_ALTIVEC_VRSAVE
12704 && info->vrsave_mask != 0)
12706 rtx addr, mem, reg;
12708 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12709 GEN_INT (info->vrsave_save_offset + sp_offset));
12710 mem = gen_rtx_MEM (SImode, addr);
12711 set_mem_alias_set (mem, rs6000_sr_alias_set);
12712 reg = gen_rtx_REG (SImode, 12);
12713 emit_move_insn (reg, mem);
12715 emit_insn (generate_set_vrsave (reg, info, 1));
12718 /* Get the old lr if we saved it. */
12719 if (info->lr_save_p)
12721 rtx mem = gen_frame_mem_offset (Pmode, frame_reg_rtx,
12722 info->lr_save_offset + sp_offset);
12724 set_mem_alias_set (mem, rs6000_sr_alias_set);
12726 emit_move_insn (gen_rtx_REG (Pmode, 0), mem);
12729 /* Get the old cr if we saved it. */
12730 if (info->cr_save_p)
12732 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12733 GEN_INT (info->cr_save_offset + sp_offset));
12734 rtx mem = gen_rtx_MEM (SImode, addr);
12736 set_mem_alias_set (mem, rs6000_sr_alias_set);
12738 emit_move_insn (gen_rtx_REG (SImode, 12), mem);
12741 /* Set LR here to try to overlap restores below. */
12742 if (info->lr_save_p)
12743 emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
12744 gen_rtx_REG (Pmode, 0));
12746 /* Load exception handler data registers, if needed. */
12747 if (current_function_calls_eh_return)
12749 unsigned int i, regno;
12753 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12754 GEN_INT (sp_offset + 5 * reg_size));
12755 rtx mem = gen_rtx_MEM (reg_mode, addr);
12757 set_mem_alias_set (mem, rs6000_sr_alias_set);
12759 emit_move_insn (gen_rtx_REG (reg_mode, 2), mem);
12766 regno = EH_RETURN_DATA_REGNO (i);
12767 if (regno == INVALID_REGNUM)
12770 mem = gen_frame_mem_offset (reg_mode, frame_reg_rtx,
12771 info->ehrd_offset + sp_offset
12772 + reg_size * (int) i);
12773 set_mem_alias_set (mem, rs6000_sr_alias_set);
12775 emit_move_insn (gen_rtx_REG (reg_mode, regno), mem);
12779 /* Restore GPRs. This is done as a PARALLEL if we are using
12780 the load-multiple instructions. */
12781 if (using_load_multiple)
12784 p = rtvec_alloc (32 - info->first_gp_reg_save);
12785 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
12787 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12788 GEN_INT (info->gp_save_offset
12791 rtx mem = gen_rtx_MEM (reg_mode, addr);
12793 set_mem_alias_set (mem, rs6000_sr_alias_set);
12796 gen_rtx_SET (VOIDmode,
12797 gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
12800 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
12803 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
12804 if ((regs_ever_live[info->first_gp_reg_save+i]
12805 && ! call_used_regs[info->first_gp_reg_save+i])
12806 || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
12807 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
12808 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
12810 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12811 GEN_INT (info->gp_save_offset
12814 rtx mem = gen_rtx_MEM (reg_mode, addr);
12816 /* Restore 64-bit quantities for SPE. */
12817 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
12819 int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
12822 if (!SPE_CONST_OFFSET_OK (offset))
12824 b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
12825 emit_move_insn (b, GEN_INT (offset));
12828 b = GEN_INT (offset);
12830 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
12831 mem = gen_rtx_MEM (V2SImode, addr);
12834 set_mem_alias_set (mem, rs6000_sr_alias_set);
12836 emit_move_insn (gen_rtx_REG (reg_mode,
12837 info->first_gp_reg_save + i), mem);
12840 /* Restore fpr's if we need to do it without calling a function. */
12841 if (restoring_FPRs_inline)
12842 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
12843 if ((regs_ever_live[info->first_fp_reg_save+i]
12844 && ! call_used_regs[info->first_fp_reg_save+i]))
12847 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12848 GEN_INT (info->fp_save_offset
12851 mem = gen_rtx_MEM (DFmode, addr);
12852 set_mem_alias_set (mem, rs6000_sr_alias_set);
12854 emit_move_insn (gen_rtx_REG (DFmode,
12855 info->first_fp_reg_save + i),
12859 /* If we saved cr, restore it here. Just those that were used. */
12860 if (info->cr_save_p)
12862 rtx r12_rtx = gen_rtx_REG (SImode, 12);
12865 if (using_mfcr_multiple)
12867 for (i = 0; i < 8; i++)
12868 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
12874 if (using_mfcr_multiple && count > 1)
12879 p = rtvec_alloc (count);
12882 for (i = 0; i < 8; i++)
12883 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
12885 rtvec r = rtvec_alloc (2);
12886 RTVEC_ELT (r, 0) = r12_rtx;
12887 RTVEC_ELT (r, 1) = GEN_INT (1 << (7-i));
12888 RTVEC_ELT (p, ndx) =
12889 gen_rtx_SET (VOIDmode, gen_rtx_REG (CCmode, CR0_REGNO+i),
12890 gen_rtx_UNSPEC (CCmode, r, UNSPEC_MOVESI_TO_CR));
12893 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
12898 for (i = 0; i < 8; i++)
12899 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
12901 emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode,
12907 /* If this is V.4, unwind the stack pointer after all of the loads
12908 have been done. We need to emit a block here so that sched
12909 doesn't decide to move the sp change before the register restores
12910 (which may not have any obvious dependency on the stack). This
12911 doesn't hurt performance, because there is no scheduling that can
12912 be done after this point. */
12913 if (DEFAULT_ABI == ABI_V4
12914 || current_function_calls_eh_return)
12916 if (frame_reg_rtx != sp_reg_rtx)
12917 rs6000_emit_stack_tie ();
12919 if (use_backchain_to_restore_sp)
12921 emit_move_insn (sp_reg_rtx, frame_reg_rtx);
12923 else if (sp_offset != 0)
12925 emit_insn (TARGET_32BIT
12926 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
12927 GEN_INT (sp_offset))
12928 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
12929 GEN_INT (sp_offset)));
12933 if (current_function_calls_eh_return)
12935 rtx sa = EH_RETURN_STACKADJ_RTX;
12936 emit_insn (TARGET_32BIT
12937 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx, sa)
12938 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx, sa));
12944 if (! restoring_FPRs_inline)
12945 p = rtvec_alloc (3 + 64 - info->first_fp_reg_save);
12947 p = rtvec_alloc (2);
12949 RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
12950 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
12951 gen_rtx_REG (Pmode,
12952 LINK_REGISTER_REGNUM));
12954 /* If we have to restore more than two FP registers, branch to the
12955 restore function. It will return to our caller. */
12956 if (! restoring_FPRs_inline)
12960 const char *alloc_rname;
12962 sprintf (rname, "%s%d%s", RESTORE_FP_PREFIX,
12963 info->first_fp_reg_save - 32, RESTORE_FP_SUFFIX);
12964 alloc_rname = ggc_strdup (rname);
12965 RTVEC_ELT (p, 2) = gen_rtx_USE (VOIDmode,
12966 gen_rtx_SYMBOL_REF (Pmode,
12969 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
12972 addr = gen_rtx_PLUS (Pmode, sp_reg_rtx,
12973 GEN_INT (info->fp_save_offset + 8*i));
12974 mem = gen_rtx_MEM (DFmode, addr);
12975 set_mem_alias_set (mem, rs6000_sr_alias_set);
12977 RTVEC_ELT (p, i+3) =
12978 gen_rtx_SET (VOIDmode,
12979 gen_rtx_REG (DFmode, info->first_fp_reg_save + i),
12984 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
12988 /* Write function epilogue. */
12991 rs6000_output_function_epilogue (FILE *file,
12992 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
12994 rs6000_stack_t *info = rs6000_stack_info ();
12996 if (! HAVE_epilogue)
12998 rtx insn = get_last_insn ();
12999 /* If the last insn was a BARRIER, we don't have to write anything except
13000 the trace table. */
13001 if (GET_CODE (insn) == NOTE)
13002 insn = prev_nonnote_insn (insn);
13003 if (insn == 0 || GET_CODE (insn) != BARRIER)
13005 /* This is slightly ugly, but at least we don't have two
13006 copies of the epilogue-emitting code. */
13009 /* A NOTE_INSN_DELETED is supposed to be at the start
13010 and end of the "toplevel" insn chain. */
13011 emit_note (NOTE_INSN_DELETED);
13012 rs6000_emit_epilogue (FALSE);
13013 emit_note (NOTE_INSN_DELETED);
13015 /* Expand INSN_ADDRESSES so final() doesn't crash. */
13019 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
13021 INSN_ADDRESSES_NEW (insn, addr);
13026 if (TARGET_DEBUG_STACK)
13027 debug_rtx_list (get_insns (), 100);
13028 final (get_insns (), file, FALSE, FALSE);
13034 macho_branch_islands ();
13035 /* Mach-O doesn't support labels at the end of objects, so if
13036 it looks like we might want one, insert a NOP. */
13038 rtx insn = get_last_insn ();
13041 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_DELETED_LABEL)
13042 insn = PREV_INSN (insn);
13046 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_DELETED_LABEL)))
13047 fputs ("\tnop\n", file);
13051 /* Output a traceback table here. See /usr/include/sys/debug.h for info
13054 We don't output a traceback table if -finhibit-size-directive was
13055 used. The documentation for -finhibit-size-directive reads
13056 ``don't output a @code{.size} assembler directive, or anything
13057 else that would cause trouble if the function is split in the
13058 middle, and the two halves are placed at locations far apart in
13059 memory.'' The traceback table has this property, since it
13060 includes the offset from the start of the function to the
13061 traceback table itself.
13063 System V.4 Powerpc's (and the embedded ABI derived from it) use a
13064 different traceback table. */
13065 if (DEFAULT_ABI == ABI_AIX && ! flag_inhibit_size_directive
13066 && rs6000_traceback != traceback_none)
13068 const char *fname = NULL;
13069 const char *language_string = lang_hooks.name;
13070 int fixed_parms = 0, float_parms = 0, parm_info = 0;
13072 int optional_tbtab;
13074 if (rs6000_traceback == traceback_full)
13075 optional_tbtab = 1;
13076 else if (rs6000_traceback == traceback_part)
13077 optional_tbtab = 0;
13079 optional_tbtab = !optimize_size && !TARGET_ELF;
13081 if (optional_tbtab)
13083 fname = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
13084 while (*fname == '.') /* V.4 encodes . in the name */
13087 /* Need label immediately before tbtab, so we can compute
13088 its offset from the function start. */
13089 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
13090 ASM_OUTPUT_LABEL (file, fname);
13093 /* The .tbtab pseudo-op can only be used for the first eight
13094 expressions, since it can't handle the possibly variable
13095 length fields that follow. However, if you omit the optional
13096 fields, the assembler outputs zeros for all optional fields
13097 anyways, giving each variable length field is minimum length
13098 (as defined in sys/debug.h). Thus we can not use the .tbtab
13099 pseudo-op at all. */
13101 /* An all-zero word flags the start of the tbtab, for debuggers
13102 that have to find it by searching forward from the entry
13103 point or from the current pc. */
13104 fputs ("\t.long 0\n", file);
13106 /* Tbtab format type. Use format type 0. */
13107 fputs ("\t.byte 0,", file);
13109 /* Language type. Unfortunately, there does not seem to be any
13110 official way to discover the language being compiled, so we
13111 use language_string.
13112 C is 0. Fortran is 1. Pascal is 2. Ada is 3. C++ is 9.
13113 Java is 13. Objective-C is 14. */
13114 if (! strcmp (language_string, "GNU C"))
13116 else if (! strcmp (language_string, "GNU F77"))
13118 else if (! strcmp (language_string, "GNU Pascal"))
13120 else if (! strcmp (language_string, "GNU Ada"))
13122 else if (! strcmp (language_string, "GNU C++"))
13124 else if (! strcmp (language_string, "GNU Java"))
13126 else if (! strcmp (language_string, "GNU Objective-C"))
13130 fprintf (file, "%d,", i);
13132 /* 8 single bit fields: global linkage (not set for C extern linkage,
13133 apparently a PL/I convention?), out-of-line epilogue/prologue, offset
13134 from start of procedure stored in tbtab, internal function, function
13135 has controlled storage, function has no toc, function uses fp,
13136 function logs/aborts fp operations. */
13137 /* Assume that fp operations are used if any fp reg must be saved. */
13138 fprintf (file, "%d,",
13139 (optional_tbtab << 5) | ((info->first_fp_reg_save != 64) << 1));
13141 /* 6 bitfields: function is interrupt handler, name present in
13142 proc table, function calls alloca, on condition directives
13143 (controls stack walks, 3 bits), saves condition reg, saves
13145 /* The `function calls alloca' bit seems to be set whenever reg 31 is
13146 set up as a frame pointer, even when there is no alloca call. */
13147 fprintf (file, "%d,",
13148 ((optional_tbtab << 6)
13149 | ((optional_tbtab & frame_pointer_needed) << 5)
13150 | (info->cr_save_p << 1)
13151 | (info->lr_save_p)));
13153 /* 3 bitfields: saves backchain, fixup code, number of fpr saved
13155 fprintf (file, "%d,",
13156 (info->push_p << 7) | (64 - info->first_fp_reg_save));
13158 /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits). */
13159 fprintf (file, "%d,", (32 - first_reg_to_save ()));
13161 if (optional_tbtab)
13163 /* Compute the parameter info from the function decl argument
13166 int next_parm_info_bit = 31;
13168 for (decl = DECL_ARGUMENTS (current_function_decl);
13169 decl; decl = TREE_CHAIN (decl))
13171 rtx parameter = DECL_INCOMING_RTL (decl);
13172 enum machine_mode mode = GET_MODE (parameter);
13174 if (GET_CODE (parameter) == REG)
13176 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
13182 if (mode == SFmode)
13184 else if (mode == DFmode || mode == TFmode)
13189 /* If only one bit will fit, don't or in this entry. */
13190 if (next_parm_info_bit > 0)
13191 parm_info |= (bits << (next_parm_info_bit - 1));
13192 next_parm_info_bit -= 2;
13196 fixed_parms += ((GET_MODE_SIZE (mode)
13197 + (UNITS_PER_WORD - 1))
13199 next_parm_info_bit -= 1;
13205 /* Number of fixed point parameters. */
13206 /* This is actually the number of words of fixed point parameters; thus
13207 an 8 byte struct counts as 2; and thus the maximum value is 8. */
13208 fprintf (file, "%d,", fixed_parms);
13210 /* 2 bitfields: number of floating point parameters (7 bits), parameters
13212 /* This is actually the number of fp registers that hold parameters;
13213 and thus the maximum value is 13. */
13214 /* Set parameters on stack bit if parameters are not in their original
13215 registers, regardless of whether they are on the stack? Xlc
13216 seems to set the bit when not optimizing. */
13217 fprintf (file, "%d\n", ((float_parms << 1) | (! optimize)));
13219 if (! optional_tbtab)
13222 /* Optional fields follow. Some are variable length. */
13224 /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
13225 11 double float. */
13226 /* There is an entry for each parameter in a register, in the order that
13227 they occur in the parameter list. Any intervening arguments on the
13228 stack are ignored. If the list overflows a long (max possible length
13229 34 bits) then completely leave off all elements that don't fit. */
13230 /* Only emit this long if there was at least one parameter. */
13231 if (fixed_parms || float_parms)
13232 fprintf (file, "\t.long %d\n", parm_info);
13234 /* Offset from start of code to tb table. */
13235 fputs ("\t.long ", file);
13236 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
13238 RS6000_OUTPUT_BASENAME (file, fname);
13240 assemble_name (file, fname);
13242 fputs ("-.", file);
13244 RS6000_OUTPUT_BASENAME (file, fname);
13246 assemble_name (file, fname);
13250 /* Interrupt handler mask. */
13251 /* Omit this long, since we never set the interrupt handler bit
13254 /* Number of CTL (controlled storage) anchors. */
13255 /* Omit this long, since the has_ctl bit is never set above. */
13257 /* Displacement into stack of each CTL anchor. */
13258 /* Omit this list of longs, because there are no CTL anchors. */
13260 /* Length of function name. */
13263 fprintf (file, "\t.short %d\n", (int) strlen (fname));
13265 /* Function name. */
13266 assemble_string (fname, strlen (fname));
13268 /* Register for alloca automatic storage; this is always reg 31.
13269 Only emit this if the alloca bit was set above. */
13270 if (frame_pointer_needed)
13271 fputs ("\t.byte 31\n", file);
13273 fputs ("\t.align 2\n", file);
13277 /* A C compound statement that outputs the assembler code for a thunk
13278 function, used to implement C++ virtual function calls with
13279 multiple inheritance. The thunk acts as a wrapper around a virtual
13280 function, adjusting the implicit object parameter before handing
13281 control off to the real function.
13283 First, emit code to add the integer DELTA to the location that
13284 contains the incoming first argument. Assume that this argument
13285 contains a pointer, and is the one used to pass the `this' pointer
13286 in C++. This is the incoming argument *before* the function
13287 prologue, e.g. `%o0' on a sparc. The addition must preserve the
13288 values of all other incoming arguments.
13290 After the addition, emit code to jump to FUNCTION, which is a
13291 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does
13292 not touch the return address. Hence returning from FUNCTION will
13293 return to whoever called the current `thunk'.
13295 The effect must be as if FUNCTION had been called directly with the
13296 adjusted first argument. This macro is responsible for emitting
13297 all of the code for a thunk function; output_function_prologue()
13298 and output_function_epilogue() are not invoked.
13300 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already
13301 been extracted from it.) It might possibly be useful on some
13302 targets, but probably not.
13304 If you do not define this macro, the target-independent code in the
13305 C++ frontend will generate a less efficient heavyweight thunk that
13306 calls FUNCTION instead of jumping to it. The generic approach does
13307 not support varargs. */
13310 rs6000_output_mi_thunk (FILE *file, tree thunk_fndecl ATTRIBUTE_UNUSED,
13311 HOST_WIDE_INT delta, HOST_WIDE_INT vcall_offset,
13314 rtx this, insn, funexp;
13316 reload_completed = 1;
13317 epilogue_completed = 1;
13318 no_new_pseudos = 1;
13320 /* Mark the end of the (empty) prologue. */
13321 emit_note (NOTE_INSN_PROLOGUE_END);
13323 /* Find the "this" pointer. If the function returns a structure,
13324 the structure return pointer is in r3. */
13325 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
13326 this = gen_rtx_REG (Pmode, 4);
13328 this = gen_rtx_REG (Pmode, 3);
13330 /* Apply the constant offset, if required. */
13333 rtx delta_rtx = GEN_INT (delta);
13334 emit_insn (TARGET_32BIT
13335 ? gen_addsi3 (this, this, delta_rtx)
13336 : gen_adddi3 (this, this, delta_rtx));
13339 /* Apply the offset from the vtable, if required. */
13342 rtx vcall_offset_rtx = GEN_INT (vcall_offset);
13343 rtx tmp = gen_rtx_REG (Pmode, 12);
13345 emit_move_insn (tmp, gen_rtx_MEM (Pmode, this));
13346 if (((unsigned HOST_WIDE_INT) vcall_offset) + 0x8000 >= 0x10000)
13348 emit_insn (TARGET_32BIT
13349 ? gen_addsi3 (tmp, tmp, vcall_offset_rtx)
13350 : gen_adddi3 (tmp, tmp, vcall_offset_rtx));
13351 emit_move_insn (tmp, gen_rtx_MEM (Pmode, tmp));
13355 rtx loc = gen_rtx_PLUS (Pmode, tmp, vcall_offset_rtx);
13357 emit_move_insn (tmp, gen_rtx_MEM (Pmode, loc));
13359 emit_insn (TARGET_32BIT
13360 ? gen_addsi3 (this, this, tmp)
13361 : gen_adddi3 (this, this, tmp));
13364 /* Generate a tail call to the target function. */
13365 if (!TREE_USED (function))
13367 assemble_external (function);
13368 TREE_USED (function) = 1;
13370 funexp = XEXP (DECL_RTL (function), 0);
13371 funexp = gen_rtx_MEM (FUNCTION_MODE, funexp);
13374 if (MACHOPIC_INDIRECT)
13375 funexp = machopic_indirect_call_target (funexp);
13378 /* gen_sibcall expects reload to convert scratch pseudo to LR so we must
13379 generate sibcall RTL explicitly to avoid constraint abort. */
13380 insn = emit_call_insn (
13381 gen_rtx_PARALLEL (VOIDmode,
13383 gen_rtx_CALL (VOIDmode,
13384 funexp, const0_rtx),
13385 gen_rtx_USE (VOIDmode, const0_rtx),
13386 gen_rtx_USE (VOIDmode,
13387 gen_rtx_REG (SImode,
13388 LINK_REGISTER_REGNUM)),
13389 gen_rtx_RETURN (VOIDmode))));
13390 SIBLING_CALL_P (insn) = 1;
13393 /* Run just enough of rest_of_compilation to get the insns emitted.
13394 There's not really enough bulk here to make other passes such as
13395 instruction scheduling worth while. Note that use_thunk calls
13396 assemble_start_function and assemble_end_function. */
13397 insn = get_insns ();
13398 insn_locators_initialize ();
13399 shorten_branches (insn);
13400 final_start_function (insn, file, 1);
13401 final (insn, file, 1, 0);
13402 final_end_function ();
13404 reload_completed = 0;
13405 epilogue_completed = 0;
13406 no_new_pseudos = 0;
13409 /* A quick summary of the various types of 'constant-pool tables'
13412 Target Flags Name One table per
13413 AIX (none) AIX TOC object file
13414 AIX -mfull-toc AIX TOC object file
13415 AIX -mminimal-toc AIX minimal TOC translation unit
13416 SVR4/EABI (none) SVR4 SDATA object file
13417 SVR4/EABI -fpic SVR4 pic object file
13418 SVR4/EABI -fPIC SVR4 PIC translation unit
13419 SVR4/EABI -mrelocatable EABI TOC function
13420 SVR4/EABI -maix AIX TOC object file
13421 SVR4/EABI -maix -mminimal-toc
13422 AIX minimal TOC translation unit
13424 Name Reg. Set by entries contains:
13425 made by addrs? fp? sum?
13427 AIX TOC 2 crt0 as Y option option
13428 AIX minimal TOC 30 prolog gcc Y Y option
13429 SVR4 SDATA 13 crt0 gcc N Y N
13430 SVR4 pic 30 prolog ld Y not yet N
13431 SVR4 PIC 30 prolog gcc Y option option
13432 EABI TOC 30 prolog gcc Y option option
13436 /* Hash functions for the hash table. */
13439 rs6000_hash_constant (rtx k)
13441 enum rtx_code code = GET_CODE (k);
13442 enum machine_mode mode = GET_MODE (k);
13443 unsigned result = (code << 3) ^ mode;
13444 const char *format;
13447 format = GET_RTX_FORMAT (code);
13448 flen = strlen (format);
13454 return result * 1231 + (unsigned) INSN_UID (XEXP (k, 0));
13457 if (mode != VOIDmode)
13458 return real_hash (CONST_DOUBLE_REAL_VALUE (k)) * result;
13470 for (; fidx < flen; fidx++)
13471 switch (format[fidx])
13476 const char *str = XSTR (k, fidx);
13477 len = strlen (str);
13478 result = result * 613 + len;
13479 for (i = 0; i < len; i++)
13480 result = result * 613 + (unsigned) str[i];
13485 result = result * 1231 + rs6000_hash_constant (XEXP (k, fidx));
13489 result = result * 613 + (unsigned) XINT (k, fidx);
13492 if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT))
13493 result = result * 613 + (unsigned) XWINT (k, fidx);
13497 for (i = 0; i < sizeof(HOST_WIDE_INT)/sizeof(unsigned); i++)
13498 result = result * 613 + (unsigned) (XWINT (k, fidx)
13512 toc_hash_function (const void *hash_entry)
13514 const struct toc_hash_struct *thc =
13515 (const struct toc_hash_struct *) hash_entry;
13516 return rs6000_hash_constant (thc->key) ^ thc->key_mode;
13519 /* Compare H1 and H2 for equivalence. */
13522 toc_hash_eq (const void *h1, const void *h2)
13524 rtx r1 = ((const struct toc_hash_struct *) h1)->key;
13525 rtx r2 = ((const struct toc_hash_struct *) h2)->key;
13527 if (((const struct toc_hash_struct *) h1)->key_mode
13528 != ((const struct toc_hash_struct *) h2)->key_mode)
13531 return rtx_equal_p (r1, r2);
13534 /* These are the names given by the C++ front-end to vtables, and
13535 vtable-like objects. Ideally, this logic should not be here;
13536 instead, there should be some programmatic way of inquiring as
13537 to whether or not an object is a vtable. */
13539 #define VTABLE_NAME_P(NAME) \
13540 (strncmp ("_vt.", name, strlen("_vt.")) == 0 \
13541 || strncmp ("_ZTV", name, strlen ("_ZTV")) == 0 \
13542 || strncmp ("_ZTT", name, strlen ("_ZTT")) == 0 \
13543 || strncmp ("_ZTI", name, strlen ("_ZTI")) == 0 \
13544 || strncmp ("_ZTC", name, strlen ("_ZTC")) == 0)
13547 rs6000_output_symbol_ref (FILE *file, rtx x)
13549 /* Currently C++ toc references to vtables can be emitted before it
13550 is decided whether the vtable is public or private. If this is
13551 the case, then the linker will eventually complain that there is
13552 a reference to an unknown section. Thus, for vtables only,
13553 we emit the TOC reference to reference the symbol and not the
13555 const char *name = XSTR (x, 0);
13557 if (VTABLE_NAME_P (name))
13559 RS6000_OUTPUT_BASENAME (file, name);
13562 assemble_name (file, name);
13565 /* Output a TOC entry. We derive the entry name from what is being
13569 output_toc (FILE *file, rtx x, int labelno, enum machine_mode mode)
13572 const char *name = buf;
13573 const char *real_name;
13580 /* When the linker won't eliminate them, don't output duplicate
13581 TOC entries (this happens on AIX if there is any kind of TOC,
13582 and on SVR4 under -fPIC or -mrelocatable). Don't do this for
13584 if (TARGET_TOC && GET_CODE (x) != LABEL_REF)
13586 struct toc_hash_struct *h;
13589 /* Create toc_hash_table. This can't be done at OVERRIDE_OPTIONS
13590 time because GGC is not initialized at that point. */
13591 if (toc_hash_table == NULL)
13592 toc_hash_table = htab_create_ggc (1021, toc_hash_function,
13593 toc_hash_eq, NULL);
13595 h = ggc_alloc (sizeof (*h));
13597 h->key_mode = mode;
13598 h->labelno = labelno;
13600 found = htab_find_slot (toc_hash_table, h, 1);
13601 if (*found == NULL)
13603 else /* This is indeed a duplicate.
13604 Set this label equal to that label. */
13606 fputs ("\t.set ", file);
13607 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
13608 fprintf (file, "%d,", labelno);
13609 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
13610 fprintf (file, "%d\n", ((*(const struct toc_hash_struct **)
13616 /* If we're going to put a double constant in the TOC, make sure it's
13617 aligned properly when strict alignment is on. */
13618 if (GET_CODE (x) == CONST_DOUBLE
13619 && STRICT_ALIGNMENT
13620 && GET_MODE_BITSIZE (mode) >= 64
13621 && ! (TARGET_NO_FP_IN_TOC && ! TARGET_MINIMAL_TOC)) {
13622 ASM_OUTPUT_ALIGN (file, 3);
13625 (*targetm.asm_out.internal_label) (file, "LC", labelno);
13627 /* Handle FP constants specially. Note that if we have a minimal
13628 TOC, things we put here aren't actually in the TOC, so we can allow
13630 if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == TFmode)
13632 REAL_VALUE_TYPE rv;
13635 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
13636 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);
13640 if (TARGET_MINIMAL_TOC)
13641 fputs (DOUBLE_INT_ASM_OP, file);
13643 fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
13644 k[0] & 0xffffffff, k[1] & 0xffffffff,
13645 k[2] & 0xffffffff, k[3] & 0xffffffff);
13646 fprintf (file, "0x%lx%08lx,0x%lx%08lx\n",
13647 k[0] & 0xffffffff, k[1] & 0xffffffff,
13648 k[2] & 0xffffffff, k[3] & 0xffffffff);
13653 if (TARGET_MINIMAL_TOC)
13654 fputs ("\t.long ", file);
13656 fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
13657 k[0] & 0xffffffff, k[1] & 0xffffffff,
13658 k[2] & 0xffffffff, k[3] & 0xffffffff);
13659 fprintf (file, "0x%lx,0x%lx,0x%lx,0x%lx\n",
13660 k[0] & 0xffffffff, k[1] & 0xffffffff,
13661 k[2] & 0xffffffff, k[3] & 0xffffffff);
13665 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
13667 REAL_VALUE_TYPE rv;
13670 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
13671 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
13675 if (TARGET_MINIMAL_TOC)
13676 fputs (DOUBLE_INT_ASM_OP, file);
13678 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
13679 k[0] & 0xffffffff, k[1] & 0xffffffff);
13680 fprintf (file, "0x%lx%08lx\n",
13681 k[0] & 0xffffffff, k[1] & 0xffffffff);
13686 if (TARGET_MINIMAL_TOC)
13687 fputs ("\t.long ", file);
13689 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
13690 k[0] & 0xffffffff, k[1] & 0xffffffff);
13691 fprintf (file, "0x%lx,0x%lx\n",
13692 k[0] & 0xffffffff, k[1] & 0xffffffff);
13696 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
13698 REAL_VALUE_TYPE rv;
13701 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
13702 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
13706 if (TARGET_MINIMAL_TOC)
13707 fputs (DOUBLE_INT_ASM_OP, file);
13709 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
13710 fprintf (file, "0x%lx00000000\n", l & 0xffffffff);
13715 if (TARGET_MINIMAL_TOC)
13716 fputs ("\t.long ", file);
13718 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
13719 fprintf (file, "0x%lx\n", l & 0xffffffff);
13723 else if (GET_MODE (x) == VOIDmode
13724 && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
13726 unsigned HOST_WIDE_INT low;
13727 HOST_WIDE_INT high;
13729 if (GET_CODE (x) == CONST_DOUBLE)
13731 low = CONST_DOUBLE_LOW (x);
13732 high = CONST_DOUBLE_HIGH (x);
13735 #if HOST_BITS_PER_WIDE_INT == 32
13738 high = (low & 0x80000000) ? ~0 : 0;
13742 low = INTVAL (x) & 0xffffffff;
13743 high = (HOST_WIDE_INT) INTVAL (x) >> 32;
13747 /* TOC entries are always Pmode-sized, but since this
13748 is a bigendian machine then if we're putting smaller
13749 integer constants in the TOC we have to pad them.
13750 (This is still a win over putting the constants in
13751 a separate constant pool, because then we'd have
13752 to have both a TOC entry _and_ the actual constant.)
13754 For a 32-bit target, CONST_INT values are loaded and shifted
13755 entirely within `low' and can be stored in one TOC entry. */
13757 if (TARGET_64BIT && POINTER_SIZE < GET_MODE_BITSIZE (mode))
13758 abort ();/* It would be easy to make this work, but it doesn't now. */
13760 if (POINTER_SIZE > GET_MODE_BITSIZE (mode))
13762 #if HOST_BITS_PER_WIDE_INT == 32
13763 lshift_double (low, high, POINTER_SIZE - GET_MODE_BITSIZE (mode),
13764 POINTER_SIZE, &low, &high, 0);
13767 low <<= POINTER_SIZE - GET_MODE_BITSIZE (mode);
13768 high = (HOST_WIDE_INT) low >> 32;
13775 if (TARGET_MINIMAL_TOC)
13776 fputs (DOUBLE_INT_ASM_OP, file);
13778 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
13779 (long) high & 0xffffffff, (long) low & 0xffffffff);
13780 fprintf (file, "0x%lx%08lx\n",
13781 (long) high & 0xffffffff, (long) low & 0xffffffff);
13786 if (POINTER_SIZE < GET_MODE_BITSIZE (mode))
13788 if (TARGET_MINIMAL_TOC)
13789 fputs ("\t.long ", file);
13791 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
13792 (long) high & 0xffffffff, (long) low & 0xffffffff);
13793 fprintf (file, "0x%lx,0x%lx\n",
13794 (long) high & 0xffffffff, (long) low & 0xffffffff);
13798 if (TARGET_MINIMAL_TOC)
13799 fputs ("\t.long ", file);
13801 fprintf (file, "\t.tc IS_%lx[TC],", (long) low & 0xffffffff);
13802 fprintf (file, "0x%lx\n", (long) low & 0xffffffff);
13808 if (GET_CODE (x) == CONST)
13810 if (GET_CODE (XEXP (x, 0)) != PLUS)
13813 base = XEXP (XEXP (x, 0), 0);
13814 offset = INTVAL (XEXP (XEXP (x, 0), 1));
13817 if (GET_CODE (base) == SYMBOL_REF)
13818 name = XSTR (base, 0);
13819 else if (GET_CODE (base) == LABEL_REF)
13820 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (XEXP (base, 0)));
13821 else if (GET_CODE (base) == CODE_LABEL)
13822 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (base));
13826 real_name = (*targetm.strip_name_encoding) (name);
13827 if (TARGET_MINIMAL_TOC)
13828 fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
13831 fprintf (file, "\t.tc %s", real_name);
13834 fprintf (file, ".N%d", - offset);
13836 fprintf (file, ".P%d", offset);
13838 fputs ("[TC],", file);
13841 /* Currently C++ toc references to vtables can be emitted before it
13842 is decided whether the vtable is public or private. If this is
13843 the case, then the linker will eventually complain that there is
13844 a TOC reference to an unknown section. Thus, for vtables only,
13845 we emit the TOC reference to reference the symbol and not the
13847 if (VTABLE_NAME_P (name))
13849 RS6000_OUTPUT_BASENAME (file, name);
13851 fprintf (file, "%d", offset);
13852 else if (offset > 0)
13853 fprintf (file, "+%d", offset);
13856 output_addr_const (file, x);
13860 /* Output an assembler pseudo-op to write an ASCII string of N characters
13861 starting at P to FILE.
13863 On the RS/6000, we have to do this using the .byte operation and
13864 write out special characters outside the quoted string.
13865 Also, the assembler is broken; very long strings are truncated,
13866 so we must artificially break them up early. */
13869 output_ascii (FILE *file, const char *p, int n)
13872 int i, count_string;
13873 const char *for_string = "\t.byte \"";
13874 const char *for_decimal = "\t.byte ";
13875 const char *to_close = NULL;
13878 for (i = 0; i < n; i++)
13881 if (c >= ' ' && c < 0177)
13884 fputs (for_string, file);
13887 /* Write two quotes to get one. */
13895 for_decimal = "\"\n\t.byte ";
13899 if (count_string >= 512)
13901 fputs (to_close, file);
13903 for_string = "\t.byte \"";
13904 for_decimal = "\t.byte ";
13912 fputs (for_decimal, file);
13913 fprintf (file, "%d", c);
13915 for_string = "\n\t.byte \"";
13916 for_decimal = ", ";
13922 /* Now close the string if we have written one. Then end the line. */
13924 fputs (to_close, file);
13927 /* Generate a unique section name for FILENAME for a section type
13928 represented by SECTION_DESC. Output goes into BUF.
13930 SECTION_DESC can be any string, as long as it is different for each
13931 possible section type.
13933 We name the section in the same manner as xlc. The name begins with an
13934 underscore followed by the filename (after stripping any leading directory
13935 names) with the last period replaced by the string SECTION_DESC. If
13936 FILENAME does not contain a period, SECTION_DESC is appended to the end of
13940 rs6000_gen_section_name (char **buf, const char *filename,
13941 const char *section_desc)
13943 const char *q, *after_last_slash, *last_period = 0;
13947 after_last_slash = filename;
13948 for (q = filename; *q; q++)
13951 after_last_slash = q + 1;
13952 else if (*q == '.')
13956 len = strlen (after_last_slash) + strlen (section_desc) + 2;
13957 *buf = (char *) xmalloc (len);
13962 for (q = after_last_slash; *q; q++)
13964 if (q == last_period)
13966 strcpy (p, section_desc);
13967 p += strlen (section_desc);
13971 else if (ISALNUM (*q))
13975 if (last_period == 0)
13976 strcpy (p, section_desc);
13981 /* Emit profile function. */
13984 output_profile_hook (int labelno ATTRIBUTE_UNUSED)
13986 if (TARGET_PROFILE_KERNEL)
13989 if (DEFAULT_ABI == ABI_AIX)
13991 #ifndef NO_PROFILE_COUNTERS
13992 # define NO_PROFILE_COUNTERS 0
13994 if (NO_PROFILE_COUNTERS)
13995 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 0);
13999 const char *label_name;
14002 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
14003 label_name = (*targetm.strip_name_encoding) (ggc_strdup (buf));
14004 fun = gen_rtx_SYMBOL_REF (Pmode, label_name);
14006 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 1,
14010 else if (DEFAULT_ABI == ABI_DARWIN)
14012 const char *mcount_name = RS6000_MCOUNT;
14013 int caller_addr_regno = LINK_REGISTER_REGNUM;
14015 /* Be conservative and always set this, at least for now. */
14016 current_function_uses_pic_offset_table = 1;
14019 /* For PIC code, set up a stub and collect the caller's address
14020 from r0, which is where the prologue puts it. */
14021 if (MACHOPIC_INDIRECT)
14023 mcount_name = machopic_stub_name (mcount_name);
14024 if (current_function_uses_pic_offset_table)
14025 caller_addr_regno = 0;
14028 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, mcount_name),
14030 gen_rtx_REG (Pmode, caller_addr_regno), Pmode);
14034 /* Write function profiler code. */
14037 output_function_profiler (FILE *file, int labelno)
14042 switch (DEFAULT_ABI)
14051 warning ("no profiling of 64-bit code for this ABI");
14054 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
14055 fprintf (file, "\tmflr %s\n", reg_names[0]);
14058 fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file);
14059 asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
14060 reg_names[0], save_lr, reg_names[1]);
14061 asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
14062 asm_fprintf (file, "\t{l|lwz} %s,", reg_names[0]);
14063 assemble_name (file, buf);
14064 asm_fprintf (file, "@got(%s)\n", reg_names[12]);
14066 else if (flag_pic > 1)
14068 asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
14069 reg_names[0], save_lr, reg_names[1]);
14070 /* Now, we need to get the address of the label. */
14071 fputs ("\tbl 1f\n\t.long ", file);
14072 assemble_name (file, buf);
14073 fputs ("-.\n1:", file);
14074 asm_fprintf (file, "\tmflr %s\n", reg_names[11]);
14075 asm_fprintf (file, "\t{l|lwz} %s,0(%s)\n",
14076 reg_names[0], reg_names[11]);
14077 asm_fprintf (file, "\t{cax|add} %s,%s,%s\n",
14078 reg_names[0], reg_names[0], reg_names[11]);
14082 asm_fprintf (file, "\t{liu|lis} %s,", reg_names[12]);
14083 assemble_name (file, buf);
14084 fputs ("@ha\n", file);
14085 asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
14086 reg_names[0], save_lr, reg_names[1]);
14087 asm_fprintf (file, "\t{cal|la} %s,", reg_names[0]);
14088 assemble_name (file, buf);
14089 asm_fprintf (file, "@l(%s)\n", reg_names[12]);
14092 /* ABI_V4 saves the static chain reg with ASM_OUTPUT_REG_PUSH. */
14093 fprintf (file, "\tbl %s%s\n",
14094 RS6000_MCOUNT, flag_pic ? "@plt" : "");
14099 if (!TARGET_PROFILE_KERNEL)
14101 /* Don't do anything, done in output_profile_hook (). */
14108 asm_fprintf (file, "\tmflr %s\n", reg_names[0]);
14109 asm_fprintf (file, "\tstd %s,16(%s)\n", reg_names[0], reg_names[1]);
14111 if (current_function_needs_context)
14113 asm_fprintf (file, "\tstd %s,24(%s)\n",
14114 reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
14115 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
14116 asm_fprintf (file, "\tld %s,24(%s)\n",
14117 reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
14120 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
14128 rs6000_use_dfa_pipeline_interface (void)
14133 /* Power4 load update and store update instructions are cracked into a
14134 load or store and an integer insn which are executed in the same cycle.
14135 Branches have their own dispatch slot which does not count against the
14136 GCC issue rate, but it changes the program flow so there are no other
14137 instructions to issue in this cycle. */
14140 rs6000_variable_issue (FILE *stream ATTRIBUTE_UNUSED,
14141 int verbose ATTRIBUTE_UNUSED,
14142 rtx insn, int more)
14144 if (GET_CODE (PATTERN (insn)) == USE
14145 || GET_CODE (PATTERN (insn)) == CLOBBER)
14148 if (rs6000_sched_groups)
14150 if (is_microcoded_insn (insn))
14152 else if (is_cracked_insn (insn))
14153 return more > 2 ? more - 2 : 0;
14159 /* Adjust the cost of a scheduling dependency. Return the new cost of
14160 a dependency LINK or INSN on DEP_INSN. COST is the current cost. */
14163 rs6000_adjust_cost (rtx insn, rtx link, rtx dep_insn ATTRIBUTE_UNUSED,
14166 if (! recog_memoized (insn))
14169 if (REG_NOTE_KIND (link) != 0)
14172 if (REG_NOTE_KIND (link) == 0)
14174 /* Data dependency; DEP_INSN writes a register that INSN reads
14175 some cycles later. */
14176 switch (get_attr_type (insn))
14179 /* Tell the first scheduling pass about the latency between
14180 a mtctr and bctr (and mtlr and br/blr). The first
14181 scheduling pass will not know about this latency since
14182 the mtctr instruction, which has the latency associated
14183 to it, will be generated by reload. */
14184 return TARGET_POWER ? 5 : 4;
14186 /* Leave some extra cycles between a compare and its
14187 dependent branch, to inhibit expensive mispredicts. */
14188 if ((rs6000_cpu_attr == CPU_PPC603
14189 || rs6000_cpu_attr == CPU_PPC604
14190 || rs6000_cpu_attr == CPU_PPC604E
14191 || rs6000_cpu_attr == CPU_PPC620
14192 || rs6000_cpu_attr == CPU_PPC630
14193 || rs6000_cpu_attr == CPU_PPC750
14194 || rs6000_cpu_attr == CPU_PPC7400
14195 || rs6000_cpu_attr == CPU_PPC7450
14196 || rs6000_cpu_attr == CPU_POWER4
14197 || rs6000_cpu_attr == CPU_POWER5)
14198 && recog_memoized (dep_insn)
14199 && (INSN_CODE (dep_insn) >= 0)
14200 && (get_attr_type (dep_insn) == TYPE_CMP
14201 || get_attr_type (dep_insn) == TYPE_COMPARE
14202 || get_attr_type (dep_insn) == TYPE_DELAYED_COMPARE
14203 || get_attr_type (dep_insn) == TYPE_IMUL_COMPARE
14204 || get_attr_type (dep_insn) == TYPE_LMUL_COMPARE
14205 || get_attr_type (dep_insn) == TYPE_FPCOMPARE
14206 || get_attr_type (dep_insn) == TYPE_CR_LOGICAL
14207 || get_attr_type (dep_insn) == TYPE_DELAYED_CR))
14212 /* Fall out to return default cost. */
14218 /* The function returns a true if INSN is microcoded.
14219 Return false otherwise. */
14222 is_microcoded_insn (rtx insn)
14224 if (!insn || !INSN_P (insn)
14225 || GET_CODE (PATTERN (insn)) == USE
14226 || GET_CODE (PATTERN (insn)) == CLOBBER)
14229 if (rs6000_sched_groups)
14231 enum attr_type type = get_attr_type (insn);
14232 if (type == TYPE_LOAD_EXT_U
14233 || type == TYPE_LOAD_EXT_UX
14234 || type == TYPE_LOAD_UX
14235 || type == TYPE_STORE_UX
14236 || type == TYPE_MFCR)
14243 /* The function returns a nonzero value if INSN can be scheduled only
14244 as the first insn in a dispatch group ("dispatch-slot restricted").
14245 In this case, the returned value indicates how many dispatch slots
14246 the insn occupies (at the beginning of the group).
14247 Return 0 otherwise. */
14250 is_dispatch_slot_restricted (rtx insn)
14252 enum attr_type type;
14254 if (!rs6000_sched_groups)
14258 || insn == NULL_RTX
14259 || GET_CODE (insn) == NOTE
14260 || GET_CODE (PATTERN (insn)) == USE
14261 || GET_CODE (PATTERN (insn)) == CLOBBER)
14264 type = get_attr_type (insn);
14271 case TYPE_DELAYED_CR:
14272 case TYPE_CR_LOGICAL:
14280 if (rs6000_cpu == PROCESSOR_POWER5
14281 && is_cracked_insn (insn))
14287 /* The function returns true if INSN is cracked into 2 instructions
14288 by the processor (and therefore occupies 2 issue slots). */
14291 is_cracked_insn (rtx insn)
14293 if (!insn || !INSN_P (insn)
14294 || GET_CODE (PATTERN (insn)) == USE
14295 || GET_CODE (PATTERN (insn)) == CLOBBER)
14298 if (rs6000_sched_groups)
14300 enum attr_type type = get_attr_type (insn);
14301 if (type == TYPE_LOAD_U || type == TYPE_STORE_U
14302 || type == TYPE_FPLOAD_U || type == TYPE_FPSTORE_U
14303 || type == TYPE_FPLOAD_UX || type == TYPE_FPSTORE_UX
14304 || type == TYPE_LOAD_EXT || type == TYPE_DELAYED_CR
14305 || type == TYPE_COMPARE || type == TYPE_DELAYED_COMPARE
14306 || type == TYPE_IMUL_COMPARE || type == TYPE_LMUL_COMPARE
14307 || type == TYPE_IDIV || type == TYPE_LDIV
14308 || type == TYPE_INSERT_WORD)
14315 /* The function returns true if INSN can be issued only from
14316 the branch slot. */
14319 is_branch_slot_insn (rtx insn)
14321 if (!insn || !INSN_P (insn)
14322 || GET_CODE (PATTERN (insn)) == USE
14323 || GET_CODE (PATTERN (insn)) == CLOBBER)
14326 if (rs6000_sched_groups)
14328 enum attr_type type = get_attr_type (insn);
14329 if (type == TYPE_BRANCH || type == TYPE_JMPREG)
14337 /* A C statement (sans semicolon) to update the integer scheduling
14338 priority INSN_PRIORITY (INSN). Increase the priority to execute the
14339 INSN earlier, reduce the priority to execute INSN later. Do not
14340 define this macro if you do not need to adjust the scheduling
14341 priorities of insns. */
14344 rs6000_adjust_priority (rtx insn ATTRIBUTE_UNUSED, int priority)
14346 /* On machines (like the 750) which have asymmetric integer units,
14347 where one integer unit can do multiply and divides and the other
14348 can't, reduce the priority of multiply/divide so it is scheduled
14349 before other integer operations. */
14352 if (! INSN_P (insn))
14355 if (GET_CODE (PATTERN (insn)) == USE)
14358 switch (rs6000_cpu_attr) {
14360 switch (get_attr_type (insn))
14367 fprintf (stderr, "priority was %#x (%d) before adjustment\n",
14368 priority, priority);
14369 if (priority >= 0 && priority < 0x01000000)
14376 if (is_dispatch_slot_restricted (insn)
14377 && reload_completed
14378 && current_sched_info->sched_max_insns_priority
14379 && rs6000_sched_restricted_insns_priority)
14382 /* Prioritize insns that can be dispatched only in the first dispatch slot. */
14383 if (rs6000_sched_restricted_insns_priority == 1)
14384 /* Attach highest priority to insn. This means that in
14385 haifa-sched.c:ready_sort(), dispatch-slot restriction considerations
14386 precede 'priority' (critical path) considerations. */
14387 return current_sched_info->sched_max_insns_priority;
14388 else if (rs6000_sched_restricted_insns_priority == 2)
14389 /* Increase priority of insn by a minimal amount. This means that in
14390 haifa-sched.c:ready_sort(), only 'priority' (critical path) considerations
14391 precede dispatch-slot restriction considerations. */
14392 return (priority + 1);
14398 /* Return how many instructions the machine can issue per cycle. */
14401 rs6000_issue_rate (void)
14403 /* Use issue rate of 1 for first scheduling pass to decrease degradation. */
14404 if (!reload_completed)
14407 switch (rs6000_cpu_attr) {
14408 case CPU_RIOS1: /* ? */
14410 case CPU_PPC601: /* ? */
14433 /* Return how many instructions to look ahead for better insn
14437 rs6000_use_sched_lookahead (void)
14439 if (rs6000_cpu_attr == CPU_PPC8540)
14444 /* Determine is PAT refers to memory. */
14447 is_mem_ref (rtx pat)
14453 if (GET_CODE (pat) == MEM)
14456 /* Recursively process the pattern. */
14457 fmt = GET_RTX_FORMAT (GET_CODE (pat));
14459 for (i = GET_RTX_LENGTH (GET_CODE (pat)) - 1; i >= 0 && !ret; i--)
14462 ret |= is_mem_ref (XEXP (pat, i));
14463 else if (fmt[i] == 'E')
14464 for (j = XVECLEN (pat, i) - 1; j >= 0; j--)
14465 ret |= is_mem_ref (XVECEXP (pat, i, j));
14471 /* Determine if PAT is a PATTERN of a load insn. */
14474 is_load_insn1 (rtx pat)
14476 if (!pat || pat == NULL_RTX)
14479 if (GET_CODE (pat) == SET)
14480 return is_mem_ref (SET_SRC (pat));
14482 if (GET_CODE (pat) == PARALLEL)
14486 for (i = 0; i < XVECLEN (pat, 0); i++)
14487 if (is_load_insn1 (XVECEXP (pat, 0, i)))
14494 /* Determine if INSN loads from memory. */
14497 is_load_insn (rtx insn)
14499 if (!insn || !INSN_P (insn))
14502 if (GET_CODE (insn) == CALL_INSN)
14505 return is_load_insn1 (PATTERN (insn));
14508 /* Determine if PAT is a PATTERN of a store insn. */
14511 is_store_insn1 (rtx pat)
14513 if (!pat || pat == NULL_RTX)
14516 if (GET_CODE (pat) == SET)
14517 return is_mem_ref (SET_DEST (pat));
14519 if (GET_CODE (pat) == PARALLEL)
14523 for (i = 0; i < XVECLEN (pat, 0); i++)
14524 if (is_store_insn1 (XVECEXP (pat, 0, i)))
14531 /* Determine if INSN stores to memory. */
14534 is_store_insn (rtx insn)
14536 if (!insn || !INSN_P (insn))
14539 return is_store_insn1 (PATTERN (insn));
14542 /* Returns whether the dependence between INSN and NEXT is considered
14543 costly by the given target. */
14546 rs6000_is_costly_dependence (rtx insn, rtx next, rtx link, int cost, int distance)
14548 /* If the flag is not enbled - no dependence is considered costly;
14549 allow all dependent insns in the same group.
14550 This is the most aggressive option. */
14551 if (rs6000_sched_costly_dep == no_dep_costly)
14554 /* If the flag is set to 1 - a dependence is always considered costly;
14555 do not allow dependent instructions in the same group.
14556 This is the most conservative option. */
14557 if (rs6000_sched_costly_dep == all_deps_costly)
14560 if (rs6000_sched_costly_dep == store_to_load_dep_costly
14561 && is_load_insn (next)
14562 && is_store_insn (insn))
14563 /* Prevent load after store in the same group. */
14566 if (rs6000_sched_costly_dep == true_store_to_load_dep_costly
14567 && is_load_insn (next)
14568 && is_store_insn (insn)
14569 && (!link || (int) REG_NOTE_KIND (link) == 0))
14570 /* Prevent load after store in the same group if it is a true dependence. */
14573 /* The flag is set to X; dependences with latency >= X are considered costly,
14574 and will not be scheduled in the same group. */
14575 if (rs6000_sched_costly_dep <= max_dep_latency
14576 && ((cost - distance) >= (int)rs6000_sched_costly_dep))
14582 /* Return the next insn after INSN that is found before TAIL is reached,
14583 skipping any "non-active" insns - insns that will not actually occupy
14584 an issue slot. Return NULL_RTX if such an insn is not found. */
14587 get_next_active_insn (rtx insn, rtx tail)
14591 if (!insn || insn == tail)
14594 next_insn = NEXT_INSN (insn);
14597 && next_insn != tail
14598 && (GET_CODE(next_insn) == NOTE
14599 || GET_CODE (PATTERN (next_insn)) == USE
14600 || GET_CODE (PATTERN (next_insn)) == CLOBBER))
14602 next_insn = NEXT_INSN (next_insn);
14605 if (!next_insn || next_insn == tail)
14611 /* Return whether the presence of INSN causes a dispatch group termination
14612 of group WHICH_GROUP.
14614 If WHICH_GROUP == current_group, this function will return true if INSN
14615 causes the termination of the current group (i.e, the dispatch group to
14616 which INSN belongs). This means that INSN will be the last insn in the
14617 group it belongs to.
14619 If WHICH_GROUP == previous_group, this function will return true if INSN
14620 causes the termination of the previous group (i.e, the dispatch group that
14621 precedes the group to which INSN belongs). This means that INSN will be
14622 the first insn in the group it belongs to). */
14625 insn_terminates_group_p (rtx insn, enum group_termination which_group)
14627 enum attr_type type;
14632 type = get_attr_type (insn);
14634 if (is_microcoded_insn (insn))
14637 if (which_group == current_group)
14639 if (is_branch_slot_insn (insn))
14643 else if (which_group == previous_group)
14645 if (is_dispatch_slot_restricted (insn))
14653 /* Return true if it is recommended to keep NEXT_INSN "far" (in a separate
14654 dispatch group) from the insns in GROUP_INSNS. Return false otherwise. */
14657 is_costly_group (rtx *group_insns, rtx next_insn)
14662 int issue_rate = rs6000_issue_rate ();
14664 for (i = 0; i < issue_rate; i++)
14666 rtx insn = group_insns[i];
14669 for (link = INSN_DEPEND (insn); link != 0; link = XEXP (link, 1))
14671 rtx next = XEXP (link, 0);
14672 if (next == next_insn)
14674 cost = insn_cost (insn, link, next_insn);
14675 if (rs6000_is_costly_dependence (insn, next_insn, link, cost, 0))
14684 /* Utility of the function redefine_groups.
14685 Check if it is too costly to schedule NEXT_INSN together with GROUP_INSNS
14686 in the same dispatch group. If so, insert nops before NEXT_INSN, in order
14687 to keep it "far" (in a separate group) from GROUP_INSNS, following
14688 one of the following schemes, depending on the value of the flag
14689 -minsert_sched_nops = X:
14690 (1) X == sched_finish_regroup_exact: insert exactly as many nops as needed
14691 in order to force NEXT_INSN into a separate group.
14692 (2) X < sched_finish_regroup_exact: insert exactly X nops.
14693 GROUP_END, CAN_ISSUE_MORE and GROUP_COUNT record the state after nop
14694 insertion (has a group just ended, how many vacant issue slots remain in the
14695 last group, and how many dispatch groups were encountered so far). */
14698 force_new_group (int sched_verbose, FILE *dump, rtx *group_insns, rtx next_insn,
14699 bool *group_end, int can_issue_more, int *group_count)
14703 int issue_rate = rs6000_issue_rate ();
14704 bool end = *group_end;
14707 if (next_insn == NULL_RTX)
14708 return can_issue_more;
14710 if (rs6000_sched_insert_nops > sched_finish_regroup_exact)
14711 return can_issue_more;
14713 force = is_costly_group (group_insns, next_insn);
14715 return can_issue_more;
14717 if (sched_verbose > 6)
14718 fprintf (dump,"force: group count = %d, can_issue_more = %d\n",
14719 *group_count ,can_issue_more);
14721 if (rs6000_sched_insert_nops == sched_finish_regroup_exact)
14724 can_issue_more = 0;
14726 /* Since only a branch can be issued in the last issue_slot, it is
14727 sufficient to insert 'can_issue_more - 1' nops if next_insn is not
14728 a branch. If next_insn is a branch, we insert 'can_issue_more' nops;
14729 in this case the last nop will start a new group and the branch will be
14730 forced to the new group. */
14731 if (can_issue_more && !is_branch_slot_insn (next_insn))
14734 while (can_issue_more > 0)
14737 emit_insn_before (nop, next_insn);
14745 if (rs6000_sched_insert_nops < sched_finish_regroup_exact)
14747 int n_nops = rs6000_sched_insert_nops;
14749 /* Nops can't be issued from the branch slot, so the effective
14750 issue_rate for nops is 'issue_rate - 1'. */
14751 if (can_issue_more == 0)
14752 can_issue_more = issue_rate;
14754 if (can_issue_more == 0)
14756 can_issue_more = issue_rate - 1;
14759 for (i = 0; i < issue_rate; i++)
14761 group_insns[i] = 0;
14768 emit_insn_before (nop, next_insn);
14769 if (can_issue_more == issue_rate - 1) /* new group begins */
14772 if (can_issue_more == 0)
14774 can_issue_more = issue_rate - 1;
14777 for (i = 0; i < issue_rate; i++)
14779 group_insns[i] = 0;
14785 /* Scale back relative to 'issue_rate' (instead of 'issue_rate - 1'). */
14788 *group_end = /* Is next_insn going to start a new group? */
14790 || (can_issue_more == 1 && !is_branch_slot_insn (next_insn))
14791 || (can_issue_more <= 2 && is_cracked_insn (next_insn))
14792 || (can_issue_more < issue_rate &&
14793 insn_terminates_group_p (next_insn, previous_group)));
14794 if (*group_end && end)
14797 if (sched_verbose > 6)
14798 fprintf (dump, "done force: group count = %d, can_issue_more = %d\n",
14799 *group_count, can_issue_more);
14800 return can_issue_more;
14803 return can_issue_more;
14806 /* This function tries to synch the dispatch groups that the compiler "sees"
14807 with the dispatch groups that the processor dispatcher is expected to
14808 form in practice. It tries to achieve this synchronization by forcing the
14809 estimated processor grouping on the compiler (as opposed to the function
14810 'pad_goups' which tries to force the scheduler's grouping on the processor).
14812 The function scans the insn sequence between PREV_HEAD_INSN and TAIL and
14813 examines the (estimated) dispatch groups that will be formed by the processor
14814 dispatcher. It marks these group boundaries to reflect the estimated
14815 processor grouping, overriding the grouping that the scheduler had marked.
14816 Depending on the value of the flag '-minsert-sched-nops' this function can
14817 force certain insns into separate groups or force a certain distance between
14818 them by inserting nops, for example, if there exists a "costly dependence"
14821 The function estimates the group boundaries that the processor will form as
14822 folllows: It keeps track of how many vacant issue slots are available after
14823 each insn. A subsequent insn will start a new group if one of the following
14825 - no more vacant issue slots remain in the current dispatch group.
14826 - only the last issue slot, which is the branch slot, is vacant, but the next
14827 insn is not a branch.
14828 - only the last 2 or less issue slots, including the branch slot, are vacant,
14829 which means that a cracked insn (which occupies two issue slots) can't be
14830 issued in this group.
14831 - less than 'issue_rate' slots are vacant, and the next insn always needs to
14832 start a new group. */
14835 redefine_groups (FILE *dump, int sched_verbose, rtx prev_head_insn, rtx tail)
14837 rtx insn, next_insn;
14839 int can_issue_more;
14842 int group_count = 0;
14846 issue_rate = rs6000_issue_rate ();
14847 group_insns = alloca (issue_rate * sizeof (rtx));
14848 for (i = 0; i < issue_rate; i++)
14850 group_insns[i] = 0;
14852 can_issue_more = issue_rate;
14854 insn = get_next_active_insn (prev_head_insn, tail);
14857 while (insn != NULL_RTX)
14859 slot = (issue_rate - can_issue_more);
14860 group_insns[slot] = insn;
14862 rs6000_variable_issue (dump, sched_verbose, insn, can_issue_more);
14863 if (insn_terminates_group_p (insn, current_group))
14864 can_issue_more = 0;
14866 next_insn = get_next_active_insn (insn, tail);
14867 if (next_insn == NULL_RTX)
14868 return group_count + 1;
14870 group_end = /* Is next_insn going to start a new group? */
14871 (can_issue_more == 0
14872 || (can_issue_more == 1 && !is_branch_slot_insn (next_insn))
14873 || (can_issue_more <= 2 && is_cracked_insn (next_insn))
14874 || (can_issue_more < issue_rate &&
14875 insn_terminates_group_p (next_insn, previous_group)));
14877 can_issue_more = force_new_group (sched_verbose, dump, group_insns,
14878 next_insn, &group_end, can_issue_more, &group_count);
14883 can_issue_more = 0;
14884 for (i = 0; i < issue_rate; i++)
14886 group_insns[i] = 0;
14890 if (GET_MODE (next_insn) == TImode && can_issue_more)
14891 PUT_MODE(next_insn, VOIDmode);
14892 else if (!can_issue_more && GET_MODE (next_insn) != TImode)
14893 PUT_MODE (next_insn, TImode);
14896 if (can_issue_more == 0)
14897 can_issue_more = issue_rate;
14900 return group_count;
14903 /* Scan the insn sequence between PREV_HEAD_INSN and TAIL and examine the
14904 dispatch group boundaries that the scheduler had marked. Pad with nops
14905 any dispatch groups which have vacant issue slots, in order to force the
14906 scheduler's grouping on the processor dispatcher. The function
14907 returns the number of dispatch groups found. */
14910 pad_groups (FILE *dump, int sched_verbose, rtx prev_head_insn, rtx tail)
14912 rtx insn, next_insn;
14915 int can_issue_more;
14917 int group_count = 0;
14919 /* Initialize issue_rate. */
14920 issue_rate = rs6000_issue_rate ();
14921 can_issue_more = issue_rate;
14923 insn = get_next_active_insn (prev_head_insn, tail);
14924 next_insn = get_next_active_insn (insn, tail);
14926 while (insn != NULL_RTX)
14929 rs6000_variable_issue (dump, sched_verbose, insn, can_issue_more);
14931 group_end = (next_insn == NULL_RTX || GET_MODE (next_insn) == TImode);
14933 if (next_insn == NULL_RTX)
14938 /* If the scheduler had marked group termination at this location
14939 (between insn and next_indn), and neither insn nor next_insn will
14940 force group termination, pad the group with nops to force group
14943 && (rs6000_sched_insert_nops == sched_finish_pad_groups)
14944 && !insn_terminates_group_p (insn, current_group)
14945 && !insn_terminates_group_p (next_insn, previous_group))
14947 if (!is_branch_slot_insn(next_insn))
14950 while (can_issue_more)
14953 emit_insn_before (nop, next_insn);
14958 can_issue_more = issue_rate;
14963 next_insn = get_next_active_insn (insn, tail);
14966 return group_count;
14969 /* The following function is called at the end of scheduling BB.
14970 After reload, it inserts nops at insn group bundling. */
14973 rs6000_sched_finish (FILE *dump, int sched_verbose)
14978 fprintf (dump, "=== Finishing schedule.\n");
14980 if (reload_completed && rs6000_sched_groups)
14982 if (rs6000_sched_insert_nops == sched_finish_none)
14985 if (rs6000_sched_insert_nops == sched_finish_pad_groups)
14986 n_groups = pad_groups (dump, sched_verbose,
14987 current_sched_info->prev_head,
14988 current_sched_info->next_tail);
14990 n_groups = redefine_groups (dump, sched_verbose,
14991 current_sched_info->prev_head,
14992 current_sched_info->next_tail);
14994 if (sched_verbose >= 6)
14996 fprintf (dump, "ngroups = %d\n", n_groups);
14997 print_rtl (dump, current_sched_info->prev_head);
14998 fprintf (dump, "Done finish_sched\n");
15003 /* Length in units of the trampoline for entering a nested function. */
15006 rs6000_trampoline_size (void)
15010 switch (DEFAULT_ABI)
15016 ret = (TARGET_32BIT) ? 12 : 24;
15021 ret = (TARGET_32BIT) ? 40 : 48;
15028 /* Emit RTL insns to initialize the variable parts of a trampoline.
15029 FNADDR is an RTX for the address of the function's pure code.
15030 CXT is an RTX for the static chain value for the function. */
15033 rs6000_initialize_trampoline (rtx addr, rtx fnaddr, rtx cxt)
15035 enum machine_mode pmode = Pmode;
15036 int regsize = (TARGET_32BIT) ? 4 : 8;
15037 rtx ctx_reg = force_reg (pmode, cxt);
15039 switch (DEFAULT_ABI)
15044 /* Macros to shorten the code expansions below. */
15045 #define MEM_DEREF(addr) gen_rtx_MEM (pmode, memory_address (pmode, addr))
15046 #define MEM_PLUS(addr,offset) \
15047 gen_rtx_MEM (pmode, memory_address (pmode, plus_constant (addr, offset)))
15049 /* Under AIX, just build the 3 word function descriptor */
15052 rtx fn_reg = gen_reg_rtx (pmode);
15053 rtx toc_reg = gen_reg_rtx (pmode);
15054 emit_move_insn (fn_reg, MEM_DEREF (fnaddr));
15055 emit_move_insn (toc_reg, MEM_PLUS (fnaddr, regsize));
15056 emit_move_insn (MEM_DEREF (addr), fn_reg);
15057 emit_move_insn (MEM_PLUS (addr, regsize), toc_reg);
15058 emit_move_insn (MEM_PLUS (addr, 2*regsize), ctx_reg);
15062 /* Under V.4/eabi/darwin, __trampoline_setup does the real work. */
15065 emit_library_call (gen_rtx_SYMBOL_REF (SImode, "__trampoline_setup"),
15066 FALSE, VOIDmode, 4,
15068 GEN_INT (rs6000_trampoline_size ()), SImode,
15078 /* Table of valid machine attributes. */
15080 const struct attribute_spec rs6000_attribute_table[] =
15082 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
15083 { "altivec", 1, 1, false, true, false, rs6000_handle_altivec_attribute },
15084 { "longcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
15085 { "shortcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
15086 { NULL, 0, 0, false, false, false, NULL }
15089 /* Handle the "altivec" attribute. The attribute may have
15090 arguments as follows:
15092 __attribute__((altivec(vector__)))
15093 __attribute__((altivec(pixel__))) (always followed by 'unsigned short')
15094 __attribute__((altivec(bool__))) (always followed by 'unsigned')
15096 and may appear more than once (e.g., 'vector bool char') in a
15097 given declaration. */
15100 rs6000_handle_altivec_attribute (tree *node, tree name, tree args,
15101 int flags ATTRIBUTE_UNUSED,
15102 bool *no_add_attrs)
15104 tree type = *node, result = NULL_TREE;
15105 enum machine_mode mode;
15108 = ((args && TREE_CODE (args) == TREE_LIST && TREE_VALUE (args)
15109 && TREE_CODE (TREE_VALUE (args)) == IDENTIFIER_NODE)
15110 ? *IDENTIFIER_POINTER (TREE_VALUE (args))
15113 while (POINTER_TYPE_P (type)
15114 || TREE_CODE (type) == FUNCTION_TYPE
15115 || TREE_CODE (type) == METHOD_TYPE
15116 || TREE_CODE (type) == ARRAY_TYPE)
15117 type = TREE_TYPE (type);
15119 mode = TYPE_MODE (type);
15121 if (rs6000_warn_altivec_long
15122 && (type == long_unsigned_type_node || type == long_integer_type_node))
15123 warning ("use of 'long' in AltiVec types is deprecated; use 'int'");
15125 switch (altivec_type)
15128 unsigned_p = TYPE_UNSIGNED (type);
15132 result = (unsigned_p ? unsigned_V4SI_type_node : V4SI_type_node);
15135 result = (unsigned_p ? unsigned_V8HI_type_node : V8HI_type_node);
15138 result = (unsigned_p ? unsigned_V16QI_type_node : V16QI_type_node);
15140 case SFmode: result = V4SF_type_node; break;
15141 /* If the user says 'vector int bool', we may be handed the 'bool'
15142 attribute _before_ the 'vector' attribute, and so select the proper
15143 type in the 'b' case below. */
15144 case V4SImode: case V8HImode: case V16QImode: result = type;
15151 case SImode: case V4SImode: result = bool_V4SI_type_node; break;
15152 case HImode: case V8HImode: result = bool_V8HI_type_node; break;
15153 case QImode: case V16QImode: result = bool_V16QI_type_node;
15160 case V8HImode: result = pixel_V8HI_type_node;
15166 *no_add_attrs = true; /* No need to hang on to the attribute. */
15169 warning ("`%s' attribute ignored", IDENTIFIER_POINTER (name));
15171 *node = reconstruct_complex_type (*node, result);
15176 /* AltiVec defines four built-in scalar types that serve as vector
15177 elements; we must teach the compiler how to mangle them. */
15179 static const char *
15180 rs6000_mangle_fundamental_type (tree type)
15182 if (type == bool_char_type_node) return "U6__boolc";
15183 if (type == bool_short_type_node) return "U6__bools";
15184 if (type == pixel_type_node) return "u7__pixel";
15185 if (type == bool_int_type_node) return "U6__booli";
15187 /* For all other types, use normal C++ mangling. */
15191 /* Handle a "longcall" or "shortcall" attribute; arguments as in
15192 struct attribute_spec.handler. */
15195 rs6000_handle_longcall_attribute (tree *node, tree name,
15196 tree args ATTRIBUTE_UNUSED,
15197 int flags ATTRIBUTE_UNUSED,
15198 bool *no_add_attrs)
15200 if (TREE_CODE (*node) != FUNCTION_TYPE
15201 && TREE_CODE (*node) != FIELD_DECL
15202 && TREE_CODE (*node) != TYPE_DECL)
15204 warning ("`%s' attribute only applies to functions",
15205 IDENTIFIER_POINTER (name));
15206 *no_add_attrs = true;
15212 /* Set longcall attributes on all functions declared when
15213 rs6000_default_long_calls is true. */
15215 rs6000_set_default_type_attributes (tree type)
15217 if (rs6000_default_long_calls
15218 && (TREE_CODE (type) == FUNCTION_TYPE
15219 || TREE_CODE (type) == METHOD_TYPE))
15220 TYPE_ATTRIBUTES (type) = tree_cons (get_identifier ("longcall"),
15222 TYPE_ATTRIBUTES (type));
15225 /* Return a reference suitable for calling a function with the
15226 longcall attribute. */
15229 rs6000_longcall_ref (rtx call_ref)
15231 const char *call_name;
15234 if (GET_CODE (call_ref) != SYMBOL_REF)
15237 /* System V adds '.' to the internal name, so skip them. */
15238 call_name = XSTR (call_ref, 0);
15239 if (*call_name == '.')
15241 while (*call_name == '.')
15244 node = get_identifier (call_name);
15245 call_ref = gen_rtx_SYMBOL_REF (VOIDmode, IDENTIFIER_POINTER (node));
15248 return force_reg (Pmode, call_ref);
15251 #ifdef USING_ELFOS_H
15253 /* A C statement or statements to switch to the appropriate section
15254 for output of RTX in mode MODE. You can assume that RTX is some
15255 kind of constant in RTL. The argument MODE is redundant except in
15256 the case of a `const_int' rtx. Select the section by calling
15257 `text_section' or one of the alternatives for other sections.
15259 Do not define this macro if you put all constants in the read-only
15263 rs6000_elf_select_rtx_section (enum machine_mode mode, rtx x,
15264 unsigned HOST_WIDE_INT align)
15266 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
15269 default_elf_select_rtx_section (mode, x, align);
15272 /* A C statement or statements to switch to the appropriate
15273 section for output of DECL. DECL is either a `VAR_DECL' node
15274 or a constant of some sort. RELOC indicates whether forming
15275 the initial value of DECL requires link-time relocations. */
15278 rs6000_elf_select_section (tree decl, int reloc,
15279 unsigned HOST_WIDE_INT align)
15281 /* Pretend that we're always building for a shared library when
15282 ABI_AIX, because otherwise we end up with dynamic relocations
15283 in read-only sections. This happens for function pointers,
15284 references to vtables in typeinfo, and probably other cases. */
15285 default_elf_select_section_1 (decl, reloc, align,
15286 flag_pic || DEFAULT_ABI == ABI_AIX);
15289 /* A C statement to build up a unique section name, expressed as a
15290 STRING_CST node, and assign it to DECL_SECTION_NAME (decl).
15291 RELOC indicates whether the initial value of EXP requires
15292 link-time relocations. If you do not define this macro, GCC will use
15293 the symbol name prefixed by `.' as the section name. Note - this
15294 macro can now be called for uninitialized data items as well as
15295 initialized data and functions. */
15298 rs6000_elf_unique_section (tree decl, int reloc)
15300 /* As above, pretend that we're always building for a shared library
15301 when ABI_AIX, to avoid dynamic relocations in read-only sections. */
15302 default_unique_section_1 (decl, reloc,
15303 flag_pic || DEFAULT_ABI == ABI_AIX);
15306 /* For a SYMBOL_REF, set generic flags and then perform some
15307 target-specific processing.
15309 When the AIX ABI is requested on a non-AIX system, replace the
15310 function name with the real name (with a leading .) rather than the
15311 function descriptor name. This saves a lot of overriding code to
15312 read the prefixes. */
15315 rs6000_elf_encode_section_info (tree decl, rtx rtl, int first)
15317 default_encode_section_info (decl, rtl, first);
15320 && TREE_CODE (decl) == FUNCTION_DECL
15322 && DEFAULT_ABI == ABI_AIX)
15324 rtx sym_ref = XEXP (rtl, 0);
15325 size_t len = strlen (XSTR (sym_ref, 0));
15326 char *str = alloca (len + 2);
15328 memcpy (str + 1, XSTR (sym_ref, 0), len + 1);
15329 XSTR (sym_ref, 0) = ggc_alloc_string (str, len + 1);
15334 rs6000_elf_in_small_data_p (tree decl)
15336 if (rs6000_sdata == SDATA_NONE)
15339 if (TREE_CODE (decl) == VAR_DECL && DECL_SECTION_NAME (decl))
15341 const char *section = TREE_STRING_POINTER (DECL_SECTION_NAME (decl));
15342 if (strcmp (section, ".sdata") == 0
15343 || strcmp (section, ".sdata2") == 0
15344 || strcmp (section, ".sbss") == 0
15345 || strcmp (section, ".sbss2") == 0
15346 || strcmp (section, ".PPC.EMB.sdata0") == 0
15347 || strcmp (section, ".PPC.EMB.sbss0") == 0)
15352 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
15355 && (unsigned HOST_WIDE_INT) size <= g_switch_value
15356 /* If it's not public, and we're not going to reference it there,
15357 there's no need to put it in the small data section. */
15358 && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)))
15365 #endif /* USING_ELFOS_H */
15368 /* Return a REG that occurs in ADDR with coefficient 1.
15369 ADDR can be effectively incremented by incrementing REG.
15371 r0 is special and we must not select it as an address
15372 register by this routine since our caller will try to
15373 increment the returned register via an "la" instruction. */
15376 find_addr_reg (rtx addr)
15378 while (GET_CODE (addr) == PLUS)
15380 if (GET_CODE (XEXP (addr, 0)) == REG
15381 && REGNO (XEXP (addr, 0)) != 0)
15382 addr = XEXP (addr, 0);
15383 else if (GET_CODE (XEXP (addr, 1)) == REG
15384 && REGNO (XEXP (addr, 1)) != 0)
15385 addr = XEXP (addr, 1);
15386 else if (CONSTANT_P (XEXP (addr, 0)))
15387 addr = XEXP (addr, 1);
15388 else if (CONSTANT_P (XEXP (addr, 1)))
15389 addr = XEXP (addr, 0);
15393 if (GET_CODE (addr) == REG && REGNO (addr) != 0)
15399 rs6000_fatal_bad_address (rtx op)
15401 fatal_insn ("bad address", op);
15407 /* Returns 1 if OP is either a symbol reference or a sum of a symbol
15408 reference and a constant. */
15411 symbolic_operand (rtx op)
15413 switch (GET_CODE (op))
15420 return (GET_CODE (op) == SYMBOL_REF ||
15421 (GET_CODE (XEXP (op, 0)) == SYMBOL_REF
15422 || GET_CODE (XEXP (op, 0)) == LABEL_REF)
15423 && GET_CODE (XEXP (op, 1)) == CONST_INT);
15432 static tree branch_island_list = 0;
15434 /* Remember to generate a branch island for far calls to the given
15438 add_compiler_branch_island (tree label_name, tree function_name, int line_number)
15440 tree branch_island = build_tree_list (function_name, label_name);
15441 TREE_TYPE (branch_island) = build_int_2 (line_number, 0);
15442 TREE_CHAIN (branch_island) = branch_island_list;
15443 branch_island_list = branch_island;
15446 #define BRANCH_ISLAND_LABEL_NAME(BRANCH_ISLAND) TREE_VALUE (BRANCH_ISLAND)
15447 #define BRANCH_ISLAND_FUNCTION_NAME(BRANCH_ISLAND) TREE_PURPOSE (BRANCH_ISLAND)
15448 #define BRANCH_ISLAND_LINE_NUMBER(BRANCH_ISLAND) \
15449 TREE_INT_CST_LOW (TREE_TYPE (BRANCH_ISLAND))
15451 /* Generate far-jump branch islands for everything on the
15452 branch_island_list. Invoked immediately after the last instruction
15453 of the epilogue has been emitted; the branch-islands must be
15454 appended to, and contiguous with, the function body. Mach-O stubs
15455 are generated in machopic_output_stub(). */
15458 macho_branch_islands (void)
15461 tree branch_island;
15463 for (branch_island = branch_island_list;
15465 branch_island = TREE_CHAIN (branch_island))
15467 const char *label =
15468 IDENTIFIER_POINTER (BRANCH_ISLAND_LABEL_NAME (branch_island));
15470 darwin_strip_name_encoding (
15471 IDENTIFIER_POINTER (BRANCH_ISLAND_FUNCTION_NAME (branch_island)));
15472 char name_buf[512];
15473 /* Cheap copy of the details from the Darwin ASM_OUTPUT_LABELREF(). */
15474 if (name[0] == '*' || name[0] == '&')
15475 strcpy (name_buf, name+1);
15479 strcpy (name_buf+1, name);
15481 strcpy (tmp_buf, "\n");
15482 strcat (tmp_buf, label);
15483 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
15484 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
15485 fprintf (asm_out_file, "\t.stabd 68,0," HOST_WIDE_INT_PRINT_UNSIGNED "\n",
15486 BRANCH_ISLAND_LINE_NUMBER(branch_island));
15487 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
15490 strcat (tmp_buf, ":\n\tmflr r0\n\tbcl 20,31,");
15491 strcat (tmp_buf, label);
15492 strcat (tmp_buf, "_pic\n");
15493 strcat (tmp_buf, label);
15494 strcat (tmp_buf, "_pic:\n\tmflr r11\n");
15496 strcat (tmp_buf, "\taddis r11,r11,ha16(");
15497 strcat (tmp_buf, name_buf);
15498 strcat (tmp_buf, " - ");
15499 strcat (tmp_buf, label);
15500 strcat (tmp_buf, "_pic)\n");
15502 strcat (tmp_buf, "\tmtlr r0\n");
15504 strcat (tmp_buf, "\taddi r12,r11,lo16(");
15505 strcat (tmp_buf, name_buf);
15506 strcat (tmp_buf, " - ");
15507 strcat (tmp_buf, label);
15508 strcat (tmp_buf, "_pic)\n");
15510 strcat (tmp_buf, "\tmtctr r12\n\tbctr\n");
15514 strcat (tmp_buf, ":\nlis r12,hi16(");
15515 strcat (tmp_buf, name_buf);
15516 strcat (tmp_buf, ")\n\tori r12,r12,lo16(");
15517 strcat (tmp_buf, name_buf);
15518 strcat (tmp_buf, ")\n\tmtctr r12\n\tbctr");
15520 output_asm_insn (tmp_buf, 0);
15521 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
15522 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
15523 fprintf(asm_out_file, "\t.stabd 68,0," HOST_WIDE_INT_PRINT_UNSIGNED "\n",
15524 BRANCH_ISLAND_LINE_NUMBER (branch_island));
15525 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
15528 branch_island_list = 0;
15531 /* NO_PREVIOUS_DEF checks in the link list whether the function name is
15532 already there or not. */
15535 no_previous_def (tree function_name)
15537 tree branch_island;
15538 for (branch_island = branch_island_list;
15540 branch_island = TREE_CHAIN (branch_island))
15541 if (function_name == BRANCH_ISLAND_FUNCTION_NAME (branch_island))
15546 /* GET_PREV_LABEL gets the label name from the previous definition of
15550 get_prev_label (tree function_name)
15552 tree branch_island;
15553 for (branch_island = branch_island_list;
15555 branch_island = TREE_CHAIN (branch_island))
15556 if (function_name == BRANCH_ISLAND_FUNCTION_NAME (branch_island))
15557 return BRANCH_ISLAND_LABEL_NAME (branch_island);
15561 /* INSN is either a function call or a millicode call. It may have an
15562 unconditional jump in its delay slot.
15564 CALL_DEST is the routine we are calling. */
15567 output_call (rtx insn, rtx *operands, int dest_operand_number, int cookie_operand_number)
15569 static char buf[256];
15570 if (GET_CODE (operands[dest_operand_number]) == SYMBOL_REF
15571 && (INTVAL (operands[cookie_operand_number]) & CALL_LONG))
15574 tree funname = get_identifier (XSTR (operands[dest_operand_number], 0));
15576 if (no_previous_def (funname))
15578 int line_number = 0;
15579 rtx label_rtx = gen_label_rtx ();
15580 char *label_buf, temp_buf[256];
15581 ASM_GENERATE_INTERNAL_LABEL (temp_buf, "L",
15582 CODE_LABEL_NUMBER (label_rtx));
15583 label_buf = temp_buf[0] == '*' ? temp_buf + 1 : temp_buf;
15584 labelname = get_identifier (label_buf);
15585 for (; insn && GET_CODE (insn) != NOTE; insn = PREV_INSN (insn));
15587 line_number = NOTE_LINE_NUMBER (insn);
15588 add_compiler_branch_island (labelname, funname, line_number);
15591 labelname = get_prev_label (funname);
15593 /* "jbsr foo, L42" is Mach-O for "Link as 'bl foo' if a 'bl'
15594 instruction will reach 'foo', otherwise link as 'bl L42'".
15595 "L42" should be a 'branch island', that will do a far jump to
15596 'foo'. Branch islands are generated in
15597 macho_branch_islands(). */
15598 sprintf (buf, "jbsr %%z%d,%.246s",
15599 dest_operand_number, IDENTIFIER_POINTER (labelname));
15602 sprintf (buf, "bl %%z%d", dest_operand_number);
15606 #endif /* TARGET_MACHO */
15608 /* Generate PIC and indirect symbol stubs. */
15611 machopic_output_stub (FILE *file, const char *symb, const char *stub)
15613 unsigned int length;
15614 char *symbol_name, *lazy_ptr_name;
15615 char *local_label_0;
15616 static int label = 0;
15618 /* Lose our funky encoding stuff so it doesn't contaminate the stub. */
15619 symb = (*targetm.strip_name_encoding) (symb);
15622 length = strlen (symb);
15623 symbol_name = alloca (length + 32);
15624 GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name, symb, length);
15626 lazy_ptr_name = alloca (length + 32);
15627 GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name, symb, length);
15630 machopic_picsymbol_stub1_section ();
15632 machopic_symbol_stub1_section ();
15633 fprintf (file, "\t.align 2\n");
15635 fprintf (file, "%s:\n", stub);
15636 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
15641 local_label_0 = alloca (sizeof("\"L0000000000$spb\""));
15642 sprintf (local_label_0, "\"L%011d$spb\"", label);
15644 fprintf (file, "\tmflr r0\n");
15645 fprintf (file, "\tbcl 20,31,%s\n", local_label_0);
15646 fprintf (file, "%s:\n\tmflr r11\n", local_label_0);
15647 fprintf (file, "\taddis r11,r11,ha16(%s-%s)\n",
15648 lazy_ptr_name, local_label_0);
15649 fprintf (file, "\tmtlr r0\n");
15650 fprintf (file, "\tlwzu r12,lo16(%s-%s)(r11)\n",
15651 lazy_ptr_name, local_label_0);
15652 fprintf (file, "\tmtctr r12\n");
15653 fprintf (file, "\tbctr\n");
15657 fprintf (file, "\tlis r11,ha16(%s)\n", lazy_ptr_name);
15658 fprintf (file, "\tlwzu r12,lo16(%s)(r11)\n", lazy_ptr_name);
15659 fprintf (file, "\tmtctr r12\n");
15660 fprintf (file, "\tbctr\n");
15663 machopic_lazy_symbol_ptr_section ();
15664 fprintf (file, "%s:\n", lazy_ptr_name);
15665 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
15666 fprintf (file, "\t.long dyld_stub_binding_helper\n");
15669 /* Legitimize PIC addresses. If the address is already
15670 position-independent, we return ORIG. Newly generated
15671 position-independent addresses go into a reg. This is REG if non
15672 zero, otherwise we allocate register(s) as necessary. */
15674 #define SMALL_INT(X) ((unsigned) (INTVAL(X) + 0x8000) < 0x10000)
15677 rs6000_machopic_legitimize_pic_address (rtx orig, enum machine_mode mode,
15682 if (reg == NULL && ! reload_in_progress && ! reload_completed)
15683 reg = gen_reg_rtx (Pmode);
15685 if (GET_CODE (orig) == CONST)
15687 if (GET_CODE (XEXP (orig, 0)) == PLUS
15688 && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
15691 if (GET_CODE (XEXP (orig, 0)) == PLUS)
15693 /* Use a different reg for the intermediate value, as
15694 it will be marked UNCHANGING. */
15695 rtx reg_temp = no_new_pseudos ? reg : gen_reg_rtx (Pmode);
15698 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 0),
15701 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 1),
15707 if (GET_CODE (offset) == CONST_INT)
15709 if (SMALL_INT (offset))
15710 return plus_constant (base, INTVAL (offset));
15711 else if (! reload_in_progress && ! reload_completed)
15712 offset = force_reg (Pmode, offset);
15715 rtx mem = force_const_mem (Pmode, orig);
15716 return machopic_legitimize_pic_address (mem, Pmode, reg);
15719 return gen_rtx_PLUS (Pmode, base, offset);
15722 /* Fall back on generic machopic code. */
15723 return machopic_legitimize_pic_address (orig, mode, reg);
15726 /* This is just a placeholder to make linking work without having to
15727 add this to the generic Darwin EXTRA_SECTIONS. If -mcall-aix is
15728 ever needed for Darwin (not too likely!) this would have to get a
15729 real definition. */
15736 #endif /* TARGET_MACHO */
15739 static unsigned int
15740 rs6000_elf_section_type_flags (tree decl, const char *name, int reloc)
15742 return default_section_type_flags_1 (decl, name, reloc,
15743 flag_pic || DEFAULT_ABI == ABI_AIX);
15746 /* Record an element in the table of global constructors. SYMBOL is
15747 a SYMBOL_REF of the function to be called; PRIORITY is a number
15748 between 0 and MAX_INIT_PRIORITY.
15750 This differs from default_named_section_asm_out_constructor in
15751 that we have special handling for -mrelocatable. */
15754 rs6000_elf_asm_out_constructor (rtx symbol, int priority)
15756 const char *section = ".ctors";
15759 if (priority != DEFAULT_INIT_PRIORITY)
15761 sprintf (buf, ".ctors.%.5u",
15762 /* Invert the numbering so the linker puts us in the proper
15763 order; constructors are run from right to left, and the
15764 linker sorts in increasing order. */
15765 MAX_INIT_PRIORITY - priority);
15769 named_section_flags (section, SECTION_WRITE);
15770 assemble_align (POINTER_SIZE);
15772 if (TARGET_RELOCATABLE)
15774 fputs ("\t.long (", asm_out_file);
15775 output_addr_const (asm_out_file, symbol);
15776 fputs (")@fixup\n", asm_out_file);
15779 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
15783 rs6000_elf_asm_out_destructor (rtx symbol, int priority)
15785 const char *section = ".dtors";
15788 if (priority != DEFAULT_INIT_PRIORITY)
15790 sprintf (buf, ".dtors.%.5u",
15791 /* Invert the numbering so the linker puts us in the proper
15792 order; constructors are run from right to left, and the
15793 linker sorts in increasing order. */
15794 MAX_INIT_PRIORITY - priority);
15798 named_section_flags (section, SECTION_WRITE);
15799 assemble_align (POINTER_SIZE);
15801 if (TARGET_RELOCATABLE)
15803 fputs ("\t.long (", asm_out_file);
15804 output_addr_const (asm_out_file, symbol);
15805 fputs (")@fixup\n", asm_out_file);
15808 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
15812 rs6000_elf_declare_function_name (FILE *file, const char *name, tree decl)
15816 fputs ("\t.section\t\".opd\",\"aw\"\n\t.align 3\n", file);
15817 ASM_OUTPUT_LABEL (file, name);
15818 fputs (DOUBLE_INT_ASM_OP, file);
15820 assemble_name (file, name);
15821 fputs (",.TOC.@tocbase,0\n\t.previous\n\t.size\t", file);
15822 assemble_name (file, name);
15823 fputs (",24\n\t.type\t.", file);
15824 assemble_name (file, name);
15825 fputs (",@function\n", file);
15826 if (TREE_PUBLIC (decl) && ! DECL_WEAK (decl))
15828 fputs ("\t.globl\t.", file);
15829 assemble_name (file, name);
15832 ASM_DECLARE_RESULT (file, DECL_RESULT (decl));
15834 ASM_OUTPUT_LABEL (file, name);
15838 if (TARGET_RELOCATABLE
15839 && (get_pool_size () != 0 || current_function_profile)
15844 (*targetm.asm_out.internal_label) (file, "LCL", rs6000_pic_labelno);
15846 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
15847 fprintf (file, "\t.long ");
15848 assemble_name (file, buf);
15850 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
15851 assemble_name (file, buf);
15855 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
15856 ASM_DECLARE_RESULT (file, DECL_RESULT (decl));
15858 if (DEFAULT_ABI == ABI_AIX)
15860 const char *desc_name, *orig_name;
15862 orig_name = (*targetm.strip_name_encoding) (name);
15863 desc_name = orig_name;
15864 while (*desc_name == '.')
15867 if (TREE_PUBLIC (decl))
15868 fprintf (file, "\t.globl %s\n", desc_name);
15870 fprintf (file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
15871 fprintf (file, "%s:\n", desc_name);
15872 fprintf (file, "\t.long %s\n", orig_name);
15873 fputs ("\t.long _GLOBAL_OFFSET_TABLE_\n", file);
15874 if (DEFAULT_ABI == ABI_AIX)
15875 fputs ("\t.long 0\n", file);
15876 fprintf (file, "\t.previous\n");
15878 ASM_OUTPUT_LABEL (file, name);
15884 rs6000_xcoff_asm_globalize_label (FILE *stream, const char *name)
15886 fputs (GLOBAL_ASM_OP, stream);
15887 RS6000_OUTPUT_BASENAME (stream, name);
15888 putc ('\n', stream);
15892 rs6000_xcoff_asm_named_section (const char *name, unsigned int flags)
15895 static const char * const suffix[3] = { "PR", "RO", "RW" };
15897 if (flags & SECTION_CODE)
15899 else if (flags & SECTION_WRITE)
15904 fprintf (asm_out_file, "\t.csect %s%s[%s],%u\n",
15905 (flags & SECTION_CODE) ? "." : "",
15906 name, suffix[smclass], flags & SECTION_ENTSIZE);
15910 rs6000_xcoff_select_section (tree decl, int reloc,
15911 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
15913 if (decl_readonly_section_1 (decl, reloc, 1))
15915 if (TREE_PUBLIC (decl))
15916 read_only_data_section ();
15918 read_only_private_data_section ();
15922 if (TREE_PUBLIC (decl))
15925 private_data_section ();
15930 rs6000_xcoff_unique_section (tree decl, int reloc ATTRIBUTE_UNUSED)
15934 /* Use select_section for private and uninitialized data. */
15935 if (!TREE_PUBLIC (decl)
15936 || DECL_COMMON (decl)
15937 || DECL_INITIAL (decl) == NULL_TREE
15938 || DECL_INITIAL (decl) == error_mark_node
15939 || (flag_zero_initialized_in_bss
15940 && initializer_zerop (DECL_INITIAL (decl))))
15943 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
15944 name = (*targetm.strip_name_encoding) (name);
15945 DECL_SECTION_NAME (decl) = build_string (strlen (name), name);
15948 /* Select section for constant in constant pool.
15950 On RS/6000, all constants are in the private read-only data area.
15951 However, if this is being placed in the TOC it must be output as a
15955 rs6000_xcoff_select_rtx_section (enum machine_mode mode, rtx x,
15956 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
15958 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
15961 read_only_private_data_section ();
15964 /* Remove any trailing [DS] or the like from the symbol name. */
15966 static const char *
15967 rs6000_xcoff_strip_name_encoding (const char *name)
15972 len = strlen (name);
15973 if (name[len - 1] == ']')
15974 return ggc_alloc_string (name, len - 4);
15979 /* Section attributes. AIX is always PIC. */
15981 static unsigned int
15982 rs6000_xcoff_section_type_flags (tree decl, const char *name, int reloc)
15984 unsigned int align;
15985 unsigned int flags = default_section_type_flags_1 (decl, name, reloc, 1);
15987 /* Align to at least UNIT size. */
15988 if (flags & SECTION_CODE)
15989 align = MIN_UNITS_PER_WORD;
15991 /* Increase alignment of large objects if not already stricter. */
15992 align = MAX ((DECL_ALIGN (decl) / BITS_PER_UNIT),
15993 int_size_in_bytes (TREE_TYPE (decl)) > MIN_UNITS_PER_WORD
15994 ? UNITS_PER_FP_WORD : MIN_UNITS_PER_WORD);
15996 return flags | (exact_log2 (align) & SECTION_ENTSIZE);
15999 /* Output at beginning of assembler file.
16001 Initialize the section names for the RS/6000 at this point.
16003 Specify filename, including full path, to assembler.
16005 We want to go into the TOC section so at least one .toc will be emitted.
16006 Also, in order to output proper .bs/.es pairs, we need at least one static
16007 [RW] section emitted.
16009 Finally, declare mcount when profiling to make the assembler happy. */
16012 rs6000_xcoff_file_start (void)
16014 rs6000_gen_section_name (&xcoff_bss_section_name,
16015 main_input_filename, ".bss_");
16016 rs6000_gen_section_name (&xcoff_private_data_section_name,
16017 main_input_filename, ".rw_");
16018 rs6000_gen_section_name (&xcoff_read_only_section_name,
16019 main_input_filename, ".ro_");
16021 fputs ("\t.file\t", asm_out_file);
16022 output_quoted_string (asm_out_file, main_input_filename);
16023 fputc ('\n', asm_out_file);
16025 if (write_symbols != NO_DEBUG)
16026 private_data_section ();
16029 fprintf (asm_out_file, "\t.extern %s\n", RS6000_MCOUNT);
16030 rs6000_file_start ();
16033 /* Output at end of assembler file.
16034 On the RS/6000, referencing data should automatically pull in text. */
16037 rs6000_xcoff_file_end (void)
16040 fputs ("_section_.text:\n", asm_out_file);
16042 fputs (TARGET_32BIT
16043 ? "\t.long _section_.text\n" : "\t.llong _section_.text\n",
16046 #endif /* TARGET_XCOFF */
16049 /* Cross-module name binding. Darwin does not support overriding
16050 functions at dynamic-link time. */
16053 rs6000_binds_local_p (tree decl)
16055 return default_binds_local_p_1 (decl, 0);
16059 /* Compute a (partial) cost for rtx X. Return true if the complete
16060 cost has been computed, and false if subexpressions should be
16061 scanned. In either case, *TOTAL contains the cost result. */
16064 rs6000_rtx_costs (rtx x, int code, int outer_code ATTRIBUTE_UNUSED,
16069 /* On the RS/6000, if it is valid in the insn, it is free.
16070 So this always returns 0. */
16081 *total = ((GET_CODE (XEXP (x, 1)) == CONST_INT
16082 && ((unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1))
16083 + 0x8000) >= 0x10000)
16084 && ((INTVAL (XEXP (x, 1)) & 0xffff) != 0))
16085 ? COSTS_N_INSNS (2)
16086 : COSTS_N_INSNS (1));
16092 *total = ((GET_CODE (XEXP (x, 1)) == CONST_INT
16093 && (INTVAL (XEXP (x, 1)) & (~ (HOST_WIDE_INT) 0xffff)) != 0
16094 && ((INTVAL (XEXP (x, 1)) & 0xffff) != 0))
16095 ? COSTS_N_INSNS (2)
16096 : COSTS_N_INSNS (1));
16102 *total = COSTS_N_INSNS (2);
16105 switch (rs6000_cpu)
16107 case PROCESSOR_RIOS1:
16108 case PROCESSOR_PPC405:
16109 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
16110 ? COSTS_N_INSNS (5)
16111 : (INTVAL (XEXP (x, 1)) >= -256
16112 && INTVAL (XEXP (x, 1)) <= 255)
16113 ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4));
16116 case PROCESSOR_PPC440:
16117 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
16118 ? COSTS_N_INSNS (3)
16119 : COSTS_N_INSNS (2));
16122 case PROCESSOR_RS64A:
16123 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
16124 ? GET_MODE (XEXP (x, 1)) != DImode
16125 ? COSTS_N_INSNS (20) : COSTS_N_INSNS (34)
16126 : (INTVAL (XEXP (x, 1)) >= -256
16127 && INTVAL (XEXP (x, 1)) <= 255)
16128 ? COSTS_N_INSNS (8) : COSTS_N_INSNS (12));
16131 case PROCESSOR_RIOS2:
16132 case PROCESSOR_MPCCORE:
16133 case PROCESSOR_PPC604e:
16134 *total = COSTS_N_INSNS (2);
16137 case PROCESSOR_PPC601:
16138 *total = COSTS_N_INSNS (5);
16141 case PROCESSOR_PPC603:
16142 case PROCESSOR_PPC7400:
16143 case PROCESSOR_PPC750:
16144 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
16145 ? COSTS_N_INSNS (5)
16146 : (INTVAL (XEXP (x, 1)) >= -256
16147 && INTVAL (XEXP (x, 1)) <= 255)
16148 ? COSTS_N_INSNS (2) : COSTS_N_INSNS (3));
16151 case PROCESSOR_PPC7450:
16152 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
16153 ? COSTS_N_INSNS (4)
16154 : COSTS_N_INSNS (3));
16157 case PROCESSOR_PPC403:
16158 case PROCESSOR_PPC604:
16159 case PROCESSOR_PPC8540:
16160 *total = COSTS_N_INSNS (4);
16163 case PROCESSOR_PPC620:
16164 case PROCESSOR_PPC630:
16165 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
16166 ? GET_MODE (XEXP (x, 1)) != DImode
16167 ? COSTS_N_INSNS (5) : COSTS_N_INSNS (7)
16168 : (INTVAL (XEXP (x, 1)) >= -256
16169 && INTVAL (XEXP (x, 1)) <= 255)
16170 ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4));
16173 case PROCESSOR_POWER4:
16174 case PROCESSOR_POWER5:
16175 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
16176 ? GET_MODE (XEXP (x, 1)) != DImode
16177 ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4)
16178 : COSTS_N_INSNS (2));
16187 if (GET_CODE (XEXP (x, 1)) == CONST_INT
16188 && exact_log2 (INTVAL (XEXP (x, 1))) >= 0)
16190 *total = COSTS_N_INSNS (2);
16197 switch (rs6000_cpu)
16199 case PROCESSOR_RIOS1:
16200 *total = COSTS_N_INSNS (19);
16203 case PROCESSOR_RIOS2:
16204 *total = COSTS_N_INSNS (13);
16207 case PROCESSOR_RS64A:
16208 *total = (GET_MODE (XEXP (x, 1)) != DImode
16209 ? COSTS_N_INSNS (65)
16210 : COSTS_N_INSNS (67));
16213 case PROCESSOR_MPCCORE:
16214 *total = COSTS_N_INSNS (6);
16217 case PROCESSOR_PPC403:
16218 *total = COSTS_N_INSNS (33);
16221 case PROCESSOR_PPC405:
16222 *total = COSTS_N_INSNS (35);
16225 case PROCESSOR_PPC440:
16226 *total = COSTS_N_INSNS (34);
16229 case PROCESSOR_PPC601:
16230 *total = COSTS_N_INSNS (36);
16233 case PROCESSOR_PPC603:
16234 *total = COSTS_N_INSNS (37);
16237 case PROCESSOR_PPC604:
16238 case PROCESSOR_PPC604e:
16239 *total = COSTS_N_INSNS (20);
16242 case PROCESSOR_PPC620:
16243 case PROCESSOR_PPC630:
16244 *total = (GET_MODE (XEXP (x, 1)) != DImode
16245 ? COSTS_N_INSNS (21)
16246 : COSTS_N_INSNS (37));
16249 case PROCESSOR_PPC750:
16250 case PROCESSOR_PPC8540:
16251 case PROCESSOR_PPC7400:
16252 *total = COSTS_N_INSNS (19);
16255 case PROCESSOR_PPC7450:
16256 *total = COSTS_N_INSNS (23);
16259 case PROCESSOR_POWER4:
16260 case PROCESSOR_POWER5:
16261 *total = (GET_MODE (XEXP (x, 1)) != DImode
16262 ? COSTS_N_INSNS (18)
16263 : COSTS_N_INSNS (34));
16271 *total = COSTS_N_INSNS (4);
16275 /* MEM should be slightly more expensive than (plus (reg) (const)). */
16284 /* A C expression returning the cost of moving data from a register of class
16285 CLASS1 to one of CLASS2. */
16288 rs6000_register_move_cost (enum machine_mode mode,
16289 enum reg_class from, enum reg_class to)
16291 /* Moves from/to GENERAL_REGS. */
16292 if (reg_classes_intersect_p (to, GENERAL_REGS)
16293 || reg_classes_intersect_p (from, GENERAL_REGS))
16295 if (! reg_classes_intersect_p (to, GENERAL_REGS))
16298 if (from == FLOAT_REGS || from == ALTIVEC_REGS)
16299 return (rs6000_memory_move_cost (mode, from, 0)
16300 + rs6000_memory_move_cost (mode, GENERAL_REGS, 0));
16302 /* It's more expensive to move CR_REGS than CR0_REGS because of the shift.... */
16303 else if (from == CR_REGS)
16307 /* A move will cost one instruction per GPR moved. */
16308 return 2 * HARD_REGNO_NREGS (0, mode);
16311 /* Moving between two similar registers is just one instruction. */
16312 else if (reg_classes_intersect_p (to, from))
16313 return mode == TFmode ? 4 : 2;
16315 /* Everything else has to go through GENERAL_REGS. */
16317 return (rs6000_register_move_cost (mode, GENERAL_REGS, to)
16318 + rs6000_register_move_cost (mode, from, GENERAL_REGS));
16321 /* A C expressions returning the cost of moving data of MODE from a register to
16325 rs6000_memory_move_cost (enum machine_mode mode, enum reg_class class,
16326 int in ATTRIBUTE_UNUSED)
16328 if (reg_classes_intersect_p (class, GENERAL_REGS))
16329 return 4 * HARD_REGNO_NREGS (0, mode);
16330 else if (reg_classes_intersect_p (class, FLOAT_REGS))
16331 return 4 * HARD_REGNO_NREGS (32, mode);
16332 else if (reg_classes_intersect_p (class, ALTIVEC_REGS))
16333 return 4 * HARD_REGNO_NREGS (FIRST_ALTIVEC_REGNO, mode);
16335 return 4 + rs6000_register_move_cost (mode, class, GENERAL_REGS);
16338 /* Return an RTX representing where to find the function value of a
16339 function returning MODE. */
16341 rs6000_complex_function_value (enum machine_mode mode)
16343 unsigned int regno;
16345 enum machine_mode inner = GET_MODE_INNER (mode);
16346 unsigned int inner_bytes = GET_MODE_SIZE (inner);
16348 if (FLOAT_MODE_P (mode))
16349 regno = FP_ARG_RETURN;
16352 regno = GP_ARG_RETURN;
16354 /* 32-bit is OK since it'll go in r3/r4. */
16355 if (TARGET_32BIT && inner_bytes >= 4)
16356 return gen_rtx_REG (mode, regno);
16359 if (inner_bytes >= 8)
16360 return gen_rtx_REG (mode, regno);
16362 r1 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno),
16364 r2 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno + 1),
16365 GEN_INT (inner_bytes));
16366 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
16369 /* Define how to find the value returned by a function.
16370 VALTYPE is the data type of the value (as a tree).
16371 If the precise function being called is known, FUNC is its FUNCTION_DECL;
16372 otherwise, FUNC is 0.
16374 On the SPE, both FPs and vectors are returned in r3.
16376 On RS/6000 an integer value is in r3 and a floating-point value is in
16377 fp1, unless -msoft-float. */
16380 rs6000_function_value (tree valtype, tree func ATTRIBUTE_UNUSED)
16382 enum machine_mode mode;
16383 unsigned int regno;
16385 if (TARGET_32BIT && TARGET_POWERPC64 && TYPE_MODE (valtype) == DImode)
16387 /* Long long return value need be split in -mpowerpc64, 32bit ABI. */
16388 return gen_rtx_PARALLEL (DImode,
16390 gen_rtx_EXPR_LIST (VOIDmode,
16391 gen_rtx_REG (SImode, GP_ARG_RETURN),
16393 gen_rtx_EXPR_LIST (VOIDmode,
16394 gen_rtx_REG (SImode,
16395 GP_ARG_RETURN + 1),
16399 if ((INTEGRAL_TYPE_P (valtype)
16400 && TYPE_PRECISION (valtype) < BITS_PER_WORD)
16401 || POINTER_TYPE_P (valtype))
16402 mode = TARGET_32BIT ? SImode : DImode;
16404 mode = TYPE_MODE (valtype);
16406 if (TREE_CODE (valtype) == REAL_TYPE && TARGET_HARD_FLOAT && TARGET_FPRS)
16407 regno = FP_ARG_RETURN;
16408 else if (TREE_CODE (valtype) == COMPLEX_TYPE
16409 && TARGET_HARD_FLOAT
16410 && targetm.calls.split_complex_arg)
16411 return rs6000_complex_function_value (mode);
16412 else if (TREE_CODE (valtype) == VECTOR_TYPE
16413 && TARGET_ALTIVEC && TARGET_ALTIVEC_ABI)
16414 regno = ALTIVEC_ARG_RETURN;
16416 regno = GP_ARG_RETURN;
16418 return gen_rtx_REG (mode, regno);
16421 /* Define how to find the value returned by a library function
16422 assuming the value has mode MODE. */
16424 rs6000_libcall_value (enum machine_mode mode)
16426 unsigned int regno;
16428 if (GET_MODE_CLASS (mode) == MODE_FLOAT
16429 && TARGET_HARD_FLOAT && TARGET_FPRS)
16430 regno = FP_ARG_RETURN;
16431 else if (ALTIVEC_VECTOR_MODE (mode)
16432 && TARGET_ALTIVEC && TARGET_ALTIVEC_ABI)
16433 regno = ALTIVEC_ARG_RETURN;
16434 else if (COMPLEX_MODE_P (mode) && targetm.calls.split_complex_arg)
16435 return rs6000_complex_function_value (mode);
16437 regno = GP_ARG_RETURN;
16439 return gen_rtx_REG (mode, regno);
16442 /* Define the offset between two registers, FROM to be eliminated and its
16443 replacement TO, at the start of a routine. */
16445 rs6000_initial_elimination_offset (int from, int to)
16447 rs6000_stack_t *info = rs6000_stack_info ();
16448 HOST_WIDE_INT offset;
16450 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
16451 offset = info->push_p ? 0 : -info->total_size;
16452 else if (from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
16453 offset = info->total_size;
16454 else if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
16455 offset = info->push_p ? info->total_size : 0;
16456 else if (from == RS6000_PIC_OFFSET_TABLE_REGNUM)
16464 /* Return true if TYPE is of type __ev64_opaque__. */
16467 is_ev64_opaque_type (tree type)
16470 && (type == opaque_V2SI_type_node
16471 || type == opaque_V2SF_type_node
16472 || type == opaque_p_V2SI_type_node));
16476 rs6000_dwarf_register_span (rtx reg)
16480 if (!TARGET_SPE || !SPE_VECTOR_MODE (GET_MODE (reg)))
16483 regno = REGNO (reg);
16485 /* The duality of the SPE register size wreaks all kinds of havoc.
16486 This is a way of distinguishing r0 in 32-bits from r0 in
16489 gen_rtx_PARALLEL (VOIDmode,
16492 gen_rtx_REG (SImode, regno + 1200),
16493 gen_rtx_REG (SImode, regno))
16495 gen_rtx_REG (SImode, regno),
16496 gen_rtx_REG (SImode, regno + 1200)));
16499 /* Map internal gcc register numbers to DWARF2 register numbers. */
16502 rs6000_dbx_register_number (unsigned int regno)
16504 if (regno <= 63 || write_symbols != DWARF2_DEBUG)
16506 if (regno == MQ_REGNO)
16508 if (regno == LINK_REGISTER_REGNUM)
16510 if (regno == COUNT_REGISTER_REGNUM)
16512 if (CR_REGNO_P (regno))
16513 return regno - CR0_REGNO + 86;
16514 if (regno == XER_REGNO)
16516 if (ALTIVEC_REGNO_P (regno))
16517 return regno - FIRST_ALTIVEC_REGNO + 1124;
16518 if (regno == VRSAVE_REGNO)
16520 if (regno == VSCR_REGNO)
16522 if (regno == SPE_ACC_REGNO)
16524 if (regno == SPEFSCR_REGNO)
16526 /* SPE high reg number. We get these values of regno from
16527 rs6000_dwarf_register_span. */
16528 if (regno >= 1200 && regno < 1232)
16534 #include "gt-rs6000.h"